New advisories work and feature gaps work

This commit is contained in:
master
2026-01-14 18:39:19 +02:00
parent 95d5898650
commit 15aeac8e8b
148 changed files with 16731 additions and 554 deletions

View File

@@ -0,0 +1,322 @@
// <copyright file="EvidenceIntegrityCheckTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// Sprint: SPRINT_20260112_004_LB_doctor_evidence_integrity_checks (DOCHECK-002)
// Description: Tests for EvidenceIntegrityCheck
// </copyright>
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Doctor.Models;
using StellaOps.Doctor.Plugins;
using StellaOps.Doctor.Plugins.Security.Checks;
using Xunit;
namespace StellaOps.Doctor.Plugins.Security.Tests.Checks;
/// <summary>
/// Unit tests for <c>EvidenceIntegrityCheck</c>: check metadata, <c>CanRun</c> gating on
/// configuration, and <c>RunAsync</c> behavior against DSSE envelopes, evidence bundles,
/// and Rekor receipts stored in a local evidence directory.
/// </summary>
[Trait("Category", "Unit")]
public sealed class EvidenceIntegrityCheckTests : IDisposable
{
    // Per-test-class isolated evidence directory under the system temp path.
    private readonly string _tempDir;
    private readonly EvidenceIntegrityCheck _check;

    public EvidenceIntegrityCheckTests()
    {
        _tempDir = Path.Combine(Path.GetTempPath(), $"evidence-test-{Guid.NewGuid():N}");
        Directory.CreateDirectory(_tempDir);
        _check = new EvidenceIntegrityCheck();
    }

    public void Dispose()
    {
        // Best-effort cleanup: never throw from Dispose — a transiently locked
        // temp file (antivirus, indexer) must not mask the real test outcome.
        try
        {
            if (Directory.Exists(_tempDir))
            {
                Directory.Delete(_tempDir, recursive: true);
            }
        }
        catch (IOException)
        {
            // Ignore: temp directory will be reclaimed by the OS eventually.
        }
        catch (UnauthorizedAccessException)
        {
            // Ignore: same rationale as above.
        }
    }

    [Fact]
    public void CheckId_IsCorrect()
    {
        Assert.Equal("check.security.evidence.integrity", _check.CheckId);
    }

    [Fact]
    public void Tags_IncludesOffline()
    {
        Assert.Contains("offline", _check.Tags);
        Assert.Contains("evidence", _check.Tags);
        Assert.Contains("dsse", _check.Tags);
    }

    [Fact]
    public void CanRun_ReturnsFalse_WhenNoPathConfigured()
    {
        var context = CreateContext(new Dictionary<string, string?>());
        Assert.False(_check.CanRun(context));
    }

    [Fact]
    public void CanRun_ReturnsTrue_WhenPathConfigured()
    {
        var context = CreateContext(new Dictionary<string, string?>
        {
            ["EvidenceLocker:LocalPath"] = _tempDir
        });
        Assert.True(_check.CanRun(context));
    }

    [Fact]
    public async Task RunAsync_Skips_WhenPathNotConfigured()
    {
        var context = CreateContext(new Dictionary<string, string?>());
        var result = await _check.RunAsync(context, CancellationToken.None);
        Assert.Equal(DoctorSeverity.Skip, result.Severity);
        Assert.Contains("not configured", result.Diagnosis);
    }

    [Fact]
    public async Task RunAsync_Warns_WhenDirectoryDoesNotExist()
    {
        var nonExistentPath = Path.Combine(_tempDir, "nonexistent");
        var context = CreateContext(new Dictionary<string, string?>
        {
            ["EvidenceLocker:LocalPath"] = nonExistentPath
        });
        var result = await _check.RunAsync(context, CancellationToken.None);
        Assert.Equal(DoctorSeverity.Warn, result.Severity);
        Assert.Contains("does not exist", result.Diagnosis);
    }

    [Fact]
    public async Task RunAsync_Passes_WhenDirectoryIsEmpty()
    {
        var context = CreateContext(new Dictionary<string, string?>
        {
            ["EvidenceLocker:LocalPath"] = _tempDir
        });
        var result = await _check.RunAsync(context, CancellationToken.None);
        Assert.Equal(DoctorSeverity.Pass, result.Severity);
        Assert.Contains("empty", result.Diagnosis);
    }

    [Fact]
    public async Task RunAsync_Passes_WithValidDsseEnvelope()
    {
        var envelope = CreateValidDsseEnvelope();
        await File.WriteAllTextAsync(Path.Combine(_tempDir, "test.dsse"), envelope);
        var context = CreateContext(new Dictionary<string, string?>
        {
            ["EvidenceLocker:LocalPath"] = _tempDir
        });
        var result = await _check.RunAsync(context, CancellationToken.None);
        Assert.Equal(DoctorSeverity.Pass, result.Severity);
        Assert.Contains("1 valid", result.Diagnosis);
    }

    [Fact]
    public async Task RunAsync_Fails_WithInvalidDsseEnvelope_EmptyPayload()
    {
        // An empty payload makes the DSSE envelope structurally invalid.
        var envelope = JsonSerializer.Serialize(new
        {
            payloadType = "application/vnd.stellaops+json",
            payload = "",
            signatures = new[] { new { keyid = "key1", sig = "c2lnbmF0dXJl" } }
        });
        await File.WriteAllTextAsync(Path.Combine(_tempDir, "invalid.dsse"), envelope);
        var context = CreateContext(new Dictionary<string, string?>
        {
            ["EvidenceLocker:LocalPath"] = _tempDir
        });
        var result = await _check.RunAsync(context, CancellationToken.None);
        Assert.Equal(DoctorSeverity.Fail, result.Severity);
        Assert.Contains("invalid", result.Diagnosis.ToLowerInvariant());
    }

    [Fact]
    public async Task RunAsync_Fails_WithInvalidDsseEnvelope_NoSignatures()
    {
        // A DSSE envelope with an empty signature list must fail the check.
        var envelope = JsonSerializer.Serialize(new
        {
            payloadType = "application/vnd.stellaops+json",
            payload = Convert.ToBase64String(Encoding.UTF8.GetBytes("{\"test\":1}")),
            signatures = Array.Empty<object>()
        });
        await File.WriteAllTextAsync(Path.Combine(_tempDir, "nosig.dsse"), envelope);
        var context = CreateContext(new Dictionary<string, string?>
        {
            ["EvidenceLocker:LocalPath"] = _tempDir
        });
        var result = await _check.RunAsync(context, CancellationToken.None);
        Assert.Equal(DoctorSeverity.Fail, result.Severity);
    }

    [Fact]
    public async Task RunAsync_Passes_WithValidEvidenceBundle()
    {
        var bundle = JsonSerializer.Serialize(new
        {
            bundleId = "bundle-123",
            manifest = new { version = "1.0.0", artifacts = new[] { "sbom.json" } },
            contentDigest = "sha256:abc123"
        });
        await File.WriteAllTextAsync(Path.Combine(_tempDir, "bundle.json"), bundle);
        var context = CreateContext(new Dictionary<string, string?>
        {
            ["EvidenceLocker:LocalPath"] = _tempDir
        });
        var result = await _check.RunAsync(context, CancellationToken.None);
        Assert.Equal(DoctorSeverity.Pass, result.Severity);
    }

    [Fact]
    public async Task RunAsync_Fails_WithInvalidRekorReceipt()
    {
        var bundle = JsonSerializer.Serialize(new
        {
            bundleId = "bundle-123",
            manifest = new { version = "1.0.0" },
            rekorReceipt = new { uuid = "", logIndex = -1 } // Invalid
        });
        await File.WriteAllTextAsync(Path.Combine(_tempDir, "bad-rekor.json"), bundle);
        var context = CreateContext(new Dictionary<string, string?>
        {
            ["EvidenceLocker:LocalPath"] = _tempDir
        });
        var result = await _check.RunAsync(context, CancellationToken.None);
        Assert.Equal(DoctorSeverity.Fail, result.Severity);
    }

    [Fact]
    public async Task RunAsync_Passes_WithValidRekorReceipt()
    {
        var bundle = JsonSerializer.Serialize(new
        {
            bundleId = "bundle-123",
            manifest = new { version = "1.0.0" },
            rekorReceipt = new
            {
                uuid = "abc123def456",
                logIndex = 12345,
                logId = "0x1234",
                inclusionProof = new
                {
                    hashes = new[] { "hash1", "hash2" },
                    treeSize = 100000,
                    rootHash = "roothash"
                }
            }
        });
        await File.WriteAllTextAsync(Path.Combine(_tempDir, "good-rekor.json"), bundle);
        var context = CreateContext(new Dictionary<string, string?>
        {
            ["EvidenceLocker:LocalPath"] = _tempDir
        });
        var result = await _check.RunAsync(context, CancellationToken.None);
        Assert.Equal(DoctorSeverity.Pass, result.Severity);
    }

    [Fact]
    public async Task RunAsync_IsDeterministic()
    {
        // Two runs over identical inputs must produce identical results.
        var envelope = CreateValidDsseEnvelope();
        await File.WriteAllTextAsync(Path.Combine(_tempDir, "test.dsse"), envelope);
        var context = CreateContext(new Dictionary<string, string?>
        {
            ["EvidenceLocker:LocalPath"] = _tempDir
        });
        var result1 = await _check.RunAsync(context, CancellationToken.None);
        var result2 = await _check.RunAsync(context, CancellationToken.None);
        Assert.Equal(result1.Severity, result2.Severity);
        Assert.Equal(result1.Diagnosis, result2.Diagnosis);
    }

    [Fact]
    public async Task RunAsync_RespectsCancellation()
    {
        // Create many files to increase chance of hitting cancellation
        for (int i = 0; i < 50; i++)
        {
            await File.WriteAllTextAsync(
                Path.Combine(_tempDir, $"file{i}.json"),
                CreateValidDsseEnvelope());
        }
        var context = CreateContext(new Dictionary<string, string?>
        {
            ["EvidenceLocker:LocalPath"] = _tempDir
        });
        using var cts = new CancellationTokenSource();
        cts.Cancel();
        await Assert.ThrowsAsync<OperationCanceledException>(
            () => _check.RunAsync(context, cts.Token));
    }

    /// <summary>Builds a structurally valid DSSE envelope with one signature.</summary>
    private static string CreateValidDsseEnvelope()
    {
        var payload = JsonSerializer.Serialize(new { test = "data", timestamp = "2026-01-14T00:00:00Z" });
        var payloadBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(payload));
        return JsonSerializer.Serialize(new
        {
            payloadType = "application/vnd.stellaops.evidence+json",
            payload = payloadBase64,
            signatures = new[]
            {
                new { keyid = "test-key-1", sig = Convert.ToBase64String(Encoding.UTF8.GetBytes("signature")) }
            }
        });
    }

    /// <summary>Builds a plugin context backed by an in-memory configuration.</summary>
    private DoctorPluginContext CreateContext(Dictionary<string, string?> configValues)
    {
        var config = new ConfigurationBuilder()
            .AddInMemoryCollection(configValues)
            .Build();
        return new DoctorPluginContext
        {
            Services = new EmptyServiceProvider(),
            Configuration = config,
            TimeProvider = TimeProvider.System,
            Logger = NullLogger.Instance,
            EnvironmentName = "Test",
            PluginConfig = config.GetSection("Doctor:Plugins:Security")
        };
    }

    // Minimal IServiceProvider that resolves nothing; the check under test
    // must not require any services from the container.
    private sealed class EmptyServiceProvider : IServiceProvider
    {
        public object? GetService(Type serviceType) => null;
    }
}

View File

@@ -0,0 +1,260 @@
// <copyright file="EvidenceCardServiceTests.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the AGPL-3.0-or-later.
// Sprint: SPRINT_20260112_004_LB_evidence_card_core (EVPCARD-LB-004)
// Description: Tests for EvidenceCardService
// </copyright>
using System.Collections.Immutable;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Determinism;
using StellaOps.Evidence.Pack;
using StellaOps.Evidence.Pack.Models;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Evidence.Pack.Tests;
/// <summary>
/// Unit tests for <c>EvidenceCardService</c>: card creation, export formats
/// (JSON, compact, canonical), and verification including Rekor receipts.
/// Time and GUID generation are injected so every test is fully deterministic.
/// </summary>
public sealed class EvidenceCardServiceTests
{
    // Fixed GUID/time sources keep card IDs and timestamps reproducible.
    private readonly FixedGuidProvider _guidProvider = new(Guid.Parse("11111111-1111-1111-1111-111111111111"));
    private readonly TestTimeProvider _timeProvider = new(new DateTimeOffset(2026, 1, 14, 10, 0, 0, TimeSpan.Zero));

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task CreateCardAsync_WithValidRequest_ReturnsCard()
    {
        var service = CreateService();
        var request = new EvidenceCardRequest
        {
            FindingId = "CVE-2024-12345",
            ArtifactDigest = "sha256:abc123",
            ComponentPurl = "pkg:npm/lodash@4.17.21",
            TenantId = "tenant-1"
        };
        var card = await service.CreateCardAsync(request);
        Assert.NotNull(card);
        // Card ID is the injected GUID in "N" (no-dash) format.
        Assert.Equal("11111111111111111111111111111111", card.CardId);
        Assert.Equal("CVE-2024-12345", card.Subject.FindingId);
        Assert.Equal("sha256:abc123", card.Subject.ArtifactDigest);
        Assert.NotNull(card.Envelope);
        Assert.NotNull(card.SbomExcerpt);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task CreateCardAsync_SetsGeneratedAtFromTimeProvider()
    {
        var service = CreateService();
        var request = new EvidenceCardRequest
        {
            FindingId = "CVE-2024-12345",
            ArtifactDigest = "sha256:abc123",
            TenantId = "tenant-1"
        };
        var card = await service.CreateCardAsync(request);
        Assert.Equal(_timeProvider.GetUtcNow(), card.GeneratedAt);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task CreateCardAsync_WithComponentPurl_ExtractsComponentInfo()
    {
        var service = CreateService();
        var request = new EvidenceCardRequest
        {
            FindingId = "CVE-2024-12345",
            ArtifactDigest = "sha256:abc123",
            ComponentPurl = "pkg:npm/lodash@4.17.21",
            TenantId = "tenant-1"
        };
        var card = await service.CreateCardAsync(request);
        Assert.Single(card.SbomExcerpt.Components);
        Assert.Equal("pkg:npm/lodash@4.17.21", card.SbomExcerpt.Components[0].Purl);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task ExportCardAsync_Json_ReturnsValidJson()
    {
        var service = CreateService();
        var card = await CreateTestCard(service);
        var export = await service.ExportCardAsync(card, EvidenceCardExportFormat.Json);
        Assert.Equal("application/json", export.ContentType);
        Assert.StartsWith("sha256:", export.ContentDigest);
        var json = Encoding.UTF8.GetString(export.Content);
        using var document = JsonDocument.Parse(json);
        Assert.Equal(JsonValueKind.Object, document.RootElement.ValueKind);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task ExportCardAsync_CompactJson_IsSmallerThanIndented()
    {
        var service = CreateService();
        var card = await CreateTestCard(service);
        var jsonExport = await service.ExportCardAsync(card, EvidenceCardExportFormat.Json);
        var compactExport = await service.ExportCardAsync(card, EvidenceCardExportFormat.CompactJson);
        Assert.True(compactExport.Content.Length < jsonExport.Content.Length);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task ExportCardAsync_CanonicalJson_IsDeterministic()
    {
        // Two independent service instances must produce byte-identical
        // canonical exports for equivalent cards.
        var service1 = CreateService();
        var service2 = CreateService();
        var card1 = await CreateTestCard(service1);
        var card2 = await CreateTestCard(service2);
        var export1 = await service1.ExportCardAsync(card1, EvidenceCardExportFormat.CanonicalJson);
        var export2 = await service2.ExportCardAsync(card2, EvidenceCardExportFormat.CanonicalJson);
        Assert.Equal(export1.ContentDigest, export2.ContentDigest);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task VerifyCardAsync_ValidCard_ReturnsValid()
    {
        var service = CreateService();
        var card = await CreateTestCard(service);
        var result = await service.VerifyCardAsync(card);
        Assert.True(result.Valid);
        Assert.True(result.SignatureValid);
        Assert.True(result.SbomDigestValid);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task VerifyCardAsync_WithMissingReceipt_AllowedByDefault()
    {
        var service = CreateService();
        var card = await CreateTestCard(service);
        var result = await service.VerifyCardAsync(card, new EvidenceCardVerificationOptions
        {
            AllowMissingReceipt = true
        });
        Assert.True(result.Valid);
        // Receipt validity is "not evaluated" (null) when no receipt exists.
        Assert.Null(result.RekorReceiptValid);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task VerifyCardAsync_WithMissingReceipt_FailsWhenRequired()
    {
        var service = CreateService();
        var card = await CreateTestCard(service);
        var result = await service.VerifyCardAsync(card, new EvidenceCardVerificationOptions
        {
            AllowMissingReceipt = false
        });
        Assert.False(result.Valid);
        Assert.Contains(result.Issues, i => i.Contains("Rekor receipt is required"));
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task VerifyCardAsync_WithValidRekorReceipt_ReturnsTrue()
    {
        var service = CreateService();
        var card = await CreateTestCard(service);
        // Add a valid-looking Rekor receipt. IntegratedTime is derived from the
        // injected fixed clock (not DateTimeOffset.UtcNow) so the test stays
        // deterministic across runs, matching the rest of this suite.
        var cardWithReceipt = card with
        {
            RekorReceipt = new RekorReceiptMetadata
            {
                Uuid = "abc123def456",
                LogIndex = 12345,
                LogId = "0x1234",
                LogUrl = "https://rekor.sigstore.dev",
                IntegratedTime = _timeProvider.GetUtcNow().ToUnixTimeSeconds(),
                RootHash = "sha256:root123",
                TreeSize = 100000,
                InclusionProofHashes = ImmutableArray.Create("hash1", "hash2"),
                CheckpointNote = "rekor.sigstore.dev - 12345\n100000\nroot123\n",
                CheckpointSignatures = ImmutableArray.Create(new CheckpointSignature
                {
                    KeyId = "key1",
                    Signature = "c2lnbmF0dXJl"
                })
            }
        };
        var result = await service.VerifyCardAsync(cardWithReceipt);
        Assert.True(result.Valid);
        Assert.True(result.RekorReceiptValid);
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task ExportCardAsync_SetsCorrectFileName()
    {
        var service = CreateService();
        var card = await CreateTestCard(service);
        var export = await service.ExportCardAsync(card, EvidenceCardExportFormat.Json);
        Assert.Equal($"evidence-card-{card.CardId}.json", export.FileName);
    }

    /// <summary>Creates the service under test with deterministic dependencies.</summary>
    private EvidenceCardService CreateService()
    {
        return new EvidenceCardService(
            _timeProvider,
            _guidProvider,
            NullLogger<EvidenceCardService>.Instance);
    }

    /// <summary>Creates a representative card used by export/verify tests.</summary>
    private async Task<EvidenceCard> CreateTestCard(EvidenceCardService service)
    {
        var request = new EvidenceCardRequest
        {
            FindingId = "CVE-2024-12345",
            ArtifactDigest = "sha256:abc123",
            ComponentPurl = "pkg:npm/lodash@4.17.21",
            TenantId = "tenant-1"
        };
        return await service.CreateCardAsync(request);
    }

    // IGuidProvider that always yields the same GUID, making card IDs stable.
    private sealed class FixedGuidProvider : IGuidProvider
    {
        private readonly Guid _guid;
        public FixedGuidProvider(Guid guid) => _guid = guid;
        public Guid NewGuid() => _guid;
    }

    // TimeProvider frozen at a fixed instant, making timestamps stable.
    private sealed class TestTimeProvider : TimeProvider
    {
        private readonly DateTimeOffset _fixedTime;
        public TestTimeProvider(DateTimeOffset fixedTime) => _fixedTime = fixedTime;
        public override DateTimeOffset GetUtcNow() => _fixedTime;
    }
}

View File

@@ -0,0 +1,176 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2025 StellaOps
// Sprint: SPRINT_20260112_004_SCANNER_path_witness_nodehash (PW-SCN-001)
// Description: Tests for NodeHashRecipe
using Xunit;
namespace StellaOps.Reachability.Core.Tests;
/// <summary>
/// Unit tests for <c>NodeHashRecipe</c>: hash computation over (purl, symbol FQN)
/// pairs, input normalization, batch hashing, hash validation, and argument guards.
/// </summary>
[Trait("Category", "Unit")]
public sealed class NodeHashRecipeTests
{
    [Fact]
    public void ComputeHash_WithValidInputs_ReturnsConsistentHash()
    {
        var packagePurl = "pkg:npm/lodash@4.17.21";
        var fqn = "lodash.merge(object, object)";

        var first = NodeHashRecipe.ComputeHash(packagePurl, fqn);
        var second = NodeHashRecipe.ComputeHash(packagePurl, fqn);

        Assert.Equal(first, second);
        Assert.StartsWith("sha256:", first);
        Assert.Equal(71, first.Length); // "sha256:" prefix (7 chars) + 64 hex chars
    }

    [Fact]
    public void ComputeHash_WithSymbolRef_MatchesManualComputation()
    {
        var reference = new SymbolRef
        {
            Purl = "pkg:npm/lodash@4.17.21",
            Namespace = "lodash",
            Type = "_",
            Method = "merge",
            Signature = "(object, object)"
        };

        // Hashing a SymbolRef must be equivalent to hashing its purl + display name.
        var fromRef = NodeHashRecipe.ComputeHash(reference);
        var fromParts = NodeHashRecipe.ComputeHash(reference.Purl, reference.DisplayName);

        Assert.Equal(fromParts, fromRef);
    }

    [Fact]
    public void ComputeHash_DifferentInputs_ProducesDifferentHashes()
    {
        // Varying either the purl or the symbol must change the hash.
        var baseline = NodeHashRecipe.ComputeHash("pkg:npm/lodash@4.17.21", "lodash.merge(object)");
        var otherVersion = NodeHashRecipe.ComputeHash("pkg:npm/lodash@4.17.20", "lodash.merge(object)");
        var otherSymbol = NodeHashRecipe.ComputeHash("pkg:npm/lodash@4.17.21", "lodash.clone(object)");

        Assert.NotEqual(baseline, otherVersion);
        Assert.NotEqual(baseline, otherSymbol);
        Assert.NotEqual(otherVersion, otherSymbol);
    }

    [Theory]
    [InlineData("pkg:npm/lodash@4.17.21", "pkg:npm/lodash@4.17.21")]
    [InlineData("PKG:NPM/lodash@4.17.21", "pkg:npm/lodash@4.17.21")]
    [InlineData("pkg:NPM/lodash@4.17.21", "pkg:npm/lodash@4.17.21")]
    [InlineData("pkg:npm/lodash@4.17.21/", "pkg:npm/lodash@4.17.21")]
    public void NormalizePurl_NormalizesCorrectly(string input, string expected)
    {
        Assert.Equal(expected, NodeHashRecipe.NormalizePurl(input));
    }

    [Fact]
    public void NormalizePurl_SortsQualifiers()
    {
        var normalized = NodeHashRecipe.NormalizePurl("pkg:npm/foo@1.0?os=linux&arch=x64");

        Assert.Equal("pkg:npm/foo@1.0?arch=x64&os=linux", normalized);
    }

    [Theory]
    [InlineData("lodash.merge(object)", "lodash.merge(object)")]
    [InlineData("lodash.merge( object )", "lodash.merge(object)")]
    [InlineData("lodash.merge(object,object)", "lodash.merge(object, object)")]
    [InlineData("lodash..merge(object)", "lodash.merge(object)")]
    [InlineData(" lodash.merge(object) ", "lodash.merge(object)")]
    public void NormalizeSymbolFqn_NormalizesCorrectly(string input, string expected)
    {
        Assert.Equal(expected, NodeHashRecipe.NormalizeSymbolFqn(input));
    }

    [Fact]
    public void ComputeHashes_ReturnsSortedDistinctHashes()
    {
        var symbols = new[]
        {
            new SymbolRef { Purl = "pkg:npm/b@1.0", Namespace = "b", Type = "_", Method = "foo" },
            new SymbolRef { Purl = "pkg:npm/a@1.0", Namespace = "a", Type = "_", Method = "bar" },
            new SymbolRef { Purl = "pkg:npm/b@1.0", Namespace = "b", Type = "_", Method = "foo" }, // Duplicate
        };

        var hashes = NodeHashRecipe.ComputeHashes(symbols);

        // Duplicate collapses to one entry; output is in ascending ordinal order.
        Assert.Equal(2, hashes.Count);
        Assert.True(string.Compare(hashes[0], hashes[1], StringComparison.Ordinal) < 0);
    }

    [Theory]
    [InlineData("sha256:abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890", true)]
    [InlineData("sha256:ABCDEF1234567890ABCDEF1234567890ABCDEF1234567890ABCDEF1234567890", true)]
    [InlineData("sha256:abc", false)]
    [InlineData("md5:abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890", false)]
    [InlineData("", false)]
    [InlineData(null, false)]
    public void IsValidHash_ValidatesCorrectly(string? hash, bool expected)
    {
        Assert.Equal(expected, NodeHashRecipe.IsValidHash(hash!));
    }

    [Fact]
    public void GetHexPart_ExtractsCorrectly()
    {
        Assert.Equal("abcdef1234567890", NodeHashRecipe.GetHexPart("sha256:abcdef1234567890"));
    }

    [Fact]
    public void GetHexPart_WithoutPrefix_ReturnsInput()
    {
        // A value with no algorithm prefix passes through unchanged.
        const string bareHex = "abcdef1234567890";

        Assert.Equal(bareHex, NodeHashRecipe.GetHexPart(bareHex));
    }

    [Fact]
    public void ComputeHash_IsDeterministic_AcrossMultipleCalls()
    {
        const string packagePurl = "pkg:pypi/requests@2.28.0";
        const string fqn = "requests.get(url, params)";

        // 100 invocations must collapse to a single distinct value.
        var distinct = Enumerable.Range(0, 100)
            .Select(_ => NodeHashRecipe.ComputeHash(packagePurl, fqn))
            .Distinct()
            .ToList();

        Assert.Single(distinct);
    }

    [Fact]
    public void ComputeHash_ThrowsOnNullPurl()
    {
        Assert.Throws<ArgumentNullException>(() =>
            NodeHashRecipe.ComputeHash(null!, "symbol"));
    }

    [Fact]
    public void ComputeHash_ThrowsOnNullSymbol()
    {
        Assert.Throws<ArgumentNullException>(() =>
            NodeHashRecipe.ComputeHash("pkg:npm/foo@1.0", null!));
    }

    [Fact]
    public void ComputeHash_ThrowsOnEmptyPurl()
    {
        Assert.Throws<ArgumentException>(() =>
            NodeHashRecipe.ComputeHash("", "symbol"));
    }

    [Fact]
    public void ComputeHash_ThrowsOnEmptySymbol()
    {
        Assert.Throws<ArgumentException>(() =>
            NodeHashRecipe.ComputeHash("pkg:npm/foo@1.0", ""));
    }
}

View File

@@ -0,0 +1,206 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2025 StellaOps
// Sprint: SPRINT_20260112_004_SCANNER_path_witness_nodehash (PW-SCN-001)
// Description: Tests for PathHashRecipe
using Xunit;
namespace StellaOps.Reachability.Core.Tests;
/// <summary>
/// Unit tests for <c>PathHashRecipe</c>: order-sensitive path hashing over node
/// hashes, top-K summaries, combined (order-insensitive) hashes, fingerprints,
/// validation, and argument guards.
/// </summary>
[Trait("Category", "Unit")]
public sealed class PathHashRecipeTests
{
    [Fact]
    public void ComputeHash_WithNodeHashes_ReturnsConsistentHash()
    {
        var chain = new[]
        {
            "sha256:aaa1111111111111111111111111111111111111111111111111111111111111",
            "sha256:bbb2222222222222222222222222222222222222222222222222222222222222",
            "sha256:ccc3333333333333333333333333333333333333333333333333333333333333"
        };

        var first = PathHashRecipe.ComputeHash(chain);
        var second = PathHashRecipe.ComputeHash(chain);

        Assert.Equal(first, second);
        Assert.StartsWith("sha256:", first);
    }

    [Fact]
    public void ComputeHash_DifferentOrder_ProducesDifferentHash()
    {
        // A path hash is order-sensitive: reversing the chain changes the result.
        var forward = new[] { "sha256:aaa", "sha256:bbb", "sha256:ccc" };
        var reversed = new[] { "sha256:ccc", "sha256:bbb", "sha256:aaa" };

        Assert.NotEqual(
            PathHashRecipe.ComputeHash(forward),
            PathHashRecipe.ComputeHash(reversed));
    }

    [Fact]
    public void ComputeHash_WithSymbolRefs_Works()
    {
        var callPath = new[]
        {
            new SymbolRef { Purl = "pkg:npm/a@1.0", Namespace = "a", Type = "_", Method = "entry" },
            new SymbolRef { Purl = "pkg:npm/b@1.0", Namespace = "b", Type = "B", Method = "process" },
            new SymbolRef { Purl = "pkg:npm/c@1.0", Namespace = "c", Type = "C", Method = "vulnerable" }
        };

        var hash = PathHashRecipe.ComputeHash(callPath);

        Assert.StartsWith("sha256:", hash);
        Assert.Equal(71, hash.Length);
    }

    [Fact]
    public void ComputeWithTopK_ReturnsCorrectCount()
    {
        var allNodes = Enumerable.Range(1, 20)
            .Select(i => $"sha256:{i:d64}")
            .ToList();

        var (pathHash, topK) = PathHashRecipe.ComputeWithTopK(allNodes, topK: 10);

        Assert.StartsWith("sha256:", pathHash);
        Assert.True(topK.Count <= 10);
    }

    [Fact]
    public void ComputeWithTopK_IncludesSourceAndSink()
    {
        var allNodes = Enumerable.Range(1, 20)
            .Select(i => $"sha256:{i:d64}")
            .ToList();

        var (_, topK) = PathHashRecipe.ComputeWithTopK(allNodes, topK: 6);

        // Should include first few and last few
        Assert.Contains(allNodes[0], topK);
        Assert.Contains(allNodes[^1], topK);
    }

    [Fact]
    public void ComputeHashes_ReturnsSortedDistinctHashes()
    {
        var paths = new[]
        {
            new[] { "sha256:bbb", "sha256:ccc" },
            new[] { "sha256:aaa", "sha256:ddd" },
            new[] { "sha256:bbb", "sha256:ccc" } // Duplicate
        };

        var hashes = PathHashRecipe.ComputeHashes(paths);

        // Duplicate path collapses; output is in ascending ordinal order.
        Assert.Equal(2, hashes.Count);
        Assert.True(string.Compare(hashes[0], hashes[1], StringComparison.Ordinal) < 0);
    }

    [Fact]
    public void ComputeCombinedHash_CombinesMultiplePaths()
    {
        var pathHashes = new[]
        {
            "sha256:path1111111111111111111111111111111111111111111111111111111111",
            "sha256:path2222222222222222222222222222222222222222222222222222222222"
        };

        Assert.StartsWith("sha256:", PathHashRecipe.ComputeCombinedHash(pathHashes));
    }

    [Fact]
    public void ComputeCombinedHash_IsDeterministic_RegardlessOfOrder()
    {
        var ordering1 = new[] { "sha256:aaa", "sha256:bbb", "sha256:ccc" };
        var ordering2 = new[] { "sha256:ccc", "sha256:aaa", "sha256:bbb" };

        // Unlike a single path hash, the combined hash is order-insensitive.
        Assert.Equal(
            PathHashRecipe.ComputeCombinedHash(ordering1),
            PathHashRecipe.ComputeCombinedHash(ordering2));
    }

    [Fact]
    public void CreateFingerprint_ReturnsCompleteFingerprint()
    {
        var chain = new[]
        {
            "sha256:source11111111111111111111111111111111111111111111111111111111",
            "sha256:middle22222222222222222222222222222222222222222222222222222222",
            "sha256:sink333333333333333333333333333333333333333333333333333333333"
        };

        var fingerprint = PathHashRecipe.CreateFingerprint(chain, topK: 5);

        Assert.StartsWith("sha256:", fingerprint.PathHash);
        Assert.Equal(3, fingerprint.NodeCount);
        Assert.Equal(chain[0], fingerprint.SourceNodeHash);
        Assert.Equal(chain[2], fingerprint.SinkNodeHash);
        Assert.True(fingerprint.TopKNodeHashes.Count <= 5);
    }

    [Fact]
    public void IsValidHash_DelegatesToNodeHashRecipe()
    {
        Assert.True(PathHashRecipe.IsValidHash(
            "sha256:abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890"));
        Assert.False(PathHashRecipe.IsValidHash("invalid"));
    }

    [Fact]
    public void ComputeHash_ThrowsOnEmptyPath()
    {
        Assert.Throws<ArgumentException>(() =>
            PathHashRecipe.ComputeHash(Array.Empty<string>()));
    }

    [Fact]
    public void ComputeHash_ThrowsOnNullPath()
    {
        Assert.Throws<ArgumentNullException>(() =>
            PathHashRecipe.ComputeHash((IEnumerable<string>)null!));
    }

    [Fact]
    public void ComputeWithTopK_ThrowsOnInvalidTopK()
    {
        var singleHash = new[] { "sha256:aaa" };

        Assert.Throws<ArgumentOutOfRangeException>(() =>
            PathHashRecipe.ComputeWithTopK(singleHash, topK: 0));
    }

    [Fact]
    public void ComputeCombinedHash_ThrowsOnEmptyInput()
    {
        Assert.Throws<ArgumentException>(() =>
            PathHashRecipe.ComputeCombinedHash(Array.Empty<string>()));
    }

    [Fact]
    public void ComputeHash_SingleNode_Works()
    {
        var lone = new[] { "sha256:only1111111111111111111111111111111111111111111111111111111111" };

        Assert.StartsWith("sha256:", PathHashRecipe.ComputeHash(lone));
    }

    [Fact]
    public void ComputeHash_StripsSha256Prefix_ForConsistency()
    {
        // These should produce the same hash since we strip prefix
        var prefixed = new[] { "sha256:aaa", "sha256:bbb" };
        var bare = new[] { "aaa", "bbb" };

        Assert.Equal(
            PathHashRecipe.ComputeHash(prefixed),
            PathHashRecipe.ComputeHash(bare));
    }
}