feat(metrics): Implement scan metrics repository and PostgreSQL integration
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
- Added IScanMetricsRepository interface for scan metrics persistence and retrieval.
- Implemented PostgresScanMetricsRepository for PostgreSQL database interactions, including methods for saving and retrieving scan metrics and execution phases.
- Introduced methods for obtaining TTE statistics and recent scans for tenants.
- Implemented deletion of old metrics for retention purposes.

test(tests): Add SCA Failure Catalogue tests for FC6-FC10
- Created ScaCatalogueDeterminismTests to validate determinism properties of SCA Failure Catalogue fixtures.
- Developed ScaFailureCatalogueTests to ensure correct handling of specific failure modes in the scanner.
- Included tests for manifest validation, file existence, and expected findings across multiple failure cases.

feat(telemetry): Integrate scan completion metrics into the pipeline
- Introduced IScanCompletionMetricsIntegration interface and ScanCompletionMetricsIntegration class to record metrics upon scan completion.
- Implemented proof coverage and TTE metrics recording with logging for scan completion summaries.
This commit is contained in:
@@ -0,0 +1,302 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ContentAddressedIdGeneratorTests.cs
|
||||
// Sprint: SPRINT_0501_0002_0001_proof_chain_content_addressed_ids
|
||||
// Task: PROOF-ID-0013
|
||||
// Description: Unit tests for ID generation determinism verification
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Attestor.ProofChain.Identifiers;
|
||||
using StellaOps.Attestor.ProofChain.Json;
|
||||
using StellaOps.Attestor.ProofChain.Merkle;
|
||||
using StellaOps.Attestor.ProofChain.Predicates;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests;
|
||||
|
||||
/// <summary>
/// Verifies determinism properties of <see cref="IContentAddressedIdGenerator"/>:
/// equal inputs produce equal IDs, differing inputs produce differing IDs, and
/// emitted digests follow the canonical sha256 hex format.
/// </summary>
public class ContentAddressedIdGeneratorTests
{
    private readonly IContentAddressedIdGenerator _generator;

    public ContentAddressedIdGeneratorTests()
    {
        // Wire the generator with the real canonicalizer and merkle builder so the
        // tests exercise the production hashing pipeline end to end.
        var canonicalizer = new JsonCanonicalizer();
        var merkleBuilder = new DeterministicMerkleTreeBuilder();
        _generator = new ContentAddressedIdGenerator(canonicalizer, merkleBuilder);
    }

    #region Evidence ID Tests

    [Fact]
    public void ComputeEvidenceId_SameInput_ProducesSameId()
    {
        var predicate = CreateTestEvidencePredicate();

        var id1 = _generator.ComputeEvidenceId(predicate);
        var id2 = _generator.ComputeEvidenceId(predicate);

        Assert.Equal(id1, id2);
        Assert.Equal(id1.ToString(), id2.ToString());
    }

    [Fact]
    public void ComputeEvidenceId_DifferentInput_ProducesDifferentId()
    {
        var predicate1 = CreateTestEvidencePredicate() with { Source = "scanner-v1" };
        var predicate2 = CreateTestEvidencePredicate() with { Source = "scanner-v2" };

        var id1 = _generator.ComputeEvidenceId(predicate1);
        var id2 = _generator.ComputeEvidenceId(predicate2);

        Assert.NotEqual(id1, id2);
    }

    [Fact]
    public void ComputeEvidenceId_IgnoresExistingEvidenceId()
    {
        // A pre-populated EvidenceId must not feed back into the hash, otherwise
        // re-hashing an already-identified predicate would change its identity.
        var predicate1 = CreateTestEvidencePredicate() with { EvidenceId = null };
        var predicate2 = CreateTestEvidencePredicate() with { EvidenceId = "sha256:existing" };

        var id1 = _generator.ComputeEvidenceId(predicate1);
        var id2 = _generator.ComputeEvidenceId(predicate2);

        Assert.Equal(id1, id2);
    }

    [Fact]
    public void ComputeEvidenceId_ReturnsValidFormat()
    {
        var predicate = CreateTestEvidencePredicate();
        var id = _generator.ComputeEvidenceId(predicate);

        Assert.Equal("sha256", id.Algorithm);
        Assert.Equal(64, id.Digest.Length);
        Assert.Matches("^[a-f0-9]{64}$", id.Digest);
    }

    #endregion

    #region Reasoning ID Tests

    [Fact]
    public void ComputeReasoningId_SameInput_ProducesSameId()
    {
        var predicate = CreateTestReasoningPredicate();

        var id1 = _generator.ComputeReasoningId(predicate);
        var id2 = _generator.ComputeReasoningId(predicate);

        Assert.Equal(id1, id2);
    }

    [Fact]
    public void ComputeReasoningId_DifferentInput_ProducesDifferentId()
    {
        var predicate1 = CreateTestReasoningPredicate() with { PolicyVersion = "v1" };
        var predicate2 = CreateTestReasoningPredicate() with { PolicyVersion = "v2" };

        var id1 = _generator.ComputeReasoningId(predicate1);
        var id2 = _generator.ComputeReasoningId(predicate2);

        Assert.NotEqual(id1, id2);
    }

    #endregion

    #region VEX Verdict ID Tests

    [Fact]
    public void ComputeVexVerdictId_SameInput_ProducesSameId()
    {
        var predicate = CreateTestVexPredicate();

        var id1 = _generator.ComputeVexVerdictId(predicate);
        var id2 = _generator.ComputeVexVerdictId(predicate);

        Assert.Equal(id1, id2);
    }

    [Fact]
    public void ComputeVexVerdictId_DifferentStatus_ProducesDifferentId()
    {
        var predicate1 = CreateTestVexPredicate() with { Status = VexStatus.Affected };
        var predicate2 = CreateTestVexPredicate() with { Status = VexStatus.NotAffected };

        var id1 = _generator.ComputeVexVerdictId(predicate1);
        var id2 = _generator.ComputeVexVerdictId(predicate2);

        Assert.NotEqual(id1, id2);
    }

    #endregion

    #region Proof Bundle ID Tests

    [Fact]
    public void ComputeProofBundleId_SameInput_ProducesSameId()
    {
        var sbomEntryId = CreateTestSbomEntryId();
        var evidenceIds = new[] { CreateTestEvidenceId("e1"), CreateTestEvidenceId("e2") };
        var reasoningId = CreateTestReasoningId();
        var vexVerdictId = CreateTestVexVerdictId();

        var id1 = _generator.ComputeProofBundleId(sbomEntryId, evidenceIds, reasoningId, vexVerdictId);
        var id2 = _generator.ComputeProofBundleId(sbomEntryId, evidenceIds, reasoningId, vexVerdictId);

        Assert.Equal(id1, id2);
    }

    [Fact]
    public void ComputeProofBundleId_EvidenceIds_SortedBeforeMerkle()
    {
        var sbomEntryId = CreateTestSbomEntryId();
        var reasoningId = CreateTestReasoningId();
        var vexVerdictId = CreateTestVexVerdictId();

        // Different order, should produce same result: the generator is expected to
        // sort evidence IDs before building the merkle tree.
        var unsorted = new[] { CreateTestEvidenceId("z"), CreateTestEvidenceId("a") };
        var sorted = new[] { CreateTestEvidenceId("a"), CreateTestEvidenceId("z") };

        var id1 = _generator.ComputeProofBundleId(sbomEntryId, unsorted, reasoningId, vexVerdictId);
        var id2 = _generator.ComputeProofBundleId(sbomEntryId, sorted, reasoningId, vexVerdictId);

        Assert.Equal(id1, id2);
    }

    [Fact]
    public void ComputeProofBundleId_DifferentEvidence_ProducesDifferentId()
    {
        var sbomEntryId = CreateTestSbomEntryId();
        var reasoningId = CreateTestReasoningId();
        var vexVerdictId = CreateTestVexVerdictId();

        var evidenceIds1 = new[] { CreateTestEvidenceId("e1") };
        var evidenceIds2 = new[] { CreateTestEvidenceId("e2") };

        var id1 = _generator.ComputeProofBundleId(sbomEntryId, evidenceIds1, reasoningId, vexVerdictId);
        var id2 = _generator.ComputeProofBundleId(sbomEntryId, evidenceIds2, reasoningId, vexVerdictId);

        Assert.NotEqual(id1, id2);
    }

    [Fact]
    public void ComputeProofBundleId_EmptyEvidence_Throws()
    {
        var sbomEntryId = CreateTestSbomEntryId();
        var evidenceIds = Array.Empty<EvidenceId>();
        var reasoningId = CreateTestReasoningId();
        var vexVerdictId = CreateTestVexVerdictId();

        Assert.Throws<ArgumentException>(() =>
            _generator.ComputeProofBundleId(sbomEntryId, evidenceIds, reasoningId, vexVerdictId));
    }

    #endregion

    #region Graph Revision ID Tests

    [Fact]
    public void ComputeGraphRevisionId_SameInput_ProducesSameId()
    {
        var nodeIds = new[] { "node1", "node2" };
        var edgeIds = new[] { "edge1", "edge2" };
        var policyDigest = "sha256:policy";
        var feedsDigest = "sha256:feeds";
        var toolchainDigest = "sha256:toolchain";
        var paramsDigest = "sha256:params";

        var id1 = _generator.ComputeGraphRevisionId(nodeIds, edgeIds, policyDigest, feedsDigest, toolchainDigest, paramsDigest);
        var id2 = _generator.ComputeGraphRevisionId(nodeIds, edgeIds, policyDigest, feedsDigest, toolchainDigest, paramsDigest);

        Assert.Equal(id1, id2);
    }

    [Fact]
    public void ComputeGraphRevisionId_DifferentInput_ProducesDifferentId()
    {
        var nodeIds = new[] { "node1", "node2" };
        var edgeIds = new[] { "edge1", "edge2" };

        var id1 = _generator.ComputeGraphRevisionId(
            nodeIds, edgeIds, "sha256:policy1", "sha256:feeds", "sha256:toolchain", "sha256:params");
        var id2 = _generator.ComputeGraphRevisionId(
            nodeIds, edgeIds, "sha256:policy2", "sha256:feeds", "sha256:toolchain", "sha256:params");

        Assert.NotEqual(id1, id2);
    }

    #endregion

    #region SBOM Digest Tests

    [Fact]
    public void ComputeSbomDigest_SameInput_ProducesSameDigest()
    {
        var sbomJson = """{"name":"test","version":"1.0"}"""u8;

        var digest1 = _generator.ComputeSbomDigest(sbomJson);
        var digest2 = _generator.ComputeSbomDigest(sbomJson);

        Assert.Equal(digest1, digest2);
    }

    [Fact]
    public void ComputeSbomEntryId_SameInput_ProducesSameId()
    {
        var sbomJson = """{"name":"test","version":"1.0"}"""u8;
        var purl = "pkg:npm/lodash";
        var version = "4.17.21";

        var id1 = _generator.ComputeSbomEntryId(sbomJson, purl, version);
        var id2 = _generator.ComputeSbomEntryId(sbomJson, purl, version);

        Assert.Equal(id1.SbomDigest, id2.SbomDigest);
        Assert.Equal(id1.Purl, id2.Purl);
        Assert.Equal(id1.Version, id2.Version);
    }

    #endregion

    #region Test Helpers

    /// <summary>Canonical evidence predicate used as the baseline for `with`-mutations in tests.</summary>
    private static EvidencePredicate CreateTestEvidencePredicate() => new()
    {
        Source = "stellaops-scanner",
        SourceVersion = "1.0.0",
        CollectionTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero),
        SbomEntryId = "sha256:sbom123:pkg:npm/lodash@4.17.21",
        VulnerabilityId = "CVE-2024-1234",
        RawFinding = new { severity = "high", score = 9.1 }
    };

    /// <summary>Canonical reasoning predicate used as the baseline for `with`-mutations in tests.</summary>
    private static ReasoningPredicate CreateTestReasoningPredicate() => new()
    {
        SbomEntryId = "sha256:sbom123:pkg:npm/lodash@4.17.21",
        EvidenceIds = ["sha256:evidence1", "sha256:evidence2"],
        PolicyVersion = "v2024.12.16",
        Inputs = new ReasoningInputs
        {
            CurrentEvaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero)
        }
    };

    /// <summary>Canonical VEX predicate used as the baseline for `with`-mutations in tests.</summary>
    private static VexPredicate CreateTestVexPredicate() => new()
    {
        SbomEntryId = "sha256:sbom123:pkg:npm/lodash@4.17.21",
        VulnerabilityId = "CVE-2024-1234",
        Status = VexStatus.NotAffected,
        Justification = "Vulnerable code is not in execution path"
    };

    private static SbomEntryId CreateTestSbomEntryId() =>
        new("sha256:sbom123", "pkg:npm/lodash", "4.17.21");

    /// <summary>
    /// Builds a syntactically valid sha256 evidence ID whose last four characters are
    /// derived from <paramref name="suffix"/>, so distinct suffixes yield distinct IDs.
    /// The suffix is mapped onto hex characters so the fabricated digest always matches
    /// ^[a-f0-9]{64}$. (The previous implementation embedded the raw suffix, producing
    /// an invalid digest for non-hex suffixes such as "z".) The mapping preserves the
    /// relative ordering of "a" and "z", which the sorted-before-merkle test relies on.
    /// </summary>
    private static EvidenceId CreateTestEvidenceId(string suffix)
    {
        const string hexAlphabet = "0123456789abcdef";
        var encoded = string.Empty;
        foreach (var c in suffix)
        {
            encoded += hexAlphabet[c % 16];
        }

        // 60 hex characters; the encoded suffix fills the remaining 4 of the 64.
        const string prefix = "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6";
        return new((prefix + encoded.PadLeft(4, '0'))[..64]);
    }

    private static ReasoningId CreateTestReasoningId() =>
        new("b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3");

    private static VexVerdictId CreateTestVexVerdictId() =>
        new("c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4");

    #endregion
}
|
||||
@@ -0,0 +1,231 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ContentAddressedIdTests.cs
|
||||
// Sprint: SPRINT_0501_0002_0001_proof_chain_content_addressed_ids
|
||||
// Task: PROOF-ID-0013
|
||||
// Description: Unit tests for content-addressed ID generation and determinism
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Attestor.ProofChain.Identifiers;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests;
|
||||
|
||||
/// <summary>
/// Tests for parsing, formatting and splitting of <see cref="ContentAddressedId"/> values.
/// </summary>
public class ContentAddressedIdTests
{
    // A well-formed 64-character lowercase hex digest reused across tests.
    private const string Sha256Digest = "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2";

    [Fact]
    public void Parse_ValidSha256_ReturnsId()
    {
        var parsed = ContentAddressedId.Parse($"sha256:{Sha256Digest}");

        Assert.Equal("sha256", parsed.Algorithm);
        Assert.Equal(Sha256Digest, parsed.Digest);
    }

    [Fact]
    public void Parse_ValidSha512_ReturnsId()
    {
        // SHA-512 digests are 128 hex characters long.
        var sha512Digest = new string('a', 128);

        var parsed = ContentAddressedId.Parse($"sha512:{sha512Digest}");

        Assert.Equal("sha512", parsed.Algorithm);
        Assert.Equal(sha512Digest, parsed.Digest);
    }

    [Fact]
    public void Parse_NormalizesToLowercase()
    {
        // Uppercase algorithm and digest must be folded to lowercase on parse.
        var parsed = ContentAddressedId.Parse($"SHA256:{Sha256Digest.ToUpperInvariant()}");

        Assert.Equal("sha256", parsed.Algorithm);
        Assert.Equal(Sha256Digest, parsed.Digest);
    }

    [Theory]
    [InlineData("")]
    [InlineData(" ")]
    [InlineData("invalid")]
    [InlineData(":digest")]
    [InlineData("algo:")]
    public void Parse_InvalidFormat_Throws(string input)
        => Assert.Throws<FormatException>(() => ContentAddressedId.Parse(input));

    [Fact]
    public void Parse_InvalidDigestLength_Throws()
    {
        // Three hex characters is far too short for any supported algorithm.
        Assert.Throws<FormatException>(() => ContentAddressedId.Parse("sha256:abc"));
    }

    [Fact]
    public void ToString_ReturnsCanonicalFormat()
    {
        var canonical = $"sha256:{Sha256Digest}";

        var roundTripped = ContentAddressedId.Parse(canonical).ToString();

        Assert.Equal(canonical, roundTripped);
    }

    [Fact]
    public void TrySplit_ValidInput_ReturnsTrue()
    {
        var succeeded = ContentAddressedId.TrySplit(
            $"sha256:{Sha256Digest}",
            out var algorithm,
            out var digest);

        Assert.True(succeeded);
        Assert.Equal("sha256", algorithm);
        Assert.NotEmpty(digest);
    }

    [Fact]
    public void TrySplit_InvalidInput_ReturnsFalse()
    {
        var succeeded = ContentAddressedId.TrySplit("invalid", out _, out _);

        Assert.False(succeeded);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for <see cref="EvidenceId"/> construction and canonical formatting.
/// </summary>
public class EvidenceIdTests
{
    private const string Digest = "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2";

    [Fact]
    public void Constructor_ValidDigest_CreatesId()
    {
        var evidenceId = new EvidenceId(Digest);

        Assert.Equal("sha256", evidenceId.Algorithm);
        Assert.Equal(Digest, evidenceId.Digest);
    }

    [Fact]
    public void ToString_ReturnsCanonicalFormat()
    {
        var evidenceId = new EvidenceId(Digest);

        // Canonical form is "<algorithm>:<digest>".
        Assert.Equal($"sha256:{Digest}", evidenceId.ToString());
    }
}
|
||||
|
||||
/// <summary>
/// Tests for <see cref="ReasoningId"/> construction.
/// </summary>
public class ReasoningIdTests
{
    private const string Digest = "b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3";

    [Fact]
    public void Constructor_ValidDigest_CreatesId()
    {
        var reasoningId = new ReasoningId(Digest);

        Assert.Equal("sha256", reasoningId.Algorithm);
        Assert.Equal(Digest, reasoningId.Digest);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for <see cref="VexVerdictId"/> construction.
/// </summary>
public class VexVerdictIdTests
{
    private const string Digest = "c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4";

    [Fact]
    public void Constructor_ValidDigest_CreatesId()
    {
        var verdictId = new VexVerdictId(Digest);

        Assert.Equal("sha256", verdictId.Algorithm);
        Assert.Equal(Digest, verdictId.Digest);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for <see cref="ProofBundleId"/> construction.
/// </summary>
public class ProofBundleIdTests
{
    private const string Digest = "d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5";

    [Fact]
    public void Constructor_ValidDigest_CreatesId()
    {
        var bundleId = new ProofBundleId(Digest);

        Assert.Equal("sha256", bundleId.Algorithm);
        Assert.Equal(Digest, bundleId.Digest);
    }
}
|
||||
|
||||
/// <summary>
/// Tests for <see cref="SbomEntryId"/> construction and canonical formatting,
/// covering both the versioned and version-less forms.
/// </summary>
public class SbomEntryIdTests
{
    private const string SbomDigest = "sha256:abc123";
    private const string Purl = "pkg:npm/lodash";
    private const string Version = "4.17.21";

    [Fact]
    public void Constructor_WithVersion_CreatesId()
    {
        var entryId = new SbomEntryId(SbomDigest, Purl, Version);

        Assert.Equal(SbomDigest, entryId.SbomDigest);
        Assert.Equal(Purl, entryId.Purl);
        Assert.Equal(Version, entryId.Version);
    }

    [Fact]
    public void Constructor_WithoutVersion_CreatesId()
    {
        var entryId = new SbomEntryId(SbomDigest, Purl);

        Assert.Equal(SbomDigest, entryId.SbomDigest);
        Assert.Equal(Purl, entryId.Purl);
        Assert.Null(entryId.Version);
    }

    [Fact]
    public void ToString_WithVersion_IncludesVersion()
    {
        var entryId = new SbomEntryId(SbomDigest, Purl, Version);

        // Canonical form appends "@<version>" when a version is present.
        Assert.Equal("sha256:abc123:pkg:npm/lodash@4.17.21", entryId.ToString());
    }

    [Fact]
    public void ToString_WithoutVersion_OmitsVersion()
    {
        var entryId = new SbomEntryId(SbomDigest, Purl);

        Assert.Equal("sha256:abc123:pkg:npm/lodash", entryId.ToString());
    }
}
|
||||
|
||||
/// <summary>
/// Tests for <see cref="GraphRevisionId"/> construction and its "grv_"-prefixed formatting.
/// </summary>
public class GraphRevisionIdTests
{
    private const string Digest = "e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6";

    [Fact]
    public void Constructor_ValidDigest_CreatesId()
    {
        var revisionId = new GraphRevisionId(Digest);

        Assert.Equal(Digest, revisionId.Digest);
    }

    [Fact]
    public void ToString_ReturnsGrvFormat()
    {
        var revisionId = new GraphRevisionId(Digest);

        // Graph revision IDs carry a "grv_" prefix before the algorithm.
        Assert.Equal($"grv_sha256:{Digest}", revisionId.ToString());
    }
}
|
||||
|
||||
/// <summary>
/// Tests for <see cref="TrustAnchorId"/>, a GUID-backed identifier.
/// </summary>
public class TrustAnchorIdTests
{
    [Fact]
    public void Constructor_ValidGuid_CreatesId()
    {
        var value = Guid.NewGuid();

        var anchorId = new TrustAnchorId(value);

        Assert.Equal(value, anchorId.Value);
    }

    [Fact]
    public void ToString_ReturnsGuidString()
    {
        var value = Guid.NewGuid();

        var anchorId = new TrustAnchorId(value);

        // Formatting delegates to the wrapped GUID's default "D" representation.
        Assert.Equal(value.ToString(), anchorId.ToString());
    }
}
|
||||
@@ -0,0 +1,224 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// JsonCanonicalizerTests.cs
|
||||
// Sprint: SPRINT_0501_0002_0001_proof_chain_content_addressed_ids
|
||||
// Task: PROOF-ID-0014
|
||||
// Description: Property-based tests for JSON canonicalization stability (RFC 8785)
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text;
|
||||
using StellaOps.Attestor.ProofChain.Json;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests;
|
||||
|
||||
/// <summary>
/// Tests for <see cref="IJsonCanonicalizer"/> (RFC 8785-style canonicalization):
/// key sorting at every nesting level, whitespace removal, array-order preservation,
/// and byte-for-byte determinism across repeated invocations.
/// </summary>
public class JsonCanonicalizerTests
{
    private readonly IJsonCanonicalizer _canonicalizer;

    public JsonCanonicalizerTests()
    {
        _canonicalizer = new JsonCanonicalizer();
    }

    [Fact]
    public void Canonicalize_SortsKeys()
    {
        var input = """{"z": 1, "a": 2}"""u8;
        var output = _canonicalizer.Canonicalize(input);

        var outputStr = Encoding.UTF8.GetString(output);
        Assert.Contains("\"a\":", outputStr);
        Assert.Contains("\"z\":", outputStr);

        // Verify 'a' comes before 'z'
        var aIndex = outputStr.IndexOf("\"a\":");
        var zIndex = outputStr.IndexOf("\"z\":");
        Assert.True(aIndex < zIndex, "Keys should be sorted alphabetically");
    }

    [Fact]
    public void Canonicalize_RemovesWhitespace()
    {
        var input = """{ "key" : "value" }"""u8;
        var output = _canonicalizer.Canonicalize(input);

        var outputStr = Encoding.UTF8.GetString(output);
        Assert.DoesNotContain(" ", outputStr);
    }

    [Fact]
    public void Canonicalize_PreservesUtf8()
    {
        // Non-ASCII text (BMP and astral-plane characters) must survive canonicalization.
        var input = """{"text": "hello 世界 🌍"}"""u8;
        var output = _canonicalizer.Canonicalize(input);

        var outputStr = Encoding.UTF8.GetString(output);
        Assert.Contains("世界", outputStr);
        Assert.Contains("🌍", outputStr);
    }

    [Fact]
    public void Canonicalize_SameInput_ProducesSameOutput()
    {
        var input = """{"key": "value", "nested": {"b": 2, "a": 1}}"""u8;

        var output1 = _canonicalizer.Canonicalize(input);
        var output2 = _canonicalizer.Canonicalize(input);

        Assert.Equal(output1, output2);
    }

    [Fact]
    public void Canonicalize_NestedObjects_SortsAllLevels()
    {
        var input = """{"outer": {"z": 1, "a": 2}, "inner": {"y": 3, "b": 4}}"""u8;
        var output = _canonicalizer.Canonicalize(input);

        var outputStr = Encoding.UTF8.GetString(output);

        // Top-level keys must be sorted: "inner" precedes "outer".
        // (The original assertion only compared "a" and "z", which both live inside
        // one nested object, so neither top-level nor second-object ordering was checked.)
        Assert.True(
            outputStr.IndexOf("\"inner\":") < outputStr.IndexOf("\"outer\":"),
            "Top-level keys should be sorted");

        // Keys inside each nested object must be sorted too.
        Assert.True(
            outputStr.IndexOf("\"b\":") < outputStr.IndexOf("\"y\":"),
            "Keys inside 'inner' should be sorted");
        Assert.True(
            outputStr.IndexOf("\"a\":") < outputStr.IndexOf("\"z\":"),
            "Keys inside 'outer' should be sorted");
    }

    [Fact]
    public void Canonicalize_Arrays_PreservesOrder()
    {
        // Unlike object keys, array elements must never be reordered.
        var input = """{"items": [3, 1, 2]}"""u8;
        var output = _canonicalizer.Canonicalize(input);

        var outputStr = Encoding.UTF8.GetString(output);
        Assert.Contains("[3,1,2]", outputStr);
    }

    [Fact]
    public void Canonicalize_NullValue_Preserved()
    {
        var input = """{"key": null}"""u8;
        var output = _canonicalizer.Canonicalize(input);

        var outputStr = Encoding.UTF8.GetString(output);
        Assert.Contains("null", outputStr);
    }

    [Fact]
    public void Canonicalize_BooleanValues_LowerCase()
    {
        var input = """{"t": true, "f": false}"""u8;
        var output = _canonicalizer.Canonicalize(input);

        var outputStr = Encoding.UTF8.GetString(output);
        Assert.Contains("true", outputStr);
        Assert.Contains("false", outputStr);
        Assert.DoesNotContain("True", outputStr);
        Assert.DoesNotContain("False", outputStr);
    }

    [Fact]
    public void Canonicalize_Numbers_MinimalRepresentation()
    {
        var input = """{"integer": 42, "float": 3.14, "zero": 0}"""u8;
        var output = _canonicalizer.Canonicalize(input);

        var outputStr = Encoding.UTF8.GetString(output);
        Assert.Contains("42", outputStr);
        Assert.Contains("3.14", outputStr);
        Assert.Contains("0", outputStr);
    }

    [Fact]
    public void Canonicalize_EmptyObject_ReturnsEmptyBraces()
    {
        var input = "{}"u8;
        var output = _canonicalizer.Canonicalize(input);

        var outputStr = Encoding.UTF8.GetString(output);
        Assert.Equal("{}", outputStr);
    }

    [Fact]
    public void Canonicalize_EmptyArray_ReturnsEmptyBrackets()
    {
        var input = """{"arr": []}"""u8;
        var output = _canonicalizer.Canonicalize(input);

        var outputStr = Encoding.UTF8.GetString(output);
        Assert.Contains("[]", outputStr);
    }

    [Fact]
    public void Canonicalize_StringEscaping_Preserved()
    {
        // Escape sequences inside string values must not be unescaped into raw bytes.
        var input = """{"text": "line1\nline2\ttab"}"""u8;
        var output = _canonicalizer.Canonicalize(input);

        var outputStr = Encoding.UTF8.GetString(output);
        Assert.Contains("\\n", outputStr);
        Assert.Contains("\\t", outputStr);
    }

    [Theory]
    [InlineData("""{"a":1}""")]
    [InlineData("""{"a":1,"b":2}""")]
    [InlineData("""{"nested":{"key":"value"}}""")]
    [InlineData("""{"array":[1,2,3]}""")]
    public void Canonicalize_AlreadyCanonical_Unchanged(string input)
    {
        // Canonicalization must be idempotent: canonical input passes through untouched.
        var inputBytes = Encoding.UTF8.GetBytes(input);
        var output = _canonicalizer.Canonicalize(inputBytes);

        var outputStr = Encoding.UTF8.GetString(output);
        Assert.Equal(input, outputStr);
    }

    [Fact]
    public void Canonicalize_ComplexNesting_Deterministic()
    {
        var input = """
            {
                "level1": {
                    "z": {
                        "y": 1,
                        "x": 2
                    },
                    "a": {
                        "b": 3,
                        "a": 4
                    }
                },
                "array": [
                    {"z": 1, "a": 2},
                    {"y": 3, "b": 4}
                ]
            }
            """u8;

        var output1 = _canonicalizer.Canonicalize(input);
        var output2 = _canonicalizer.Canonicalize(input);

        Assert.Equal(output1, output2);

        var outputStr = Encoding.UTF8.GetString(output1);
        Assert.DoesNotContain("\n", outputStr);
        Assert.DoesNotContain(" ", outputStr);
    }

    [Fact]
    public void CanonicalizeDifferentWhitespace_ProducesSameOutput()
    {
        // Three formattings of the same document must converge to identical bytes.
        var input1 = """{"key":"value"}"""u8;
        var input2 = """{ "key" : "value" }"""u8;
        var input3 = """
            {
                "key": "value"
            }
            """u8;

        var output1 = _canonicalizer.Canonicalize(input1);
        var output2 = _canonicalizer.Canonicalize(input2);
        var output3 = _canonicalizer.Canonicalize(input3);

        Assert.Equal(output1, output2);
        Assert.Equal(output2, output3);
    }
}
|
||||
@@ -0,0 +1,170 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// MerkleTreeBuilderTests.cs
|
||||
// Sprint: SPRINT_0501_0002_0001_proof_chain_content_addressed_ids
|
||||
// Task: PROOF-ID-0013
|
||||
// Description: Unit tests for deterministic merkle tree construction
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text;
|
||||
using StellaOps.Attestor.ProofChain.Merkle;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests;
|
||||
|
||||
/// <summary>
/// Tests for <see cref="IMerkleTreeBuilder"/>: the root must always be a 32-byte
/// SHA-256 value, be deterministic for identical leaf sequences, and be sensitive
/// to leaf ordering.
/// </summary>
public class MerkleTreeBuilderTests
{
    private readonly IMerkleTreeBuilder _builder;

    public MerkleTreeBuilderTests()
    {
        _builder = new DeterministicMerkleTreeBuilder();
    }

    [Fact]
    public void ComputeMerkleRoot_SingleLeaf_ReturnsSha256OfLeaf()
    {
        var root = _builder.ComputeMerkleRoot(ToLeaves("single leaf"));

        Assert.NotNull(root);
        Assert.Equal(32, root.Length); // SHA-256 produces 32 bytes
    }

    [Fact]
    public void ComputeMerkleRoot_TwoLeaves_ReturnsCombinedHash()
    {
        var root = _builder.ComputeMerkleRoot(ToLeaves("leaf1", "leaf2"));

        Assert.NotNull(root);
        Assert.Equal(32, root.Length);
    }

    [Fact]
    public void ComputeMerkleRoot_SameInput_ProducesSameRoot()
    {
        var leaves = ToLeaves("leaf1", "leaf2");

        var firstRoot = _builder.ComputeMerkleRoot(leaves);
        var secondRoot = _builder.ComputeMerkleRoot(leaves);

        Assert.Equal(firstRoot, secondRoot);
    }

    [Fact]
    public void ComputeMerkleRoot_DifferentOrder_ProducesDifferentRoot()
    {
        // Leaf order is significant: swapping two leaves must change the root.
        var forwardRoot = _builder.ComputeMerkleRoot(ToLeaves("leaf1", "leaf2"));
        var reversedRoot = _builder.ComputeMerkleRoot(ToLeaves("leaf2", "leaf1"));

        Assert.NotEqual(forwardRoot, reversedRoot);
    }

    [Fact]
    public void ComputeMerkleRoot_OddNumberOfLeaves_HandlesCorrectly()
    {
        var root = _builder.ComputeMerkleRoot(ToLeaves("leaf1", "leaf2", "leaf3"));

        Assert.NotNull(root);
        Assert.Equal(32, root.Length);
    }

    [Fact]
    public void ComputeMerkleRoot_ManyLeaves_ProducesDeterministicRoot()
    {
        var names = new string[100];
        for (var i = 0; i < names.Length; i++)
        {
            names[i] = $"leaf-{i:D3}";
        }
        var leaves = ToLeaves(names);

        var firstRoot = _builder.ComputeMerkleRoot(leaves);
        var secondRoot = _builder.ComputeMerkleRoot(leaves);

        Assert.Equal(firstRoot, secondRoot);
    }

    [Fact]
    public void ComputeMerkleRoot_EmptyLeaves_ReturnsEmptyOrZeroHash()
    {
        // Should handle gracefully (either empty or zero hash)
        var root = _builder.ComputeMerkleRoot(Array.Empty<ReadOnlyMemory<byte>>());

        Assert.NotNull(root);
    }

    [Fact]
    public void ComputeMerkleRoot_PowerOfTwoLeaves_ProducesBalancedTree()
    {
        var root = _builder.ComputeMerkleRoot(ToLeaves("leaf1", "leaf2", "leaf3", "leaf4"));

        Assert.NotNull(root);
        Assert.Equal(32, root.Length);
    }

    [Fact]
    public void ComputeMerkleRoot_BinaryData_HandlesBinaryInput()
    {
        // Arbitrary (non-UTF-8-text) bytes must be accepted as leaves.
        ReadOnlyMemory<byte>[] leaves =
        [
            new byte[] { 0x00, 0x01, 0x02, 0xFF, 0xFE, 0xFD },
            new byte[] { 0xFF, 0xEE, 0xDD, 0x00, 0x11, 0x22 },
        ];

        var root = _builder.ComputeMerkleRoot(leaves);

        Assert.NotNull(root);
        Assert.Equal(32, root.Length);
    }

    [Theory]
    [InlineData(1)]
    [InlineData(2)]
    [InlineData(3)]
    [InlineData(4)]
    [InlineData(5)]
    [InlineData(7)]
    [InlineData(8)]
    [InlineData(15)]
    [InlineData(16)]
    [InlineData(17)]
    public void ComputeMerkleRoot_VariousLeafCounts_AlwaysProduces32Bytes(int leafCount)
    {
        var names = new string[leafCount];
        for (var i = 0; i < leafCount; i++)
        {
            names[i] = $"leaf-{i}";
        }

        var root = _builder.ComputeMerkleRoot(ToLeaves(names));

        Assert.Equal(32, root.Length);
    }

    /// <summary>Encodes each string as UTF-8 bytes to form a leaf array.</summary>
    private static ReadOnlyMemory<byte>[] ToLeaves(params string[] texts)
    {
        var leaves = new ReadOnlyMemory<byte>[texts.Length];
        for (var i = 0; i < texts.Length; i++)
        {
            leaves[i] = Encoding.UTF8.GetBytes(texts[i]);
        }
        return leaves;
    }
}
|
||||
Reference in New Issue
Block a user