work work hard work

This commit is contained in:
StellaOps Bot
2025-12-18 00:47:24 +02:00
parent dee252940b
commit b4235c134c
189 changed files with 9627 additions and 3258 deletions

View File

@@ -0,0 +1,32 @@
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Unit-test project for StellaOps.Attestor.Persistence (xUnit + NSubstitute + FluentAssertions). -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <!-- Test assembly: never packed into a NuGet package. -->
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <TreatWarningsAsErrors>false</TreatWarningsAsErrors>
    <!-- Opts out of the shared Concelier test infrastructure targets — presumably a repo-level MSBuild switch; confirm against Directory.Build.props. -->
    <UseConcelierTestInfra>false</UseConcelierTestInfra>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="coverlet.collector" Version="6.0.4" />
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="NSubstitute" Version="5.1.0" />
    <PackageReference Include="xunit" Version="2.9.3" />
    <PackageReference Include="xunit.runner.visualstudio" Version="3.0.1" />
  </ItemGroup>
  <ItemGroup>
    <!-- Makes "using Xunit;" implicit in every test file. -->
    <Using Include="Xunit" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\..\__Libraries\StellaOps.Attestor.Persistence\StellaOps.Attestor.Persistence.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,185 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using NSubstitute;
using StellaOps.Attestor.Persistence.Entities;
using StellaOps.Attestor.Persistence.Repositories;
using StellaOps.Attestor.Persistence.Services;
namespace StellaOps.Attestor.Persistence.Tests;
/// <summary>
/// Tests for trust anchor glob matching and allowlists.
/// Covers purl pattern matching (exact, <c>*</c>, <c>**</c>), specificity-based
/// tie-breaking, key allow/revoke lists, and predicate-type allowlists.
/// Sprint: SPRINT_0501_0006_0001_proof_chain_database_schema
/// Task: PROOF-DB-0010
/// </summary>
public sealed class TrustAnchorMatcherTests
{
    private readonly IProofChainRepository _repository;
    private readonly TrustAnchorMatcher _matcher;

    public TrustAnchorMatcherTests()
    {
        // Repository is substituted so each test controls the active anchor set via SeedAnchors.
        _repository = Substitute.For<IProofChainRepository>();
        _matcher = new TrustAnchorMatcher(_repository, NullLogger<TrustAnchorMatcher>.Instance);
    }

    [Fact]
    public async Task FindMatchAsync_ExactPattern_MatchesCorrectly()
    {
        var anchor = CreateAnchor("pkg:npm/lodash@4.17.21", ["key-1"]);
        await SeedAnchors(anchor);

        var result = await _matcher.FindMatchAsync("pkg:npm/lodash@4.17.21");

        result.Should().NotBeNull();
        result!.Anchor.AnchorId.Should().Be(anchor.AnchorId);
    }

    [Fact]
    public async Task FindMatchAsync_WildcardPattern_MatchesPackages()
    {
        var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
        await SeedAnchors(anchor);

        var result = await _matcher.FindMatchAsync("pkg:npm/lodash@4.17.21");

        result.Should().NotBeNull();
        // The match result reports which pattern fired, not just the anchor.
        result!.MatchedPattern.Should().Be("pkg:npm/*");
    }

    [Fact]
    public async Task FindMatchAsync_DoubleWildcard_MatchesNestedPaths()
    {
        // "**" is expected to cross path-segment boundaries (scoped sub-packages).
        var anchor = CreateAnchor("pkg:npm/@scope/**", ["key-1"]);
        await SeedAnchors(anchor);

        var result = await _matcher.FindMatchAsync("pkg:npm/@scope/sub/package@1.0.0");

        result.Should().NotBeNull();
    }

    [Fact]
    public async Task FindMatchAsync_MultipleMatches_ReturnsMoreSpecific()
    {
        // Both patterns match; the matcher must prefer the more specific one.
        var genericAnchor = CreateAnchor("pkg:npm/*", ["key-generic"], policyRef: "generic");
        var specificAnchor = CreateAnchor("pkg:npm/lodash@*", ["key-specific"], policyRef: "specific");
        await SeedAnchors(genericAnchor, specificAnchor);

        var result = await _matcher.FindMatchAsync("pkg:npm/lodash@4.17.21");

        result.Should().NotBeNull();
        result!.Anchor.PolicyRef.Should().Be("specific");
    }

    [Fact]
    public async Task FindMatchAsync_NoMatch_ReturnsNull()
    {
        var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
        await SeedAnchors(anchor);

        // Different ecosystem (pypi vs npm) must not match an npm pattern.
        var result = await _matcher.FindMatchAsync("pkg:pypi/requests@2.28.0");

        result.Should().BeNull();
    }

    [Fact]
    public async Task IsKeyAllowedAsync_AllowedKey_ReturnsTrue()
    {
        var anchor = CreateAnchor("pkg:npm/*", ["key-1", "key-2"]);
        await SeedAnchors(anchor);

        var allowed = await _matcher.IsKeyAllowedAsync("pkg:npm/lodash@4.17.21", "key-1");

        allowed.Should().BeTrue();
    }

    [Fact]
    public async Task IsKeyAllowedAsync_DisallowedKey_ReturnsFalse()
    {
        var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
        await SeedAnchors(anchor);

        var allowed = await _matcher.IsKeyAllowedAsync("pkg:npm/lodash@4.17.21", "key-unknown");

        allowed.Should().BeFalse();
    }

    [Fact]
    public async Task IsKeyAllowedAsync_RevokedKey_ReturnsFalse()
    {
        // Revocation wins even when the key is also present in the allowlist.
        var anchor = CreateAnchor("pkg:npm/*", ["key-1"], revokedKeys: ["key-1"]);
        await SeedAnchors(anchor);

        var allowed = await _matcher.IsKeyAllowedAsync("pkg:npm/lodash@4.17.21", "key-1");

        allowed.Should().BeFalse();
    }

    [Fact]
    public async Task IsPredicateAllowedAsync_NoRestrictions_AllowsAll()
    {
        // A null AllowedPredicateTypes means "no restriction" rather than "deny all".
        var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
        anchor.AllowedPredicateTypes = null;
        await SeedAnchors(anchor);

        var allowed = await _matcher.IsPredicateAllowedAsync(
            "pkg:npm/lodash@4.17.21",
            "https://in-toto.io/attestation/vulns/v0.1");

        allowed.Should().BeTrue();
    }

    [Fact]
    public async Task IsPredicateAllowedAsync_WithRestrictions_EnforcesAllowlist()
    {
        var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
        anchor.AllowedPredicateTypes = ["evidence.stella/v1", "sbom.stella/v1"];
        await SeedAnchors(anchor);

        (await _matcher.IsPredicateAllowedAsync("pkg:npm/lodash@4.17.21", "evidence.stella/v1")).Should().BeTrue();
        (await _matcher.IsPredicateAllowedAsync("pkg:npm/lodash@4.17.21", "random.predicate/v1")).Should().BeFalse();
    }

    [Theory]
    [InlineData("pkg:npm/*", "pkg:npm/lodash@4.17.21", true)]
    [InlineData("pkg:npm/lodash@*", "pkg:npm/lodash@4.17.21", true)]
    [InlineData("pkg:npm/lodash@4.17.*", "pkg:npm/lodash@4.17.21", true)]
    [InlineData("pkg:npm/lodash@4.17.21", "pkg:npm/lodash@4.17.21", true)]
    [InlineData("pkg:npm/lodash@4.17.21", "pkg:npm/lodash@4.17.22", false)]
    [InlineData("pkg:pypi/*", "pkg:npm/lodash@4.17.21", false)]
    [InlineData("pkg:npm/@scope/*", "pkg:npm/@scope/package@1.0.0", true)]
    [InlineData("pkg:npm/@scope/*", "pkg:npm/@other/package@1.0.0", false)]
    public async Task FindMatchAsync_PatternVariations_MatchCorrectly(string pattern, string purl, bool shouldMatch)
    {
        var anchor = CreateAnchor(pattern, ["key-1"]);
        await SeedAnchors(anchor);

        var result = await _matcher.FindMatchAsync(purl);

        (result != null).Should().Be(shouldMatch);
    }

    /// <summary>
    /// Configures the substituted repository so the matcher sees exactly
    /// <paramref name="anchors"/> as the active trust anchor set.
    /// </summary>
    private Task SeedAnchors(params TrustAnchorEntity[] anchors)
    {
        _repository.GetActiveTrustAnchorsAsync(Arg.Any<CancellationToken>())
            .Returns(Task.FromResult<IReadOnlyList<TrustAnchorEntity>>(anchors));
        return Task.CompletedTask;
    }

    /// <summary>
    /// Builds a minimal <see cref="TrustAnchorEntity"/> for a purl pattern,
    /// with optional policy reference and revoked-key list.
    /// </summary>
    private static TrustAnchorEntity CreateAnchor(
        string pattern,
        string[] allowedKeys,
        string? policyRef = null,
        string[]? revokedKeys = null)
    {
        return new TrustAnchorEntity
        {
            AnchorId = Guid.NewGuid(),
            PurlPattern = pattern,
            AllowedKeyIds = allowedKeys,
            PolicyRef = policyRef,
            RevokedKeys = revokedKeys ?? []
        };
    }
}

View File

@@ -1,631 +0,0 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2025 StellaOps Contributors
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using FluentAssertions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using NSubstitute;
using StellaOps.Attestor.ProofChain;
using StellaOps.Attestor.ProofChain.Statements;
using StellaOps.Attestor.ProofChain.Verification;
using Xunit;
namespace StellaOps.Attestor.ProofChain.Tests;
/// <summary>
/// Load tests for proof chain API endpoints and verification pipeline.
/// Exercises throughput, latency distribution, determinism under concurrency,
/// and memory behavior for large proof batches.
/// Sprint: SPRINT_0501_0005_0001_proof_chain_api_surface
/// Task: PROOF-API-0012
/// </summary>
public class ApiLoadTests
{
    private readonly ILogger<VerificationPipeline> _logger = NullLogger<VerificationPipeline>.Instance;

    #region Proof Spine Creation Load Tests

    [Fact]
    public async Task CreateProofSpine_ConcurrentRequests_MaintainsThroughput()
    {
        // Arrange: Create synthetic SBOM entries for load testing
        const int concurrencyLevel = 50;
        const int operationsPerClient = 20;
        var totalOperations = concurrencyLevel * operationsPerClient;
        var proofSpineBuilder = CreateTestProofSpineBuilder();
        var latencies = new ConcurrentBag<long>();
        var errors = new ConcurrentBag<Exception>();
        var stopwatch = Stopwatch.StartNew();

        // Act: Run concurrent proof spine creations
        var tasks = Enumerable.Range(0, concurrencyLevel)
            .Select(clientId => Task.Run(async () =>
            {
                for (var i = 0; i < operationsPerClient; i++)
                {
                    try
                    {
                        var sw = Stopwatch.StartNew();
                        var entryId = GenerateSyntheticEntryId(clientId, i);
                        // Result intentionally discarded — only latency is measured here.
                        _ = await proofSpineBuilder.BuildAsync(
                            entryId,
                            GenerateSyntheticEvidenceIds(3),
                            $"sha256:{GenerateHash("reasoning")}",
                            $"sha256:{GenerateHash("vex")}",
                            "v2.3.1",
                            CancellationToken.None);
                        sw.Stop();
                        latencies.Add(sw.ElapsedMilliseconds);
                    }
                    catch (Exception ex)
                    {
                        errors.Add(ex);
                    }
                }
            }));
        await Task.WhenAll(tasks);
        stopwatch.Stop();

        // Assert: Verify load test metrics
        var successCount = latencies.Count;
        var errorCount = errors.Count;
        var throughput = successCount / stopwatch.Elapsed.TotalSeconds;
        var avgLatency = latencies.IsEmpty ? 0 : latencies.Average();
        // p95 is computed for diagnostics only; the SLO assertion below is on p99.
        var p95Latency = CalculatePercentile(latencies, 95);
        var p99Latency = CalculatePercentile(latencies, 99);

        // Performance assertions
        successCount.Should().Be(totalOperations, "all operations should complete successfully");
        errorCount.Should().Be(0, "no errors should occur during load test");
        throughput.Should().BeGreaterThan(100, "throughput should exceed 100 ops/sec");
        avgLatency.Should().BeLessThan(50, "average latency should be under 50ms");
        p99Latency.Should().BeLessThan(200, "p99 latency should be under 200ms");
    }

    [Fact]
    public async Task VerificationPipeline_ConcurrentVerifications_MaintainsAccuracy()
    {
        // Arrange
        const int concurrencyLevel = 30;
        const int verificationsPerClient = 10;
        var totalVerifications = concurrencyLevel * verificationsPerClient;
        var mockDsseVerifier = CreateMockDsseVerifier();
        var mockIdRecomputer = CreateMockIdRecomputer();
        var mockRekorVerifier = CreateMockRekorVerifier();
        var pipeline = new VerificationPipeline(
            mockDsseVerifier,
            mockIdRecomputer,
            mockRekorVerifier,
            _logger);
        var results = new ConcurrentBag<VerificationResult>();
        var latencies = new ConcurrentBag<long>();

        // Act: Run concurrent verifications
        var tasks = Enumerable.Range(0, concurrencyLevel)
            .Select(clientId => Task.Run(async () =>
            {
                for (var i = 0; i < verificationsPerClient; i++)
                {
                    var sw = Stopwatch.StartNew();
                    var proof = GenerateSyntheticProof(clientId, i);
                    var result = await pipeline.VerifyAsync(proof, CancellationToken.None);
                    sw.Stop();
                    latencies.Add(sw.ElapsedMilliseconds);
                    results.Add(result);
                }
            }));
        await Task.WhenAll(tasks);

        // Assert: All verifications should be deterministic
        results.Count.Should().Be(totalVerifications);
        results.All(r => r.IsValid).Should().BeTrue("all synthetic proofs should verify successfully");
        var avgLatency = latencies.Average();
        avgLatency.Should().BeLessThan(30, "verification should be fast");
    }

    #endregion

    #region Deterministic Ordering Tests Under Load

    [Fact]
    public void ProofSpineOrdering_UnderConcurrency_RemainsDeterministic()
    {
        // Arrange: Same inputs should produce same outputs under concurrent access
        // (no randomness needed — inputs are fixed and the hash must be stable).
        const int iterations = 100;
        var evidenceIds = Enumerable.Range(0, 5)
            .Select(i => $"sha256:{GenerateHash($"evidence{i}")}")
            .ToArray();
        var results = new ConcurrentBag<string>();

        // Act: Compute proof spine hash concurrently multiple times
        Parallel.For(0, iterations, _ =>
        {
            var sorted = evidenceIds.OrderBy(x => x).ToArray();
            var combined = string.Join(":", sorted);
            var hash = GenerateHash(combined);
            results.Add(hash);
        });

        // Assert: All results should be identical (deterministic)
        results.Distinct().Count().Should().Be(1, "concurrent computations should be deterministic");
    }

    [Fact]
    public async Task MerkleTree_ConcurrentBuilding_ProducesSameRoot()
    {
        // Arrange
        const int leafCount = 1000;
        const int iterations = 20;
        var leaves = Enumerable.Range(0, leafCount)
            .Select(i => Encoding.UTF8.GetBytes($"leaf-{i:D5}"))
            .ToList();
        var roots = new ConcurrentBag<string>();

        // Act: Build Merkle tree concurrently. The body is purely CPU-bound, so the
        // callback is synchronous (an async lambda without awaits triggers CS1998).
        await Parallel.ForEachAsync(Enumerable.Range(0, iterations), (_, _) =>
        {
            var builder = new MerkleTreeBuilder();
            foreach (var leaf in leaves)
            {
                builder.AddLeaf(leaf);
            }
            var root = builder.ComputeRoot();
            roots.Add(Convert.ToHexString(root));
            return ValueTask.CompletedTask;
        });

        // Assert: All roots should be identical
        roots.Distinct().Count().Should().Be(1, "Merkle tree root should be deterministic");
    }

    #endregion

    #region Throughput Benchmarks

    [Theory]
    [InlineData(10, 100)] // Light load
    [InlineData(50, 50)]  // Medium load
    [InlineData(100, 20)] // Heavy load
    public async Task ThroughputBenchmark_VariousLoadProfiles(int concurrency, int opsPerClient)
    {
        // Arrange
        var totalOps = concurrency * opsPerClient;
        var successCount = 0;
        var stopwatch = Stopwatch.StartNew();

        // Act: Simulate API calls
        var tasks = Enumerable.Range(0, concurrency)
            .Select(_ => Task.Run(() =>
            {
                for (var i = 0; i < opsPerClient; i++)
                {
                    // Simulate proof creation work (result intentionally discarded).
                    _ = GenerateHash($"proof-{Guid.NewGuid()}");
                    Interlocked.Increment(ref successCount);
                }
            }));
        await Task.WhenAll(tasks);
        stopwatch.Stop();

        // Assert
        var throughput = successCount / stopwatch.Elapsed.TotalSeconds;
        successCount.Should().Be(totalOps);
        throughput.Should().BeGreaterThan(1000, $"throughput at {concurrency} concurrency should exceed 1000 ops/sec");
    }

    [Fact]
    public async Task LatencyDistribution_UnderLoad_MeetsSloBudgets()
    {
        // Arrange: Define SLO budgets
        const double maxP50Ms = 10;
        const double maxP90Ms = 25;
        const double maxP99Ms = 100;
        const int sampleSize = 1000;
        var latencies = new ConcurrentBag<double>();

        // Act: Collect latency samples
        await Parallel.ForEachAsync(Enumerable.Range(0, sampleSize), async (i, ct) =>
        {
            var sw = Stopwatch.StartNew();
            // Simulate verification work (result intentionally discarded).
            _ = GenerateHash($"sample-{i}");
            await Task.Delay(1, ct); // Simulate I/O
            sw.Stop();
            latencies.Add(sw.Elapsed.TotalMilliseconds);
        });

        // Calculate percentiles
        var sorted = latencies.OrderBy(x => x).ToList();
        var p50 = CalculatePercentileFromSorted(sorted, 50);
        var p90 = CalculatePercentileFromSorted(sorted, 90);
        var p99 = CalculatePercentileFromSorted(sorted, 99);

        // Assert: SLO compliance
        p50.Should().BeLessThan(maxP50Ms, "p50 latency should meet SLO");
        p90.Should().BeLessThan(maxP90Ms, "p90 latency should meet SLO");
        p99.Should().BeLessThan(maxP99Ms, "p99 latency should meet SLO");
    }

    #endregion

    #region Memory and Resource Tests

    [Fact]
    public void LargeProofBatch_DoesNotCauseMemorySpike()
    {
        // Arrange
        const int batchSize = 10_000;
        var initialMemory = GC.GetTotalMemory(true);

        // Act: Create large batch of proofs
        var proofs = new List<string>(batchSize);
        for (var i = 0; i < batchSize; i++)
        {
            var proof = GenerateSyntheticProofJson(i);
            proofs.Add(proof);
        }

        // Force GC and measure
        var peakMemory = GC.GetTotalMemory(false);
        proofs.Clear();
        GC.Collect();
        var finalMemory = GC.GetTotalMemory(true);

        // Assert: Memory should not grow unbounded
        var memoryGrowth = peakMemory - initialMemory;
        var memoryRetained = finalMemory - initialMemory;
        // Each proof is ~500 bytes, so 10k proofs ≈ 5MB is reasonable
        memoryGrowth.Should().BeLessThan(50_000_000, "memory growth should be bounded (~50MB max for 10k proofs)");
        memoryRetained.Should().BeLessThan(10_000_000, "memory should be released after clearing");
    }

    #endregion

    #region Helper Methods

    /// <summary>
    /// Creates a mock <see cref="IProofSpineBuilder"/> whose BuildAsync echoes the
    /// entry id into a deterministic synthetic <see cref="ProofSpine"/>.
    /// </summary>
    private static IProofSpineBuilder CreateTestProofSpineBuilder()
    {
        var builder = Substitute.For<IProofSpineBuilder>();
        builder.BuildAsync(
            Arg.Any<string>(),
            Arg.Any<string[]>(),
            Arg.Any<string>(),
            Arg.Any<string>(),
            Arg.Any<string>(),
            Arg.Any<CancellationToken>())
            .Returns(callInfo =>
            {
                var entryId = callInfo.ArgAt<string>(0);
                return Task.FromResult(new ProofSpine
                {
                    EntryId = entryId,
                    SpineId = $"sha256:{GenerateHash(entryId)}",
                    PolicyVersion = callInfo.ArgAt<string>(4),
                    CreatedAt = DateTimeOffset.UtcNow
                });
            });
        return builder;
    }

    /// <summary>Mock DSSE verifier that accepts every envelope.</summary>
    private static IDsseVerifier CreateMockDsseVerifier()
    {
        var verifier = Substitute.For<IDsseVerifier>();
        verifier.VerifyAsync(Arg.Any<DsseEnvelope>(), Arg.Any<CancellationToken>())
            .Returns(Task.FromResult(new DsseVerificationResult { IsValid = true }));
        return verifier;
    }

    /// <summary>Mock ID recomputer that accepts every bundle.</summary>
    private static IIdRecomputer CreateMockIdRecomputer()
    {
        var recomputer = Substitute.For<IIdRecomputer>();
        recomputer.VerifyAsync(Arg.Any<ProofBundle>(), Arg.Any<CancellationToken>())
            .Returns(Task.FromResult(new IdVerificationResult { IsValid = true }));
        return recomputer;
    }

    /// <summary>Mock Rekor verifier that accepts every inclusion proof.</summary>
    private static IRekorVerifier CreateMockRekorVerifier()
    {
        var verifier = Substitute.For<IRekorVerifier>();
        verifier.VerifyInclusionAsync(Arg.Any<RekorEntry>(), Arg.Any<CancellationToken>())
            .Returns(Task.FromResult(new RekorVerificationResult { IsValid = true }));
        return verifier;
    }

    /// <summary>Deterministic synthetic entry id: "sha256:&lt;hash&gt;:&lt;purl&gt;".</summary>
    private static string GenerateSyntheticEntryId(int clientId, int index)
    {
        var hash = GenerateHash($"entry-{clientId}-{index}");
        return $"sha256:{hash}:pkg:npm/example@1.0.{index}";
    }

    /// <summary>Generates <paramref name="count"/> deterministic evidence ids.</summary>
    private static string[] GenerateSyntheticEvidenceIds(int count)
    {
        return Enumerable.Range(0, count)
            .Select(i => $"sha256:{GenerateHash($"evidence-{i}")}")
            .ToArray();
    }

    /// <summary>Builds a minimal valid proof bundle (no Rekor entry).</summary>
    private static ProofBundle GenerateSyntheticProof(int clientId, int index)
    {
        return new ProofBundle
        {
            EntryId = GenerateSyntheticEntryId(clientId, index),
            Envelope = new DsseEnvelope
            {
                PayloadType = "application/vnd.stellaops.proof+json",
                Payload = Convert.ToBase64String(Encoding.UTF8.GetBytes($"{{\"id\":\"{clientId}-{index}\"}}")),
                Signatures = new[]
                {
                    new DsseSignature
                    {
                        KeyId = "test-key",
                        Sig = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature"))
                    }
                }
            }
        };
    }

    /// <summary>Builds a ~500-byte JSON proof document for memory tests.</summary>
    private static string GenerateSyntheticProofJson(int index)
    {
        return $@"{{
    ""entryId"": ""sha256:{GenerateHash($"entry-{index}")}:pkg:npm/example@1.0.{index}"",
    ""spineId"": ""sha256:{GenerateHash($"spine-{index}")}"",
    ""evidenceIds"": [""{GenerateHash($"ev1-{index}")}"", ""{GenerateHash($"ev2-{index}")}""],
    ""reasoningId"": ""sha256:{GenerateHash($"reason-{index}")}"",
    ""vexVerdictId"": ""sha256:{GenerateHash($"vex-{index}")}"",
    ""policyVersion"": ""v2.3.1"",
    ""createdAt"": ""{DateTimeOffset.UtcNow:O}""
}}";
    }

    /// <summary>Lower-case hex SHA-256 of the UTF-8 encoding of <paramref name="input"/>.</summary>
    private static string GenerateHash(string input)
    {
        var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        return Convert.ToHexString(bytes).ToLowerInvariant();
    }

    /// <summary>Nearest-rank percentile over an unordered bag of latencies.</summary>
    private static double CalculatePercentile(ConcurrentBag<long> values, int percentile)
    {
        if (values.IsEmpty) return 0;
        var sorted = values.OrderBy(x => x).ToList();
        return CalculatePercentileFromSorted(sorted.Select(x => (double)x).ToList(), percentile);
    }

    /// <summary>Nearest-rank percentile over an already-sorted list.</summary>
    private static double CalculatePercentileFromSorted<T>(List<T> sorted, int percentile) where T : IConvertible
    {
        if (sorted.Count == 0) return 0;
        var index = (int)Math.Ceiling(percentile / 100.0 * sorted.Count) - 1;
        index = Math.Max(0, Math.Min(index, sorted.Count - 1));
        return sorted[index].ToDouble(null);
    }

    #endregion
}
#region Supporting Types for Load Tests
/// <summary>
/// Interface for proof spine building (mock target for load tests).
/// </summary>
public interface IProofSpineBuilder
{
    /// <summary>Builds a proof spine for a single SBOM entry.</summary>
    /// <param name="entryId">Content-addressed SBOM entry identifier.</param>
    /// <param name="evidenceIds">Evidence identifiers folded into the spine.</param>
    /// <param name="reasoningId">Content-addressed id of the reasoning record.</param>
    /// <param name="vexVerdictId">Content-addressed id of the VEX verdict.</param>
    /// <param name="policyVersion">Policy version the spine was evaluated under.</param>
    /// <param name="cancellationToken">Token to cancel the build.</param>
    Task<ProofSpine> BuildAsync(
        string entryId,
        string[] evidenceIds,
        string reasoningId,
        string vexVerdictId,
        string policyVersion,
        CancellationToken cancellationToken);
}
/// <summary>
/// Represents a proof spine created for an SBOM entry.
/// </summary>
public class ProofSpine
{
    /// <summary>SBOM entry this spine belongs to.</summary>
    public required string EntryId { get; init; }

    /// <summary>Content-addressed identifier of the spine itself.</summary>
    public required string SpineId { get; init; }

    /// <summary>Policy version the spine was built under.</summary>
    public required string PolicyVersion { get; init; }

    /// <summary>Creation timestamp (UTC offset).</summary>
    public required DateTimeOffset CreatedAt { get; init; }
}
/// <summary>
/// Interface for DSSE envelope verification.
/// </summary>
public interface IDsseVerifier
{
    /// <summary>Verifies the signatures on a DSSE envelope.</summary>
    Task<DsseVerificationResult> VerifyAsync(DsseEnvelope envelope, CancellationToken cancellationToken);
}
/// <summary>
/// DSSE verification result.
/// </summary>
public class DsseVerificationResult
{
    /// <summary>True when signature verification succeeded.</summary>
    public bool IsValid { get; init; }

    /// <summary>Failure detail when <see cref="IsValid"/> is false; otherwise null.</summary>
    public string? Error { get; init; }
}
/// <summary>
/// Interface for ID recomputation verification.
/// </summary>
public interface IIdRecomputer
{
    /// <summary>Recomputes content-addressed ids for a bundle and compares them to the stored ones.</summary>
    Task<IdVerificationResult> VerifyAsync(ProofBundle bundle, CancellationToken cancellationToken);
}
/// <summary>
/// ID verification result.
/// </summary>
public class IdVerificationResult
{
    /// <summary>True when the recomputed id matches the stored id.</summary>
    public bool IsValid { get; init; }

    /// <summary>Id recomputed from the bundle content (populated on mismatch).</summary>
    public string? ExpectedId { get; init; }

    /// <summary>Id carried by the bundle (populated on mismatch).</summary>
    public string? ActualId { get; init; }
}
/// <summary>
/// Interface for Rekor inclusion proof verification.
/// </summary>
public interface IRekorVerifier
{
    /// <summary>Verifies that an entry is included in the Rekor transparency log.</summary>
    Task<RekorVerificationResult> VerifyInclusionAsync(RekorEntry entry, CancellationToken cancellationToken);
}
/// <summary>
/// Rekor verification result.
/// </summary>
public class RekorVerificationResult
{
    /// <summary>True when the inclusion proof verified successfully.</summary>
    public bool IsValid { get; init; }

    /// <summary>Log index of the verified entry, when available.</summary>
    public long? LogIndex { get; init; }

    /// <summary>Failure detail when <see cref="IsValid"/> is false; otherwise null.</summary>
    public string? Error { get; init; }
}
/// <summary>
/// Represents a Rekor transparency log entry.
/// </summary>
public class RekorEntry
{
    /// <summary>Position of the entry in the transparency log.</summary>
    public long LogIndex { get; init; }

    /// <summary>Identifier of the log instance that holds the entry.</summary>
    public string? LogId { get; init; }

    /// <summary>Raw entry body as stored in the log.</summary>
    public string? Body { get; init; }

    /// <summary>Time the entry was integrated into the log.</summary>
    public DateTimeOffset IntegratedTime { get; init; }
}
/// <summary>
/// DSSE envelope for proof bundles.
/// </summary>
public class DsseEnvelope
{
    /// <summary>Media type of the payload (e.g. "application/vnd.stellaops.proof+json").</summary>
    public required string PayloadType { get; init; }

    /// <summary>Base64-encoded payload bytes.</summary>
    public required string Payload { get; init; }

    /// <summary>One or more signatures over the envelope.</summary>
    public required DsseSignature[] Signatures { get; init; }
}
/// <summary>
/// DSSE signature within an envelope.
/// </summary>
public class DsseSignature
{
    /// <summary>Identifier of the signing key.</summary>
    public required string KeyId { get; init; }

    /// <summary>Base64-encoded signature value.</summary>
    public required string Sig { get; init; }
}
/// <summary>
/// Complete proof bundle for verification.
/// </summary>
public class ProofBundle
{
    /// <summary>SBOM entry this bundle attests.</summary>
    public required string EntryId { get; init; }

    /// <summary>Signed DSSE envelope carrying the proof payload.</summary>
    public required DsseEnvelope Envelope { get; init; }

    /// <summary>Optional transparency-log entry; Rekor verification is skipped when null.</summary>
    public RekorEntry? RekorEntry { get; init; }
}
/// <summary>
/// Complete verification result from the pipeline.
/// </summary>
public class VerificationResult
{
    /// <summary>True only when every executed verification step succeeded.</summary>
    public bool IsValid { get; init; }

    /// <summary>DSSE step result; null if the step did not run.</summary>
    public DsseVerificationResult? DsseResult { get; init; }

    /// <summary>ID recomputation step result; null if the step did not run.</summary>
    public IdVerificationResult? IdResult { get; init; }

    /// <summary>Rekor step result; null when no Rekor entry was present or the step did not run.</summary>
    public RekorVerificationResult? RekorResult { get; init; }

    /// <summary>Human-readable description of the first failing step, when any.</summary>
    public string? Error { get; init; }
}
/// <summary>
/// Runs the full proof verification sequence in order: DSSE signature check,
/// content-addressed ID recomputation, then (when a Rekor entry is attached)
/// transparency-log inclusion. The sequence short-circuits on the first failure.
/// </summary>
public class VerificationPipeline
{
    private readonly IDsseVerifier _dsseVerifier;
    private readonly IIdRecomputer _idRecomputer;
    private readonly IRekorVerifier _rekorVerifier;
    private readonly ILogger<VerificationPipeline> _logger;

    public VerificationPipeline(
        IDsseVerifier dsseVerifier,
        IIdRecomputer idRecomputer,
        IRekorVerifier rekorVerifier,
        ILogger<VerificationPipeline> logger)
    {
        _dsseVerifier = dsseVerifier;
        _idRecomputer = idRecomputer;
        _rekorVerifier = rekorVerifier;
        _logger = logger;
    }

    /// <summary>
    /// Verifies <paramref name="bundle"/> and returns an aggregate result carrying
    /// the per-step outcomes produced before any failure.
    /// </summary>
    public async Task<VerificationResult> VerifyAsync(ProofBundle bundle, CancellationToken cancellationToken)
    {
        // Stage 1: the DSSE envelope signature must hold before anything else is trusted.
        var dsse = await _dsseVerifier.VerifyAsync(bundle.Envelope, cancellationToken);
        if (!dsse.IsValid)
        {
            return Failure(dsse, null, null, $"DSSE verification failed: {dsse.Error}");
        }

        // Stage 2: recompute the content-addressed ids and compare against the bundle.
        var id = await _idRecomputer.VerifyAsync(bundle, cancellationToken);
        if (!id.IsValid)
        {
            return Failure(dsse, id, null, $"ID mismatch: expected {id.ExpectedId}, got {id.ActualId}");
        }

        // Stage 3 (optional): transparency-log inclusion, only when an entry is attached.
        RekorVerificationResult? rekor = null;
        if (bundle.RekorEntry != null)
        {
            rekor = await _rekorVerifier.VerifyInclusionAsync(bundle.RekorEntry, cancellationToken);
            if (!rekor.IsValid)
            {
                return Failure(dsse, id, rekor, $"Rekor verification failed: {rekor.Error}");
            }
        }

        return new VerificationResult
        {
            IsValid = true,
            DsseResult = dsse,
            IdResult = id,
            RekorResult = rekor
        };
    }

    // Builds a failed result carrying whichever stage results existed before the failure.
    private static VerificationResult Failure(
        DsseVerificationResult? dsse,
        IdVerificationResult? id,
        RekorVerificationResult? rekor,
        string error) => new()
        {
            IsValid = false,
            DsseResult = dsse,
            IdResult = id,
            RekorResult = rekor,
            Error = error
        };
}
#endregion

View File

@@ -18,7 +18,7 @@ public class ContentAddressedIdGeneratorTests
public ContentAddressedIdGeneratorTests()
{
var canonicalizer = new JsonCanonicalizer();
var canonicalizer = new Rfc8785JsonCanonicalizer();
var merkleBuilder = new DeterministicMerkleTreeBuilder();
_generator = new ContentAddressedIdGenerator(canonicalizer, merkleBuilder);
}
@@ -117,8 +117,8 @@ public class ContentAddressedIdGeneratorTests
[Fact]
public void ComputeVexVerdictId_DifferentStatus_ProducesDifferentId()
{
var predicate1 = CreateTestVexPredicate() with { Status = VexStatus.Affected };
var predicate2 = CreateTestVexPredicate() with { Status = VexStatus.NotAffected };
var predicate1 = CreateTestVexPredicate() with { Status = "affected" };
var predicate2 = CreateTestVexPredicate() with { Status = "not_affected" };
var id1 = _generator.ComputeVexVerdictId(predicate1);
var id2 = _generator.ComputeVexVerdictId(predicate2);
@@ -152,8 +152,8 @@ public class ContentAddressedIdGeneratorTests
var vexVerdictId = CreateTestVexVerdictId();
// Different order, should produce same result
var unsorted = new[] { CreateTestEvidenceId("z"), CreateTestEvidenceId("a") };
var sorted = new[] { CreateTestEvidenceId("a"), CreateTestEvidenceId("z") };
var unsorted = new[] { CreateTestEvidenceId("f"), CreateTestEvidenceId("a") };
var sorted = new[] { CreateTestEvidenceId("a"), CreateTestEvidenceId("f") };
var id1 = _generator.ComputeProofBundleId(sbomEntryId, unsorted, reasoningId, vexVerdictId);
var id2 = _generator.ComputeProofBundleId(sbomEntryId, sorted, reasoningId, vexVerdictId);
@@ -272,9 +272,9 @@ public class ContentAddressedIdGeneratorTests
SbomEntryId = "sha256:sbom123:pkg:npm/lodash@4.17.21",
EvidenceIds = ["sha256:evidence1", "sha256:evidence2"],
PolicyVersion = "v2024.12.16",
Inputs = new ReasoningInputs
Inputs = new Dictionary<string, object>
{
CurrentEvaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero)
["currentEvaluationTime"] = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero)
}
};
@@ -282,12 +282,14 @@ public class ContentAddressedIdGeneratorTests
{
SbomEntryId = "sha256:sbom123:pkg:npm/lodash@4.17.21",
VulnerabilityId = "CVE-2024-1234",
Status = VexStatus.NotAffected,
Justification = "Vulnerable code is not in execution path"
Status = "not_affected",
Justification = "vulnerable_code_not_present",
PolicyVersion = "v2024.12.16",
ReasoningId = "sha256:reasoning1"
};
private static SbomEntryId CreateTestSbomEntryId() =>
new("sha256:sbom123", "pkg:npm/lodash", "4.17.21");
new($"sha256:{new string('0', 64)}", "pkg:npm/lodash", "4.17.21");
private static EvidenceId CreateTestEvidenceId(string suffix) =>
new($"a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6{suffix.PadLeft(4, '0')}"[..64]);

View File

@@ -43,16 +43,22 @@ public class ContentAddressedIdTests
}
[Theory]
[InlineData("")]
[InlineData(" ")]
[InlineData("invalid")]
[InlineData(":digest")]
[InlineData("algo:")]
public void Parse_InvalidFormat_Throws(string input)
public void Parse_InvalidFormat_ThrowsFormatException(string input)
{
Assert.Throws<FormatException>(() => ContentAddressedId.Parse(input));
}
[Theory]
[InlineData("")]
[InlineData(" ")]
public void Parse_EmptyOrWhitespace_ThrowsArgumentException(string input)
{
Assert.Throws<ArgumentException>(() => ContentAddressedId.Parse(input));
}
[Fact]
public void Parse_InvalidDigestLength_Throws()
{
@@ -68,26 +74,6 @@ public class ContentAddressedIdTests
Assert.Equal(input, id.ToString());
}
[Fact]
public void TrySplit_ValidInput_ReturnsTrue()
{
var valid = ContentAddressedId.TrySplit(
"sha256:a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2",
out var algorithm,
out var digest);
Assert.True(valid);
Assert.Equal("sha256", algorithm);
Assert.NotEmpty(digest);
}
[Fact]
public void TrySplit_InvalidInput_ReturnsFalse()
{
var valid = ContentAddressedId.TrySplit("invalid", out _, out _);
Assert.False(valid);
}
}
public class EvidenceIdTests
@@ -153,12 +139,14 @@ public class ProofBundleIdTests
public class SbomEntryIdTests
{
private static readonly string SbomDigest = $"sha256:{new string('a', 64)}";
[Fact]
public void Constructor_WithVersion_CreatesId()
{
var id = new SbomEntryId("sha256:abc123", "pkg:npm/lodash", "4.17.21");
var id = new SbomEntryId(SbomDigest, "pkg:npm/lodash", "4.17.21");
Assert.Equal("sha256:abc123", id.SbomDigest);
Assert.Equal(SbomDigest, id.SbomDigest);
Assert.Equal("pkg:npm/lodash", id.Purl);
Assert.Equal("4.17.21", id.Version);
}
@@ -166,9 +154,9 @@ public class SbomEntryIdTests
[Fact]
public void Constructor_WithoutVersion_CreatesId()
{
var id = new SbomEntryId("sha256:abc123", "pkg:npm/lodash");
var id = new SbomEntryId(SbomDigest, "pkg:npm/lodash");
Assert.Equal("sha256:abc123", id.SbomDigest);
Assert.Equal(SbomDigest, id.SbomDigest);
Assert.Equal("pkg:npm/lodash", id.Purl);
Assert.Null(id.Version);
}
@@ -176,15 +164,15 @@ public class SbomEntryIdTests
[Fact]
public void ToString_WithVersion_IncludesVersion()
{
var id = new SbomEntryId("sha256:abc123", "pkg:npm/lodash", "4.17.21");
Assert.Equal("sha256:abc123:pkg:npm/lodash@4.17.21", id.ToString());
var id = new SbomEntryId(SbomDigest, "pkg:npm/lodash", "4.17.21");
Assert.Equal($"{SbomDigest}:pkg:npm/lodash@4.17.21", id.ToString());
}
[Fact]
public void ToString_WithoutVersion_OmitsVersion()
{
var id = new SbomEntryId("sha256:abc123", "pkg:npm/lodash");
Assert.Equal("sha256:abc123:pkg:npm/lodash", id.ToString());
var id = new SbomEntryId(SbomDigest, "pkg:npm/lodash");
Assert.Equal($"{SbomDigest}:pkg:npm/lodash", id.ToString());
}
}

View File

@@ -6,18 +6,14 @@
// -----------------------------------------------------------------------------
using System.Text;
using System.Text.Json;
using StellaOps.Attestor.ProofChain.Json;
namespace StellaOps.Attestor.ProofChain.Tests;
public class JsonCanonicalizerTests
public sealed class JsonCanonicalizerTests
{
private readonly IJsonCanonicalizer _canonicalizer;
public JsonCanonicalizerTests()
{
_canonicalizer = new JsonCanonicalizer();
}
private readonly IJsonCanonicalizer _canonicalizer = new Rfc8785JsonCanonicalizer();
[Fact]
public void Canonicalize_SortsKeys()
@@ -29,9 +25,8 @@ public class JsonCanonicalizerTests
Assert.Contains("\"a\":", outputStr);
Assert.Contains("\"z\":", outputStr);
// Verify 'a' comes before 'z'
var aIndex = outputStr.IndexOf("\"a\":");
var zIndex = outputStr.IndexOf("\"z\":");
var aIndex = outputStr.IndexOf("\"a\":", StringComparison.Ordinal);
var zIndex = outputStr.IndexOf("\"z\":", StringComparison.Ordinal);
Assert.True(aIndex < zIndex, "Keys should be sorted alphabetically");
}
@@ -43,17 +38,18 @@ public class JsonCanonicalizerTests
var outputStr = Encoding.UTF8.GetString(output);
Assert.DoesNotContain(" ", outputStr);
Assert.Equal("{\"key\":\"value\"}", outputStr);
}
[Fact]
public void Canonicalize_PreservesUtf8()
public void Canonicalize_PreservesUnicodeContent()
{
var input = """{"text": "hello 🌍"}"""u8;
var text = "hello 世界 \U0001F30D";
var input = JsonSerializer.SerializeToUtf8Bytes(new { text });
var output = _canonicalizer.Canonicalize(input);
var outputStr = Encoding.UTF8.GetString(output);
Assert.Contains("世界", outputStr);
Assert.Contains("🌍", outputStr);
using var document = JsonDocument.Parse(output);
Assert.Equal(text, document.RootElement.GetProperty("text").GetString());
}
[Fact]
@@ -67,20 +63,6 @@ public class JsonCanonicalizerTests
Assert.Equal(output1, output2);
}
[Fact]
public void Canonicalize_NestedObjects_SortsAllLevels()
{
var input = """{"outer": {"z": 1, "a": 2}, "inner": {"y": 3, "b": 4}}"""u8;
var output = _canonicalizer.Canonicalize(input);
var outputStr = Encoding.UTF8.GetString(output);
// Check that nested keys are also sorted
var nestedA = outputStr.IndexOf("\"a\":");
var nestedZ = outputStr.IndexOf("\"z\":");
Assert.True(nestedA < nestedZ, "Nested keys should be sorted");
}
[Fact]
public void Canonicalize_Arrays_PreservesOrder()
{
@@ -91,16 +73,6 @@ public class JsonCanonicalizerTests
Assert.Contains("[3,1,2]", outputStr);
}
[Fact]
public void Canonicalize_NullValue_Preserved()
{
var input = """{"key": null}"""u8;
var output = _canonicalizer.Canonicalize(input);
var outputStr = Encoding.UTF8.GetString(output);
Assert.Contains("null", outputStr);
}
[Fact]
public void Canonicalize_BooleanValues_LowerCase()
{
@@ -114,18 +86,6 @@ public class JsonCanonicalizerTests
Assert.DoesNotContain("False", outputStr);
}
[Fact]
public void Canonicalize_Numbers_MinimalRepresentation()
{
var input = """{"integer": 42, "float": 3.14, "zero": 0}"""u8;
var output = _canonicalizer.Canonicalize(input);
var outputStr = Encoding.UTF8.GetString(output);
Assert.Contains("42", outputStr);
Assert.Contains("3.14", outputStr);
Assert.Contains("0", outputStr);
}
[Fact]
public void Canonicalize_EmptyObject_ReturnsEmptyBraces()
{
@@ -135,90 +95,5 @@ public class JsonCanonicalizerTests
var outputStr = Encoding.UTF8.GetString(output);
Assert.Equal("{}", outputStr);
}
[Fact]
public void Canonicalize_EmptyArray_ReturnsEmptyBrackets()
{
var input = """{"arr": []}"""u8;
var output = _canonicalizer.Canonicalize(input);
var outputStr = Encoding.UTF8.GetString(output);
Assert.Contains("[]", outputStr);
}
[Fact]
public void Canonicalize_StringEscaping_Preserved()
{
var input = """{"text": "line1\nline2\ttab"}"""u8;
var output = _canonicalizer.Canonicalize(input);
var outputStr = Encoding.UTF8.GetString(output);
Assert.Contains("\\n", outputStr);
Assert.Contains("\\t", outputStr);
}
[Theory]
[InlineData("""{"a":1}""")]
[InlineData("""{"a":1,"b":2}""")]
[InlineData("""{"nested":{"key":"value"}}""")]
[InlineData("""{"array":[1,2,3]}""")]
public void Canonicalize_AlreadyCanonical_Unchanged(string input)
{
var inputBytes = Encoding.UTF8.GetBytes(input);
var output = _canonicalizer.Canonicalize(inputBytes);
var outputStr = Encoding.UTF8.GetString(output);
Assert.Equal(input, outputStr);
}
[Fact]
public void Canonicalize_ComplexNesting_Deterministic()
{
var input = """
{
"level1": {
"z": {
"y": 1,
"x": 2
},
"a": {
"b": 3,
"a": 4
}
},
"array": [
{"z": 1, "a": 2},
{"y": 3, "b": 4}
]
}
"""u8;
var output1 = _canonicalizer.Canonicalize(input);
var output2 = _canonicalizer.Canonicalize(input);
Assert.Equal(output1, output2);
var outputStr = Encoding.UTF8.GetString(output1);
Assert.DoesNotContain("\n", outputStr);
Assert.DoesNotContain(" ", outputStr);
}
[Fact]
public void CanonicalizeDifferentWhitespace_ProducesSameOutput()
{
var input1 = """{"key":"value"}"""u8;
var input2 = """{ "key" : "value" }"""u8;
var input3 = """
{
"key": "value"
}
"""u8;
var output1 = _canonicalizer.Canonicalize(input1);
var output2 = _canonicalizer.Canonicalize(input2);
var output3 = _canonicalizer.Canonicalize(input3);
Assert.Equal(output1, output2);
Assert.Equal(output2, output3);
}
}

View File

@@ -104,14 +104,11 @@ public class MerkleTreeBuilderTests
}
[Fact]
public void ComputeMerkleRoot_EmptyLeaves_ReturnsEmptyOrZeroHash()
public void ComputeMerkleRoot_EmptyLeaves_Throws()
{
var leaves = Array.Empty<ReadOnlyMemory<byte>>();
// Should handle gracefully (either empty or zero hash)
var root = _builder.ComputeMerkleRoot(leaves);
Assert.NotNull(root);
Assert.Throws<ArgumentException>(() => _builder.ComputeMerkleRoot(leaves));
}
[Fact]

View File

@@ -243,7 +243,7 @@ public class ProofSpineAssemblyIntegrationTests
leaves.Add(Encoding.UTF8.GetBytes(vexVerdictId));
// Build merkle tree
return _builder.ComputeMerkleRoot(leaves.ToArray());
return _builder.ComputeMerkleRoot(leaves);
}
private static string FormatAsId(byte[] hash)
@@ -251,65 +251,3 @@ public class ProofSpineAssemblyIntegrationTests
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
}
/// <summary>
/// Interface for merkle tree building.
/// </summary>
public interface IMerkleTreeBuilder
{
byte[] ComputeMerkleRoot(ReadOnlyMemory<byte>[] leaves);
}
/// <summary>
/// Deterministic merkle tree builder using SHA-256.
/// </summary>
public class DeterministicMerkleTreeBuilder : IMerkleTreeBuilder
{
public byte[] ComputeMerkleRoot(ReadOnlyMemory<byte>[] leaves)
{
if (leaves.Length == 0)
{
return new byte[32]; // Zero hash for empty tree
}
// Hash all leaves
var currentLevel = new List<byte[]>();
using var sha256 = System.Security.Cryptography.SHA256.Create();
foreach (var leaf in leaves)
{
currentLevel.Add(sha256.ComputeHash(leaf.ToArray()));
}
// Pad to power of 2 by duplicating last leaf
while (!IsPowerOfTwo(currentLevel.Count))
{
currentLevel.Add(currentLevel[^1]);
}
// Build tree bottom-up
while (currentLevel.Count > 1)
{
var nextLevel = new List<byte[]>();
for (int i = 0; i < currentLevel.Count; i += 2)
{
var left = currentLevel[i];
var right = currentLevel[i + 1];
// Concatenate and hash
var combined = new byte[left.Length + right.Length];
Buffer.BlockCopy(left, 0, combined, 0, left.Length);
Buffer.BlockCopy(right, 0, combined, left.Length, right.Length);
nextLevel.Add(sha256.ComputeHash(combined));
}
currentLevel = nextLevel;
}
return currentLevel[0];
}
private static bool IsPowerOfTwo(int n) => n > 0 && (n & (n - 1)) == 0;
}

View File

@@ -0,0 +1,122 @@
using FluentAssertions;
using Org.BouncyCastle.Crypto.Parameters;
using StellaOps.Attestor.Envelope;
using StellaOps.Attestor.ProofChain.Builders;
using StellaOps.Attestor.ProofChain.Json;
using StellaOps.Attestor.ProofChain.Signing;
using StellaOps.Attestor.ProofChain.Statements;
namespace StellaOps.Attestor.ProofChain.Tests.Signing;
public sealed class ProofChainSignerTests
{
private static readonly DateTimeOffset FixedTime = new(2025, 12, 17, 0, 0, 0, TimeSpan.Zero);
[Fact]
public async Task SignThenVerify_EvidenceStatement_Passes()
{
var (signer, keyId) = CreateSigner();
var statement = CreateEvidenceStatement(evidenceId: $"sha256:{new string('0', 64)}");
var envelope = await signer.SignStatementAsync(statement, SigningKeyProfile.Evidence);
envelope.PayloadType.Should().Be(ProofChainSigner.InTotoPayloadType);
envelope.Signatures.Should().ContainSingle();
envelope.Signatures[0].KeyId.Should().Be(keyId);
envelope.Signatures[0].Sig.Should().NotBeNullOrWhiteSpace();
envelope.Payload.Should().NotBeNullOrWhiteSpace();
var result = await signer.VerifyEnvelopeAsync(envelope, new[] { keyId });
result.IsValid.Should().BeTrue();
result.KeyId.Should().Be(keyId);
}
[Fact]
public async Task Verify_TamperedPayload_Fails()
{
var (signer, keyId) = CreateSigner();
var statement = CreateEvidenceStatement(evidenceId: $"sha256:{new string('1', 64)}");
var envelope = await signer.SignStatementAsync(statement, SigningKeyProfile.Evidence);
var payloadBytes = Convert.FromBase64String(envelope.Payload);
payloadBytes[^1] ^= 0xff;
var tampered = envelope with { Payload = Convert.ToBase64String(payloadBytes) };
var result = await signer.VerifyEnvelopeAsync(tampered, new[] { keyId });
result.IsValid.Should().BeFalse();
}
[Fact]
public async Task CrossPlatformVector_Ed25519Signature_IsStable()
{
var (signer, keyId) = CreateSigner(keyIdOverride: "test-key");
var statement = CreateEvidenceStatement(evidenceId: $"sha256:{new string('2', 64)}");
var envelope = await signer.SignStatementAsync(statement, SigningKeyProfile.Evidence);
envelope.Signatures[0].KeyId.Should().Be(keyId);
// Filled in after the first successful run to lock the vector across platforms/implementations.
const string expectedSig = "zJtzdRX76ENKf4IePv5AyTxqdS2YlVMcseaw2UBh1eBhfarUNq2AdiKyxVMWPftSy2uJJGfo7R7BilQO+Xj8AA==";
envelope.Signatures[0].Sig.Should().Be(expectedSig);
}
private static EvidenceStatement CreateEvidenceStatement(string evidenceId)
{
var builder = new StatementBuilder();
var subject = new ProofSubject
{
Name = "image:demo",
Digest = new Dictionary<string, string> { ["sha256"] = "abc123" }
};
var predicate = new EvidencePayload
{
Source = "trivy",
SourceVersion = "0.50.0",
CollectionTime = FixedTime,
SbomEntryId = "sha256:sbom:pkg:npm/lodash@4.17.21",
VulnerabilityId = "CVE-2025-1234",
RawFinding = new { severity = "high" },
EvidenceId = evidenceId
};
return builder.BuildEvidenceStatement(subject, predicate);
}
private static (IProofChainSigner Signer, string KeyId) CreateSigner(string? keyIdOverride = null)
{
var seed = Enumerable.Range(0, 32).Select(static i => (byte)i).ToArray();
var privateKey = new Ed25519PrivateKeyParameters(seed, 0);
var publicKey = privateKey.GeneratePublicKey().GetEncoded();
var key = EnvelopeKey.CreateEd25519Signer(seed, publicKey, keyId: keyIdOverride ?? "proofchain-test-key");
var keyStore = new StaticKeyStore(new Dictionary<SigningKeyProfile, EnvelopeKey>
{
[SigningKeyProfile.Evidence] = key
});
return (new ProofChainSigner(keyStore, new Rfc8785JsonCanonicalizer()), key.KeyId);
}
private sealed class StaticKeyStore : IProofChainKeyStore
{
private readonly IReadOnlyDictionary<SigningKeyProfile, EnvelopeKey> _signingKeys;
private readonly IReadOnlyDictionary<string, EnvelopeKey> _verificationKeys;
public StaticKeyStore(IReadOnlyDictionary<SigningKeyProfile, EnvelopeKey> signingKeys)
{
_signingKeys = signingKeys;
_verificationKeys = signingKeys.Values.ToDictionary(static key => key.KeyId, static key => key, StringComparer.Ordinal);
}
public bool TryGetSigningKey(SigningKeyProfile profile, out EnvelopeKey key)
=> _signingKeys.TryGetValue(profile, out key!);
public bool TryGetVerificationKey(string keyId, out EnvelopeKey key)
=> _verificationKeys.TryGetValue(keyId, out key!);
}
}

View File

@@ -8,191 +8,130 @@ using StellaOps.Attestor.ProofChain.Statements;
namespace StellaOps.Attestor.ProofChain.Tests.Statements;
/// <summary>
/// Unit tests for all DSSE statement types (Task PROOF-PRED-0012).
/// Unit tests for proof chain statement construction (Task PROOF-PRED-0012).
/// </summary>
public class StatementBuilderTests
public sealed class StatementBuilderTests
{
private readonly StatementBuilder _builder = new();
private readonly DateTimeOffset _fixedTime = new(2025, 12, 16, 10, 0, 0, TimeSpan.Zero);
[Fact]
public void BuildEvidenceStatement_SetsPredicateType()
public void BuildEvidenceStatement_SetsPredicateTypeAndSubject()
{
var statement = _builder.BuildEvidenceStatement(
subject: new InTotoSubject { Name = "test-artifact", Digest = new() { ["sha256"] = "abc123" } },
source: "trivy",
sourceVersion: "0.50.0",
collectionTime: _fixedTime,
sbomEntryId: "sbom-123");
var subject = CreateSubject("image:demo", "abc123");
var predicate = new EvidencePayload
{
Source = "trivy",
SourceVersion = "0.50.0",
CollectionTime = _fixedTime,
SbomEntryId = "sha256:sbom:pkg:npm/lodash@4.17.21",
VulnerabilityId = "CVE-2025-1234",
RawFinding = new { severity = "high" },
EvidenceId = $"sha256:{new string('0', 64)}"
};
var statement = _builder.BuildEvidenceStatement(subject, predicate);
Assert.Equal("evidence.stella/v1", statement.PredicateType);
Assert.Equal("https://in-toto.io/Statement/v1", statement.Type);
}
[Fact]
public void BuildEvidenceStatement_PopulatesPredicate()
{
var statement = _builder.BuildEvidenceStatement(
subject: new InTotoSubject { Name = "test-artifact", Digest = new() { ["sha256"] = "abc123" } },
source: "trivy",
sourceVersion: "0.50.0",
collectionTime: _fixedTime,
sbomEntryId: "sbom-123",
vulnerabilityId: "CVE-2025-1234");
Assert.Equal("evidence.stella/v1", statement.PredicateType);
Assert.Single(statement.Subject);
Assert.Equal(subject.Name, statement.Subject[0].Name);
Assert.Equal("abc123", statement.Subject[0].Digest["sha256"]);
Assert.Equal("trivy", statement.Predicate.Source);
Assert.Equal("0.50.0", statement.Predicate.SourceVersion);
Assert.Equal(_fixedTime, statement.Predicate.CollectionTime);
Assert.Equal("sbom-123", statement.Predicate.SbomEntryId);
Assert.Equal("CVE-2025-1234", statement.Predicate.VulnerabilityId);
}
[Fact]
public void BuildProofSpineStatement_SetsPredicateType()
public void BuildSbomLinkageStatement_SetsAllSubjects()
{
var statement = _builder.BuildProofSpineStatement(
subject: new InTotoSubject { Name = "image:v1.0", Digest = new() { ["sha256"] = "abc123" } },
spineAlgorithm: "sha256-merkle",
rootHash: "root-hash",
leafHashes: ["leaf1", "leaf2", "leaf3"]);
var subjects = new[]
{
CreateSubject("image:demo", "abc123"),
CreateSubject("pkg:npm/lodash@4.17.21", "def456"),
};
Assert.Equal("proofspine.stella/v1", statement.PredicateType);
var predicate = new SbomLinkagePayload
{
Sbom = new SbomDescriptor
{
Id = "sbom-1",
Format = "cyclonedx",
SpecVersion = "1.6",
MediaType = "application/vnd.cyclonedx+json",
Sha256 = new string('1', 64),
Location = "file:///sboms/demo.json"
},
Generator = new GeneratorDescriptor
{
Name = "stellaops-sbomgen",
Version = "0.1.0"
},
GeneratedAt = _fixedTime,
Tags = new Dictionary<string, string> { ["env"] = "test" }
};
var statement = _builder.BuildSbomLinkageStatement(subjects, predicate);
Assert.Equal("https://stella-ops.org/predicates/sbom-linkage/v1", statement.PredicateType);
Assert.Equal(2, statement.Subject.Count);
Assert.Equal(subjects[0].Name, statement.Subject[0].Name);
Assert.Equal(subjects[1].Name, statement.Subject[1].Name);
}
[Fact]
public void BuildProofSpineStatement_ContainsLeafHashes()
public void BuildSbomLinkageStatement_EmptySubjects_Throws()
{
var leafHashes = new[] { "hash1", "hash2", "hash3", "hash4" };
var statement = _builder.BuildProofSpineStatement(
subject: new InTotoSubject { Name = "image:v1.0", Digest = new() { ["sha256"] = "abc123" } },
spineAlgorithm: "sha256-merkle",
rootHash: "merkle-root",
leafHashes: leafHashes);
var predicate = new SbomLinkagePayload
{
Sbom = new SbomDescriptor
{
Id = "sbom-1",
Format = "cyclonedx",
SpecVersion = "1.6",
MediaType = "application/vnd.cyclonedx+json",
Sha256 = new string('1', 64)
},
Generator = new GeneratorDescriptor
{
Name = "stellaops-sbomgen",
Version = "0.1.0"
},
GeneratedAt = _fixedTime
};
Assert.Equal("sha256-merkle", statement.Predicate.Algorithm);
Assert.Equal("merkle-root", statement.Predicate.RootHash);
Assert.Equal(4, statement.Predicate.LeafHashes.Length);
}
[Fact]
public void BuildVexVerdictStatement_SetsPredicateType()
{
var statement = _builder.BuildVexVerdictStatement(
subject: new InTotoSubject { Name = "pkg:npm/lodash@4.17.21", Digest = new() { ["sha256"] = "abc123" } },
vulnerabilityId: "CVE-2025-1234",
vexStatus: "not_affected",
justification: "vulnerable_code_not_present",
analysisTime: _fixedTime);
Assert.Equal("vexverdict.stella/v1", statement.PredicateType);
}
[Fact]
public void BuildVexVerdictStatement_PopulatesVexDetails()
{
var statement = _builder.BuildVexVerdictStatement(
subject: new InTotoSubject { Name = "pkg:npm/lodash@4.17.21", Digest = new() { ["sha256"] = "abc123" } },
vulnerabilityId: "CVE-2025-1234",
vexStatus: "not_affected",
justification: "vulnerable_code_not_present",
analysisTime: _fixedTime);
Assert.Equal("CVE-2025-1234", statement.Predicate.VulnerabilityId);
Assert.Equal("not_affected", statement.Predicate.Status);
Assert.Equal("vulnerable_code_not_present", statement.Predicate.Justification);
}
[Fact]
public void BuildReasoningStatement_SetsPredicateType()
{
var statement = _builder.BuildReasoningStatement(
subject: new InTotoSubject { Name = "finding:123", Digest = new() { ["sha256"] = "abc123" } },
reasoningType: "exploitability",
conclusion: "not_exploitable",
evidenceRefs: ["evidence1", "evidence2"]);
Assert.Equal("reasoning.stella/v1", statement.PredicateType);
}
[Fact]
public void BuildVerdictReceiptStatement_SetsPredicateType()
{
var statement = _builder.BuildVerdictReceiptStatement(
subject: new InTotoSubject { Name = "scan:456", Digest = new() { ["sha256"] = "abc123" } },
verdictHash: "verdict-hash",
verdictTime: _fixedTime,
signatureAlgorithm: "ECDSA-P256");
Assert.Equal("verdictreceipt.stella/v1", statement.PredicateType);
}
[Fact]
public void BuildSbomLinkageStatement_SetsPredicateType()
{
var statement = _builder.BuildSbomLinkageStatement(
subject: new InTotoSubject { Name = "image:v1.0", Digest = new() { ["sha256"] = "abc123" } },
sbomDigest: "sbom-digest",
sbomFormat: "cyclonedx",
sbomVersion: "1.6");
Assert.Equal("sbomlinkage.stella/v1", statement.PredicateType);
}
[Fact]
public void AllStatements_SerializeToValidJson()
{
var subject = new InTotoSubject { Name = "test", Digest = new() { ["sha256"] = "abc" } };
var evidence = _builder.BuildEvidenceStatement(subject, "trivy", "1.0", _fixedTime, "sbom1");
var spine = _builder.BuildProofSpineStatement(subject, "sha256", "root", ["leaf1"]);
var vex = _builder.BuildVexVerdictStatement(subject, "CVE-1", "fixed", null, _fixedTime);
var reasoning = _builder.BuildReasoningStatement(subject, "exploitability", "safe", []);
var receipt = _builder.BuildVerdictReceiptStatement(subject, "hash", _fixedTime, "ECDSA");
var sbom = _builder.BuildSbomLinkageStatement(subject, "sbom-hash", "spdx", "3.0");
// All should serialize without throwing
Assert.NotNull(JsonSerializer.Serialize(evidence));
Assert.NotNull(JsonSerializer.Serialize(spine));
Assert.NotNull(JsonSerializer.Serialize(vex));
Assert.NotNull(JsonSerializer.Serialize(reasoning));
Assert.NotNull(JsonSerializer.Serialize(receipt));
Assert.NotNull(JsonSerializer.Serialize(sbom));
Assert.Throws<ArgumentException>(() => _builder.BuildSbomLinkageStatement(Array.Empty<ProofSubject>(), predicate));
}
[Fact]
public void EvidenceStatement_RoundTripsViaJson()
{
var original = _builder.BuildEvidenceStatement(
subject: new InTotoSubject { Name: "artifact", Digest = new() { ["sha256"] = "hash123" } },
source: "grype",
sourceVersion: "0.80.0",
collectionTime: _fixedTime,
sbomEntryId: "entry-456",
vulnerabilityId: "CVE-2025-9999");
var subject = CreateSubject("image:demo", "abc123");
var statement = _builder.BuildEvidenceStatement(subject, new EvidencePayload
{
Source = "grype",
SourceVersion = "0.80.0",
CollectionTime = _fixedTime,
SbomEntryId = "sha256:sbom:pkg:npm/lodash@4.17.21",
VulnerabilityId = "CVE-2025-9999",
RawFinding = "raw",
EvidenceId = $"sha256:{new string('2', 64)}"
});
var json = JsonSerializer.Serialize(original);
var json = JsonSerializer.Serialize(statement);
var restored = JsonSerializer.Deserialize<EvidenceStatement>(json);
Assert.NotNull(restored);
Assert.Equal(original.PredicateType, restored.PredicateType);
Assert.Equal(original.Predicate.Source, restored.Predicate.Source);
Assert.Equal(original.Predicate.VulnerabilityId, restored.Predicate.VulnerabilityId);
Assert.Equal(statement.PredicateType, restored.PredicateType);
Assert.Equal(statement.Subject[0].Name, restored.Subject[0].Name);
Assert.Equal(statement.Predicate.EvidenceId, restored.Predicate.EvidenceId);
Assert.Equal(statement.Predicate.VulnerabilityId, restored.Predicate.VulnerabilityId);
}
[Fact]
public void ProofSpineStatement_RoundTripsViaJson()
{
var original = _builder.BuildProofSpineStatement(
subject: new InTotoSubject { Name = "image:latest", Digest = new() { ["sha256"] = "img-hash" } },
spineAlgorithm: "sha256-merkle-v2",
rootHash: "merkle-root-abc",
leafHashes: ["a", "b", "c", "d"]);
var json = JsonSerializer.Serialize(original);
var restored = JsonSerializer.Deserialize<ProofSpineStatement>(json);
Assert.NotNull(restored);
Assert.Equal(original.Predicate.RootHash, restored.Predicate.RootHash);
Assert.Equal(original.Predicate.LeafHashes.Length, restored.Predicate.LeafHashes.Length);
}
private static ProofSubject CreateSubject(string name, string sha256Digest)
=> new()
{
Name = name,
Digest = new Dictionary<string, string> { ["sha256"] = sha256Digest }
};
}

View File

@@ -1,172 +0,0 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps Contributors
using System.Text.Json;
using StellaOps.Attestor.ProofChain.Builders;
using StellaOps.Attestor.ProofChain.Statements;
using StellaOps.Attestor.ProofChain.Validation;
namespace StellaOps.Attestor.ProofChain.Tests.Statements;
/// <summary>
/// Unit tests for statement validation (Task PROOF-PRED-0015).
/// </summary>
public class StatementValidatorTests
{
private readonly StatementBuilder _builder = new();
private readonly IStatementValidator _validator = new StatementValidator();
private readonly DateTimeOffset _fixedTime = new(2025, 12, 16, 10, 0, 0, TimeSpan.Zero);
[Fact]
public void Validate_ValidEvidenceStatement_ReturnsSuccess()
{
var statement = _builder.BuildEvidenceStatement(
subject: new InTotoSubject { Name = "artifact", Digest = new() { ["sha256"] = "abc123" } },
source: "trivy",
sourceVersion: "0.50.0",
collectionTime: _fixedTime,
sbomEntryId: "sbom-123");
var result = _validator.Validate(statement);
Assert.True(result.IsValid);
Assert.Empty(result.Errors);
}
[Fact]
public void Validate_EvidenceStatementWithEmptySource_ReturnsError()
{
var statement = new EvidenceStatement
{
Subject = [new InTotoSubject { Name = "artifact", Digest = new() { ["sha256"] = "abc" } }],
Predicate = new EvidencePayload
{
Source = "",
SourceVersion = "1.0",
CollectionTime = _fixedTime,
SbomEntryId = "sbom-1"
}
};
var result = _validator.Validate(statement);
Assert.False(result.IsValid);
Assert.Contains(result.Errors, e => e.Contains("Source"));
}
[Fact]
public void Validate_StatementWithEmptySubject_ReturnsError()
{
var statement = new EvidenceStatement
{
Subject = [],
Predicate = new EvidencePayload
{
Source = "trivy",
SourceVersion = "1.0",
CollectionTime = _fixedTime,
SbomEntryId = "sbom-1"
}
};
var result = _validator.Validate(statement);
Assert.False(result.IsValid);
Assert.Contains(result.Errors, e => e.Contains("Subject"));
}
[Fact]
public void Validate_ProofSpineWithEmptyLeafHashes_ReturnsError()
{
var statement = new ProofSpineStatement
{
Subject = [new InTotoSubject { Name = "image", Digest = new() { ["sha256"] = "hash" } }],
Predicate = new ProofSpinePayload
{
Algorithm = "sha256-merkle",
RootHash = "root",
LeafHashes = []
}
};
var result = _validator.Validate(statement);
Assert.False(result.IsValid);
Assert.Contains(result.Errors, e => e.Contains("LeafHashes"));
}
[Fact]
public void Validate_VexVerdictWithValidStatus_ReturnsSuccess()
{
var validStatuses = new[] { "not_affected", "affected", "fixed", "under_investigation" };
foreach (var status in validStatuses)
{
var statement = _builder.BuildVexVerdictStatement(
subject: new InTotoSubject { Name = "pkg", Digest = new() { ["sha256"] = "abc" } },
vulnerabilityId: "CVE-2025-1",
vexStatus: status,
justification: null,
analysisTime: _fixedTime);
var result = _validator.Validate(statement);
Assert.True(result.IsValid, $"Status '{status}' should be valid");
}
}
[Fact]
public void Validate_VexVerdictWithInvalidStatus_ReturnsError()
{
var statement = new VexVerdictStatement
{
Subject = [new InTotoSubject { Name = "pkg", Digest = new() { ["sha256"] = "abc" } }],
Predicate = new VexVerdictPayload
{
VulnerabilityId = "CVE-2025-1",
Status = "invalid_status",
AnalysisTime = _fixedTime
}
};
var result = _validator.Validate(statement);
Assert.False(result.IsValid);
Assert.Contains(result.Errors, e => e.Contains("Status"));
}
[Fact]
public void Validate_ReasoningStatementWithEvidence_ReturnsSuccess()
{
var statement = _builder.BuildReasoningStatement(
subject: new InTotoSubject { Name = "finding", Digest = new() { ["sha256"] = "abc" } },
reasoningType: "exploitability",
conclusion: "not_exploitable",
evidenceRefs: ["evidence-1", "evidence-2"]);
var result = _validator.Validate(statement);
Assert.True(result.IsValid);
}
[Fact]
public void Validate_SubjectWithMissingDigest_ReturnsError()
{
var statement = new EvidenceStatement
{
Subject = [new InTotoSubject { Name = "artifact", Digest = new() }],
Predicate = new EvidencePayload
{
Source = "trivy",
SourceVersion = "1.0",
CollectionTime = _fixedTime,
SbomEntryId = "sbom-1"
}
};
var result = _validator.Validate(statement);
Assert.False(result.IsValid);
Assert.Contains(result.Errors, e => e.Contains("Digest"));
}
}

View File

@@ -14,7 +14,7 @@
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.4" />
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.24407.12" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="NSubstitute" Version="5.1.0" />
<PackageReference Include="xunit" Version="2.9.3" />
@@ -26,7 +26,7 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include=\"..\\..\\__Libraries\\StellaOps.Attestor.ProofChain\\StellaOps.Attestor.ProofChain.csproj\" />
<ProjectReference Include="..\..\__Libraries\StellaOps.Attestor.ProofChain\StellaOps.Attestor.ProofChain.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,465 +0,0 @@
// -----------------------------------------------------------------------------
// VerificationPipelineIntegrationTests.cs
// Sprint: SPRINT_0501_0001_0001_proof_evidence_chain_master
// Task: PROOF-MASTER-0002
// Description: Integration tests for the full proof chain verification pipeline
// -----------------------------------------------------------------------------
using FluentAssertions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using NSubstitute;
using StellaOps.Attestor.ProofChain.Identifiers;
using StellaOps.Attestor.ProofChain.Verification;
using Xunit;
namespace StellaOps.Attestor.ProofChain.Tests.Verification;
/// <summary>
/// Integration tests for the verification pipeline.
/// Tests PROOF-MASTER-0002: Full proof chain verification flow.
/// </summary>
public class VerificationPipelineIntegrationTests
{
private readonly IProofBundleStore _proofStore;
private readonly IDsseVerifier _dsseVerifier;
private readonly IRekorVerifier _rekorVerifier;
private readonly ITrustAnchorResolver _trustAnchorResolver;
private readonly ILogger<VerificationPipeline> _logger;
private readonly FakeTimeProvider _timeProvider;
public VerificationPipelineIntegrationTests()
{
_proofStore = Substitute.For<IProofBundleStore>();
_dsseVerifier = Substitute.For<IDsseVerifier>();
_rekorVerifier = Substitute.For<IRekorVerifier>();
_trustAnchorResolver = Substitute.For<ITrustAnchorResolver>();
_logger = NullLogger<VerificationPipeline>.Instance;
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 12, 17, 12, 0, 0, TimeSpan.Zero));
}
#region Full Pipeline Tests
[Fact]
public async Task VerifyAsync_ValidProofBundle_AllStepsPass()
{
// Arrange
var bundleId = new ProofBundleId("sha256:valid123");
var keyId = "key-1";
SetupValidBundle(bundleId, keyId);
SetupValidDsseVerification(keyId);
SetupValidRekorVerification();
SetupValidTrustAnchor(keyId);
var pipeline = CreatePipeline();
var request = new VerificationPipelineRequest
{
ProofBundleId = bundleId,
VerifyRekor = true,
VerifierVersion = "1.0.0-test"
};
// Act
var result = await pipeline.VerifyAsync(request);
// Assert
result.IsValid.Should().BeTrue();
result.Receipt.Result.Should().Be(VerificationResult.Pass);
result.Steps.Should().HaveCount(4);
result.Steps.Should().OnlyContain(s => s.Passed);
result.FirstFailure.Should().BeNull();
}
[Fact]
public async Task VerifyAsync_InvalidDsseSignature_FailsAtFirstStep()
{
// Arrange
var bundleId = new ProofBundleId("sha256:invalid-sig");
var keyId = "key-1";
SetupValidBundle(bundleId, keyId);
SetupInvalidDsseVerification(keyId, "Signature mismatch");
var pipeline = CreatePipeline();
var request = new VerificationPipelineRequest { ProofBundleId = bundleId };
// Act
var result = await pipeline.VerifyAsync(request);
// Assert
result.IsValid.Should().BeFalse();
result.Receipt.Result.Should().Be(VerificationResult.Fail);
result.FirstFailure.Should().NotBeNull();
result.FirstFailure!.StepName.Should().Be("dsse_signature");
result.Receipt.FailureReason.Should().Contain("Signature mismatch");
}
[Fact]
public async Task VerifyAsync_IdMismatch_FailsAtIdRecomputation()
{
// Arrange
var bundleId = new ProofBundleId("sha256:wrong-id");
var keyId = "key-1";
SetupBundleWithWrongId(bundleId, keyId);
SetupValidDsseVerification(keyId);
var pipeline = CreatePipeline();
var request = new VerificationPipelineRequest { ProofBundleId = bundleId };
// Act
var result = await pipeline.VerifyAsync(request);
// Assert
result.IsValid.Should().BeFalse();
result.Steps.Should().Contain(s => s.StepName == "id_recomputation" && !s.Passed);
}
[Fact]
public async Task VerifyAsync_NoRekorEntry_FailsAtRekorStep()
{
// Arrange
var bundleId = new ProofBundleId("sha256:no-rekor");
var keyId = "key-1";
SetupBundleWithoutRekor(bundleId, keyId);
SetupValidDsseVerification(keyId);
var pipeline = CreatePipeline();
var request = new VerificationPipelineRequest
{
ProofBundleId = bundleId,
VerifyRekor = true
};
// Act
var result = await pipeline.VerifyAsync(request);
// Assert
result.IsValid.Should().BeFalse();
result.Steps.Should().Contain(s => s.StepName == "rekor_inclusion" && !s.Passed);
}
[Fact]
public async Task VerifyAsync_RekorDisabled_SkipsRekorStep()
{
// Arrange
var bundleId = new ProofBundleId("sha256:skip-rekor");
var keyId = "key-1";
SetupBundleWithoutRekor(bundleId, keyId);
SetupValidDsseVerification(keyId);
SetupValidTrustAnchor(keyId);
var pipeline = CreatePipeline();
var request = new VerificationPipelineRequest
{
ProofBundleId = bundleId,
VerifyRekor = false // Skip Rekor
};
// Act
var result = await pipeline.VerifyAsync(request);
// Assert
var rekorStep = result.Steps.FirstOrDefault(s => s.StepName == "rekor_inclusion");
rekorStep.Should().NotBeNull();
rekorStep!.Passed.Should().BeTrue();
rekorStep.Details.Should().Contain("skipped");
}
[Fact]
public async Task VerifyAsync_UnauthorizedKey_FailsAtTrustAnchor()
{
// Arrange
var bundleId = new ProofBundleId("sha256:bad-key");
var keyId = "unauthorized-key";
SetupValidBundle(bundleId, keyId);
SetupValidDsseVerification(keyId);
SetupValidRekorVerification();
SetupTrustAnchorWithoutKey(keyId);
var pipeline = CreatePipeline();
var request = new VerificationPipelineRequest
{
ProofBundleId = bundleId,
VerifyRekor = true
};
// Act
var result = await pipeline.VerifyAsync(request);
// Assert
result.IsValid.Should().BeFalse();
result.Steps.Should().Contain(s => s.StepName == "trust_anchor" && !s.Passed);
}
#endregion
#region Receipt Generation Tests
[Fact]
public async Task VerifyAsync_GeneratesReceipt_WithCorrectFields()
{
// Arrange
var bundleId = new ProofBundleId("sha256:receipt-test");
var keyId = "key-1";
SetupValidBundle(bundleId, keyId);
SetupValidDsseVerification(keyId);
SetupValidRekorVerification();
SetupValidTrustAnchor(keyId);
var pipeline = CreatePipeline();
var request = new VerificationPipelineRequest
{
ProofBundleId = bundleId,
VerifierVersion = "2.0.0"
};
// Act
var result = await pipeline.VerifyAsync(request);
// Assert
result.Receipt.Should().NotBeNull();
result.Receipt.ReceiptId.Should().StartWith("receipt:");
result.Receipt.VerifierVersion.Should().Be("2.0.0");
result.Receipt.ProofBundleId.Should().Be(bundleId.Value);
result.Receipt.StepsSummary.Should().HaveCount(4);
result.Receipt.TotalDurationMs.Should().BeGreaterOrEqualTo(0);
}
[Fact]
public async Task VerifyAsync_FailingPipeline_ReceiptContainsFailureReason()
{
// Arrange
var bundleId = new ProofBundleId("sha256:fail-receipt");
_proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
.Returns((ProofBundle?)null);
var pipeline = CreatePipeline();
var request = new VerificationPipelineRequest { ProofBundleId = bundleId };
// Act
var result = await pipeline.VerifyAsync(request);
// Assert
result.Receipt.Result.Should().Be(VerificationResult.Fail);
result.Receipt.FailureReason.Should().NotBeNullOrEmpty();
}
#endregion
#region Cancellation Tests
[Fact]
public async Task VerifyAsync_Cancelled_ReturnsFailure()
{
// Arrange
var bundleId = new ProofBundleId("sha256:cancel-test");
var cts = new CancellationTokenSource();
cts.Cancel();
var pipeline = CreatePipeline();
var request = new VerificationPipelineRequest { ProofBundleId = bundleId };
// Act
var result = await pipeline.VerifyAsync(request, cts.Token);
// Assert
result.IsValid.Should().BeFalse();
result.Steps.Should().Contain(s => s.ErrorMessage?.Contains("cancelled") == true);
}
#endregion
#region Helper Methods
private VerificationPipeline CreatePipeline()
{
return VerificationPipeline.CreateDefault(
_proofStore,
_dsseVerifier,
_rekorVerifier,
_trustAnchorResolver,
_logger,
_timeProvider);
}
/// <summary>Stubs the store to return a complete bundle (with a Rekor entry) for the id.</summary>
private void SetupValidBundle(ProofBundleId bundleId, string keyId)
{
    _proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
        .Returns(CreateTestBundle(keyId, includeRekor: true));
}
/// <summary>
/// Stubs the store with a bundle whose statement id is a fixed value that cannot
/// match a recomputed content hash, so ID-recomputation verification should fail.
/// </summary>
private void SetupBundleWithWrongId(ProofBundleId bundleId, string keyId)
{
    var statement = new ProofStatement
    {
        StatementId = "sha256:wrong-statement-id", // Won't match content
        PredicateType = "evidence.stella/v1",
        Predicate = new { test = "data" }
    };

    var envelope = new DsseEnvelope
    {
        PayloadType = "application/vnd.in-toto+json",
        Payload = "test"u8.ToArray(),
        Signatures = new List<DsseSignature>
        {
            new DsseSignature { KeyId = keyId, Sig = new byte[] { 0x01 } }
        }
    };

    var bundle = new ProofBundle
    {
        Statements = new List<ProofStatement> { statement },
        Envelopes = new List<DsseEnvelope> { envelope },
        RekorLogEntry = CreateTestRekorEntry()
    };

    _proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
        .Returns(bundle);
}
/// <summary>Same as <see cref="SetupValidBundle"/> but the bundle carries no Rekor log entry.</summary>
private void SetupBundleWithoutRekor(ProofBundleId bundleId, string keyId)
{
    _proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
        .Returns(CreateTestBundle(keyId, includeRekor: false));
}
/// <summary>Any envelope verifies successfully and reports the supplied key id.</summary>
private void SetupValidDsseVerification(string keyId)
{
    var success = new DsseVerificationResult { IsValid = true, KeyId = keyId };
    _dsseVerifier.VerifyAsync(Arg.Any<DsseEnvelope>(), Arg.Any<CancellationToken>())
        .Returns(success);
}
/// <summary>Any envelope fails DSSE verification with the supplied error message.</summary>
private void SetupInvalidDsseVerification(string keyId, string error)
{
    var failure = new DsseVerificationResult
    {
        IsValid = false,
        KeyId = keyId,
        ErrorMessage = error
    };
    _dsseVerifier.VerifyAsync(Arg.Any<DsseEnvelope>(), Arg.Any<CancellationToken>())
        .Returns(failure);
}
/// <summary>Any inclusion query succeeds, regardless of log id, index, proof or tree head.</summary>
private void SetupValidRekorVerification()
{
    var success = new RekorVerificationResult { IsValid = true };
    _rekorVerifier.VerifyInclusionAsync(
            Arg.Any<string>(),
            Arg.Any<long>(),
            Arg.Any<InclusionProof>(),
            Arg.Any<SignedTreeHead>(),
            Arg.Any<CancellationToken>())
        .Returns(success);
}
/// <summary>
/// Registers an anchor whose allowlist contains <paramref name="keyId"/>,
/// resolvable both by anchor id and by proof bundle.
/// </summary>
private void SetupValidTrustAnchor(string keyId)
{
    var trustedAnchor = new TrustAnchorInfo
    {
        AnchorId = Guid.NewGuid(),
        AllowedKeyIds = new List<string> { keyId },
        RevokedKeyIds = new List<string>()
    };

    _trustAnchorResolver.FindAnchorForProofAsync(Arg.Any<ProofBundleId>(), Arg.Any<CancellationToken>())
        .Returns(trustedAnchor);
    _trustAnchorResolver.GetAnchorAsync(Arg.Any<Guid>(), Arg.Any<CancellationToken>())
        .Returns(trustedAnchor);
}
/// <summary>
/// Registers an anchor whose allowlist is guaranteed NOT to contain
/// <paramref name="keyId"/>. The allowed entry is derived from the parameter
/// (rather than the previous hard-coded "different-key", which ignored the
/// parameter and would have silently authorized the key if a test ever passed
/// keyId == "different-key").
/// </summary>
private void SetupTrustAnchorWithoutKey(string keyId)
{
    var anchor = new TrustAnchorInfo
    {
        AnchorId = Guid.NewGuid(),
        // "not-" + keyId differs from keyId for every input (lengths differ).
        AllowedKeyIds = new List<string> { $"not-{keyId}" },
        RevokedKeyIds = new List<string>()
    };
    _trustAnchorResolver.FindAnchorForProofAsync(Arg.Any<ProofBundleId>(), Arg.Any<CancellationToken>())
        .Returns(anchor);
}
/// <summary>
/// Builds a minimal bundle: one statement plus one DSSE envelope signed by
/// <paramref name="keyId"/>; the Rekor entry is attached only when requested.
/// </summary>
private static ProofBundle CreateTestBundle(string keyId, bool includeRekor)
{
    var statement = new ProofStatement
    {
        StatementId = "sha256:test-statement",
        PredicateType = "evidence.stella/v1",
        Predicate = new { test = "data" }
    };

    var envelope = new DsseEnvelope
    {
        PayloadType = "application/vnd.in-toto+json",
        Payload = "test"u8.ToArray(),
        Signatures = new List<DsseSignature>
        {
            new DsseSignature { KeyId = keyId, Sig = new byte[] { 0x01 } }
        }
    };

    return new ProofBundle
    {
        Statements = new List<ProofStatement> { statement },
        Envelopes = new List<DsseEnvelope> { envelope },
        RekorLogEntry = includeRekor ? CreateTestRekorEntry() : null
    };
}
/// <summary>
/// Minimal self-consistent Rekor entry: the inclusion proof and the signed
/// tree head share the same tree size and root hash.
/// </summary>
private static RekorLogEntry CreateTestRekorEntry()
{
    var proof = new InclusionProof
    {
        Hashes = new List<byte[]> { new byte[] { 0x01 } },
        TreeSize = 1000,
        RootHash = new byte[] { 0x02 }
    };

    var treeHead = new SignedTreeHead
    {
        TreeSize = 1000,
        RootHash = new byte[] { 0x02 },
        Signature = new byte[] { 0x03 }
    };

    return new RekorLogEntry
    {
        LogId = "test-log",
        LogIndex = 12345,
        InclusionProof = proof,
        SignedTreeHead = treeHead
    };
}
#endregion
}
/// <summary>
/// Deterministic <see cref="TimeProvider"/> for tests: time only moves when the
/// test calls <see cref="Advance"/> or <see cref="SetTime"/>.
/// </summary>
internal sealed class FakeTimeProvider : TimeProvider
{
    private DateTimeOffset _utcNow;

    public FakeTimeProvider(DateTimeOffset initialTime) => _utcNow = initialTime;

    /// <summary>Returns the current fake UTC instant.</summary>
    public override DateTimeOffset GetUtcNow() => _utcNow;

    /// <summary>Moves the clock by <paramref name="duration"/> (may be negative).</summary>
    public void Advance(TimeSpan duration) => _utcNow += duration;

    /// <summary>Pins the clock to an absolute instant.</summary>
    public void SetTime(DateTimeOffset time) => _utcNow = time;
}

View File

@@ -1,484 +0,0 @@ (git diff marker: the Moq-based VerificationPipelineTests.cs below was deleted in this commit, superseded by the NSubstitute-based tests above)
// -----------------------------------------------------------------------------
// VerificationPipelineTests.cs
// Sprint: SPRINT_0501_0005_0001_proof_chain_api_surface
// Task: PROOF-API-0011 - Integration tests for verification pipeline
// Description: Tests for the full verification pipeline including DSSE, ID
// recomputation, Rekor inclusion, and trust anchor verification
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using StellaOps.Attestor.ProofChain.Identifiers;
using StellaOps.Attestor.ProofChain.Receipts;
using StellaOps.Attestor.ProofChain.Verification;
using Xunit;
namespace StellaOps.Attestor.ProofChain.Tests.Verification;
/// <summary>
/// Integration tests for the verification pipeline.
/// </summary>
public class VerificationPipelineTests
{
private readonly Mock<IProofBundleStore> _proofStoreMock;
private readonly Mock<IDsseVerifier> _dsseVerifierMock;
private readonly Mock<IRekorVerifier> _rekorVerifierMock;
private readonly Mock<ITrustAnchorResolver> _trustAnchorResolverMock;
private readonly VerificationPipeline _pipeline;
public VerificationPipelineTests()
{
    // Fresh mocks for every test; xUnit constructs a new fixture per [Fact].
    _proofStoreMock = new();
    _dsseVerifierMock = new();
    _rekorVerifierMock = new();
    _trustAnchorResolverMock = new();

    // Wire the pipeline exactly like production, but against the mocks.
    _pipeline = VerificationPipeline.CreateDefault(
        _proofStoreMock.Object,
        _dsseVerifierMock.Object,
        _rekorVerifierMock.Object,
        _trustAnchorResolverMock.Object,
        NullLogger<VerificationPipeline>.Instance);
}
#region Full Pipeline Tests
[Fact]
public async Task VerifyAsync_AllStepsPass_ReturnsValidResult()
{
    // Arrange: every stage (bundle load, DSSE, Rekor, trust anchor) succeeds.
    var bundleId = CreateTestBundleId();
    const string keyId = "test-key-id";
    var anchorId = Guid.NewGuid();

    SetupValidProofBundle(bundleId, keyId);
    SetupValidDsseVerification(keyId);
    SetupValidRekorVerification();
    SetupValidTrustAnchor(anchorId, keyId);

    // Act
    var result = await _pipeline.VerifyAsync(new VerificationPipelineRequest
    {
        ProofBundleId = bundleId,
        VerifyRekor = true
    });

    // Assert: overall pass, every step passed, no first failure recorded.
    Assert.True(result.IsValid);
    Assert.Equal(VerificationResult.Pass, result.Receipt.Result);
    Assert.All(result.Steps, step => Assert.True(step.Passed));
    Assert.Null(result.FirstFailure);
}
[Fact]
public async Task VerifyAsync_DsseSignatureInvalid_FailsAtDsseStep()
{
    // Arrange: the bundle loads fine but DSSE verification rejects the signature.
    var bundleId = CreateTestBundleId();
    SetupValidProofBundle(bundleId, "invalid-key");
    SetupInvalidDsseVerification("Signature verification failed");

    // Act
    var result = await _pipeline.VerifyAsync(new VerificationPipelineRequest
    {
        ProofBundleId = bundleId,
        VerifyRekor = false
    });

    // Assert: the first failure is the DSSE step, carrying the verifier's message.
    Assert.False(result.IsValid);
    Assert.Equal(VerificationResult.Fail, result.Receipt.Result);
    Assert.NotNull(result.FirstFailure);
    Assert.Equal("dsse_signature", result.FirstFailure.StepName);
    Assert.Contains("Signature verification failed", result.FirstFailure.ErrorMessage);
}
[Fact]
public async Task VerifyAsync_IdMismatch_FailsAtIdRecomputationStep()
{
    // Arrange
    var bundleId = CreateTestBundleId();
    var keyId = "test-key-id";

    // Setup a bundle with mismatched ID
    SetupProofBundleWithMismatchedId(bundleId, keyId);
    SetupValidDsseVerification(keyId);

    var request = new VerificationPipelineRequest
    {
        ProofBundleId = bundleId,
        VerifyRekor = false
    };

    // Act
    var result = await _pipeline.VerifyAsync(request);

    // Assert
    Assert.False(result.IsValid);
    var idStep = result.Steps.FirstOrDefault(s => s.StepName == "id_recomputation");
    Assert.NotNull(idStep);
    // Note: The actual result depends on how the bundle is constructed
    // NOTE(review): despite its name, this test never asserts idStep.Passed == false,
    // so it would still pass if the ID step succeeded and some later step failed.
    // Consider tightening once the bundle-id derivation is deterministic here.
}
[Fact]
public async Task VerifyAsync_RekorInclusionFails_FailsAtRekorStep()
{
    // Arrange: DSSE passes but the Rekor inclusion proof is rejected.
    var bundleId = CreateTestBundleId();
    const string keyId = "test-key-id";
    SetupValidProofBundle(bundleId, keyId);
    SetupValidDsseVerification(keyId);
    SetupInvalidRekorVerification("Inclusion proof invalid");

    // Act
    var result = await _pipeline.VerifyAsync(new VerificationPipelineRequest
    {
        ProofBundleId = bundleId,
        VerifyRekor = true
    });

    // Assert: the rekor_inclusion step failed and carries the verifier's error.
    Assert.False(result.IsValid);
    var rekorStep = result.Steps.FirstOrDefault(s => s.StepName == "rekor_inclusion");
    Assert.NotNull(rekorStep);
    Assert.False(rekorStep.Passed);
    Assert.Contains("Inclusion proof invalid", rekorStep.ErrorMessage);
}
[Fact]
public async Task VerifyAsync_RekorDisabled_SkipsRekorStep()
{
    // Arrange: the bundle carries no Rekor entry and the request opts out of Rekor.
    var bundleId = CreateTestBundleId();
    const string keyId = "test-key-id";
    var anchorId = Guid.NewGuid();
    SetupValidProofBundle(bundleId, keyId, includeRekorEntry: false);
    SetupValidDsseVerification(keyId);
    SetupValidTrustAnchor(anchorId, keyId);

    // Act
    var result = await _pipeline.VerifyAsync(new VerificationPipelineRequest
    {
        ProofBundleId = bundleId,
        VerifyRekor = false
    });

    // Assert: overall pass; the Rekor step exists, is passing, and is marked skipped.
    Assert.True(result.IsValid);
    var rekorStep = result.Steps.FirstOrDefault(s => s.StepName == "rekor_inclusion");
    Assert.NotNull(rekorStep);
    Assert.True(rekorStep.Passed);
    Assert.Contains("skipped", rekorStep.Details, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public async Task VerifyAsync_UnauthorizedKey_FailsAtTrustAnchorStep()
{
    // Arrange: signature verifies, but the key is absent from the anchor allowlist.
    var bundleId = CreateTestBundleId();
    const string keyId = "unauthorized-key";
    var anchorId = Guid.NewGuid();
    SetupValidProofBundle(bundleId, keyId);
    SetupValidDsseVerification(keyId);
    SetupTrustAnchorWithoutKey(anchorId, keyId);

    // Act
    var result = await _pipeline.VerifyAsync(new VerificationPipelineRequest
    {
        ProofBundleId = bundleId,
        VerifyRekor = false
    });

    // Assert: trust_anchor is the failing step with an authorization error.
    Assert.False(result.IsValid);
    var anchorStep = result.Steps.FirstOrDefault(s => s.StepName == "trust_anchor");
    Assert.NotNull(anchorStep);
    Assert.False(anchorStep.Passed);
    Assert.Contains("not authorized", anchorStep.ErrorMessage);
}
#endregion
#region Receipt Generation Tests
[Fact]
public async Task VerifyAsync_GeneratesReceiptWithCorrectFields()
{
    // Arrange: a fully passing run with an explicit verifier version.
    var bundleId = CreateTestBundleId();
    const string keyId = "test-key-id";
    const string verifierVersion = "2.0.0";
    var anchorId = Guid.NewGuid();
    SetupValidProofBundle(bundleId, keyId);
    SetupValidDsseVerification(keyId);
    SetupValidRekorVerification();
    SetupValidTrustAnchor(anchorId, keyId);

    // Act
    var result = await _pipeline.VerifyAsync(new VerificationPipelineRequest
    {
        ProofBundleId = bundleId,
        VerifyRekor = true,
        VerifierVersion = verifierVersion
    });

    // Assert: the receipt echoes the bundle id and verifier version, has a
    // non-empty id, a non-negative duration, and a per-step summary.
    Assert.NotNull(result.Receipt);
    Assert.NotEmpty(result.Receipt.ReceiptId);
    Assert.Equal(bundleId.Value, result.Receipt.ProofBundleId);
    Assert.Equal(verifierVersion, result.Receipt.VerifierVersion);
    Assert.True(result.Receipt.TotalDurationMs >= 0);
    Assert.NotEmpty(result.Receipt.StepsSummary!);
}
[Fact]
public async Task VerifyAsync_FailedVerification_ReceiptContainsFailureReason()
{
    // Arrange: the store cannot find the bundle at all.
    var bundleId = CreateTestBundleId();
    _proofStoreMock
        .Setup(x => x.GetBundleAsync(bundleId, It.IsAny<CancellationToken>()))
        .ReturnsAsync((ProofBundle?)null);

    // Act
    var result = await _pipeline.VerifyAsync(new VerificationPipelineRequest
    {
        ProofBundleId = bundleId,
        VerifyRekor = false
    });

    // Assert: the failure is recorded on the receipt with a human-readable reason.
    Assert.False(result.IsValid);
    Assert.Equal(VerificationResult.Fail, result.Receipt.Result);
    Assert.NotNull(result.Receipt.FailureReason);
    Assert.Contains("not found", result.Receipt.FailureReason);
}
#endregion
#region Cancellation Tests
[Fact]
public async Task VerifyAsync_Cancelled_ReturnsPartialResults()
{
    // Arrange
    var bundleId = CreateTestBundleId();
    var keyId = "test-key-id";
    var cts = new CancellationTokenSource();
    SetupValidProofBundle(bundleId, keyId);

    // The DSSE verifier cancels the token mid-pipeline and then honours it,
    // simulating a cancellation that arrives while verification is in flight.
    _dsseVerifierMock
        .Setup(x => x.VerifyAsync(It.IsAny<DsseEnvelope>(), It.IsAny<CancellationToken>()))
        .Returns(async (DsseEnvelope _, CancellationToken ct) =>
        {
            await cts.CancelAsync();
            ct.ThrowIfCancellationRequested();
            return new DsseVerificationResult { IsValid = true, KeyId = keyId };
        });

    var request = new VerificationPipelineRequest
    {
        ProofBundleId = bundleId,
        VerifyRekor = false
    };

    // Act - the pipeline is expected to absorb the cancellation rather than throw.
    var result = await _pipeline.VerifyAsync(request, cts.Token);

    // Assert - the original test asserted nothing and therefore passed vacuously.
    // At minimum the pipeline must return a result and report the run as not
    // valid, since the DSSE step never completed successfully.
    Assert.NotNull(result);
    Assert.False(result.IsValid);
}
#endregion
#region Helper Methods
/// <summary>Derives a unique, well-formed "sha256:&lt;hex&gt;" id from a random GUID.</summary>
private static ProofBundleId CreateTestBundleId()
{
    var digest = SHA256.HashData(Encoding.UTF8.GetBytes(Guid.NewGuid().ToString()));
    var hex = Convert.ToHexString(digest).ToLowerInvariant();
    return new ProofBundleId($"sha256:{hex}");
}
// Stubs the proof store so GetBundleAsync(bundleId) yields a minimal bundle:
// one statement, one DSSE envelope "signed" by keyId, and (optionally) a
// self-consistent Rekor log entry whose proof matches its signed tree head.
private void SetupValidProofBundle(ProofBundleId bundleId, string keyId, bool includeRekorEntry = true)
{
    var bundle = new ProofBundle
    {
        Statements = new List<ProofStatement>
        {
            new ProofStatement
            {
                StatementId = "sha256:statement123",
                PredicateType = "https://stella-ops.io/v1/evidence",
                Predicate = new { test = "data" }
            }
        },
        Envelopes = new List<DsseEnvelope>
        {
            new DsseEnvelope
            {
                PayloadType = "application/vnd.in-toto+json",
                Payload = Encoding.UTF8.GetBytes("{}"),
                Signatures = new List<DsseSignature>
                {
                    // All-zero signature bytes are fine here: DSSE verification
                    // itself is mocked in these tests.
                    new DsseSignature { KeyId = keyId, Sig = new byte[64] }
                }
            }
        },
        RekorLogEntry = includeRekorEntry ? new RekorLogEntry
        {
            LogId = "test-log",
            LogIndex = 12345,
            InclusionProof = new InclusionProof
            {
                Hashes = new List<byte[]>(),
                TreeSize = 100,
                RootHash = new byte[32]
            },
            SignedTreeHead = new SignedTreeHead
            {
                TreeSize = 100,
                RootHash = new byte[32],
                Signature = new byte[64]
            }
        } : null
    };
    _proofStoreMock
        .Setup(x => x.GetBundleAsync(bundleId, It.IsAny<CancellationToken>()))
        .ReturnsAsync(bundle);
}
// Stubs the store with a bundle whose statement id and payload are unrelated
// to bundleId, so an ID-recomputation step should detect the mismatch.
private void SetupProofBundleWithMismatchedId(ProofBundleId bundleId, string keyId)
{
    // Create a bundle that will compute to a different ID
    var bundle = new ProofBundle
    {
        Statements = new List<ProofStatement>
        {
            new ProofStatement
            {
                StatementId = "sha256:differentstatement",
                PredicateType = "https://stella-ops.io/v1/evidence",
                Predicate = new { different = "data" }
            }
        },
        Envelopes = new List<DsseEnvelope>
        {
            new DsseEnvelope
            {
                PayloadType = "application/vnd.in-toto+json",
                Payload = Encoding.UTF8.GetBytes("{\"different\":\"payload\"}"),
                Signatures = new List<DsseSignature>
                {
                    new DsseSignature { KeyId = keyId, Sig = new byte[64] }
                }
            }
        }
    };
    _proofStoreMock
        .Setup(x => x.GetBundleAsync(bundleId, It.IsAny<CancellationToken>()))
        .ReturnsAsync(bundle);
}
/// <summary>Every envelope verifies successfully and reports the given key id.</summary>
private void SetupValidDsseVerification(string keyId)
{
    var success = new DsseVerificationResult { IsValid = true, KeyId = keyId };
    _dsseVerifierMock
        .Setup(x => x.VerifyAsync(It.IsAny<DsseEnvelope>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(success);
}
/// <summary>Every envelope fails DSSE verification with the supplied message.</summary>
private void SetupInvalidDsseVerification(string errorMessage)
{
    var failure = new DsseVerificationResult
    {
        IsValid = false,
        KeyId = "unknown",
        ErrorMessage = errorMessage
    };
    _dsseVerifierMock
        .Setup(x => x.VerifyAsync(It.IsAny<DsseEnvelope>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(failure);
}
/// <summary>Any inclusion query succeeds, regardless of arguments.</summary>
private void SetupValidRekorVerification()
{
    var success = new RekorVerificationResult { IsValid = true };
    _rekorVerifierMock
        .Setup(x => x.VerifyInclusionAsync(
            It.IsAny<string>(),
            It.IsAny<long>(),
            It.IsAny<InclusionProof>(),
            It.IsAny<SignedTreeHead>(),
            It.IsAny<CancellationToken>()))
        .ReturnsAsync(success);
}
/// <summary>Any inclusion query fails with the supplied error message.</summary>
private void SetupInvalidRekorVerification(string errorMessage)
{
    var failure = new RekorVerificationResult { IsValid = false, ErrorMessage = errorMessage };
    _rekorVerifierMock
        .Setup(x => x.VerifyInclusionAsync(
            It.IsAny<string>(),
            It.IsAny<long>(),
            It.IsAny<InclusionProof>(),
            It.IsAny<SignedTreeHead>(),
            It.IsAny<CancellationToken>()))
        .ReturnsAsync(failure);
}
/// <summary>
/// Registers an anchor with the given id whose allowlist contains
/// <paramref name="keyId"/>, resolvable both by proof and by anchor id.
/// </summary>
private void SetupValidTrustAnchor(Guid anchorId, string keyId)
{
    var trustedAnchor = new TrustAnchorInfo
    {
        AnchorId = anchorId,
        AllowedKeyIds = new List<string> { keyId },
        RevokedKeyIds = new List<string>()
    };

    _trustAnchorResolverMock
        .Setup(x => x.GetAnchorAsync(anchorId, It.IsAny<CancellationToken>()))
        .ReturnsAsync(trustedAnchor);
    _trustAnchorResolverMock
        .Setup(x => x.FindAnchorForProofAsync(It.IsAny<ProofBundleId>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(trustedAnchor);
}
/// <summary>
/// Registers an anchor whose allowlist is guaranteed NOT to contain
/// <paramref name="keyId"/>. The allowed entry is derived from the parameter
/// (the previous hard-coded "other-key-not-matching" ignored keyId entirely and
/// would have silently authorized the key if a test ever passed that literal).
/// </summary>
private void SetupTrustAnchorWithoutKey(Guid anchorId, string keyId)
{
    var anchor = new TrustAnchorInfo
    {
        AnchorId = anchorId,
        // "not-" + keyId differs from keyId for every input (lengths differ).
        AllowedKeyIds = new List<string> { $"not-{keyId}" },
        RevokedKeyIds = new List<string>()
    };
    _trustAnchorResolverMock
        .Setup(x => x.FindAnchorForProofAsync(It.IsAny<ProofBundleId>(), It.IsAny<CancellationToken>()))
        .ReturnsAsync(anchor);
}
#endregion
}