work work hard work

This commit is contained in:
StellaOps Bot
2025-12-18 00:47:24 +02:00
parent dee252940b
commit b4235c134c
189 changed files with 9627 additions and 3258 deletions

View File

@@ -1,25 +1,15 @@
// =============================================================================
// IEvidenceReconciler.cs
// Main orchestrator for the 5-step evidence reconciliation algorithm
// =============================================================================
using System.Diagnostics;
using StellaOps.AirGap.Importer.Contracts;
using StellaOps.AirGap.Importer.Reconciliation.Parsers;
using StellaOps.AirGap.Importer.Reconciliation.Signing;
using StellaOps.AirGap.Importer.Validation;
namespace StellaOps.AirGap.Importer.Reconciliation;
/// <summary>
/// Orchestrates the 5-step deterministic evidence reconciliation algorithm.
/// Orchestrates the deterministic evidence reconciliation algorithm (advisory A5).
/// </summary>
public interface IEvidenceReconciler
{
/// <summary>
/// Reconciles evidence from an input directory into a deterministic evidence graph.
/// </summary>
/// <param name="inputDirectory">Directory containing SBOMs, attestations, and VEX documents.</param>
/// <param name="outputDirectory">Directory for output files.</param>
/// <param name="options">Reconciliation options.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The reconciled evidence graph.</returns>
Task<EvidenceGraph> ReconcileAsync(
string inputDirectory,
string outputDirectory,
@@ -35,54 +25,65 @@ public sealed record ReconciliationOptions
public static readonly ReconciliationOptions Default = new();
/// <summary>
/// Whether to sign the output with DSSE.
/// When null, a deterministic epoch timestamp is used for output stability.
/// </summary>
public DateTimeOffset? GeneratedAtUtc { get; init; }
/// <summary>
/// Whether to sign the output with DSSE (implemented in later tasks).
/// </summary>
public bool SignOutput { get; init; }
/// <summary>
/// Key ID for DSSE signing.
/// Optional key ID for DSSE signing (implemented in later tasks).
/// </summary>
public string? SigningKeyId { get; init; }
/// <summary>
/// JSON normalization options.
/// Private key PEM path used for DSSE signing when <see cref="SignOutput"/> is enabled.
/// </summary>
public string? SigningPrivateKeyPemPath { get; init; }
public NormalizationOptions Normalization { get; init; } = NormalizationOptions.Default;
/// <summary>
/// Lattice configuration for precedence rules.
/// </summary>
public LatticeConfiguration Lattice { get; init; } = LatticeConfiguration.Default;
/// <summary>
/// Whether to verify attestation signatures.
/// </summary>
public bool VerifySignatures { get; init; } = true;
/// <summary>
/// Whether to verify Rekor inclusion proofs.
/// </summary>
public bool VerifyRekorProofs { get; init; }
/// <summary>
/// Trust roots used for DSSE signature verification.
/// </summary>
public TrustRootConfig? TrustRoots { get; init; }
/// <summary>
/// Rekor public key path used to verify checkpoint signatures when <see cref="VerifyRekorProofs"/> is enabled.
/// </summary>
public string? RekorPublicKeyPath { get; init; }
}
/// <summary>
/// Default implementation of the evidence reconciler.
/// Implements the 5-step algorithm from advisory §5.
/// </summary>
public sealed class EvidenceReconciler : IEvidenceReconciler
{
private readonly EvidenceDirectoryDiscovery _discovery;
private readonly SourcePrecedenceLattice _lattice;
private static readonly DateTimeOffset DeterministicEpoch = DateTimeOffset.UnixEpoch;
private readonly SbomCollector _sbomCollector;
private readonly AttestationCollector _attestationCollector;
private readonly EvidenceGraphSerializer _serializer;
private readonly EvidenceGraphDsseSigner _dsseSigner;
public EvidenceReconciler(
EvidenceDirectoryDiscovery? discovery = null,
SourcePrecedenceLattice? lattice = null,
SbomCollector? sbomCollector = null,
AttestationCollector? attestationCollector = null,
EvidenceGraphSerializer? serializer = null)
{
_discovery = discovery ?? new EvidenceDirectoryDiscovery();
_lattice = lattice ?? new SourcePrecedenceLattice();
_sbomCollector = sbomCollector ?? new SbomCollector();
_attestationCollector = attestationCollector ?? new AttestationCollector(dsseVerifier: new DsseVerifier());
_serializer = serializer ?? new EvidenceGraphSerializer();
_dsseSigner = new EvidenceGraphDsseSigner(_serializer);
}
public async Task<EvidenceGraph> ReconcileAsync(
@@ -95,129 +96,67 @@ public sealed class EvidenceReconciler : IEvidenceReconciler
ArgumentException.ThrowIfNullOrWhiteSpace(outputDirectory);
options ??= ReconciliationOptions.Default;
var stopwatch = Stopwatch.StartNew();
// ========================================
// Step 1: Index artifacts by immutable digest
// ========================================
var index = await IndexArtifactsAsync(inputDirectory, ct);
var index = new ArtifactIndex();
// ========================================
// Step 2: Collect evidence for each artifact
// ========================================
var collectedIndex = await CollectEvidenceAsync(index, inputDirectory, options, ct);
// Step 2: Evidence collection (SBOM + attestations). VEX parsing is not yet implemented.
await _sbomCollector.CollectAsync(Path.Combine(inputDirectory, "sboms"), index, ct).ConfigureAwait(false);
// ========================================
// Step 3: Normalize all documents
// ========================================
// Normalization is applied during evidence collection
// ========================================
// Step 4: Apply lattice precedence rules
// ========================================
var mergedStatements = ApplyLatticeRules(collectedIndex);
// ========================================
// Step 5: Emit evidence graph
// ========================================
var graph = BuildGraph(collectedIndex, mergedStatements, stopwatch.ElapsedMilliseconds);
// Write output files
await _serializer.WriteAsync(graph, outputDirectory, ct);
// Optionally sign with DSSE
if (options.SignOutput && !string.IsNullOrEmpty(options.SigningKeyId))
var attestationOptions = new AttestationCollectionOptions
{
await SignOutputAsync(outputDirectory, options.SigningKeyId, ct);
MarkAsUnverified = !options.VerifySignatures,
VerifySignatures = options.VerifySignatures,
VerifyRekorProofs = options.VerifyRekorProofs,
RekorPublicKeyPath = options.RekorPublicKeyPath,
TrustRoots = options.TrustRoots
};
await _attestationCollector.CollectAsync(
Path.Combine(inputDirectory, "attestations"),
index,
attestationOptions,
ct)
.ConfigureAwait(false);
// Step 4: Lattice merge (currently no VEX ingestion; returns empty).
var mergedStatements = new Dictionary<string, VexStatement>(StringComparer.Ordinal);
// Step 5: Graph emission.
var graph = BuildGraph(index, mergedStatements, generatedAtUtc: options.GeneratedAtUtc ?? DeterministicEpoch);
await _serializer.WriteAsync(graph, outputDirectory, ct).ConfigureAwait(false);
if (options.SignOutput)
{
if (string.IsNullOrWhiteSpace(options.SigningPrivateKeyPemPath))
{
throw new InvalidOperationException("SignOutput requires SigningPrivateKeyPemPath.");
}
await _dsseSigner.WriteEvidenceGraphEnvelopeAsync(
graph,
outputDirectory,
options.SigningPrivateKeyPemPath,
options.SigningKeyId,
ct)
.ConfigureAwait(false);
}
stopwatch.Stop();
return graph;
}
/// <summary>
/// Step 1: builds an <see cref="ArtifactIndex"/> keyed by immutable content digest.
/// Discovers all candidate files under <paramref name="inputDirectory"/> and
/// registers an empty entry for each; evidence is attached in later steps.
/// </summary>
/// <param name="inputDirectory">Root directory scanned by the discovery service.</param>
/// <param name="ct">Cancellation token flowed to discovery.</param>
/// <returns>An index with one (possibly merged) entry per discovered content hash.</returns>
private async Task<ArtifactIndex> IndexArtifactsAsync(string inputDirectory, CancellationToken ct)
{
// Discovery walks the input tree and hashes each file it finds.
var discoveredFiles = await _discovery.DiscoverAsync(inputDirectory, ct);
var index = new ArtifactIndex();
foreach (var file in discoveredFiles)
{
// Seed the index with an empty entry per file; AddOrUpdate presumably
// merges entries that share a content hash — TODO confirm.
var entry = ArtifactEntry.Empty(file.ContentHash, file.Path);
index.AddOrUpdate(entry);
}
return index;
}
private async Task<ArtifactIndex> CollectEvidenceAsync(
private static EvidenceGraph BuildGraph(
ArtifactIndex index,
string inputDirectory,
ReconciliationOptions options,
CancellationToken ct)
{
// In a full implementation, this would:
// 1. Parse SBOM files (CycloneDX, SPDX)
// 2. Parse attestation files (DSSE envelopes)
// 3. Parse VEX files (OpenVEX)
// 4. Validate signatures if enabled
// 5. Verify Rekor proofs if enabled
// For now, return the index with discovered files
await Task.CompletedTask;
return index;
}
/// <summary>
/// Step 4: merges VEX statements per (artifact digest, vulnerability id) using the
/// configured source-precedence lattice.
/// </summary>
/// <param name="index">Artifact index whose entries carry collected VEX documents.</param>
/// <returns>
/// Merged statements keyed "&lt;digest&gt;:&lt;vulnerabilityId&gt;" (ordinal comparison).
/// </returns>
private Dictionary<string, VexStatement> ApplyLatticeRules(ArtifactIndex index)
{
var mergedStatements = new Dictionary<string, VexStatement>(StringComparer.Ordinal);
foreach (var (digest, entry) in index.GetAll())
{
// Vulnerability ids are grouped case-insensitively (e.g. "CVE-..." vs "cve-...").
var groupedByVuln = entry.VexDocuments
.GroupBy(v => v.VulnerabilityId, StringComparer.OrdinalIgnoreCase);
foreach (var group in groupedByVuln)
{
// Lift each raw VexReference into a VexStatement so the lattice can rank it.
var statements = group.Select(v => new VexStatement
{
VulnerabilityId = v.VulnerabilityId,
ProductId = digest,
Status = ParseVexStatus(v.Status),
Source = ParseSourcePrecedence(v.Source),
Justification = v.Justification,
DocumentRef = v.Path
}).ToList();
if (statements.Count > 0)
{
// The lattice picks the winning statement by source precedence.
// NOTE: the merged key uses the group's VulnerabilityId casing as
// returned by Merge, so two casings of one id could yield two keys —
// TODO confirm Merge normalizes the id.
var merged = _lattice.Merge(statements);
var key = $"{digest}:{merged.VulnerabilityId}";
mergedStatements[key] = merged;
}
}
}
return mergedStatements;
}
private EvidenceGraph BuildGraph(
ArtifactIndex index,
Dictionary<string, VexStatement> mergedStatements,
long elapsedMs)
IReadOnlyDictionary<string, VexStatement> mergedStatements,
DateTimeOffset generatedAtUtc)
{
var nodes = new List<EvidenceNode>();
var edges = new List<EvidenceEdge>();
int sbomCount = 0, attestationCount = 0, vexCount = 0;
var sbomCount = 0;
var attestationCount = 0;
foreach (var (digest, entry) in index.GetAll())
{
// Create node for artifact
var node = new EvidenceNode
{
Id = digest,
@@ -226,16 +165,16 @@ public sealed class EvidenceReconciler : IEvidenceReconciler
Name = entry.Name,
Sboms = entry.Sboms.Select(s => new SbomNodeRef
{
Format = s.Format,
Path = s.Path,
Format = s.Format.ToString(),
Path = s.FilePath,
ContentHash = s.ContentHash
}).ToList(),
Attestations = entry.Attestations.Select(a => new AttestationNodeRef
{
PredicateType = a.PredicateType,
Path = a.Path,
SignatureValid = a.SignatureValid,
RekorVerified = a.RekorVerified
Path = a.FilePath,
SignatureValid = a.SignatureVerified,
RekorVerified = a.TlogVerified
}).ToList(),
VexStatements = mergedStatements
.Where(kv => kv.Key.StartsWith(digest + ":", StringComparison.Ordinal))
@@ -251,9 +190,7 @@ public sealed class EvidenceReconciler : IEvidenceReconciler
nodes.Add(node);
sbomCount += entry.Sboms.Count;
attestationCount += entry.Attestations.Count;
vexCount += entry.VexDocuments.Count;
// Create edges from artifacts to SBOMs
foreach (var sbom in entry.Sboms)
{
edges.Add(new EvidenceEdge
@@ -264,13 +201,12 @@ public sealed class EvidenceReconciler : IEvidenceReconciler
});
}
// Create edges from artifacts to attestations
foreach (var att in entry.Attestations)
foreach (var attestation in entry.Attestations)
{
edges.Add(new EvidenceEdge
{
Source = digest,
Target = att.Path,
Target = attestation.ContentHash,
Relationship = "attested-by"
});
}
@@ -278,7 +214,7 @@ public sealed class EvidenceReconciler : IEvidenceReconciler
return new EvidenceGraph
{
GeneratedAt = DateTimeOffset.UtcNow.ToString("O"),
GeneratedAt = generatedAtUtc.ToString("O"),
Nodes = nodes,
Edges = edges,
Metadata = new EvidenceGraphMetadata
@@ -287,39 +223,9 @@ public sealed class EvidenceReconciler : IEvidenceReconciler
SbomCount = sbomCount,
AttestationCount = attestationCount,
VexStatementCount = mergedStatements.Count,
ConflictCount = 0, // TODO: Track conflicts during merge
ReconciliationDurationMs = elapsedMs
ConflictCount = 0,
ReconciliationDurationMs = 0
}
};
}
/// <summary>
/// Placeholder for DSSE signing of the reconciliation output. Currently a no-op:
/// it completes immediately without reading or writing anything.
/// </summary>
/// <param name="outputDirectory">Directory whose output would be signed (unused).</param>
/// <param name="keyId">Signing key identifier (unused).</param>
/// <param name="ct">Cancellation token (unused).</param>
private static async Task SignOutputAsync(string outputDirectory, string keyId, CancellationToken ct)
{
// Intentionally empty: the Signer-module integration that produces a DSSE
// envelope is not implemented yet.
await Task.CompletedTask;
}
/// <summary>
/// Maps a free-form VEX status string to <see cref="VexStatus"/>.
/// Null, blank, or unrecognized values map to <see cref="VexStatus.Unknown"/>.
/// </summary>
/// <param name="status">Raw status text from a VEX document; may be null.</param>
/// <returns>The corresponding enum value, or <see cref="VexStatus.Unknown"/>.</returns>
private static VexStatus ParseVexStatus(string? status)
{
    // The original threw NullReferenceException for null input; treat missing
    // status as Unknown instead, matching the fallback arm below.
    if (string.IsNullOrWhiteSpace(status))
    {
        return VexStatus.Unknown;
    }

    return status.Trim().ToLowerInvariant() switch
    {
        "affected" => VexStatus.Affected,
        "not_affected" or "notaffected" => VexStatus.NotAffected,
        "fixed" => VexStatus.Fixed,
        "under_investigation" or "underinvestigation" => VexStatus.UnderInvestigation,
        _ => VexStatus.Unknown
    };
}
/// <summary>
/// Maps a free-form VEX source label to <see cref="SourcePrecedence"/>.
/// Null, blank, or unrecognized values map to <see cref="SourcePrecedence.Unknown"/>.
/// </summary>
/// <param name="source">Raw source text from a VEX document; may be null.</param>
/// <returns>The corresponding precedence, or <see cref="SourcePrecedence.Unknown"/>.</returns>
private static SourcePrecedence ParseSourcePrecedence(string? source)
{
    // The original threw NullReferenceException for null input; treat missing
    // source as Unknown instead, matching the fallback arm below.
    if (string.IsNullOrWhiteSpace(source))
    {
        return SourcePrecedence.Unknown;
    }

    return source.Trim().ToLowerInvariant() switch
    {
        "vendor" => SourcePrecedence.Vendor,
        "maintainer" => SourcePrecedence.Maintainer,
        "third-party" or "thirdparty" => SourcePrecedence.ThirdParty,
        _ => SourcePrecedence.Unknown
    };
}
}

View File

@@ -124,9 +124,19 @@ public sealed class AttestationCollector
bool tlogVerified = false;
string? rekorUuid = null;
if (options.TrustRoots is not null && _dsseVerifier is not null)
if (options.VerifySignatures && options.TrustRoots is not null && _dsseVerifier is not null)
{
var verifyResult = _dsseVerifier.Verify(envelope, options.TrustRoots, _logger);
var validationEnvelope = new StellaOps.AirGap.Importer.Validation.DsseEnvelope(
envelope.PayloadType,
envelope.Payload,
envelope.Signatures
.Where(sig => !string.IsNullOrWhiteSpace(sig.KeyId))
.Select(sig => new StellaOps.AirGap.Importer.Validation.DsseSignature(
sig.KeyId!.Trim(),
sig.Sig))
.ToList());
var verifyResult = _dsseVerifier.Verify(validationEnvelope, options.TrustRoots, _logger);
signatureVerified = verifyResult.IsValid;
if (signatureVerified)
@@ -139,7 +149,7 @@ public sealed class AttestationCollector
_logger.LogWarning(
"DSSE signature verification failed for attestation: {File}, reason={Reason}",
relativePath,
verifyResult.ErrorCode);
verifyResult.Reason);
}
}
else if (options.MarkAsUnverified)
@@ -149,6 +159,53 @@ public sealed class AttestationCollector
tlogVerified = false;
}
// Verify Rekor inclusion proof (T8 integration)
if (options.VerifyRekorProofs)
{
if (string.IsNullOrWhiteSpace(options.RekorPublicKeyPath))
{
result.FailedFiles.Add((filePath, "Rekor public key path not configured for VerifyRekorProofs."));
}
else
{
var receiptPath = ResolveRekorReceiptPath(filePath);
if (receiptPath is null)
{
result.FailedFiles.Add((filePath, "Rekor receipt file not found for attestation."));
}
else
{
try
{
var dsseSha256 = ParseSha256Digest(contentHash);
var verify = await RekorOfflineReceiptVerifier.VerifyAsync(
receiptPath,
dsseSha256,
options.RekorPublicKeyPath,
cancellationToken)
.ConfigureAwait(false);
if (verify.Verified)
{
tlogVerified = true;
rekorUuid = verify.RekorUuid;
_logger.LogDebug("Rekor inclusion verified for attestation: {File}", relativePath);
}
else
{
tlogVerified = false;
rekorUuid = null;
result.FailedFiles.Add((filePath, $"Rekor verification failed: {verify.FailureReason}"));
}
}
catch (Exception ex)
{
result.FailedFiles.Add((filePath, $"Rekor verification exception: {ex.Message}"));
}
}
}
}
// Get all subject digests for this attestation
var subjectDigests = statement.Subjects
.Select(s => s.GetSha256Digest())
@@ -258,6 +315,56 @@ public sealed class AttestationCollector
var hash = await SHA256.HashDataAsync(stream, cancellationToken);
return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
}
/// <summary>
/// Decodes a "sha256:&lt;hex&gt;" digest string into raw hash bytes.
/// </summary>
/// <param name="sha256Digest">Digest string; the "sha256:" prefix is case-insensitive.</param>
/// <returns>The decoded hash bytes.</returns>
/// <exception cref="FormatException">Prefix missing or hex payload malformed.</exception>
private static byte[] ParseSha256Digest(string sha256Digest)
{
    const string Prefix = "sha256:";
    if (!sha256Digest.StartsWith(Prefix, StringComparison.OrdinalIgnoreCase))
    {
        throw new FormatException("Expected sha256:<hex> digest.");
    }

    var hex = sha256Digest.Substring(Prefix.Length);
    return Convert.FromHexString(hex);
}
/// <summary>
/// Probes well-known sibling file names next to an attestation for an offline
/// Rekor receipt, highest-priority candidates first.
/// </summary>
/// <param name="attestationFilePath">Path to the attestation file.</param>
/// <returns>The first existing candidate path, or null when none exists.</returns>
private static string? ResolveRekorReceiptPath(string attestationFilePath)
{
    var directory = Path.GetDirectoryName(attestationFilePath);
    if (string.IsNullOrWhiteSpace(directory))
    {
        return null;
    }

    var fileName = Path.GetFileName(attestationFilePath);
    var stem = Path.GetFileNameWithoutExtension(attestationFilePath);

    var candidates = new List<string>();

    // Compound extensions (".jsonl.dsig", ".dsse.json") get first crack at
    // "<name>.rekor.json", stripping the full compound suffix.
    if (fileName.EndsWith(".jsonl.dsig", StringComparison.OrdinalIgnoreCase))
    {
        candidates.Add(Path.Combine(directory, fileName[..^".jsonl.dsig".Length] + ".rekor.json"));
    }

    if (fileName.EndsWith(".dsse.json", StringComparison.OrdinalIgnoreCase))
    {
        candidates.Add(Path.Combine(directory, fileName[..^".dsse.json".Length] + ".rekor.json"));
    }

    // Generic fallbacks, in decreasing specificity.
    candidates.Add(Path.Combine(directory, stem + ".rekor.json"));
    candidates.Add(Path.Combine(directory, stem + ".rekor-receipt.json"));
    candidates.Add(Path.Combine(directory, "rekor-receipt.json"));
    candidates.Add(Path.Combine(directory, "offline-update.rekor.json"));

    return candidates
        .Distinct(StringComparer.Ordinal)
        .FirstOrDefault(File.Exists);
}
}
/// <summary>
@@ -282,6 +389,11 @@ public sealed record AttestationCollectionOptions
/// </summary>
public bool VerifyRekorProofs { get; init; } = false;
/// <summary>
/// Rekor public key path used to verify checkpoint signatures when <see cref="VerifyRekorProofs"/> is enabled.
/// </summary>
public string? RekorPublicKeyPath { get; init; }
/// <summary>
/// Trust roots configuration for DSSE signature verification.
/// Required when VerifySignatures is true.

View File

@@ -0,0 +1,148 @@
using System.Security.Cryptography;
using System.Text;
using Org.BouncyCastle.Asn1;
using Org.BouncyCastle.Crypto;
using Org.BouncyCastle.Crypto.Digests;
using Org.BouncyCastle.Crypto.Parameters;
using Org.BouncyCastle.Crypto.Signers;
using Org.BouncyCastle.OpenSsl;
using StellaOps.Attestor.Envelope;
namespace StellaOps.AirGap.Importer.Reconciliation.Signing;
/// <summary>
/// Signs a canonically serialized evidence graph as a DSSE envelope using
/// deterministic (RFC 6979) ECDSA and writes the compact envelope to disk.
/// Deterministic nonces keep the output byte-stable for a given key + payload.
/// </summary>
internal sealed class EvidenceGraphDsseSigner
{
    internal const string EvidenceGraphPayloadType = "application/vnd.stellaops.evidence-graph+json";

    private readonly EvidenceGraphSerializer _serializer;

    public EvidenceGraphDsseSigner(EvidenceGraphSerializer serializer)
        => _serializer = serializer ?? throw new ArgumentNullException(nameof(serializer));

    /// <summary>
    /// Serializes <paramref name="graph"/>, signs its DSSE pre-authentication
    /// encoding, and writes <c>evidence-graph.dsse.json</c> into
    /// <paramref name="outputDirectory"/> (created if missing).
    /// </summary>
    /// <param name="graph">Graph to sign; must not be null.</param>
    /// <param name="outputDirectory">Destination directory for the envelope file.</param>
    /// <param name="signingPrivateKeyPemPath">Path to an EC private key PEM (P-256/384/521).</param>
    /// <param name="signingKeyId">Optional key id recorded on the envelope signature.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The full path of the written envelope file.</returns>
    /// <exception cref="NotSupportedException">Unsupported ECDSA key size.</exception>
    /// <exception cref="InvalidOperationException">Key PEM unusable or serializer emitted no compact JSON.</exception>
    public async Task<string> WriteEvidenceGraphEnvelopeAsync(
        EvidenceGraph graph,
        string outputDirectory,
        string signingPrivateKeyPemPath,
        string? signingKeyId,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(graph);
        ArgumentException.ThrowIfNullOrWhiteSpace(outputDirectory);
        ArgumentException.ThrowIfNullOrWhiteSpace(signingPrivateKeyPemPath);
        Directory.CreateDirectory(outputDirectory);

        var canonicalJson = _serializer.Serialize(graph, pretty: false);
        var payloadBytes = Encoding.UTF8.GetBytes(canonicalJson);
        var pae = DssePreAuthenticationEncoding.Encode(EvidenceGraphPayloadType, payloadBytes);

        // Read the key PEM exactly once. The previous implementation re-read the
        // file a second time inside the signing path.
        var pem = await File.ReadAllTextAsync(signingPrivateKeyPemPath, ct).ConfigureAwait(false);
        var envelopeKey = LoadEcdsaEnvelopeKey(pem, signingKeyId);
        var signature = SignDeterministicEcdsa(pae, pem, envelopeKey.AlgorithmId, signingPrivateKeyPemPath);

        var envelope = new DsseEnvelope(
            EvidenceGraphPayloadType,
            payloadBytes,
            signatures: [DsseSignature.FromBytes(signature, envelopeKey.KeyId)],
            payloadContentType: "application/json");

        var serialized = DsseEnvelopeSerializer.Serialize(
            envelope,
            new DsseEnvelopeSerializationOptions
            {
                EmitCompactJson = true,
                EmitExpandedJson = false,
                CompressionAlgorithm = DsseCompressionAlgorithm.None
            });

        if (serialized.CompactJson is null)
        {
            throw new InvalidOperationException("DSSE envelope serialization did not emit compact JSON.");
        }

        var dssePath = Path.Combine(outputDirectory, "evidence-graph.dsse.json");
        await File.WriteAllBytesAsync(dssePath, serialized.CompactJson, ct).ConfigureAwait(false);
        return dssePath;
    }

    // Resolves the envelope key (algorithm id + key id) from the PEM text.
    private static EnvelopeKey LoadEcdsaEnvelopeKey(string pem, string? keyIdOverride)
    {
        using var ecdsa = ECDsa.Create();
        ecdsa.ImportFromPem(pem);
        var algorithmId = ResolveEcdsaAlgorithmId(ecdsa.KeySize);
        var parameters = ecdsa.ExportParameters(includePrivateParameters: true);
        return EnvelopeKey.CreateEcdsaSigner(algorithmId, parameters, keyIdOverride);
    }

    private static string ResolveEcdsaAlgorithmId(int keySizeBits) => keySizeBits switch
    {
        256 => "ES256",
        384 => "ES384",
        521 => "ES512", // P-521 keys report a 521-bit size.
        _ => throw new NotSupportedException($"Unsupported ECDSA key size {keySizeBits} bits.")
    };

    // Produces a DER-encoded (r, s) signature over the PAE using RFC 6979
    // deterministic nonces so repeated signing is reproducible.
    private static byte[] SignDeterministicEcdsa(ReadOnlySpan<byte> message, string pem, string algorithmId, string pemPath)
    {
        var (digest, calculatorDigest) = CreateSignatureDigest(message, algorithmId);
        var privateKey = LoadEcPrivateKey(pem, pemPath);
        var signer = new ECDsaSigner(new HMacDsaKCalculator(calculatorDigest));
        signer.Init(true, privateKey);
        var rs = signer.GenerateSignature(digest);
        var sequence = new DerSequence(new DerInteger(rs[0]), new DerInteger(rs[1]));
        return sequence.GetDerEncoded();
    }

    // Pairs the pre-hashed message digest with the digest instance the RFC 6979
    // nonce calculator must use for the given algorithm.
    private static (byte[] Digest, IDigest CalculatorDigest) CreateSignatureDigest(ReadOnlySpan<byte> message, string algorithmId)
        => algorithmId.ToUpperInvariant() switch
        {
            "ES256" => (SHA256.HashData(message), new Sha256Digest()),
            "ES384" => (SHA384.HashData(message), new Sha384Digest()),
            "ES512" => (SHA512.HashData(message), new Sha512Digest()),
            _ => throw new NotSupportedException($"Unsupported ECDSA algorithm '{algorithmId}'.")
        };

    // Parses the EC private key from PEM text; pemPath is only used in the
    // error message so failures point at the offending file.
    private static ECPrivateKeyParameters LoadEcPrivateKey(string pem, string pemPath)
    {
        using var reader = new StringReader(pem);
        var pemReader = new PemReader(reader);
        return pemReader.ReadObject() switch
        {
            AsymmetricCipherKeyPair pair when pair.Private is ECPrivateKeyParameters ecPrivate => ecPrivate,
            ECPrivateKeyParameters ecPrivate => ecPrivate,
            _ => throw new InvalidOperationException($"Unsupported private key content in '{pemPath}'.")
        };
    }
}
/// <summary>
/// Builds the DSSE pre-authentication encoding (PAE):
/// <c>"DSSEv1" SP len(type) SP type SP len(payload) SP payload</c>,
/// with lengths counted in UTF-8 bytes.
/// </summary>
internal static class DssePreAuthenticationEncoding
{
    private const string Prefix = "DSSEv1";

    /// <summary>
    /// Encodes <paramref name="payloadType"/> and <paramref name="payload"/> into PAE bytes.
    /// </summary>
    /// <exception cref="ArgumentException">Payload type is null, empty, or whitespace.</exception>
    public static byte[] Encode(string payloadType, ReadOnlySpan<byte> payload)
    {
        if (string.IsNullOrWhiteSpace(payloadType))
        {
            throw new ArgumentException("payloadType must be provided.", nameof(payloadType));
        }

        // The length field counts UTF-8 bytes of the type string, not chars.
        var typeByteCount = Encoding.UTF8.GetByteCount(payloadType);
        var headerBytes = Encoding.UTF8.GetBytes($"{Prefix} {typeByteCount} {payloadType} {payload.Length} ");

        var result = new byte[headerBytes.Length + payload.Length];
        headerBytes.AsSpan().CopyTo(result);
        payload.CopyTo(result.AsSpan(headerBytes.Length));
        return result;
    }
}

View File

@@ -7,7 +7,12 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="BouncyCastle.Cryptography" Version="2.6.2" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\\..\\Attestor\\StellaOps.Attestor.Envelope\\StellaOps.Attestor.Envelope.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,638 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Org.BouncyCastle.Crypto.Parameters;
using Org.BouncyCastle.Crypto.Signers;
using Org.BouncyCastle.Security;
namespace StellaOps.AirGap.Importer.Validation;
/// <summary>
/// Offline Rekor receipt verifier for air-gapped environments.
/// Verifies checkpoint signature and Merkle inclusion (RFC 6962).
/// </summary>
public static class RekorOfflineReceiptVerifier
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true
};
/// <summary>
/// Verifies an offline Rekor receipt for a DSSE envelope: validates the receipt's
/// shape, checks the checkpoint signature against the supplied Rekor public key,
/// confirms the receipt root matches the checkpoint root, and recomputes the
/// RFC 6962 Merkle root from the inclusion proof.
/// </summary>
/// <param name="receiptPath">Path to the receipt JSON (uuid, logIndex, rootHash, hashes, checkpoint).</param>
/// <param name="dsseSha256">SHA-256 of the DSSE envelope (must be exactly 32 bytes).</param>
/// <param name="rekorPublicKeyPath">Path to the Rekor log public key.</param>
/// <param name="cancellationToken">Cancellation token for file reads.</param>
/// <returns>Success with computed/expected roots, or a failure with a reason.</returns>
public static async Task<RekorOfflineReceiptVerificationResult> VerifyAsync(
string receiptPath,
ReadOnlyMemory<byte> dsseSha256,
string rekorPublicKeyPath,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(receiptPath);
ArgumentException.ThrowIfNullOrWhiteSpace(rekorPublicKeyPath);
// Missing files are reported as failures rather than exceptions so callers
// can aggregate them per attestation.
if (!File.Exists(receiptPath))
{
return RekorOfflineReceiptVerificationResult.Failure("Rekor receipt file not found.");
}
if (!File.Exists(rekorPublicKeyPath))
{
return RekorOfflineReceiptVerificationResult.Failure("Rekor public key file not found.");
}
var receiptJson = await File.ReadAllTextAsync(receiptPath, cancellationToken).ConfigureAwait(false);
RekorReceiptDocument? receipt;
try
{
receipt = JsonSerializer.Deserialize<RekorReceiptDocument>(receiptJson, SerializerOptions);
}
catch (JsonException ex)
{
return RekorOfflineReceiptVerificationResult.Failure($"Rekor receipt JSON invalid: {ex.Message}");
}
// All five receipt fields are mandatory for an inclusion check.
if (receipt is null ||
string.IsNullOrWhiteSpace(receipt.Uuid) ||
receipt.LogIndex < 0 ||
string.IsNullOrWhiteSpace(receipt.RootHash) ||
receipt.Hashes is null ||
receipt.Hashes.Count == 0 ||
string.IsNullOrWhiteSpace(receipt.Checkpoint))
{
return RekorOfflineReceiptVerificationResult.Failure("Rekor receipt is missing required fields.");
}
if (dsseSha256.Length != 32)
{
return RekorOfflineReceiptVerificationResult.Failure("DSSE digest must be 32 bytes (sha256).");
}
var publicKeyBytes = await LoadPublicKeyBytesAsync(rekorPublicKeyPath, cancellationToken).ConfigureAwait(false);
// The checkpoint field may be inline content or a relative path; resolve it
// relative to the receipt's own directory.
var receiptDirectory = Path.GetDirectoryName(Path.GetFullPath(receiptPath)) ?? Environment.CurrentDirectory;
var checkpointText = await ResolveCheckpointAsync(receipt.Checkpoint, receiptDirectory, cancellationToken).ConfigureAwait(false);
if (checkpointText is null)
{
return RekorOfflineReceiptVerificationResult.Failure("Rekor checkpoint file not found.");
}
var checkpoint = SigstoreCheckpoint.TryParse(checkpointText);
if (checkpoint is null)
{
return RekorOfflineReceiptVerificationResult.Failure("Rekor checkpoint format invalid.");
}
if (checkpoint.Signatures.Count == 0)
{
return RekorOfflineReceiptVerificationResult.Failure("Rekor checkpoint signature missing.");
}
// The checkpoint signature binds the root hash + tree size to the log key;
// without it, the Merkle check below would prove inclusion in an untrusted tree.
var signatureVerified = VerifyCheckpointSignature(checkpoint.BodyCanonicalUtf8, checkpoint.Signatures, publicKeyBytes);
if (!signatureVerified)
{
return RekorOfflineReceiptVerificationResult.Failure("Rekor checkpoint signature verification failed.");
}
byte[] expectedRoot;
try
{
expectedRoot = Convert.FromBase64String(checkpoint.RootHashBase64);
}
catch (FormatException)
{
return RekorOfflineReceiptVerificationResult.Failure("Rekor checkpoint root hash is not valid base64.");
}
if (expectedRoot.Length != 32)
{
return RekorOfflineReceiptVerificationResult.Failure("Rekor checkpoint root hash must be 32 bytes (sha256).");
}
var receiptRootBytes = TryParseHashBytes(receipt.RootHash);
if (receiptRootBytes is null)
{
return RekorOfflineReceiptVerificationResult.Failure("Rekor receipt rootHash has invalid encoding.");
}
// Cross-check: the receipt's claimed root must equal the signed checkpoint root.
if (!CryptographicOperations.FixedTimeEquals(receiptRootBytes, expectedRoot))
{
return RekorOfflineReceiptVerificationResult.Failure("Rekor receipt rootHash does not match checkpoint root hash.");
}
var proofHashes = new List<byte[]>(capacity: receipt.Hashes.Count);
foreach (var h in receipt.Hashes)
{
if (TryParseHashBytes(h) is not { } bytes)
{
return RekorOfflineReceiptVerificationResult.Failure("Rekor receipt hashes contains an invalid hash value.");
}
proofHashes.Add(bytes);
}
var leafHash = Rfc6962Merkle.HashLeaf(dsseSha256.Span);
// NOTE(review): receipt.LogIndex is passed as the leaf index within the
// checkpoint tree — confirm receipts carry tree-relative (not global shard)
// indices, otherwise inclusion will fail for sharded logs.
var computedRoot = Rfc6962Merkle.ComputeRootFromPath(
leafHash,
receipt.LogIndex,
checkpoint.TreeSize,
proofHashes);
if (computedRoot is null)
{
return RekorOfflineReceiptVerificationResult.Failure("Failed to compute Rekor Merkle root from inclusion proof.");
}
var computedRootHex = Convert.ToHexString(computedRoot).ToLowerInvariant();
var expectedRootHex = Convert.ToHexString(expectedRoot).ToLowerInvariant();
var included = CryptographicOperations.FixedTimeEquals(computedRoot, expectedRoot);
if (!included)
{
return RekorOfflineReceiptVerificationResult.Failure(
"Rekor inclusion proof verification failed (computed root mismatch).",
computedRootHex,
expectedRootHex,
checkpoint.TreeSize,
checkpointSignatureVerified: true);
}
return RekorOfflineReceiptVerificationResult.Success(
receipt.Uuid.Trim(),
receipt.LogIndex,
computedRootHex,
expectedRootHex,
checkpoint.TreeSize,
checkpointSignatureVerified: true);
}
/// <summary>
/// Loads Rekor public key bytes from a file, accepting three layouts:
/// a PEM "PUBLIC KEY" block, a note-format verifier string
/// ("origin+keyid+base64"), or raw key bytes as a last resort.
/// </summary>
/// <param name="path">Path to the key file.</param>
/// <param name="ct">Cancellation token for the file read.</param>
/// <returns>The decoded key bytes (SPKI or raw, depending on input layout).</returns>
private static async Task<byte[]> LoadPublicKeyBytesAsync(string path, CancellationToken ct)
{
    var raw = await File.ReadAllBytesAsync(path, ct).ConfigureAwait(false);
    var text = Encoding.UTF8.GetString(raw);

    const string Begin = "-----BEGIN PUBLIC KEY-----";
    const string End = "-----END PUBLIC KEY-----";
    var beginIndex = text.IndexOf(Begin, StringComparison.Ordinal);
    var endIndex = text.IndexOf(End, StringComparison.Ordinal);
    if (beginIndex >= 0 && endIndex > beginIndex)
    {
        // PEM layout: strip the armor and line breaks, decode the body.
        var start = beginIndex + Begin.Length;
        var body = text[start..endIndex]
            .Replace("\r", string.Empty, StringComparison.Ordinal)
            .Replace("\n", string.Empty, StringComparison.Ordinal)
            .Trim();
        return Convert.FromBase64String(body);
    }

    // Note verifier layout "origin+keyid+base64(pubkey)": the key is the last segment.
    var trimmed = text.Trim();
    if (trimmed.Count(static c => c == '+') >= 2)
    {
        var lastSegment = trimmed.Split('+')[^1];
        try
        {
            return Convert.FromBase64String(lastSegment);
        }
        catch (FormatException)
        {
            // Not base64 — fall through to raw bytes.
        }
    }

    return raw;
}
/// <summary>
/// Resolves the receipt's checkpoint field to checkpoint text. The field may be
/// a file path (absolute or relative to the receipt directory), one of several
/// well-known checkpoint file locations, or inline checkpoint content.
/// </summary>
/// <param name="checkpointField">Raw checkpoint value from the receipt JSON.</param>
/// <param name="receiptDirectory">Directory the receipt was loaded from.</param>
/// <param name="ct">Cancellation token for file reads.</param>
/// <returns>Checkpoint text, or null when the field is empty and no file exists.</returns>
private static async Task<string?> ResolveCheckpointAsync(string checkpointField, string receiptDirectory, CancellationToken ct)
{
var value = checkpointField.Trim();
// Path-like values (contain a separator or end in ".sig") are tried first.
var candidates = new List<string>();
if (value.IndexOfAny(['/', '\\']) >= 0 || value.EndsWith(".sig", StringComparison.OrdinalIgnoreCase))
{
candidates.Add(Path.IsPathRooted(value) ? value : Path.Combine(receiptDirectory, value));
}
// Conventional checkpoint locations relative to the receipt.
candidates.Add(Path.Combine(receiptDirectory, "checkpoint.sig"));
candidates.Add(Path.Combine(receiptDirectory, "tlog", "checkpoint.sig"));
candidates.Add(Path.Combine(receiptDirectory, "evidence", "tlog", "checkpoint.sig"));
foreach (var candidate in candidates.Distinct(StringComparer.Ordinal))
{
if (File.Exists(candidate))
{
return await File.ReadAllTextAsync(candidate, ct).ConfigureAwait(false);
}
}
// No file matched: treat the field itself as inline checkpoint content.
// Note: the original (untrimmed) field is returned; the checkpoint parser trims.
return value.Length > 0 ? checkpointField : null;
}
/// <summary>
/// Verifies a checkpoint body against its signatures, accepting either an
/// ECDSA (SPKI) key or an Ed25519 key (raw 32-byte or SPKI).
/// </summary>
/// <returns>True when any signature verifies under either key interpretation.</returns>
private static bool VerifyCheckpointSignature(ReadOnlySpan<byte> bodyUtf8, IReadOnlyList<byte[]> signatures, byte[] publicKey)
{
    // ECDSA is tried first; Ed25519 is the fallback interpretation of the key.
    return TryVerifyEcdsaCheckpoint(bodyUtf8, signatures, publicKey)
        || TryVerifyEd25519Checkpoint(bodyUtf8, signatures, publicKey);
}
/// <summary>
/// Attempts ECDSA verification of the checkpoint body with an SPKI-encoded key.
/// Each signature is tried in the default encoding first, then as a raw
/// fixed-field (r||s) 64-byte signature.
/// </summary>
/// <returns>True when any signature verifies; false on mismatch or non-ECDSA key.</returns>
private static bool TryVerifyEcdsaCheckpoint(ReadOnlySpan<byte> bodyUtf8, IReadOnlyList<byte[]> signatures, byte[] publicKey)
{
    try
    {
        using var verifier = ECDsa.Create();
        verifier.ImportSubjectPublicKeyInfo(publicKey, out _);

        foreach (var signature in signatures)
        {
            if (verifier.VerifyData(bodyUtf8, signature, HashAlgorithmName.SHA256))
            {
                return true;
            }

            // Some encoders emit a raw (r||s) 64-byte signature instead.
            if (signature.Length == 64 &&
                verifier.VerifyData(bodyUtf8, signature, HashAlgorithmName.SHA256, DSASignatureFormat.IeeeP1363FixedFieldConcatenation))
            {
                return true;
            }
        }
    }
    catch
    {
        // Key is not ECDSA/SPKI or signature bytes are malformed — report unverified.
    }

    return false;
}
/// <summary>
/// Attempts Ed25519 verification of the checkpoint body. The key may be a raw
/// 32-byte Ed25519 public key or an SPKI blob parsed via BouncyCastle.
/// </summary>
/// <returns>True when any signature verifies; false otherwise (including parse errors).</returns>
private static bool TryVerifyEd25519Checkpoint(ReadOnlySpan<byte> bodyUtf8, IReadOnlyList<byte[]> signatures, byte[] publicKey)
{
    try
    {
        Ed25519PublicKeyParameters? key = null;
        if (publicKey.Length == 32)
        {
            // Raw key bytes.
            key = new Ed25519PublicKeyParameters(publicKey, 0);
        }
        else if (PublicKeyFactory.CreateKey(publicKey) is Ed25519PublicKeyParameters spkiKey)
        {
            key = spkiKey;
        }

        if (key is null)
        {
            // SPKI parsed to some other algorithm — not an Ed25519 checkpoint key.
            return false;
        }

        var body = bodyUtf8.ToArray();
        foreach (var signature in signatures)
        {
            var verifier = new Ed25519Signer();
            verifier.Init(false, key);
            verifier.BlockUpdate(body, 0, body.Length);
            if (verifier.VerifySignature(signature))
            {
                return true;
            }
        }

        return false;
    }
    catch
    {
        // Malformed key material or signature bytes — report unverified.
        return false;
    }
}
/// <summary>
/// Decodes a hash string that may carry a "sha256:" prefix and may be encoded
/// as hex (preferred when the shape allows) or base64.
/// </summary>
/// <param name="value">Hash text; may be null or blank.</param>
/// <returns>Decoded bytes, or null when the value cannot be decoded.</returns>
private static byte[]? TryParseHashBytes(string value)
{
    if (string.IsNullOrWhiteSpace(value))
    {
        return null;
    }

    var candidate = value.Trim();
    const string Sha256Prefix = "sha256:";
    if (candidate.StartsWith(Sha256Prefix, StringComparison.OrdinalIgnoreCase))
    {
        candidate = candidate[Sha256Prefix.Length..];
    }

    static bool IsHexChar(char c) => c is (>= '0' and <= '9') or (>= 'a' and <= 'f') or (>= 'A' and <= 'F');

    // Even-length all-hex strings are decoded as hex (the common Rekor encoding).
    if (candidate.Length % 2 == 0 && candidate.All(IsHexChar))
    {
        try
        {
            return Convert.FromHexString(candidate);
        }
        catch
        {
            return null;
        }
    }

    // Fall back to base64.
    try
    {
        return Convert.FromBase64String(candidate);
    }
    catch
    {
        return null;
    }
}
/// <summary>
/// JSON shape of an offline Rekor receipt. All five fields are required by the
/// validation guard in <see cref="VerifyAsync"/>: entry uuid, leaf log index,
/// claimed tree root hash, inclusion-proof sibling hashes, and the checkpoint
/// (inline content or a path resolved relative to the receipt).
/// </summary>
private sealed record RekorReceiptDocument(
[property: JsonPropertyName("uuid")] string Uuid,
[property: JsonPropertyName("logIndex")] long LogIndex,
[property: JsonPropertyName("rootHash")] string RootHash,
[property: JsonPropertyName("hashes")] IReadOnlyList<string> Hashes,
[property: JsonPropertyName("checkpoint")] string Checkpoint);
/// <summary>
/// Parsed transparency-log "checkpoint" (signed note): an origin line, a tree
/// size, a base64 root hash, an optional numeric timestamp, plus any signatures
/// found in the note. Construction is only via <see cref="TryParse"/>.
/// </summary>
private sealed class SigstoreCheckpoint
{
    private SigstoreCheckpoint(
        string origin,
        long treeSize,
        string rootHashBase64,
        string? timestamp,
        IReadOnlyList<byte[]> signatures,
        byte[] bodyCanonicalUtf8)
    {
        Origin = origin;
        TreeSize = treeSize;
        RootHashBase64 = rootHashBase64;
        Timestamp = timestamp;
        Signatures = signatures;
        BodyCanonicalUtf8 = bodyCanonicalUtf8;
    }
    /// <summary>Log origin identifier (first body line).</summary>
    public string Origin { get; }
    /// <summary>Tree size claimed by the checkpoint (second body line).</summary>
    public long TreeSize { get; }
    /// <summary>Base64-encoded root hash (third body line).</summary>
    public string RootHashBase64 { get; }
    /// <summary>Optional timestamp line (digits only); <c>null</c> when absent.</summary>
    public string? Timestamp { get; }
    /// <summary>Raw signature bytes decoded from signature lines (may be empty).</summary>
    public IReadOnlyList<byte[]> Signatures { get; }
    /// <summary>Canonical newline-terminated UTF-8 body that signatures cover.</summary>
    public byte[] BodyCanonicalUtf8 { get; }
    /// <summary>
    /// Parses a checkpoint note. Returns <c>null</c> when the content has fewer
    /// than three body lines, a non-positive tree size, or an invalid base64 root.
    /// </summary>
    public static SigstoreCheckpoint? TryParse(string checkpointContent)
    {
        if (string.IsNullOrWhiteSpace(checkpointContent))
        {
            return null;
        }
        // Normalize line endings, then trim trailing whitespace per line.
        var lines = checkpointContent
            .Replace("\r", string.Empty, StringComparison.Ordinal)
            .Split('\n')
            .Select(static line => line.TrimEnd())
            .ToList();
        // Extract signatures first (note format: "— origin base64sig", or "sig <base64>").
        var signatures = new List<byte[]>();
        foreach (var line in lines)
        {
            var trimmed = line.Trim();
            if (trimmed.Length == 0)
            {
                continue;
            }
            if (trimmed.StartsWith("—", StringComparison.Ordinal) || trimmed.StartsWith("--", StringComparison.OrdinalIgnoreCase))
            {
                // Signed-note style: the last whitespace-separated token is the base64 signature.
                var token = trimmed.Split(' ', StringSplitOptions.RemoveEmptyEntries).LastOrDefault();
                if (!string.IsNullOrWhiteSpace(token) && TryDecodeBase64(token, out var sigBytes))
                {
                    signatures.Add(sigBytes);
                }
                continue;
            }
            if (trimmed.StartsWith("sig ", StringComparison.OrdinalIgnoreCase) ||
                trimmed.StartsWith("signature ", StringComparison.OrdinalIgnoreCase))
            {
                var token = trimmed.Split(' ', StringSplitOptions.RemoveEmptyEntries).LastOrDefault();
                if (!string.IsNullOrWhiteSpace(token) && TryDecodeBase64(token, out var sigBytes))
                {
                    signatures.Add(sigBytes);
                }
            }
        }
        // Body: first non-empty 3 lines (origin, size, root), optional 4th timestamp (digits).
        var bodyLines = lines
            .Select(static l => l.Trim())
            .Where(static l => l.Length > 0)
            .Where(static l => !LooksLikeSignatureLine(l))
            .ToList();
        if (bodyLines.Count < 3)
        {
            return null;
        }
        var origin = bodyLines[0];
        if (!long.TryParse(bodyLines[1], out var treeSize) || treeSize <= 0)
        {
            return null;
        }
        var rootBase64 = bodyLines[2];
        // Validate base64 now; decode later for error messages.
        if (!TryDecodeBase64(rootBase64, out _))
        {
            return null;
        }
        string? timestamp = null;
        if (bodyLines.Count >= 4 && bodyLines[3].All(static c => c >= '0' && c <= '9'))
        {
            timestamp = bodyLines[3];
        }
        // Rebuild the canonical body (each line newline-terminated) so signature
        // verification is independent of original whitespace/line-ending quirks.
        var canonical = new StringBuilder();
        canonical.Append(origin);
        canonical.Append('\n');
        canonical.Append(treeSize.ToString(System.Globalization.CultureInfo.InvariantCulture));
        canonical.Append('\n');
        canonical.Append(rootBase64);
        canonical.Append('\n');
        if (!string.IsNullOrWhiteSpace(timestamp))
        {
            canonical.Append(timestamp);
            canonical.Append('\n');
        }
        return new SigstoreCheckpoint(
            origin,
            treeSize,
            rootBase64,
            timestamp,
            signatures,
            Encoding.UTF8.GetBytes(canonical.ToString()));
    }
    // Heuristic used to exclude signature lines from the body: em-dash or "--"
    // prefixed lines, and "sig "/"signature " prefixed lines.
    private static bool LooksLikeSignatureLine(string trimmedLine)
    {
        if (trimmedLine.StartsWith("—", StringComparison.Ordinal))
        {
            return true;
        }
        if (trimmedLine.StartsWith("--", StringComparison.Ordinal))
        {
            return true;
        }
        if (trimmedLine.StartsWith("sig ", StringComparison.OrdinalIgnoreCase) ||
            trimmedLine.StartsWith("signature ", StringComparison.OrdinalIgnoreCase))
        {
            return true;
        }
        return false;
    }
    // Non-throwing base64 decode; returns an empty array on failure.
    private static bool TryDecodeBase64(string token, out byte[] bytes)
    {
        try
        {
            bytes = Convert.FromBase64String(token);
            return true;
        }
        catch
        {
            bytes = Array.Empty<byte>();
            return false;
        }
    }
}
/// <summary>
/// RFC 6962 Merkle tree helpers: SHA-256 with 0x00/0x01 domain-separation
/// prefixes for leaf and interior hashing, plus inclusion-proof root
/// recomputation over the left-balanced (level-promotion) tree shape.
/// </summary>
private static class Rfc6962Merkle
{
    private const byte LeafPrefix = 0x00;
    private const byte NodePrefix = 0x01;
    /// <summary>Hashes a leaf: SHA256(0x00 || leafData).</summary>
    public static byte[] HashLeaf(ReadOnlySpan<byte> leafData)
    {
        var buffer = new byte[1 + leafData.Length];
        buffer[0] = LeafPrefix;
        leafData.CopyTo(buffer.AsSpan(1));
        return SHA256.HashData(buffer);
    }
    /// <summary>Hashes an interior node: SHA256(0x01 || left || right).</summary>
    public static byte[] HashInterior(ReadOnlySpan<byte> left, ReadOnlySpan<byte> right)
    {
        var buffer = new byte[1 + left.Length + right.Length];
        buffer[0] = NodePrefix;
        left.CopyTo(buffer.AsSpan(1));
        right.CopyTo(buffer.AsSpan(1 + left.Length));
        return SHA256.HashData(buffer);
    }
    /// <summary>
    /// Recomputes the Merkle root from a leaf hash and its inclusion proof.
    /// Walks the tree level by level; the rightmost node of an odd-sized level
    /// is promoted to the next level without a sibling, and no proof hash is
    /// consumed for it.
    /// </summary>
    /// <returns>
    /// The recomputed root, or <c>null</c> when the index/size are out of range
    /// or the number of proof hashes does not match the path exactly.
    /// </returns>
    public static byte[]? ComputeRootFromPath(
        byte[] leafHash,
        long leafIndex,
        long treeSize,
        IReadOnlyList<byte[]> proofHashes)
    {
        if (leafIndex < 0 || treeSize <= 0 || leafIndex >= treeSize)
        {
            return null;
        }
        var currentHash = leafHash;
        var proofIndex = 0;
        var index = leafIndex;
        var size = treeSize;
        while (size > 1)
        {
            if (index % 2 == 1)
            {
                // Right child: sibling hash sits on the left.
                if (proofIndex >= proofHashes.Count)
                {
                    return null;
                }
                currentHash = HashInterior(proofHashes[proofIndex++], currentHash);
            }
            else if (index + 1 < size)
            {
                // Left child with a right sibling.
                if (proofIndex >= proofHashes.Count)
                {
                    return null;
                }
                currentHash = HashInterior(currentHash, proofHashes[proofIndex++]);
            }
            // else: promoted rightmost node — carried up unchanged. Fix: the
            // previous implementation consumed a proof hash here without using
            // it, which broke verification for any path containing a promoted
            // node (RFC 6962/9162 proofs contain no hash for such steps).
            index /= 2;
            size = (size + 1) / 2;
        }
        // Reject proofs with leftover hashes: the proof must match the path exactly.
        return proofIndex == proofHashes.Count ? currentHash : null;
    }
}
}
/// <summary>
/// Outcome of offline Rekor receipt verification: either a successful
/// root-hash recomputation (with checkpoint-signature status) or a failure
/// carrying a human-readable reason. Constructed via the static factories.
/// </summary>
public sealed record RekorOfflineReceiptVerificationResult
{
    /// <summary>True when the receipt's inclusion proof verified against the checkpoint.</summary>
    public required bool Verified { get; init; }
    /// <summary>Reason text when <see cref="Verified"/> is false; otherwise null.</summary>
    public string? FailureReason { get; init; }
    /// <summary>Rekor entry UUID (populated on success).</summary>
    public string? RekorUuid { get; init; }
    /// <summary>Transparency-log index of the entry (populated on success).</summary>
    public long? LogIndex { get; init; }
    /// <summary>Root hash recomputed from the leaf and inclusion proof, when available.</summary>
    public string? ComputedRootHash { get; init; }
    /// <summary>Root hash asserted by the receipt/checkpoint, when available.</summary>
    public string? ExpectedRootHash { get; init; }
    /// <summary>Checkpoint tree size, when known.</summary>
    public long? TreeSize { get; init; }
    /// <summary>Whether the checkpoint's own signature verified (tracked separately from the proof).</summary>
    public bool CheckpointSignatureVerified { get; init; }
    /// <summary>Creates a successful verification result.</summary>
    public static RekorOfflineReceiptVerificationResult Success(
        string rekorUuid,
        long logIndex,
        string computedRootHash,
        string expectedRootHash,
        long treeSize,
        bool checkpointSignatureVerified) => new()
        {
            Verified = true,
            RekorUuid = rekorUuid,
            LogIndex = logIndex,
            ComputedRootHash = computedRootHash,
            ExpectedRootHash = expectedRootHash,
            TreeSize = treeSize,
            CheckpointSignatureVerified = checkpointSignatureVerified
        };
    /// <summary>Creates a failed verification result; optional fields capture partial progress.</summary>
    public static RekorOfflineReceiptVerificationResult Failure(
        string reason,
        string? computedRootHash = null,
        string? expectedRootHash = null,
        long? treeSize = null,
        bool checkpointSignatureVerified = false) => new()
        {
            Verified = false,
            FailureReason = reason,
            ComputedRootHash = computedRootHash,
            ExpectedRootHash = expectedRootHash,
            TreeSize = treeSize,
            CheckpointSignatureVerified = checkpointSignatureVerified
        };
}

View File

@@ -0,0 +1,75 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.AirGap.Importer.Reconciliation;
namespace StellaOps.AirGap.Importer.Tests.Reconciliation;
public sealed class EvidenceReconcilerDsseSigningTests
{
    /// <summary>
    /// Signing twice with a fixed key and fixed timestamp must produce a
    /// byte-identical DSSE envelope on disk, and the signature must verify
    /// against the DSSE pre-authentication encoding of the payload.
    /// </summary>
    [Fact]
    public async Task ReconcileAsync_WhenSignOutputEnabled_WritesDeterministicDsseEnvelopeWithValidSignature()
    {
        using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
        var pem = ecdsa.ExportPkcs8PrivateKeyPem();
        var root = Path.Combine(Path.GetTempPath(), "stellaops-airgap-importer-tests", Guid.NewGuid().ToString("n"));
        try
        {
            var inputDir = Path.Combine(root, "input");
            var outputDir = Path.Combine(root, "output");
            Directory.CreateDirectory(inputDir);
            Directory.CreateDirectory(outputDir);
            var keyPath = Path.Combine(root, "evidence-signing-key.pem");
            await File.WriteAllTextAsync(keyPath, pem, Encoding.UTF8);
            var reconciler = new EvidenceReconciler();
            var options = new ReconciliationOptions
            {
                GeneratedAtUtc = DateTimeOffset.UnixEpoch,
                SignOutput = true,
                SigningPrivateKeyPemPath = keyPath
            };
            // Run the reconciler twice: the envelope written on the second run
            // must be byte-for-byte identical to the first (determinism).
            var graph1 = await reconciler.ReconcileAsync(inputDir, outputDir, options);
            var dssePath = Path.Combine(outputDir, "evidence-graph.dsse.json");
            var firstBytes = await File.ReadAllBytesAsync(dssePath);
            var graph2 = await reconciler.ReconcileAsync(inputDir, outputDir, options);
            var secondBytes = await File.ReadAllBytesAsync(dssePath);
            Assert.Equal(firstBytes, secondBytes);
            using var json = JsonDocument.Parse(firstBytes);
            var rootElement = json.RootElement;
            Assert.Equal("application/vnd.stellaops.evidence-graph+json", rootElement.GetProperty("payloadType").GetString());
            var payloadBytes = Convert.FromBase64String(rootElement.GetProperty("payload").GetString()!);
            var signatureElement = rootElement.GetProperty("signatures")[0];
            var signatureBytes = Convert.FromBase64String(signatureElement.GetProperty("sig").GetString()!);
            // Payload must be the canonical (non-pretty) serialization of the graph.
            var expectedPayload = new EvidenceGraphSerializer().Serialize(graph1, pretty: false);
            Assert.Equal(expectedPayload, Encoding.UTF8.GetString(payloadBytes));
            // The signature covers the DSSE PAE, not the raw payload bytes.
            var pae = EncodeDssePreAuth("application/vnd.stellaops.evidence-graph+json", payloadBytes);
            Assert.True(ecdsa.VerifyData(pae, signatureBytes, HashAlgorithmName.SHA256));
            var keyId = signatureElement.GetProperty("keyid").GetString();
            Assert.False(string.IsNullOrWhiteSpace(keyId));
            Assert.Equal(new EvidenceGraphSerializer().Serialize(graph1, pretty: false), new EvidenceGraphSerializer().Serialize(graph2, pretty: false));
        }
        finally
        {
            // Fix: the original leaked the per-run temp tree; delete it
            // best-effort so repeated runs do not accumulate files.
            try
            {
                Directory.Delete(root, recursive: true);
            }
            catch (IOException)
            {
                // Best-effort cleanup only.
            }
            catch (UnauthorizedAccessException)
            {
                // Best-effort cleanup only.
            }
        }
    }

    /// <summary>
    /// Builds the DSSE v1 pre-authentication encoding:
    /// "DSSEv1 SP len(type) SP type SP len(payload) SP payload".
    /// </summary>
    private static byte[] EncodeDssePreAuth(string payloadType, ReadOnlySpan<byte> payload)
    {
        var payloadTypeByteCount = Encoding.UTF8.GetByteCount(payloadType);
        var header = $"DSSEv1 {payloadTypeByteCount} {payloadType} {payload.Length} ";
        var headerBytes = Encoding.UTF8.GetBytes(header);
        var buffer = new byte[headerBytes.Length + payload.Length];
        headerBytes.CopyTo(buffer.AsSpan());
        payload.CopyTo(buffer.AsSpan(headerBytes.Length));
        return buffer;
    }
}

View File

@@ -0,0 +1,29 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<UseConcelierTestInfra>false</UseConcelierTestInfra>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.4" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.1" />
</ItemGroup>
<ItemGroup>
<Using Include="Xunit" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\\..\\StellaOps.AirGap.Importer\\StellaOps.AirGap.Importer.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,25 @@
# StellaOps.Attestor.Persistence — Local Agent Charter
## Scope
- This charter applies to `src/Attestor/__Libraries/StellaOps.Attestor.Persistence/**`.
## Primary roles
- Backend engineer (C# / .NET 10, EF Core, Npgsql).
- QA automation engineer (xUnit) for persistence + matcher logic.
## Required reading (treat as read before edits)
- `docs/modules/attestor/architecture.md`
- `docs/db/SPECIFICATION.md`
- `docs/db/MIGRATION_STRATEGY.md`
- PostgreSQL 16 docs (arrays, indexes, JSONB, query plans).
## Working agreements
- Determinism is mandatory where hashes/IDs are produced; all timestamps are UTC.
- Offline-friendly defaults: no network calls from library code paths.
- Migrations must be idempotent and safe to re-run.
- Prefer small, composable services with explicit interfaces (`I*`).
## Testing expectations
- Unit/integration tests live in `src/Attestor/__Tests/StellaOps.Attestor.Persistence.Tests`.
- Perf dataset and query harness lives under `src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Perf` and must be deterministic (fixed data, fixed sizes, documented parameters).

View File

@@ -5,6 +5,9 @@
-- Create schema
CREATE SCHEMA IF NOT EXISTS proofchain;
-- Required for gen_random_uuid() defaults
CREATE EXTENSION IF NOT EXISTS pgcrypto;
-- Create verification_result enum type
DO $$
BEGIN

View File

@@ -0,0 +1,18 @@
# ProofChain DB perf harness
This folder provides a deterministic, production-like dataset and a small harness to validate index/query performance for the ProofChain schema (`proofchain.*`).
## Files
- `seed.sql` — deterministic dataset generator (uses SQL functions + `generate_series`).
- `queries.sql` — representative queries with `EXPLAIN (ANALYZE, BUFFERS)`.
- `run-perf.ps1` — starts a local PostgreSQL 16 container, applies migrations, seeds data, runs queries, and captures output.
## Run
From repo root:
```powershell
pwsh -File src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Perf/run-perf.ps1
```
Output is written to `docs/db/reports/proofchain-schema-perf-2025-12-17.md`.

View File

@@ -0,0 +1,57 @@
-- Representative query set for ProofChain schema perf validation.
-- Run after applying migrations + seeding (`seed.sql`).
\timing on
-- Row counts
SELECT
(SELECT count(*) FROM proofchain.trust_anchors) AS trust_anchors,
(SELECT count(*) FROM proofchain.sbom_entries) AS sbom_entries,
(SELECT count(*) FROM proofchain.dsse_envelopes) AS dsse_envelopes,
(SELECT count(*) FROM proofchain.spines) AS spines,
(SELECT count(*) FROM proofchain.rekor_entries) AS rekor_entries;
-- 1) SBOM entry lookup via unique constraint (bom_digest, purl, version)
EXPLAIN (ANALYZE, BUFFERS)
SELECT entry_id, bom_digest, purl, version
FROM proofchain.sbom_entries
WHERE bom_digest = proofchain.hex64('bom:1')
AND purl = format('pkg:npm/vendor-%02s/pkg-%05s', 1, 1)
AND version = '1.0.1';
-- 2) Fetch all entries for a given SBOM digest (index on bom_digest)
EXPLAIN (ANALYZE, BUFFERS)
SELECT entry_id, purl, version
FROM proofchain.sbom_entries
WHERE bom_digest = proofchain.hex64('bom:1')
ORDER BY purl
LIMIT 100;
-- 3) Envelopes for entry + predicate (compound index)
EXPLAIN (ANALYZE, BUFFERS)
SELECT env_id, predicate_type, signer_keyid, body_hash
FROM proofchain.dsse_envelopes
WHERE entry_id = proofchain.uuid_from_text('entry:1')
AND predicate_type = 'evidence.stella/v1';
-- 4) Spine lookup via bundle_id (unique index)
EXPLAIN (ANALYZE, BUFFERS)
SELECT entry_id, bundle_id, policy_version
FROM proofchain.spines
WHERE bundle_id = proofchain.hex64('bundle:1');
-- 5) Rekor lookup by log index (index)
EXPLAIN (ANALYZE, BUFFERS)
SELECT dsse_sha256, uuid, integrated_time
FROM proofchain.rekor_entries
WHERE log_index = 10;
-- 6) Join: entries -> envelopes by bom_digest
EXPLAIN (ANALYZE, BUFFERS)
SELECT e.entry_id, d.predicate_type, d.body_hash
FROM proofchain.sbom_entries e
JOIN proofchain.dsse_envelopes d ON d.entry_id = e.entry_id
WHERE e.bom_digest = proofchain.hex64('bom:1')
AND d.predicate_type = 'evidence.stella/v1'
ORDER BY e.purl
LIMIT 100;

View File

@@ -0,0 +1,104 @@
param(
[string]$PostgresImage = "postgres:16",
[string]$ContainerName = "stellaops-proofchain-perf",
[int]$Port = 54329,
[string]$Database = "proofchain_perf",
[string]$User = "postgres",
[string]$Password = "postgres"
)
$ErrorActionPreference = "Stop"
function Resolve-RepoRoot {
$here = Split-Path -Parent $PSCommandPath
return (Resolve-Path (Join-Path $here "../../../../..")).Path
}
$repoRoot = Resolve-RepoRoot
$perfDir = Join-Path $repoRoot "src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Perf"
$migrationFile = Join-Path $repoRoot "src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Migrations/20251214000001_AddProofChainSchema.sql"
$seedFile = Join-Path $perfDir "seed.sql"
$queriesFile = Join-Path $perfDir "queries.sql"
$reportFile = Join-Path $repoRoot "docs/db/reports/proofchain-schema-perf-2025-12-17.md"
Write-Host "Using repo root: $repoRoot"
Write-Host "Starting PostgreSQL container '$ContainerName' on localhost:$Port..."
try {
docker rm -f $ContainerName *> $null 2>&1
} catch {}
$null = docker run --rm -d --name $ContainerName `
-e POSTGRES_PASSWORD=$Password `
-e POSTGRES_DB=$Database `
-p ${Port}:5432 `
$PostgresImage
try {
$ready = $false
for ($i = 0; $i -lt 60; $i++) {
docker exec $ContainerName pg_isready -U $User -d $Database *> $null 2>&1
if ($LASTEXITCODE -eq 0) {
$ready = $true
break
}
Start-Sleep -Seconds 1
}
if (-not $ready) {
throw "PostgreSQL did not become ready within 60 seconds."
}
Write-Host "Applying migrations..."
$migrationSql = Get-Content -Raw -Encoding UTF8 $migrationFile
$migrationSql | docker exec -i $ContainerName psql -v ON_ERROR_STOP=1 -U $User -d $Database | Out-Host
Write-Host "Seeding deterministic dataset..."
$seedSql = Get-Content -Raw -Encoding UTF8 $seedFile
$seedSql | docker exec -i $ContainerName psql -v ON_ERROR_STOP=1 -U $User -d $Database | Out-Host
Write-Host "Running query suite..."
$queriesSql = Get-Content -Raw -Encoding UTF8 $queriesFile
$queryOutput = $queriesSql | docker exec -i $ContainerName psql -v ON_ERROR_STOP=1 -U $User -d $Database
$queryOutputText = ($queryOutput -join "`n").TrimEnd()
$headerLines = @(
'# ProofChain schema performance report (2025-12-17)',
'',
'## Environment',
('- Postgres image: `{0}`' -f $PostgresImage),
('- DB: `{0}`' -f $Database),
('- Port: `{0}`' -f $Port),
'- Host: `localhost`',
'',
'## Dataset',
'- Source: `src/Attestor/__Libraries/StellaOps.Attestor.Persistence/Perf/seed.sql`',
'- Rows:',
' - `trust_anchors`: 50',
' - `sbom_entries`: 20000',
' - `dsse_envelopes`: 60000',
' - `spines`: 20000',
' - `rekor_entries`: 2000',
'',
'## Query Output',
'',
'```text',
$queryOutputText,
'```',
''
)
$header = ($headerLines -join "`n")
$dir = Split-Path -Parent $reportFile
if (!(Test-Path $dir)) {
New-Item -ItemType Directory -Path $dir -Force | Out-Null
}
Set-Content -Path $reportFile -Value $header -Encoding UTF8
Write-Host "Wrote report: $reportFile"
}
finally {
Write-Host "Stopping container..."
docker rm -f $ContainerName *> $null 2>&1
}

View File

@@ -0,0 +1,166 @@
-- Deterministic ProofChain dataset generator (offline-friendly).
-- Designed for index/query perf validation (SPRINT_0501_0006_0001 · PROOF-DB-0011).
-- Helper: deterministic UUID from text (no extensions required).
CREATE OR REPLACE FUNCTION proofchain.uuid_from_text(input text) RETURNS uuid
LANGUAGE SQL
IMMUTABLE
STRICT
AS $$
SELECT (
substring(md5(input), 1, 8) || '-' ||
substring(md5(input), 9, 4) || '-' ||
substring(md5(input), 13, 4) || '-' ||
substring(md5(input), 17, 4) || '-' ||
substring(md5(input), 21, 12)
)::uuid;
$$;
-- Helper: deterministic 64-hex string from text.
CREATE OR REPLACE FUNCTION proofchain.hex64(input text) RETURNS text
LANGUAGE SQL
IMMUTABLE
STRICT
AS $$
SELECT md5(input) || md5(input || ':2');
$$;
-- Parameters
-- Anchors: 50
-- SBOM entries: 20_000 (200 SBOM digests * 100 entries each)
-- Envelopes: 60_000 (3 per entry)
-- Spines: 20_000 (1 per entry)
-- Rekor entries: 2_000 (every 10th entry)
-- Trust anchors
INSERT INTO proofchain.trust_anchors(
anchor_id,
purl_pattern,
allowed_keyids,
allowed_predicate_types,
policy_ref,
policy_version,
revoked_keys,
is_active,
created_at,
updated_at
)
SELECT
proofchain.uuid_from_text('anchor:' || i),
format('pkg:npm/vendor-%02s/*', i),
ARRAY[format('key-%02s', i)]::text[],
ARRAY[
'evidence.stella/v1',
'reasoning.stella/v1',
'cdx-vex.stella/v1',
'proofspine.stella/v1',
'verdict.stella/v1',
'https://stella-ops.org/predicates/sbom-linkage/v1'
]::text[],
format('policy-%02s', i),
'v2025.12',
ARRAY[]::text[],
TRUE,
TIMESTAMPTZ '2025-12-17T00:00:00Z',
TIMESTAMPTZ '2025-12-17T00:00:00Z'
FROM generate_series(1, 50) i
ON CONFLICT (anchor_id) DO NOTHING;
-- SBOM entries
INSERT INTO proofchain.sbom_entries(
entry_id,
bom_digest,
purl,
version,
artifact_digest,
trust_anchor_id,
created_at
)
SELECT
proofchain.uuid_from_text('entry:' || i),
proofchain.hex64('bom:' || (((i - 1) / 100) + 1)),
format('pkg:npm/vendor-%02s/pkg-%05s', (((i - 1) % 50) + 1), i),
format('1.0.%s', (((i - 1) % 50) + 1)),
proofchain.hex64('artifact:' || i),
proofchain.uuid_from_text('anchor:' || (((i - 1) % 50) + 1)),
TIMESTAMPTZ '2025-12-17T00:00:00Z' + ((i - 1) || ' seconds')::interval
FROM generate_series(1, 20000) i
ON CONFLICT ON CONSTRAINT uq_sbom_entry DO NOTHING;
-- DSSE envelopes (3 per entry)
INSERT INTO proofchain.dsse_envelopes(
env_id,
entry_id,
predicate_type,
signer_keyid,
body_hash,
envelope_blob_ref,
signed_at,
created_at
)
SELECT
proofchain.uuid_from_text('env:' || i || ':' || p.predicate_type),
proofchain.uuid_from_text('entry:' || i),
p.predicate_type,
format('key-%02s', (((i - 1) % 50) + 1)),
proofchain.hex64('body:' || i || ':' || p.predicate_type),
format('oci://proofchain/blobs/%s', proofchain.hex64('body:' || i || ':' || p.predicate_type)),
TIMESTAMPTZ '2025-12-17T00:00:00Z' + ((i - 1) || ' seconds')::interval,
TIMESTAMPTZ '2025-12-17T00:00:00Z' + ((i - 1) || ' seconds')::interval
FROM generate_series(1, 20000) i
CROSS JOIN (
VALUES
('evidence.stella/v1'),
('reasoning.stella/v1'),
('cdx-vex.stella/v1')
) AS p(predicate_type)
ON CONFLICT ON CONSTRAINT uq_dsse_envelope DO NOTHING;
-- Spines (1 per entry)
INSERT INTO proofchain.spines(
entry_id,
bundle_id,
evidence_ids,
reasoning_id,
vex_id,
anchor_id,
policy_version,
created_at
)
SELECT
proofchain.uuid_from_text('entry:' || i),
proofchain.hex64('bundle:' || i),
ARRAY[
'sha256:' || proofchain.hex64('evidence:' || i || ':1'),
'sha256:' || proofchain.hex64('evidence:' || i || ':2'),
'sha256:' || proofchain.hex64('evidence:' || i || ':3')
]::text[],
proofchain.hex64('reasoning:' || i),
proofchain.hex64('vex:' || i),
proofchain.uuid_from_text('anchor:' || (((i - 1) % 50) + 1)),
'v2025.12',
TIMESTAMPTZ '2025-12-17T00:00:00Z' + ((i - 1) || ' seconds')::interval
FROM generate_series(1, 20000) i
ON CONFLICT ON CONSTRAINT uq_spine_bundle DO NOTHING;
-- Rekor entries (every 10th entry, points at the evidence envelope)
INSERT INTO proofchain.rekor_entries(
dsse_sha256,
log_index,
log_id,
uuid,
integrated_time,
inclusion_proof,
env_id
)
SELECT
proofchain.hex64('rekor:' || i),
i,
'test-log',
format('uuid-%s', i),
1734393600 + i,
'{"hashes":[],"treeSize":1,"rootHash":"00"}'::jsonb,
proofchain.uuid_from_text('env:' || i || ':evidence.stella/v1')
FROM generate_series(1, 20000, 10) i
ON CONFLICT (dsse_sha256) DO NOTHING;

View File

@@ -1,6 +1,7 @@
using System.Text.RegularExpressions;
using Microsoft.Extensions.Logging;
using StellaOps.Attestor.Persistence.Entities;
using StellaOps.Attestor.Persistence.Repositories;
namespace StellaOps.Attestor.Persistence.Services;
@@ -75,7 +76,7 @@ public sealed class TrustAnchorMatcher : ITrustAnchorMatcher
{
ArgumentException.ThrowIfNullOrEmpty(purl);
var anchors = await _repository.GetActiveAnchorsAsync(cancellationToken);
var anchors = await _repository.GetActiveTrustAnchorsAsync(cancellationToken);
TrustAnchorMatchResult? bestMatch = null;
@@ -284,14 +285,3 @@ public sealed class TrustAnchorMatcher : ITrustAnchorMatcher
return true;
}
}
/// <summary>
/// Repository interface extension for trust anchor queries.
/// </summary>
public interface IProofChainRepository
{
/// <summary>
/// Gets all active trust anchors.
/// </summary>
Task<IReadOnlyList<TrustAnchorEntity>> GetActiveAnchorsAsync(CancellationToken cancellationToken = default);
}

View File

@@ -20,4 +20,8 @@
</None>
</ItemGroup>
<ItemGroup>
<Compile Remove="Tests\\**\\*.cs" />
</ItemGroup>
</Project>

View File

@@ -84,10 +84,15 @@ public abstract record ContentAddressedId
}
}
public sealed record GenericContentAddressedId(string Algorithm, string Digest) : ContentAddressedId(Algorithm, Digest);
public sealed record GenericContentAddressedId(string Algorithm, string Digest) : ContentAddressedId(Algorithm, Digest)
{
public override string ToString() => base.ToString();
}
public sealed record ArtifactId(string Digest) : ContentAddressedId("sha256", Digest)
{
public override string ToString() => base.ToString();
public new static ArtifactId Parse(string value) => new(ParseSha256(value));
public static bool TryParse(string value, out ArtifactId? id) => TryParseSha256(value, out id);
@@ -122,21 +127,29 @@ public sealed record ArtifactId(string Digest) : ContentAddressedId("sha256", Di
public sealed record EvidenceId(string Digest) : ContentAddressedId("sha256", Digest)
{
public override string ToString() => base.ToString();
public new static EvidenceId Parse(string value) => new(Sha256IdParser.Parse(value, "EvidenceID"));
}
public sealed record ReasoningId(string Digest) : ContentAddressedId("sha256", Digest)
{
public override string ToString() => base.ToString();
public new static ReasoningId Parse(string value) => new(Sha256IdParser.Parse(value, "ReasoningID"));
}
public sealed record VexVerdictId(string Digest) : ContentAddressedId("sha256", Digest)
{
public override string ToString() => base.ToString();
public new static VexVerdictId Parse(string value) => new(Sha256IdParser.Parse(value, "VEXVerdictID"));
}
public sealed record ProofBundleId(string Digest) : ContentAddressedId("sha256", Digest)
{
public override string ToString() => base.ToString();
public new static ProofBundleId Parse(string value) => new(Sha256IdParser.Parse(value, "ProofBundleID"));
}

View File

@@ -0,0 +1,42 @@
using System;
using System.Globalization;
using System.Text;
namespace StellaOps.Attestor.ProofChain.Signing;
/// <summary>
/// Builds the DSSE v1 pre-authentication encoding (PAE):
/// "DSSEv1 SP LEN(type) SP type SP LEN(payload) SP payload",
/// where both lengths are UTF-8 byte counts rendered as decimal ASCII.
/// </summary>
internal static class DssePreAuthenticationEncoding
{
    public static byte[] Compute(string payloadType, ReadOnlySpan<byte> payload)
    {
        // A null payload type is encoded as the empty string (length 0).
        var type = payloadType ?? string.Empty;
        var typeByteCount = Encoding.UTF8.GetByteCount(type);
        // Assemble the textual header first, then encode it in one pass;
        // UTF-8 of a concatenation equals the concatenation of UTF-8 parts.
        var headerText = string.Concat(
            "DSSEv1 ",
            typeByteCount.ToString(CultureInfo.InvariantCulture),
            " ",
            type,
            " ",
            payload.Length.ToString(CultureInfo.InvariantCulture),
            " ");
        var header = Encoding.UTF8.GetBytes(headerText);
        var pae = new byte[header.Length + payload.Length];
        header.CopyTo(pae.AsSpan());
        payload.CopyTo(pae.AsSpan(header.Length));
        return pae;
    }
}

View File

@@ -0,0 +1,20 @@
using StellaOps.Attestor.Envelope;
namespace StellaOps.Attestor.ProofChain.Signing;
/// <summary>
/// Provides key material for signing and verifying proof chain DSSE envelopes.
/// </summary>
public interface IProofChainKeyStore
{
    /// <summary>
    /// Resolve the signing key for a given key profile.
    /// </summary>
    /// <param name="profile">Signing key profile to resolve.</param>
    /// <param name="key">The resolved key when the method returns <c>true</c>.</param>
    /// <returns><c>true</c> when a signing key is configured for the profile.</returns>
    bool TryGetSigningKey(SigningKeyProfile profile, out EnvelopeKey key);
    /// <summary>
    /// Resolve a verification key by key identifier.
    /// </summary>
    /// <param name="keyId">Key identifier as recorded in a DSSE signature.</param>
    /// <param name="key">The resolved key when the method returns <c>true</c>.</param>
    /// <returns><c>true</c> when a verification key is known for <paramref name="keyId"/>.</returns>
    bool TryGetVerificationKey(string keyId, out EnvelopeKey key);
}

View File

@@ -1,6 +1,7 @@
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using System.Text.Json.Serialization;
using StellaOps.Attestor.ProofChain.Statements;
namespace StellaOps.Attestor.ProofChain.Signing;
@@ -55,16 +56,19 @@ public sealed record DsseEnvelope
/// <summary>
/// The payload type (always "application/vnd.in-toto+json").
/// </summary>
[JsonPropertyName("payloadType")]
public required string PayloadType { get; init; }
/// <summary>
/// Base64-encoded payload (the statement JSON).
/// </summary>
[JsonPropertyName("payload")]
public required string Payload { get; init; }
/// <summary>
/// Signatures over the payload.
/// </summary>
[JsonPropertyName("signatures")]
public required IReadOnlyList<DsseSignature> Signatures { get; init; }
}
@@ -76,11 +80,13 @@ public sealed record DsseSignature
/// <summary>
/// The key ID that produced this signature.
/// </summary>
[JsonPropertyName("keyid")]
public required string KeyId { get; init; }
/// <summary>
/// Base64-encoded signature.
/// </summary>
[JsonPropertyName("sig")]
public required string Sig { get; init; }
}

View File

@@ -0,0 +1,196 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Attestor.Envelope;
using StellaOps.Attestor.ProofChain.Json;
using StellaOps.Attestor.ProofChain.Statements;
namespace StellaOps.Attestor.ProofChain.Signing;
/// <summary>
/// Default implementation for creating and verifying DSSE envelopes for proof chain statements.
/// Signing serializes the statement, canonicalizes it, and signs the DSSE
/// pre-authentication encoding; verification checks signatures in deterministic
/// (keyid-ordered) fashion against an optional allow-list of key IDs.
/// </summary>
public sealed class ProofChainSigner : IProofChainSigner
{
    public const string InTotoPayloadType = "application/vnd.in-toto+json";

    // Statement serialization must be stable input to the canonicalizer:
    // no indentation, property names as declared, nulls omitted.
    private static readonly JsonSerializerOptions StatementSerializerOptions = new()
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        PropertyNamingPolicy = null,
        WriteIndented = false
    };

    private readonly IProofChainKeyStore _keyStore;
    private readonly IJsonCanonicalizer _canonicalizer;
    private readonly EnvelopeSignatureService _signatureService;

    /// <summary>
    /// Creates a signer backed by the given key store and canonicalizer.
    /// </summary>
    /// <param name="keyStore">Source of signing/verification key material.</param>
    /// <param name="canonicalizer">Canonicalizer applied to the serialized statement.</param>
    /// <param name="signatureService">Optional signature service; a default instance is created when null.</param>
    public ProofChainSigner(
        IProofChainKeyStore keyStore,
        IJsonCanonicalizer canonicalizer,
        EnvelopeSignatureService? signatureService = null)
    {
        _keyStore = keyStore ?? throw new ArgumentNullException(nameof(keyStore));
        _canonicalizer = canonicalizer ?? throw new ArgumentNullException(nameof(canonicalizer));
        _signatureService = signatureService ?? new EnvelopeSignatureService();
    }

    /// <summary>
    /// Signs an in-toto statement and wraps it in a single-signature DSSE envelope.
    /// </summary>
    /// <exception cref="InvalidOperationException">No key for the profile, or signing failed.</exception>
    public Task<DsseEnvelope> SignStatementAsync<T>(
        T statement,
        SigningKeyProfile keyProfile,
        CancellationToken ct = default) where T : InTotoStatement
    {
        ArgumentNullException.ThrowIfNull(statement);
        ct.ThrowIfCancellationRequested();
        if (!_keyStore.TryGetSigningKey(keyProfile, out var key))
        {
            throw new InvalidOperationException($"No signing key configured for profile '{keyProfile}'.");
        }
        // Serialize with the runtime type so derived statement members are included.
        var statementJson = JsonSerializer.SerializeToUtf8Bytes(statement, statement.GetType(), StatementSerializerOptions);
        var canonicalPayload = _canonicalizer.Canonicalize(statementJson);
        // The signature covers the DSSE PAE, not the raw payload bytes.
        var pae = DssePreAuthenticationEncoding.Compute(InTotoPayloadType, canonicalPayload);
        var signatureResult = _signatureService.Sign(pae, key, ct);
        if (!signatureResult.IsSuccess)
        {
            throw new InvalidOperationException($"DSSE signing failed: {signatureResult.Error.Code} {signatureResult.Error.Message}");
        }
        var signature = signatureResult.Value;
        return Task.FromResult(new DsseEnvelope
        {
            PayloadType = InTotoPayloadType,
            Payload = Convert.ToBase64String(canonicalPayload),
            Signatures =
            [
                new DsseSignature
                {
                    KeyId = signature.KeyId,
                    Sig = Convert.ToBase64String(signature.Value.Span)
                }
            ]
        });
    }

    /// <summary>
    /// Verifies a DSSE envelope against the key store. An empty
    /// <paramref name="allowedKeyIds"/> list allows any known key; otherwise
    /// only signatures whose keyid appears in the list are considered.
    /// Returns the first successful verification (signatures are checked in
    /// keyid order for determinism) or a failure with the last error seen.
    /// </summary>
    public Task<SignatureVerificationResult> VerifyEnvelopeAsync(
        DsseEnvelope envelope,
        IReadOnlyList<string> allowedKeyIds,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(envelope);
        ArgumentNullException.ThrowIfNull(allowedKeyIds);
        ct.ThrowIfCancellationRequested();
        if (envelope.Signatures is null || envelope.Signatures.Count == 0)
        {
            return Task.FromResult(new SignatureVerificationResult
            {
                IsValid = false,
                KeyId = string.Empty,
                ErrorMessage = "Envelope contains no signatures."
            });
        }
        if (string.IsNullOrWhiteSpace(envelope.Payload))
        {
            return Task.FromResult(new SignatureVerificationResult
            {
                IsValid = false,
                KeyId = string.Empty,
                ErrorMessage = "Envelope payload is missing."
            });
        }
        byte[] payloadBytes;
        try
        {
            payloadBytes = Convert.FromBase64String(envelope.Payload);
        }
        catch (FormatException ex)
        {
            return Task.FromResult(new SignatureVerificationResult
            {
                IsValid = false,
                KeyId = string.Empty,
                ErrorMessage = $"Envelope payload is not valid base64: {ex.Message}"
            });
        }
        // Recompute the PAE exactly as it was signed.
        var pae = DssePreAuthenticationEncoding.Compute(envelope.PayloadType, payloadBytes);
        var allowAnyKey = allowedKeyIds.Count == 0;
        var allowedSet = allowAnyKey ? null : new HashSet<string>(allowedKeyIds, StringComparer.Ordinal);
        string? lastError = null;
        foreach (var signature in envelope.Signatures.OrderBy(static s => s.KeyId, StringComparer.Ordinal))
        {
            if (signature is null)
            {
                continue;
            }
            if (!allowAnyKey && !allowedSet!.Contains(signature.KeyId))
            {
                continue;
            }
            if (!_keyStore.TryGetVerificationKey(signature.KeyId, out var verificationKey))
            {
                lastError = $"No verification key available for keyid '{signature.KeyId}'.";
                continue;
            }
            byte[] signatureBytes;
            try
            {
                signatureBytes = Convert.FromBase64String(signature.Sig);
            }
            catch (FormatException ex)
            {
                lastError = $"Signature for keyid '{signature.KeyId}' is not valid base64: {ex.Message}";
                continue;
            }
            var envelopeSignature = new EnvelopeSignature(signature.KeyId, verificationKey.AlgorithmId, signatureBytes);
            var verificationResult = _signatureService.Verify(pae, envelopeSignature, verificationKey, ct);
            if (verificationResult.IsSuccess)
            {
                return Task.FromResult(new SignatureVerificationResult
                {
                    IsValid = true,
                    KeyId = signature.KeyId
                });
            }
            lastError = verificationResult.Error.Message;
        }
        if (!allowAnyKey)
        {
            // Fix: guard against null entries here too — the main loop skips
            // them, but this LINQ check previously dereferenced s.KeyId and
            // could throw NullReferenceException on a null signature element.
            var hasAllowed = envelope.Signatures.Any(s => s is not null && allowedSet!.Contains(s.KeyId));
            if (!hasAllowed)
            {
                return Task.FromResult(new SignatureVerificationResult
                {
                    IsValid = false,
                    KeyId = string.Empty,
                    ErrorMessage = "No signatures match the allowed key IDs."
                });
            }
        }
        return Task.FromResult(new SignatureVerificationResult
        {
            IsValid = false,
            KeyId = string.Empty,
            ErrorMessage = lastError ?? "No valid signature found."
        });
    }
}

View File

@@ -8,4 +8,12 @@
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\StellaOps.Attestor.Envelope\StellaOps.Attestor.Envelope.csproj" />
</ItemGroup>
</Project>

View File

@@ -133,21 +133,26 @@ public sealed class VerificationPipeline : IVerificationPipeline
var pipelineDuration = _timeProvider.GetUtcNow() - pipelineStartTime;
// Generate receipt
var anchorId = context.TrustAnchorId ?? request.TrustAnchorId ?? new TrustAnchorId(Guid.Empty);
var checks = stepResults.Select(step => new VerificationCheck
{
Check = step.StepName,
Status = step.Passed ? VerificationResult.Pass : VerificationResult.Fail,
KeyId = step.KeyId,
Expected = step.Expected,
Actual = step.Actual,
LogIndex = step.LogIndex,
Details = step.Passed ? step.Details : step.ErrorMessage
}).ToList();
var receipt = new VerificationReceipt
{
ReceiptId = GenerateReceiptId(),
Result = overallPassed ? VerificationResult.Pass : VerificationResult.Fail,
ProofBundleId = request.ProofBundleId,
VerifiedAt = pipelineStartTime,
VerifierVersion = request.VerifierVersion,
ProofBundleId = request.ProofBundleId.Value,
FailureReason = failureReason,
StepsSummary = stepResults.Select(s => new VerificationStepSummary
{
StepName = s.StepName,
Passed = s.Passed,
DurationMs = (int)s.Duration.TotalMilliseconds
}).ToList(),
TotalDurationMs = (int)pipelineDuration.TotalMilliseconds
AnchorId = anchorId,
Result = overallPassed ? VerificationResult.Pass : VerificationResult.Fail,
Checks = checks
};
_logger.LogInformation(
@@ -170,12 +175,6 @@ public sealed class VerificationPipeline : IVerificationPipeline
ErrorMessage = "Verification cancelled"
};
private static string GenerateReceiptId()
{
var bytes = new byte[16];
RandomNumberGenerator.Fill(bytes);
return $"receipt:{Convert.ToHexString(bytes).ToLowerInvariant()}";
}
}
/// <summary>
@@ -296,7 +295,7 @@ public sealed class IdRecomputationVerificationStep : IVerificationStep
var recomputedId = ComputeProofBundleId(bundle);
// Compare with claimed ID
var claimedId = context.ProofBundleId.Value;
var claimedId = context.ProofBundleId.ToString();
if (!recomputedId.Equals(claimedId, StringComparison.OrdinalIgnoreCase))
{
return new VerificationStepResult
@@ -516,9 +515,19 @@ public sealed class TrustAnchorVerificationStep : IVerificationStep
}
// Resolve trust anchor
var anchor = context.TrustAnchorId is not null
? await _trustAnchorResolver.GetAnchorAsync(context.TrustAnchorId.Value, ct)
: await _trustAnchorResolver.FindAnchorForProofAsync(context.ProofBundleId, ct);
TrustAnchorInfo? anchor;
if (context.TrustAnchorId is TrustAnchorId anchorId)
{
anchor = await _trustAnchorResolver.GetAnchorAsync(anchorId.Value, ct);
}
else
{
anchor = await _trustAnchorResolver.FindAnchorForProofAsync(context.ProofBundleId, ct);
if (anchor is not null)
{
context.TrustAnchorId = new TrustAnchorId(anchor.AnchorId);
}
}
if (anchor is null)
{

View File

@@ -0,0 +1,32 @@
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Test project: xUnit + NSubstitute + FluentAssertions over StellaOps.Attestor.Persistence. -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <TreatWarningsAsErrors>false</TreatWarningsAsErrors>
    <!-- Opts out of the shared Concelier test infrastructure imported by the repo's build props. -->
    <UseConcelierTestInfra>false</UseConcelierTestInfra>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="coverlet.collector" Version="6.0.4" />
    <PackageReference Include="FluentAssertions" Version="6.12.0" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="NSubstitute" Version="5.1.0" />
    <PackageReference Include="xunit" Version="2.9.3" />
    <PackageReference Include="xunit.runner.visualstudio" Version="3.0.1" />
  </ItemGroup>
  <ItemGroup>
    <!-- Global using so test files do not need "using Xunit;" individually. -->
    <Using Include="Xunit" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\..\__Libraries\StellaOps.Attestor.Persistence\StellaOps.Attestor.Persistence.csproj" />
  </ItemGroup>
</Project>

View File

@@ -1,184 +1,143 @@
using StellaOps.Attestor.Persistence.Entities;
using StellaOps.Attestor.Persistence.Services;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using Xunit;
using NSubstitute;
using StellaOps.Attestor.Persistence.Entities;
using StellaOps.Attestor.Persistence.Repositories;
using StellaOps.Attestor.Persistence.Services;
namespace StellaOps.Attestor.Persistence.Tests;
/// <summary>
/// Integration tests for proof chain database operations.
/// SPRINT_0501_0006_0001 - Task #10
/// Tests for trust anchor glob matching and allowlists.
/// Sprint: SPRINT_0501_0006_0001_proof_chain_database_schema
/// Task: PROOF-DB-0010
/// </summary>
public sealed class ProofChainRepositoryIntegrationTests
public sealed class TrustAnchorMatcherTests
{
private readonly Mock<IProofChainRepository> _repositoryMock;
private readonly IProofChainRepository _repository;
private readonly TrustAnchorMatcher _matcher;
public ProofChainRepositoryIntegrationTests()
public TrustAnchorMatcherTests()
{
_repositoryMock = new Mock<IProofChainRepository>();
_matcher = new TrustAnchorMatcher(
_repositoryMock.Object,
NullLogger<TrustAnchorMatcher>.Instance);
_repository = Substitute.For<IProofChainRepository>();
_matcher = new TrustAnchorMatcher(_repository, NullLogger<TrustAnchorMatcher>.Instance);
}
[Fact]
public async Task FindMatchAsync_ExactPattern_MatchesCorrectly()
{
// Arrange
var anchor = CreateAnchor("pkg:npm/lodash@4.17.21", ["key-1"]);
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
.ReturnsAsync([anchor]);
await SeedAnchors(anchor);
// Act
var result = await _matcher.FindMatchAsync("pkg:npm/lodash@4.17.21");
// Assert
Assert.NotNull(result);
Assert.Equal(anchor.AnchorId, result.Anchor.AnchorId);
result.Should().NotBeNull();
result!.Anchor.AnchorId.Should().Be(anchor.AnchorId);
}
[Fact]
public async Task FindMatchAsync_WildcardPattern_MatchesPackages()
{
// Arrange
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
.ReturnsAsync([anchor]);
await SeedAnchors(anchor);
// Act
var result = await _matcher.FindMatchAsync("pkg:npm/lodash@4.17.21");
// Assert
Assert.NotNull(result);
Assert.Equal("pkg:npm/*", result.MatchedPattern);
result.Should().NotBeNull();
result!.MatchedPattern.Should().Be("pkg:npm/*");
}
[Fact]
public async Task FindMatchAsync_DoubleWildcard_MatchesNestedPaths()
{
// Arrange
var anchor = CreateAnchor("pkg:npm/@scope/**", ["key-1"]);
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
.ReturnsAsync([anchor]);
await SeedAnchors(anchor);
// Act
var result = await _matcher.FindMatchAsync("pkg:npm/@scope/sub/package@1.0.0");
// Assert
Assert.NotNull(result);
result.Should().NotBeNull();
}
[Fact]
public async Task FindMatchAsync_MultipleMatches_ReturnsMoreSpecific()
{
// Arrange
var genericAnchor = CreateAnchor("pkg:npm/*", ["key-generic"], "generic");
var specificAnchor = CreateAnchor("pkg:npm/lodash@*", ["key-specific"], "specific");
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
.ReturnsAsync([genericAnchor, specificAnchor]);
var genericAnchor = CreateAnchor("pkg:npm/*", ["key-generic"], policyRef: "generic");
var specificAnchor = CreateAnchor("pkg:npm/lodash@*", ["key-specific"], policyRef: "specific");
await SeedAnchors(genericAnchor, specificAnchor);
// Act
var result = await _matcher.FindMatchAsync("pkg:npm/lodash@4.17.21");
// Assert
Assert.NotNull(result);
Assert.Equal("specific", result.Anchor.PolicyRef);
result.Should().NotBeNull();
result!.Anchor.PolicyRef.Should().Be("specific");
}
[Fact]
public async Task FindMatchAsync_NoMatch_ReturnsNull()
{
// Arrange
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
.ReturnsAsync([anchor]);
await SeedAnchors(anchor);
// Act
var result = await _matcher.FindMatchAsync("pkg:pypi/requests@2.28.0");
// Assert
Assert.Null(result);
result.Should().BeNull();
}
[Fact]
public async Task IsKeyAllowedAsync_AllowedKey_ReturnsTrue()
{
// Arrange
var anchor = CreateAnchor("pkg:npm/*", ["key-1", "key-2"]);
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
.ReturnsAsync([anchor]);
await SeedAnchors(anchor);
// Act
var allowed = await _matcher.IsKeyAllowedAsync("pkg:npm/lodash@4.17.21", "key-1");
// Assert
Assert.True(allowed);
allowed.Should().BeTrue();
}
[Fact]
public async Task IsKeyAllowedAsync_DisallowedKey_ReturnsFalse()
{
// Arrange
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
.ReturnsAsync([anchor]);
await SeedAnchors(anchor);
// Act
var allowed = await _matcher.IsKeyAllowedAsync("pkg:npm/lodash@4.17.21", "key-unknown");
// Assert
Assert.False(allowed);
allowed.Should().BeFalse();
}
[Fact]
public async Task IsKeyAllowedAsync_RevokedKey_ReturnsFalse()
{
// Arrange
var anchor = CreateAnchor("pkg:npm/*", ["key-1"], revokedKeys: ["key-1"]);
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
.ReturnsAsync([anchor]);
await SeedAnchors(anchor);
// Act
var allowed = await _matcher.IsKeyAllowedAsync("pkg:npm/lodash@4.17.21", "key-1");
// Assert
Assert.False(allowed); // Key is revoked even if in allowed list
allowed.Should().BeFalse();
}
[Fact]
public async Task IsPredicateAllowedAsync_NoRestrictions_AllowsAll()
{
// Arrange
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
anchor.AllowedPredicateTypes = null;
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
.ReturnsAsync([anchor]);
await SeedAnchors(anchor);
// Act
var allowed = await _matcher.IsPredicateAllowedAsync(
"pkg:npm/lodash@4.17.21",
"https://in-toto.io/attestation/vulns/v0.1");
// Assert
Assert.True(allowed);
allowed.Should().BeTrue();
}
[Fact]
public async Task IsPredicateAllowedAsync_WithRestrictions_EnforcesAllowlist()
{
// Arrange
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
anchor.AllowedPredicateTypes = ["evidence.stella/v1", "sbom.stella/v1"];
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
.ReturnsAsync([anchor]);
await SeedAnchors(anchor);
// Act & Assert
Assert.True(await _matcher.IsPredicateAllowedAsync(
"pkg:npm/lodash@4.17.21", "evidence.stella/v1"));
Assert.False(await _matcher.IsPredicateAllowedAsync(
"pkg:npm/lodash@4.17.21", "random.predicate/v1"));
(await _matcher.IsPredicateAllowedAsync("pkg:npm/lodash@4.17.21", "evidence.stella/v1")).Should().BeTrue();
(await _matcher.IsPredicateAllowedAsync("pkg:npm/lodash@4.17.21", "random.predicate/v1")).Should().BeFalse();
}
[Theory]
@@ -190,19 +149,21 @@ public sealed class ProofChainRepositoryIntegrationTests
[InlineData("pkg:pypi/*", "pkg:npm/lodash@4.17.21", false)]
[InlineData("pkg:npm/@scope/*", "pkg:npm/@scope/package@1.0.0", true)]
[InlineData("pkg:npm/@scope/*", "pkg:npm/@other/package@1.0.0", false)]
public async Task FindMatchAsync_PatternVariations_MatchCorrectly(
string pattern, string purl, bool shouldMatch)
public async Task FindMatchAsync_PatternVariations_MatchCorrectly(string pattern, string purl, bool shouldMatch)
{
// Arrange
var anchor = CreateAnchor(pattern, ["key-1"]);
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
.ReturnsAsync([anchor]);
await SeedAnchors(anchor);
// Act
var result = await _matcher.FindMatchAsync(purl);
// Assert
Assert.Equal(shouldMatch, result != null);
(result != null).Should().Be(shouldMatch);
}
private Task SeedAnchors(params TrustAnchorEntity[] anchors)
{
_repository.GetActiveTrustAnchorsAsync(Arg.Any<CancellationToken>())
.Returns(Task.FromResult<IReadOnlyList<TrustAnchorEntity>>(anchors));
return Task.CompletedTask;
}
private static TrustAnchorEntity CreateAnchor(
@@ -217,7 +178,8 @@ public sealed class ProofChainRepositoryIntegrationTests
PurlPattern = pattern,
AllowedKeyIds = allowedKeys,
PolicyRef = policyRef,
RevokedKeys = revokedKeys ?? [],
RevokedKeys = revokedKeys ?? []
};
}
}

View File

@@ -1,631 +0,0 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) 2025 StellaOps Contributors
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using FluentAssertions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using NSubstitute;
using StellaOps.Attestor.ProofChain;
using StellaOps.Attestor.ProofChain.Statements;
using StellaOps.Attestor.ProofChain.Verification;
using Xunit;
namespace StellaOps.Attestor.ProofChain.Tests;
/// <summary>
/// Load tests for proof chain API endpoints and verification pipeline.
/// Exercises concurrent proof-spine creation, verification throughput, deterministic
/// ordering under parallelism, latency SLO budgets, and memory behavior for large batches.
/// Sprint: SPRINT_0501_0005_0001_proof_chain_api_surface
/// Task: PROOF-API-0012
/// </summary>
public class ApiLoadTests
{
    // Null logger: load tests do not assert on log output.
    private readonly ILogger<VerificationPipeline> _logger = NullLogger<VerificationPipeline>.Instance;

    #region Proof Spine Creation Load Tests

    /// <summary>
    /// Runs 50 concurrent clients x 20 ops each against a mocked spine builder and
    /// asserts zero errors plus throughput/latency budgets.
    /// NOTE(review): the throughput/latency thresholds are machine-dependent and may be
    /// flaky on loaded CI agents — confirm budgets against the target runner.
    /// </summary>
    [Fact]
    public async Task CreateProofSpine_ConcurrentRequests_MaintainsThroughput()
    {
        // Arrange: Create synthetic SBOM entries for load testing
        const int concurrencyLevel = 50;
        const int operationsPerClient = 20;
        var totalOperations = concurrencyLevel * operationsPerClient;
        var proofSpineBuilder = CreateTestProofSpineBuilder();
        var latencies = new ConcurrentBag<long>();
        var errors = new ConcurrentBag<Exception>();
        var stopwatch = Stopwatch.StartNew();
        // Act: Run concurrent proof spine creations
        var tasks = Enumerable.Range(0, concurrencyLevel)
            .Select(clientId => Task.Run(async () =>
            {
                for (var i = 0; i < operationsPerClient; i++)
                {
                    try
                    {
                        var sw = Stopwatch.StartNew();
                        var entryId = GenerateSyntheticEntryId(clientId, i);
                        var spine = await proofSpineBuilder.BuildAsync(
                            entryId,
                            GenerateSyntheticEvidenceIds(3),
                            $"sha256:{GenerateHash("reasoning")}",
                            $"sha256:{GenerateHash("vex")}",
                            "v2.3.1",
                            CancellationToken.None);
                        sw.Stop();
                        latencies.Add(sw.ElapsedMilliseconds);
                    }
                    catch (Exception ex)
                    {
                        // Collected rather than thrown so one failure does not abort the run.
                        errors.Add(ex);
                    }
                }
            }));
        await Task.WhenAll(tasks);
        stopwatch.Stop();
        // Assert: Verify load test metrics
        var successCount = latencies.Count;
        var errorCount = errors.Count;
        var throughput = successCount / stopwatch.Elapsed.TotalSeconds;
        var avgLatency = latencies.Any() ? latencies.Average() : 0;
        var p95Latency = CalculatePercentile(latencies, 95);
        var p99Latency = CalculatePercentile(latencies, 99);
        // Performance assertions
        successCount.Should().Be(totalOperations, "all operations should complete successfully");
        errorCount.Should().Be(0, "no errors should occur during load test");
        throughput.Should().BeGreaterThan(100, "throughput should exceed 100 ops/sec");
        avgLatency.Should().BeLessThan(50, "average latency should be under 50ms");
        p99Latency.Should().BeLessThan(200, "p99 latency should be under 200ms");
    }

    /// <summary>
    /// Runs 30 concurrent clients x 10 verifications each through the pipeline
    /// (all steps mocked to succeed) and asserts every result is valid.
    /// </summary>
    [Fact]
    public async Task VerificationPipeline_ConcurrentVerifications_MaintainsAccuracy()
    {
        // Arrange
        const int concurrencyLevel = 30;
        const int verificationsPerClient = 10;
        var totalVerifications = concurrencyLevel * verificationsPerClient;
        var mockDsseVerifier = CreateMockDsseVerifier();
        var mockIdRecomputer = CreateMockIdRecomputer();
        var mockRekorVerifier = CreateMockRekorVerifier();
        var pipeline = new VerificationPipeline(
            mockDsseVerifier,
            mockIdRecomputer,
            mockRekorVerifier,
            _logger);
        var results = new ConcurrentBag<VerificationResult>();
        var latencies = new ConcurrentBag<long>();
        // Act: Run concurrent verifications
        var tasks = Enumerable.Range(0, concurrencyLevel)
            .Select(clientId => Task.Run(async () =>
            {
                for (var i = 0; i < verificationsPerClient; i++)
                {
                    var sw = Stopwatch.StartNew();
                    var proof = GenerateSyntheticProof(clientId, i);
                    var result = await pipeline.VerifyAsync(proof, CancellationToken.None);
                    sw.Stop();
                    latencies.Add(sw.ElapsedMilliseconds);
                    results.Add(result);
                }
            }));
        await Task.WhenAll(tasks);
        // Assert: All verifications should be deterministic
        results.Count.Should().Be(totalVerifications);
        results.All(r => r.IsValid).Should().BeTrue("all synthetic proofs should verify successfully");
        var avgLatency = latencies.Average();
        avgLatency.Should().BeLessThan(30, "verification should be fast");
    }

    #endregion

    #region Deterministic Ordering Tests Under Load

    /// <summary>
    /// Hashes the same sorted evidence-id list from 100 parallel iterations and
    /// asserts a single distinct result (deterministic ordering).
    /// NOTE(review): 'seed'/'random' are declared but never used — candidates for removal.
    /// </summary>
    [Fact]
    public void ProofSpineOrdering_UnderConcurrency_RemainsDeterministic()
    {
        // Arrange: Same inputs should produce same outputs under concurrent access
        const int iterations = 100;
        var seed = 42;
        var random = new Random(seed);
        var evidenceIds = Enumerable.Range(0, 5)
            .Select(i => $"sha256:{GenerateHash($"evidence{i}")}")
            .ToArray();
        var results = new ConcurrentBag<string>();
        // Act: Compute proof spine hash concurrently multiple times
        Parallel.For(0, iterations, _ =>
        {
            var sorted = evidenceIds.OrderBy(x => x).ToArray();
            var combined = string.Join(":", sorted);
            var hash = GenerateHash(combined);
            results.Add(hash);
        });
        // Assert: All results should be identical (deterministic)
        results.Distinct().Count().Should().Be(1, "concurrent computations should be deterministic");
    }

    /// <summary>
    /// Builds a 1000-leaf Merkle tree 20 times in parallel and asserts a single
    /// distinct root (deterministic construction).
    /// NOTE(review): the async lambda contains no await (CS1998 warning) — the work is
    /// synchronous; Parallel.ForEach would express the intent without the warning.
    /// </summary>
    [Fact]
    public async Task MerkleTree_ConcurrentBuilding_ProducesSameRoot()
    {
        // Arrange
        const int leafCount = 1000;
        const int iterations = 20;
        var leaves = Enumerable.Range(0, leafCount)
            .Select(i => Encoding.UTF8.GetBytes($"leaf-{i:D5}"))
            .ToList();
        var roots = new ConcurrentBag<string>();
        // Act: Build Merkle tree concurrently
        await Parallel.ForEachAsync(Enumerable.Range(0, iterations), async (_, ct) =>
        {
            var builder = new MerkleTreeBuilder();
            foreach (var leaf in leaves)
            {
                builder.AddLeaf(leaf);
            }
            var root = builder.ComputeRoot();
            roots.Add(Convert.ToHexString(root));
        });
        // Assert: All roots should be identical
        roots.Distinct().Count().Should().Be(1, "Merkle tree root should be deterministic");
    }

    #endregion

    #region Throughput Benchmarks

    /// <summary>
    /// Simulates hash-only "API calls" at three load profiles and asserts an
    /// ops/sec floor. NOTE(review): threshold is machine-dependent.
    /// </summary>
    [Theory]
    [InlineData(10, 100)] // Light load
    [InlineData(50, 50)] // Medium load
    [InlineData(100, 20)] // Heavy load
    public async Task ThroughputBenchmark_VariousLoadProfiles(int concurrency, int opsPerClient)
    {
        // Arrange
        var totalOps = concurrency * opsPerClient;
        var successCount = 0;
        var stopwatch = Stopwatch.StartNew();
        // Act: Simulate API calls
        var tasks = Enumerable.Range(0, concurrency)
            .Select(_ => Task.Run(() =>
            {
                for (var i = 0; i < opsPerClient; i++)
                {
                    // Simulate proof creation work
                    var hash = GenerateHash($"proof-{Guid.NewGuid()}");
                    Interlocked.Increment(ref successCount);
                }
            }));
        await Task.WhenAll(tasks);
        stopwatch.Stop();
        // Assert
        var throughput = successCount / stopwatch.Elapsed.TotalSeconds;
        successCount.Should().Be(totalOps);
        throughput.Should().BeGreaterThan(1000, $"throughput at {concurrency} concurrency should exceed 1000 ops/sec");
    }

    /// <summary>
    /// Collects 1000 latency samples of simulated verification work (hash + 1ms delay)
    /// and asserts p50/p90/p99 against SLO budgets.
    /// NOTE(review): Task.Delay granularity (~15ms on some OSes) can dominate these
    /// samples — confirm the budgets hold on the target platform.
    /// </summary>
    [Fact]
    public async Task LatencyDistribution_UnderLoad_MeetsSloBudgets()
    {
        // Arrange: Define SLO budgets
        const double maxP50Ms = 10;
        const double maxP90Ms = 25;
        const double maxP99Ms = 100;
        const int sampleSize = 1000;
        var latencies = new ConcurrentBag<double>();
        // Act: Collect latency samples
        await Parallel.ForEachAsync(Enumerable.Range(0, sampleSize), async (i, ct) =>
        {
            var sw = Stopwatch.StartNew();
            // Simulate verification work
            var hash = GenerateHash($"sample-{i}");
            await Task.Delay(1, ct); // Simulate I/O
            sw.Stop();
            latencies.Add(sw.Elapsed.TotalMilliseconds);
        });
        // Calculate percentiles
        var sorted = latencies.OrderBy(x => x).ToList();
        var p50 = CalculatePercentileFromSorted(sorted, 50);
        var p90 = CalculatePercentileFromSorted(sorted, 90);
        var p99 = CalculatePercentileFromSorted(sorted, 99);
        // Assert: SLO compliance
        p50.Should().BeLessThan(maxP50Ms, "p50 latency should meet SLO");
        p90.Should().BeLessThan(maxP90Ms, "p90 latency should meet SLO");
        p99.Should().BeLessThan(maxP99Ms, "p99 latency should meet SLO");
    }

    #endregion

    #region Memory and Resource Tests

    /// <summary>
    /// Allocates 10k synthetic proof JSON strings and asserts bounded growth and
    /// release after clearing. NOTE(review): GC.GetTotalMemory-based bounds are
    /// inherently approximate and can vary by runtime/GC mode.
    /// </summary>
    [Fact]
    public void LargeProofBatch_DoesNotCauseMemorySpike()
    {
        // Arrange
        const int batchSize = 10_000;
        var initialMemory = GC.GetTotalMemory(true);
        // Act: Create large batch of proofs
        var proofs = new List<string>(batchSize);
        for (var i = 0; i < batchSize; i++)
        {
            var proof = GenerateSyntheticProofJson(i);
            proofs.Add(proof);
        }
        // Force GC and measure
        var peakMemory = GC.GetTotalMemory(false);
        proofs.Clear();
        GC.Collect();
        var finalMemory = GC.GetTotalMemory(true);
        // Assert: Memory should not grow unbounded
        var memoryGrowth = peakMemory - initialMemory;
        var memoryRetained = finalMemory - initialMemory;
        // Each proof is ~500 bytes, so 10k proofs ≈ 5MB is reasonable
        memoryGrowth.Should().BeLessThan(50_000_000, "memory growth should be bounded (~50MB max for 10k proofs)");
        memoryRetained.Should().BeLessThan(10_000_000, "memory should be released after clearing");
    }

    #endregion

    #region Helper Methods

    /// <summary>
    /// NSubstitute mock of <see cref="IProofSpineBuilder"/> that synthesizes a
    /// deterministic spine id (sha256 of the entry id) for any input.
    /// </summary>
    private static IProofSpineBuilder CreateTestProofSpineBuilder()
    {
        // Create a mock proof spine builder for load testing
        var builder = Substitute.For<IProofSpineBuilder>();
        builder.BuildAsync(
            Arg.Any<string>(),
            Arg.Any<string[]>(),
            Arg.Any<string>(),
            Arg.Any<string>(),
            Arg.Any<string>(),
            Arg.Any<CancellationToken>())
            .Returns(callInfo =>
            {
                var entryId = callInfo.ArgAt<string>(0);
                return Task.FromResult(new ProofSpine
                {
                    EntryId = entryId,
                    SpineId = $"sha256:{GenerateHash(entryId)}",
                    PolicyVersion = callInfo.ArgAt<string>(4),
                    CreatedAt = DateTimeOffset.UtcNow
                });
            });
        return builder;
    }

    /// <summary>Mock DSSE verifier that always reports a valid signature.</summary>
    private static IDsseVerifier CreateMockDsseVerifier()
    {
        var verifier = Substitute.For<IDsseVerifier>();
        verifier.VerifyAsync(Arg.Any<DsseEnvelope>(), Arg.Any<CancellationToken>())
            .Returns(Task.FromResult(new DsseVerificationResult { IsValid = true }));
        return verifier;
    }

    /// <summary>Mock ID recomputer that always reports a matching id.</summary>
    private static IIdRecomputer CreateMockIdRecomputer()
    {
        var recomputer = Substitute.For<IIdRecomputer>();
        recomputer.VerifyAsync(Arg.Any<ProofBundle>(), Arg.Any<CancellationToken>())
            .Returns(Task.FromResult(new IdVerificationResult { IsValid = true }));
        return recomputer;
    }

    /// <summary>Mock Rekor verifier that always reports inclusion.</summary>
    private static IRekorVerifier CreateMockRekorVerifier()
    {
        var verifier = Substitute.For<IRekorVerifier>();
        verifier.VerifyInclusionAsync(Arg.Any<RekorEntry>(), Arg.Any<CancellationToken>())
            .Returns(Task.FromResult(new RekorVerificationResult { IsValid = true }));
        return verifier;
    }

    /// <summary>Deterministic synthetic entry id: "sha256:&lt;hash&gt;:pkg:npm/example@1.0.&lt;index&gt;".</summary>
    private static string GenerateSyntheticEntryId(int clientId, int index)
    {
        var hash = GenerateHash($"entry-{clientId}-{index}");
        return $"sha256:{hash}:pkg:npm/example@1.0.{index}";
    }

    /// <summary>Generates <paramref name="count"/> deterministic "sha256:&lt;hash&gt;" evidence ids.</summary>
    private static string[] GenerateSyntheticEvidenceIds(int count)
    {
        return Enumerable.Range(0, count)
            .Select(i => $"sha256:{GenerateHash($"evidence-{i}")}")
            .ToArray();
    }

    /// <summary>Builds a minimal proof bundle with one base64-encoded payload and signature.</summary>
    private static ProofBundle GenerateSyntheticProof(int clientId, int index)
    {
        return new ProofBundle
        {
            EntryId = GenerateSyntheticEntryId(clientId, index),
            Envelope = new DsseEnvelope
            {
                PayloadType = "application/vnd.stellaops.proof+json",
                Payload = Convert.ToBase64String(Encoding.UTF8.GetBytes($"{{\"id\":\"{clientId}-{index}\"}}")),
                Signatures = new[]
                {
                    new DsseSignature
                    {
                        KeyId = "test-key",
                        Sig = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature"))
                    }
                }
            }
        };
    }

    /// <summary>Synthetic proof JSON (~500 bytes) used by the memory test.</summary>
    private static string GenerateSyntheticProofJson(int index)
    {
        return $@"{{
    ""entryId"": ""sha256:{GenerateHash($"entry-{index}")}:pkg:npm/example@1.0.{index}"",
    ""spineId"": ""sha256:{GenerateHash($"spine-{index}")}"",
    ""evidenceIds"": [""{GenerateHash($"ev1-{index}")}"", ""{GenerateHash($"ev2-{index}")}""],
    ""reasoningId"": ""sha256:{GenerateHash($"reason-{index}")}"",
    ""vexVerdictId"": ""sha256:{GenerateHash($"vex-{index}")}"",
    ""policyVersion"": ""v2.3.1"",
    ""createdAt"": ""{DateTimeOffset.UtcNow:O}""
}}";
    }

    /// <summary>Lower-case hex SHA-256 of the UTF-8 bytes of <paramref name="input"/>.</summary>
    private static string GenerateHash(string input)
    {
        var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        return Convert.ToHexString(bytes).ToLowerInvariant();
    }

    /// <summary>Percentile over an unsorted bag; returns 0 for an empty bag.</summary>
    private static double CalculatePercentile(ConcurrentBag<long> values, int percentile)
    {
        if (!values.Any()) return 0;
        var sorted = values.OrderBy(x => x).ToList();
        return CalculatePercentileFromSorted(sorted.Select(x => (double)x).ToList(), percentile);
    }

    /// <summary>
    /// Nearest-rank percentile over an already-sorted list; returns 0 for an empty list.
    /// </summary>
    private static double CalculatePercentileFromSorted<T>(List<T> sorted, int percentile) where T : IConvertible
    {
        if (sorted.Count == 0) return 0;
        var index = (int)Math.Ceiling(percentile / 100.0 * sorted.Count) - 1;
        // Clamp to a valid index for percentile edge values (0 and 100).
        index = Math.Max(0, Math.Min(index, sorted.Count - 1));
        return sorted[index].ToDouble(null);
    }

    #endregion
}
#region Supporting Types for Load Tests
/// <summary>
/// Interface for proof spine building (mock target for load tests).
/// </summary>
public interface IProofSpineBuilder
{
    /// <summary>
    /// Builds a proof spine linking an SBOM entry to its evidence, reasoning and VEX verdict.
    /// </summary>
    /// <param name="entryId">Content-addressed SBOM entry identifier.</param>
    /// <param name="evidenceIds">Content-addressed evidence identifiers.</param>
    /// <param name="reasoningId">Content-addressed reasoning identifier.</param>
    /// <param name="vexVerdictId">Content-addressed VEX verdict identifier.</param>
    /// <param name="policyVersion">Policy version the spine was evaluated under.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    /// <returns>The constructed <see cref="ProofSpine"/>.</returns>
    Task<ProofSpine> BuildAsync(
        string entryId,
        string[] evidenceIds,
        string reasoningId,
        string vexVerdictId,
        string policyVersion,
        CancellationToken cancellationToken);
}
/// <summary>
/// Represents a proof spine created for an SBOM entry.
/// </summary>
public class ProofSpine
{
    /// <summary>SBOM entry the spine belongs to.</summary>
    public required string EntryId { get; init; }

    /// <summary>Content-addressed identifier of the spine itself.</summary>
    public required string SpineId { get; init; }

    /// <summary>Policy version in effect when the spine was built.</summary>
    public required string PolicyVersion { get; init; }

    /// <summary>Creation timestamp.</summary>
    public required DateTimeOffset CreatedAt { get; init; }
}
/// <summary>
/// Interface for DSSE envelope verification.
/// </summary>
public interface IDsseVerifier
{
    /// <summary>Verifies the signatures on a DSSE envelope.</summary>
    /// <param name="envelope">Envelope to verify.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    /// <returns>Result indicating validity and any error detail.</returns>
    Task<DsseVerificationResult> VerifyAsync(DsseEnvelope envelope, CancellationToken cancellationToken);
}
/// <summary>
/// DSSE verification result.
/// </summary>
public class DsseVerificationResult
{
    /// <summary>True when the envelope's signature(s) verified successfully.</summary>
    public bool IsValid { get; init; }

    /// <summary>Failure detail; null on success.</summary>
    public string? Error { get; init; }
}
/// <summary>
/// Interface for ID recomputation verification.
/// </summary>
public interface IIdRecomputer
{
    /// <summary>Recomputes the bundle's content-addressed id and compares it to the claimed id.</summary>
    /// <param name="bundle">Proof bundle to check.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    /// <returns>Result carrying the expected/actual ids on mismatch.</returns>
    Task<IdVerificationResult> VerifyAsync(ProofBundle bundle, CancellationToken cancellationToken);
}
/// <summary>
/// ID verification result.
/// </summary>
public class IdVerificationResult
{
    /// <summary>True when the recomputed id matches the claimed id.</summary>
    public bool IsValid { get; init; }

    /// <summary>Id that was expected (recomputed); may be null when valid.</summary>
    public string? ExpectedId { get; init; }

    /// <summary>Id that was actually claimed; may be null when valid.</summary>
    public string? ActualId { get; init; }
}
/// <summary>
/// Interface for Rekor inclusion proof verification.
/// </summary>
public interface IRekorVerifier
{
    /// <summary>Verifies that a Rekor transparency-log entry is included in the log.</summary>
    /// <param name="entry">Rekor entry to verify.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    /// <returns>Result indicating inclusion validity.</returns>
    Task<RekorVerificationResult> VerifyInclusionAsync(RekorEntry entry, CancellationToken cancellationToken);
}
/// <summary>
/// Rekor verification result.
/// </summary>
public class RekorVerificationResult
{
    /// <summary>True when the inclusion proof verified successfully.</summary>
    public bool IsValid { get; init; }

    /// <summary>Log index of the verified entry, when available.</summary>
    public long? LogIndex { get; init; }

    /// <summary>Failure detail; null on success.</summary>
    public string? Error { get; init; }
}
/// <summary>
/// Represents a Rekor transparency log entry.
/// </summary>
public class RekorEntry
{
    /// <summary>Position of the entry in the transparency log.</summary>
    public long LogIndex { get; init; }

    /// <summary>Identifier of the log that holds the entry.</summary>
    public string? LogId { get; init; }

    /// <summary>Raw entry body.</summary>
    public string? Body { get; init; }

    /// <summary>Time the entry was integrated into the log.</summary>
    public DateTimeOffset IntegratedTime { get; init; }
}
/// <summary>
/// DSSE envelope for proof bundles.
/// </summary>
public class DsseEnvelope
{
    /// <summary>Media type of the payload (DSSE payloadType field).</summary>
    public required string PayloadType { get; init; }

    /// <summary>Base64-encoded payload bytes.</summary>
    public required string Payload { get; init; }

    /// <summary>Signatures over the envelope's pre-authentication encoding.</summary>
    public required DsseSignature[] Signatures { get; init; }
}
/// <summary>
/// DSSE signature within an envelope.
/// </summary>
public class DsseSignature
{
    /// <summary>Identifier of the key that produced the signature.</summary>
    public required string KeyId { get; init; }

    /// <summary>Base64-encoded signature bytes.</summary>
    public required string Sig { get; init; }
}
/// <summary>
/// Complete proof bundle for verification.
/// </summary>
public class ProofBundle
{
    /// <summary>SBOM entry the bundle proves.</summary>
    public required string EntryId { get; init; }

    /// <summary>Signed DSSE envelope carrying the proof payload.</summary>
    public required DsseEnvelope Envelope { get; init; }

    /// <summary>Optional transparency-log entry; when null, Rekor verification is skipped.</summary>
    public RekorEntry? RekorEntry { get; init; }
}
/// <summary>
/// Complete verification result from the pipeline.
/// </summary>
public class VerificationResult
{
    /// <summary>Overall outcome: true only when every executed step passed.</summary>
    public bool IsValid { get; init; }

    /// <summary>DSSE step result; null when the step did not run.</summary>
    public DsseVerificationResult? DsseResult { get; init; }

    /// <summary>ID recomputation step result; null when the step did not run.</summary>
    public IdVerificationResult? IdResult { get; init; }

    /// <summary>Rekor step result; null when skipped (no entry) or not reached.</summary>
    public RekorVerificationResult? RekorResult { get; init; }

    /// <summary>Human-readable failure description; null on success.</summary>
    public string? Error { get; init; }
}
/// <summary>
/// Verification pipeline that runs all verification steps in order:
/// DSSE signature check, content-addressed id recomputation, then (when a
/// transparency-log entry is present) Rekor inclusion verification.
/// Short-circuits on the first failing step, returning partial step results.
/// </summary>
public class VerificationPipeline
{
    private readonly IDsseVerifier _dsseVerifier;
    private readonly IIdRecomputer _idRecomputer;
    private readonly IRekorVerifier _rekorVerifier;
    private readonly ILogger<VerificationPipeline> _logger;

    /// <summary>
    /// Creates a pipeline over the three verification steps.
    /// </summary>
    /// <param name="dsseVerifier">Verifies DSSE envelope signatures.</param>
    /// <param name="idRecomputer">Recomputes and compares content-addressed ids.</param>
    /// <param name="rekorVerifier">Verifies Rekor inclusion proofs.</param>
    /// <param name="logger">Logger for failure diagnostics.</param>
    /// <exception cref="ArgumentNullException">Any dependency is null.</exception>
    public VerificationPipeline(
        IDsseVerifier dsseVerifier,
        IIdRecomputer idRecomputer,
        IRekorVerifier rekorVerifier,
        ILogger<VerificationPipeline> logger)
    {
        // Fail fast at construction instead of NullReferenceException mid-verification.
        ArgumentNullException.ThrowIfNull(dsseVerifier);
        ArgumentNullException.ThrowIfNull(idRecomputer);
        ArgumentNullException.ThrowIfNull(rekorVerifier);
        ArgumentNullException.ThrowIfNull(logger);
        _dsseVerifier = dsseVerifier;
        _idRecomputer = idRecomputer;
        _rekorVerifier = rekorVerifier;
        _logger = logger;
    }

    /// <summary>
    /// Verifies a proof bundle end-to-end.
    /// </summary>
    /// <param name="bundle">Bundle to verify.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    /// <returns>
    /// A <see cref="VerificationResult"/> with per-step results populated up to the
    /// first failure; <see cref="VerificationResult.IsValid"/> is true only when all
    /// executed steps passed.
    /// </returns>
    /// <exception cref="ArgumentNullException"><paramref name="bundle"/> is null.</exception>
    public async Task<VerificationResult> VerifyAsync(ProofBundle bundle, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(bundle);

        // Step 1: DSSE signature verification
        var dsseResult = await _dsseVerifier.VerifyAsync(bundle.Envelope, cancellationToken);
        if (!dsseResult.IsValid)
        {
            _logger.LogWarning("DSSE verification failed for entry {EntryId}: {Error}", bundle.EntryId, dsseResult.Error);
            return new VerificationResult
            {
                IsValid = false,
                DsseResult = dsseResult,
                Error = $"DSSE verification failed: {dsseResult.Error}"
            };
        }

        // Step 2: ID recomputation
        var idResult = await _idRecomputer.VerifyAsync(bundle, cancellationToken);
        if (!idResult.IsValid)
        {
            _logger.LogWarning(
                "ID mismatch for entry {EntryId}: expected {ExpectedId}, got {ActualId}",
                bundle.EntryId, idResult.ExpectedId, idResult.ActualId);
            return new VerificationResult
            {
                IsValid = false,
                DsseResult = dsseResult,
                IdResult = idResult,
                Error = $"ID mismatch: expected {idResult.ExpectedId}, got {idResult.ActualId}"
            };
        }

        // Step 3: Rekor inclusion (if entry present); absence of an entry is not a failure.
        RekorVerificationResult? rekorResult = null;
        if (bundle.RekorEntry != null)
        {
            rekorResult = await _rekorVerifier.VerifyInclusionAsync(bundle.RekorEntry, cancellationToken);
            if (!rekorResult.IsValid)
            {
                _logger.LogWarning("Rekor verification failed for entry {EntryId}: {Error}", bundle.EntryId, rekorResult.Error);
                return new VerificationResult
                {
                    IsValid = false,
                    DsseResult = dsseResult,
                    IdResult = idResult,
                    RekorResult = rekorResult,
                    Error = $"Rekor verification failed: {rekorResult.Error}"
                };
            }
        }

        return new VerificationResult
        {
            IsValid = true,
            DsseResult = dsseResult,
            IdResult = idResult,
            RekorResult = rekorResult
        };
    }
}
#endregion

View File

@@ -18,7 +18,7 @@ public class ContentAddressedIdGeneratorTests
public ContentAddressedIdGeneratorTests()
{
var canonicalizer = new JsonCanonicalizer();
var canonicalizer = new Rfc8785JsonCanonicalizer();
var merkleBuilder = new DeterministicMerkleTreeBuilder();
_generator = new ContentAddressedIdGenerator(canonicalizer, merkleBuilder);
}
@@ -117,8 +117,8 @@ public class ContentAddressedIdGeneratorTests
[Fact]
public void ComputeVexVerdictId_DifferentStatus_ProducesDifferentId()
{
var predicate1 = CreateTestVexPredicate() with { Status = VexStatus.Affected };
var predicate2 = CreateTestVexPredicate() with { Status = VexStatus.NotAffected };
var predicate1 = CreateTestVexPredicate() with { Status = "affected" };
var predicate2 = CreateTestVexPredicate() with { Status = "not_affected" };
var id1 = _generator.ComputeVexVerdictId(predicate1);
var id2 = _generator.ComputeVexVerdictId(predicate2);
@@ -152,8 +152,8 @@ public class ContentAddressedIdGeneratorTests
var vexVerdictId = CreateTestVexVerdictId();
// Different order, should produce same result
var unsorted = new[] { CreateTestEvidenceId("z"), CreateTestEvidenceId("a") };
var sorted = new[] { CreateTestEvidenceId("a"), CreateTestEvidenceId("z") };
var unsorted = new[] { CreateTestEvidenceId("f"), CreateTestEvidenceId("a") };
var sorted = new[] { CreateTestEvidenceId("a"), CreateTestEvidenceId("f") };
var id1 = _generator.ComputeProofBundleId(sbomEntryId, unsorted, reasoningId, vexVerdictId);
var id2 = _generator.ComputeProofBundleId(sbomEntryId, sorted, reasoningId, vexVerdictId);
@@ -272,9 +272,9 @@ public class ContentAddressedIdGeneratorTests
SbomEntryId = "sha256:sbom123:pkg:npm/lodash@4.17.21",
EvidenceIds = ["sha256:evidence1", "sha256:evidence2"],
PolicyVersion = "v2024.12.16",
Inputs = new ReasoningInputs
Inputs = new Dictionary<string, object>
{
CurrentEvaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero)
["currentEvaluationTime"] = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero)
}
};
@@ -282,12 +282,14 @@ public class ContentAddressedIdGeneratorTests
{
SbomEntryId = "sha256:sbom123:pkg:npm/lodash@4.17.21",
VulnerabilityId = "CVE-2024-1234",
Status = VexStatus.NotAffected,
Justification = "Vulnerable code is not in execution path"
Status = "not_affected",
Justification = "vulnerable_code_not_present",
PolicyVersion = "v2024.12.16",
ReasoningId = "sha256:reasoning1"
};
private static SbomEntryId CreateTestSbomEntryId() =>
new("sha256:sbom123", "pkg:npm/lodash", "4.17.21");
new($"sha256:{new string('0', 64)}", "pkg:npm/lodash", "4.17.21");
private static EvidenceId CreateTestEvidenceId(string suffix) =>
new($"a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6{suffix.PadLeft(4, '0')}"[..64]);

View File

@@ -43,16 +43,22 @@ public class ContentAddressedIdTests
}
[Theory]
[InlineData("")]
[InlineData(" ")]
[InlineData("invalid")]
[InlineData(":digest")]
[InlineData("algo:")]
public void Parse_InvalidFormat_Throws(string input)
public void Parse_InvalidFormat_ThrowsFormatException(string input)
{
Assert.Throws<FormatException>(() => ContentAddressedId.Parse(input));
}
[Theory]
[InlineData("")]
[InlineData(" ")]
public void Parse_EmptyOrWhitespace_ThrowsArgumentException(string input)
{
Assert.Throws<ArgumentException>(() => ContentAddressedId.Parse(input));
}
[Fact]
public void Parse_InvalidDigestLength_Throws()
{
@@ -68,26 +74,6 @@ public class ContentAddressedIdTests
Assert.Equal(input, id.ToString());
}
[Fact]
public void TrySplit_ValidInput_ReturnsTrue()
{
var valid = ContentAddressedId.TrySplit(
"sha256:a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4e5f6a1b2",
out var algorithm,
out var digest);
Assert.True(valid);
Assert.Equal("sha256", algorithm);
Assert.NotEmpty(digest);
}
[Fact]
public void TrySplit_InvalidInput_ReturnsFalse()
{
var valid = ContentAddressedId.TrySplit("invalid", out _, out _);
Assert.False(valid);
}
}
public class EvidenceIdTests
@@ -153,12 +139,14 @@ public class ProofBundleIdTests
public class SbomEntryIdTests
{
private static readonly string SbomDigest = $"sha256:{new string('a', 64)}";
[Fact]
public void Constructor_WithVersion_CreatesId()
{
var id = new SbomEntryId("sha256:abc123", "pkg:npm/lodash", "4.17.21");
var id = new SbomEntryId(SbomDigest, "pkg:npm/lodash", "4.17.21");
Assert.Equal("sha256:abc123", id.SbomDigest);
Assert.Equal(SbomDigest, id.SbomDigest);
Assert.Equal("pkg:npm/lodash", id.Purl);
Assert.Equal("4.17.21", id.Version);
}
@@ -166,9 +154,9 @@ public class SbomEntryIdTests
[Fact]
public void Constructor_WithoutVersion_CreatesId()
{
var id = new SbomEntryId("sha256:abc123", "pkg:npm/lodash");
var id = new SbomEntryId(SbomDigest, "pkg:npm/lodash");
Assert.Equal("sha256:abc123", id.SbomDigest);
Assert.Equal(SbomDigest, id.SbomDigest);
Assert.Equal("pkg:npm/lodash", id.Purl);
Assert.Null(id.Version);
}
@@ -176,15 +164,15 @@ public class SbomEntryIdTests
[Fact]
public void ToString_WithVersion_IncludesVersion()
{
var id = new SbomEntryId("sha256:abc123", "pkg:npm/lodash", "4.17.21");
Assert.Equal("sha256:abc123:pkg:npm/lodash@4.17.21", id.ToString());
var id = new SbomEntryId(SbomDigest, "pkg:npm/lodash", "4.17.21");
Assert.Equal($"{SbomDigest}:pkg:npm/lodash@4.17.21", id.ToString());
}
[Fact]
public void ToString_WithoutVersion_OmitsVersion()
{
var id = new SbomEntryId("sha256:abc123", "pkg:npm/lodash");
Assert.Equal("sha256:abc123:pkg:npm/lodash", id.ToString());
var id = new SbomEntryId(SbomDigest, "pkg:npm/lodash");
Assert.Equal($"{SbomDigest}:pkg:npm/lodash", id.ToString());
}
}

View File

@@ -6,18 +6,14 @@
// -----------------------------------------------------------------------------
using System.Text;
using System.Text.Json;
using StellaOps.Attestor.ProofChain.Json;
namespace StellaOps.Attestor.ProofChain.Tests;
public class JsonCanonicalizerTests
public sealed class JsonCanonicalizerTests
{
private readonly IJsonCanonicalizer _canonicalizer;
public JsonCanonicalizerTests()
{
_canonicalizer = new JsonCanonicalizer();
}
private readonly IJsonCanonicalizer _canonicalizer = new Rfc8785JsonCanonicalizer();
[Fact]
public void Canonicalize_SortsKeys()
@@ -29,9 +25,8 @@ public class JsonCanonicalizerTests
Assert.Contains("\"a\":", outputStr);
Assert.Contains("\"z\":", outputStr);
// Verify 'a' comes before 'z'
var aIndex = outputStr.IndexOf("\"a\":");
var zIndex = outputStr.IndexOf("\"z\":");
var aIndex = outputStr.IndexOf("\"a\":", StringComparison.Ordinal);
var zIndex = outputStr.IndexOf("\"z\":", StringComparison.Ordinal);
Assert.True(aIndex < zIndex, "Keys should be sorted alphabetically");
}
@@ -43,17 +38,18 @@ public class JsonCanonicalizerTests
var outputStr = Encoding.UTF8.GetString(output);
Assert.DoesNotContain(" ", outputStr);
Assert.Equal("{\"key\":\"value\"}", outputStr);
}
[Fact]
public void Canonicalize_PreservesUtf8()
public void Canonicalize_PreservesUnicodeContent()
{
var input = """{"text": "hello 🌍"}"""u8;
var text = "hello 世界 \U0001F30D";
var input = JsonSerializer.SerializeToUtf8Bytes(new { text });
var output = _canonicalizer.Canonicalize(input);
var outputStr = Encoding.UTF8.GetString(output);
Assert.Contains("世界", outputStr);
Assert.Contains("🌍", outputStr);
using var document = JsonDocument.Parse(output);
Assert.Equal(text, document.RootElement.GetProperty("text").GetString());
}
[Fact]
@@ -67,20 +63,6 @@ public class JsonCanonicalizerTests
Assert.Equal(output1, output2);
}
[Fact]
public void Canonicalize_NestedObjects_SortsAllLevels()
{
var input = """{"outer": {"z": 1, "a": 2}, "inner": {"y": 3, "b": 4}}"""u8;
var output = _canonicalizer.Canonicalize(input);
var outputStr = Encoding.UTF8.GetString(output);
// Check that nested keys are also sorted
var nestedA = outputStr.IndexOf("\"a\":");
var nestedZ = outputStr.IndexOf("\"z\":");
Assert.True(nestedA < nestedZ, "Nested keys should be sorted");
}
[Fact]
public void Canonicalize_Arrays_PreservesOrder()
{
@@ -91,16 +73,6 @@ public class JsonCanonicalizerTests
Assert.Contains("[3,1,2]", outputStr);
}
[Fact]
public void Canonicalize_NullValue_Preserved()
{
var input = """{"key": null}"""u8;
var output = _canonicalizer.Canonicalize(input);
var outputStr = Encoding.UTF8.GetString(output);
Assert.Contains("null", outputStr);
}
[Fact]
public void Canonicalize_BooleanValues_LowerCase()
{
@@ -114,18 +86,6 @@ public class JsonCanonicalizerTests
Assert.DoesNotContain("False", outputStr);
}
[Fact]
public void Canonicalize_Numbers_MinimalRepresentation()
{
var input = """{"integer": 42, "float": 3.14, "zero": 0}"""u8;
var output = _canonicalizer.Canonicalize(input);
var outputStr = Encoding.UTF8.GetString(output);
Assert.Contains("42", outputStr);
Assert.Contains("3.14", outputStr);
Assert.Contains("0", outputStr);
}
[Fact]
public void Canonicalize_EmptyObject_ReturnsEmptyBraces()
{
@@ -135,90 +95,5 @@ public class JsonCanonicalizerTests
var outputStr = Encoding.UTF8.GetString(output);
Assert.Equal("{}", outputStr);
}
[Fact]
public void Canonicalize_EmptyArray_ReturnsEmptyBrackets()
{
var input = """{"arr": []}"""u8;
var output = _canonicalizer.Canonicalize(input);
var outputStr = Encoding.UTF8.GetString(output);
Assert.Contains("[]", outputStr);
}
[Fact]
public void Canonicalize_StringEscaping_Preserved()
{
var input = """{"text": "line1\nline2\ttab"}"""u8;
var output = _canonicalizer.Canonicalize(input);
var outputStr = Encoding.UTF8.GetString(output);
Assert.Contains("\\n", outputStr);
Assert.Contains("\\t", outputStr);
}
[Theory]
[InlineData("""{"a":1}""")]
[InlineData("""{"a":1,"b":2}""")]
[InlineData("""{"nested":{"key":"value"}}""")]
[InlineData("""{"array":[1,2,3]}""")]
public void Canonicalize_AlreadyCanonical_Unchanged(string input)
{
var inputBytes = Encoding.UTF8.GetBytes(input);
var output = _canonicalizer.Canonicalize(inputBytes);
var outputStr = Encoding.UTF8.GetString(output);
Assert.Equal(input, outputStr);
}
[Fact]
public void Canonicalize_ComplexNesting_Deterministic()
{
var input = """
{
"level1": {
"z": {
"y": 1,
"x": 2
},
"a": {
"b": 3,
"a": 4
}
},
"array": [
{"z": 1, "a": 2},
{"y": 3, "b": 4}
]
}
"""u8;
var output1 = _canonicalizer.Canonicalize(input);
var output2 = _canonicalizer.Canonicalize(input);
Assert.Equal(output1, output2);
var outputStr = Encoding.UTF8.GetString(output1);
Assert.DoesNotContain("\n", outputStr);
Assert.DoesNotContain(" ", outputStr);
}
[Fact]
public void CanonicalizeDifferentWhitespace_ProducesSameOutput()
{
var input1 = """{"key":"value"}"""u8;
var input2 = """{ "key" : "value" }"""u8;
var input3 = """
{
"key": "value"
}
"""u8;
var output1 = _canonicalizer.Canonicalize(input1);
var output2 = _canonicalizer.Canonicalize(input2);
var output3 = _canonicalizer.Canonicalize(input3);
Assert.Equal(output1, output2);
Assert.Equal(output2, output3);
}
}

View File

@@ -104,14 +104,11 @@ public class MerkleTreeBuilderTests
}
[Fact]
public void ComputeMerkleRoot_EmptyLeaves_ReturnsEmptyOrZeroHash()
public void ComputeMerkleRoot_EmptyLeaves_Throws()
{
var leaves = Array.Empty<ReadOnlyMemory<byte>>();
// Should handle gracefully (either empty or zero hash)
var root = _builder.ComputeMerkleRoot(leaves);
Assert.NotNull(root);
Assert.Throws<ArgumentException>(() => _builder.ComputeMerkleRoot(leaves));
}
[Fact]

View File

@@ -243,7 +243,7 @@ public class ProofSpineAssemblyIntegrationTests
leaves.Add(Encoding.UTF8.GetBytes(vexVerdictId));
// Build merkle tree
return _builder.ComputeMerkleRoot(leaves.ToArray());
return _builder.ComputeMerkleRoot(leaves);
}
private static string FormatAsId(byte[] hash)
@@ -251,65 +251,3 @@ public class ProofSpineAssemblyIntegrationTests
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
}
/// <summary>
/// Interface for merkle tree building.
/// </summary>
public interface IMerkleTreeBuilder
{
    /// <summary>
    /// Computes a merkle root over <paramref name="leaves"/>.
    /// </summary>
    byte[] ComputeMerkleRoot(ReadOnlyMemory<byte>[] leaves);
}

/// <summary>
/// Deterministic merkle tree builder using SHA-256.
/// Leaves are hashed individually, the leaf level is padded to a power of two
/// by duplicating the last leaf hash, and parents are SHA256(left || right).
/// </summary>
public class DeterministicMerkleTreeBuilder : IMerkleTreeBuilder
{
    /// <summary>
    /// Computes the SHA-256 merkle root of <paramref name="leaves"/>.
    /// Returns a 32-byte zero hash for an empty input so empty trees still
    /// yield a stable, deterministic value.
    /// </summary>
    public byte[] ComputeMerkleRoot(ReadOnlyMemory<byte>[] leaves)
    {
        if (leaves.Length == 0)
        {
            return new byte[32]; // Zero hash sentinel for the empty tree.
        }

        // Hash every leaf. The static HashData API avoids allocating a
        // disposable SHA256 instance and avoids copying each leaf to a new
        // array (the previous ComputeHash(leaf.ToArray()) pattern).
        var currentLevel = new List<byte[]>(leaves.Length);
        foreach (var leaf in leaves)
        {
            currentLevel.Add(System.Security.Cryptography.SHA256.HashData(leaf.Span));
        }

        // Pad to a power of two by duplicating the last leaf hash so the
        // pairwise reduction below always sees an even, full level.
        while (!IsPowerOfTwo(currentLevel.Count))
        {
            currentLevel.Add(currentLevel[^1]);
        }

        // Reduce bottom-up: each parent node is SHA256(left || right).
        while (currentLevel.Count > 1)
        {
            var nextLevel = new List<byte[]>(currentLevel.Count / 2);
            for (int i = 0; i < currentLevel.Count; i += 2)
            {
                var left = currentLevel[i];
                var right = currentLevel[i + 1];

                var combined = new byte[left.Length + right.Length];
                Buffer.BlockCopy(left, 0, combined, 0, left.Length);
                Buffer.BlockCopy(right, 0, combined, left.Length, right.Length);

                nextLevel.Add(System.Security.Cryptography.SHA256.HashData(combined));
            }
            currentLevel = nextLevel;
        }

        return currentLevel[0];
    }

    // True when n is a positive power of two (single bit set).
    private static bool IsPowerOfTwo(int n) => n > 0 && (n & (n - 1)) == 0;
}

View File

@@ -0,0 +1,122 @@
using FluentAssertions;
using Org.BouncyCastle.Crypto.Parameters;
using StellaOps.Attestor.Envelope;
using StellaOps.Attestor.ProofChain.Builders;
using StellaOps.Attestor.ProofChain.Json;
using StellaOps.Attestor.ProofChain.Signing;
using StellaOps.Attestor.ProofChain.Statements;
namespace StellaOps.Attestor.ProofChain.Tests.Signing;
public sealed class ProofChainSignerTests
{
    // Fixed wall-clock value so statement content (and therefore signatures)
    // is identical on every run.
    private static readonly DateTimeOffset FixedTime = new(2025, 12, 17, 0, 0, 0, TimeSpan.Zero);

    [Fact]
    public async Task SignThenVerify_EvidenceStatement_Passes()
    {
        var (signer, keyId) = CreateSigner();
        var statement = CreateEvidenceStatement(evidenceId: $"sha256:{new string('0', 64)}");

        var signed = await signer.SignStatementAsync(statement, SigningKeyProfile.Evidence);

        // The envelope must use the standard in-toto payload type and carry
        // exactly one signature bound to the signing key.
        signed.PayloadType.Should().Be(ProofChainSigner.InTotoPayloadType);
        signed.Signatures.Should().ContainSingle();
        signed.Signatures[0].KeyId.Should().Be(keyId);
        signed.Signatures[0].Sig.Should().NotBeNullOrWhiteSpace();
        signed.Payload.Should().NotBeNullOrWhiteSpace();

        var verification = await signer.VerifyEnvelopeAsync(signed, new[] { keyId });
        verification.IsValid.Should().BeTrue();
        verification.KeyId.Should().Be(keyId);
    }

    [Fact]
    public async Task Verify_TamperedPayload_Fails()
    {
        var (signer, keyId) = CreateSigner();
        var statement = CreateEvidenceStatement(evidenceId: $"sha256:{new string('1', 64)}");
        var envelope = await signer.SignStatementAsync(statement, SigningKeyProfile.Evidence);

        // Flip the final payload byte so the signature no longer matches.
        var payloadBytes = Convert.FromBase64String(envelope.Payload);
        payloadBytes[^1] ^= 0xff;
        var tampered = envelope with { Payload = Convert.ToBase64String(payloadBytes) };

        var verification = await signer.VerifyEnvelopeAsync(tampered, new[] { keyId });
        verification.IsValid.Should().BeFalse();
    }

    [Fact]
    public async Task CrossPlatformVector_Ed25519Signature_IsStable()
    {
        var (signer, keyId) = CreateSigner(keyIdOverride: "test-key");
        var statement = CreateEvidenceStatement(evidenceId: $"sha256:{new string('2', 64)}");

        var envelope = await signer.SignStatementAsync(statement, SigningKeyProfile.Evidence);

        envelope.Signatures[0].KeyId.Should().Be(keyId);
        // Filled in after the first successful run to lock the vector across platforms/implementations.
        const string expectedSig = "zJtzdRX76ENKf4IePv5AyTxqdS2YlVMcseaw2UBh1eBhfarUNq2AdiKyxVMWPftSy2uJJGfo7R7BilQO+Xj8AA==";
        envelope.Signatures[0].Sig.Should().Be(expectedSig);
    }

    // Builds a fully-populated evidence statement with deterministic field
    // values; only the evidence id varies per test.
    private static EvidenceStatement CreateEvidenceStatement(string evidenceId)
    {
        var subject = new ProofSubject
        {
            Name = "image:demo",
            Digest = new Dictionary<string, string> { ["sha256"] = "abc123" }
        };

        var predicate = new EvidencePayload
        {
            Source = "trivy",
            SourceVersion = "0.50.0",
            CollectionTime = FixedTime,
            SbomEntryId = "sha256:sbom:pkg:npm/lodash@4.17.21",
            VulnerabilityId = "CVE-2025-1234",
            RawFinding = new { severity = "high" },
            EvidenceId = evidenceId
        };

        return new StatementBuilder().BuildEvidenceStatement(subject, predicate);
    }

    // Creates a signer backed by a deterministic Ed25519 key (seed bytes 0..31)
    // so signatures are reproducible across runs and platforms.
    private static (IProofChainSigner Signer, string KeyId) CreateSigner(string? keyIdOverride = null)
    {
        var seed = new byte[32];
        for (var i = 0; i < seed.Length; i++)
        {
            seed[i] = (byte)i;
        }

        var privateKey = new Ed25519PrivateKeyParameters(seed, 0);
        var key = EnvelopeKey.CreateEd25519Signer(
            seed,
            privateKey.GeneratePublicKey().GetEncoded(),
            keyId: keyIdOverride ?? "proofchain-test-key");

        var keyStore = new StaticKeyStore(new Dictionary<SigningKeyProfile, EnvelopeKey>
        {
            [SigningKeyProfile.Evidence] = key
        });

        return (new ProofChainSigner(keyStore, new Rfc8785JsonCanonicalizer()), key.KeyId);
    }

    // In-memory key store: signing keys indexed by profile, verification keys
    // indexed by key id (ordinal comparison, ids are machine identifiers).
    private sealed class StaticKeyStore : IProofChainKeyStore
    {
        private readonly IReadOnlyDictionary<SigningKeyProfile, EnvelopeKey> _byProfile;
        private readonly IReadOnlyDictionary<string, EnvelopeKey> _byKeyId;

        public StaticKeyStore(IReadOnlyDictionary<SigningKeyProfile, EnvelopeKey> signingKeys)
        {
            _byProfile = signingKeys;
            _byKeyId = signingKeys.Values.ToDictionary(static k => k.KeyId, static k => k, StringComparer.Ordinal);
        }

        public bool TryGetSigningKey(SigningKeyProfile profile, out EnvelopeKey key)
            => _byProfile.TryGetValue(profile, out key!);

        public bool TryGetVerificationKey(string keyId, out EnvelopeKey key)
            => _byKeyId.TryGetValue(keyId, out key!);
    }
}

View File

@@ -8,191 +8,130 @@ using StellaOps.Attestor.ProofChain.Statements;
namespace StellaOps.Attestor.ProofChain.Tests.Statements;
/// <summary>
/// Unit tests for all DSSE statement types (Task PROOF-PRED-0012).
/// Unit tests for proof chain statement construction (Task PROOF-PRED-0012).
/// </summary>
public class StatementBuilderTests
public sealed class StatementBuilderTests
{
private readonly StatementBuilder _builder = new();
private readonly DateTimeOffset _fixedTime = new(2025, 12, 16, 10, 0, 0, TimeSpan.Zero);
[Fact]
public void BuildEvidenceStatement_SetsPredicateType()
public void BuildEvidenceStatement_SetsPredicateTypeAndSubject()
{
var statement = _builder.BuildEvidenceStatement(
subject: new InTotoSubject { Name = "test-artifact", Digest = new() { ["sha256"] = "abc123" } },
source: "trivy",
sourceVersion: "0.50.0",
collectionTime: _fixedTime,
sbomEntryId: "sbom-123");
var subject = CreateSubject("image:demo", "abc123");
var predicate = new EvidencePayload
{
Source = "trivy",
SourceVersion = "0.50.0",
CollectionTime = _fixedTime,
SbomEntryId = "sha256:sbom:pkg:npm/lodash@4.17.21",
VulnerabilityId = "CVE-2025-1234",
RawFinding = new { severity = "high" },
EvidenceId = $"sha256:{new string('0', 64)}"
};
var statement = _builder.BuildEvidenceStatement(subject, predicate);
Assert.Equal("evidence.stella/v1", statement.PredicateType);
Assert.Equal("https://in-toto.io/Statement/v1", statement.Type);
}
[Fact]
public void BuildEvidenceStatement_PopulatesPredicate()
{
var statement = _builder.BuildEvidenceStatement(
subject: new InTotoSubject { Name = "test-artifact", Digest = new() { ["sha256"] = "abc123" } },
source: "trivy",
sourceVersion: "0.50.0",
collectionTime: _fixedTime,
sbomEntryId: "sbom-123",
vulnerabilityId: "CVE-2025-1234");
Assert.Equal("evidence.stella/v1", statement.PredicateType);
Assert.Single(statement.Subject);
Assert.Equal(subject.Name, statement.Subject[0].Name);
Assert.Equal("abc123", statement.Subject[0].Digest["sha256"]);
Assert.Equal("trivy", statement.Predicate.Source);
Assert.Equal("0.50.0", statement.Predicate.SourceVersion);
Assert.Equal(_fixedTime, statement.Predicate.CollectionTime);
Assert.Equal("sbom-123", statement.Predicate.SbomEntryId);
Assert.Equal("CVE-2025-1234", statement.Predicate.VulnerabilityId);
}
[Fact]
public void BuildProofSpineStatement_SetsPredicateType()
public void BuildSbomLinkageStatement_SetsAllSubjects()
{
var statement = _builder.BuildProofSpineStatement(
subject: new InTotoSubject { Name = "image:v1.0", Digest = new() { ["sha256"] = "abc123" } },
spineAlgorithm: "sha256-merkle",
rootHash: "root-hash",
leafHashes: ["leaf1", "leaf2", "leaf3"]);
var subjects = new[]
{
CreateSubject("image:demo", "abc123"),
CreateSubject("pkg:npm/lodash@4.17.21", "def456"),
};
Assert.Equal("proofspine.stella/v1", statement.PredicateType);
var predicate = new SbomLinkagePayload
{
Sbom = new SbomDescriptor
{
Id = "sbom-1",
Format = "cyclonedx",
SpecVersion = "1.6",
MediaType = "application/vnd.cyclonedx+json",
Sha256 = new string('1', 64),
Location = "file:///sboms/demo.json"
},
Generator = new GeneratorDescriptor
{
Name = "stellaops-sbomgen",
Version = "0.1.0"
},
GeneratedAt = _fixedTime,
Tags = new Dictionary<string, string> { ["env"] = "test" }
};
var statement = _builder.BuildSbomLinkageStatement(subjects, predicate);
Assert.Equal("https://stella-ops.org/predicates/sbom-linkage/v1", statement.PredicateType);
Assert.Equal(2, statement.Subject.Count);
Assert.Equal(subjects[0].Name, statement.Subject[0].Name);
Assert.Equal(subjects[1].Name, statement.Subject[1].Name);
}
[Fact]
public void BuildProofSpineStatement_ContainsLeafHashes()
public void BuildSbomLinkageStatement_EmptySubjects_Throws()
{
var leafHashes = new[] { "hash1", "hash2", "hash3", "hash4" };
var statement = _builder.BuildProofSpineStatement(
subject: new InTotoSubject { Name = "image:v1.0", Digest = new() { ["sha256"] = "abc123" } },
spineAlgorithm: "sha256-merkle",
rootHash: "merkle-root",
leafHashes: leafHashes);
var predicate = new SbomLinkagePayload
{
Sbom = new SbomDescriptor
{
Id = "sbom-1",
Format = "cyclonedx",
SpecVersion = "1.6",
MediaType = "application/vnd.cyclonedx+json",
Sha256 = new string('1', 64)
},
Generator = new GeneratorDescriptor
{
Name = "stellaops-sbomgen",
Version = "0.1.0"
},
GeneratedAt = _fixedTime
};
Assert.Equal("sha256-merkle", statement.Predicate.Algorithm);
Assert.Equal("merkle-root", statement.Predicate.RootHash);
Assert.Equal(4, statement.Predicate.LeafHashes.Length);
}
[Fact]
public void BuildVexVerdictStatement_SetsPredicateType()
{
var statement = _builder.BuildVexVerdictStatement(
subject: new InTotoSubject { Name = "pkg:npm/lodash@4.17.21", Digest = new() { ["sha256"] = "abc123" } },
vulnerabilityId: "CVE-2025-1234",
vexStatus: "not_affected",
justification: "vulnerable_code_not_present",
analysisTime: _fixedTime);
Assert.Equal("vexverdict.stella/v1", statement.PredicateType);
}
[Fact]
public void BuildVexVerdictStatement_PopulatesVexDetails()
{
var statement = _builder.BuildVexVerdictStatement(
subject: new InTotoSubject { Name = "pkg:npm/lodash@4.17.21", Digest = new() { ["sha256"] = "abc123" } },
vulnerabilityId: "CVE-2025-1234",
vexStatus: "not_affected",
justification: "vulnerable_code_not_present",
analysisTime: _fixedTime);
Assert.Equal("CVE-2025-1234", statement.Predicate.VulnerabilityId);
Assert.Equal("not_affected", statement.Predicate.Status);
Assert.Equal("vulnerable_code_not_present", statement.Predicate.Justification);
}
[Fact]
public void BuildReasoningStatement_SetsPredicateType()
{
var statement = _builder.BuildReasoningStatement(
subject: new InTotoSubject { Name = "finding:123", Digest = new() { ["sha256"] = "abc123" } },
reasoningType: "exploitability",
conclusion: "not_exploitable",
evidenceRefs: ["evidence1", "evidence2"]);
Assert.Equal("reasoning.stella/v1", statement.PredicateType);
}
[Fact]
public void BuildVerdictReceiptStatement_SetsPredicateType()
{
var statement = _builder.BuildVerdictReceiptStatement(
subject: new InTotoSubject { Name = "scan:456", Digest = new() { ["sha256"] = "abc123" } },
verdictHash: "verdict-hash",
verdictTime: _fixedTime,
signatureAlgorithm: "ECDSA-P256");
Assert.Equal("verdictreceipt.stella/v1", statement.PredicateType);
}
[Fact]
public void BuildSbomLinkageStatement_SetsPredicateType()
{
var statement = _builder.BuildSbomLinkageStatement(
subject: new InTotoSubject { Name = "image:v1.0", Digest = new() { ["sha256"] = "abc123" } },
sbomDigest: "sbom-digest",
sbomFormat: "cyclonedx",
sbomVersion: "1.6");
Assert.Equal("sbomlinkage.stella/v1", statement.PredicateType);
}
[Fact]
public void AllStatements_SerializeToValidJson()
{
var subject = new InTotoSubject { Name = "test", Digest = new() { ["sha256"] = "abc" } };
var evidence = _builder.BuildEvidenceStatement(subject, "trivy", "1.0", _fixedTime, "sbom1");
var spine = _builder.BuildProofSpineStatement(subject, "sha256", "root", ["leaf1"]);
var vex = _builder.BuildVexVerdictStatement(subject, "CVE-1", "fixed", null, _fixedTime);
var reasoning = _builder.BuildReasoningStatement(subject, "exploitability", "safe", []);
var receipt = _builder.BuildVerdictReceiptStatement(subject, "hash", _fixedTime, "ECDSA");
var sbom = _builder.BuildSbomLinkageStatement(subject, "sbom-hash", "spdx", "3.0");
// All should serialize without throwing
Assert.NotNull(JsonSerializer.Serialize(evidence));
Assert.NotNull(JsonSerializer.Serialize(spine));
Assert.NotNull(JsonSerializer.Serialize(vex));
Assert.NotNull(JsonSerializer.Serialize(reasoning));
Assert.NotNull(JsonSerializer.Serialize(receipt));
Assert.NotNull(JsonSerializer.Serialize(sbom));
Assert.Throws<ArgumentException>(() => _builder.BuildSbomLinkageStatement(Array.Empty<ProofSubject>(), predicate));
}
[Fact]
public void EvidenceStatement_RoundTripsViaJson()
{
var original = _builder.BuildEvidenceStatement(
subject: new InTotoSubject { Name: "artifact", Digest = new() { ["sha256"] = "hash123" } },
source: "grype",
sourceVersion: "0.80.0",
collectionTime: _fixedTime,
sbomEntryId: "entry-456",
vulnerabilityId: "CVE-2025-9999");
var subject = CreateSubject("image:demo", "abc123");
var statement = _builder.BuildEvidenceStatement(subject, new EvidencePayload
{
Source = "grype",
SourceVersion = "0.80.0",
CollectionTime = _fixedTime,
SbomEntryId = "sha256:sbom:pkg:npm/lodash@4.17.21",
VulnerabilityId = "CVE-2025-9999",
RawFinding = "raw",
EvidenceId = $"sha256:{new string('2', 64)}"
});
var json = JsonSerializer.Serialize(original);
var json = JsonSerializer.Serialize(statement);
var restored = JsonSerializer.Deserialize<EvidenceStatement>(json);
Assert.NotNull(restored);
Assert.Equal(original.PredicateType, restored.PredicateType);
Assert.Equal(original.Predicate.Source, restored.Predicate.Source);
Assert.Equal(original.Predicate.VulnerabilityId, restored.Predicate.VulnerabilityId);
Assert.Equal(statement.PredicateType, restored.PredicateType);
Assert.Equal(statement.Subject[0].Name, restored.Subject[0].Name);
Assert.Equal(statement.Predicate.EvidenceId, restored.Predicate.EvidenceId);
Assert.Equal(statement.Predicate.VulnerabilityId, restored.Predicate.VulnerabilityId);
}
[Fact]
public void ProofSpineStatement_RoundTripsViaJson()
{
var original = _builder.BuildProofSpineStatement(
subject: new InTotoSubject { Name = "image:latest", Digest = new() { ["sha256"] = "img-hash" } },
spineAlgorithm: "sha256-merkle-v2",
rootHash: "merkle-root-abc",
leafHashes: ["a", "b", "c", "d"]);
var json = JsonSerializer.Serialize(original);
var restored = JsonSerializer.Deserialize<ProofSpineStatement>(json);
Assert.NotNull(restored);
Assert.Equal(original.Predicate.RootHash, restored.Predicate.RootHash);
Assert.Equal(original.Predicate.LeafHashes.Length, restored.Predicate.LeafHashes.Length);
}
private static ProofSubject CreateSubject(string name, string sha256Digest)
=> new()
{
Name = name,
Digest = new Dictionary<string, string> { ["sha256"] = sha256Digest }
};
}

View File

@@ -1,172 +0,0 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps Contributors
using System.Text.Json;
using StellaOps.Attestor.ProofChain.Builders;
using StellaOps.Attestor.ProofChain.Statements;
using StellaOps.Attestor.ProofChain.Validation;
namespace StellaOps.Attestor.ProofChain.Tests.Statements;
/// <summary>
/// Unit tests for statement validation (Task PROOF-PRED-0015).
/// Covers happy paths built via <see cref="StatementBuilder"/> and hand-built
/// invalid statements that exercise each validator error branch.
/// </summary>
public class StatementValidatorTests
{
    // Builder produces known-valid statements; validator is the system under test.
    private readonly StatementBuilder _builder = new();
    private readonly IStatementValidator _validator = new StatementValidator();

    // Fixed timestamp keeps statement construction deterministic across runs.
    private readonly DateTimeOffset _fixedTime = new(2025, 12, 16, 10, 0, 0, TimeSpan.Zero);

    [Fact]
    public void Validate_ValidEvidenceStatement_ReturnsSuccess()
    {
        // A builder-produced statement with all required fields should validate cleanly.
        var statement = _builder.BuildEvidenceStatement(
            subject: new InTotoSubject { Name = "artifact", Digest = new() { ["sha256"] = "abc123" } },
            source: "trivy",
            sourceVersion: "0.50.0",
            collectionTime: _fixedTime,
            sbomEntryId: "sbom-123");

        var result = _validator.Validate(statement);

        Assert.True(result.IsValid);
        Assert.Empty(result.Errors);
    }

    [Fact]
    public void Validate_EvidenceStatementWithEmptySource_ReturnsError()
    {
        // Hand-built statement bypasses the builder so Source can be empty.
        var statement = new EvidenceStatement
        {
            Subject = [new InTotoSubject { Name = "artifact", Digest = new() { ["sha256"] = "abc" } }],
            Predicate = new EvidencePayload
            {
                Source = "",
                SourceVersion = "1.0",
                CollectionTime = _fixedTime,
                SbomEntryId = "sbom-1"
            }
        };

        var result = _validator.Validate(statement);

        // The error message is expected to name the offending field.
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Contains("Source"));
    }

    [Fact]
    public void Validate_StatementWithEmptySubject_ReturnsError()
    {
        // An in-toto statement must bind to at least one subject.
        var statement = new EvidenceStatement
        {
            Subject = [],
            Predicate = new EvidencePayload
            {
                Source = "trivy",
                SourceVersion = "1.0",
                CollectionTime = _fixedTime,
                SbomEntryId = "sbom-1"
            }
        };

        var result = _validator.Validate(statement);

        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Contains("Subject"));
    }

    [Fact]
    public void Validate_ProofSpineWithEmptyLeafHashes_ReturnsError()
    {
        // A merkle spine with no leaves cannot support its root hash.
        var statement = new ProofSpineStatement
        {
            Subject = [new InTotoSubject { Name = "image", Digest = new() { ["sha256"] = "hash" } }],
            Predicate = new ProofSpinePayload
            {
                Algorithm = "sha256-merkle",
                RootHash = "root",
                LeafHashes = []
            }
        };

        var result = _validator.Validate(statement);

        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Contains("LeafHashes"));
    }

    [Fact]
    public void Validate_VexVerdictWithValidStatus_ReturnsSuccess()
    {
        // The four statuses below are the accepted VEX vocabulary; each must pass.
        var validStatuses = new[] { "not_affected", "affected", "fixed", "under_investigation" };

        foreach (var status in validStatuses)
        {
            var statement = _builder.BuildVexVerdictStatement(
                subject: new InTotoSubject { Name = "pkg", Digest = new() { ["sha256"] = "abc" } },
                vulnerabilityId: "CVE-2025-1",
                vexStatus: status,
                justification: null,
                analysisTime: _fixedTime);

            var result = _validator.Validate(statement);

            Assert.True(result.IsValid, $"Status '{status}' should be valid");
        }
    }

    [Fact]
    public void Validate_VexVerdictWithInvalidStatus_ReturnsError()
    {
        // Any status outside the accepted VEX vocabulary must be rejected.
        var statement = new VexVerdictStatement
        {
            Subject = [new InTotoSubject { Name = "pkg", Digest = new() { ["sha256"] = "abc" } }],
            Predicate = new VexVerdictPayload
            {
                VulnerabilityId = "CVE-2025-1",
                Status = "invalid_status",
                AnalysisTime = _fixedTime
            }
        };

        var result = _validator.Validate(statement);

        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Contains("Status"));
    }

    [Fact]
    public void Validate_ReasoningStatementWithEvidence_ReturnsSuccess()
    {
        // Reasoning statements that cite evidence references should validate.
        var statement = _builder.BuildReasoningStatement(
            subject: new InTotoSubject { Name = "finding", Digest = new() { ["sha256"] = "abc" } },
            reasoningType: "exploitability",
            conclusion: "not_exploitable",
            evidenceRefs: ["evidence-1", "evidence-2"]);

        var result = _validator.Validate(statement);

        Assert.True(result.IsValid);
    }

    [Fact]
    public void Validate_SubjectWithMissingDigest_ReturnsError()
    {
        // A subject without any digest entries cannot be content-addressed.
        var statement = new EvidenceStatement
        {
            Subject = [new InTotoSubject { Name = "artifact", Digest = new() }],
            Predicate = new EvidencePayload
            {
                Source = "trivy",
                SourceVersion = "1.0",
                CollectionTime = _fixedTime,
                SbomEntryId = "sbom-1"
            }
        };

        var result = _validator.Validate(statement);

        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Contains("Digest"));
    }
}

View File

@@ -14,7 +14,7 @@
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.4" />
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-preview.7.24407.12" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="NSubstitute" Version="5.1.0" />
<PackageReference Include="xunit" Version="2.9.3" />
@@ -26,7 +26,7 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include=\"..\\..\\__Libraries\\StellaOps.Attestor.ProofChain\\StellaOps.Attestor.ProofChain.csproj\" />
<ProjectReference Include="..\..\__Libraries\StellaOps.Attestor.ProofChain\StellaOps.Attestor.ProofChain.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,465 +0,0 @@
// -----------------------------------------------------------------------------
// VerificationPipelineIntegrationTests.cs
// Sprint: SPRINT_0501_0001_0001_proof_evidence_chain_master
// Task: PROOF-MASTER-0002
// Description: Integration tests for the full proof chain verification pipeline
// -----------------------------------------------------------------------------
using FluentAssertions;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using NSubstitute;
using StellaOps.Attestor.ProofChain.Identifiers;
using StellaOps.Attestor.ProofChain.Verification;
using Xunit;
namespace StellaOps.Attestor.ProofChain.Tests.Verification;
/// <summary>
/// Integration tests for the verification pipeline.
/// Tests PROOF-MASTER-0002: Full proof chain verification flow.
/// </summary>
public class VerificationPipelineIntegrationTests
{
    // Pipeline collaborators mocked with NSubstitute; each test configures
    // only the setups it needs via the Setup* helpers below.
    private readonly IProofBundleStore _proofStore;
    private readonly IDsseVerifier _dsseVerifier;
    private readonly IRekorVerifier _rekorVerifier;
    private readonly ITrustAnchorResolver _trustAnchorResolver;
    private readonly ILogger<VerificationPipeline> _logger;
    // Fixed clock so receipt timestamps/durations are deterministic across runs.
    private readonly FakeTimeProvider _timeProvider;

    public VerificationPipelineIntegrationTests()
    {
        _proofStore = Substitute.For<IProofBundleStore>();
        _dsseVerifier = Substitute.For<IDsseVerifier>();
        _rekorVerifier = Substitute.For<IRekorVerifier>();
        _trustAnchorResolver = Substitute.For<ITrustAnchorResolver>();
        _logger = NullLogger<VerificationPipeline>.Instance;
        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 12, 17, 12, 0, 0, TimeSpan.Zero));
    }

    #region Full Pipeline Tests

    // Happy path: all four steps (dsse_signature, id_recomputation,
    // rekor_inclusion, trust_anchor) pass and no first failure is recorded.
    [Fact]
    public async Task VerifyAsync_ValidProofBundle_AllStepsPass()
    {
        // Arrange
        var bundleId = new ProofBundleId("sha256:valid123");
        var keyId = "key-1";
        SetupValidBundle(bundleId, keyId);
        SetupValidDsseVerification(keyId);
        SetupValidRekorVerification();
        SetupValidTrustAnchor(keyId);
        var pipeline = CreatePipeline();
        var request = new VerificationPipelineRequest
        {
            ProofBundleId = bundleId,
            VerifyRekor = true,
            VerifierVersion = "1.0.0-test"
        };

        // Act
        var result = await pipeline.VerifyAsync(request);

        // Assert
        result.IsValid.Should().BeTrue();
        result.Receipt.Result.Should().Be(VerificationResult.Pass);
        result.Steps.Should().HaveCount(4);
        result.Steps.Should().OnlyContain(s => s.Passed);
        result.FirstFailure.Should().BeNull();
    }

    // A bad DSSE signature should fail fast at the first step and surface the
    // verifier's error message in the receipt failure reason.
    [Fact]
    public async Task VerifyAsync_InvalidDsseSignature_FailsAtFirstStep()
    {
        // Arrange
        var bundleId = new ProofBundleId("sha256:invalid-sig");
        var keyId = "key-1";
        SetupValidBundle(bundleId, keyId);
        SetupInvalidDsseVerification(keyId, "Signature mismatch");
        var pipeline = CreatePipeline();
        var request = new VerificationPipelineRequest { ProofBundleId = bundleId };

        // Act
        var result = await pipeline.VerifyAsync(request);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Receipt.Result.Should().Be(VerificationResult.Fail);
        result.FirstFailure.Should().NotBeNull();
        result.FirstFailure!.StepName.Should().Be("dsse_signature");
        result.Receipt.FailureReason.Should().Contain("Signature mismatch");
    }

    // A bundle whose stored statement ID does not match the recomputed content
    // hash must be rejected by the id_recomputation step.
    [Fact]
    public async Task VerifyAsync_IdMismatch_FailsAtIdRecomputation()
    {
        // Arrange
        var bundleId = new ProofBundleId("sha256:wrong-id");
        var keyId = "key-1";
        SetupBundleWithWrongId(bundleId, keyId);
        SetupValidDsseVerification(keyId);
        var pipeline = CreatePipeline();
        var request = new VerificationPipelineRequest { ProofBundleId = bundleId };

        // Act
        var result = await pipeline.VerifyAsync(request);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Steps.Should().Contain(s => s.StepName == "id_recomputation" && !s.Passed);
    }

    // When Rekor verification is requested but the bundle carries no Rekor log
    // entry, the rekor_inclusion step must fail.
    [Fact]
    public async Task VerifyAsync_NoRekorEntry_FailsAtRekorStep()
    {
        // Arrange
        var bundleId = new ProofBundleId("sha256:no-rekor");
        var keyId = "key-1";
        SetupBundleWithoutRekor(bundleId, keyId);
        SetupValidDsseVerification(keyId);
        var pipeline = CreatePipeline();
        var request = new VerificationPipelineRequest
        {
            ProofBundleId = bundleId,
            VerifyRekor = true
        };

        // Act
        var result = await pipeline.VerifyAsync(request);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Steps.Should().Contain(s => s.StepName == "rekor_inclusion" && !s.Passed);
    }

    // With VerifyRekor=false the rekor_inclusion step is still reported, but
    // marked as passed/skipped even though the bundle has no Rekor entry.
    [Fact]
    public async Task VerifyAsync_RekorDisabled_SkipsRekorStep()
    {
        // Arrange
        var bundleId = new ProofBundleId("sha256:skip-rekor");
        var keyId = "key-1";
        SetupBundleWithoutRekor(bundleId, keyId);
        SetupValidDsseVerification(keyId);
        SetupValidTrustAnchor(keyId);
        var pipeline = CreatePipeline();
        var request = new VerificationPipelineRequest
        {
            ProofBundleId = bundleId,
            VerifyRekor = false // Skip Rekor
        };

        // Act
        var result = await pipeline.VerifyAsync(request);

        // Assert
        var rekorStep = result.Steps.FirstOrDefault(s => s.StepName == "rekor_inclusion");
        rekorStep.Should().NotBeNull();
        rekorStep!.Passed.Should().BeTrue();
        rekorStep.Details.Should().Contain("skipped");
    }

    // A signing key that is not in the trust anchor's allow-list must fail the
    // trust_anchor step even when all earlier steps succeed.
    [Fact]
    public async Task VerifyAsync_UnauthorizedKey_FailsAtTrustAnchor()
    {
        // Arrange
        var bundleId = new ProofBundleId("sha256:bad-key");
        var keyId = "unauthorized-key";
        SetupValidBundle(bundleId, keyId);
        SetupValidDsseVerification(keyId);
        SetupValidRekorVerification();
        SetupTrustAnchorWithoutKey(keyId);
        var pipeline = CreatePipeline();
        var request = new VerificationPipelineRequest
        {
            ProofBundleId = bundleId,
            VerifyRekor = true
        };

        // Act
        var result = await pipeline.VerifyAsync(request);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Steps.Should().Contain(s => s.StepName == "trust_anchor" && !s.Passed);
    }

    #endregion

    #region Receipt Generation Tests

    // The receipt must echo the request's verifier version, reference the
    // verified bundle, and summarize all four steps.
    [Fact]
    public async Task VerifyAsync_GeneratesReceipt_WithCorrectFields()
    {
        // Arrange
        var bundleId = new ProofBundleId("sha256:receipt-test");
        var keyId = "key-1";
        SetupValidBundle(bundleId, keyId);
        SetupValidDsseVerification(keyId);
        SetupValidRekorVerification();
        SetupValidTrustAnchor(keyId);
        var pipeline = CreatePipeline();
        var request = new VerificationPipelineRequest
        {
            ProofBundleId = bundleId,
            VerifierVersion = "2.0.0"
        };

        // Act
        var result = await pipeline.VerifyAsync(request);

        // Assert
        result.Receipt.Should().NotBeNull();
        result.Receipt.ReceiptId.Should().StartWith("receipt:");
        result.Receipt.VerifierVersion.Should().Be("2.0.0");
        result.Receipt.ProofBundleId.Should().Be(bundleId.Value);
        result.Receipt.StepsSummary.Should().HaveCount(4);
        result.Receipt.TotalDurationMs.Should().BeGreaterOrEqualTo(0);
    }

    // A bundle that cannot be loaded (store returns null) still produces a
    // failing receipt with a populated failure reason.
    [Fact]
    public async Task VerifyAsync_FailingPipeline_ReceiptContainsFailureReason()
    {
        // Arrange
        var bundleId = new ProofBundleId("sha256:fail-receipt");
        _proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
            .Returns((ProofBundle?)null);
        var pipeline = CreatePipeline();
        var request = new VerificationPipelineRequest { ProofBundleId = bundleId };

        // Act
        var result = await pipeline.VerifyAsync(request);

        // Assert
        result.Receipt.Result.Should().Be(VerificationResult.Fail);
        result.Receipt.FailureReason.Should().NotBeNullOrEmpty();
    }

    #endregion

    #region Cancellation Tests

    // A pre-cancelled token must not throw out of VerifyAsync; the pipeline is
    // expected to report the cancellation as a failed step instead.
    [Fact]
    public async Task VerifyAsync_Cancelled_ReturnsFailure()
    {
        // Arrange
        var bundleId = new ProofBundleId("sha256:cancel-test");
        var cts = new CancellationTokenSource();
        cts.Cancel();
        var pipeline = CreatePipeline();
        var request = new VerificationPipelineRequest { ProofBundleId = bundleId };

        // Act
        var result = await pipeline.VerifyAsync(request, cts.Token);

        // Assert
        result.IsValid.Should().BeFalse();
        result.Steps.Should().Contain(s => s.ErrorMessage?.Contains("cancelled") == true);
    }

    #endregion

    #region Helper Methods

    /// <summary>
    /// Builds the pipeline under test from the shared mocks, null logger,
    /// and the fixed fake clock.
    /// </summary>
    private VerificationPipeline CreatePipeline()
    {
        return VerificationPipeline.CreateDefault(
            _proofStore,
            _dsseVerifier,
            _rekorVerifier,
            _trustAnchorResolver,
            _logger,
            _timeProvider);
    }

    /// <summary>
    /// Stubs the proof store to return a well-formed bundle (including a Rekor
    /// log entry) signed with <paramref name="keyId"/>.
    /// </summary>
    private void SetupValidBundle(ProofBundleId bundleId, string keyId)
    {
        var bundle = CreateTestBundle(keyId, includeRekor: true);
        _proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
            .Returns(bundle);
    }

    /// <summary>
    /// Stubs the proof store with a bundle whose statement ID is intentionally
    /// inconsistent with its content, so ID recomputation fails.
    /// </summary>
    private void SetupBundleWithWrongId(ProofBundleId bundleId, string keyId)
    {
        // Create a bundle but the ID won't match when recomputed
        var bundle = new ProofBundle
        {
            Statements = new List<ProofStatement>
            {
                new ProofStatement
                {
                    StatementId = "sha256:wrong-statement-id", // Won't match content
                    PredicateType = "evidence.stella/v1",
                    Predicate = new { test = "data" }
                }
            },
            Envelopes = new List<DsseEnvelope>
            {
                new DsseEnvelope
                {
                    PayloadType = "application/vnd.in-toto+json",
                    Payload = "test"u8.ToArray(),
                    Signatures = new List<DsseSignature>
                    {
                        new DsseSignature { KeyId = keyId, Sig = new byte[] { 0x01 } }
                    }
                }
            },
            RekorLogEntry = CreateTestRekorEntry()
        };
        _proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
            .Returns(bundle);
    }

    /// <summary>
    /// Stubs the proof store with a valid bundle that carries no Rekor entry.
    /// </summary>
    private void SetupBundleWithoutRekor(ProofBundleId bundleId, string keyId)
    {
        var bundle = CreateTestBundle(keyId, includeRekor: false);
        _proofStore.GetBundleAsync(bundleId, Arg.Any<CancellationToken>())
            .Returns(bundle);
    }

    /// <summary>
    /// Makes the DSSE verifier accept any envelope, attributing it to
    /// <paramref name="keyId"/>.
    /// </summary>
    private void SetupValidDsseVerification(string keyId)
    {
        _dsseVerifier.VerifyAsync(Arg.Any<DsseEnvelope>(), Arg.Any<CancellationToken>())
            .Returns(new DsseVerificationResult { IsValid = true, KeyId = keyId });
    }

    /// <summary>
    /// Makes the DSSE verifier reject any envelope with the given error text.
    /// </summary>
    private void SetupInvalidDsseVerification(string keyId, string error)
    {
        _dsseVerifier.VerifyAsync(Arg.Any<DsseEnvelope>(), Arg.Any<CancellationToken>())
            .Returns(new DsseVerificationResult
            {
                IsValid = false,
                KeyId = keyId,
                ErrorMessage = error
            });
    }

    /// <summary>
    /// Makes the Rekor verifier accept any inclusion proof.
    /// </summary>
    private void SetupValidRekorVerification()
    {
        _rekorVerifier.VerifyInclusionAsync(
            Arg.Any<string>(),
            Arg.Any<long>(),
            Arg.Any<InclusionProof>(),
            Arg.Any<SignedTreeHead>(),
            Arg.Any<CancellationToken>())
            .Returns(new RekorVerificationResult { IsValid = true });
    }

    /// <summary>
    /// Resolves a trust anchor whose allow-list contains <paramref name="keyId"/>
    /// for both lookup paths (by anchor ID and by proof bundle).
    /// </summary>
    private void SetupValidTrustAnchor(string keyId)
    {
        var anchor = new TrustAnchorInfo
        {
            AnchorId = Guid.NewGuid(),
            AllowedKeyIds = new List<string> { keyId },
            RevokedKeyIds = new List<string>()
        };
        _trustAnchorResolver.GetAnchorAsync(Arg.Any<Guid>(), Arg.Any<CancellationToken>())
            .Returns(anchor);
        _trustAnchorResolver.FindAnchorForProofAsync(Arg.Any<ProofBundleId>(), Arg.Any<CancellationToken>())
            .Returns(anchor);
    }

    /// <summary>
    /// Resolves a trust anchor that deliberately does NOT allow
    /// <paramref name="keyId"/>, so the trust-anchor step fails.
    /// </summary>
    private void SetupTrustAnchorWithoutKey(string keyId)
    {
        var anchor = new TrustAnchorInfo
        {
            AnchorId = Guid.NewGuid(),
            AllowedKeyIds = new List<string> { "different-key" },
            RevokedKeyIds = new List<string>()
        };
        _trustAnchorResolver.FindAnchorForProofAsync(Arg.Any<ProofBundleId>(), Arg.Any<CancellationToken>())
            .Returns(anchor);
    }

    /// <summary>
    /// Builds a minimal one-statement, one-envelope bundle signed by
    /// <paramref name="keyId"/>; optionally attaches a Rekor log entry.
    /// </summary>
    private static ProofBundle CreateTestBundle(string keyId, bool includeRekor)
    {
        return new ProofBundle
        {
            Statements = new List<ProofStatement>
            {
                new ProofStatement
                {
                    StatementId = "sha256:test-statement",
                    PredicateType = "evidence.stella/v1",
                    Predicate = new { test = "data" }
                }
            },
            Envelopes = new List<DsseEnvelope>
            {
                new DsseEnvelope
                {
                    PayloadType = "application/vnd.in-toto+json",
                    Payload = "test"u8.ToArray(),
                    Signatures = new List<DsseSignature>
                    {
                        new DsseSignature { KeyId = keyId, Sig = new byte[] { 0x01 } }
                    }
                }
            },
            RekorLogEntry = includeRekor ? CreateTestRekorEntry() : null
        };
    }

    /// <summary>
    /// Builds a fixed, minimal Rekor log entry with a single-sibling inclusion
    /// proof and matching signed tree head (values are placeholders).
    /// </summary>
    private static RekorLogEntry CreateTestRekorEntry()
    {
        return new RekorLogEntry
        {
            LogId = "test-log",
            LogIndex = 12345,
            InclusionProof = new InclusionProof
            {
                Hashes = new List<byte[]> { new byte[] { 0x01 } },
                TreeSize = 1000,
                RootHash = new byte[] { 0x02 }
            },
            SignedTreeHead = new SignedTreeHead
            {
                TreeSize = 1000,
                RootHash = new byte[] { 0x02 },
                Signature = new byte[] { 0x03 }
            }
        };
    }

    #endregion
}
/// <summary>
/// Fake time provider for testing.
/// </summary>
/// <summary>
/// Deterministic <see cref="TimeProvider"/> for tests: reports a manually
/// controlled UTC instant instead of the system clock.
/// </summary>
internal sealed class FakeTimeProvider : TimeProvider
{
    // The instant currently reported by GetUtcNow(); mutated via Advance/SetTime.
    private DateTimeOffset _utcNow;

    public FakeTimeProvider(DateTimeOffset initialTime) => _utcNow = initialTime;

    /// <inheritdoc />
    public override DateTimeOffset GetUtcNow() => _utcNow;

    /// <summary>Moves the clock by <paramref name="duration"/> (negative moves it back).</summary>
    public void Advance(TimeSpan duration) => _utcNow += duration;

    /// <summary>Pins the clock to an absolute instant.</summary>
    public void SetTime(DateTimeOffset time) => _utcNow = time;
}

View File

@@ -1,484 +0,0 @@
// -----------------------------------------------------------------------------
// VerificationPipelineTests.cs
// Sprint: SPRINT_0501_0005_0001_proof_chain_api_surface
// Task: PROOF-API-0011 - Integration tests for verification pipeline
// Description: Tests for the full verification pipeline including DSSE, ID
// recomputation, Rekor inclusion, and trust anchor verification
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using StellaOps.Attestor.ProofChain.Identifiers;
using StellaOps.Attestor.ProofChain.Receipts;
using StellaOps.Attestor.ProofChain.Verification;
using Xunit;
namespace StellaOps.Attestor.ProofChain.Tests.Verification;
/// <summary>
/// Integration tests for the verification pipeline.
/// </summary>
public class VerificationPipelineTests
{
private readonly Mock<IProofBundleStore> _proofStoreMock;
private readonly Mock<IDsseVerifier> _dsseVerifierMock;
private readonly Mock<IRekorVerifier> _rekorVerifierMock;
private readonly Mock<ITrustAnchorResolver> _trustAnchorResolverMock;
private readonly VerificationPipeline _pipeline;
public VerificationPipelineTests()
{
_proofStoreMock = new Mock<IProofBundleStore>();
_dsseVerifierMock = new Mock<IDsseVerifier>();
_rekorVerifierMock = new Mock<IRekorVerifier>();
_trustAnchorResolverMock = new Mock<ITrustAnchorResolver>();
_pipeline = VerificationPipeline.CreateDefault(
_proofStoreMock.Object,
_dsseVerifierMock.Object,
_rekorVerifierMock.Object,
_trustAnchorResolverMock.Object,
NullLogger<VerificationPipeline>.Instance);
}
#region Full Pipeline Tests
[Fact]
public async Task VerifyAsync_AllStepsPass_ReturnsValidResult()
{
// Arrange
var bundleId = CreateTestBundleId();
var keyId = "test-key-id";
var anchorId = Guid.NewGuid();
SetupValidProofBundle(bundleId, keyId);
SetupValidDsseVerification(keyId);
SetupValidRekorVerification();
SetupValidTrustAnchor(anchorId, keyId);
var request = new VerificationPipelineRequest
{
ProofBundleId = bundleId,
VerifyRekor = true
};
// Act
var result = await _pipeline.VerifyAsync(request);
// Assert
Assert.True(result.IsValid);
Assert.Equal(VerificationResult.Pass, result.Receipt.Result);
Assert.All(result.Steps, step => Assert.True(step.Passed));
Assert.Null(result.FirstFailure);
}
[Fact]
public async Task VerifyAsync_DsseSignatureInvalid_FailsAtDsseStep()
{
// Arrange
var bundleId = CreateTestBundleId();
var keyId = "invalid-key";
SetupValidProofBundle(bundleId, keyId);
SetupInvalidDsseVerification("Signature verification failed");
var request = new VerificationPipelineRequest
{
ProofBundleId = bundleId,
VerifyRekor = false
};
// Act
var result = await _pipeline.VerifyAsync(request);
// Assert
Assert.False(result.IsValid);
Assert.Equal(VerificationResult.Fail, result.Receipt.Result);
Assert.NotNull(result.FirstFailure);
Assert.Equal("dsse_signature", result.FirstFailure.StepName);
Assert.Contains("Signature verification failed", result.FirstFailure.ErrorMessage);
}
[Fact]
public async Task VerifyAsync_IdMismatch_FailsAtIdRecomputationStep()
{
// Arrange
var bundleId = CreateTestBundleId();
var keyId = "test-key-id";
// Setup a bundle with mismatched ID
SetupProofBundleWithMismatchedId(bundleId, keyId);
SetupValidDsseVerification(keyId);
var request = new VerificationPipelineRequest
{
ProofBundleId = bundleId,
VerifyRekor = false
};
// Act
var result = await _pipeline.VerifyAsync(request);
// Assert
Assert.False(result.IsValid);
var idStep = result.Steps.FirstOrDefault(s => s.StepName == "id_recomputation");
Assert.NotNull(idStep);
// Note: The actual result depends on how the bundle is constructed
}
[Fact]
public async Task VerifyAsync_RekorInclusionFails_FailsAtRekorStep()
{
// Arrange
var bundleId = CreateTestBundleId();
var keyId = "test-key-id";
SetupValidProofBundle(bundleId, keyId);
SetupValidDsseVerification(keyId);
SetupInvalidRekorVerification("Inclusion proof invalid");
var request = new VerificationPipelineRequest
{
ProofBundleId = bundleId,
VerifyRekor = true
};
// Act
var result = await _pipeline.VerifyAsync(request);
// Assert
Assert.False(result.IsValid);
var rekorStep = result.Steps.FirstOrDefault(s => s.StepName == "rekor_inclusion");
Assert.NotNull(rekorStep);
Assert.False(rekorStep.Passed);
Assert.Contains("Inclusion proof invalid", rekorStep.ErrorMessage);
}
[Fact]
public async Task VerifyAsync_RekorDisabled_SkipsRekorStep()
{
// Arrange
var bundleId = CreateTestBundleId();
var keyId = "test-key-id";
var anchorId = Guid.NewGuid();
SetupValidProofBundle(bundleId, keyId, includeRekorEntry: false);
SetupValidDsseVerification(keyId);
SetupValidTrustAnchor(anchorId, keyId);
var request = new VerificationPipelineRequest
{
ProofBundleId = bundleId,
VerifyRekor = false
};
// Act
var result = await _pipeline.VerifyAsync(request);
// Assert
Assert.True(result.IsValid);
var rekorStep = result.Steps.FirstOrDefault(s => s.StepName == "rekor_inclusion");
Assert.NotNull(rekorStep);
Assert.True(rekorStep.Passed);
Assert.Contains("skipped", rekorStep.Details, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public async Task VerifyAsync_UnauthorizedKey_FailsAtTrustAnchorStep()
{
// Arrange
var bundleId = CreateTestBundleId();
var keyId = "unauthorized-key";
var anchorId = Guid.NewGuid();
SetupValidProofBundle(bundleId, keyId);
SetupValidDsseVerification(keyId);
SetupTrustAnchorWithoutKey(anchorId, keyId);
var request = new VerificationPipelineRequest
{
ProofBundleId = bundleId,
VerifyRekor = false
};
// Act
var result = await _pipeline.VerifyAsync(request);
// Assert
Assert.False(result.IsValid);
var anchorStep = result.Steps.FirstOrDefault(s => s.StepName == "trust_anchor");
Assert.NotNull(anchorStep);
Assert.False(anchorStep.Passed);
Assert.Contains("not authorized", anchorStep.ErrorMessage);
}
#endregion
#region Receipt Generation Tests
[Fact]
public async Task VerifyAsync_GeneratesReceiptWithCorrectFields()
{
// Arrange
var bundleId = CreateTestBundleId();
var keyId = "test-key-id";
var anchorId = Guid.NewGuid();
var verifierVersion = "2.0.0";
SetupValidProofBundle(bundleId, keyId);
SetupValidDsseVerification(keyId);
SetupValidRekorVerification();
SetupValidTrustAnchor(anchorId, keyId);
var request = new VerificationPipelineRequest
{
ProofBundleId = bundleId,
VerifyRekor = true,
VerifierVersion = verifierVersion
};
// Act
var result = await _pipeline.VerifyAsync(request);
// Assert
Assert.NotNull(result.Receipt);
Assert.NotEmpty(result.Receipt.ReceiptId);
Assert.Equal(bundleId.Value, result.Receipt.ProofBundleId);
Assert.Equal(verifierVersion, result.Receipt.VerifierVersion);
Assert.True(result.Receipt.TotalDurationMs >= 0);
Assert.NotEmpty(result.Receipt.StepsSummary!);
}
[Fact]
public async Task VerifyAsync_FailedVerification_ReceiptContainsFailureReason()
{
// Arrange
var bundleId = CreateTestBundleId();
_proofStoreMock
.Setup(x => x.GetBundleAsync(bundleId, It.IsAny<CancellationToken>()))
.ReturnsAsync((ProofBundle?)null);
var request = new VerificationPipelineRequest
{
ProofBundleId = bundleId,
VerifyRekor = false
};
// Act
var result = await _pipeline.VerifyAsync(request);
// Assert
Assert.False(result.IsValid);
Assert.Equal(VerificationResult.Fail, result.Receipt.Result);
Assert.NotNull(result.Receipt.FailureReason);
Assert.Contains("not found", result.Receipt.FailureReason);
}
#endregion
#region Cancellation Tests
[Fact]
public async Task VerifyAsync_Cancelled_ReturnsPartialResults()
{
// Arrange
var bundleId = CreateTestBundleId();
var keyId = "test-key-id";
var cts = new CancellationTokenSource();
SetupValidProofBundle(bundleId, keyId);
// Setup DSSE verification to cancel
_dsseVerifierMock
.Setup(x => x.VerifyAsync(It.IsAny<DsseEnvelope>(), It.IsAny<CancellationToken>()))
.Returns(async (DsseEnvelope _, CancellationToken ct) =>
{
await cts.CancelAsync();
ct.ThrowIfCancellationRequested();
return new DsseVerificationResult { IsValid = true, KeyId = keyId };
});
var request = new VerificationPipelineRequest
{
ProofBundleId = bundleId,
VerifyRekor = false
};
// Act & Assert - should complete but show cancellation
// The actual behavior depends on implementation
var result = await _pipeline.VerifyAsync(request, cts.Token);
// Pipeline may handle cancellation gracefully
}
#endregion
#region Helper Methods
private static ProofBundleId CreateTestBundleId()
{
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(Guid.NewGuid().ToString()));
return new ProofBundleId($"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}");
}
private void SetupValidProofBundle(ProofBundleId bundleId, string keyId, bool includeRekorEntry = true)
{
var bundle = new ProofBundle
{
Statements = new List<ProofStatement>
{
new ProofStatement
{
StatementId = "sha256:statement123",
PredicateType = "https://stella-ops.io/v1/evidence",
Predicate = new { test = "data" }
}
},
Envelopes = new List<DsseEnvelope>
{
new DsseEnvelope
{
PayloadType = "application/vnd.in-toto+json",
Payload = Encoding.UTF8.GetBytes("{}"),
Signatures = new List<DsseSignature>
{
new DsseSignature { KeyId = keyId, Sig = new byte[64] }
}
}
},
RekorLogEntry = includeRekorEntry ? new RekorLogEntry
{
LogId = "test-log",
LogIndex = 12345,
InclusionProof = new InclusionProof
{
Hashes = new List<byte[]>(),
TreeSize = 100,
RootHash = new byte[32]
},
SignedTreeHead = new SignedTreeHead
{
TreeSize = 100,
RootHash = new byte[32],
Signature = new byte[64]
}
} : null
};
_proofStoreMock
.Setup(x => x.GetBundleAsync(bundleId, It.IsAny<CancellationToken>()))
.ReturnsAsync(bundle);
}
private void SetupProofBundleWithMismatchedId(ProofBundleId bundleId, string keyId)
{
// Create a bundle that will compute to a different ID
var bundle = new ProofBundle
{
Statements = new List<ProofStatement>
{
new ProofStatement
{
StatementId = "sha256:differentstatement",
PredicateType = "https://stella-ops.io/v1/evidence",
Predicate = new { different = "data" }
}
},
Envelopes = new List<DsseEnvelope>
{
new DsseEnvelope
{
PayloadType = "application/vnd.in-toto+json",
Payload = Encoding.UTF8.GetBytes("{\"different\":\"payload\"}"),
Signatures = new List<DsseSignature>
{
new DsseSignature { KeyId = keyId, Sig = new byte[64] }
}
}
}
};
_proofStoreMock
.Setup(x => x.GetBundleAsync(bundleId, It.IsAny<CancellationToken>()))
.ReturnsAsync(bundle);
}
private void SetupValidDsseVerification(string keyId)
{
_dsseVerifierMock
.Setup(x => x.VerifyAsync(It.IsAny<DsseEnvelope>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(new DsseVerificationResult { IsValid = true, KeyId = keyId });
}
private void SetupInvalidDsseVerification(string errorMessage)
{
_dsseVerifierMock
.Setup(x => x.VerifyAsync(It.IsAny<DsseEnvelope>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(new DsseVerificationResult
{
IsValid = false,
KeyId = "unknown",
ErrorMessage = errorMessage
});
}
private void SetupValidRekorVerification()
{
_rekorVerifierMock
.Setup(x => x.VerifyInclusionAsync(
It.IsAny<string>(),
It.IsAny<long>(),
It.IsAny<InclusionProof>(),
It.IsAny<SignedTreeHead>(),
It.IsAny<CancellationToken>()))
.ReturnsAsync(new RekorVerificationResult { IsValid = true });
}
private void SetupInvalidRekorVerification(string errorMessage)
{
_rekorVerifierMock
.Setup(x => x.VerifyInclusionAsync(
It.IsAny<string>(),
It.IsAny<long>(),
It.IsAny<InclusionProof>(),
It.IsAny<SignedTreeHead>(),
It.IsAny<CancellationToken>()))
.ReturnsAsync(new RekorVerificationResult { IsValid = false, ErrorMessage = errorMessage });
}
private void SetupValidTrustAnchor(Guid anchorId, string keyId)
{
var anchor = new TrustAnchorInfo
{
AnchorId = anchorId,
AllowedKeyIds = new List<string> { keyId },
RevokedKeyIds = new List<string>()
};
_trustAnchorResolverMock
.Setup(x => x.FindAnchorForProofAsync(It.IsAny<ProofBundleId>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(anchor);
_trustAnchorResolverMock
.Setup(x => x.GetAnchorAsync(anchorId, It.IsAny<CancellationToken>()))
.ReturnsAsync(anchor);
}
private void SetupTrustAnchorWithoutKey(Guid anchorId, string keyId)
{
var anchor = new TrustAnchorInfo
{
AnchorId = anchorId,
AllowedKeyIds = new List<string> { "other-key-not-matching" },
RevokedKeyIds = new List<string>()
};
_trustAnchorResolverMock
.Setup(x => x.FindAnchorForProofAsync(It.IsAny<ProofBundleId>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(anchor);
}
#endregion
}

View File

@@ -286,6 +286,8 @@ internal static partial class CommandHandlers
}
}
var dssePath = (verifyDsse || verifyRekor) ? ResolveOfflineDssePath(bundleDir) : null;
var dsseVerified = false;
if (verifyDsse)
{
@@ -304,7 +306,6 @@ internal static partial class CommandHandlers
return;
}
var dssePath = ResolveOfflineDssePath(bundleDir);
if (dssePath is null)
{
verificationLog.Add("dsse:missing");
@@ -507,6 +508,44 @@ internal static partial class CommandHandlers
var rekorVerified = false;
if (verifyRekor)
{
if (dssePath is null)
{
verificationLog.Add("rekor:missing-dsse");
var quarantineId = await TryQuarantineOfflineBundleAsync(
loggerFactory,
quarantineRoot,
effectiveTenant,
bundlePath,
manifestJson,
reasonCode: "REKOR_VERIFY_FAIL",
reasonMessage: "Rekor verification requires a DSSE statement file (statement.dsse.json).",
verificationLog,
cancellationToken).ConfigureAwait(false);
await WriteOfflineImportResultAsync(
emitJson,
new OfflineImportResultPayload(
Status: "failed",
ExitCode: OfflineExitCodes.RekorVerificationFailed,
TenantId: effectiveTenant,
BundlePath: bundlePath,
ManifestPath: manifestPath,
Version: manifest.Version,
Digest: $"sha256:{bundleDigest}",
DsseVerified: dsseVerified,
RekorVerified: false,
ActivatedAt: null,
WasForceActivated: false,
ForceActivateReason: null,
QuarantineId: quarantineId,
ReasonCode: "REKOR_VERIFY_FAIL",
ReasonMessage: "Rekor verification requires a DSSE statement file (statement.dsse.json)."),
cancellationToken).ConfigureAwait(false);
Environment.ExitCode = OfflineExitCodes.RekorVerificationFailed;
return;
}
var rekorPath = ResolveOfflineRekorReceiptPath(bundleDir);
if (rekorPath is null)
{
@@ -546,20 +585,10 @@ internal static partial class CommandHandlers
return;
}
var receiptJson = await File.ReadAllTextAsync(rekorPath, cancellationToken).ConfigureAwait(false);
var receipt = JsonSerializer.Deserialize<OfflineKitRekorReceiptDocument>(receiptJson, new JsonSerializerOptions(JsonSerializerDefaults.Web)
var rekorKeyPath = ResolveOfflineRekorPublicKeyPath(bundleDir);
if (rekorKeyPath is null)
{
PropertyNameCaseInsensitive = true
});
if (receipt is null ||
string.IsNullOrWhiteSpace(receipt.Uuid) ||
receipt.LogIndex < 0 ||
string.IsNullOrWhiteSpace(receipt.RootHash) ||
receipt.Hashes is not { Count: > 0 } ||
string.IsNullOrWhiteSpace(receipt.Checkpoint))
{
verificationLog.Add("rekor:invalid");
verificationLog.Add("rekor:missing-public-key");
var quarantineId = await TryQuarantineOfflineBundleAsync(
loggerFactory,
quarantineRoot,
@@ -567,7 +596,7 @@ internal static partial class CommandHandlers
bundlePath,
manifestJson,
reasonCode: "REKOR_VERIFY_FAIL",
reasonMessage: "Rekor receipt is missing required fields.",
reasonMessage: "Rekor public key not found in offline bundle (rekor-pub.pem).",
verificationLog,
cancellationToken).ConfigureAwait(false);
@@ -588,16 +617,26 @@ internal static partial class CommandHandlers
ForceActivateReason: null,
QuarantineId: quarantineId,
ReasonCode: "REKOR_VERIFY_FAIL",
ReasonMessage: "Rekor receipt is missing required fields."),
ReasonMessage: "Rekor public key not found in offline bundle (rekor-pub.pem)."),
cancellationToken).ConfigureAwait(false);
Environment.ExitCode = OfflineExitCodes.RekorVerificationFailed;
return;
}
if (receipt.Checkpoint.IndexOf(receipt.RootHash, StringComparison.OrdinalIgnoreCase) < 0)
var dsseBytes = await File.ReadAllBytesAsync(dssePath, cancellationToken).ConfigureAwait(false);
var dsseSha256 = SHA256.HashData(dsseBytes);
var verify = await RekorOfflineReceiptVerifier.VerifyAsync(
rekorPath,
dsseSha256,
rekorKeyPath,
cancellationToken)
.ConfigureAwait(false);
if (!verify.Verified)
{
verificationLog.Add("rekor:checkpoint-mismatch");
verificationLog.Add("rekor:verify-failed");
var quarantineId = await TryQuarantineOfflineBundleAsync(
loggerFactory,
quarantineRoot,
@@ -605,7 +644,7 @@ internal static partial class CommandHandlers
bundlePath,
manifestJson,
reasonCode: "REKOR_VERIFY_FAIL",
reasonMessage: "Rekor checkpoint does not reference receipt rootHash.",
reasonMessage: verify.FailureReason ?? "Rekor verification failed.",
verificationLog,
cancellationToken).ConfigureAwait(false);
@@ -626,7 +665,7 @@ internal static partial class CommandHandlers
ForceActivateReason: null,
QuarantineId: quarantineId,
ReasonCode: "REKOR_VERIFY_FAIL",
ReasonMessage: "Rekor checkpoint does not reference receipt rootHash."),
ReasonMessage: verify.FailureReason ?? "Rekor verification failed."),
cancellationToken).ConfigureAwait(false);
Environment.ExitCode = OfflineExitCodes.RekorVerificationFailed;
@@ -635,8 +674,15 @@ internal static partial class CommandHandlers
rekorVerified = true;
verificationLog.Add("rekor:ok");
activity?.SetTag("stellaops.cli.offline.rekor_uuid", receipt.Uuid);
activity?.SetTag("stellaops.cli.offline.rekor_log_index", receipt.LogIndex);
if (!string.IsNullOrWhiteSpace(verify.RekorUuid))
{
activity?.SetTag("stellaops.cli.offline.rekor_uuid", verify.RekorUuid);
}
if (verify.LogIndex is not null)
{
activity?.SetTag("stellaops.cli.offline.rekor_log_index", verify.LogIndex.Value);
}
}
BundleVersion incomingVersion;
@@ -947,6 +993,25 @@ internal static partial class CommandHandlers
return candidates.FirstOrDefault(File.Exists);
}
/// <summary>
/// Locates the Rekor/tlog public key inside an offline bundle by probing
/// well-known file names in priority order. Returns the first existing
/// candidate path, or null when none is present.
/// </summary>
private static string? ResolveOfflineRekorPublicKeyPath(string bundleDirectory)
{
    // Probe order matters: top-level names win over nested tlog/keys layouts.
    foreach (var relative in new[]
    {
        "rekor-pub.pem",
        "rekor.pub",
        "tlog-root.pub",
        "tlog-root.pem",
        Path.Combine("tlog", "rekor-pub.pem"),
        Path.Combine("tlog", "rekor.pub"),
        Path.Combine("keys", "tlog-root", "rekor-pub.pem"),
        Path.Combine("keys", "tlog-root", "rekor.pub"),
        Path.Combine("evidence", "keys", "tlog-root", "rekor-pub.pem"),
        Path.Combine("evidence", "keys", "tlog-root", "rekor.pub"),
    })
    {
        var candidate = Path.Combine(bundleDirectory, relative);
        if (File.Exists(candidate))
        {
            return candidate;
        }
    }

    return null;
}
private static async Task<byte[]> LoadTrustRootPublicKeyAsync(string path, CancellationToken cancellationToken)
{
var bytes = await File.ReadAllBytesAsync(path, cancellationToken).ConfigureAwait(false);

View File

@@ -121,15 +121,58 @@ public sealed class OfflineCommandHandlersTests
}, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true });
await File.WriteAllTextAsync(dssePath, dsseJson, CancellationToken.None);
var rootHash = "deadbeef";
static byte[] HashLeaf(byte[] leafData)
{
var buffer = new byte[1 + leafData.Length];
buffer[0] = 0x00;
leafData.CopyTo(buffer, 1);
return SHA256.HashData(buffer);
}
static byte[] HashInterior(byte[] left, byte[] right)
{
var buffer = new byte[1 + left.Length + right.Length];
buffer[0] = 0x01;
left.CopyTo(buffer, 1);
right.CopyTo(buffer, 1 + left.Length);
return SHA256.HashData(buffer);
}
// Deterministic DSSE digest used as the Rekor leaf input.
var dsseBytes = await File.ReadAllBytesAsync(dssePath, CancellationToken.None);
var dsseSha256 = SHA256.HashData(dsseBytes);
// Build a minimal 2-leaf RFC6962 Merkle tree proof for logIndex=0.
var leaf0 = HashLeaf(dsseSha256);
var leaf1 = HashLeaf(SHA256.HashData(Encoding.UTF8.GetBytes("other-envelope")));
var rootHashBytes = HashInterior(leaf0, leaf1);
using var rekorKey = ECDsa.Create(ECCurve.NamedCurves.nistP256);
var checkpointOrigin = "rekor.sigstore.dev - 2605736670972794746";
var checkpointTimestamp = "1700000000";
var checkpointBody = $"{checkpointOrigin}\n2\n{Convert.ToBase64String(rootHashBytes)}\n{checkpointTimestamp}\n";
var checkpointSig = rekorKey.SignData(Encoding.UTF8.GetBytes(checkpointBody), HashAlgorithmName.SHA256);
var rekorPublicKeyPath = Path.Combine(bundleDir, "rekor-pub.pem");
await File.WriteAllTextAsync(
rekorPublicKeyPath,
WrapPem("PUBLIC KEY", rekorKey.ExportSubjectPublicKeyInfo()),
CancellationToken.None);
var checkpointPath = Path.Combine(bundleDir, "checkpoint.sig");
await File.WriteAllTextAsync(
checkpointPath,
checkpointBody + $"sig {Convert.ToBase64String(checkpointSig)}\n",
CancellationToken.None);
var rekorPath = Path.Combine(bundleDir, "rekor-receipt.json");
var rekorJson = JsonSerializer.Serialize(new
{
uuid = "rekor-test",
logIndex = 42,
rootHash,
hashes = new[] { "hash-1" },
checkpoint = $"checkpoint {rootHash}"
logIndex = 0,
rootHash = Convert.ToHexString(rootHashBytes).ToLowerInvariant(),
hashes = new[] { Convert.ToHexString(leaf1).ToLowerInvariant() },
checkpoint = "checkpoint.sig"
}, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true });
await File.WriteAllTextAsync(rekorPath, rekorJson, CancellationToken.None);

View File

@@ -24,6 +24,20 @@
- `docs/modules/export-center/operations/kms-envelope-pattern.md` (for 37-002 encryption/KMS)
- `docs/modules/export-center/operations/risk-bundle-provider-matrix.md` (for 69/70 risk bundle chain)
- Sprint file `docs/implplan/SPRINT_0164_0001_0001_exportcenter_iii.md`
- Offline triage bundle format: `docs/airgap/offline-bundle-format.md` (SPRINT_3603/3605)
## Offline Evidence Bundles & Cache (SPRINT_3603 / SPRINT_3605)
- Bundle format: `.stella.bundle.tgz` with DSSE-signed manifest and deterministic entry hashing (no external fetches required to verify).
- Core implementation (source of truth):
- `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/BundleManifest.cs`
- `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/BundlePredicate.cs`
- `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/OfflineBundle/OfflineBundlePackager.cs`
- Determinism requirements:
- All manifest entries and tarball paths must be sorted deterministically (ordinal string compare).
- Hash inputs must be canonical and stable; retrying packaging MUST yield identical bundle bytes when inputs are unchanged.
- Local evidence cache (offline-first, side-by-side with scan artefacts):
- `src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/EvidenceCache/LocalEvidenceCacheService.cs`
- Cache manifests and enrichment queue must be deterministic and replay-safe.
## Working Agreements
- Enforce tenant scoping and RBAC on every API, worker fetch, and distribution path; no cross-tenant exports unless explicitly whitelisted and logged.

View File

@@ -20,6 +20,13 @@ Deliver the Export Center service described in Epic10. Provide reproducible,
## Required Reading
- `docs/modules/export-center/architecture.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/airgap/offline-bundle-format.md` (triage offline bundles)
## Contracts (Offline Triage Bundles)
- Offline triage bundles are `.stella.bundle.tgz` files with a DSSE-signed manifest and deterministic entry ordering.
- Source of truth code paths:
- `StellaOps.ExportCenter.Core/OfflineBundle/*` (bundle schema, predicate, packager)
- `StellaOps.ExportCenter.Core/EvidenceCache/*` (local evidence cache + enrichment queue)
## Working Agreement
- 1. Update task status to `DOING`/`DONE` in both the corresponding sprint file `/docs/implplan/SPRINT_*.md` and the local `TASKS.md` when you start or finish work.

View File

@@ -0,0 +1,7 @@
# Export Center · Local Tasks
This file mirrors sprint work for the Export Center module.
| Task ID | Sprint | Status | Notes |
| --- | --- | --- | --- |
| `TRI-MASTER-0005` | `docs/implplan/SPRINT_3600_0001_0001_triage_unknowns_master.md` | DONE (2025-12-17) | Sync ExportCenter AGENTS with offline triage bundle (`.stella.bundle.tgz`) + local evidence cache contracts. |

View File

@@ -20,6 +20,21 @@
- `docs/modules/findings-ledger/schema.md` (sealed-mode and Merkle root structure)
- `docs/modules/findings-ledger/workflow-inference.md` (projection rules)
- Observability policy: `docs/observability/policy.md`.
- Triage & Unknowns (Alerts/Decisions): `docs/implplan/SPRINT_3602_0001_0001_evidence_decision_apis.md`.
## Triage Alerts & Decisions (SPRINT_3602)
- REST endpoints live in `src/Findings/StellaOps.Findings.Ledger.WebService/Program.cs` and must remain deterministic and tenant-scoped:
- `GET /v1/alerts` (filters + pagination)
- `GET /v1/alerts/{alertId}` (summary)
- `POST /v1/alerts/{alertId}/decisions` (append-only decision event)
- `GET /v1/alerts/{alertId}/audit` (decision timeline)
- `GET /v1/alerts/{alertId}/bundle` + `POST /v1/alerts/{alertId}/bundle/verify` (portable evidence bundle download + offline verification)
- Contracts/DTOs are defined under `src/Findings/StellaOps.Findings.Ledger.WebService/Contracts/AlertContracts.cs` (snake_case JSON).
- Decision domain model lives under `src/Findings/StellaOps.Findings.Ledger/Domain/DecisionModels.cs`.
- Decision invariants:
- Decisions are append-only (corrections are new events).
- Every decision MUST include a `replay_token` (content-addressed reproduce key).
- Evidence hashes captured at decision time must be stable and ordered deterministically.
## Execution rules
- Update sprint `Delivery Tracker` status when you start/stop/finish: TODO → DOING → DONE/BLOCKED.

View File

@@ -23,3 +23,9 @@ Status changes must be mirrored in `docs/implplan/SPRINT_0120_0001_0001_policy_r
| LEDGER-OAS-62-001 | DONE | SDK-facing OpenAPI assertions for pagination, evidence links, provenance added. | 2025-12-08 |
| LEDGER-OAS-63-001 | DONE | Deprecation headers and notifications applied to legacy findings export endpoint. | 2025-12-08 |
| LEDGER-OBS-55-001 | DONE | Incident-mode diagnostics (lag/conflict/replay traces), retention extension for snapshots, timeline/notifier hooks. | 2025-12-08 |
# Findings Ledger · Sprint 3600-0001-0001 (Triage & Unknowns)
| Task ID | Status | Notes | Updated (UTC) |
| --- | --- | --- | --- |
| TRI-MASTER-0004 | DONE | Sync Findings AGENTS with Alerts/Decisions API contract references (SPRINT_3602). | 2025-12-17 |

View File

@@ -133,8 +133,6 @@ public sealed record EvidencePoints
public static EvidencePoints Default => new();
}
public sealed record FreshnessBucket(int MaxAgeDays, int MultiplierBps);
/// <summary>
/// Provenance scoring configuration.
/// </summary>

View File

@@ -270,7 +270,7 @@ internal static class SmartDiffEndpoints
return new MaterialChangeDto
{
VulnId = change.FindingKey.VulnId,
Purl = change.FindingKey.Purl,
Purl = change.FindingKey.ComponentPurl,
HasMaterialChange = change.HasMaterialChange,
PriorityScore = change.PriorityScore,
PreviousStateHash = change.PreviousStateHash,
@@ -284,7 +284,7 @@ internal static class SmartDiffEndpoints
PreviousValue = c.PreviousValue,
CurrentValue = c.CurrentValue,
Weight = c.Weight,
SubType = c.SubType
SubType = null
}).ToImmutableArray()
};
}
@@ -295,7 +295,7 @@ internal static class SmartDiffEndpoints
{
CandidateId = candidate.CandidateId,
VulnId = candidate.FindingKey.VulnId,
Purl = candidate.FindingKey.Purl,
Purl = candidate.FindingKey.ComponentPurl,
ImageDigest = candidate.ImageDigest,
SuggestedStatus = candidate.SuggestedStatus.ToString().ToLowerInvariant(),
Justification = MapJustificationToString(candidate.Justification),
@@ -344,7 +344,7 @@ public sealed class MaterialChangeDto
public required string VulnId { get; init; }
public required string Purl { get; init; }
public bool HasMaterialChange { get; init; }
public int PriorityScore { get; init; }
public double PriorityScore { get; init; }
public required string PreviousStateHash { get; init; }
public required string CurrentStateHash { get; init; }
public required ImmutableArray<DetectedChangeDto> Changes { get; init; }

View File

@@ -4,11 +4,15 @@ using System.Collections.Immutable;
using System.Diagnostics;
using System.Linq;
using System.Security.Claims;
using System.Text;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy;
using StellaOps.Scanner.Core.Utility;
using StellaOps.Scanner.Storage.Models;
using StellaOps.Scanner.Storage.Services;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Options;
@@ -19,7 +23,12 @@ internal sealed class ReportEventDispatcher : IReportEventDispatcher
private const string DefaultTenant = "default";
private const string Source = "scanner.webservice";
private static readonly Guid TenantNamespace = new("ac8f2b54-72ea-43fa-9c3b-6a87ebd2d48a");
private static readonly Guid ExecutionNamespace = new("f0b1f40c-0f04-447b-a102-50de3ff79a33");
private static readonly Guid ManifestNamespace = new("d9c8858c-e2a4-47d6-bf0f-1e76d2865bea");
private readonly IPlatformEventPublisher _publisher;
private readonly IClassificationChangeTracker _classificationChangeTracker;
private readonly TimeProvider _timeProvider;
private readonly ILogger<ReportEventDispatcher> _logger;
private readonly string[] _apiBaseSegments;
@@ -32,11 +41,13 @@ internal sealed class ReportEventDispatcher : IReportEventDispatcher
public ReportEventDispatcher(
IPlatformEventPublisher publisher,
IClassificationChangeTracker classificationChangeTracker,
IOptions<ScannerWebServiceOptions> options,
TimeProvider timeProvider,
ILogger<ReportEventDispatcher> logger)
{
_publisher = publisher ?? throw new ArgumentNullException(nameof(publisher));
_classificationChangeTracker = classificationChangeTracker ?? throw new ArgumentNullException(nameof(classificationChangeTracker));
if (options is null)
{
throw new ArgumentNullException(nameof(options));
@@ -109,6 +120,8 @@ internal sealed class ReportEventDispatcher : IReportEventDispatcher
await PublishSafelyAsync(reportEvent, document.ReportId, cancellationToken).ConfigureAwait(false);
await TrackFnDriftSafelyAsync(request, preview, document, tenant, occurredAt, cancellationToken).ConfigureAwait(false);
var scanCompletedEvent = new OrchestratorEvent
{
EventId = Guid.NewGuid(),
@@ -130,6 +143,200 @@ internal sealed class ReportEventDispatcher : IReportEventDispatcher
await PublishSafelyAsync(scanCompletedEvent, document.ReportId, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Records FN-drift classification changes derived from the policy preview diffs.
/// Best-effort: any tracking failure is logged and swallowed so it can never break
/// report event dispatch; caller cancellation is still propagated.
/// </summary>
private async Task TrackFnDriftSafelyAsync(
    ReportRequestDto request,
    PolicyPreviewResponse preview,
    ReportDocumentDto document,
    string tenant,
    DateTimeOffset occurredAt,
    CancellationToken cancellationToken)
{
    // No diffs means nothing drifted; skip the tracker call entirely.
    if (preview.Diffs.IsDefaultOrEmpty)
    {
        return;
    }
    try
    {
        var changes = BuildClassificationChanges(request, preview, document, tenant, occurredAt);
        if (changes.Count == 0)
        {
            return;
        }
        await _classificationChangeTracker.TrackChangesAsync(changes, cancellationToken).ConfigureAwait(false);
    }
    catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
    {
        // Honor genuine caller cancellation; do not treat it as a tracking failure.
        throw;
    }
    catch (Exception ex)
    {
        // Drift tracking is auxiliary; log and continue.
        _logger.LogWarning(ex, "Failed to record FN-drift classification changes for report {ReportId}.", document.ReportId);
    }
}
/// <summary>
/// Maps policy preview verdict diffs onto <c>ClassificationChange</c> records for
/// FN-drift tracking. Diffs are joined back to the request findings by finding id;
/// entries without a projected finding id, a matching finding, or both CVE and PURL
/// are skipped. Tenant/execution/manifest ids are derived deterministically.
/// </summary>
private static IReadOnlyList<ClassificationChange> BuildClassificationChanges(
    ReportRequestDto request,
    PolicyPreviewResponse preview,
    ReportDocumentDto document,
    string tenant,
    DateTimeOffset occurredAt)
{
    var findings = request.Findings ?? Array.Empty<PolicyPreviewFindingDto>();
    if (findings.Count == 0)
    {
        return Array.Empty<ClassificationChange>();
    }
    // Index findings by id for the diff join below (blank ids are unmatchable).
    var findingsById = findings
        .Where(finding => !string.IsNullOrWhiteSpace(finding.Id))
        .ToDictionary(finding => finding.Id!, StringComparer.Ordinal);
    var tenantId = ResolveTenantId(tenant);
    var executionId = ResolveExecutionId(tenantId, document.ReportId);
    var manifestId = ResolveManifestId(tenantId, document);
    // Prefer the document's digest; fall back to the request digest when absent.
    var artifactDigest = string.IsNullOrWhiteSpace(document.ImageDigest) ? request.ImageDigest ?? string.Empty : document.ImageDigest;
    var changes = new List<ClassificationChange>();
    foreach (var diff in preview.Diffs)
    {
        var projected = diff.Projected;
        if (projected is null || string.IsNullOrWhiteSpace(projected.FindingId))
        {
            continue;
        }
        if (!findingsById.TryGetValue(projected.FindingId, out var finding))
        {
            continue;
        }
        // Both identifiers are required to key a classification change.
        if (string.IsNullOrWhiteSpace(finding.Cve) || string.IsNullOrWhiteSpace(finding.Purl))
        {
            continue;
        }
        var previousStatus = MapVerdictStatus(diff.Baseline.Status);
        var newStatus = MapVerdictStatus(projected.Status);
        // An Affected -> Unaffected transition is reported as Fixed.
        if (previousStatus == ClassificationStatus.Affected && newStatus == ClassificationStatus.Unaffected)
        {
            newStatus = ClassificationStatus.Fixed;
        }
        changes.Add(new ClassificationChange
        {
            ArtifactDigest = artifactDigest,
            VulnId = finding.Cve!,
            PackagePurl = finding.Purl!,
            TenantId = tenantId,
            ManifestId = manifestId,
            ExecutionId = executionId,
            PreviousStatus = previousStatus,
            NewStatus = newStatus,
            Cause = DetermineCause(diff),
            CauseDetail = BuildCauseDetail(diff, finding),
            ChangedAt = occurredAt
        });
    }
    return changes;
}
/// <summary>
/// Resolves a tenant identifier: a tenant string that already is a GUID is used
/// directly; otherwise a deterministic GUID is derived from the trimmed,
/// lower-cased tenant name under <c>TenantNamespace</c>.
/// </summary>
private static Guid ResolveTenantId(string tenant)
{
    // Fast path: the tenant value is itself a GUID.
    if (Guid.TryParse(tenant, out var parsed))
    {
        return parsed;
    }

    // Otherwise derive a stable id from the canonicalized name.
    var canonical = tenant.Trim().ToLowerInvariant();
    var nameBytes = Encoding.UTF8.GetBytes(canonical);
    return ScannerIdentifiers.CreateDeterministicGuid(TenantNamespace, nameBytes);
}
/// <summary>
/// Derives a deterministic execution id from the tenant id and report id
/// under <c>ExecutionNamespace</c>.
/// </summary>
private static Guid ResolveExecutionId(Guid tenantId, string reportId)
{
    var canonical = $"{tenantId:D}:{reportId}".Trim().ToLowerInvariant();
    var payloadBytes = Encoding.UTF8.GetBytes(canonical);
    return ScannerIdentifiers.CreateDeterministicGuid(ExecutionNamespace, payloadBytes);
}
/// <summary>
/// Derives a deterministic manifest id under <c>ManifestNamespace</c>, preferring
/// the surface manifest digest and falling back to the image digest when absent.
/// </summary>
private static Guid ResolveManifestId(Guid tenantId, ReportDocumentDto document)
{
    var digest = document.Surface?.ManifestDigest;
    if (string.IsNullOrWhiteSpace(digest))
    {
        // No surface manifest recorded; key off the image digest instead.
        digest = document.ImageDigest;
    }

    var canonical = $"{tenantId:D}:{digest}".Trim().ToLowerInvariant();
    return ScannerIdentifiers.CreateDeterministicGuid(ManifestNamespace, Encoding.UTF8.GetBytes(canonical));
}
/// <summary>
/// Maps a policy verdict status onto the coarse classification lattice:
/// Blocked/Escalated → Affected; Warned/Deferred/RequiresVex → Unknown;
/// everything else → Unaffected.
/// </summary>
private static ClassificationStatus MapVerdictStatus(PolicyVerdictStatus status)
{
    switch (status)
    {
        case PolicyVerdictStatus.Blocked:
        case PolicyVerdictStatus.Escalated:
            return ClassificationStatus.Affected;
        case PolicyVerdictStatus.Warned:
        case PolicyVerdictStatus.Deferred:
        case PolicyVerdictStatus.RequiresVex:
            return ClassificationStatus.Unknown;
        default:
            return ClassificationStatus.Unaffected;
    }
}
/// <summary>
/// Classifies why a verdict drifted by comparing baseline vs projected fields.
/// Checks run in priority order — rule identity/action, reachability, source
/// trust (feed), quieting (lattice) — and the first difference wins; anything
/// else maps to <c>DriftCause.Other</c>.
/// </summary>
private static DriftCause DetermineCause(PolicyVerdictDiff diff)
{
    // Highest priority: the matched rule itself (name or action) changed.
    if (!string.Equals(diff.Baseline.RuleName, diff.Projected.RuleName, StringComparison.Ordinal)
        || !string.Equals(diff.Baseline.RuleAction, diff.Projected.RuleAction, StringComparison.Ordinal))
    {
        return DriftCause.RuleDelta;
    }
    if (!string.Equals(diff.Baseline.Reachability, diff.Projected.Reachability, StringComparison.Ordinal))
    {
        return DriftCause.ReachabilityDelta;
    }
    if (!string.Equals(diff.Baseline.SourceTrust, diff.Projected.SourceTrust, StringComparison.Ordinal))
    {
        return DriftCause.FeedDelta;
    }
    // Quiet flag or quieting actor changed -> lattice-driven drift.
    if (diff.Baseline.Quiet != diff.Projected.Quiet
        || !string.Equals(diff.Baseline.QuietedBy, diff.Projected.QuietedBy, StringComparison.Ordinal))
    {
        return DriftCause.LatticeDelta;
    }
    return DriftCause.Other;
}
/// <summary>
/// Builds the optional cause-detail map for a drift record from the projected
/// verdict's rule/reachability/source-trust fields plus the finding source.
/// Keys are ordinal-sorted for deterministic serialization; returns null when
/// no detail is available.
/// </summary>
private static IReadOnlyDictionary<string, string>? BuildCauseDetail(PolicyVerdictDiff diff, PolicyPreviewFindingDto finding)
{
    var details = new SortedDictionary<string, string>(StringComparer.Ordinal);

    // Only non-blank values are recorded.
    void AddIfPresent(string key, string? value)
    {
        if (!string.IsNullOrWhiteSpace(value))
        {
            details[key] = value!;
        }
    }

    AddIfPresent("ruleName", diff.Projected.RuleName);
    AddIfPresent("ruleAction", diff.Projected.RuleAction);
    AddIfPresent("reachability", diff.Projected.Reachability);
    AddIfPresent("sourceTrust", diff.Projected.SourceTrust);
    AddIfPresent("findingSource", finding.Source);

    return details.Count == 0 ? null : details;
}
private async Task PublishSafelyAsync(OrchestratorEvent @event, string reportId, CancellationToken cancellationToken)
{
try

View File

@@ -6,6 +6,7 @@ using Microsoft.Extensions.Hosting;
using OpenTelemetry.Metrics;
using OpenTelemetry.Resources;
using OpenTelemetry.Trace;
using StellaOps.Scanner.Storage.Services;
using StellaOps.Scanner.Worker.Options;
namespace StellaOps.Scanner.Worker.Diagnostics;
@@ -61,6 +62,7 @@ public static class TelemetryExtensions
metrics
.AddMeter(
ScannerWorkerInstrumentation.MeterName,
FnDriftMetricsExporter.MeterName,
"StellaOps.Scanner.Analyzers.Lang.Node",
"StellaOps.Scanner.Analyzers.Lang.Go")
.AddRuntimeInstrumentation()

View File

@@ -27,6 +27,7 @@ using StellaOps.Scanner.Worker.Determinism;
using StellaOps.Scanner.Worker.Processing.Surface;
using StellaOps.Scanner.Storage.Extensions;
using StellaOps.Scanner.Storage;
using StellaOps.Scanner.Storage.Services;
using Reachability = StellaOps.Scanner.Worker.Processing.Reachability;
var builder = Host.CreateApplicationBuilder(args);
@@ -98,6 +99,7 @@ var connectionString = storageSection.GetValue<string>("Postgres:ConnectionStrin
if (!string.IsNullOrWhiteSpace(connectionString))
{
builder.Services.AddScannerStorage(storageSection);
builder.Services.AddHostedService<FnDriftMetricsExporter>();
builder.Services.AddSingleton<IConfigureOptions<ScannerStorageOptions>, ScannerStorageSurfaceSecretConfigurator>();
builder.Services.AddSingleton<ISurfaceManifestPublisher, SurfaceManifestPublisher>();
builder.Services.AddSingleton<IScanStageExecutor, SurfaceManifestStageExecutor>();

View File

@@ -0,0 +1,181 @@
using System.Collections.Immutable;
namespace StellaOps.Scanner.CallGraph;
/// <summary>
/// Computes forward reachability over a call-graph snapshot via breadth-first
/// traversal from the snapshot's entrypoints, capped at a configurable depth.
/// All output collections are ordinal-sorted so results are deterministic for a
/// given input, and the result carries a content digest over its own payload.
/// </summary>
public sealed class ReachabilityAnalyzer
{
    private readonly TimeProvider _timeProvider;
    private readonly int _maxDepth;

    /// <summary>
    /// Creates an analyzer. <paramref name="timeProvider"/> defaults to the system
    /// clock; non-positive <paramref name="maxDepth"/> values fall back to 256.
    /// </summary>
    public ReachabilityAnalyzer(TimeProvider? timeProvider = null, int maxDepth = 256)
    {
        _timeProvider = timeProvider ?? TimeProvider.System;
        _maxDepth = maxDepth <= 0 ? 256 : maxDepth;
    }

    /// <summary>
    /// Runs BFS from every entrypoint and returns the reachable node ids, the
    /// reachable sink ids, and one entrypoint→sink path per reachable sink.
    /// </summary>
    /// <exception cref="ArgumentNullException">When <paramref name="snapshot"/> is null.</exception>
    public ReachabilityAnalysisResult Analyze(CallGraphSnapshot snapshot)
    {
        ArgumentNullException.ThrowIfNull(snapshot);
        var trimmed = snapshot.Trimmed();
        var adjacency = BuildAdjacency(trimmed);
        var entrypoints = trimmed.EntrypointIds;
        if (entrypoints.IsDefaultOrEmpty)
        {
            return EmptyResult(trimmed);
        }

        // origins: node -> entrypoint that first reached it; parents: BFS tree edges.
        var origins = new Dictionary<string, string>(StringComparer.Ordinal);
        var parents = new Dictionary<string, string?>(StringComparer.Ordinal);
        var depths = new Dictionary<string, int>(StringComparer.Ordinal);
        var queue = new Queue<string>();

        // Seed in sorted order so first-reached attribution is deterministic.
        foreach (var entry in entrypoints.OrderBy(e => e, StringComparer.Ordinal))
        {
            origins[entry] = entry;
            parents[entry] = null;
            depths[entry] = 0;
            queue.Enqueue(entry);
        }

        while (queue.Count > 0)
        {
            var current = queue.Dequeue();
            if (!depths.TryGetValue(current, out var depth))
            {
                continue;
            }
            if (depth >= _maxDepth)
            {
                // Depth cap reached: do not expand this frontier node further.
                continue;
            }
            if (!adjacency.TryGetValue(current, out var neighbors))
            {
                continue;
            }
            foreach (var next in neighbors)
            {
                // First visit wins; later discoveries keep the original origin.
                if (origins.ContainsKey(next))
                {
                    continue;
                }
                origins[next] = origins[current];
                parents[next] = current;
                depths[next] = depth + 1;
                queue.Enqueue(next);
            }
        }

        var reachableNodes = origins.Keys.OrderBy(id => id, StringComparer.Ordinal).ToImmutableArray();
        var reachableSinks = trimmed.SinkIds
            .Where(origins.ContainsKey)
            .OrderBy(id => id, StringComparer.Ordinal)
            .ToImmutableArray();
        var paths = BuildPaths(reachableSinks, origins, parents);
        var computedAt = _timeProvider.GetUtcNow();

        // The digest is computed over a provisional result with an empty digest
        // field, then stamped in — so the digest never covers itself.
        var provisional = new ReachabilityAnalysisResult(
            ScanId: trimmed.ScanId,
            GraphDigest: trimmed.GraphDigest,
            Language: trimmed.Language,
            ComputedAt: computedAt,
            ReachableNodeIds: reachableNodes,
            ReachableSinkIds: reachableSinks,
            Paths: paths,
            ResultDigest: string.Empty);
        var resultDigest = CallGraphDigests.ComputeResultDigest(provisional);
        return provisional with { ResultDigest = resultDigest };
    }

    /// <summary>
    /// Builds a deterministic adjacency map: per source, the distinct, non-blank
    /// target ids in ordinal order.
    /// </summary>
    private static Dictionary<string, ImmutableArray<string>> BuildAdjacency(CallGraphSnapshot snapshot)
    {
        var map = new Dictionary<string, List<string>>(StringComparer.Ordinal);
        foreach (var edge in snapshot.Edges)
        {
            if (!map.TryGetValue(edge.SourceId, out var list))
            {
                list = new List<string>();
                map[edge.SourceId] = list;
            }
            list.Add(edge.TargetId);
        }
        return map.ToDictionary(
            kvp => kvp.Key,
            kvp => kvp.Value
                .Where(v => !string.IsNullOrWhiteSpace(v))
                .Distinct(StringComparer.Ordinal)
                .OrderBy(v => v, StringComparer.Ordinal)
                .ToImmutableArray(),
            StringComparer.Ordinal);
    }

    /// <summary>
    /// Result for a snapshot with no entrypoints. Uses the injected clock — the
    /// previous static version read <c>TimeProvider.System</c> directly, bypassing
    /// the constructor-injected provider and breaking reproducibility under a
    /// test clock; this fix routes through <see cref="_timeProvider"/>.
    /// </summary>
    private ReachabilityAnalysisResult EmptyResult(CallGraphSnapshot snapshot)
    {
        var computedAt = _timeProvider.GetUtcNow();
        var provisional = new ReachabilityAnalysisResult(
            ScanId: snapshot.ScanId,
            GraphDigest: snapshot.GraphDigest,
            Language: snapshot.Language,
            ComputedAt: computedAt,
            ReachableNodeIds: ImmutableArray<string>.Empty,
            ReachableSinkIds: ImmutableArray<string>.Empty,
            Paths: ImmutableArray<ReachabilityPath>.Empty,
            ResultDigest: string.Empty);
        return provisional with { ResultDigest = CallGraphDigests.ComputeResultDigest(provisional) };
    }

    /// <summary>
    /// Builds one path per reachable sink, reconstructed from the BFS parent map,
    /// sorted by sink id then entrypoint id for determinism.
    /// </summary>
    private static ImmutableArray<ReachabilityPath> BuildPaths(
        ImmutableArray<string> reachableSinks,
        Dictionary<string, string> origins,
        Dictionary<string, string?> parents)
    {
        var paths = new List<ReachabilityPath>(reachableSinks.Length);
        foreach (var sinkId in reachableSinks)
        {
            if (!origins.TryGetValue(sinkId, out var origin))
            {
                continue;
            }
            var nodeIds = ReconstructPathNodeIds(sinkId, parents);
            paths.Add(new ReachabilityPath(origin, sinkId, nodeIds));
        }
        return paths
            .OrderBy(p => p.SinkId, StringComparer.Ordinal)
            .ThenBy(p => p.EntrypointId, StringComparer.Ordinal)
            .ToImmutableArray();
    }

    /// <summary>
    /// Walks the parent chain from the sink back to its entrypoint, returning the
    /// node ids in entrypoint→sink order.
    /// </summary>
    private static ImmutableArray<string> ReconstructPathNodeIds(string sinkId, Dictionary<string, string?> parents)
    {
        var stack = new Stack<string>();
        var cursor = sinkId;
        while (true)
        {
            stack.Push(cursor);
            if (!parents.TryGetValue(cursor, out var parent) || parent is null)
            {
                break;
            }
            cursor = parent;
        }
        var builder = ImmutableArray.CreateBuilder<string>(stack.Count);
        while (stack.Count > 0)
        {
            builder.Add(stack.Pop());
        }
        return builder.ToImmutable();
    }
}

View File

@@ -0,0 +1,25 @@
using Microsoft.Extensions.Configuration;
namespace StellaOps.Scanner.CallGraph.Caching;
/// <summary>
/// Binding options for the Valkey-backed call-graph cache.
/// Snake_case configuration keys are mapped via <see cref="ConfigurationKeyNameAttribute"/>.
/// </summary>
public sealed class CallGraphCacheConfig
{
    /// <summary>Master switch; when false the cache is bypassed entirely.</summary>
    [ConfigurationKeyName("enabled")]
    public bool Enabled { get; set; } = true;
    /// <summary>StackExchange.Redis connection string; when blank the cache is disabled.</summary>
    [ConfigurationKeyName("connection_string")]
    public string ConnectionString { get; set; } = string.Empty;
    /// <summary>Prefix prepended to every cache key.</summary>
    [ConfigurationKeyName("key_prefix")]
    public string KeyPrefix { get; set; } = "callgraph:";
    /// <summary>Default entry TTL in seconds when no explicit TTL is supplied.</summary>
    [ConfigurationKeyName("ttl_seconds")]
    public int TtlSeconds { get; set; } = 3600;
    /// <summary>When true, payloads are gzip-compressed before storage.</summary>
    [ConfigurationKeyName("gzip")]
    public bool EnableGzip { get; set; } = true;
    /// <summary>Circuit-breaker settings guarding the cache backend.</summary>
    [ConfigurationKeyName("circuit_breaker")]
    public CircuitBreakerConfig CircuitBreaker { get; set; } = new();
}

View File

@@ -0,0 +1,16 @@
using Microsoft.Extensions.Configuration;
namespace StellaOps.Scanner.CallGraph.Caching;
/// <summary>
/// Binding options for the cache circuit breaker (snake_case configuration keys).
/// Consumers clamp these values to a minimum of 1.
/// </summary>
public sealed class CircuitBreakerConfig
{
    /// <summary>Consecutive failures required to trip the breaker open.</summary>
    [ConfigurationKeyName("failure_threshold")]
    public int FailureThreshold { get; set; } = 5;
    /// <summary>Seconds the breaker stays open before moving to half-open.</summary>
    [ConfigurationKeyName("timeout_seconds")]
    public int TimeoutSeconds { get; set; } = 30;
    /// <summary>Seconds allowed in half-open before re-opening without a probe.</summary>
    [ConfigurationKeyName("half_open_timeout")]
    public int HalfOpenTimeout { get; set; } = 10;
}

View File

@@ -0,0 +1,133 @@
namespace StellaOps.Scanner.CallGraph.Caching;
/// <summary>States of the cache circuit breaker.</summary>
public enum CircuitState
{
    /// <summary>Normal operation; calls flow through.</summary>
    Closed,
    /// <summary>Tripped; calls are bypassed until the open timeout elapses.</summary>
    Open,
    /// <summary>Probation after the open timeout: a success closes the circuit, a failure re-opens it.</summary>
    HalfOpen
}
/// <summary>
/// Thread-safe circuit breaker. Transitions: Closed → Open after
/// <c>FailureThreshold</c> consecutive failures; Open → HalfOpen once the open
/// timeout elapses; HalfOpen → Closed on success, or back to Open on failure
/// (or when the half-open window expires without any probe result).
/// </summary>
public sealed class CircuitBreakerState
{
    // Single gate guarding all mutable state below.
    private readonly object _lock = new();
    private readonly TimeProvider _timeProvider;
    private readonly int _failureThreshold;
    private readonly TimeSpan _openTimeout;
    private readonly TimeSpan _halfOpenTimeout;
    private CircuitState _state = CircuitState.Closed;
    private int _failureCount;
    // Wall-clock instant at which the breaker last tripped open.
    private DateTimeOffset _openedAt;
    /// <summary>
    /// Creates the breaker from config; threshold and timeouts are clamped to at least 1.
    /// </summary>
    public CircuitBreakerState(CircuitBreakerConfig config, TimeProvider? timeProvider = null)
    {
        ArgumentNullException.ThrowIfNull(config);
        _timeProvider = timeProvider ?? TimeProvider.System;
        _failureThreshold = Math.Max(1, config.FailureThreshold);
        _openTimeout = TimeSpan.FromSeconds(Math.Max(1, config.TimeoutSeconds));
        _halfOpenTimeout = TimeSpan.FromSeconds(Math.Max(1, config.HalfOpenTimeout));
    }
    /// <summary>Current state, refreshed against the clock before returning.</summary>
    public CircuitState State
    {
        get
        {
            lock (_lock)
            {
                UpdateState();
                return _state;
            }
        }
    }
    /// <summary>True when the breaker is open (callers should bypass the backend).</summary>
    public bool IsOpen
    {
        get
        {
            lock (_lock)
            {
                UpdateState();
                return _state == CircuitState.Open;
            }
        }
    }
    /// <summary>True when the breaker is in the half-open probation window.</summary>
    public bool IsHalfOpen
    {
        get
        {
            lock (_lock)
            {
                UpdateState();
                return _state == CircuitState.HalfOpen;
            }
        }
    }
    /// <summary>Records a successful call: closes the circuit and clears the failure count.</summary>
    public void RecordSuccess()
    {
        lock (_lock)
        {
            if (_state is CircuitState.HalfOpen or CircuitState.Open)
            {
                _state = CircuitState.Closed;
            }
            _failureCount = 0;
        }
    }
    /// <summary>
    /// Records a failed call. A failure while half-open re-opens immediately;
    /// otherwise failures accumulate until the threshold trips the breaker.
    /// </summary>
    public void RecordFailure()
    {
        lock (_lock)
        {
            var now = _timeProvider.GetUtcNow();
            if (_state == CircuitState.HalfOpen)
            {
                // Failed probe: trip again and keep the count saturated at the threshold.
                _state = CircuitState.Open;
                _openedAt = now;
                _failureCount = _failureThreshold;
                return;
            }
            _failureCount++;
            if (_failureCount >= _failureThreshold)
            {
                _state = CircuitState.Open;
                _openedAt = now;
            }
        }
    }
    /// <summary>Forces the breaker back to Closed and clears the failure count.</summary>
    public void Reset()
    {
        lock (_lock)
        {
            _state = CircuitState.Closed;
            _failureCount = 0;
        }
    }
    // Advances time-driven transitions. Must be called while holding _lock.
    private void UpdateState()
    {
        var now = _timeProvider.GetUtcNow();
        if (_state == CircuitState.Open)
        {
            // Open timeout elapsed since the trip -> allow a half-open probe.
            if (now - _openedAt >= _openTimeout)
            {
                _state = CircuitState.HalfOpen;
            }
        }
        else if (_state == CircuitState.HalfOpen)
        {
            // The half-open window is measured from the ORIGINAL trip time, i.e.
            // openTimeout + halfOpenTimeout total; if no probe reported by then,
            // re-open and restart the open timer.
            if (now - _openedAt >= _openTimeout + _halfOpenTimeout)
            {
                _state = CircuitState.Open;
                _openedAt = now;
            }
        }
    }
}

View File

@@ -0,0 +1,13 @@
namespace StellaOps.Scanner.CallGraph.Caching;
/// <summary>
/// Cache for call-graph snapshots and reachability results, keyed by scan id and language.
/// </summary>
public interface ICallGraphCacheService
{
    /// <summary>Returns the cached snapshot for the scan/language pair, or null on a miss.</summary>
    ValueTask<CallGraphSnapshot?> TryGetCallGraphAsync(string scanId, string language, CancellationToken cancellationToken = default);
    /// <summary>Stores a snapshot; a null <paramref name="ttl"/> uses the implementation's configured default.</summary>
    Task SetCallGraphAsync(CallGraphSnapshot snapshot, TimeSpan? ttl = null, CancellationToken cancellationToken = default);
    /// <summary>Returns the cached reachability result for the scan/language pair, or null on a miss.</summary>
    ValueTask<ReachabilityAnalysisResult?> TryGetReachabilityResultAsync(string scanId, string language, CancellationToken cancellationToken = default);
    /// <summary>Stores a reachability result; a null <paramref name="ttl"/> uses the implementation's configured default.</summary>
    Task SetReachabilityResultAsync(ReachabilityAnalysisResult result, TimeSpan? ttl = null, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,242 @@
using System.IO.Compression;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StackExchange.Redis;
namespace StellaOps.Scanner.CallGraph.Caching;
/// <summary>
/// Valkey/Redis-backed <see cref="ICallGraphCacheService"/>. Stores JSON-serialized
/// (optionally gzip-compressed) call-graph snapshots and reachability results under
/// prefixed keys. A circuit breaker bypasses the backend after repeated failures,
/// and all cache errors degrade to cache-miss behavior — they never propagate.
/// Fix: <see cref="DisposeAsync"/> now also disposes the connection
/// <see cref="SemaphoreSlim"/>, which the original implementation leaked.
/// </summary>
public sealed class ValkeyCallGraphCacheService : ICallGraphCacheService, IAsyncDisposable
{
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = false
    };
    private readonly CallGraphCacheConfig _options;
    private readonly ILogger<ValkeyCallGraphCacheService> _logger;
    private readonly TimeProvider _timeProvider;
    private readonly CircuitBreakerState _circuitBreaker;
    // Guards lazy creation of the multiplexer connection.
    private readonly SemaphoreSlim _connectionLock = new(1, 1);
    private readonly Func<ConfigurationOptions, Task<IConnectionMultiplexer>> _connectionFactory;
    private IConnectionMultiplexer? _connection;
    /// <summary>
    /// Creates the cache service. <paramref name="connectionFactory"/> is an
    /// injection seam for tests; by default a real multiplexer is opened lazily.
    /// </summary>
    public ValkeyCallGraphCacheService(
        IOptions<CallGraphCacheConfig> options,
        ILogger<ValkeyCallGraphCacheService> logger,
        TimeProvider? timeProvider = null,
        Func<ConfigurationOptions, Task<IConnectionMultiplexer>>? connectionFactory = null)
    {
        _options = (options ?? throw new ArgumentNullException(nameof(options))).Value;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
        _connectionFactory = connectionFactory ?? (config => Task.FromResult<IConnectionMultiplexer>(ConnectionMultiplexer.Connect(config)));
        _circuitBreaker = new CircuitBreakerState(_options.CircuitBreaker, _timeProvider);
    }
    /// <summary>Returns the cached snapshot, or null on miss/disabled cache/deserialization failure.</summary>
    public async ValueTask<CallGraphSnapshot?> TryGetCallGraphAsync(string scanId, string language, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
        ArgumentException.ThrowIfNullOrWhiteSpace(language);
        if (!IsEnabled())
        {
            return null;
        }
        var key = BuildKey(scanId, language, kind: "graph");
        var payload = await TryGetBytesAsync(key, cancellationToken).ConfigureAwait(false);
        if (payload is null)
        {
            return null;
        }
        try
        {
            var bytes = _options.EnableGzip ? Inflate(payload) : payload;
            return JsonSerializer.Deserialize<CallGraphSnapshot>(bytes, JsonOptions);
        }
        catch (Exception ex) when (ex is JsonException or InvalidDataException)
        {
            // Corrupt/incompatible payloads are treated as cache misses.
            _logger.LogWarning(ex, "Failed to deserialize cached call graph for {ScanId}/{Language}", scanId, language);
            return null;
        }
    }
    /// <summary>Stores the trimmed snapshot; no-op when the cache is disabled or tripped.</summary>
    public async Task SetCallGraphAsync(CallGraphSnapshot snapshot, TimeSpan? ttl = null, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(snapshot);
        if (!IsEnabled())
        {
            return;
        }
        var key = BuildKey(snapshot.ScanId, snapshot.Language, kind: "graph");
        var bytes = JsonSerializer.SerializeToUtf8Bytes(snapshot.Trimmed(), JsonOptions);
        var payload = _options.EnableGzip ? Deflate(bytes) : bytes;
        await SetBytesAsync(key, payload, ttl, cancellationToken).ConfigureAwait(false);
    }
    /// <summary>Returns the cached reachability result, or null on miss/disabled cache/deserialization failure.</summary>
    public async ValueTask<ReachabilityAnalysisResult?> TryGetReachabilityResultAsync(string scanId, string language, CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
        ArgumentException.ThrowIfNullOrWhiteSpace(language);
        if (!IsEnabled())
        {
            return null;
        }
        var key = BuildKey(scanId, language, kind: "reachability");
        var payload = await TryGetBytesAsync(key, cancellationToken).ConfigureAwait(false);
        if (payload is null)
        {
            return null;
        }
        try
        {
            var bytes = _options.EnableGzip ? Inflate(payload) : payload;
            return JsonSerializer.Deserialize<ReachabilityAnalysisResult>(bytes, JsonOptions);
        }
        catch (Exception ex) when (ex is JsonException or InvalidDataException)
        {
            _logger.LogWarning(ex, "Failed to deserialize cached reachability result for {ScanId}/{Language}", scanId, language);
            return null;
        }
    }
    /// <summary>Stores the trimmed reachability result; no-op when the cache is disabled or tripped.</summary>
    public async Task SetReachabilityResultAsync(ReachabilityAnalysisResult result, TimeSpan? ttl = null, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(result);
        if (!IsEnabled())
        {
            return;
        }
        var key = BuildKey(result.ScanId, result.Language, kind: "reachability");
        var bytes = JsonSerializer.SerializeToUtf8Bytes(result.Trimmed(), JsonOptions);
        var payload = _options.EnableGzip ? Deflate(bytes) : bytes;
        await SetBytesAsync(key, payload, ttl, cancellationToken).ConfigureAwait(false);
    }
    /// <summary>Disposes the multiplexer connection (if any) and the connection gate.</summary>
    public async ValueTask DisposeAsync()
    {
        if (_connection is IAsyncDisposable asyncDisposable)
        {
            await asyncDisposable.DisposeAsync().ConfigureAwait(false);
        }
        else
        {
            _connection?.Dispose();
        }
        // Fix: SemaphoreSlim owns a wait handle and must be disposed as well;
        // the original implementation never released it.
        _connectionLock.Dispose();
    }
    // True when the cache should be used: enabled, breaker closed, connection string set.
    private bool IsEnabled()
    {
        if (!_options.Enabled)
        {
            return false;
        }
        if (_circuitBreaker.IsOpen)
        {
            _logger.LogWarning("Call graph cache circuit breaker is open; bypassing Valkey.");
            return false;
        }
        return !string.IsNullOrWhiteSpace(_options.ConnectionString);
    }
    // GET wrapper: records breaker success/failure; any error becomes a miss (null).
    private async ValueTask<byte[]?> TryGetBytesAsync(string key, CancellationToken cancellationToken)
    {
        try
        {
            var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
            var value = await db.StringGetAsync(key).ConfigureAwait(false);
            _circuitBreaker.RecordSuccess();
            return value.IsNull ? null : (byte[]?)value;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Valkey cache GET failed for key {Key}", key);
            _circuitBreaker.RecordFailure();
            return null;
        }
    }
    // SET wrapper: records breaker success/failure; errors are swallowed (best-effort cache).
    private async Task SetBytesAsync(string key, byte[] payload, TimeSpan? ttl, CancellationToken cancellationToken)
    {
        var effectiveTtl = ttl ?? TimeSpan.FromSeconds(Math.Max(1, _options.TtlSeconds));
        try
        {
            var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
            await db.StringSetAsync(key, payload, expiry: effectiveTtl).ConfigureAwait(false);
            _circuitBreaker.RecordSuccess();
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Valkey cache SET failed for key {Key}", key);
            _circuitBreaker.RecordFailure();
        }
    }
    private async Task<IDatabase> GetDatabaseAsync(CancellationToken cancellationToken)
    {
        var connection = await GetConnectionAsync(cancellationToken).ConfigureAwait(false);
        return connection.GetDatabase();
    }
    // Double-checked lazy connection creation guarded by _connectionLock.
    private async Task<IConnectionMultiplexer> GetConnectionAsync(CancellationToken cancellationToken)
    {
        if (_connection is not null)
        {
            return _connection;
        }
        await _connectionLock.WaitAsync(cancellationToken).ConfigureAwait(false);
        try
        {
            if (_connection is not null)
            {
                return _connection;
            }
            var config = ConfigurationOptions.Parse(_options.ConnectionString);
            _connection = await _connectionFactory(config).ConfigureAwait(false);
            return _connection;
        }
        finally
        {
            _connectionLock.Release();
        }
    }
    // Key layout: "<prefix><kind>:<scanId>:<language-lowercase>".
    private string BuildKey(string scanId, string language, string kind)
        => $"{_options.KeyPrefix}{kind}:{scanId.Trim()}:{language.Trim().ToLowerInvariant()}";
    private static byte[] Deflate(byte[] payload)
    {
        using var output = new MemoryStream();
        using (var gzip = new GZipStream(output, CompressionLevel.SmallestSize, leaveOpen: true))
        {
            gzip.Write(payload, 0, payload.Length);
        }
        return output.ToArray();
    }
    private static byte[] Inflate(byte[] payload)
    {
        using var input = new MemoryStream(payload);
        using var gzip = new GZipStream(input, CompressionMode.Decompress);
        using var output = new MemoryStream();
        gzip.CopyTo(output);
        return output.ToArray();
    }
}

View File

@@ -0,0 +1,27 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Scanner.CallGraph.Caching;
using StellaOps.Scanner.CallGraph.DotNet;
using StellaOps.Scanner.CallGraph.Node;
namespace StellaOps.Scanner.CallGraph.DependencyInjection;
/// <summary>
/// DI wiring for call-graph extraction, reachability analysis, and caching.
/// </summary>
public static class CallGraphServiceCollectionExtensions
{
    /// <summary>
    /// Registers the .NET and Node call-graph extractors, the reachability
    /// analyzer, and the Valkey-backed cache as singletons. Cache options bind
    /// from the "CallGraph:Cache" configuration section.
    /// </summary>
    public static IServiceCollection AddCallGraphServices(this IServiceCollection services, IConfiguration configuration)
    {
        ArgumentNullException.ThrowIfNull(services);
        ArgumentNullException.ThrowIfNull(configuration);
        services.Configure<CallGraphCacheConfig>(configuration.GetSection("CallGraph:Cache"));
        services.AddSingleton<ICallGraphExtractor, DotNetCallGraphExtractor>();
        services.AddSingleton<ICallGraphExtractor, NodeCallGraphExtractor>();
        services.AddSingleton<ReachabilityAnalyzer>();
        services.AddSingleton<ICallGraphCacheService, ValkeyCallGraphCacheService>();
        return services;
    }
}

View File

@@ -0,0 +1,413 @@
using System.Collections.Immutable;
using Microsoft.Build.Locator;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.MSBuild;
using StellaOps.Scanner.Reachability;
namespace StellaOps.Scanner.CallGraph.DotNet;
public sealed class DotNetCallGraphExtractor : ICallGraphExtractor
{
private readonly TimeProvider _timeProvider;
/// <summary>
/// Creates the extractor. <paramref name="timeProvider"/> defaults to the system clock.
/// </summary>
public DotNetCallGraphExtractor(TimeProvider? timeProvider = null)
{
    _timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>Language token this extractor handles.</summary>
public string Language => "dotnet";
public async Task<CallGraphSnapshot> ExtractAsync(CallGraphExtractionRequest request, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (!string.Equals(request.Language, Language, StringComparison.OrdinalIgnoreCase))
{
throw new ArgumentException($"Expected language '{Language}', got '{request.Language}'.", nameof(request));
}
var resolvedTarget = ResolveTargetPath(request.TargetPath);
if (resolvedTarget is null)
{
throw new FileNotFoundException($"Unable to locate a .sln or .csproj at '{request.TargetPath}'.");
}
var analysisRoot = Path.GetDirectoryName(resolvedTarget) ?? Directory.GetCurrentDirectory();
EnsureMsBuildRegistered();
using var workspace = MSBuildWorkspace.Create();
workspace.WorkspaceFailed += (_, _) => { };
var solution = resolvedTarget.EndsWith(".sln", StringComparison.OrdinalIgnoreCase)
? await workspace.OpenSolutionAsync(resolvedTarget, cancellationToken).ConfigureAwait(false)
: (await workspace.OpenProjectAsync(resolvedTarget, cancellationToken).ConfigureAwait(false)).Solution;
var nodesById = new Dictionary<string, CallGraphNode>(StringComparer.Ordinal);
var edges = new HashSet<CallGraphEdge>(CallGraphEdgeComparer.Instance);
foreach (var project in solution.Projects.OrderBy(p => p.FilePath ?? p.Name, StringComparer.Ordinal))
{
cancellationToken.ThrowIfCancellationRequested();
foreach (var document in project.Documents.OrderBy(d => d.FilePath ?? d.Name, StringComparer.Ordinal))
{
cancellationToken.ThrowIfCancellationRequested();
var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false);
if (root is null)
{
continue;
}
var model = await document.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false);
if (model is null)
{
continue;
}
foreach (var methodSyntax in root.DescendantNodes().OfType<MethodDeclarationSyntax>())
{
var methodSymbol = model.GetDeclaredSymbol(methodSyntax, cancellationToken);
if (methodSymbol is null)
{
continue;
}
var methodNode = CreateMethodNode(analysisRoot, methodSymbol, methodSyntax);
nodesById[methodNode.NodeId] = methodNode;
foreach (var invocation in methodSyntax.DescendantNodes().OfType<InvocationExpressionSyntax>())
{
var invoked = model.GetSymbolInfo(invocation, cancellationToken).Symbol as IMethodSymbol;
if (invoked is null)
{
continue;
}
var targetNode = CreateInvokedNode(analysisRoot, invoked);
nodesById.TryAdd(targetNode.NodeId, targetNode);
edges.Add(new CallGraphEdge(
SourceId: methodNode.NodeId,
TargetId: targetNode.NodeId,
CallKind: ClassifyCallKind(invoked),
CallSite: FormatCallSite(analysisRoot, invocation)));
}
}
}
}
var nodes = nodesById.Values
.Select(n => n.Trimmed())
.OrderBy(n => n.NodeId, StringComparer.Ordinal)
.ToImmutableArray();
var entrypoints = nodes
.Where(n => n.IsEntrypoint)
.Select(n => n.NodeId)
.Distinct(StringComparer.Ordinal)
.OrderBy(id => id, StringComparer.Ordinal)
.ToImmutableArray();
var sinks = nodes
.Where(n => n.IsSink)
.Select(n => n.NodeId)
.Distinct(StringComparer.Ordinal)
.OrderBy(id => id, StringComparer.Ordinal)
.ToImmutableArray();
var orderedEdges = edges
.Select(e => e.Trimmed())
.OrderBy(e => e.SourceId, StringComparer.Ordinal)
.ThenBy(e => e.TargetId, StringComparer.Ordinal)
.ThenBy(e => e.CallKind.ToString(), StringComparer.Ordinal)
.ThenBy(e => e.CallSite ?? string.Empty, StringComparer.Ordinal)
.ToImmutableArray();
var extractedAt = _timeProvider.GetUtcNow();
var provisional = new CallGraphSnapshot(
ScanId: request.ScanId,
GraphDigest: string.Empty,
Language: Language,
ExtractedAt: extractedAt,
Nodes: nodes,
Edges: orderedEdges,
EntrypointIds: entrypoints,
SinkIds: sinks);
var digest = CallGraphDigests.ComputeGraphDigest(provisional);
return provisional with { GraphDigest = digest };
}
private static void EnsureMsBuildRegistered()
{
if (MSBuildLocator.IsRegistered)
{
return;
}
MSBuildLocator.RegisterDefaults();
}
private static string? ResolveTargetPath(string targetPath)
{
if (string.IsNullOrWhiteSpace(targetPath))
{
return null;
}
var path = Path.GetFullPath(targetPath);
if (File.Exists(path) && (path.EndsWith(".sln", StringComparison.OrdinalIgnoreCase) || path.EndsWith(".csproj", StringComparison.OrdinalIgnoreCase)))
{
return path;
}
if (Directory.Exists(path))
{
var sln = Directory.EnumerateFiles(path, "*.sln", SearchOption.TopDirectoryOnly)
.OrderBy(p => p, StringComparer.Ordinal)
.FirstOrDefault();
if (sln is not null)
{
return sln;
}
var csproj = Directory.EnumerateFiles(path, "*.csproj", SearchOption.AllDirectories)
.OrderBy(p => p, StringComparer.Ordinal)
.FirstOrDefault();
return csproj;
}
return null;
}
private static CallKind ClassifyCallKind(IMethodSymbol invoked)
{
if (invoked.MethodKind == MethodKind.DelegateInvoke)
{
return CallKind.Delegate;
}
if (invoked.IsVirtual || invoked.IsAbstract || invoked.IsOverride)
{
return CallKind.Virtual;
}
return CallKind.Direct;
}
private static CallGraphNode CreateMethodNode(string analysisRoot, IMethodSymbol method, MethodDeclarationSyntax syntax)
{
var id = CallGraphNodeIds.Compute(GetStableSymbolId(method));
var (file, line) = GetSourceLocation(analysisRoot, syntax.GetLocation());
var (isEntrypoint, entryType) = EntrypointClassifier.IsEntrypoint(method);
return new CallGraphNode(
NodeId: id,
Symbol: method.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat),
File: file,
Line: line,
Package: method.ContainingAssembly?.Name ?? "unknown",
Visibility: MapVisibility(method.DeclaredAccessibility),
IsEntrypoint: isEntrypoint,
EntrypointType: entryType,
IsSink: false,
SinkCategory: null);
}
private static CallGraphNode CreateInvokedNode(string analysisRoot, IMethodSymbol method)
{
var id = CallGraphNodeIds.Compute(GetStableSymbolId(method));
var definitionLocation = method.Locations.FirstOrDefault(l => l.IsInSource) ?? Location.None;
var (file, line) = GetSourceLocation(analysisRoot, definitionLocation);
var sink = SinkRegistry.MatchSink("dotnet", method.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat));
return new CallGraphNode(
NodeId: id,
Symbol: method.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat),
File: file,
Line: line,
Package: method.ContainingAssembly?.Name ?? "unknown",
Visibility: MapVisibility(method.DeclaredAccessibility),
IsEntrypoint: false,
EntrypointType: null,
IsSink: sink is not null,
SinkCategory: sink?.Category);
}
private static Visibility MapVisibility(Accessibility accessibility)
{
return accessibility switch
{
Accessibility.Public => Visibility.Public,
Accessibility.Internal => Visibility.Internal,
Accessibility.Protected => Visibility.Protected,
_ => Visibility.Private
};
}
private static (string File, int Line) GetSourceLocation(string analysisRoot, Location location)
{
if (location is null || !location.IsInSource || location.SourceTree is null)
{
return (string.Empty, 0);
}
var span = location.GetLineSpan();
var relative = Path.GetRelativePath(analysisRoot, span.Path ?? string.Empty);
if (relative.StartsWith("..", StringComparison.Ordinal))
{
relative = Path.GetFileName(span.Path ?? string.Empty);
}
var file = relative.Replace('\\', '/');
var line = span.StartLinePosition.Line + 1;
return (file, line);
}
private static string? FormatCallSite(string analysisRoot, InvocationExpressionSyntax invocation)
{
var location = invocation.GetLocation();
if (location is null || !location.IsInSource || location.SourceTree is null)
{
return null;
}
var span = location.GetLineSpan();
var relative = Path.GetRelativePath(analysisRoot, span.Path ?? string.Empty);
if (relative.StartsWith("..", StringComparison.Ordinal))
{
relative = Path.GetFileName(span.Path ?? string.Empty);
}
var file = relative.Replace('\\', '/');
var line = span.StartLinePosition.Line + 1;
if (string.IsNullOrWhiteSpace(file) || line <= 0)
{
return null;
}
return $"{file}:{line}";
}
private static string GetStableSymbolId(IMethodSymbol method)
{
var docId = method.GetDocumentationCommentId();
if (!string.IsNullOrWhiteSpace(docId))
{
return $"dotnet:{method.ContainingAssembly?.Name}:{docId}";
}
return $"dotnet:{method.ContainingAssembly?.Name}:{method.ToDisplayString(SymbolDisplayFormat.FullyQualifiedFormat)}";
}
private sealed class CallGraphEdgeComparer : IEqualityComparer<CallGraphEdge>
{
public static readonly CallGraphEdgeComparer Instance = new();
public bool Equals(CallGraphEdge? x, CallGraphEdge? y)
{
if (ReferenceEquals(x, y))
{
return true;
}
if (x is null || y is null)
{
return false;
}
return string.Equals(x.SourceId, y.SourceId, StringComparison.Ordinal)
&& string.Equals(x.TargetId, y.TargetId, StringComparison.Ordinal)
&& x.CallKind == y.CallKind
&& string.Equals(x.CallSite ?? string.Empty, y.CallSite ?? string.Empty, StringComparison.Ordinal);
}
public int GetHashCode(CallGraphEdge obj)
{
return HashCode.Combine(
obj.SourceId,
obj.TargetId,
obj.CallKind,
obj.CallSite ?? string.Empty);
}
}
}
/// <summary>
/// Heuristic entrypoint detection for .NET methods: static Main, ASP.NET Core
/// HTTP-attributed actions, hosted-service workers, and gRPC service methods.
/// </summary>
internal static class EntrypointClassifier
{
    // ASP.NET Core attribute class names (unqualified) that mark an action method.
    // Includes the full verb set: GET/POST/PUT/DELETE/PATCH/HEAD/OPTIONS plus Route.
    private static readonly HashSet<string> HttpMethodAttributes = new(StringComparer.Ordinal)
    {
        "HttpGetAttribute",
        "HttpPostAttribute",
        "HttpPutAttribute",
        "HttpDeleteAttribute",
        "HttpPatchAttribute",
        "HttpHeadAttribute",
        "HttpOptionsAttribute",
        "RouteAttribute"
    };

    /// <summary>
    /// Returns whether <paramref name="method"/> is an entrypoint and, if so, which kind.
    /// Null input is treated as "not an entrypoint" rather than throwing.
    /// </summary>
    public static (bool IsEntrypoint, EntrypointType? Type) IsEntrypoint(IMethodSymbol method)
    {
        if (method is null)
        {
            return (false, null);
        }

        // Program entry: static Main.
        if (method.IsStatic && method.Name == "Main" && method.ContainingType is not null)
        {
            return (true, EntrypointType.CliCommand);
        }

        // ASP.NET HTTP verb / route attributes on the method itself.
        foreach (var attribute in method.GetAttributes())
        {
            var name = attribute.AttributeClass?.Name;
            if (name is not null && HttpMethodAttributes.Contains(name))
            {
                return (true, EntrypointType.HttpHandler);
            }
        }

        if (method.ContainingType is not null)
        {
            var type = method.ContainingType;

            // Hosted services: StartAsync/ExecuteAsync on IHostedService or BackgroundService.
            if (type.AllInterfaces.Any(i => i.ToDisplayString() == "Microsoft.Extensions.Hosting.IHostedService")
                || DerivesFrom(type, "Microsoft.Extensions.Hosting.BackgroundService"))
            {
                if (method.Name is "StartAsync" or "ExecuteAsync")
                {
                    return (true, EntrypointType.BackgroundJob);
                }
            }

            // gRPC services: any public method on a BindableService-derived type.
            if (DerivesFrom(type, "Grpc.Core.BindableService") || DerivesFrom(type, "Grpc.AspNetCore.Server.BindableService"))
            {
                if (method.DeclaredAccessibility == Accessibility.Public)
                {
                    return (true, EntrypointType.GrpcMethod);
                }
            }
        }

        return (false, null);
    }

    // Walks the base-type chain looking for an exact fully-qualified name match.
    private static bool DerivesFrom(INamedTypeSymbol type, string fullName)
    {
        var current = type.BaseType;
        while (current is not null)
        {
            if (current.ToDisplayString() == fullName)
            {
                return true;
            }
            current = current.BaseType;
        }
        return false;
    }
}

View File

@@ -0,0 +1,14 @@
namespace StellaOps.Scanner.CallGraph;
/// <summary>
/// Input for one call-graph extraction run.
/// </summary>
/// <param name="ScanId">Identifier of the scan this graph belongs to.</param>
/// <param name="Language">Language key used to select an extractor (e.g. "dotnet", "node").</param>
/// <param name="TargetPath">File or directory to analyze; interpretation is extractor-specific.</param>
public sealed record CallGraphExtractionRequest(
    string ScanId,
    string Language,
    string TargetPath);
/// <summary>
/// Produces a deterministic call-graph snapshot for a single language.
/// </summary>
public interface ICallGraphExtractor
{
    /// <summary>Language key this extractor handles; matched against <see cref="CallGraphExtractionRequest.Language"/>.</summary>
    string Language { get; }

    /// <summary>Extracts a digest-stamped call-graph snapshot for <paramref name="request"/>.</summary>
    Task<CallGraphSnapshot> ExtractAsync(CallGraphExtractionRequest request, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,212 @@
using System.Collections.Immutable;
using System.Text.Json;
using StellaOps.Scanner.Reachability;
namespace StellaOps.Scanner.CallGraph.Node;
/// <summary>
/// Placeholder Node.js call graph extractor.
/// Babel-based static extraction is planned; this implementation replays a recorded
/// trace document (outputs/traces/traces.json) when one is present, and otherwise
/// returns an empty — but still digest-stamped — snapshot.
/// </summary>
public sealed class NodeCallGraphExtractor : ICallGraphExtractor
{
    private readonly TimeProvider _timeProvider;

    // Web defaults: camelCase property matching for the trace document.
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web);

    /// <summary>Creates the extractor; <paramref name="timeProvider"/> is injectable for tests.</summary>
    public NodeCallGraphExtractor(TimeProvider? timeProvider = null)
    {
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public string Language => "node";

    /// <inheritdoc />
    public async Task<CallGraphSnapshot> ExtractAsync(CallGraphExtractionRequest request, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        if (!string.Equals(request.Language, Language, StringComparison.OrdinalIgnoreCase))
        {
            throw new ArgumentException($"Expected language '{Language}', got '{request.Language}'.", nameof(request));
        }

        var tracePath = ResolveTracePath(request.TargetPath);
        if (tracePath is not null && File.Exists(tracePath))
        {
            try
            {
                await using var stream = File.OpenRead(tracePath);
                var trace = await JsonSerializer.DeserializeAsync<TraceDocument>(stream, JsonOptions, cancellationToken).ConfigureAwait(false);
                if (trace is not null)
                {
                    return BuildFromTrace(request.ScanId, trace);
                }
            }
            catch (Exception ex) when (ex is IOException or JsonException)
            {
                // Unreadable or malformed trace: fall through to the empty snapshot.
            }
        }

        var extractedAt = _timeProvider.GetUtcNow();
        var provisional = new CallGraphSnapshot(
            ScanId: request.ScanId,
            GraphDigest: string.Empty,
            Language: Language,
            ExtractedAt: extractedAt,
            Nodes: ImmutableArray<CallGraphNode>.Empty,
            Edges: ImmutableArray<CallGraphEdge>.Empty,
            EntrypointIds: ImmutableArray<string>.Empty,
            SinkIds: ImmutableArray<string>.Empty);
        var digest = CallGraphDigests.ComputeGraphDigest(provisional);
        return provisional with { GraphDigest = digest };
    }

    /// <summary>
    /// Converts a linear trace (entry + ordered path of symbols) into a snapshot:
    /// one entrypoint node, one node per path symbol, and a direct edge between
    /// consecutive elements.
    /// </summary>
    private CallGraphSnapshot BuildFromTrace(string scanId, TraceDocument trace)
    {
        var extractedAt = _timeProvider.GetUtcNow();
        var nodes = new List<CallGraphNode>();
        var edges = new List<CallGraphEdge>();

        var entrySymbol = trace.Entry?.Trim() ?? "unknown_entry";
        var entryId = CallGraphNodeIds.Compute($"node:entry:{entrySymbol}");
        nodes.Add(new CallGraphNode(
            NodeId: entryId,
            Symbol: entrySymbol,
            File: string.Empty,
            Line: 0,
            Package: "app",
            Visibility: Visibility.Public,
            IsEntrypoint: true,
            EntrypointType: EntrypointType.HttpHandler,
            IsSink: false,
            SinkCategory: null));

        var path = trace.Path ?? Array.Empty<string>();
        var previousId = entryId;
        foreach (var raw in path)
        {
            var symbol = raw?.Trim() ?? string.Empty;
            if (string.IsNullOrWhiteSpace(symbol))
            {
                continue;
            }

            var nodeId = CallGraphNodeIds.Compute($"node:{symbol}");
            var (file, line) = ParseFileLine(symbol);
            var sink = SinkRegistry.MatchSink("node", symbol);
            nodes.Add(new CallGraphNode(
                NodeId: nodeId,
                Symbol: symbol,
                File: file,
                Line: line,
                Package: "app",
                Visibility: Visibility.Public,
                IsEntrypoint: false,
                EntrypointType: null,
                IsSink: sink is not null,
                SinkCategory: sink?.Category));
            edges.Add(new CallGraphEdge(previousId, nodeId, CallKind.Direct));
            previousId = nodeId;
        }

        // De-duplicate (first occurrence wins) and sort for determinism.
        var distinctNodes = nodes
            .GroupBy(n => n.NodeId, StringComparer.Ordinal)
            .Select(g => g.First())
            .OrderBy(n => n.NodeId, StringComparer.Ordinal)
            .ToImmutableArray();
        var distinctEdges = edges
            .Distinct(CallGraphEdgeStructuralComparer.Instance)
            .OrderBy(e => e.SourceId, StringComparer.Ordinal)
            .ThenBy(e => e.TargetId, StringComparer.Ordinal)
            .ToImmutableArray();
        var sinkIds = distinctNodes
            .Where(n => n.IsSink)
            .Select(n => n.NodeId)
            .OrderBy(id => id, StringComparer.Ordinal)
            .ToImmutableArray();

        var provisional = new CallGraphSnapshot(
            ScanId: scanId,
            GraphDigest: string.Empty,
            Language: Language,
            ExtractedAt: extractedAt,
            Nodes: distinctNodes,
            Edges: distinctEdges,
            EntrypointIds: ImmutableArray.Create(entryId),
            SinkIds: sinkIds);
        return provisional with { GraphDigest = CallGraphDigests.ComputeGraphDigest(provisional) };
    }

    /// <summary>
    /// Extracts a file name from a trace symbol. Common benchmark shape:
    /// "app.js:handleRequest" or "app.js::createServer". Line numbers are not
    /// present in this shape, so 0 is returned.
    /// </summary>
    private static (string File, int Line) ParseFileLine(string symbol)
    {
        // Look for a ".js" occurrence that is a real extension boundary (followed by
        // end-of-string or a non-alphanumeric separator) so that e.g. "data.json:load"
        // is not truncated to "data.js".
        var searchFrom = 0;
        while (searchFrom < symbol.Length)
        {
            var idx = symbol.IndexOf(".js", searchFrom, StringComparison.OrdinalIgnoreCase);
            if (idx < 0)
            {
                break;
            }
            var end = idx + 3;
            if (end >= symbol.Length || !char.IsLetterOrDigit(symbol[end]))
            {
                var file = symbol[..end].Replace('\\', '/');
                return (file, 0);
            }
            searchFrom = idx + 1;
        }
        return (string.Empty, 0);
    }

    /// <summary>
    /// Resolves the trace document: either the target path itself (if it is a file)
    /// or "outputs/traces/traces.json" under a target directory.
    /// </summary>
    private static string? ResolveTracePath(string targetPath)
    {
        if (string.IsNullOrWhiteSpace(targetPath))
        {
            return null;
        }
        var path = Path.GetFullPath(targetPath);
        if (File.Exists(path))
        {
            return path;
        }
        if (Directory.Exists(path))
        {
            var candidate = Path.Combine(path, "outputs", "traces", "traces.json");
            if (File.Exists(candidate))
            {
                return candidate;
            }
        }
        return null;
    }

    // Properties are nullable: the trace JSON is external input and any field may be
    // missing; BuildFromTrace null-guards each one.
    private sealed record TraceDocument(string? Entry, string[]? Path, string? Sink, string? Notes);

    /// <summary>
    /// Edge equality ignoring the call site (source, target, kind only) — trace
    /// replays carry no call-site info.
    /// </summary>
    private sealed class CallGraphEdgeStructuralComparer : IEqualityComparer<CallGraphEdge>
    {
        public static readonly CallGraphEdgeStructuralComparer Instance = new();

        public bool Equals(CallGraphEdge? x, CallGraphEdge? y)
        {
            if (ReferenceEquals(x, y))
            {
                return true;
            }
            if (x is null || y is null)
            {
                return false;
            }
            return string.Equals(x.SourceId, y.SourceId, StringComparison.Ordinal)
                && string.Equals(x.TargetId, y.TargetId, StringComparison.Ordinal)
                && x.CallKind == y.CallKind;
        }

        public int GetHashCode(CallGraphEdge obj)
            => HashCode.Combine(obj.SourceId, obj.TargetId, obj.CallKind);
    }
}

View File

@@ -0,0 +1,367 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Scanner.Reachability;
namespace StellaOps.Scanner.CallGraph;
/// <summary>
/// Immutable, digest-stamped call graph for one scan/language pair.
/// <see cref="GraphDigest"/> is computed over the trimmed/sorted content
/// (see <see cref="CallGraphDigests.ComputeGraphDigest"/>).
/// </summary>
public sealed record CallGraphSnapshot(
    [property: JsonPropertyName("scanId")] string ScanId,
    [property: JsonPropertyName("graphDigest")] string GraphDigest,
    [property: JsonPropertyName("language")] string Language,
    [property: JsonPropertyName("extractedAt")] DateTimeOffset ExtractedAt,
    [property: JsonPropertyName("nodes")] ImmutableArray<CallGraphNode> Nodes,
    [property: JsonPropertyName("edges")] ImmutableArray<CallGraphEdge> Edges,
    [property: JsonPropertyName("entrypointIds")] ImmutableArray<string> EntrypointIds,
    [property: JsonPropertyName("sinkIds")] ImmutableArray<string> SinkIds)
{
    /// <summary>
    /// Canonical form used for hashing: whitespace-trimmed strings, blank/default
    /// entries dropped, and every collection ordinally sorted (id lists are also
    /// de-duplicated) so equal graphs serialize identically.
    /// </summary>
    public CallGraphSnapshot Trimmed()
    {
        // Default (uninitialized) ImmutableArray values are normalized to empty first.
        var nodes = (Nodes.IsDefault ? ImmutableArray<CallGraphNode>.Empty : Nodes)
            .Where(n => !string.IsNullOrWhiteSpace(n.NodeId))
            .Select(n => n.Trimmed())
            .OrderBy(n => n.NodeId, StringComparer.Ordinal)
            .ToImmutableArray();
        var edges = (Edges.IsDefault ? ImmutableArray<CallGraphEdge>.Empty : Edges)
            .Where(e => !string.IsNullOrWhiteSpace(e.SourceId) && !string.IsNullOrWhiteSpace(e.TargetId))
            .Select(e => e.Trimmed())
            .OrderBy(e => e.SourceId, StringComparer.Ordinal)
            .ThenBy(e => e.TargetId, StringComparer.Ordinal)
            .ThenBy(e => e.CallKind.ToString(), StringComparer.Ordinal)
            .ThenBy(e => e.CallSite ?? string.Empty, StringComparer.Ordinal)
            .ToImmutableArray();
        var entrypoints = (EntrypointIds.IsDefault ? ImmutableArray<string>.Empty : EntrypointIds)
            .Where(id => !string.IsNullOrWhiteSpace(id))
            .Select(id => id.Trim())
            .Distinct(StringComparer.Ordinal)
            .OrderBy(id => id, StringComparer.Ordinal)
            .ToImmutableArray();
        var sinks = (SinkIds.IsDefault ? ImmutableArray<string>.Empty : SinkIds)
            .Where(id => !string.IsNullOrWhiteSpace(id))
            .Select(id => id.Trim())
            .Distinct(StringComparer.Ordinal)
            .OrderBy(id => id, StringComparer.Ordinal)
            .ToImmutableArray();
        return this with
        {
            ScanId = ScanId?.Trim() ?? string.Empty,
            GraphDigest = GraphDigest?.Trim() ?? string.Empty,
            Language = Language?.Trim() ?? string.Empty,
            Nodes = nodes,
            Edges = edges,
            EntrypointIds = entrypoints,
            SinkIds = sinks
        };
    }
}
/// <summary>
/// One symbol in the call graph. <see cref="NodeId"/> is a content-addressed hash
/// of the stable symbol id (see <see cref="CallGraphNodeIds.Compute"/>);
/// <see cref="File"/>/<see cref="Line"/> are empty/0 when no source location is known.
/// </summary>
public sealed record CallGraphNode(
    [property: JsonPropertyName("nodeId")] string NodeId,
    [property: JsonPropertyName("symbol")] string Symbol,
    [property: JsonPropertyName("file")] string File,
    [property: JsonPropertyName("line")] int Line,
    [property: JsonPropertyName("package")] string Package,
    [property: JsonPropertyName("visibility")] Visibility Visibility,
    [property: JsonPropertyName("isEntrypoint")] bool IsEntrypoint,
    [property: JsonPropertyName("entrypointType")] EntrypointType? EntrypointType,
    [property: JsonPropertyName("isSink")] bool IsSink,
    [property: JsonPropertyName("sinkCategory")] SinkCategory? SinkCategory)
{
    /// <summary>Returns a copy with string fields whitespace-trimmed (null becomes empty).</summary>
    public CallGraphNode Trimmed()
        => this with
        {
            NodeId = NodeId?.Trim() ?? string.Empty,
            Symbol = Symbol?.Trim() ?? string.Empty,
            File = File?.Trim() ?? string.Empty,
            Package = Package?.Trim() ?? string.Empty
        };
}
/// <summary>
/// A directed call edge between two node ids; <see cref="CallSite"/> is an optional
/// "file:line" marker of where the call occurs.
/// </summary>
public sealed record CallGraphEdge(
    [property: JsonPropertyName("sourceId")] string SourceId,
    [property: JsonPropertyName("targetId")] string TargetId,
    [property: JsonPropertyName("callKind")] CallKind CallKind,
    [property: JsonPropertyName("callSite")] string? CallSite = null)
{
    /// <summary>Returns a copy with ids trimmed and a blank call site normalized to null.</summary>
    public CallGraphEdge Trimmed()
        => this with
        {
            SourceId = SourceId?.Trim() ?? string.Empty,
            TargetId = TargetId?.Trim() ?? string.Empty,
            CallSite = string.IsNullOrWhiteSpace(CallSite) ? null : CallSite.Trim()
        };
}
/// <summary>Declared accessibility of a call-graph node's symbol; serialized as a string.</summary>
[JsonConverter(typeof(JsonStringEnumConverter<Visibility>))]
public enum Visibility
{
    Public,
    Internal,
    Protected,
    Private
}
/// <summary>How a call edge dispatches at the call site; serialized as a string.</summary>
[JsonConverter(typeof(JsonStringEnumConverter<CallKind>))]
public enum CallKind
{
    /// <summary>Statically bound call.</summary>
    Direct,
    /// <summary>Virtual/abstract/override dispatch — runtime target may differ.</summary>
    Virtual,
    /// <summary>Delegate invocation.</summary>
    Delegate,
    Reflection,
    Dynamic
}
/// <summary>Category of an entrypoint node (how execution can reach it from outside); serialized as a string.</summary>
[JsonConverter(typeof(JsonStringEnumConverter<EntrypointType>))]
public enum EntrypointType
{
    HttpHandler,
    GrpcMethod,
    CliCommand,
    BackgroundJob,
    ScheduledJob,
    MessageHandler,
    EventSubscriber,
    WebSocketHandler,
    Unknown
}
/// <summary>
/// Canonical-JSON SHA-256 digests for call-graph snapshots and reachability results.
/// Payloads are written with a fixed key order over trimmed/sorted input, so equal
/// content always produces the same "sha256:&lt;hex&gt;" digest.
/// </summary>
public static class CallGraphDigests
{
    // Deterministic writer settings: compact output, relaxed escaping so the same
    // characters always serialize to the same bytes.
    private static readonly JsonWriterOptions CanonicalJsonOptions = new()
    {
        Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
        Indented = false,
        SkipValidation = false
    };

    /// <summary>
    /// Digest over a snapshot's canonical payload. ScanId, ExtractedAt, and the
    /// GraphDigest field itself are deliberately excluded so re-extractions of the
    /// same code produce the same digest.
    /// </summary>
    public static string ComputeGraphDigest(CallGraphSnapshot snapshot)
    {
        ArgumentNullException.ThrowIfNull(snapshot);
        var trimmed = snapshot.Trimmed();
        return HashCanonicalPayload(writer => WriteDigestPayload(writer, trimmed));
    }

    /// <summary>
    /// Digest over a reachability result's canonical payload. ScanId, ComputedAt,
    /// and ResultDigest are excluded for the same replayability reason.
    /// </summary>
    public static string ComputeResultDigest(ReachabilityAnalysisResult result)
    {
        ArgumentNullException.ThrowIfNull(result);
        var trimmed = result.Trimmed();
        return HashCanonicalPayload(writer => WriteDigestPayload(writer, trimmed));
    }

    // Shared serialize-then-hash step for both digest kinds: stream the payload
    // through a canonical Utf8JsonWriter, then SHA-256 the buffered bytes.
    private static string HashCanonicalPayload(Action<Utf8JsonWriter> writePayload)
    {
        using var buffer = new MemoryStream(capacity: 64 * 1024);
        using (var writer = new Utf8JsonWriter(buffer, CanonicalJsonOptions))
        {
            writePayload(writer);
            writer.Flush();
        }
        var hash = SHA256.HashData(buffer.ToArray());
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    private static void WriteDigestPayload(Utf8JsonWriter writer, CallGraphSnapshot snapshot)
    {
        writer.WriteStartObject();
        // Schema tag versions the payload shape; changing the shape requires a bump.
        writer.WriteString("schema", "stellaops.callgraph@v1");
        writer.WriteString("language", snapshot.Language);
        writer.WritePropertyName("nodes");
        writer.WriteStartArray();
        foreach (var node in snapshot.Nodes)
        {
            writer.WriteStartObject();
            writer.WriteString("nodeId", node.NodeId);
            writer.WriteString("symbol", node.Symbol);
            writer.WriteString("file", node.File);
            writer.WriteNumber("line", node.Line);
            writer.WriteString("package", node.Package);
            writer.WriteString("visibility", node.Visibility.ToString());
            writer.WriteBoolean("isEntrypoint", node.IsEntrypoint);
            // Optional fields are omitted (not written as null) to keep bytes stable.
            if (node.EntrypointType is not null)
            {
                writer.WriteString("entrypointType", node.EntrypointType.Value.ToString());
            }
            writer.WriteBoolean("isSink", node.IsSink);
            if (node.SinkCategory is not null)
            {
                writer.WriteString("sinkCategory", node.SinkCategory.Value.ToString());
            }
            writer.WriteEndObject();
        }
        writer.WriteEndArray();
        writer.WritePropertyName("edges");
        writer.WriteStartArray();
        foreach (var edge in snapshot.Edges)
        {
            writer.WriteStartObject();
            writer.WriteString("sourceId", edge.SourceId);
            writer.WriteString("targetId", edge.TargetId);
            writer.WriteString("callKind", edge.CallKind.ToString());
            if (!string.IsNullOrWhiteSpace(edge.CallSite))
            {
                writer.WriteString("callSite", edge.CallSite);
            }
            writer.WriteEndObject();
        }
        writer.WriteEndArray();
        writer.WritePropertyName("entrypointIds");
        writer.WriteStartArray();
        foreach (var id in snapshot.EntrypointIds)
        {
            writer.WriteStringValue(id);
        }
        writer.WriteEndArray();
        writer.WritePropertyName("sinkIds");
        writer.WriteStartArray();
        foreach (var id in snapshot.SinkIds)
        {
            writer.WriteStringValue(id);
        }
        writer.WriteEndArray();
        writer.WriteEndObject();
    }

    private static void WriteDigestPayload(Utf8JsonWriter writer, ReachabilityAnalysisResult result)
    {
        writer.WriteStartObject();
        writer.WriteString("schema", "stellaops.reachability@v1");
        // Binds the result digest to the specific graph it was computed from.
        writer.WriteString("graphDigest", result.GraphDigest);
        writer.WriteString("language", result.Language);
        writer.WritePropertyName("reachableNodeIds");
        writer.WriteStartArray();
        foreach (var id in result.ReachableNodeIds)
        {
            writer.WriteStringValue(id);
        }
        writer.WriteEndArray();
        writer.WritePropertyName("reachableSinkIds");
        writer.WriteStartArray();
        foreach (var id in result.ReachableSinkIds)
        {
            writer.WriteStringValue(id);
        }
        writer.WriteEndArray();
        writer.WritePropertyName("paths");
        writer.WriteStartArray();
        foreach (var path in result.Paths)
        {
            writer.WriteStartObject();
            writer.WriteString("entrypointId", path.EntrypointId);
            writer.WriteString("sinkId", path.SinkId);
            writer.WritePropertyName("nodeIds");
            writer.WriteStartArray();
            foreach (var nodeId in path.NodeIds)
            {
                writer.WriteStringValue(nodeId);
            }
            writer.WriteEndArray();
            writer.WriteEndObject();
        }
        writer.WriteEndArray();
        writer.WriteEndObject();
    }
}
/// <summary>
/// A witness path from an entrypoint node to a sink node;
/// <see cref="NodeIds"/> is the ordered sequence of nodes along the path.
/// </summary>
public sealed record ReachabilityPath(
    [property: JsonPropertyName("entrypointId")] string EntrypointId,
    [property: JsonPropertyName("sinkId")] string SinkId,
    [property: JsonPropertyName("nodeIds")] ImmutableArray<string> NodeIds)
{
    /// <summary>
    /// Returns a copy with trimmed strings and blank node ids dropped.
    /// NodeIds are NOT sorted or de-duplicated — path order is meaningful.
    /// </summary>
    public ReachabilityPath Trimmed()
    {
        var nodes = (NodeIds.IsDefault ? ImmutableArray<string>.Empty : NodeIds)
            .Where(id => !string.IsNullOrWhiteSpace(id))
            .Select(id => id.Trim())
            .ToImmutableArray();
        return this with
        {
            EntrypointId = EntrypointId?.Trim() ?? string.Empty,
            SinkId = SinkId?.Trim() ?? string.Empty,
            NodeIds = nodes
        };
    }
}
/// <summary>
/// Result of reachability analysis over a call-graph snapshot: the reachable
/// node/sink id sets plus entrypoint→sink witness paths, digest-stamped via
/// <see cref="CallGraphDigests.ComputeResultDigest"/>.
/// </summary>
public sealed record ReachabilityAnalysisResult(
    [property: JsonPropertyName("scanId")] string ScanId,
    [property: JsonPropertyName("graphDigest")] string GraphDigest,
    [property: JsonPropertyName("language")] string Language,
    [property: JsonPropertyName("computedAt")] DateTimeOffset ComputedAt,
    [property: JsonPropertyName("reachableNodeIds")] ImmutableArray<string> ReachableNodeIds,
    [property: JsonPropertyName("reachableSinkIds")] ImmutableArray<string> ReachableSinkIds,
    [property: JsonPropertyName("paths")] ImmutableArray<ReachabilityPath> Paths,
    [property: JsonPropertyName("resultDigest")] string ResultDigest)
{
    /// <summary>
    /// Canonical form used for hashing: trimmed strings, de-duplicated and ordinally
    /// sorted id sets, and paths ordered by sink id then entrypoint id.
    /// </summary>
    public ReachabilityAnalysisResult Trimmed()
    {
        var reachableNodes = (ReachableNodeIds.IsDefault ? ImmutableArray<string>.Empty : ReachableNodeIds)
            .Where(id => !string.IsNullOrWhiteSpace(id))
            .Select(id => id.Trim())
            .Distinct(StringComparer.Ordinal)
            .OrderBy(id => id, StringComparer.Ordinal)
            .ToImmutableArray();
        var reachableSinks = (ReachableSinkIds.IsDefault ? ImmutableArray<string>.Empty : ReachableSinkIds)
            .Where(id => !string.IsNullOrWhiteSpace(id))
            .Select(id => id.Trim())
            .Distinct(StringComparer.Ordinal)
            .OrderBy(id => id, StringComparer.Ordinal)
            .ToImmutableArray();
        // NOTE(review): paths that share both SinkId and EntrypointId keep their
        // incoming relative order (LINQ OrderBy is a stable sort) — confirm upstream
        // emits them in a deterministic order so the result digest is stable.
        var paths = (Paths.IsDefault ? ImmutableArray<ReachabilityPath>.Empty : Paths)
            .Select(p => p.Trimmed())
            .OrderBy(p => p.SinkId, StringComparer.Ordinal)
            .ThenBy(p => p.EntrypointId, StringComparer.Ordinal)
            .ToImmutableArray();
        return this with
        {
            ScanId = ScanId?.Trim() ?? string.Empty,
            GraphDigest = GraphDigest?.Trim() ?? string.Empty,
            Language = Language?.Trim() ?? string.Empty,
            ResultDigest = ResultDigest?.Trim() ?? string.Empty,
            ReachableNodeIds = reachableNodes,
            ReachableSinkIds = reachableSinks,
            Paths = paths
        };
    }
}
/// <summary>
/// Content-addressed node identifiers: "sha256:&lt;lowercase hex&gt;" of a stable,
/// language-prefixed symbol id.
/// </summary>
public static class CallGraphNodeIds
{
    /// <summary>
    /// Hashes a stable symbol id into a node id. Input is trimmed before hashing,
    /// so ids differing only in surrounding whitespace collide intentionally.
    /// </summary>
    /// <exception cref="ArgumentException">The symbol id is null, empty, or whitespace.</exception>
    public static string Compute(string stableSymbolId)
    {
        if (string.IsNullOrWhiteSpace(stableSymbolId))
        {
            throw new ArgumentException("Symbol id must be provided.", nameof(stableSymbolId));
        }
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(stableSymbolId.Trim()));
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>
    /// Builds the canonical "&lt;language&gt;:&lt;symbol&gt;" form fed into <see cref="Compute"/>.
    /// Guards replace the previous NullReferenceException on null input with
    /// ArgumentNullException/ArgumentException.
    /// </summary>
    public static string StableSymbolId(string language, string symbol)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(language);
        ArgumentException.ThrowIfNullOrWhiteSpace(symbol);
        return $"{language.Trim().ToLowerInvariant()}:{symbol.Trim()}";
    }
}

View File

@@ -0,0 +1,26 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<GenerateAssemblyInfo>false</GenerateAssemblyInfo>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Build.Locator" Version="1.10.0" />
<PackageReference Include="Microsoft.CodeAnalysis.CSharp.Workspaces" Version="4.14.0" />
<PackageReference Include="Microsoft.CodeAnalysis.Workspaces.MSBuild" Version="4.14.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="StackExchange.Redis" Version="2.8.37" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\\StellaOps.Scanner.Reachability\\StellaOps.Scanner.Reachability.csproj" />
</ItemGroup>
</Project>

View File

@@ -7,6 +7,7 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Replay.Core;
namespace StellaOps.Scanner.Core;
@@ -60,15 +61,14 @@ public sealed record ScanManifest(
/// <summary>
/// Serialize to canonical JSON (for hashing).
/// </summary>
public string ToCanonicalJson() => JsonSerializer.Serialize(this, CanonicalJsonOptions);
public string ToCanonicalJson() => CanonicalJson.Serialize(this);
/// <summary>
/// Compute the SHA-256 hash of the canonical JSON representation.
/// </summary>
public string ComputeHash()
{
var json = ToCanonicalJson();
var bytes = System.Text.Encoding.UTF8.GetBytes(json);
var bytes = CanonicalJson.SerializeToUtf8Bytes(this);
var hash = System.Security.Cryptography.SHA256.HashData(bytes);
return $"sha256:{Convert.ToHexStringLower(hash)}";
}

View File

@@ -7,6 +7,8 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Security.Cryptography;
using StellaOps.Replay.Core;
using StellaOps.Scanner.ProofSpine;
namespace StellaOps.Scanner.Core;
@@ -88,11 +90,18 @@ public sealed record ManifestVerificationResult(
public sealed class ScanManifestSigner : IScanManifestSigner
{
private readonly IDsseSigningService _dsseSigningService;
private readonly ICryptoProfile _cryptoProfile;
private readonly TimeProvider _timeProvider;
private const string PredicateType = "scanmanifest.stella/v1";
public ScanManifestSigner(IDsseSigningService dsseSigningService)
public ScanManifestSigner(
IDsseSigningService dsseSigningService,
ICryptoProfile cryptoProfile,
TimeProvider timeProvider)
{
_dsseSigningService = dsseSigningService ?? throw new ArgumentNullException(nameof(dsseSigningService));
_cryptoProfile = cryptoProfile ?? throw new ArgumentNullException(nameof(cryptoProfile));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
}
/// <inheritdoc />
@@ -101,20 +110,20 @@ public sealed class ScanManifestSigner : IScanManifestSigner
ArgumentNullException.ThrowIfNull(manifest);
var manifestHash = manifest.ComputeHash();
var manifestJson = manifest.ToCanonicalJson();
var manifestBytes = System.Text.Encoding.UTF8.GetBytes(manifestJson);
// Create DSSE envelope
var envelope = await _dsseSigningService.SignAsync(
payload: manifest,
payloadType: PredicateType,
payload: manifestBytes,
cancellationToken);
cryptoProfile: _cryptoProfile,
cancellationToken: cancellationToken)
.ConfigureAwait(false);
return new SignedScanManifest(
Manifest: manifest,
ManifestHash: manifestHash,
Envelope: envelope,
SignedAt: DateTimeOffset.UtcNow);
SignedAt: _timeProvider.GetUtcNow());
}
/// <inheritdoc />
@@ -125,31 +134,93 @@ public sealed class ScanManifestSigner : IScanManifestSigner
try
{
// Verify DSSE signature
var verifyResult = await _dsseSigningService.VerifyAsync(signedManifest.Envelope, cancellationToken);
if (!verifyResult)
var verifyResult = await _dsseSigningService.VerifyAsync(signedManifest.Envelope, cancellationToken)
.ConfigureAwait(false);
if (!verifyResult.IsValid)
{
return ManifestVerificationResult.Failure("DSSE signature verification failed");
return new ManifestVerificationResult(
IsValid: false,
Manifest: null,
VerifiedAt: _timeProvider.GetUtcNow(),
ErrorMessage: verifyResult.FailureReason ?? "DSSE signature verification failed");
}
// Verify payload type
if (signedManifest.Envelope.PayloadType != PredicateType)
{
return ManifestVerificationResult.Failure($"Unexpected payload type: {signedManifest.Envelope.PayloadType}");
return new ManifestVerificationResult(
IsValid: false,
Manifest: null,
VerifiedAt: _timeProvider.GetUtcNow(),
ErrorMessage: $"Unexpected payload type: {signedManifest.Envelope.PayloadType}");
}
// Verify manifest hash
var computedHash = signedManifest.Manifest.ComputeHash();
if (computedHash != signedManifest.ManifestHash)
{
return ManifestVerificationResult.Failure("Manifest hash mismatch");
return new ManifestVerificationResult(
IsValid: false,
Manifest: null,
VerifiedAt: _timeProvider.GetUtcNow(),
ErrorMessage: "Manifest hash mismatch");
}
var keyId = signedManifest.Envelope.Signatures.FirstOrDefault()?.Keyid;
return ManifestVerificationResult.Success(signedManifest.Manifest, keyId);
if (!TryDecodeBase64(signedManifest.Envelope.Payload, out var payloadBytes))
{
return new ManifestVerificationResult(
IsValid: false,
Manifest: null,
VerifiedAt: _timeProvider.GetUtcNow(),
ErrorMessage: "Envelope payload is not valid base64");
}
var canonicalBytes = CanonicalJson.SerializeToUtf8Bytes(signedManifest.Manifest);
if (!CryptographicOperations.FixedTimeEquals(payloadBytes, canonicalBytes))
{
return new ManifestVerificationResult(
IsValid: false,
Manifest: null,
VerifiedAt: _timeProvider.GetUtcNow(),
ErrorMessage: "Envelope payload does not match manifest payload");
}
var keyId = signedManifest.Envelope.Signatures.FirstOrDefault()?.KeyId;
return new ManifestVerificationResult(
IsValid: true,
Manifest: signedManifest.Manifest,
VerifiedAt: _timeProvider.GetUtcNow(),
ErrorMessage: null,
KeyId: keyId);
}
catch (Exception ex)
{
return ManifestVerificationResult.Failure($"Verification error: {ex.Message}");
return new ManifestVerificationResult(
IsValid: false,
Manifest: null,
VerifiedAt: _timeProvider.GetUtcNow(),
ErrorMessage: $"Verification error: {ex.Message}");
}
}
/// <summary>
/// Attempts to decode a base64 payload; returns <c>false</c> (with an empty
/// array) for null/blank or malformed input instead of throwing.
/// </summary>
private static bool TryDecodeBase64(string? value, out byte[] bytes)
{
    // Reject null/blank payloads before attempting any decode work.
    if (string.IsNullOrWhiteSpace(value))
    {
        bytes = Array.Empty<byte>();
        return false;
    }

    // Malformed base64 is an expected case here, so use the non-throwing
    // TryFromBase64String instead of catching FormatException.
    // value.Length * 3 / 4 is an upper bound on the decoded size (embedded
    // whitespace and padding only shrink the actual output).
    var buffer = new byte[(value.Length * 3) / 4];
    if (Convert.TryFromBase64String(value, buffer, out var written))
    {
        bytes = buffer[..written];
        return true;
    }

    bytes = Array.Empty<byte>();
    return false;
}
}

View File

@@ -16,5 +16,7 @@
<ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
<ProjectReference Include="../StellaOps.Scanner.ProofSpine/StellaOps.Scanner.ProofSpine.csproj" />
<ProjectReference Include="../../../Policy/__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />
</ItemGroup>
</Project>

View File

@@ -97,14 +97,14 @@ public static class GatePatterns
[
new GatePattern(@"@feature_flag", "Feature flag decorator", 0.90),
new GatePattern(@"ldclient\.variation", "LaunchDarkly Python", 0.95),
new GatePattern(@"os\.environ\.get\(['\"]FEATURE_", "Env feature flag", 0.70),
new GatePattern(@"os\.environ\.get\(['""]FEATURE_", "Env feature flag", 0.70),
new GatePattern(@"waffle\.flag_is_active", "Django Waffle", 0.90)
],
["go"] =
[
new GatePattern(@"unleash\.IsEnabled", "Unleash Go SDK", 0.95),
new GatePattern(@"ldclient\.BoolVariation", "LaunchDarkly Go", 0.95),
new GatePattern(@"os\.Getenv\(\"FEATURE_", "Env feature flag", 0.70)
new GatePattern(@"os\.Getenv\(""FEATURE_", "Env feature flag", 0.70)
],
["ruby"] =
[

View File

@@ -74,7 +74,11 @@ public static class ServiceCollectionExtensions
services.AddScoped<EntryTraceRepository>();
services.AddScoped<RubyPackageInventoryRepository>();
services.AddScoped<BunPackageInventoryRepository>();
services.TryAddSingleton<IClassificationHistoryRepository, ClassificationHistoryRepository>();
services.TryAddSingleton<IClassificationChangeTracker, ClassificationChangeTracker>();
services.AddScoped<IProofSpineRepository, PostgresProofSpineRepository>();
services.AddScoped<ICallGraphSnapshotRepository, PostgresCallGraphSnapshotRepository>();
services.AddScoped<IReachabilityResultRepository, PostgresReachabilityResultRepository>();
services.AddSingleton<IEntryTraceResultStore, EntryTraceResultStore>();
services.AddSingleton<IRubyPackageInventoryStore, RubyPackageInventoryStore>();
services.AddSingleton<IBunPackageInventoryStore, BunPackageInventoryStore>();

View File

@@ -0,0 +1,11 @@
-- Migration: 0059_scans_table
-- Sprint: SPRINT_3500_0002_0001_score_proofs_foundations (prereq)
-- Description: Minimal `scans` table required by score replay/proof bundle tables.
-- NOTE(review): deliberately minimal -- no tenant/ownership column here; confirm
-- follow-up migrations add that metadata before this table is queried directly.
CREATE TABLE IF NOT EXISTS scans (
-- Stable identifier that score replay / proof bundle tables reference.
scan_id UUID PRIMARY KEY,
-- Row creation time; the DESC index below serves newest-first listings.
created_at_utc TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Newest-first scan listings.
CREATE INDEX IF NOT EXISTS ix_scans_created_at
ON scans(created_at_utc DESC);

View File

@@ -0,0 +1,20 @@
-- Migration: 0065_unknowns_table
-- Sprint: SPRINT_3600_0002_0001 (foundation prerequisite)
-- Description: Minimal `unknowns` table required for containment/ranking follow-up migrations.
CREATE TABLE IF NOT EXISTS unknowns (
unknown_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-- Owning tenant. No row-level security is enabled in this migration, so callers
-- must filter on tenant_id themselves.
tenant_id UUID NOT NULL,
-- The (artifact, vulnerability, package) triple this "unknown" refers to.
artifact_digest TEXT NOT NULL,
vuln_id TEXT NOT NULL,
package_purl TEXT NOT NULL,
-- Ranking score; stays 0 until a ranking pass fills it in.
score DOUBLE PRECISION NOT NULL DEFAULT 0,
created_at_utc TIMESTAMPTZ NOT NULL DEFAULT NOW(),
-- NOTE(review): no trigger maintains updated_at_utc in this migration; confirm
-- application code sets it on every write.
updated_at_utc TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Tenant-scoped lookups by artifact.
CREATE INDEX IF NOT EXISTS ix_unknowns_tenant_artifact
ON unknowns(tenant_id, artifact_digest);
-- Newest-first listings.
CREATE INDEX IF NOT EXISTS ix_unknowns_created_at
ON unknowns(created_at_utc DESC);

View File

@@ -0,0 +1,18 @@
-- Migration: 0075_scan_findings_table
-- Sprint: Advisory-derived (EPSS Integration prerequisite)
-- Description: Minimal `scan_findings` table required for EPSS-at-scan evidence columns.
CREATE TABLE IF NOT EXISTS scan_findings (
finding_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-- NOTE(review): no foreign key to `scans` (migration 0059); confirm the omission
-- is intentional (e.g. for import/replication ordering).
scan_id UUID NOT NULL,
tenant_id UUID NOT NULL,
-- Vulnerability + package identity the finding is about.
vuln_id TEXT NOT NULL,
package_purl TEXT NOT NULL,
created_at_utc TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- All findings for one scan.
CREATE INDEX IF NOT EXISTS ix_scan_findings_scan_id
ON scan_findings(scan_id);
-- Tenant-scoped lookups by vulnerability (EPSS enrichment path).
CREATE INDEX IF NOT EXISTS ix_scan_findings_tenant_vuln
ON scan_findings(tenant_id, vuln_id);

View File

@@ -0,0 +1,78 @@
-- Call graph snapshots + reachability analysis results
-- Sprint: SPRINT_3600_0002_0001_call_graph_infrastructure
CREATE SCHEMA IF NOT EXISTS scanner;
-- -----------------------------------------------------------------------------
-- Table: scanner.call_graph_snapshots
-- Purpose: Cache call graph snapshots per scan/language for reachability drift.
-- -----------------------------------------------------------------------------
CREATE TABLE IF NOT EXISTS scanner.call_graph_snapshots (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
tenant_id UUID NOT NULL,
scan_id TEXT NOT NULL,
language TEXT NOT NULL,
-- Content digest of the extracted graph; part of the unique key below, so a
-- changed graph yields a new row rather than overwriting history.
graph_digest TEXT NOT NULL,
extracted_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
-- Denormalized counts so consumers can avoid parsing snapshot_json.
node_count INT NOT NULL,
edge_count INT NOT NULL,
entrypoint_count INT NOT NULL,
sink_count INT NOT NULL,
-- Full serialized snapshot payload.
snapshot_json JSONB NOT NULL,
CONSTRAINT call_graph_snapshot_unique_per_scan UNIQUE (tenant_id, scan_id, language, graph_digest)
);
CREATE INDEX IF NOT EXISTS idx_call_graph_snapshots_tenant_scan
ON scanner.call_graph_snapshots (tenant_id, scan_id, language);
CREATE INDEX IF NOT EXISTS idx_call_graph_snapshots_graph_digest
ON scanner.call_graph_snapshots (graph_digest);
-- BRIN suits append-mostly, time-ordered inserts at low index cost.
CREATE INDEX IF NOT EXISTS idx_call_graph_snapshots_extracted_at
ON scanner.call_graph_snapshots USING BRIN (extracted_at);
ALTER TABLE scanner.call_graph_snapshots ENABLE ROW LEVEL SECURITY;
DROP POLICY IF EXISTS call_graph_snapshots_tenant_isolation ON scanner.call_graph_snapshots;
-- NOTE(review): relies on scanner.current_tenant_id(), which this migration does
-- not define -- confirm an earlier migration creates it. With no WITH CHECK
-- clause, Postgres applies the USING expression to writes as well.
CREATE POLICY call_graph_snapshots_tenant_isolation ON scanner.call_graph_snapshots
USING (tenant_id = scanner.current_tenant_id());
COMMENT ON TABLE scanner.call_graph_snapshots IS 'Call graph snapshots per scan/language for reachability drift detection.';
-- -----------------------------------------------------------------------------
-- Table: scanner.reachability_results
-- Purpose: Cache reachability BFS results (reachable sinks + shortest paths).
-- -----------------------------------------------------------------------------
CREATE TABLE IF NOT EXISTS scanner.reachability_results (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
tenant_id UUID NOT NULL,
scan_id TEXT NOT NULL,
language TEXT NOT NULL,
graph_digest TEXT NOT NULL,
-- Digest of the analysis output; together with graph_digest it makes repeated
-- analysis runs idempotent (upsert on conflict).
result_digest TEXT NOT NULL,
computed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
reachable_node_count INT NOT NULL,
reachable_sink_count INT NOT NULL,
result_json JSONB NOT NULL,
CONSTRAINT reachability_result_unique_per_scan UNIQUE (tenant_id, scan_id, language, graph_digest, result_digest)
);
CREATE INDEX IF NOT EXISTS idx_reachability_results_tenant_scan
ON scanner.reachability_results (tenant_id, scan_id, language);
CREATE INDEX IF NOT EXISTS idx_reachability_results_graph_digest
ON scanner.reachability_results (graph_digest);
CREATE INDEX IF NOT EXISTS idx_reachability_results_computed_at
ON scanner.reachability_results USING BRIN (computed_at);
ALTER TABLE scanner.reachability_results ENABLE ROW LEVEL SECURITY;
DROP POLICY IF EXISTS reachability_results_tenant_isolation ON scanner.reachability_results;
CREATE POLICY reachability_results_tenant_isolation ON scanner.reachability_results
USING (tenant_id = scanner.current_tenant_id());
COMMENT ON TABLE scanner.reachability_results IS 'Reachability analysis results per scan/language with shortest paths.';

View File

@@ -0,0 +1,322 @@
-- Migration: 009_smart_diff_tables_search_path
-- Sprint: SPRINT_3500_0003_0001_smart_diff_detection
-- Task: SDIFF-DET-016 (follow-up)
-- Description: Ensure Smart-Diff tables/types live in the active schema (search_path) and align tenant context key with DataSourceBase (`app.tenant_id`).
-- =============================================================================
-- Enums for Smart-Diff (created in the active schema)
-- Each DO block traps duplicate_object so re-running the migration against a
-- schema that already has the type is a no-op (idempotent migrations).
-- =============================================================================
DO $$ BEGIN
CREATE TYPE vex_status_type AS ENUM (
'unknown',
'affected',
'not_affected',
'fixed',
'under_investigation'
);
EXCEPTION
WHEN duplicate_object THEN NULL;
END $$;
-- Outcome of a policy evaluation for a finding.
DO $$ BEGIN
CREATE TYPE policy_decision_type AS ENUM (
'allow',
'warn',
'block'
);
EXCEPTION
WHEN duplicate_object THEN NULL;
END $$;
-- Smart-Diff detection rules R1-R4 (see material_risk_changes table comment).
DO $$ BEGIN
CREATE TYPE detection_rule AS ENUM (
'R1_ReachabilityFlip',
'R2_VexFlip',
'R3_RangeBoundary',
'R4_IntelligenceFlip'
);
EXCEPTION
WHEN duplicate_object THEN NULL;
END $$;
-- Kinds of material change a diff can surface.
DO $$ BEGIN
CREATE TYPE material_change_type AS ENUM (
'reachability_flip',
'vex_flip',
'range_boundary',
'kev_added',
'kev_removed',
'epss_threshold',
'policy_flip'
);
EXCEPTION
WHEN duplicate_object THEN NULL;
END $$;
DO $$ BEGIN
CREATE TYPE risk_direction AS ENUM (
'increased',
'decreased',
'neutral'
);
EXCEPTION
WHEN duplicate_object THEN NULL;
END $$;
-- Justification labels for a not_affected status; values mirror the
-- CSAF/OpenVEX justification vocabulary.
DO $$ BEGIN
CREATE TYPE vex_justification AS ENUM (
'component_not_present',
'vulnerable_code_not_present',
'vulnerable_code_not_in_execute_path',
'vulnerable_code_cannot_be_controlled_by_adversary',
'inline_mitigations_already_exist'
);
EXCEPTION
WHEN duplicate_object THEN NULL;
END $$;
-- Reviewer decision on an auto-generated VEX candidate.
DO $$ BEGIN
CREATE TYPE vex_review_action AS ENUM (
'accept',
'reject',
'defer'
);
EXCEPTION
WHEN duplicate_object THEN NULL;
END $$;
-- =============================================================================
-- Table: risk_state_snapshots
-- Point-in-time risk state per (scan, vuln, purl). state_hash (SHA-256 of the
-- normalized state, per the COMMENT at the end of this migration) makes
-- snapshots comparable across scans for deterministic change detection.
-- =============================================================================
CREATE TABLE IF NOT EXISTS risk_state_snapshots (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
tenant_id UUID NOT NULL,
vuln_id TEXT NOT NULL,
purl TEXT NOT NULL,
scan_id TEXT NOT NULL,
captured_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
-- Nullable tri-state: NULL = reachability was not analyzed for this finding.
reachable BOOLEAN,
lattice_state TEXT,
vex_status vex_status_type NOT NULL DEFAULT 'unknown',
in_affected_range BOOLEAN,
-- Known-exploited (KEV) flag at capture time.
kev BOOLEAN NOT NULL DEFAULT FALSE,
-- EPSS probability with 4-decimal precision; NULL when no score was available.
epss_score NUMERIC(5, 4),
policy_flags TEXT[] DEFAULT '{}',
policy_decision policy_decision_type,
state_hash TEXT NOT NULL,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
-- One snapshot per finding per scan.
CONSTRAINT risk_state_unique_per_scan UNIQUE (tenant_id, scan_id, vuln_id, purl)
);
CREATE INDEX IF NOT EXISTS idx_risk_state_tenant_finding
ON risk_state_snapshots (tenant_id, vuln_id, purl);
CREATE INDEX IF NOT EXISTS idx_risk_state_scan
ON risk_state_snapshots (scan_id);
-- BRIN suits append-mostly, time-ordered inserts at low index cost.
CREATE INDEX IF NOT EXISTS idx_risk_state_captured_at
ON risk_state_snapshots USING BRIN (captured_at);
CREATE INDEX IF NOT EXISTS idx_risk_state_hash
ON risk_state_snapshots (state_hash);
-- =============================================================================
-- Table: material_risk_changes
-- One row per (scan, vuln, purl) diff outcome; `changes` holds the JSONB array
-- of DetectedChange records (see the COMMENT at the end of this migration).
-- =============================================================================
CREATE TABLE IF NOT EXISTS material_risk_changes (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
tenant_id UUID NOT NULL,
vuln_id TEXT NOT NULL,
purl TEXT NOT NULL,
scan_id TEXT NOT NULL,
has_material_change BOOLEAN NOT NULL DEFAULT FALSE,
priority_score NUMERIC(6, 4) NOT NULL DEFAULT 0,
-- state_hash values of the before/after risk_state_snapshots being compared.
previous_state_hash TEXT NOT NULL,
current_state_hash TEXT NOT NULL,
changes JSONB NOT NULL DEFAULT '[]',
detected_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
CONSTRAINT material_change_unique_per_scan UNIQUE (tenant_id, scan_id, vuln_id, purl)
);
CREATE INDEX IF NOT EXISTS idx_material_changes_tenant_scan
ON material_risk_changes (tenant_id, scan_id);
-- Partial index: only rows that actually changed, ordered for priority queues.
CREATE INDEX IF NOT EXISTS idx_material_changes_priority
ON material_risk_changes (priority_score DESC)
WHERE has_material_change = TRUE;
CREATE INDEX IF NOT EXISTS idx_material_changes_detected_at
ON material_risk_changes USING BRIN (detected_at);
-- GIN index supports containment queries into the JSONB change list.
CREATE INDEX IF NOT EXISTS idx_material_changes_changes_gin
ON material_risk_changes USING GIN (changes);
-- =============================================================================
-- Table: vex_candidates
-- Auto-generated VEX suggestions awaiting human review; candidates expire at
-- expires_at and carry their supporting evidence as JSONB links.
-- =============================================================================
CREATE TABLE IF NOT EXISTS vex_candidates (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
-- External, stable identifier (unique independently of the surrogate id).
candidate_id TEXT NOT NULL UNIQUE,
tenant_id UUID NOT NULL,
vuln_id TEXT NOT NULL,
purl TEXT NOT NULL,
image_digest TEXT NOT NULL,
suggested_status vex_status_type NOT NULL,
justification vex_justification NOT NULL,
rationale TEXT NOT NULL,
evidence_links JSONB NOT NULL DEFAULT '[]',
-- Generator confidence with 3-decimal precision.
confidence NUMERIC(4, 3) NOT NULL,
generated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
expires_at TIMESTAMPTZ NOT NULL,
requires_review BOOLEAN NOT NULL DEFAULT TRUE,
-- Review outcome fields; all NULL until a reviewer acts.
review_action vex_review_action,
reviewed_by TEXT,
reviewed_at TIMESTAMPTZ,
review_comment TEXT,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
CREATE INDEX IF NOT EXISTS idx_vex_candidates_tenant_image
ON vex_candidates (tenant_id, image_digest);
-- Partial index for the review queue, highest confidence first.
CREATE INDEX IF NOT EXISTS idx_vex_candidates_pending_review
ON vex_candidates (tenant_id, requires_review, confidence DESC)
WHERE requires_review = TRUE;
CREATE INDEX IF NOT EXISTS idx_vex_candidates_expires
ON vex_candidates (expires_at);
-- NOTE(review): the UNIQUE constraint on candidate_id already creates an index;
-- this explicit index is redundant -- consider dropping it.
CREATE INDEX IF NOT EXISTS idx_vex_candidates_candidate_id
ON vex_candidates (candidate_id);
CREATE INDEX IF NOT EXISTS idx_vex_candidates_evidence_gin
ON vex_candidates USING GIN (evidence_links);
-- =============================================================================
-- RLS Policies (tenant isolation via app.tenant_id)
-- =============================================================================
ALTER TABLE risk_state_snapshots ENABLE ROW LEVEL SECURITY;
ALTER TABLE material_risk_changes ENABLE ROW LEVEL SECURITY;
ALTER TABLE vex_candidates ENABLE ROW LEVEL SECURITY;
-- Resolves the tenant from the `app.tenant_id` session setting (the key used by
-- DataSourceBase). Returns NULL when the setting is absent or empty; a NULL
-- tenant makes every policy predicate below non-true, so no rows are visible or
-- writable (fail-closed).
CREATE OR REPLACE FUNCTION current_tenant_id()
RETURNS UUID AS $$
BEGIN
RETURN NULLIF(current_setting('app.tenant_id', TRUE), '')::UUID;
END;
$$ LANGUAGE plpgsql STABLE;
DROP POLICY IF EXISTS risk_state_tenant_isolation ON risk_state_snapshots;
CREATE POLICY risk_state_tenant_isolation ON risk_state_snapshots
FOR ALL
USING (tenant_id = current_tenant_id())
WITH CHECK (tenant_id = current_tenant_id());
DROP POLICY IF EXISTS material_changes_tenant_isolation ON material_risk_changes;
CREATE POLICY material_changes_tenant_isolation ON material_risk_changes
FOR ALL
USING (tenant_id = current_tenant_id())
WITH CHECK (tenant_id = current_tenant_id());
DROP POLICY IF EXISTS vex_candidates_tenant_isolation ON vex_candidates;
CREATE POLICY vex_candidates_tenant_isolation ON vex_candidates
FOR ALL
USING (tenant_id = current_tenant_id())
WITH CHECK (tenant_id = current_tenant_id());
-- =============================================================================
-- Helper Functions
-- =============================================================================
-- Material changes for one scan, highest priority first. Not SECURITY DEFINER,
-- so RLS on material_risk_changes still applies and results are scoped to the
-- caller's tenant context.
CREATE OR REPLACE FUNCTION get_material_changes_for_scan(
p_scan_id TEXT,
-- Optional floor on priority_score; NULL disables the filter.
p_min_priority NUMERIC DEFAULT NULL
)
RETURNS TABLE (
vuln_id TEXT,
purl TEXT,
priority_score NUMERIC,
changes JSONB
) AS $$
BEGIN
RETURN QUERY
SELECT
mc.vuln_id,
mc.purl,
mc.priority_score,
mc.changes
FROM material_risk_changes mc
WHERE mc.scan_id = p_scan_id
AND mc.has_material_change = TRUE
AND (p_min_priority IS NULL OR mc.priority_score >= p_min_priority)
ORDER BY mc.priority_score DESC;
END;
$$ LANGUAGE plpgsql STABLE;
-- Pending (unexpired, review-required) VEX candidates above a confidence floor,
-- optionally narrowed to a single image, highest confidence first.
CREATE OR REPLACE FUNCTION get_pending_vex_candidates(
p_image_digest TEXT DEFAULT NULL,
p_min_confidence NUMERIC DEFAULT 0.7,
p_limit INT DEFAULT 50
)
RETURNS TABLE (
candidate_id TEXT,
vuln_id TEXT,
purl TEXT,
image_digest TEXT,
suggested_status vex_status_type,
justification vex_justification,
rationale TEXT,
confidence NUMERIC,
evidence_links JSONB
) AS $$
BEGIN
RETURN QUERY
SELECT
vc.candidate_id,
vc.vuln_id,
vc.purl,
vc.image_digest,
vc.suggested_status,
vc.justification,
vc.rationale,
vc.confidence,
vc.evidence_links
FROM vex_candidates vc
WHERE vc.requires_review = TRUE
AND vc.expires_at > NOW()
AND vc.confidence >= p_min_confidence
AND (p_image_digest IS NULL OR vc.image_digest = p_image_digest)
ORDER BY vc.confidence DESC
LIMIT p_limit;
END;
$$ LANGUAGE plpgsql STABLE;
COMMENT ON TABLE risk_state_snapshots IS
'Point-in-time risk state snapshots for Smart-Diff change detection';
COMMENT ON TABLE material_risk_changes IS
'Detected material risk changes between scans (R1-R4 rules)';
COMMENT ON TABLE vex_candidates IS
'Auto-generated VEX candidates based on absent vulnerable APIs';
COMMENT ON COLUMN risk_state_snapshots.state_hash IS
'SHA-256 of normalized state for deterministic change detection';
COMMENT ON COLUMN material_risk_changes.changes IS
'JSONB array of DetectedChange records';
COMMENT ON COLUMN vex_candidates.evidence_links IS
'JSONB array of EvidenceLink records with type, uri, digest';

View File

@@ -10,4 +10,5 @@ internal static class MigrationIds
public const string ScoreReplayTables = "006_score_replay_tables.sql";
public const string UnknownsRankingContainment = "007_unknowns_ranking_containment.sql";
public const string EpssIntegration = "008_epss_integration.sql";
public const string CallGraphTables = "009_call_graph_tables.sql";
}

View File

@@ -0,0 +1,125 @@
using System.Text.Json;
using Dapper;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.CallGraph;
using StellaOps.Scanner.Storage.Repositories;

namespace StellaOps.Scanner.Storage.Postgres;

/// <summary>
/// PostgreSQL-backed store for call graph snapshots
/// (<c>scanner.call_graph_snapshots</c>); upserts are keyed by
/// (tenant, scan, language, graph digest).
/// </summary>
public sealed class PostgresCallGraphSnapshotRepository : ICallGraphSnapshotRepository
{
    // TODO(multi-tenancy): placeholder tenant until ambient tenant context is
    // plumbed through the data source; hoisted to a static field (parsed once)
    // for consistency with PostgresMaterialRiskChangeRepository.
    private static readonly Guid TenantId = Guid.Parse("00000000-0000-0000-0000-000000000001");

    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = false
    };

    private readonly ScannerDataSource _dataSource;
    private readonly ILogger<PostgresCallGraphSnapshotRepository> _logger;

    public PostgresCallGraphSnapshotRepository(
        ScannerDataSource dataSource,
        ILogger<PostgresCallGraphSnapshotRepository> logger)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Upserts <paramref name="snapshot"/>; a conflicting row (same
    /// tenant/scan/language/digest) has its counts, timestamp, and payload refreshed.
    /// </summary>
    public async Task StoreAsync(CallGraphSnapshot snapshot, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(snapshot);

        // Persist the trimmed form so the stored counts and JSON stay consistent
        // with each other.
        var trimmed = snapshot.Trimmed();

        const string sql = """
            INSERT INTO scanner.call_graph_snapshots (
                tenant_id,
                scan_id,
                language,
                graph_digest,
                extracted_at,
                node_count,
                edge_count,
                entrypoint_count,
                sink_count,
                snapshot_json
            ) VALUES (
                @TenantId,
                @ScanId,
                @Language,
                @GraphDigest,
                @ExtractedAt,
                @NodeCount,
                @EdgeCount,
                @EntrypointCount,
                @SinkCount,
                @SnapshotJson::jsonb
            )
            ON CONFLICT (tenant_id, scan_id, language, graph_digest) DO UPDATE SET
                extracted_at = EXCLUDED.extracted_at,
                node_count = EXCLUDED.node_count,
                edge_count = EXCLUDED.edge_count,
                entrypoint_count = EXCLUDED.entrypoint_count,
                sink_count = EXCLUDED.sink_count,
                snapshot_json = EXCLUDED.snapshot_json
            """;

        var json = JsonSerializer.Serialize(trimmed, JsonOptions);

        await using var connection = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
        await connection.ExecuteAsync(new CommandDefinition(sql, new
        {
            TenantId,
            ScanId = trimmed.ScanId,
            Language = trimmed.Language,
            GraphDigest = trimmed.GraphDigest,
            ExtractedAt = trimmed.ExtractedAt.UtcDateTime,
            NodeCount = trimmed.Nodes.Length,
            EdgeCount = trimmed.Edges.Length,
            EntrypointCount = trimmed.EntrypointIds.Length,
            SinkCount = trimmed.SinkIds.Length,
            SnapshotJson = json
        }, cancellationToken: ct)).ConfigureAwait(false);

        _logger.LogDebug(
            "Stored call graph snapshot scan={ScanId} lang={Language} nodes={Nodes} edges={Edges}",
            trimmed.ScanId,
            trimmed.Language,
            trimmed.Nodes.Length,
            trimmed.Edges.Length);
    }

    /// <summary>
    /// Returns the most recently extracted snapshot for the scan/language pair,
    /// or <c>null</c> when none has been stored.
    /// </summary>
    public async Task<CallGraphSnapshot?> TryGetLatestAsync(string scanId, string language, CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
        ArgumentException.ThrowIfNullOrWhiteSpace(language);

        const string sql = """
            SELECT snapshot_json
            FROM scanner.call_graph_snapshots
            WHERE tenant_id = @TenantId AND scan_id = @ScanId AND language = @Language
            ORDER BY extracted_at DESC
            LIMIT 1
            """;

        await using var connection = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
        var json = await connection.ExecuteScalarAsync<string?>(new CommandDefinition(sql, new
        {
            TenantId,
            ScanId = scanId,
            Language = language
        }, cancellationToken: ct)).ConfigureAwait(false);

        if (string.IsNullOrWhiteSpace(json))
        {
            return null;
        }

        return JsonSerializer.Deserialize<CallGraphSnapshot>(json, JsonOptions);
    }
}

View File

@@ -13,8 +13,15 @@ namespace StellaOps.Scanner.Storage.Postgres;
/// </summary>
public sealed class PostgresMaterialRiskChangeRepository : IMaterialRiskChangeRepository
{
private const string TenantContext = "00000000-0000-0000-0000-000000000001";
private static readonly Guid TenantId = Guid.Parse(TenantContext);
private readonly ScannerDataSource _dataSource;
private readonly ILogger<PostgresMaterialRiskChangeRepository> _logger;
private string SchemaName => _dataSource.SchemaName ?? ScannerDataSource.DefaultSchema;
private string MaterialRiskChangesTable => $"{SchemaName}.material_risk_changes";
private static readonly JsonSerializerOptions JsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
@@ -30,49 +37,58 @@ public sealed class PostgresMaterialRiskChangeRepository : IMaterialRiskChangeRe
public async Task StoreChangeAsync(MaterialRiskChangeResult change, string scanId, CancellationToken ct = default)
{
await using var connection = await _dataSource.OpenConnectionAsync(ct);
await InsertChangeAsync(connection, change, scanId, ct);
ArgumentNullException.ThrowIfNull(change);
ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
await InsertChangeAsync(connection, change, scanId.Trim(), ct).ConfigureAwait(false);
}
public async Task StoreChangesAsync(IReadOnlyList<MaterialRiskChangeResult> changes, string scanId, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(changes);
ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
if (changes.Count == 0)
return;
await using var connection = await _dataSource.OpenConnectionAsync(ct);
await using var transaction = await connection.BeginTransactionAsync(ct);
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(ct).ConfigureAwait(false);
try
{
foreach (var change in changes)
{
await InsertChangeAsync(connection, change, scanId, ct, transaction);
await InsertChangeAsync(connection, change, scanId.Trim(), ct, transaction).ConfigureAwait(false);
}
await transaction.CommitAsync(ct);
await transaction.CommitAsync(ct).ConfigureAwait(false);
_logger.LogDebug("Stored {Count} material risk changes for scan {ScanId}", changes.Count, scanId);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to store material risk changes for scan {ScanId}", scanId);
await transaction.RollbackAsync(ct);
await transaction.RollbackAsync(ct).ConfigureAwait(false);
throw;
}
}
public async Task<IReadOnlyList<MaterialRiskChangeResult>> GetChangesForScanAsync(string scanId, CancellationToken ct = default)
{
const string sql = """
ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
var sql = $"""
SELECT
vuln_id, purl, has_material_change, priority_score,
previous_state_hash, current_state_hash, changes
FROM scanner.material_risk_changes
WHERE scan_id = @ScanId
FROM {MaterialRiskChangesTable}
WHERE tenant_id = @TenantId
AND scan_id = @ScanId
ORDER BY priority_score DESC
""";
await using var connection = await _dataSource.OpenConnectionAsync(ct);
var rows = await connection.QueryAsync<MaterialRiskChangeRow>(sql, new { ScanId = scanId });
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
var rows = await connection.QueryAsync<MaterialRiskChangeRow>(sql, new { TenantId, ScanId = scanId.Trim() });
return rows.Select(r => r.ToResult()).ToList();
}
@@ -82,21 +98,27 @@ public sealed class PostgresMaterialRiskChangeRepository : IMaterialRiskChangeRe
int limit = 10,
CancellationToken ct = default)
{
const string sql = """
ArgumentNullException.ThrowIfNull(findingKey);
ArgumentOutOfRangeException.ThrowIfLessThan(limit, 1);
var sql = $"""
SELECT
vuln_id, purl, has_material_change, priority_score,
previous_state_hash, current_state_hash, changes
FROM scanner.material_risk_changes
WHERE vuln_id = @VulnId AND purl = @Purl
FROM {MaterialRiskChangesTable}
WHERE tenant_id = @TenantId
AND vuln_id = @VulnId
AND purl = @Purl
ORDER BY detected_at DESC
LIMIT @Limit
""";
await using var connection = await _dataSource.OpenConnectionAsync(ct);
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
var rows = await connection.QueryAsync<MaterialRiskChangeRow>(sql, new
{
TenantId,
VulnId = findingKey.VulnId,
Purl = findingKey.Purl,
Purl = findingKey.ComponentPurl,
Limit = limit
});
@@ -107,6 +129,8 @@ public sealed class PostgresMaterialRiskChangeRepository : IMaterialRiskChangeRe
MaterialRiskChangeQuery query,
CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(query);
var conditions = new List<string> { "has_material_change = TRUE" };
var parameters = new DynamicParameters();
@@ -134,17 +158,20 @@ public sealed class PostgresMaterialRiskChangeRepository : IMaterialRiskChangeRe
parameters.Add("MinPriority", query.MinPriorityScore.Value);
}
conditions.Add("tenant_id = @TenantId");
parameters.Add("TenantId", TenantId);
var whereClause = string.Join(" AND ", conditions);
// Count query
var countSql = $"SELECT COUNT(*) FROM scanner.material_risk_changes WHERE {whereClause}";
var countSql = $"SELECT COUNT(*) FROM {MaterialRiskChangesTable} WHERE {whereClause}";
// Data query
var dataSql = $"""
SELECT
vuln_id, purl, has_material_change, priority_score,
previous_state_hash, current_state_hash, changes
FROM scanner.material_risk_changes
FROM {MaterialRiskChangesTable}
WHERE {whereClause}
ORDER BY priority_score DESC
OFFSET @Offset LIMIT @Limit
@@ -153,7 +180,7 @@ public sealed class PostgresMaterialRiskChangeRepository : IMaterialRiskChangeRe
parameters.Add("Offset", query.Offset);
parameters.Add("Limit", query.Limit);
await using var connection = await _dataSource.OpenConnectionAsync(ct);
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
var totalCount = await connection.ExecuteScalarAsync<int>(countSql, parameters);
var rows = await connection.QueryAsync<MaterialRiskChangeRow>(dataSql, parameters);
@@ -167,15 +194,19 @@ public sealed class PostgresMaterialRiskChangeRepository : IMaterialRiskChangeRe
Limit: query.Limit);
}
private static async Task InsertChangeAsync(
private async Task InsertChangeAsync(
NpgsqlConnection connection,
MaterialRiskChangeResult change,
string scanId,
CancellationToken ct,
NpgsqlTransaction? transaction = null)
{
const string sql = """
INSERT INTO scanner.material_risk_changes (
ArgumentNullException.ThrowIfNull(connection);
ArgumentNullException.ThrowIfNull(change);
ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
var sql = $"""
INSERT INTO {MaterialRiskChangesTable} (
tenant_id, vuln_id, purl, scan_id,
has_material_change, priority_score,
previous_state_hash, current_state_hash, changes
@@ -192,14 +223,13 @@ public sealed class PostgresMaterialRiskChangeRepository : IMaterialRiskChangeRe
changes = EXCLUDED.changes
""";
var tenantId = GetCurrentTenantId();
var changesJson = JsonSerializer.Serialize(change.Changes, JsonOptions);
await connection.ExecuteAsync(new CommandDefinition(sql, new
{
TenantId = tenantId,
TenantId,
VulnId = change.FindingKey.VulnId,
Purl = change.FindingKey.Purl,
Purl = change.FindingKey.ComponentPurl,
ScanId = scanId,
HasMaterialChange = change.HasMaterialChange,
PriorityScore = change.PriorityScore,
@@ -209,11 +239,6 @@ public sealed class PostgresMaterialRiskChangeRepository : IMaterialRiskChangeRe
}, transaction: transaction, cancellationToken: ct));
}
private static Guid GetCurrentTenantId()
{
return Guid.Parse("00000000-0000-0000-0000-000000000001");
}
/// <summary>
/// Row mapping class for Dapper.
/// </summary>
@@ -236,7 +261,7 @@ public sealed class PostgresMaterialRiskChangeRepository : IMaterialRiskChangeRe
FindingKey: new FindingKey(vuln_id, purl),
HasMaterialChange: has_material_change,
Changes: [.. detectedChanges],
PriorityScore: (int)priority_score,
PriorityScore: (double)priority_score,
PreviousStateHash: previous_state_hash,
CurrentStateHash: current_state_hash);
}

View File

@@ -0,0 +1,119 @@
using System.Text.Json;
using Dapper;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.CallGraph;
using StellaOps.Scanner.Storage.Repositories;
namespace StellaOps.Scanner.Storage.Postgres;
public sealed class PostgresReachabilityResultRepository : IReachabilityResultRepository
{
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = false
};
private readonly ScannerDataSource _dataSource;
private readonly ILogger<PostgresReachabilityResultRepository> _logger;
public PostgresReachabilityResultRepository(
ScannerDataSource dataSource,
ILogger<PostgresReachabilityResultRepository> logger)
{
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task StoreAsync(ReachabilityAnalysisResult result, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(result);
var trimmed = result.Trimmed();
const string sql = """
INSERT INTO scanner.reachability_results (
tenant_id,
scan_id,
language,
graph_digest,
result_digest,
computed_at,
reachable_node_count,
reachable_sink_count,
result_json
) VALUES (
@TenantId,
@ScanId,
@Language,
@GraphDigest,
@ResultDigest,
@ComputedAt,
@ReachableNodeCount,
@ReachableSinkCount,
@ResultJson::jsonb
)
ON CONFLICT (tenant_id, scan_id, language, graph_digest, result_digest) DO UPDATE SET
computed_at = EXCLUDED.computed_at,
reachable_node_count = EXCLUDED.reachable_node_count,
reachable_sink_count = EXCLUDED.reachable_sink_count,
result_json = EXCLUDED.result_json
""";
var json = JsonSerializer.Serialize(trimmed, JsonOptions);
var tenantId = GetCurrentTenantId();
await using var connection = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
await connection.ExecuteAsync(new CommandDefinition(sql, new
{
TenantId = tenantId,
ScanId = trimmed.ScanId,
Language = trimmed.Language,
GraphDigest = trimmed.GraphDigest,
ResultDigest = trimmed.ResultDigest,
ComputedAt = trimmed.ComputedAt.UtcDateTime,
ReachableNodeCount = trimmed.ReachableNodeIds.Length,
ReachableSinkCount = trimmed.ReachableSinkIds.Length,
ResultJson = json
}, cancellationToken: ct)).ConfigureAwait(false);
_logger.LogDebug(
"Stored reachability result scan={ScanId} lang={Language} sinks={Sinks}",
trimmed.ScanId,
trimmed.Language,
trimmed.ReachableSinkIds.Length);
}
/// <summary>
/// Loads the most recently computed reachability result for the scan/language pair,
/// or <c>null</c> when no row exists.
/// </summary>
/// <param name="scanId">Scan identifier; must be non-empty.</param>
/// <param name="language">Analyzer language; must be non-empty.</param>
/// <param name="ct">Cancellation token.</param>
public async Task<ReachabilityAnalysisResult?> TryGetLatestAsync(string scanId, string language, CancellationToken ct = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
    ArgumentException.ThrowIfNullOrWhiteSpace(language);

    const string sql = """
        SELECT result_json
        FROM scanner.reachability_results
        WHERE tenant_id = @TenantId AND scan_id = @ScanId AND language = @Language
        ORDER BY computed_at DESC
        LIMIT 1
        """;

    await using var connection = await _dataSource.OpenConnectionAsync(ct).ConfigureAwait(false);
    var payload = await connection.ExecuteScalarAsync<string?>(new CommandDefinition(
        sql,
        new { TenantId = GetCurrentTenantId(), ScanId = scanId, Language = language },
        cancellationToken: ct)).ConfigureAwait(false);

    // An absent row (or an empty payload) both map to "no result yet".
    return string.IsNullOrWhiteSpace(payload)
        ? null
        : JsonSerializer.Deserialize<ReachabilityAnalysisResult>(payload, JsonOptions);
}
// NOTE(review): placeholder — presumably the tenant should come from ambient
// request context rather than a fixed default; confirm before multi-tenant rollout.
private static Guid GetCurrentTenantId()
    => new("00000000-0000-0000-0000-000000000001");
}

View File

@@ -1,6 +1,4 @@
using System.Collections.Immutable;
using System.Data;
using System.Text.Json;
using Dapper;
using Microsoft.Extensions.Logging;
using Npgsql;
@@ -9,14 +7,20 @@ using StellaOps.Scanner.SmartDiff.Detection;
namespace StellaOps.Scanner.Storage.Postgres;
/// <summary>
/// PostgreSQL implementation of IRiskStateRepository.
/// PostgreSQL implementation of <see cref="IRiskStateRepository"/>.
/// Per Sprint 3500.3 - Smart-Diff Detection Rules.
/// </summary>
public sealed class PostgresRiskStateRepository : IRiskStateRepository
{
private const string TenantContext = "00000000-0000-0000-0000-000000000001";
private static readonly Guid TenantId = Guid.Parse(TenantContext);
private readonly ScannerDataSource _dataSource;
private readonly ILogger<PostgresRiskStateRepository> _logger;
private string SchemaName => _dataSource.SchemaName ?? ScannerDataSource.DefaultSchema;
private string RiskStateSnapshotsTable => $"{SchemaName}.risk_state_snapshots";
public PostgresRiskStateRepository(
ScannerDataSource dataSource,
ILogger<PostgresRiskStateRepository> logger)
@@ -27,52 +31,63 @@ public sealed class PostgresRiskStateRepository : IRiskStateRepository
public async Task StoreSnapshotAsync(RiskStateSnapshot snapshot, CancellationToken ct = default)
{
await using var connection = await _dataSource.OpenConnectionAsync(ct);
await InsertSnapshotAsync(connection, snapshot, ct);
ArgumentNullException.ThrowIfNull(snapshot);
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
await InsertSnapshotAsync(connection, snapshot, ct).ConfigureAwait(false);
}
public async Task StoreSnapshotsAsync(IReadOnlyList<RiskStateSnapshot> snapshots, CancellationToken ct = default)
{
if (snapshots.Count == 0)
return;
ArgumentNullException.ThrowIfNull(snapshots);
await using var connection = await _dataSource.OpenConnectionAsync(ct);
await using var transaction = await connection.BeginTransactionAsync(ct);
if (snapshots.Count == 0)
{
return;
}
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(ct).ConfigureAwait(false);
try
{
foreach (var snapshot in snapshots)
{
await InsertSnapshotAsync(connection, snapshot, ct, transaction);
await InsertSnapshotAsync(connection, snapshot, ct, transaction).ConfigureAwait(false);
}
await transaction.CommitAsync(ct);
await transaction.CommitAsync(ct).ConfigureAwait(false);
}
catch
{
await transaction.RollbackAsync(ct);
await transaction.RollbackAsync(ct).ConfigureAwait(false);
throw;
}
}
public async Task<RiskStateSnapshot?> GetLatestSnapshotAsync(FindingKey findingKey, CancellationToken ct = default)
{
const string sql = """
SELECT
ArgumentNullException.ThrowIfNull(findingKey);
var sql = $"""
SELECT
vuln_id, purl, scan_id, captured_at,
reachable, lattice_state, vex_status::TEXT, in_affected_range,
kev, epss_score, policy_flags, policy_decision::TEXT, state_hash
FROM scanner.risk_state_snapshots
WHERE vuln_id = @VulnId AND purl = @Purl
FROM {RiskStateSnapshotsTable}
WHERE tenant_id = @TenantId
AND vuln_id = @VulnId
AND purl = @Purl
ORDER BY captured_at DESC
LIMIT 1
""";
await using var connection = await _dataSource.OpenConnectionAsync(ct);
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
var row = await connection.QuerySingleOrDefaultAsync<RiskStateRow>(sql, new
{
TenantId,
VulnId = findingKey.VulnId,
Purl = findingKey.Purl
Purl = findingKey.ComponentPurl
});
return row?.ToSnapshot();
@@ -80,18 +95,21 @@ public sealed class PostgresRiskStateRepository : IRiskStateRepository
public async Task<IReadOnlyList<RiskStateSnapshot>> GetSnapshotsForScanAsync(string scanId, CancellationToken ct = default)
{
const string sql = """
SELECT
ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
var sql = $"""
SELECT
vuln_id, purl, scan_id, captured_at,
reachable, lattice_state, vex_status::TEXT, in_affected_range,
kev, epss_score, policy_flags, policy_decision::TEXT, state_hash
FROM scanner.risk_state_snapshots
WHERE scan_id = @ScanId
FROM {RiskStateSnapshotsTable}
WHERE tenant_id = @TenantId
AND scan_id = @ScanId
ORDER BY vuln_id, purl
""";
await using var connection = await _dataSource.OpenConnectionAsync(ct);
var rows = await connection.QueryAsync<RiskStateRow>(sql, new { ScanId = scanId });
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
var rows = await connection.QueryAsync<RiskStateRow>(sql, new { TenantId, ScanId = scanId.Trim() });
return rows.Select(r => r.ToSnapshot()).ToList();
}
@@ -101,22 +119,28 @@ public sealed class PostgresRiskStateRepository : IRiskStateRepository
int limit = 10,
CancellationToken ct = default)
{
const string sql = """
SELECT
ArgumentNullException.ThrowIfNull(findingKey);
ArgumentOutOfRangeException.ThrowIfLessThan(limit, 1);
var sql = $"""
SELECT
vuln_id, purl, scan_id, captured_at,
reachable, lattice_state, vex_status::TEXT, in_affected_range,
kev, epss_score, policy_flags, policy_decision::TEXT, state_hash
FROM scanner.risk_state_snapshots
WHERE vuln_id = @VulnId AND purl = @Purl
FROM {RiskStateSnapshotsTable}
WHERE tenant_id = @TenantId
AND vuln_id = @VulnId
AND purl = @Purl
ORDER BY captured_at DESC
LIMIT @Limit
""";
await using var connection = await _dataSource.OpenConnectionAsync(ct);
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
var rows = await connection.QueryAsync<RiskStateRow>(sql, new
{
TenantId,
VulnId = findingKey.VulnId,
Purl = findingKey.Purl,
Purl = findingKey.ComponentPurl,
Limit = limit
});
@@ -125,37 +149,42 @@ public sealed class PostgresRiskStateRepository : IRiskStateRepository
public async Task<IReadOnlyList<RiskStateSnapshot>> GetSnapshotsByHashAsync(string stateHash, CancellationToken ct = default)
{
const string sql = """
SELECT
ArgumentException.ThrowIfNullOrWhiteSpace(stateHash);
var sql = $"""
SELECT
vuln_id, purl, scan_id, captured_at,
reachable, lattice_state, vex_status::TEXT, in_affected_range,
kev, epss_score, policy_flags, policy_decision::TEXT, state_hash
FROM scanner.risk_state_snapshots
WHERE state_hash = @StateHash
FROM {RiskStateSnapshotsTable}
WHERE tenant_id = @TenantId
AND state_hash = @StateHash
ORDER BY captured_at DESC
""";
await using var connection = await _dataSource.OpenConnectionAsync(ct);
var rows = await connection.QueryAsync<RiskStateRow>(sql, new { StateHash = stateHash });
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
var rows = await connection.QueryAsync<RiskStateRow>(sql, new { TenantId, StateHash = stateHash.Trim() });
return rows.Select(r => r.ToSnapshot()).ToList();
}
private static async Task InsertSnapshotAsync(
private async Task InsertSnapshotAsync(
NpgsqlConnection connection,
RiskStateSnapshot snapshot,
CancellationToken ct,
NpgsqlTransaction? transaction = null)
{
const string sql = """
INSERT INTO scanner.risk_state_snapshots (
ArgumentNullException.ThrowIfNull(snapshot);
var sql = $"""
INSERT INTO {RiskStateSnapshotsTable} (
tenant_id, vuln_id, purl, scan_id, captured_at,
reachable, lattice_state, vex_status, in_affected_range,
kev, epss_score, policy_flags, policy_decision, state_hash
) VALUES (
@TenantId, @VulnId, @Purl, @ScanId, @CapturedAt,
@Reachable, @LatticeState, @VexStatus::scanner.vex_status_type, @InAffectedRange,
@Kev, @EpssScore, @PolicyFlags, @PolicyDecision::scanner.policy_decision_type, @StateHash
@Reachable, @LatticeState, @VexStatus::vex_status_type, @InAffectedRange,
@Kev, @EpssScore, @PolicyFlags, @PolicyDecision::policy_decision_type, @StateHash
)
ON CONFLICT (tenant_id, scan_id, vuln_id, purl) DO UPDATE SET
reachable = EXCLUDED.reachable,
@@ -169,32 +198,27 @@ public sealed class PostgresRiskStateRepository : IRiskStateRepository
state_hash = EXCLUDED.state_hash
""";
var tenantId = GetCurrentTenantId();
await connection.ExecuteAsync(new CommandDefinition(sql, new
{
TenantId = tenantId,
VulnId = snapshot.FindingKey.VulnId,
Purl = snapshot.FindingKey.Purl,
ScanId = snapshot.ScanId,
CapturedAt = snapshot.CapturedAt,
Reachable = snapshot.Reachable,
LatticeState = snapshot.LatticeState,
VexStatus = snapshot.VexStatus.ToString().ToLowerInvariant(),
InAffectedRange = snapshot.InAffectedRange,
Kev = snapshot.Kev,
EpssScore = snapshot.EpssScore,
PolicyFlags = snapshot.PolicyFlags.ToArray(),
PolicyDecision = snapshot.PolicyDecision?.ToString().ToLowerInvariant(),
StateHash = snapshot.ComputeStateHash()
}, transaction: transaction, cancellationToken: ct));
}
private static Guid GetCurrentTenantId()
{
// In production, this would come from the current context
// For now, return a default tenant ID
return Guid.Parse("00000000-0000-0000-0000-000000000001");
await connection.ExecuteAsync(new CommandDefinition(
sql,
new
{
TenantId,
VulnId = snapshot.FindingKey.VulnId,
Purl = snapshot.FindingKey.ComponentPurl,
ScanId = snapshot.ScanId,
CapturedAt = snapshot.CapturedAt,
Reachable = snapshot.Reachable,
LatticeState = snapshot.LatticeState,
VexStatus = snapshot.VexStatus.ToString().ToLowerInvariant(),
InAffectedRange = snapshot.InAffectedRange,
Kev = snapshot.Kev,
EpssScore = snapshot.EpssScore,
PolicyFlags = snapshot.PolicyFlags.ToArray(),
PolicyDecision = snapshot.PolicyDecision?.ToString().ToLowerInvariant(),
StateHash = snapshot.ComputeStateHash()
},
transaction: transaction,
cancellationToken: ct)).ConfigureAwait(false);
}
/// <summary>
@@ -214,7 +238,6 @@ public sealed class PostgresRiskStateRepository : IRiskStateRepository
public decimal? epss_score { get; set; }
public string[]? policy_flags { get; set; }
public string? policy_decision { get; set; }
public string state_hash { get; set; } = "";
public RiskStateSnapshot ToSnapshot()
{
@@ -247,7 +270,9 @@ public sealed class PostgresRiskStateRepository : IRiskStateRepository
private static PolicyDecisionType? ParsePolicyDecision(string? value)
{
if (string.IsNullOrEmpty(value))
{
return null;
}
return value.ToLowerInvariant() switch
{

View File

@@ -13,8 +13,15 @@ namespace StellaOps.Scanner.Storage.Postgres;
/// </summary>
public sealed class PostgresVexCandidateStore : IVexCandidateStore
{
private const string TenantContext = "00000000-0000-0000-0000-000000000001";
private static readonly Guid TenantId = Guid.Parse(TenantContext);
private readonly ScannerDataSource _dataSource;
private readonly ILogger<PostgresVexCandidateStore> _logger;
private string SchemaName => _dataSource.SchemaName ?? ScannerDataSource.DefaultSchema;
private string VexCandidatesTable => $"{SchemaName}.vex_candidates";
private static readonly JsonSerializerOptions JsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
@@ -30,83 +37,96 @@ public sealed class PostgresVexCandidateStore : IVexCandidateStore
public async Task StoreCandidatesAsync(IReadOnlyList<VexCandidate> candidates, CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(candidates);
if (candidates.Count == 0)
return;
await using var connection = await _dataSource.OpenConnectionAsync(ct);
await using var transaction = await connection.BeginTransactionAsync(ct);
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
await using var transaction = await connection.BeginTransactionAsync(ct).ConfigureAwait(false);
try
{
foreach (var candidate in candidates)
{
await InsertCandidateAsync(connection, candidate, ct, transaction);
await InsertCandidateAsync(connection, candidate, ct, transaction).ConfigureAwait(false);
}
await transaction.CommitAsync(ct);
await transaction.CommitAsync(ct).ConfigureAwait(false);
_logger.LogDebug("Stored {Count} VEX candidates", candidates.Count);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to store VEX candidates");
await transaction.RollbackAsync(ct);
await transaction.RollbackAsync(ct).ConfigureAwait(false);
throw;
}
}
public async Task<IReadOnlyList<VexCandidate>> GetCandidatesAsync(string imageDigest, CancellationToken ct = default)
{
const string sql = """
ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest);
var sql = $"""
SELECT
candidate_id, vuln_id, purl, image_digest,
suggested_status::TEXT, justification::TEXT, rationale,
evidence_links, confidence, generated_at, expires_at,
requires_review, review_action::TEXT, reviewed_by, reviewed_at, review_comment
FROM scanner.vex_candidates
WHERE image_digest = @ImageDigest
FROM {VexCandidatesTable}
WHERE tenant_id = @TenantId
AND image_digest = @ImageDigest
ORDER BY confidence DESC
""";
await using var connection = await _dataSource.OpenConnectionAsync(ct);
var rows = await connection.QueryAsync<VexCandidateRow>(sql, new { ImageDigest = imageDigest });
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
var rows = await connection.QueryAsync<VexCandidateRow>(sql, new { TenantId, ImageDigest = imageDigest.Trim() });
return rows.Select(r => r.ToCandidate()).ToList();
}
public async Task<VexCandidate?> GetCandidateAsync(string candidateId, CancellationToken ct = default)
{
const string sql = """
ArgumentException.ThrowIfNullOrWhiteSpace(candidateId);
var sql = $"""
SELECT
candidate_id, vuln_id, purl, image_digest,
suggested_status::TEXT, justification::TEXT, rationale,
evidence_links, confidence, generated_at, expires_at,
requires_review, review_action::TEXT, reviewed_by, reviewed_at, review_comment
FROM scanner.vex_candidates
WHERE candidate_id = @CandidateId
FROM {VexCandidatesTable}
WHERE tenant_id = @TenantId
AND candidate_id = @CandidateId
""";
await using var connection = await _dataSource.OpenConnectionAsync(ct);
var row = await connection.QuerySingleOrDefaultAsync<VexCandidateRow>(sql, new { CandidateId = candidateId });
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
var row = await connection.QuerySingleOrDefaultAsync<VexCandidateRow>(sql, new { TenantId, CandidateId = candidateId.Trim() });
return row?.ToCandidate();
}
public async Task<bool> ReviewCandidateAsync(string candidateId, VexCandidateReview review, CancellationToken ct = default)
{
const string sql = """
UPDATE scanner.vex_candidates SET
ArgumentException.ThrowIfNullOrWhiteSpace(candidateId);
ArgumentNullException.ThrowIfNull(review);
var sql = $"""
UPDATE {VexCandidatesTable} SET
requires_review = FALSE,
review_action = @ReviewAction::scanner.vex_review_action,
review_action = @ReviewAction::vex_review_action,
reviewed_by = @ReviewedBy,
reviewed_at = @ReviewedAt,
review_comment = @ReviewComment
WHERE candidate_id = @CandidateId
WHERE tenant_id = @TenantId
AND candidate_id = @CandidateId
""";
await using var connection = await _dataSource.OpenConnectionAsync(ct);
await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, ct).ConfigureAwait(false);
var affected = await connection.ExecuteAsync(sql, new
{
CandidateId = candidateId,
TenantId,
CandidateId = candidateId.Trim(),
ReviewAction = review.Action.ToString().ToLowerInvariant(),
ReviewedBy = review.Reviewer,
ReviewedAt = review.ReviewedAt,
@@ -122,20 +142,23 @@ public sealed class PostgresVexCandidateStore : IVexCandidateStore
return affected > 0;
}
private static async Task InsertCandidateAsync(
private async Task InsertCandidateAsync(
NpgsqlConnection connection,
VexCandidate candidate,
CancellationToken ct,
NpgsqlTransaction? transaction = null)
{
const string sql = """
INSERT INTO scanner.vex_candidates (
ArgumentNullException.ThrowIfNull(connection);
ArgumentNullException.ThrowIfNull(candidate);
var sql = $"""
INSERT INTO {VexCandidatesTable} (
tenant_id, candidate_id, vuln_id, purl, image_digest,
suggested_status, justification, rationale,
evidence_links, confidence, generated_at, expires_at, requires_review
) VALUES (
@TenantId, @CandidateId, @VulnId, @Purl, @ImageDigest,
@SuggestedStatus::scanner.vex_status_type, @Justification::scanner.vex_justification, @Rationale,
@SuggestedStatus::vex_status_type, @Justification::vex_justification, @Rationale,
@EvidenceLinks::jsonb, @Confidence, @GeneratedAt, @ExpiresAt, @RequiresReview
)
ON CONFLICT (candidate_id) DO UPDATE SET
@@ -147,7 +170,7 @@ public sealed class PostgresVexCandidateStore : IVexCandidateStore
expires_at = EXCLUDED.expires_at
""";
var tenantId = GetCurrentTenantId();
var tenantId = TenantId;
var evidenceLinksJson = JsonSerializer.Serialize(candidate.EvidenceLinks, JsonOptions);
await connection.ExecuteAsync(new CommandDefinition(sql, new
@@ -155,7 +178,7 @@ public sealed class PostgresVexCandidateStore : IVexCandidateStore
TenantId = tenantId,
CandidateId = candidate.CandidateId,
VulnId = candidate.FindingKey.VulnId,
Purl = candidate.FindingKey.Purl,
Purl = candidate.FindingKey.ComponentPurl,
ImageDigest = candidate.ImageDigest,
SuggestedStatus = MapVexStatus(candidate.SuggestedStatus),
Justification = MapJustification(candidate.Justification),
@@ -193,12 +216,6 @@ public sealed class PostgresVexCandidateStore : IVexCandidateStore
};
}
private static Guid GetCurrentTenantId()
{
// In production, this would come from the current context
return Guid.Parse("00000000-0000-0000-0000-000000000001");
}
/// <summary>
/// Row mapping class for Dapper.
/// </summary>

View File

@@ -150,21 +150,34 @@ public sealed class ClassificationHistoryRepository : RepositoryBase<ScannerData
DateOnly toDate,
CancellationToken cancellationToken = default)
{
var sql = $"""
SELECT day_bucket, tenant_id, cause, total_reclassified, fn_count, fn_drift_percent,
feed_delta_count, rule_delta_count, lattice_delta_count, reachability_delta_count,
engine_count, other_count
FROM {DriftStatsView}
WHERE tenant_id = @tenant_id AND day_bucket >= @from_date AND day_bucket <= @to_date
ORDER BY day_bucket DESC
""";
var sql = tenantId == Guid.Empty
? $"""
SELECT day_bucket, tenant_id, cause, total_reclassified, fn_count, fn_drift_percent,
feed_delta_count, rule_delta_count, lattice_delta_count, reachability_delta_count,
engine_count, other_count
FROM {DriftStatsView}
WHERE day_bucket >= @from_date AND day_bucket <= @to_date
ORDER BY day_bucket DESC
"""
: $"""
SELECT day_bucket, tenant_id, cause, total_reclassified, fn_count, fn_drift_percent,
feed_delta_count, rule_delta_count, lattice_delta_count, reachability_delta_count,
engine_count, other_count
FROM {DriftStatsView}
WHERE tenant_id = @tenant_id AND day_bucket >= @from_date AND day_bucket <= @to_date
ORDER BY day_bucket DESC
""";
return QueryAsync(
Tenant,
sql,
cmd =>
{
AddParameter(cmd, "tenant_id", tenantId);
if (tenantId != Guid.Empty)
{
AddParameter(cmd, "tenant_id", tenantId);
}
AddParameter(cmd, "from_date", fromDate);
AddParameter(cmd, "to_date", toDate);
},

View File

@@ -0,0 +1,11 @@
using StellaOps.Scanner.CallGraph;
namespace StellaOps.Scanner.Storage.Repositories;
/// <summary>
/// Persistence contract for call-graph snapshots, keyed by scan id and language.
/// </summary>
public interface ICallGraphSnapshotRepository
{
/// <summary>Stores the snapshot.</summary>
Task StoreAsync(CallGraphSnapshot snapshot, CancellationToken ct = default);
/// <summary>
/// Returns the latest stored snapshot for the scan/language pair,
/// or <c>null</c> when none has been stored.
/// </summary>
Task<CallGraphSnapshot?> TryGetLatestAsync(string scanId, string language, CancellationToken ct = default);
}

View File

@@ -0,0 +1,11 @@
using StellaOps.Scanner.CallGraph;
namespace StellaOps.Scanner.Storage.Repositories;
/// <summary>
/// Persistence contract for reachability analysis results, keyed by scan id and language.
/// </summary>
public interface IReachabilityResultRepository
{
/// <summary>Stores the analysis result.</summary>
Task StoreAsync(ReachabilityAnalysisResult result, CancellationToken ct = default);
/// <summary>
/// Returns the latest stored result for the scan/language pair,
/// or <c>null</c> when none has been stored.
/// </summary>
Task<ReachabilityAnalysisResult?> TryGetLatestAsync(string scanId, string language, CancellationToken ct = default);
}

View File

@@ -2,7 +2,7 @@ using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Storage.Models;
using StellaOps.Scanner.Storage.Repositories;
namespace StellaOps.Scanner.Core.Drift;
namespace StellaOps.Scanner.Storage.Services;
/// <summary>
/// Calculates FN-Drift rate with stratification.

View File

@@ -142,6 +142,8 @@ public sealed class FnDriftMetricsExporter : BackgroundService
private async Task RefreshMetricsAsync(CancellationToken cancellationToken)
{
await _repository.RefreshDriftStatsAsync(cancellationToken);
// Get 30-day summary for all tenants (aggregated)
// In production, this would iterate over active tenants
var now = _timeProvider.GetUtcNow();

View File

@@ -8,6 +8,7 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="AWSSDK.S3" Version="3.7.305.6" />
<PackageReference Include="Dapper" Version="2.1.35" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
@@ -20,8 +21,10 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\\StellaOps.Scanner.EntryTrace\\StellaOps.Scanner.EntryTrace.csproj" />
<ProjectReference Include="..\\StellaOps.Scanner.CallGraph\\StellaOps.Scanner.CallGraph.csproj" />
<ProjectReference Include="..\\StellaOps.Scanner.Core\\StellaOps.Scanner.Core.csproj" />
<ProjectReference Include="..\\StellaOps.Scanner.ProofSpine\\StellaOps.Scanner.ProofSpine.csproj" />
<ProjectReference Include="..\\StellaOps.Scanner.SmartDiff\\StellaOps.Scanner.SmartDiff.csproj" />
<ProjectReference Include="..\\..\\..\\__Libraries\\StellaOps.Infrastructure.Postgres\\StellaOps.Infrastructure.Postgres.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,45 @@
using StellaOps.Scanner.CallGraph;
using StellaOps.Scanner.CallGraph.Node;
using Xunit;
namespace StellaOps.Scanner.CallGraph.Tests;
/// <summary>
/// Runs the Node call-graph extractor against checked-in benchmark cases and
/// verifies the reachability analyzer agrees with each case's expected outcome.
/// </summary>
public class BenchmarkIntegrationTests
{
    [Theory]
    [InlineData("unsafe-eval", true)]
    [InlineData("guarded-eval", false)]
    public async Task NodeTraceExtractor_AlignsWithBenchmarkReachability(string caseName, bool expectSinkReachable)
    {
        var caseDir = Path.Combine(FindRepoRoot(), "bench", "reachability-benchmark", "cases", "js", caseName);

        var request = new CallGraphExtractionRequest(
            ScanId: $"bench-{caseName}",
            Language: "node",
            TargetPath: caseDir);
        var snapshot = await new NodeCallGraphExtractor().ExtractAsync(request);

        var result = new ReachabilityAnalyzer().Analyze(snapshot);

        Assert.Equal(expectSinkReachable, result.ReachableSinkIds.Length > 0);
    }

    // Walks upward from the test binary until the benchmark directory is found.
    private static string FindRepoRoot()
    {
        for (var dir = new DirectoryInfo(AppContext.BaseDirectory); dir is not null; dir = dir.Parent)
        {
            if (Directory.Exists(Path.Combine(dir.FullName, "bench", "reachability-benchmark")))
            {
                return dir.FullName;
            }
        }

        throw new InvalidOperationException("Unable to locate repository root for benchmark integration tests.");
    }
}

View File

@@ -0,0 +1,42 @@
using StellaOps.Scanner.CallGraph.Caching;
using Xunit;
namespace StellaOps.Scanner.CallGraph.Tests;
/// <summary>
/// Unit tests for the cache circuit breaker state machine.
/// </summary>
public class CircuitBreakerStateTests
{
    [Fact]
    public void RecordFailure_TripsOpen_AfterThreshold()
    {
        var breaker = new CircuitBreakerState(new CircuitBreakerConfig
        {
            FailureThreshold = 2,
            TimeoutSeconds = 60,
            HalfOpenTimeout = 10
        });

        // Fresh breaker starts closed.
        Assert.Equal(CircuitState.Closed, breaker.State);

        // One failure below the threshold keeps it closed.
        breaker.RecordFailure();
        Assert.Equal(CircuitState.Closed, breaker.State);

        // Hitting the threshold trips it open.
        breaker.RecordFailure();
        Assert.Equal(CircuitState.Open, breaker.State);
        Assert.True(breaker.IsOpen);
    }

    [Fact]
    public void RecordSuccess_ResetsToClosed()
    {
        var breaker = new CircuitBreakerState(new CircuitBreakerConfig { FailureThreshold = 1, TimeoutSeconds = 60, HalfOpenTimeout = 10 });

        breaker.RecordFailure();
        Assert.True(breaker.IsOpen);

        // A success closes the breaker again.
        breaker.RecordSuccess();
        Assert.Equal(CircuitState.Closed, breaker.State);
        Assert.False(breaker.IsOpen);
    }
}

View File

@@ -0,0 +1,166 @@
using StellaOps.Scanner.CallGraph;
using StellaOps.Scanner.CallGraph.DotNet;
using Xunit;
namespace StellaOps.Scanner.CallGraph.Tests;
/// <summary>
/// Tests for <see cref="DotNetCallGraphExtractor"/>: entrypoint/sink detection and
/// determinism of the extracted graph for identical inputs.
/// </summary>
public class DotNetCallGraphExtractorTests
{
    // Shared minimal SDK-style project file; both tests previously duplicated this literal.
    private const string MinimalCsproj = """
        <Project Sdk="Microsoft.NET.Sdk">
          <PropertyGroup>
            <TargetFramework>net10.0</TargetFramework>
            <LangVersion>preview</LangVersion>
            <Nullable>enable</Nullable>
            <ImplicitUsings>enable</ImplicitUsings>
          </PropertyGroup>
        </Project>
        """;

    [Fact]
    public async Task ExtractAsync_SimpleProject_ProducesEntrypointAndSink()
    {
        await using var temp = await TempDirectory.CreateAsync();
        var csprojPath = Path.Combine(temp.Path, "App.csproj");
        await File.WriteAllTextAsync(csprojPath, MinimalCsproj);
        await File.WriteAllTextAsync(Path.Combine(temp.Path, "Program.cs"), """
            using System;
            public sealed class HttpGetAttribute : Attribute { }
            namespace System.Diagnostics
            {
                public static class Process
                {
                    public static void Start(string cmd) { }
                }
            }
            public sealed class FooController
            {
                [HttpGet]
                public void Get()
                {
                    Helper();
                }
                private void Helper()
                {
                    System.Diagnostics.Process.Start("cmd.exe");
                }
            }
            """);

        // Fixed UTC instant (was a culture-sensitive DateTimeOffset.Parse; CA1305)
        // so ExtractedAt is deterministic regardless of host culture.
        var fixedTime = new DateTimeOffset(2025, 12, 17, 0, 0, 0, TimeSpan.Zero);
        var extractor = new DotNetCallGraphExtractor(new FixedTimeProvider(fixedTime));

        var snapshot = await extractor.ExtractAsync(new CallGraphExtractionRequest(
            ScanId: "scan-001",
            Language: "dotnet",
            TargetPath: csprojPath));

        Assert.Equal("scan-001", snapshot.ScanId);
        Assert.Equal("dotnet", snapshot.Language);
        Assert.False(string.IsNullOrWhiteSpace(snapshot.GraphDigest));
        Assert.NotEmpty(snapshot.Nodes);
        Assert.NotEmpty(snapshot.Edges);
        // [HttpGet] action must be classified as an HTTP entrypoint; Process.Start as a sink.
        Assert.Contains(snapshot.Nodes, n => n.IsEntrypoint && n.EntrypointType == EntrypointType.HttpHandler);
        Assert.Contains(snapshot.Nodes, n => n.IsSink);
        Assert.NotEmpty(snapshot.SinkIds);
        Assert.NotEmpty(snapshot.EntrypointIds);
    }

    [Fact]
    public async Task ExtractAsync_IsDeterministic_ForSameInputs()
    {
        await using var temp = await TempDirectory.CreateAsync();
        var csprojPath = Path.Combine(temp.Path, "App.csproj");
        await File.WriteAllTextAsync(csprojPath, MinimalCsproj);
        await File.WriteAllTextAsync(Path.Combine(temp.Path, "Program.cs"), """
            public static class Program
            {
                public static void Main()
                {
                    A();
                }
                private static void A()
                {
                    B();
                }
                private static void B()
                {
                }
            }
            """);

        var extractor = new DotNetCallGraphExtractor();
        var request = new CallGraphExtractionRequest("scan-001", "dotnet", csprojPath);

        var first = await extractor.ExtractAsync(request);
        var second = await extractor.ExtractAsync(request);

        // Same inputs must yield byte-identical digests and identical node/edge ordering.
        Assert.Equal(first.GraphDigest, second.GraphDigest);
        Assert.Equal(first.Nodes.Select(n => n.NodeId), second.Nodes.Select(n => n.NodeId));
        Assert.Equal(first.Edges.Select(e => (e.SourceId, e.TargetId, e.CallKind)), second.Edges.Select(e => (e.SourceId, e.TargetId, e.CallKind)));
    }

    /// <summary>Deterministic clock for reproducible extraction timestamps.</summary>
    private sealed class FixedTimeProvider : TimeProvider
    {
        private readonly DateTimeOffset _instant;

        public FixedTimeProvider(DateTimeOffset instant)
        {
            _instant = instant;
        }

        public override DateTimeOffset GetUtcNow() => _instant;
    }

    /// <summary>Scratch directory deleted (best effort) when the test finishes.</summary>
    private sealed class TempDirectory : IAsyncDisposable
    {
        public string Path { get; }

        private TempDirectory(string path)
        {
            Path = path;
        }

        public static Task<TempDirectory> CreateAsync()
        {
            var root = System.IO.Path.Combine(System.IO.Path.GetTempPath(), $"stella_callgraph_{Guid.NewGuid():N}");
            Directory.CreateDirectory(root);
            return Task.FromResult(new TempDirectory(root));
        }

        public ValueTask DisposeAsync()
        {
            try
            {
                if (Directory.Exists(Path))
                {
                    Directory.Delete(Path, recursive: true);
                }
            }
            catch
            {
                // best effort cleanup
            }
            return ValueTask.CompletedTask;
        }
    }
}

View File

@@ -0,0 +1,67 @@
using System.Collections.Immutable;
using StellaOps.Scanner.CallGraph;
using Xunit;
namespace StellaOps.Scanner.CallGraph.Tests;
/// <summary>
/// Unit tests for <see cref="ReachabilityAnalyzer"/> path finding over a call-graph snapshot.
/// </summary>
public class ReachabilityAnalyzerTests
{
    [Fact]
    public void Analyze_WhenSinkReachable_ReturnsShortestPath()
    {
        // Three-node chain: entry -> middle -> sink.
        var entryId = CallGraphNodeIds.Compute("dotnet:test:entry");
        var middleId = CallGraphNodeIds.Compute("dotnet:test:mid");
        var sinkId = CallGraphNodeIds.Compute("dotnet:test:sink");

        var snapshot = new CallGraphSnapshot(
            ScanId: "scan-1",
            GraphDigest: "sha256:placeholder",
            Language: "dotnet",
            ExtractedAt: DateTimeOffset.UtcNow,
            Nodes:
            [
                new CallGraphNode(entryId, "Entry", "file.cs", 1, "app", Visibility.Public, true, EntrypointType.HttpHandler, false, null),
                new CallGraphNode(middleId, "Mid", "file.cs", 2, "app", Visibility.Public, false, null, false, null),
                new CallGraphNode(sinkId, "Sink", "file.cs", 3, "System", Visibility.Public, false, null, true, StellaOps.Scanner.Reachability.SinkCategory.CmdExec),
            ],
            Edges:
            [
                new CallGraphEdge(entryId, middleId, CallKind.Direct),
                new CallGraphEdge(middleId, sinkId, CallKind.Direct),
            ],
            EntrypointIds: [entryId],
            SinkIds: [sinkId]);

        var result = new ReachabilityAnalyzer().Analyze(snapshot);

        Assert.Contains(sinkId, result.ReachableSinkIds);
        Assert.Single(result.Paths);
        var path = result.Paths[0];
        Assert.Equal(entryId, path.EntrypointId);
        Assert.Equal(sinkId, path.SinkId);
        Assert.Equal(ImmutableArray.Create(entryId, middleId, sinkId), path.NodeIds);
    }

    [Fact]
    public void Analyze_WhenNoEntrypoints_ReturnsEmpty()
    {
        var emptySnapshot = new CallGraphSnapshot(
            ScanId: "scan-1",
            GraphDigest: "sha256:placeholder",
            Language: "dotnet",
            ExtractedAt: DateTimeOffset.UtcNow,
            Nodes: ImmutableArray<CallGraphNode>.Empty,
            Edges: ImmutableArray<CallGraphEdge>.Empty,
            EntrypointIds: ImmutableArray<string>.Empty,
            SinkIds: ImmutableArray<string>.Empty);

        var result = new ReachabilityAnalyzer().Analyze(emptySnapshot);

        // Nothing reachable, but the digest must still be populated deterministically.
        Assert.Empty(result.ReachableNodeIds);
        Assert.Empty(result.ReachableSinkIds);
        Assert.Empty(result.Paths);
        Assert.False(string.IsNullOrWhiteSpace(result.ResultDigest));
    }
}

View File

@@ -0,0 +1,21 @@
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Test project for StellaOps.Scanner.CallGraph; never packed. -->
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <IsPackable>false</IsPackable>
  </PropertyGroup>
  <!-- xUnit test stack. -->
  <ItemGroup>
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
  </ItemGroup>
  <!-- System under test plus the shared messaging test fixtures (Valkey). -->
  <ItemGroup>
    <ProjectReference Include="..\\..\\__Libraries\\StellaOps.Scanner.CallGraph\\StellaOps.Scanner.CallGraph.csproj" />
    <ProjectReference Include="..\\..\\..\\__Libraries\\StellaOps.Messaging.Testing\\StellaOps.Messaging.Testing.csproj" />
  </ItemGroup>
</Project>

View File

@@ -0,0 +1,85 @@
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Messaging.Testing.Fixtures;
using StellaOps.Scanner.CallGraph;
using StellaOps.Scanner.CallGraph.Caching;
using Xunit;
namespace StellaOps.Scanner.CallGraph.Tests;
/// <summary>
/// Round-trip tests for the Valkey-backed call-graph cache, using the shared
/// Valkey test fixture for a real connection string.
/// </summary>
[Collection(nameof(ValkeyFixtureCollection))]
public class ValkeyCallGraphCacheServiceTests : IAsyncLifetime
{
    private readonly ValkeyFixture _fixture;
    private ValkeyCallGraphCacheService _cache = null!;

    public ValkeyCallGraphCacheServiceTests(ValkeyFixture fixture)
    {
        _fixture = fixture;
    }

    public Task InitializeAsync()
    {
        var config = new CallGraphCacheConfig
        {
            Enabled = true,
            ConnectionString = _fixture.ConnectionString,
            KeyPrefix = "test:callgraph:",
            TtlSeconds = 60,
            EnableGzip = true,
            CircuitBreaker = new CircuitBreakerConfig { FailureThreshold = 3, TimeoutSeconds = 30, HalfOpenTimeout = 10 }
        };
        _cache = new ValkeyCallGraphCacheService(Options.Create(config), NullLogger<ValkeyCallGraphCacheService>.Instance);
        return Task.CompletedTask;
    }

    public async Task DisposeAsync()
    {
        await _cache.DisposeAsync();
    }

    [Fact]
    public async Task SetThenGet_CallGraph_RoundTrips()
    {
        var entryNodeId = CallGraphNodeIds.Compute("dotnet:test:entry");
        var stored = new CallGraphSnapshot(
            ScanId: "scan-cache-1",
            GraphDigest: "sha256:cg",
            Language: "dotnet",
            ExtractedAt: DateTimeOffset.UtcNow,
            Nodes: [new CallGraphNode(entryNodeId, "Entry", "file.cs", 1, "app", Visibility.Public, true, EntrypointType.HttpHandler, false, null)],
            Edges: [],
            EntrypointIds: [entryNodeId],
            SinkIds: []);

        await _cache.SetCallGraphAsync(stored);
        var fetched = await _cache.TryGetCallGraphAsync("scan-cache-1", "dotnet");

        Assert.NotNull(fetched);
        Assert.Equal(stored.ScanId, fetched!.ScanId);
        Assert.Equal(stored.Language, fetched.Language);
        Assert.Equal(stored.GraphDigest, fetched.GraphDigest);
    }

    [Fact]
    public async Task SetThenGet_ReachabilityResult_RoundTrips()
    {
        var stored = new ReachabilityAnalysisResult(
            ScanId: "scan-cache-2",
            GraphDigest: "sha256:cg",
            Language: "dotnet",
            ComputedAt: DateTimeOffset.UtcNow,
            ReachableNodeIds: [],
            ReachableSinkIds: [],
            Paths: [],
            ResultDigest: "sha256:r");

        await _cache.SetReachabilityResultAsync(stored);
        var fetched = await _cache.TryGetReachabilityResultAsync("scan-cache-2", "dotnet");

        Assert.NotNull(fetched);
        Assert.Equal(stored.ResultDigest, fetched!.ResultDigest);
    }
}

View File

@@ -1,61 +1,52 @@
using StellaOps.Scanner.Storage.Models;
using StellaOps.Scanner.Storage.Services;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using StellaOps.Scanner.Storage.Models;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.Scanner.Storage.Services;
using Xunit;
namespace StellaOps.Scanner.Storage.Tests;
/// <summary>
/// Unit tests for ClassificationChangeTracker.
/// SPRINT_3404_0001_0001 - Task #11, #12
/// Unit tests for <see cref="ClassificationChangeTracker" />.
/// </summary>
public sealed class ClassificationChangeTrackerTests
{
private readonly Mock<IClassificationHistoryRepository> _repositoryMock;
private readonly FakeClassificationHistoryRepository _repository;
private readonly ClassificationChangeTracker _tracker;
private readonly FakeTimeProvider _timeProvider;
public ClassificationChangeTrackerTests()
{
_repositoryMock = new Mock<IClassificationHistoryRepository>();
_timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow);
_repository = new FakeClassificationHistoryRepository();
_tracker = new ClassificationChangeTracker(
_repositoryMock.Object,
_repository,
NullLogger<ClassificationChangeTracker>.Instance,
_timeProvider);
new FakeTimeProvider(DateTimeOffset.Parse("2025-12-17T00:00:00Z")));
}
[Fact]
public async Task TrackChangeAsync_ActualChange_InsertsToRepository()
{
// Arrange
var change = CreateChange(ClassificationStatus.Unknown, ClassificationStatus.Affected);
// Act
await _tracker.TrackChangeAsync(change);
// Assert
_repositoryMock.Verify(r => r.InsertAsync(change, It.IsAny<CancellationToken>()), Times.Once);
Assert.Single(_repository.InsertedChanges);
Assert.Same(change, _repository.InsertedChanges[0]);
}
[Fact]
public async Task TrackChangeAsync_NoOpChange_SkipsInsert()
{
// Arrange - same status
var change = CreateChange(ClassificationStatus.Affected, ClassificationStatus.Affected);
// Act
await _tracker.TrackChangeAsync(change);
// Assert
_repositoryMock.Verify(r => r.InsertAsync(It.IsAny<ClassificationChange>(), It.IsAny<CancellationToken>()), Times.Never);
Assert.Empty(_repository.InsertedChanges);
}
[Fact]
public async Task TrackChangesAsync_FiltersNoOpChanges()
{
// Arrange
var changes = new[]
{
CreateChange(ClassificationStatus.Unknown, ClassificationStatus.Affected),
@@ -63,97 +54,70 @@ public sealed class ClassificationChangeTrackerTests
CreateChange(ClassificationStatus.Affected, ClassificationStatus.Fixed),
};
// Act
await _tracker.TrackChangesAsync(changes);
// Assert
_repositoryMock.Verify(r => r.InsertBatchAsync(
It.Is<IEnumerable<ClassificationChange>>(c => c.Count() == 2),
It.IsAny<CancellationToken>()),
Times.Once);
Assert.Single(_repository.InsertedBatches);
Assert.Equal(2, _repository.InsertedBatches[0].Count);
}
[Fact]
public async Task TrackChangesAsync_EmptyAfterFilter_DoesNotInsert()
{
// Arrange - all no-ops
var changes = new[]
{
CreateChange(ClassificationStatus.Affected, ClassificationStatus.Affected),
CreateChange(ClassificationStatus.Unknown, ClassificationStatus.Unknown),
};
// Act
await _tracker.TrackChangesAsync(changes);
// Assert
_repositoryMock.Verify(r => r.InsertBatchAsync(It.IsAny<IEnumerable<ClassificationChange>>(), It.IsAny<CancellationToken>()), Times.Never);
Assert.Empty(_repository.InsertedBatches);
}
[Fact]
public void IsFnTransition_UnknownToAffected_ReturnsTrue()
{
// Arrange
var change = CreateChange(ClassificationStatus.Unknown, ClassificationStatus.Affected);
// Assert
Assert.True(change.IsFnTransition);
}
[Fact]
public void IsFnTransition_UnaffectedToAffected_ReturnsTrue()
{
// Arrange
var change = CreateChange(ClassificationStatus.Unaffected, ClassificationStatus.Affected);
// Assert
Assert.True(change.IsFnTransition);
}
[Fact]
public void IsFnTransition_AffectedToFixed_ReturnsFalse()
{
// Arrange
var change = CreateChange(ClassificationStatus.Affected, ClassificationStatus.Fixed);
// Assert
Assert.False(change.IsFnTransition);
}
[Fact]
public void IsFnTransition_NewToAffected_ReturnsFalse()
{
// Arrange - new finding, not a reclassification
var change = CreateChange(ClassificationStatus.New, ClassificationStatus.Affected);
// Assert
Assert.False(change.IsFnTransition);
}
[Fact]
public async Task ComputeDeltaAsync_NewFinding_RecordsAsNewStatus()
{
// Arrange
var tenantId = Guid.NewGuid();
var artifact = "sha256:abc123";
var prevExecId = Guid.NewGuid();
var currExecId = Guid.NewGuid();
_repositoryMock
.Setup(r => r.GetByExecutionAsync(tenantId, prevExecId, It.IsAny<CancellationToken>()))
.ReturnsAsync(Array.Empty<ClassificationChange>());
_repository.SetExecutionChanges(tenantId, prevExecId, Array.Empty<ClassificationChange>());
_repository.SetExecutionChanges(tenantId, currExecId, new[]
{
CreateChange(ClassificationStatus.New, ClassificationStatus.Affected, artifact, "CVE-2024-0001"),
});
_repositoryMock
.Setup(r => r.GetByExecutionAsync(tenantId, currExecId, It.IsAny<CancellationToken>()))
.ReturnsAsync(new[]
{
CreateChange(ClassificationStatus.New, ClassificationStatus.Affected, artifact, "CVE-2024-0001"),
});
// Act
var delta = await _tracker.ComputeDeltaAsync(tenantId, artifact, prevExecId, currExecId);
// Assert
Assert.Single(delta);
Assert.Equal(ClassificationStatus.New, delta[0].PreviousStatus);
Assert.Equal(ClassificationStatus.Affected, delta[0].NewStatus);
@@ -162,30 +126,22 @@ public sealed class ClassificationChangeTrackerTests
[Fact]
public async Task ComputeDeltaAsync_StatusChange_RecordsDelta()
{
// Arrange
var tenantId = Guid.NewGuid();
var artifact = "sha256:abc123";
var prevExecId = Guid.NewGuid();
var currExecId = Guid.NewGuid();
_repositoryMock
.Setup(r => r.GetByExecutionAsync(tenantId, prevExecId, It.IsAny<CancellationToken>()))
.ReturnsAsync(new[]
{
CreateChange(ClassificationStatus.New, ClassificationStatus.Unknown, artifact, "CVE-2024-0001"),
});
_repository.SetExecutionChanges(tenantId, prevExecId, new[]
{
CreateChange(ClassificationStatus.New, ClassificationStatus.Unknown, artifact, "CVE-2024-0001"),
});
_repository.SetExecutionChanges(tenantId, currExecId, new[]
{
CreateChange(ClassificationStatus.Unknown, ClassificationStatus.Affected, artifact, "CVE-2024-0001"),
});
_repositoryMock
.Setup(r => r.GetByExecutionAsync(tenantId, currExecId, It.IsAny<CancellationToken>()))
.ReturnsAsync(new[]
{
CreateChange(ClassificationStatus.Unknown, ClassificationStatus.Affected, artifact, "CVE-2024-0001"),
});
// Act
var delta = await _tracker.ComputeDeltaAsync(tenantId, artifact, prevExecId, currExecId);
// Assert
Assert.Single(delta);
Assert.Equal(ClassificationStatus.Unknown, delta[0].PreviousStatus);
Assert.Equal(ClassificationStatus.Affected, delta[0].NewStatus);
@@ -196,8 +152,7 @@ public sealed class ClassificationChangeTrackerTests
ClassificationStatus next,
string artifact = "sha256:test",
string vulnId = "CVE-2024-0001")
{
return new ClassificationChange
=> new()
{
ArtifactDigest = artifact,
VulnId = vulnId,
@@ -209,29 +164,66 @@ public sealed class ClassificationChangeTrackerTests
NewStatus = next,
Cause = DriftCause.FeedDelta,
};
private sealed class FakeTimeProvider : TimeProvider
{
private DateTimeOffset _now;
public FakeTimeProvider(DateTimeOffset now) => _now = now;
public override DateTimeOffset GetUtcNow() => _now;
public void Advance(TimeSpan duration) => _now = _now.Add(duration);
}
// In-memory IClassificationHistoryRepository fake: records inserts and serves
// pre-seeded per-execution change lists so delta computation can be tested
// without a database.
private sealed class FakeClassificationHistoryRepository : IClassificationHistoryRepository
{
private readonly Dictionary<(Guid tenantId, Guid executionId), IReadOnlyList<ClassificationChange>> _byExecution = new();
// Individual changes captured by InsertAsync, in call order.
public List<ClassificationChange> InsertedChanges { get; } = new();
// Batches captured by InsertBatchAsync (each batch materialized to its own list).
public List<List<ClassificationChange>> InsertedBatches { get; } = new();
// Seeds the result GetByExecutionAsync returns for (tenantId, executionId).
public void SetExecutionChanges(Guid tenantId, Guid executionId, IReadOnlyList<ClassificationChange> changes)
=> _byExecution[(tenantId, executionId)] = changes;
public Task InsertAsync(ClassificationChange change, CancellationToken cancellationToken = default)
{
InsertedChanges.Add(change);
return Task.CompletedTask;
}
public Task InsertBatchAsync(IEnumerable<ClassificationChange> changes, CancellationToken cancellationToken = default)
{
InsertedBatches.Add(changes.ToList());
return Task.CompletedTask;
}
public Task<IReadOnlyList<ClassificationChange>> GetByExecutionAsync(
Guid tenantId,
Guid executionId,
CancellationToken cancellationToken = default)
{
// Unseeded executions behave as if no changes were recorded.
return Task.FromResult(_byExecution.TryGetValue((tenantId, executionId), out var changes)
? changes
: Array.Empty<ClassificationChange>());
}
// The members below are not exercised by these tests.
public Task<IReadOnlyList<ClassificationChange>> GetChangesAsync(Guid tenantId, DateTimeOffset since, CancellationToken cancellationToken = default)
=> throw new NotSupportedException();
public Task<IReadOnlyList<ClassificationChange>> GetByArtifactAsync(string artifactDigest, CancellationToken cancellationToken = default)
=> throw new NotSupportedException();
public Task<IReadOnlyList<ClassificationChange>> GetByVulnIdAsync(string vulnId, Guid? tenantId = null, CancellationToken cancellationToken = default)
=> throw new NotSupportedException();
public Task<IReadOnlyList<FnDriftStats>> GetDriftStatsAsync(Guid tenantId, DateOnly fromDate, DateOnly toDate, CancellationToken cancellationToken = default)
=> throw new NotSupportedException();
public Task<FnDrift30dSummary?> GetDrift30dSummaryAsync(Guid tenantId, CancellationToken cancellationToken = default)
=> throw new NotSupportedException();
public Task RefreshDriftStatsAsync(CancellationToken cancellationToken = default)
=> throw new NotSupportedException();
}
}
/// <summary>
/// Fake time provider for testing.
/// </summary>
internal sealed class FakeTimeProvider : TimeProvider
{
private DateTimeOffset _now;
public FakeTimeProvider(DateTimeOffset now) => _now = now;
public override DateTimeOffset GetUtcNow() => _now;
public void Advance(TimeSpan duration) => _now = _now.Add(duration);
}
/// <summary>
/// Mock interface for testing.
/// </summary>
public interface IClassificationHistoryRepository
{
Task InsertAsync(ClassificationChange change, CancellationToken cancellationToken = default);
Task InsertBatchAsync(IEnumerable<ClassificationChange> changes, CancellationToken cancellationToken = default);
Task<IReadOnlyList<ClassificationChange>> GetByExecutionAsync(Guid tenantId, Guid executionId, CancellationToken cancellationToken = default);
}

View File

@@ -5,6 +5,8 @@
// Description: Unit tests for scan metrics repository operations
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging.Abstractions;
using Npgsql;
using StellaOps.Scanner.Storage.Models;
using StellaOps.Scanner.Storage.Repositories;
using Xunit;
@@ -16,6 +18,7 @@ public sealed class ScanMetricsRepositoryTests : IAsyncLifetime
{
private readonly ScannerPostgresFixture _fixture;
private IScanMetricsRepository _repository = null!;
private NpgsqlDataSource _dataSource = null!;
public ScanMetricsRepositoryTests(ScannerPostgresFixture fixture)
{
@@ -24,11 +27,20 @@ public sealed class ScanMetricsRepositoryTests : IAsyncLifetime
public async Task InitializeAsync()
{
await _fixture.ResetAsync();
_repository = new PostgresScanMetricsRepository(_fixture.CreateConnection);
await _fixture.TruncateAllTablesAsync();
// Migration 004 creates scan metrics objects under the hard-coded `scanner` schema.
// Clear those tables explicitly for test isolation.
await _fixture.ExecuteSqlAsync("TRUNCATE TABLE scanner.execution_phases, scanner.scan_metrics CASCADE;");
_dataSource = NpgsqlDataSource.Create(_fixture.ConnectionString);
_repository = new PostgresScanMetricsRepository(_dataSource, NullLogger<PostgresScanMetricsRepository>.Instance);
}
public Task DisposeAsync() => Task.CompletedTask;
public async Task DisposeAsync()
{
await _dataSource.DisposeAsync();
}
[Fact]
public async Task SaveAsync_InsertsNewMetrics()
@@ -59,7 +71,7 @@ public sealed class ScanMetricsRepositoryTests : IAsyncLifetime
new ExecutionPhase
{
MetricsId = metrics.MetricsId,
PhaseName = "pull",
PhaseName = ScanPhaseNames.Ingest,
PhaseOrder = 1,
StartedAt = DateTimeOffset.UtcNow.AddSeconds(-10),
FinishedAt = DateTimeOffset.UtcNow.AddSeconds(-5),
@@ -68,7 +80,7 @@ public sealed class ScanMetricsRepositoryTests : IAsyncLifetime
new ExecutionPhase
{
MetricsId = metrics.MetricsId,
PhaseName = "analyze",
PhaseName = ScanPhaseNames.Analyze,
PhaseOrder = 2,
StartedAt = DateTimeOffset.UtcNow.AddSeconds(-5),
FinishedAt = DateTimeOffset.UtcNow,
@@ -80,10 +92,10 @@ public sealed class ScanMetricsRepositoryTests : IAsyncLifetime
await _repository.SavePhasesAsync(phases, CancellationToken.None);
// Assert
var retrieved = await _repository.GetPhasesByMetricsIdAsync(metrics.MetricsId, CancellationToken.None);
var retrieved = await _repository.GetPhasesAsync(metrics.MetricsId, CancellationToken.None);
Assert.Equal(2, retrieved.Count);
Assert.Contains(retrieved, p => p.PhaseName == "pull");
Assert.Contains(retrieved, p => p.PhaseName == "analyze");
Assert.Contains(retrieved, p => p.PhaseName == ScanPhaseNames.Ingest);
Assert.Contains(retrieved, p => p.PhaseName == ScanPhaseNames.Analyze);
}
[Fact]
@@ -97,7 +109,7 @@ public sealed class ScanMetricsRepositoryTests : IAsyncLifetime
}
[Fact]
public async Task GetTteByTenantAsync_ReturnsMetricsForTenant()
public async Task GetRecentAsync_ReturnsMetricsForTenant()
{
// Arrange
var tenantId = Guid.NewGuid();
@@ -110,7 +122,7 @@ public sealed class ScanMetricsRepositoryTests : IAsyncLifetime
await _repository.SaveAsync(metricsOther, CancellationToken.None);
// Act
var result = await _repository.GetTteByTenantAsync(tenantId, limit: 10, CancellationToken.None);
var result = await _repository.GetRecentAsync(tenantId, limit: 10, includeReplays: true, cancellationToken: CancellationToken.None);
// Assert
Assert.Equal(2, result.Count);
@@ -118,33 +130,35 @@ public sealed class ScanMetricsRepositoryTests : IAsyncLifetime
}
[Fact]
public async Task GetTteBySurfaceAsync_ReturnsMetricsForSurface()
public async Task GetByArtifactAsync_ReturnsMetricsForArtifact()
{
// Arrange
var surfaceId = Guid.NewGuid();
var metrics1 = CreateTestMetrics(surfaceId: surfaceId);
var metrics2 = CreateTestMetrics(surfaceId: surfaceId);
var artifactDigest = $"sha256:{Guid.NewGuid():N}";
var metrics1 = CreateTestMetrics(artifactDigest: artifactDigest);
var metrics2 = CreateTestMetrics(artifactDigest: artifactDigest);
var other = CreateTestMetrics();
await _repository.SaveAsync(metrics1, CancellationToken.None);
await _repository.SaveAsync(metrics2, CancellationToken.None);
await _repository.SaveAsync(other, CancellationToken.None);
// Act
var result = await _repository.GetTteBySurfaceAsync(surfaceId, limit: 10, CancellationToken.None);
var result = await _repository.GetByArtifactAsync(artifactDigest, CancellationToken.None);
// Assert
Assert.Equal(2, result.Count);
Assert.All(result, m => Assert.Equal(surfaceId, m.SurfaceId));
Assert.All(result, m => Assert.Equal(artifactDigest, m.ArtifactDigest));
}
[Fact]
public async Task GetP50TteAsync_CalculatesMedianCorrectly()
public async Task GetTtePercentileAsync_CalculatesMedianCorrectly()
{
// Arrange
var tenantId = Guid.NewGuid();
var baseTime = DateTimeOffset.UtcNow;
// Create metrics with different durations: 100ms, 200ms, 300ms, 400ms, 500ms
for (int i = 1; i <= 5; i++)
// Create metrics with different durations: 100ms, 200ms, 300ms, 400ms, 500ms.
for (var i = 1; i <= 5; i++)
{
var metrics = new ScanMetrics
{
@@ -152,22 +166,26 @@ public sealed class ScanMetricsRepositoryTests : IAsyncLifetime
ScanId = Guid.NewGuid(),
TenantId = tenantId,
ArtifactDigest = $"sha256:{Guid.NewGuid():N}",
ArtifactType = "oci_image",
ArtifactType = ArtifactTypes.OciImage,
FindingsSha256 = $"sha256:{Guid.NewGuid():N}",
StartedAt = baseTime.AddMilliseconds(-(i * 100)),
FinishedAt = baseTime,
Phases = new ScanPhaseTimings
{
PullMs = i * 20,
IngestMs = i * 20,
AnalyzeMs = i * 30,
DecideMs = i * 50
}
ReachabilityMs = 0,
VexMs = 0,
SignMs = 0,
PublishMs = 0
},
ScannerVersion = "1.0.0"
};
await _repository.SaveAsync(metrics, CancellationToken.None);
}
// Act
var p50 = await _repository.GetP50TteAsync(tenantId, since: baseTime.AddHours(-1), CancellationToken.None);
var p50 = await _repository.GetTtePercentileAsync(tenantId, percentile: 0.50m, since: baseTime.AddHours(-1), cancellationToken: CancellationToken.None);
// Assert
Assert.NotNull(p50);
@@ -178,15 +196,15 @@ public sealed class ScanMetricsRepositoryTests : IAsyncLifetime
public async Task SaveAsync_PreservesPhaseTimings()
{
// Arrange
var metrics = CreateTestMetrics();
metrics.Phases = new ScanPhaseTimings
var metrics = CreateTestMetrics(phases: new ScanPhaseTimings
{
PullMs = 100,
IngestMs = 100,
AnalyzeMs = 200,
DecideMs = 150,
AttestMs = 50,
ReachabilityMs = 300
};
ReachabilityMs = 300,
VexMs = 150,
SignMs = 50,
PublishMs = 25
});
// Act
await _repository.SaveAsync(metrics, CancellationToken.None);
@@ -194,20 +212,19 @@ public sealed class ScanMetricsRepositoryTests : IAsyncLifetime
// Assert
var retrieved = await _repository.GetByScanIdAsync(metrics.ScanId, CancellationToken.None);
Assert.NotNull(retrieved);
Assert.Equal(100, retrieved.Phases.PullMs);
Assert.Equal(100, retrieved.Phases.IngestMs);
Assert.Equal(200, retrieved.Phases.AnalyzeMs);
Assert.Equal(150, retrieved.Phases.DecideMs);
Assert.Equal(50, retrieved.Phases.AttestMs);
Assert.Equal(300, retrieved.Phases.ReachabilityMs);
Assert.Equal(150, retrieved.Phases.VexMs);
Assert.Equal(50, retrieved.Phases.SignMs);
Assert.Equal(25, retrieved.Phases.PublishMs);
}
[Fact]
public async Task SaveAsync_HandlesReplayScans()
{
// Arrange
var metrics = CreateTestMetrics();
metrics.IsReplay = true;
metrics.ReplayManifestHash = "sha256:replay123";
var metrics = CreateTestMetrics(isReplay: true, replayManifestHash: "sha256:replay123");
// Act
await _repository.SaveAsync(metrics, CancellationToken.None);
@@ -219,7 +236,13 @@ public sealed class ScanMetricsRepositoryTests : IAsyncLifetime
Assert.Equal("sha256:replay123", retrieved.ReplayManifestHash);
}
private static ScanMetrics CreateTestMetrics(Guid? tenantId = null, Guid? surfaceId = null)
private static ScanMetrics CreateTestMetrics(
Guid? tenantId = null,
Guid? surfaceId = null,
string? artifactDigest = null,
ScanPhaseTimings? phases = null,
bool isReplay = false,
string? replayManifestHash = null)
{
return new ScanMetrics
{
@@ -227,12 +250,15 @@ public sealed class ScanMetricsRepositoryTests : IAsyncLifetime
ScanId = Guid.NewGuid(),
TenantId = tenantId ?? Guid.NewGuid(),
SurfaceId = surfaceId,
ArtifactDigest = $"sha256:{Guid.NewGuid():N}",
ArtifactType = "oci_image",
ArtifactDigest = artifactDigest ?? $"sha256:{Guid.NewGuid():N}",
ArtifactType = ArtifactTypes.OciImage,
ReplayManifestHash = replayManifestHash,
FindingsSha256 = $"sha256:{Guid.NewGuid():N}",
StartedAt = DateTimeOffset.UtcNow.AddMinutes(-1),
FinishedAt = DateTimeOffset.UtcNow,
Phases = new ScanPhaseTimings()
Phases = phases ?? ScanPhaseTimings.Empty,
ScannerVersion = "1.0.0",
IsReplay = isReplay
};
}
}

View File

@@ -77,7 +77,7 @@ public class SmartDiffRepositoryIntegrationTests : IAsyncLifetime
// Assert
Assert.NotNull(retrieved);
Assert.Equal(snapshot.FindingKey.VulnId, retrieved.FindingKey.VulnId);
Assert.Equal(snapshot.FindingKey.Purl, retrieved.FindingKey.Purl);
Assert.Equal(snapshot.FindingKey.ComponentPurl, retrieved.FindingKey.ComponentPurl);
Assert.Equal(snapshot.Reachable, retrieved.Reachable);
Assert.Equal(snapshot.VexStatus, retrieved.VexStatus);
Assert.Equal(snapshot.Kev, retrieved.Kev);
@@ -89,11 +89,11 @@ public class SmartDiffRepositoryIntegrationTests : IAsyncLifetime
// Arrange
var findingKey = new FindingKey("CVE-2024-5678", "pkg:pypi/requests@2.28.0");
var snapshot1 = CreateTestSnapshot(findingKey.VulnId, findingKey.Purl, "scan-001",
var snapshot1 = CreateTestSnapshot(findingKey.VulnId, findingKey.ComponentPurl, "scan-001",
capturedAt: DateTimeOffset.UtcNow.AddHours(-2));
var snapshot2 = CreateTestSnapshot(findingKey.VulnId, findingKey.Purl, "scan-002",
var snapshot2 = CreateTestSnapshot(findingKey.VulnId, findingKey.ComponentPurl, "scan-002",
capturedAt: DateTimeOffset.UtcNow.AddHours(-1));
var snapshot3 = CreateTestSnapshot(findingKey.VulnId, findingKey.Purl, "scan-003",
var snapshot3 = CreateTestSnapshot(findingKey.VulnId, findingKey.ComponentPurl, "scan-003",
capturedAt: DateTimeOffset.UtcNow);
// Act
@@ -251,8 +251,8 @@ public class SmartDiffRepositoryIntegrationTests : IAsyncLifetime
{
// Arrange
var findingKey = new FindingKey("CVE-2024-HIST", "pkg:npm/history@1.0.0");
var change1 = CreateTestChange(findingKey.VulnId, findingKey.Purl, hasMaterialChange: true, priority: 100);
var change2 = CreateTestChange(findingKey.VulnId, findingKey.Purl, hasMaterialChange: true, priority: 200);
var change1 = CreateTestChange(findingKey.VulnId, findingKey.ComponentPurl, hasMaterialChange: true, priority: 100);
var change2 = CreateTestChange(findingKey.VulnId, findingKey.ComponentPurl, hasMaterialChange: true, priority: 200);
await _changeRepo.StoreChangeAsync(change1, "scan-h1");
await _changeRepo.StoreChangeAsync(change2, "scan-h2");

View File

@@ -13,6 +13,8 @@ using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy;
using StellaOps.Scanner.Storage.Models;
using StellaOps.Scanner.Storage.Services;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Options;
using StellaOps.Scanner.WebService.Services;
@@ -30,7 +32,8 @@ public sealed class ReportEventDispatcherTests
public async Task PublishAsync_EmitsReportReadyAndScanCompleted()
{
var publisher = new RecordingEventPublisher();
var dispatcher = new ReportEventDispatcher(publisher, Microsoft.Extensions.Options.Options.Create(new ScannerWebServiceOptions()), TimeProvider.System, NullLogger<ReportEventDispatcher>.Instance);
var tracker = new RecordingClassificationChangeTracker();
var dispatcher = new ReportEventDispatcher(publisher, tracker, Microsoft.Extensions.Options.Options.Create(new ScannerWebServiceOptions()), TimeProvider.System, NullLogger<ReportEventDispatcher>.Instance);
var cancellationToken = CancellationToken.None;
var request = new ReportRequestDto
@@ -165,6 +168,143 @@ public sealed class ReportEventDispatcherTests
Assert.Equal("blocked", scanPayload.Report.Verdict);
}
[Fact]
public async Task PublishAsync_RecordsFnDriftClassificationChanges()
{
var publisher = new RecordingEventPublisher();
var tracker = new RecordingClassificationChangeTracker();
var dispatcher = new ReportEventDispatcher(publisher, tracker, Microsoft.Extensions.Options.Options.Create(new ScannerWebServiceOptions()), TimeProvider.System, NullLogger<ReportEventDispatcher>.Instance);
var cancellationToken = CancellationToken.None;
var request = new ReportRequestDto
{
ImageDigest = "sha256:feedface",
Findings = new[]
{
new PolicyPreviewFindingDto
{
Id = "finding-1",
Severity = "Critical",
Repository = "acme/edge/api",
Cve = "CVE-2024-9999",
Purl = "pkg:nuget/Acme.Edge.Api@1.2.3",
Tags = new[] { "reachability:runtime" }
}
}
};
var baseline = new PolicyVerdict("finding-1", PolicyVerdictStatus.Pass, ConfigVersion: "1.0");
var projected = new PolicyVerdict(
"finding-1",
PolicyVerdictStatus.Blocked,
Score: 47.5,
ConfigVersion: "1.0",
SourceTrust: "NVD",
Reachability: "runtime");
var preview = new PolicyPreviewResponse(
Success: true,
PolicyDigest: "digest-123",
RevisionId: "rev-42",
Issues: ImmutableArray<PolicyIssue>.Empty,
Diffs: ImmutableArray.Create(new PolicyVerdictDiff(baseline, projected)),
ChangedCount: 1);
var document = new ReportDocumentDto
{
ReportId = "report-abc",
ImageDigest = "sha256:feedface",
GeneratedAt = DateTimeOffset.Parse("2025-10-19T12:34:56Z"),
Verdict = "blocked",
Policy = new ReportPolicyDto
{
RevisionId = "rev-42",
Digest = "digest-123"
},
Summary = new ReportSummaryDto
{
Total = 1,
Blocked = 1,
Warned = 0,
Ignored = 0,
Quieted = 0
}
};
var context = new DefaultHttpContext();
context.User = new ClaimsPrincipal(new ClaimsIdentity(new[] { new Claim(StellaOpsClaimTypes.Tenant, "tenant-alpha") }));
await dispatcher.PublishAsync(request, preview, document, envelope: null, context, cancellationToken);
var change = Assert.Single(tracker.Changes);
Assert.Equal("sha256:feedface", change.ArtifactDigest);
Assert.Equal("CVE-2024-9999", change.VulnId);
Assert.Equal("pkg:nuget/Acme.Edge.Api@1.2.3", change.PackagePurl);
Assert.Equal(ClassificationStatus.Unaffected, change.PreviousStatus);
Assert.Equal(ClassificationStatus.Affected, change.NewStatus);
Assert.Equal(DriftCause.ReachabilityDelta, change.Cause);
Assert.Equal(document.GeneratedAt, change.ChangedAt);
Assert.NotEqual(Guid.Empty, change.TenantId);
Assert.NotEqual(Guid.Empty, change.ExecutionId);
Assert.NotEqual(Guid.Empty, change.ManifestId);
}
[Fact]
public async Task PublishAsync_DoesNotFailWhenFnDriftTrackingThrows()
{
var publisher = new RecordingEventPublisher();
var tracker = new RecordingClassificationChangeTracker
{
ThrowOnTrack = true
};
var dispatcher = new ReportEventDispatcher(publisher, tracker, Microsoft.Extensions.Options.Options.Create(new ScannerWebServiceOptions()), TimeProvider.System, NullLogger<ReportEventDispatcher>.Instance);
var cancellationToken = CancellationToken.None;
var request = new ReportRequestDto
{
ImageDigest = "sha256:feedface",
Findings = new[]
{
new PolicyPreviewFindingDto
{
Id = "finding-1",
Severity = "Critical",
Repository = "acme/edge/api",
Cve = "CVE-2024-9999",
Purl = "pkg:nuget/Acme.Edge.Api@1.2.3"
}
}
};
var baseline = new PolicyVerdict("finding-1", PolicyVerdictStatus.Pass, ConfigVersion: "1.0");
var projected = new PolicyVerdict("finding-1", PolicyVerdictStatus.Blocked, ConfigVersion: "1.0");
var preview = new PolicyPreviewResponse(
Success: true,
PolicyDigest: "digest-123",
RevisionId: "rev-42",
Issues: ImmutableArray<PolicyIssue>.Empty,
Diffs: ImmutableArray.Create(new PolicyVerdictDiff(baseline, projected)),
ChangedCount: 1);
var document = new ReportDocumentDto
{
ReportId = "report-abc",
ImageDigest = "sha256:feedface",
GeneratedAt = DateTimeOffset.Parse("2025-10-19T12:34:56Z"),
Verdict = "blocked",
Policy = new ReportPolicyDto(),
Summary = new ReportSummaryDto()
};
var context = new DefaultHttpContext();
context.User = new ClaimsPrincipal(new ClaimsIdentity(new[] { new Claim(StellaOpsClaimTypes.Tenant, "tenant-alpha") }));
await dispatcher.PublishAsync(request, preview, document, envelope: null, context, cancellationToken);
Assert.Equal(2, publisher.Events.Count);
}
[Fact]
public async Task PublishAsync_HonoursConfiguredConsoleAndApiSegments()
{
@@ -186,7 +326,8 @@ public sealed class ReportEventDispatcherTests
});
var publisher = new RecordingEventPublisher();
var dispatcher = new ReportEventDispatcher(publisher, options, TimeProvider.System, NullLogger<ReportEventDispatcher>.Instance);
var tracker = new RecordingClassificationChangeTracker();
var dispatcher = new ReportEventDispatcher(publisher, tracker, options, TimeProvider.System, NullLogger<ReportEventDispatcher>.Instance);
var cancellationToken = CancellationToken.None;
var request = new ReportRequestDto
@@ -295,4 +436,40 @@ public sealed class ReportEventDispatcherTests
return Task.CompletedTask;
}
}
// Test double for IClassificationChangeTracker: records every change handed to
// the tracker, and can optionally throw to verify the dispatcher tolerates
// FN-drift tracking failures without dropping its events.
private sealed class RecordingClassificationChangeTracker : IClassificationChangeTracker
{
// Changes captured via TrackChangeAsync/TrackChangesAsync, in call order.
public List<ClassificationChange> Changes { get; } = new();
// When true, both Track* methods throw instead of recording.
public bool ThrowOnTrack { get; init; }
public Task TrackChangeAsync(ClassificationChange change, CancellationToken cancellationToken = default)
{
if (ThrowOnTrack)
{
throw new InvalidOperationException("Tracking failure");
}
Changes.Add(change);
return Task.CompletedTask;
}
public Task TrackChangesAsync(IEnumerable<ClassificationChange> changes, CancellationToken cancellationToken = default)
{
if (ThrowOnTrack)
{
throw new InvalidOperationException("Tracking failure");
}
Changes.AddRange(changes);
return Task.CompletedTask;
}
// Delta computation is out of scope for these tests; always returns an empty list.
public Task<IReadOnlyList<ClassificationChange>> ComputeDeltaAsync(
Guid tenantId,
string artifactDigest,
Guid previousExecutionId,
Guid currentExecutionId,
CancellationToken cancellationToken = default)
=> Task.FromResult<IReadOnlyList<ClassificationChange>>(Array.Empty<ClassificationChange>());
}
}

View File

@@ -120,7 +120,7 @@ public sealed class FailureSignatureRepository : RepositoryBase<SchedulerDataSou
ORDER BY last_seen_at DESC
""";
return await QueryListAsync(
return await QueryAsync(
tenantId,
sql,
cmd =>
@@ -147,7 +147,7 @@ public sealed class FailureSignatureRepository : RepositoryBase<SchedulerDataSou
LIMIT @limit
""";
return await QueryListAsync(
return await QueryAsync(
tenantId,
sql,
cmd =>
@@ -176,7 +176,7 @@ public sealed class FailureSignatureRepository : RepositoryBase<SchedulerDataSou
LIMIT @limit
""";
return await QueryListAsync(
return await QueryAsync(
tenantId,
sql,
cmd =>

55
src/Signals/AGENTS.md Normal file
View File

@@ -0,0 +1,55 @@
# Signals · AGENTS Charter
## Module Scope & Working Directory
- Working directory: `src/Signals/**` (service + storage + tests).
- Primary service: `src/Signals/StellaOps.Signals/` (tests under `src/Signals/__Tests/**`).
- Mission: ingest and normalize reachability/runtime signals, then compute deterministic Unknowns scores/bands and decay to drive triage.
## Roles Covered
- Backend engineer (.NET 10): ingestion, scoring/decay services, persistence, APIs.
- QA / determinism: unit/property tests, golden fixtures, replayable scoring runs.
- Observability: metrics/logging for scoring and decay batches.
## Required Reading (treat as read before DOING)
- Global: `docs/README.md`, `docs/07_HIGH_LEVEL_ARCHITECTURE.md`, `docs/modules/platform/architecture-overview.md`.
- Signals (Unknowns): `docs/signals/unknowns-registry.md`, `docs/modules/signals/unknowns/2025-12-01-unknowns-registry.md`.
- Signals (Decay): `docs/modules/signals/decay/2025-12-01-confidence-decay.md`.
- Reachability delivery guide (unknowns + runtime ingestion): `docs/reachability/DELIVERY_GUIDE.md`.
- Related sprints (design + evidence):
- `docs/implplan/archived/SPRINT_1102_0001_0001_unknowns_scoring_schema.md`
- `docs/implplan/archived/SPRINT_1105_0001_0001_deploy_refs_graph_metrics.md`
- `docs/implplan/archived/SPRINT_3601_0001_0001_unknowns_decay_algorithm.md`
## Contracts (Triage & Unknowns)
### Unknowns Scoring (5-factor)
- Canonical formula (see `src/Signals/StellaOps.Signals/Services/UnknownsScoringService.cs`):
- `Score = clamp01(wP*P + wE*E + wU*U + wC*C + wS*S)`
- Bands:
- HOT: `Score >= HotThreshold` (default `0.70`)
- WARM: `WarmThreshold <= Score < HotThreshold` (default `0.40..0.70`)
- COLD: `Score < WarmThreshold`
- Configuration (must be stable across runs):
- `Signals:UnknownsScoring` (see `src/Signals/StellaOps.Signals/Options/UnknownsScoringOptions.cs`)
- `Signals:UnknownsDecay` (see `src/Signals/StellaOps.Signals/Options/UnknownsDecayOptions.cs`)
- Determinism rules:
- Never use randomness in scoring/banding.
- Normalize inputs consistently (stable string comparisons via `StringComparer.Ordinal`, clamp ranges, avoid culture-dependent formatting).
- If adding new uncertainty flags or normalization terms, version and document them; update fixtures/tests.
### Unknowns Decay
- Nightly decay recomputes scores/bands and schedules rescans (see `src/Signals/StellaOps.Signals/Services/UnknownsDecayService.cs` and `src/Signals/StellaOps.Signals/Services/NightlyDecayWorker.cs`).
- Time source must be injectable (`TimeProvider`) for deterministic tests.
- Any scheduling defaults must remain offline-friendly and bounded (no tight loops, capped batch sizes).
## Testing & Validation
- Prefer module-scoped runs:
- `dotnet test src/Signals/__Tests/*/*.csproj -c Release`
- Tests must cover:
- Scoring determinism (same inputs -> same score/band).
- Boundary conditions at `WarmThreshold`/`HotThreshold`.
- Decay batch scheduling and time math (fixed `TimeProvider`).
## Delivery Discipline
- Update sprint tracker statuses (`TODO -> DOING -> DONE/BLOCKED`) in `docs/implplan/SPRINT_*.md`.
- Mirror the same status in `src/Signals/StellaOps.Signals/TASKS.md` when work starts/finishes.

View File

@@ -15,6 +15,24 @@ Provide language-agnostic collection, normalization, and scoring of reachability
- `docs/modules/platform/architecture-overview.md`
- `docs/signals/unknowns-registry.md`
- `docs/reachability/DELIVERY_GUIDE.md` (unknowns + runtime ingestion sections)
- Module front door: `src/Signals/AGENTS.md` (scoring/decay contract summary)
## Contracts (Triage & Unknowns)
### Unknowns Scoring (5-factor)
- Canonical formula (implementation source of truth):
- `src/Signals/StellaOps.Signals/Services/UnknownsScoringService.cs`
- `Score = clamp01(wP*P + wE*E + wU*U + wC*C + wS*S)`
- Configuration (must remain deterministic across runs):
- `Signals:UnknownsScoring` (`src/Signals/StellaOps.Signals/Options/UnknownsScoringOptions.cs`)
- `Signals:UnknownsDecay` (`src/Signals/StellaOps.Signals/Options/UnknownsDecayOptions.cs`)
- Band thresholds (defaults): HOT `>= 0.70`, WARM `>= 0.40`, else COLD.
### Unknowns Decay
- Nightly batch + rescan scheduling:
- `src/Signals/StellaOps.Signals/Services/UnknownsDecayService.cs`
- `src/Signals/StellaOps.Signals/Services/NightlyDecayWorker.cs`
- Time handling must stay injectable (`TimeProvider`) to keep tests deterministic.
## Working Agreement
- 1. Update task status to `DOING`/`DONE` in both the corresponding sprint file `/docs/implplan/SPRINT_*.md` and the local `TASKS.md` when you start or finish work.

View File

@@ -7,4 +7,5 @@ This file mirrors sprint work for the Signals module.
| `SIG-STORE-401-016` | `docs/implplan/SPRINT_0401_0001_0001_reachability_evidence_chain.md` | DONE (2025-12-13) | Added reachability store repository APIs and models; callgraph ingestion now populates the store; Mongo index script at `ops/mongo/indices/reachability_store_indices.js`. |
| `UNCERTAINTY-SCHEMA-401-024` | `docs/implplan/SPRINT_0401_0001_0001_reachability_evidence_chain.md` | DONE (2025-12-13) | Implemented uncertainty tiers and scoring integration; see `src/Signals/StellaOps.Signals/Lattice/UncertaintyTier.cs` and `src/Signals/StellaOps.Signals/Lattice/ReachabilityLattice.cs`. |
| `UNCERTAINTY-SCORER-401-025` | `docs/implplan/SPRINT_0401_0001_0001_reachability_evidence_chain.md` | DONE (2025-12-13) | Reachability risk score now uses configurable entropy weights and is aligned with `UncertaintyDocument.RiskScore`; tests cover tier/entropy scoring. |
| `UNKNOWNS-DECAY-3601-001` | `docs/implplan/SPRINT_3601_0001_0001_unknowns_decay_algorithm.md` | DOING (2025-12-15) | Implement decay worker/service, signal refresh hook, and deterministic unit/integration tests. |
| `UNKNOWNS-DECAY-3601-001` | `docs/implplan/SPRINT_3601_0001_0001_unknowns_decay_algorithm.md` | DONE (2025-12-17) | Implemented decay worker/service, signal refresh hook, and deterministic unit/integration tests. |
| `TRI-MASTER-0003` | `docs/implplan/SPRINT_3600_0001_0001_triage_unknowns_master.md` | DONE (2025-12-17) | Synced Signals AGENTS with Unknowns scoring/decay contracts and configuration sections. |

View File

@@ -50,3 +50,4 @@
| UI-TRIAGE-4601-001 | DONE (2025-12-15) | Keyboard shortcuts for triage workspace (SPRINT_4601_0001_0001_keyboard_shortcuts.md). |
| UI-TRIAGE-4602-001 | DONE (2025-12-15) | Finish triage decision drawer/evidence pills QA: component specs + Storybook stories (SPRINT_4602_0001_0001_decision_drawer_evidence_tab.md). |
| UI-TTFS-0340-001 | DONE (2025-12-15) | FirstSignalCard UI component + client/store/tests (SPRINT_0340_0001_0001_first_signal_card_ui.md). |
| TRI-MASTER-0009 | DONE (2025-12-17) | Added Playwright E2E coverage for triage workflow (tabs, VEX modal, decision drawer, evidence pills). |

View File

@@ -0,0 +1,102 @@
import { expect, test } from '@playwright/test';
import { policyAuthorSession } from '../../src/app/testing';
// Fixture served in place of the real /config.json so the UI boots fully
// offline: no live Authority round-trips are made during these e2e runs.
const mockConfig = {
  authority: {
    issuer: 'https://authority.local',
    clientId: 'stellaops-ui',
    authorizeEndpoint: 'https://authority.local/connect/authorize',
    tokenEndpoint: 'https://authority.local/connect/token',
    logoutEndpoint: 'https://authority.local/connect/logout',
    redirectUri: 'http://127.0.0.1:4400/auth/callback',
    postLogoutRedirectUri: 'http://127.0.0.1:4400/',
    // Broad scope set so every triage surface (VEX, exceptions, findings,
    // vuln workflows) is unlocked for the seeded test session.
    scope:
      'openid profile email ui.read authority:tenants.read advisory:read vex:read exceptions:read exceptions:approve aoc:verify findings:read orch:read vuln:view vuln:investigate vuln:operate vuln:audit',
    audience: 'https://scanner.local',
    dpopAlgorithms: ['ES256'],
    refreshLeewaySeconds: 60,
  },
  apiBaseUrls: {
    authority: 'https://authority.local',
    scanner: 'https://scanner.local',
    policy: 'https://scanner.local',
    concelier: 'https://concelier.local',
    attestor: 'https://attestor.local',
  },
  // Quickstart mode makes the app use bundled demo data — TODO confirm exact
  // behavior against the config loader.
  quickstartMode: true,
};
test.beforeEach(async ({ page }) => {
  // Seed the auth session before any application script executes.
  await page.addInitScript((seededSession) => {
    try {
      window.sessionStorage.clear();
    } catch {
      // Storage can be unavailable in restricted contexts; continue anyway.
    }
    (window as any).__stellaopsTestSession = seededSession;
  }, policyAuthorSession);

  // Serve the UI bootstrap config from the local fixture.
  await page.route('**/config.json', async (route) => {
    await route.fulfill({
      status: 200,
      contentType: 'application/json',
      body: JSON.stringify(mockConfig),
    });
  });

  // Refuse every request to the authority host so no real auth occurs.
  await page.route('https://authority.local/**', async (route) => {
    await route.abort();
  });
});
test('triage workflow: pills navigate + open drawer', async ({ page }) => {
  await page.goto('/triage/artifacts/asset-web-prod');

  // Wait for the triage workspace to render.
  const heading = page.getByRole('heading', { name: 'Artifact triage' });
  await expect(heading).toBeVisible({ timeout: 10000 });

  // Evidence is the tab selected by default.
  const evidenceTab = page.getByRole('tab', { name: 'Evidence' });
  await expect(evidenceTab).toHaveAttribute('aria-selected', 'true');

  const pills = {
    reachability: page.getByRole('button', { name: /^Reachability:/ }),
    vex: page.getByRole('button', { name: /^VEX:/ }),
  };
  await expect(pills.reachability).toBeVisible();
  await expect(pills.vex).toBeVisible();

  // The reachability pill navigates to the reachability tab/panel.
  await pills.reachability.click();
  const reachabilityTab = page.getByRole('tab', { name: 'Reachability' });
  await expect(reachabilityTab).toHaveAttribute('aria-selected', 'true');
  await expect(page.locator('#triage-panel-reachability')).toBeVisible();

  // The VEX pill opens the decision drawer; closing it hides it again.
  await pills.vex.click();
  const decisionDrawer = page.getByRole('dialog', { name: 'Record Decision' });
  await expect(decisionDrawer).toHaveClass(/open/);
  await decisionDrawer.getByRole('button', { name: 'Close drawer' }).click();
  await expect(decisionDrawer).not.toHaveClass(/open/);
});
test('triage workflow: record decision opens VEX modal', async ({ page }) => {
  await page.goto('/triage/artifacts/asset-web-prod');

  // Wait for the triage workspace to render, then open the Evidence tab.
  const heading = page.getByRole('heading', { name: 'Artifact triage' });
  await expect(heading).toBeVisible({ timeout: 10000 });
  await page.getByRole('tab', { name: 'Evidence' }).click();
  await expect(page.locator('#triage-panel-evidence')).toBeVisible();

  // Launch the decision drawer from the first evidence row.
  await page.getByRole('button', { name: 'Record Decision' }).first().click();
  const decisionDrawer = page.getByRole('dialog', { name: 'Record Decision' });
  await expect(decisionDrawer).toHaveClass(/open/);

  // Submit stays disabled until a reason and notes are provided.
  const submitButton = decisionDrawer.getByRole('button', { name: 'Record Decision' });
  await expect(submitButton).toBeDisabled();
  await decisionDrawer.getByLabel('Select reason').selectOption('component_not_present');
  await decisionDrawer
    .getByLabel('Additional notes')
    .fill('E2E: not affected via quickstart fixture');
  await expect(submitButton).toBeEnabled();

  // Submitting closes the drawer and surfaces the VEX decision dialog.
  await submitButton.click();
  await expect(decisionDrawer).not.toHaveClass(/open/);
  const vexDialog = page.getByRole('dialog', { name: 'VEX decision' });
  await expect(vexDialog).toBeVisible({ timeout: 10000 });
  await page.getByRole('button', { name: 'Close VEX decision dialog' }).click();
  await expect(vexDialog).toBeHidden();
});

View File

@@ -0,0 +1,35 @@
# StellaOps.Router.Gateway — AGENTS
## Roles
- Backend engineer: maintain the Gateway middleware pipeline (endpoint resolution, auth, routing decision, transport dispatch) and shared concerns (rate limiting, payload limits, OpenAPI aggregation).
- QA automation: own Gateway-focused unit/integration tests (middleware order, error mapping, determinism, and config validation).
## Required Reading
- docs/README.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
- docs/modules/router/README.md
- docs/modules/router/architecture.md
- docs/modules/router/openapi-aggregation.md
- docs/modules/router/schema-validation.md
## Working Directory & Scope
- Primary: `src/__Libraries/StellaOps.Router.Gateway`
- Allowed tests: `src/__Libraries/__Tests/StellaOps.Router.Gateway.Tests`
- Allowed shared dependencies (read/consume): `src/__Libraries/StellaOps.Router.Common`, `src/__Libraries/StellaOps.Router.Config`, `src/__Libraries/StellaOps.Router.Transport.*`
- Cross-module edits require a note in the owning sprint's **Execution Log** and **Decisions & Risks**.
## Determinism & Guardrails
- Target runtime: .NET 10 (`net10.0`) with C# preview enabled by repo policy.
- Middleware must be deterministic: stable header writing, stable error shapes, UTC timestamps only.
- Offline-first posture: no runtime external downloads; Valkey/Redis is an optional dependency configured via connection string.
- Avoid high-cardinality metrics labels by default; only emit route labels when they are bounded (configured route names).
## Testing Expectations
- Add/modify unit tests for every behavior change.
- Prefer unit tests for config parsing, route matching, and limiter logic; keep integration tests behind explicit opt-in when they require Docker/Valkey.
- Default command: `dotnet test src/__Libraries/__Tests/StellaOps.Router.Gateway.Tests -c Release`.
## Handoff Notes
- Keep this file aligned with router architecture docs and sprint decisions; record updates in sprint **Execution Log**.

View File

@@ -19,12 +19,13 @@ public static class ApplicationBuilderExtensions
// Enforce payload limits first
app.UseMiddleware<PayloadLimitsMiddleware>();
// Rate limiting (Sprint 1200_001_001)
app.UseRateLimiting();
// Resolve endpoints from routing state
app.UseMiddleware<EndpointResolutionMiddleware>();
// Rate limiting (Sprint 1200_001_001)
// Runs after endpoint resolution so microservice identity is available.
app.UseRateLimiting();
// Make routing decisions (select instance)
app.UseMiddleware<RoutingDecisionMiddleware>();
@@ -59,12 +60,13 @@ public static class ApplicationBuilderExtensions
/// <returns>The application builder for chaining.</returns>
public static IApplicationBuilder UseRouterGatewayCore(this IApplicationBuilder app)
{
// Rate limiting (Sprint 1200_001_001)
app.UseRateLimiting();
// Resolve endpoints from routing state
app.UseMiddleware<EndpointResolutionMiddleware>();
// Rate limiting (Sprint 1200_001_001)
// Runs after endpoint resolution so microservice identity is available.
app.UseRateLimiting();
// Make routing decisions (select instance)
app.UseMiddleware<RoutingDecisionMiddleware>();

Some files were not shown because too many files have changed in this diff Show More