Add comprehensive security tests for OWASP A02, A05, A07, and A08 categories
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Findings Ledger CI / build-test (push) Has been cancelled
Findings Ledger CI / migration-validation (push) Has been cancelled
Findings Ledger CI / generate-manifest (push) Has been cancelled
Manifest Integrity / Validate Schema Integrity (push) Has been cancelled
Lighthouse CI / Lighthouse Audit (push) Has been cancelled
Lighthouse CI / Axe Accessibility Audit (push) Has been cancelled
Manifest Integrity / Validate Contract Documents (push) Has been cancelled
Manifest Integrity / Validate Pack Fixtures (push) Has been cancelled
Manifest Integrity / Audit SHA256SUMS Files (push) Has been cancelled
Manifest Integrity / Verify Merkle Roots (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Policy Simulation / policy-simulate (push) Has been cancelled
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Findings Ledger CI / build-test (push) Has been cancelled
Findings Ledger CI / migration-validation (push) Has been cancelled
Findings Ledger CI / generate-manifest (push) Has been cancelled
Manifest Integrity / Validate Schema Integrity (push) Has been cancelled
Lighthouse CI / Lighthouse Audit (push) Has been cancelled
Lighthouse CI / Axe Accessibility Audit (push) Has been cancelled
Manifest Integrity / Validate Contract Documents (push) Has been cancelled
Manifest Integrity / Validate Pack Fixtures (push) Has been cancelled
Manifest Integrity / Audit SHA256SUMS Files (push) Has been cancelled
Manifest Integrity / Verify Merkle Roots (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Policy Simulation / policy-simulate (push) Has been cancelled
- Implemented tests for Cryptographic Failures (A02) to ensure proper handling of sensitive data, secure algorithms, and key management. - Added tests for Security Misconfiguration (A05) to validate production configurations, security headers, CORS settings, and feature management. - Developed tests for Authentication Failures (A07) to enforce strong password policies, rate limiting, session management, and MFA support. - Created tests for Software and Data Integrity Failures (A08) to verify artifact signatures, SBOM integrity, attestation chains, and feed updates.
This commit is contained in:
@@ -185,10 +185,4 @@ public enum VexFormat
|
||||
Unknown
|
||||
}
|
||||
|
||||
public enum SourcePrecedence
|
||||
{
|
||||
Vendor = 1,
|
||||
Maintainer = 2,
|
||||
ThirdParty = 3,
|
||||
Unknown = 99
|
||||
}
|
||||
// Note: SourcePrecedence is defined in SourcePrecedenceLattice.cs
|
||||
|
||||
@@ -0,0 +1,326 @@
|
||||
// =============================================================================
|
||||
// AttestationCollector.cs
|
||||
// Attestation evidence collector for reconciliation workflow
|
||||
// Part of Step 2: Evidence Collection (Task T6)
|
||||
// Integrated with DsseVerifier (Task T7)
|
||||
// =============================================================================
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.AirGap.Importer.Contracts;
|
||||
using StellaOps.AirGap.Importer.Validation;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
|
||||
/// Collects attestation evidence from an evidence directory and populates the artifact index.
|
||||
/// Integrates with DsseVerifier for signature validation.
|
||||
/// </summary>
|
||||
public sealed class AttestationCollector
|
||||
{
|
||||
private readonly IAttestationParser _parser;
|
||||
private readonly DsseVerifier? _dsseVerifier;
|
||||
private readonly ILogger<AttestationCollector> _logger;
|
||||
|
||||
public AttestationCollector(
|
||||
IAttestationParser? parser = null,
|
||||
DsseVerifier? dsseVerifier = null,
|
||||
ILogger<AttestationCollector>? logger = null)
|
||||
{
|
||||
_parser = parser ?? new DsseAttestationParser();
|
||||
_dsseVerifier = dsseVerifier;
|
||||
_logger = logger ?? NullLogger<AttestationCollector>.Instance;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Collects attestation evidence from the attestations directory.
|
||||
/// </summary>
|
||||
/// <param name="attestationsDirectory">Path to the attestations directory.</param>
|
||||
/// <param name="index">Artifact index to populate.</param>
|
||||
/// <param name="options">Collection options.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Collection result with statistics.</returns>
|
||||
public async Task<AttestationCollectionResult> CollectAsync(
|
||||
string attestationsDirectory,
|
||||
ArtifactIndex index,
|
||||
AttestationCollectionOptions? options = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(attestationsDirectory);
|
||||
ArgumentNullException.ThrowIfNull(index);
|
||||
|
||||
options ??= AttestationCollectionOptions.Default;
|
||||
var result = new AttestationCollectionResult();
|
||||
|
||||
if (!Directory.Exists(attestationsDirectory))
|
||||
{
|
||||
_logger.LogDebug("Attestation directory does not exist: {Directory}", attestationsDirectory);
|
||||
return result;
|
||||
}
|
||||
|
||||
// Find all potential attestation files (ordered deterministically)
|
||||
var files = Directory.EnumerateFiles(attestationsDirectory, "*.*", SearchOption.AllDirectories)
|
||||
.Where(_parser.IsAttestation)
|
||||
.OrderBy(f => NormalizeRelativePath(Path.GetRelativePath(attestationsDirectory, f)), StringComparer.Ordinal)
|
||||
.ToList();
|
||||
|
||||
_logger.LogDebug("Found {Count} potential attestation files in {Directory}", files.Count, attestationsDirectory);
|
||||
|
||||
foreach (var file in files)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
try
|
||||
{
|
||||
await ProcessAttestationFileAsync(file, attestationsDirectory, index, options, result, cancellationToken);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to process attestation file: {File}", file);
|
||||
result.FailedFiles.Add((file, ex.Message));
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private async Task ProcessAttestationFileAsync(
|
||||
string filePath,
|
||||
string baseDirectory,
|
||||
ArtifactIndex index,
|
||||
AttestationCollectionOptions options,
|
||||
AttestationCollectionResult result,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
// Compute content hash for the attestation file itself
|
||||
var contentHash = await ComputeFileHashAsync(filePath, cancellationToken);
|
||||
var relativePath = NormalizeRelativePath(Path.GetRelativePath(baseDirectory, filePath));
|
||||
|
||||
// Parse the attestation
|
||||
var parseResult = await _parser.ParseAsync(filePath, cancellationToken);
|
||||
|
||||
if (!parseResult.IsSuccess)
|
||||
{
|
||||
_logger.LogWarning("Failed to parse attestation {File}: {Error}", filePath, parseResult.ErrorMessage);
|
||||
result.FailedFiles.Add((filePath, parseResult.ErrorMessage ?? "Unknown error"));
|
||||
return;
|
||||
}
|
||||
|
||||
result.ParsedFiles++;
|
||||
|
||||
var statement = parseResult.Statement!;
|
||||
var envelope = parseResult.Envelope!;
|
||||
|
||||
// Track predicate types
|
||||
if (!result.PredicateTypeCounts.TryGetValue(statement.PredicateType, out var count))
|
||||
{
|
||||
count = 0;
|
||||
}
|
||||
result.PredicateTypeCounts[statement.PredicateType] = count + 1;
|
||||
|
||||
// Verify signature using DsseVerifier (T7 integration)
|
||||
bool signatureVerified = false;
|
||||
bool tlogVerified = false;
|
||||
string? rekorUuid = null;
|
||||
|
||||
if (options.TrustRoots is not null && _dsseVerifier is not null)
|
||||
{
|
||||
var verifyResult = _dsseVerifier.Verify(envelope, options.TrustRoots, _logger);
|
||||
signatureVerified = verifyResult.IsValid;
|
||||
|
||||
if (signatureVerified)
|
||||
{
|
||||
result.VerifiedSignatures++;
|
||||
_logger.LogDebug("DSSE signature verified for attestation: {File}", relativePath);
|
||||
}
|
||||
else
|
||||
{
|
||||
_logger.LogWarning(
|
||||
"DSSE signature verification failed for attestation: {File}, reason={Reason}",
|
||||
relativePath,
|
||||
verifyResult.ErrorCode);
|
||||
}
|
||||
}
|
||||
else if (options.MarkAsUnverified)
|
||||
{
|
||||
// Mark all attestations as unverified when no trust roots configured
|
||||
signatureVerified = false;
|
||||
tlogVerified = false;
|
||||
}
|
||||
|
||||
// Get all subject digests for this attestation
|
||||
var subjectDigests = statement.Subjects
|
||||
.Select(s => s.GetSha256Digest())
|
||||
.Where(d => d is not null)
|
||||
.Cast<string>()
|
||||
.ToList();
|
||||
|
||||
// Create attestation reference
|
||||
var attestationRef = new AttestationReference(
|
||||
ContentHash: contentHash,
|
||||
FilePath: relativePath,
|
||||
PredicateType: statement.PredicateType,
|
||||
Subjects: subjectDigests,
|
||||
SignatureVerified: signatureVerified,
|
||||
TlogVerified: tlogVerified,
|
||||
RekorUuid: rekorUuid);
|
||||
|
||||
// Add to index for each subject
|
||||
foreach (var subject in statement.Subjects)
|
||||
{
|
||||
var digest = subject.GetSha256Digest();
|
||||
if (digest is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var entry = new ArtifactEntry(
|
||||
Digest: digest,
|
||||
Name: subject.Name,
|
||||
Sboms: [],
|
||||
Attestations: [attestationRef],
|
||||
VexDocuments: []);
|
||||
|
||||
index.AddOrUpdate(entry);
|
||||
result.IndexedSubjects++;
|
||||
}
|
||||
|
||||
// Handle VEX attestations specially
|
||||
if (IsVexAttestation(statement.PredicateType))
|
||||
{
|
||||
result.VexAttestationCount++;
|
||||
await CollectVexFromAttestationAsync(
|
||||
statement,
|
||||
relativePath,
|
||||
contentHash,
|
||||
index,
|
||||
result,
|
||||
cancellationToken);
|
||||
}
|
||||
|
||||
_logger.LogDebug(
|
||||
"Parsed attestation: {File}, predicateType={PredicateType}, {SubjectCount} subjects",
|
||||
relativePath,
|
||||
statement.PredicateType,
|
||||
statement.Subjects.Count);
|
||||
}
|
||||
|
||||
private async Task CollectVexFromAttestationAsync(
|
||||
InTotoStatement statement,
|
||||
string filePath,
|
||||
string contentHash,
|
||||
ArtifactIndex index,
|
||||
AttestationCollectionResult result,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
// VEX attestations contain VEX documents in their predicate
|
||||
// For now, just track them - actual VEX parsing will be enhanced later
|
||||
await Task.CompletedTask;
|
||||
|
||||
foreach (var subject in statement.Subjects)
|
||||
{
|
||||
var digest = subject.GetSha256Digest();
|
||||
if (digest is null) continue;
|
||||
|
||||
var vexRef = new VexReference(
|
||||
ContentHash: contentHash,
|
||||
FilePath: filePath,
|
||||
Format: VexFormat.OpenVex,
|
||||
Precedence: SourcePrecedence.Unknown,
|
||||
Timestamp: null);
|
||||
|
||||
var entry = new ArtifactEntry(
|
||||
Digest: digest,
|
||||
Name: subject.Name,
|
||||
Sboms: [],
|
||||
Attestations: [],
|
||||
VexDocuments: [vexRef]);
|
||||
|
||||
index.AddOrUpdate(entry);
|
||||
}
|
||||
}
|
||||
|
||||
private static bool IsVexAttestation(string predicateType)
|
||||
{
|
||||
return predicateType.Contains("vex", StringComparison.OrdinalIgnoreCase) ||
|
||||
predicateType.Contains("csaf", StringComparison.OrdinalIgnoreCase) ||
|
||||
predicateType.Equals(PredicateTypes.OpenVex, StringComparison.OrdinalIgnoreCase) ||
|
||||
predicateType.Equals(PredicateTypes.Csaf, StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
private static string NormalizeRelativePath(string path) =>
|
||||
path.Replace('\\', '/');
|
||||
|
||||
private static async Task<string> ComputeFileHashAsync(string filePath, CancellationToken cancellationToken)
|
||||
{
|
||||
await using var stream = File.OpenRead(filePath);
|
||||
var hash = await SHA256.HashDataAsync(stream, cancellationToken);
|
||||
return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Options for attestation collection.
|
||||
/// </summary>
|
||||
public sealed record AttestationCollectionOptions
|
||||
{
|
||||
public static readonly AttestationCollectionOptions Default = new();
|
||||
|
||||
/// <summary>
|
||||
/// Mark all attestations as unverified (skip signature verification).
|
||||
/// </summary>
|
||||
public bool MarkAsUnverified { get; init; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Whether to verify DSSE signatures.
|
||||
/// </summary>
|
||||
public bool VerifySignatures { get; init; } = false;
|
||||
|
||||
/// <summary>
|
||||
/// Whether to verify Rekor inclusion proofs.
|
||||
/// </summary>
|
||||
public bool VerifyRekorProofs { get; init; } = false;
|
||||
|
||||
/// <summary>
|
||||
/// Trust roots configuration for DSSE signature verification.
|
||||
/// Required when VerifySignatures is true.
|
||||
/// </summary>
|
||||
public TrustRootConfig? TrustRoots { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of attestation collection operation.
|
||||
/// </summary>
|
||||
public sealed class AttestationCollectionResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Number of attestation files successfully parsed.
|
||||
/// </summary>
|
||||
public int ParsedFiles { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of subjects indexed.
|
||||
/// </summary>
|
||||
public int IndexedSubjects { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of VEX attestations found.
|
||||
/// </summary>
|
||||
public int VexAttestationCount { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of attestations with verified DSSE signatures.
|
||||
/// </summary>
|
||||
public int VerifiedSignatures { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Count of attestations by predicate type.
|
||||
/// </summary>
|
||||
public Dictionary<string, int> PredicateTypeCounts { get; } = new(StringComparer.Ordinal);
|
||||
|
||||
/// <summary>
|
||||
/// Files that failed to parse, with error messages.
|
||||
/// </summary>
|
||||
public List<(string FilePath, string Error)> FailedFiles { get; } = [];
|
||||
}
|
||||
@@ -0,0 +1,336 @@
|
||||
// =============================================================================
|
||||
// CycloneDxParser.cs
|
||||
// CycloneDX SBOM parser implementation
|
||||
// Part of Step 2: Evidence Collection (Task T5)
|
||||
// =============================================================================
|
||||
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
|
||||
/// Parser for CycloneDX SBOM format (JSON).
|
||||
/// Supports CycloneDX 1.4, 1.5, and 1.6 schemas.
|
||||
/// </summary>
|
||||
public sealed class CycloneDxParser : ISbomParser
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
AllowTrailingCommas = true,
|
||||
ReadCommentHandling = JsonCommentHandling.Skip
|
||||
};
|
||||
|
||||
public SbomFormat DetectFormat(string filePath)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
|
||||
|
||||
// CycloneDX files typically end with .cdx.json or .bom.json
|
||||
if (filePath.EndsWith(".cdx.json", StringComparison.OrdinalIgnoreCase) ||
|
||||
filePath.EndsWith(".bom.json", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return SbomFormat.CycloneDx;
|
||||
}
|
||||
|
||||
// Try to detect from content
|
||||
if (File.Exists(filePath))
|
||||
{
|
||||
try
|
||||
{
|
||||
using var stream = File.OpenRead(filePath);
|
||||
using var reader = new StreamReader(stream);
|
||||
var firstChars = new char[1024];
|
||||
var read = reader.Read(firstChars, 0, firstChars.Length);
|
||||
var content = new string(firstChars, 0, read);
|
||||
|
||||
if (content.Contains("\"bomFormat\"", StringComparison.OrdinalIgnoreCase) ||
|
||||
content.Contains("\"$schema\"", StringComparison.OrdinalIgnoreCase) &&
|
||||
content.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return SbomFormat.CycloneDx;
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore detection errors
|
||||
}
|
||||
}
|
||||
|
||||
return SbomFormat.Unknown;
|
||||
}
|
||||
|
||||
public async Task<SbomParseResult> ParseAsync(string filePath, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return SbomParseResult.Failure($"File not found: {filePath}", SbomFormat.CycloneDx);
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
await using var stream = File.OpenRead(filePath);
|
||||
return await ParseAsync(stream, SbomFormat.CycloneDx, cancellationToken);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
return SbomParseResult.Failure($"Failed to parse CycloneDX file: {ex.Message}", SbomFormat.CycloneDx);
|
||||
}
|
||||
}
|
||||
|
||||
public async Task<SbomParseResult> ParseAsync(Stream stream, SbomFormat format, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(stream);
|
||||
|
||||
try
|
||||
{
|
||||
using var document = await JsonDocument.ParseAsync(stream, default, cancellationToken);
|
||||
var root = document.RootElement;
|
||||
|
||||
// Validate bomFormat
|
||||
if (!root.TryGetProperty("bomFormat", out var bomFormatProp) ||
|
||||
!bomFormatProp.GetString()?.Equals("CycloneDX", StringComparison.OrdinalIgnoreCase) == true)
|
||||
{
|
||||
// Try alternative detection
|
||||
if (!root.TryGetProperty("$schema", out var schemaProp) ||
|
||||
!schemaProp.GetString()?.Contains("cyclonedx", StringComparison.OrdinalIgnoreCase) == true)
|
||||
{
|
||||
return SbomParseResult.Failure("Not a valid CycloneDX document", SbomFormat.CycloneDx);
|
||||
}
|
||||
}
|
||||
|
||||
// Extract spec version
|
||||
string? specVersion = null;
|
||||
if (root.TryGetProperty("specVersion", out var specProp))
|
||||
{
|
||||
specVersion = specProp.GetString();
|
||||
}
|
||||
|
||||
// Extract serial number
|
||||
string? serialNumber = null;
|
||||
if (root.TryGetProperty("serialNumber", out var serialProp))
|
||||
{
|
||||
serialNumber = serialProp.GetString();
|
||||
}
|
||||
|
||||
// Extract creation timestamp
|
||||
DateTimeOffset? createdAt = null;
|
||||
if (root.TryGetProperty("metadata", out var metadataProp))
|
||||
{
|
||||
if (metadataProp.TryGetProperty("timestamp", out var timestampProp))
|
||||
{
|
||||
if (DateTimeOffset.TryParse(timestampProp.GetString(), out var parsed))
|
||||
{
|
||||
createdAt = parsed;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract generator tool
|
||||
string? generatorTool = null;
|
||||
if (root.TryGetProperty("metadata", out var meta) &&
|
||||
meta.TryGetProperty("tools", out var toolsProp))
|
||||
{
|
||||
generatorTool = ExtractToolInfo(toolsProp);
|
||||
}
|
||||
|
||||
// Extract primary component (metadata.component)
|
||||
SbomSubject? primarySubject = null;
|
||||
if (root.TryGetProperty("metadata", out var metaData) &&
|
||||
metaData.TryGetProperty("component", out var primaryComponent))
|
||||
{
|
||||
primarySubject = ParseComponent(primaryComponent);
|
||||
}
|
||||
|
||||
// Extract all components
|
||||
var subjects = new List<SbomSubject>();
|
||||
int totalComponentCount = 0;
|
||||
|
||||
if (root.TryGetProperty("components", out var componentsProp) &&
|
||||
componentsProp.ValueKind == JsonValueKind.Array)
|
||||
{
|
||||
foreach (var component in componentsProp.EnumerateArray())
|
||||
{
|
||||
totalComponentCount++;
|
||||
var subject = ParseComponent(component);
|
||||
if (subject is not null)
|
||||
{
|
||||
subjects.Add(subject);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Add primary subject if it has a digest and isn't already in the list
|
||||
if (primarySubject is not null &&
|
||||
!subjects.Any(s => s.Digest.Equals(primarySubject.Digest, StringComparison.OrdinalIgnoreCase)))
|
||||
{
|
||||
subjects.Insert(0, primarySubject);
|
||||
}
|
||||
|
||||
// Sort subjects for deterministic ordering
|
||||
subjects = subjects
|
||||
.OrderBy(s => s.Digest, StringComparer.Ordinal)
|
||||
.ThenBy(s => s.Name ?? string.Empty, StringComparer.Ordinal)
|
||||
.ToList();
|
||||
|
||||
return SbomParseResult.Success(
|
||||
format: SbomFormat.CycloneDx,
|
||||
subjects: subjects,
|
||||
specVersion: specVersion,
|
||||
serialNumber: serialNumber,
|
||||
createdAt: createdAt,
|
||||
generatorTool: generatorTool,
|
||||
primarySubject: primarySubject,
|
||||
totalComponentCount: totalComponentCount);
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
return SbomParseResult.Failure($"JSON parsing error: {ex.Message}", SbomFormat.CycloneDx);
|
||||
}
|
||||
}
|
||||
|
||||
private static SbomSubject? ParseComponent(JsonElement component)
|
||||
{
|
||||
// Extract hashes
|
||||
var hashes = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
if (component.TryGetProperty("hashes", out var hashesProp) &&
|
||||
hashesProp.ValueKind == JsonValueKind.Array)
|
||||
{
|
||||
foreach (var hash in hashesProp.EnumerateArray())
|
||||
{
|
||||
if (hash.TryGetProperty("alg", out var algProp) &&
|
||||
hash.TryGetProperty("content", out var contentProp))
|
||||
{
|
||||
var alg = algProp.GetString();
|
||||
var content = contentProp.GetString();
|
||||
if (!string.IsNullOrEmpty(alg) && !string.IsNullOrEmpty(content))
|
||||
{
|
||||
hashes[alg] = content;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Determine primary digest (prefer SHA-256)
|
||||
string? digest = null;
|
||||
if (hashes.TryGetValue("SHA-256", out var sha256))
|
||||
{
|
||||
digest = NormalizeDigest("sha256:" + sha256);
|
||||
}
|
||||
else if (hashes.TryGetValue("SHA256", out sha256))
|
||||
{
|
||||
digest = NormalizeDigest("sha256:" + sha256);
|
||||
}
|
||||
else if (hashes.Count > 0)
|
||||
{
|
||||
// Use first available hash
|
||||
var first = hashes.First();
|
||||
digest = NormalizeDigest($"{first.Key.ToLowerInvariant().Replace("-", "")}:{first.Value}");
|
||||
}
|
||||
|
||||
// If no digest, this component can't be indexed by digest
|
||||
if (string.IsNullOrEmpty(digest))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Extract other properties
|
||||
string? name = null;
|
||||
if (component.TryGetProperty("name", out var nameProp))
|
||||
{
|
||||
name = nameProp.GetString();
|
||||
}
|
||||
|
||||
string? version = null;
|
||||
if (component.TryGetProperty("version", out var versionProp))
|
||||
{
|
||||
version = versionProp.GetString();
|
||||
}
|
||||
|
||||
string? purl = null;
|
||||
if (component.TryGetProperty("purl", out var purlProp))
|
||||
{
|
||||
purl = purlProp.GetString();
|
||||
}
|
||||
|
||||
string? type = null;
|
||||
if (component.TryGetProperty("type", out var typeProp))
|
||||
{
|
||||
type = typeProp.GetString();
|
||||
}
|
||||
|
||||
string? bomRef = null;
|
||||
if (component.TryGetProperty("bom-ref", out var bomRefProp))
|
||||
{
|
||||
bomRef = bomRefProp.GetString();
|
||||
}
|
||||
|
||||
return new SbomSubject
|
||||
{
|
||||
Digest = digest,
|
||||
Name = name,
|
||||
Version = version,
|
||||
Purl = purl,
|
||||
Type = type,
|
||||
BomRef = bomRef,
|
||||
Hashes = hashes
|
||||
};
|
||||
}
|
||||
|
||||
private static string? ExtractToolInfo(JsonElement tools)
|
||||
{
|
||||
// CycloneDX 1.5+ uses tools.components array
|
||||
if (tools.TryGetProperty("components", out var components) &&
|
||||
components.ValueKind == JsonValueKind.Array)
|
||||
{
|
||||
var toolList = new List<string>();
|
||||
foreach (var tool in components.EnumerateArray())
|
||||
{
|
||||
if (tool.TryGetProperty("name", out var name))
|
||||
{
|
||||
var toolName = name.GetString();
|
||||
if (!string.IsNullOrEmpty(toolName))
|
||||
{
|
||||
if (tool.TryGetProperty("version", out var version))
|
||||
{
|
||||
toolName += $"@{version.GetString()}";
|
||||
}
|
||||
toolList.Add(toolName);
|
||||
}
|
||||
}
|
||||
}
|
||||
return toolList.Count > 0 ? string.Join(", ", toolList) : null;
|
||||
}
|
||||
|
||||
// CycloneDX 1.4 and earlier uses tools array directly
|
||||
if (tools.ValueKind == JsonValueKind.Array)
|
||||
{
|
||||
var toolList = new List<string>();
|
||||
foreach (var tool in tools.EnumerateArray())
|
||||
{
|
||||
if (tool.TryGetProperty("name", out var name))
|
||||
{
|
||||
var toolName = name.GetString();
|
||||
if (!string.IsNullOrEmpty(toolName))
|
||||
{
|
||||
if (tool.TryGetProperty("version", out var version))
|
||||
{
|
||||
toolName += $"@{version.GetString()}";
|
||||
}
|
||||
toolList.Add(toolName);
|
||||
}
|
||||
}
|
||||
}
|
||||
return toolList.Count > 0 ? string.Join(", ", toolList) : null;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static string NormalizeDigest(string digest)
|
||||
{
|
||||
return ArtifactIndex.NormalizeDigest(digest);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,301 @@
|
||||
// =============================================================================
|
||||
// DsseAttestationParser.cs
|
||||
// DSSE attestation parser implementation
|
||||
// Part of Step 2: Evidence Collection (Task T6)
|
||||
// =============================================================================
|
||||
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
|
||||
/// Parser for DSSE-wrapped in-toto attestations.
|
||||
/// </summary>
|
||||
public sealed class DsseAttestationParser : IAttestationParser
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
AllowTrailingCommas = true,
|
||||
ReadCommentHandling = JsonCommentHandling.Skip
|
||||
};
|
||||
|
||||
public bool IsAttestation(string filePath)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
|
||||
|
||||
var lower = filePath.ToLowerInvariant();
|
||||
|
||||
// Common attestation file extensions
|
||||
if (lower.EndsWith(".intoto.jsonl") ||
|
||||
lower.EndsWith(".intoto.json") ||
|
||||
lower.EndsWith(".dsig") ||
|
||||
lower.EndsWith(".dsse") ||
|
||||
lower.EndsWith(".att") ||
|
||||
lower.EndsWith(".attestation"))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
// Try to detect from content
|
||||
if (File.Exists(filePath))
|
||||
{
|
||||
try
|
||||
{
|
||||
using var stream = File.OpenRead(filePath);
|
||||
using var reader = new StreamReader(stream);
|
||||
var firstChars = new char[512];
|
||||
var read = reader.Read(firstChars, 0, firstChars.Length);
|
||||
var content = new string(firstChars, 0, read);
|
||||
|
||||
// DSSE envelope markers
|
||||
if (content.Contains("\"payloadType\"", StringComparison.OrdinalIgnoreCase) &&
|
||||
content.Contains("\"payload\"", StringComparison.OrdinalIgnoreCase) &&
|
||||
content.Contains("\"signatures\"", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore detection errors
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
public async Task<AttestationParseResult> ParseAsync(string filePath, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return AttestationParseResult.Failure($"File not found: {filePath}");
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
await using var stream = File.OpenRead(filePath);
|
||||
return await ParseAsync(stream, cancellationToken);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
return AttestationParseResult.Failure($"Failed to parse attestation file: {ex.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
public async Task<AttestationParseResult> ParseAsync(Stream stream, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(stream);
|
||||
|
||||
try
|
||||
{
|
||||
using var document = await JsonDocument.ParseAsync(stream, default, cancellationToken);
|
||||
var root = document.RootElement;
|
||||
|
||||
// Parse DSSE envelope
|
||||
var envelope = ParseEnvelope(root);
|
||||
if (envelope is null)
|
||||
{
|
||||
return AttestationParseResult.Failure("Invalid DSSE envelope structure");
|
||||
}
|
||||
|
||||
// Decode and parse in-toto statement
|
||||
var statement = DecodeAndParseStatement(envelope);
|
||||
if (statement is null)
|
||||
{
|
||||
return AttestationParseResult.Failure("Failed to decode or parse in-toto statement");
|
||||
}
|
||||
|
||||
return AttestationParseResult.Success(envelope, statement);
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
return AttestationParseResult.Failure($"JSON parsing error: {ex.Message}");
|
||||
}
|
||||
}
|
||||
|
||||
private static DsseEnvelope? ParseEnvelope(JsonElement root)
|
||||
{
|
||||
// Validate required fields
|
||||
if (!root.TryGetProperty("payloadType", out var payloadTypeProp) ||
|
||||
!root.TryGetProperty("payload", out var payloadProp) ||
|
||||
!root.TryGetProperty("signatures", out var signaturesProp))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var payloadType = payloadTypeProp.GetString();
|
||||
var payload = payloadProp.GetString();
|
||||
|
||||
if (string.IsNullOrEmpty(payloadType) || string.IsNullOrEmpty(payload))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Parse signatures
|
||||
var signatures = new List<DsseSignature>();
|
||||
if (signaturesProp.ValueKind == JsonValueKind.Array)
|
||||
{
|
||||
foreach (var sigElement in signaturesProp.EnumerateArray())
|
||||
{
|
||||
var sig = ParseSignature(sigElement);
|
||||
if (sig is not null)
|
||||
{
|
||||
signatures.Add(sig);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return new DsseEnvelope
|
||||
{
|
||||
PayloadType = payloadType,
|
||||
Payload = payload,
|
||||
Signatures = signatures
|
||||
};
|
||||
}
|
||||
|
||||
private static DsseSignature? ParseSignature(JsonElement element)
|
||||
{
|
||||
if (!element.TryGetProperty("sig", out var sigProp))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var sig = sigProp.GetString();
|
||||
if (string.IsNullOrEmpty(sig))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
string? keyId = null;
|
||||
if (element.TryGetProperty("keyid", out var keyIdProp))
|
||||
{
|
||||
keyId = keyIdProp.GetString();
|
||||
}
|
||||
|
||||
string? cert = null;
|
||||
if (element.TryGetProperty("cert", out var certProp))
|
||||
{
|
||||
cert = certProp.GetString();
|
||||
}
|
||||
|
||||
return new DsseSignature
|
||||
{
|
||||
Sig = sig,
|
||||
KeyId = keyId,
|
||||
Cert = cert
|
||||
};
|
||||
}
|
||||
|
||||
private static InTotoStatement? DecodeAndParseStatement(DsseEnvelope envelope)
|
||||
{
|
||||
try
|
||||
{
|
||||
// Decode base64 payload
|
||||
var payloadBytes = Convert.FromBase64String(envelope.Payload);
|
||||
var payloadJson = Encoding.UTF8.GetString(payloadBytes);
|
||||
|
||||
using var document = JsonDocument.Parse(payloadJson);
|
||||
var root = document.RootElement;
|
||||
|
||||
// Parse statement type
|
||||
string? statementType = null;
|
||||
if (root.TryGetProperty("_type", out var typeProp))
|
||||
{
|
||||
statementType = typeProp.GetString();
|
||||
}
|
||||
else if (root.TryGetProperty("type", out typeProp))
|
||||
{
|
||||
statementType = typeProp.GetString();
|
||||
}
|
||||
|
||||
if (string.IsNullOrEmpty(statementType))
|
||||
{
|
||||
statementType = "https://in-toto.io/Statement/v1";
|
||||
}
|
||||
|
||||
// Parse predicate type
|
||||
string? predicateType = null;
|
||||
if (root.TryGetProperty("predicateType", out var predicateTypeProp))
|
||||
{
|
||||
predicateType = predicateTypeProp.GetString();
|
||||
}
|
||||
|
||||
if (string.IsNullOrEmpty(predicateType))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Parse subjects
|
||||
var subjects = new List<InTotoSubject>();
|
||||
if (root.TryGetProperty("subject", out var subjectsProp) &&
|
||||
subjectsProp.ValueKind == JsonValueKind.Array)
|
||||
{
|
||||
foreach (var subjectElement in subjectsProp.EnumerateArray())
|
||||
{
|
||||
var subject = ParseSubject(subjectElement);
|
||||
if (subject is not null)
|
||||
{
|
||||
subjects.Add(subject);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract predicate JSON for further processing
|
||||
string? predicateJson = null;
|
||||
if (root.TryGetProperty("predicate", out var predicateProp))
|
||||
{
|
||||
predicateJson = predicateProp.GetRawText();
|
||||
}
|
||||
|
||||
return new InTotoStatement
|
||||
{
|
||||
Type = statementType,
|
||||
PredicateType = predicateType,
|
||||
Subjects = subjects,
|
||||
PredicateJson = predicateJson
|
||||
};
|
||||
}
|
||||
catch
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private static InTotoSubject? ParseSubject(JsonElement element)
|
||||
{
|
||||
string? name = null;
|
||||
if (element.TryGetProperty("name", out var nameProp))
|
||||
{
|
||||
name = nameProp.GetString();
|
||||
}
|
||||
|
||||
var digest = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
if (element.TryGetProperty("digest", out var digestProp) &&
|
||||
digestProp.ValueKind == JsonValueKind.Object)
|
||||
{
|
||||
foreach (var prop in digestProp.EnumerateObject())
|
||||
{
|
||||
var value = prop.Value.GetString();
|
||||
if (!string.IsNullOrEmpty(value))
|
||||
{
|
||||
digest[prop.Name] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (digest.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return new InTotoSubject
|
||||
{
|
||||
Name = name,
|
||||
Digest = digest
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,199 @@
|
||||
// =============================================================================
|
||||
// IAttestationParser.cs
|
||||
// Attestation parsing abstraction for DSSE/in-toto attestations
|
||||
// Part of Step 2: Evidence Collection (Task T6)
|
||||
// =============================================================================
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
|
||||
/// Interface for parsing DSSE-wrapped in-toto attestations.
|
||||
/// </summary>
|
||||
public interface IAttestationParser
|
||||
{
|
||||
/// <summary>
|
||||
/// Parses a DSSE envelope from the given file path.
|
||||
/// </summary>
|
||||
/// <param name="filePath">Path to the attestation file.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Parsed attestation result.</returns>
|
||||
Task<AttestationParseResult> ParseAsync(string filePath, CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Parses a DSSE envelope from a stream.
|
||||
/// </summary>
|
||||
/// <param name="stream">Stream containing the attestation content.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Parsed attestation result.</returns>
|
||||
Task<AttestationParseResult> ParseAsync(Stream stream, CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Detects if a file is a DSSE attestation.
|
||||
/// </summary>
|
||||
/// <param name="filePath">Path to the file.</param>
|
||||
/// <returns>True if the file appears to be a DSSE attestation.</returns>
|
||||
bool IsAttestation(string filePath);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of parsing an attestation document.
|
||||
/// </summary>
|
||||
public sealed record AttestationParseResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether parsing was successful.
|
||||
/// </summary>
|
||||
public bool IsSuccess { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Error message if parsing failed.
|
||||
/// </summary>
|
||||
public string? ErrorMessage { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The parsed DSSE envelope.
|
||||
/// </summary>
|
||||
public DsseEnvelope? Envelope { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The parsed in-toto statement (payload).
|
||||
/// </summary>
|
||||
public InTotoStatement? Statement { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Creates a successful parse result.
|
||||
/// </summary>
|
||||
public static AttestationParseResult Success(DsseEnvelope envelope, InTotoStatement statement)
|
||||
{
|
||||
return new AttestationParseResult
|
||||
{
|
||||
IsSuccess = true,
|
||||
Envelope = envelope,
|
||||
Statement = statement
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a failed parse result.
|
||||
/// </summary>
|
||||
public static AttestationParseResult Failure(string errorMessage)
|
||||
{
|
||||
return new AttestationParseResult
|
||||
{
|
||||
IsSuccess = false,
|
||||
ErrorMessage = errorMessage
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a DSSE (Dead Simple Signing Envelope).
|
||||
/// </summary>
|
||||
public sealed record DsseEnvelope
|
||||
{
|
||||
/// <summary>
|
||||
/// Payload type (typically "application/vnd.in-toto+json").
|
||||
/// </summary>
|
||||
public required string PayloadType { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Base64-encoded payload.
|
||||
/// </summary>
|
||||
public required string Payload { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Signatures on the envelope.
|
||||
/// </summary>
|
||||
public IReadOnlyList<DsseSignature> Signatures { get; init; } = [];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a signature in a DSSE envelope.
|
||||
/// </summary>
|
||||
public sealed record DsseSignature
|
||||
{
|
||||
/// <summary>
|
||||
/// Key identifier (e.g., key ID or certificate fingerprint).
|
||||
/// </summary>
|
||||
public string? KeyId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Base64-encoded signature.
|
||||
/// </summary>
|
||||
public required string Sig { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Certificate chain (if present).
|
||||
/// </summary>
|
||||
public string? Cert { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents an in-toto statement (attestation payload).
|
||||
/// </summary>
|
||||
public sealed record InTotoStatement
|
||||
{
|
||||
/// <summary>
|
||||
/// Statement type (typically "https://in-toto.io/Statement/v1").
|
||||
/// </summary>
|
||||
public required string Type { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Predicate type URI (e.g., "https://slsa.dev/provenance/v1").
|
||||
/// </summary>
|
||||
public required string PredicateType { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Subjects (artifacts) this statement applies to.
|
||||
/// </summary>
|
||||
public IReadOnlyList<InTotoSubject> Subjects { get; init; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Raw predicate JSON for further processing.
|
||||
/// </summary>
|
||||
public string? PredicateJson { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a subject in an in-toto statement.
|
||||
/// </summary>
|
||||
public sealed record InTotoSubject
|
||||
{
|
||||
/// <summary>
|
||||
/// Subject name (typically a file path or artifact reference).
|
||||
/// </summary>
|
||||
public string? Name { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Subject digests (algorithm -> hash).
|
||||
/// </summary>
|
||||
public IReadOnlyDictionary<string, string> Digest { get; init; } = new Dictionary<string, string>();
|
||||
|
||||
/// <summary>
|
||||
/// Gets the normalized SHA-256 digest if available.
|
||||
/// </summary>
|
||||
public string? GetSha256Digest()
|
||||
{
|
||||
if (Digest.TryGetValue("sha256", out var hash))
|
||||
{
|
||||
return "sha256:" + hash.ToLowerInvariant();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Well-known predicate types for attestations.
|
||||
/// </summary>
|
||||
public static class PredicateTypes
|
||||
{
|
||||
public const string SlsaProvenanceV1 = "https://slsa.dev/provenance/v1";
|
||||
public const string SlsaProvenanceV02 = "https://slsa.dev/provenance/v0.2";
|
||||
public const string InTotoLink = "https://in-toto.io/Link/v1";
|
||||
public const string Spdx = "https://spdx.dev/Document";
|
||||
public const string CycloneDx = "https://cyclonedx.org/bom";
|
||||
public const string OpenVex = "https://openvex.dev/ns/v0.2.0";
|
||||
public const string Csaf = "https://docs.oasis-open.org/csaf/csaf/v2.0";
|
||||
public const string ScorecardV2 = "https://ossf.github.io/scorecard/v2";
|
||||
public const string VulnerabilityReport = "https://cosign.sigstore.dev/attestation/vuln/v1";
|
||||
}
|
||||
@@ -0,0 +1,188 @@
|
||||
// =============================================================================
|
||||
// ISbomParser.cs
|
||||
// SBOM parsing abstraction for CycloneDX and SPDX formats
|
||||
// Part of Step 2: Evidence Collection (Task T5)
|
||||
// =============================================================================
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
|
||||
/// Interface for parsing SBOM documents into a normalized representation.
|
||||
/// Supports CycloneDX and SPDX formats.
|
||||
/// </summary>
|
||||
public interface ISbomParser
|
||||
{
|
||||
/// <summary>
|
||||
/// Parses an SBOM file from the given path.
|
||||
/// </summary>
|
||||
/// <param name="filePath">Path to the SBOM file.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Parsed SBOM result containing subjects and metadata.</returns>
|
||||
Task<SbomParseResult> ParseAsync(string filePath, CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Parses an SBOM from a stream.
|
||||
/// </summary>
|
||||
/// <param name="stream">Stream containing the SBOM content.</param>
|
||||
/// <param name="format">Expected SBOM format.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Parsed SBOM result containing subjects and metadata.</returns>
|
||||
Task<SbomParseResult> ParseAsync(Stream stream, SbomFormat format, CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Detects the SBOM format from file extension or content.
|
||||
/// </summary>
|
||||
/// <param name="filePath">Path to the SBOM file.</param>
|
||||
/// <returns>Detected SBOM format.</returns>
|
||||
SbomFormat DetectFormat(string filePath);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of parsing an SBOM document.
|
||||
/// </summary>
|
||||
public sealed record SbomParseResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether parsing was successful.
|
||||
/// </summary>
|
||||
public bool IsSuccess { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Error message if parsing failed.
|
||||
/// </summary>
|
||||
public string? ErrorMessage { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Detected or specified SBOM format.
|
||||
/// </summary>
|
||||
public SbomFormat Format { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// SBOM specification version (e.g., "1.6" for CycloneDX, "2.3" for SPDX).
|
||||
/// </summary>
|
||||
public string? SpecVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// SBOM serial number or document namespace.
|
||||
/// </summary>
|
||||
public string? SerialNumber { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Timestamp when the SBOM was created.
|
||||
/// </summary>
|
||||
public DateTimeOffset? CreatedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Tool that generated the SBOM.
|
||||
/// </summary>
|
||||
public string? GeneratorTool { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Primary component (for CycloneDX) or main package (for SPDX).
|
||||
/// </summary>
|
||||
public SbomSubject? PrimarySubject { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// All subjects (components/packages) in the SBOM that have digests.
|
||||
/// </summary>
|
||||
public IReadOnlyList<SbomSubject> Subjects { get; init; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Total number of components/packages in the SBOM.
|
||||
/// </summary>
|
||||
public int TotalComponentCount { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Raw normalized JSON content for hashing.
|
||||
/// </summary>
|
||||
public string? NormalizedContent { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Creates a successful parse result.
|
||||
/// </summary>
|
||||
public static SbomParseResult Success(
|
||||
SbomFormat format,
|
||||
IReadOnlyList<SbomSubject> subjects,
|
||||
string? specVersion = null,
|
||||
string? serialNumber = null,
|
||||
DateTimeOffset? createdAt = null,
|
||||
string? generatorTool = null,
|
||||
SbomSubject? primarySubject = null,
|
||||
int totalComponentCount = 0,
|
||||
string? normalizedContent = null)
|
||||
{
|
||||
return new SbomParseResult
|
||||
{
|
||||
IsSuccess = true,
|
||||
Format = format,
|
||||
Subjects = subjects,
|
||||
SpecVersion = specVersion,
|
||||
SerialNumber = serialNumber,
|
||||
CreatedAt = createdAt,
|
||||
GeneratorTool = generatorTool,
|
||||
PrimarySubject = primarySubject,
|
||||
TotalComponentCount = totalComponentCount,
|
||||
NormalizedContent = normalizedContent
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a failed parse result.
|
||||
/// </summary>
|
||||
public static SbomParseResult Failure(string errorMessage, SbomFormat format = SbomFormat.Unknown)
|
||||
{
|
||||
return new SbomParseResult
|
||||
{
|
||||
IsSuccess = false,
|
||||
ErrorMessage = errorMessage,
|
||||
Format = format,
|
||||
Subjects = []
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a subject (artifact) described by an SBOM.
|
||||
/// </summary>
|
||||
public sealed record SbomSubject
|
||||
{
|
||||
/// <summary>
|
||||
/// Artifact digest in normalized format (sha256:hex).
|
||||
/// </summary>
|
||||
public required string Digest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Human-readable name of the artifact.
|
||||
/// </summary>
|
||||
public string? Name { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Package URL (purl) if available.
|
||||
/// </summary>
|
||||
public string? Purl { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Version string.
|
||||
/// </summary>
|
||||
public string? Version { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Component type (application, library, container, etc.).
|
||||
/// </summary>
|
||||
public string? Type { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// BOM reference identifier (for CycloneDX).
|
||||
/// </summary>
|
||||
public string? BomRef { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// SPDX identifier (for SPDX).
|
||||
/// </summary>
|
||||
public string? SpdxId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// All hash values for the subject.
|
||||
/// </summary>
|
||||
public IReadOnlyDictionary<string, string> Hashes { get; init; } = new Dictionary<string, string>();
|
||||
}
|
||||
@@ -0,0 +1,173 @@
|
||||
// =============================================================================
|
||||
// SbomCollector.cs
|
||||
// SBOM evidence collector for reconciliation workflow
|
||||
// Part of Step 2: Evidence Collection (Task T5)
|
||||
// =============================================================================
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
|
||||
/// Collects SBOM evidence from an evidence directory and populates the artifact index.
|
||||
/// </summary>
|
||||
public sealed class SbomCollector
|
||||
{
|
||||
private readonly ISbomParser _parser;
|
||||
private readonly ILogger<SbomCollector> _logger;
|
||||
|
||||
public SbomCollector(ISbomParser? parser = null, ILogger<SbomCollector>? logger = null)
|
||||
{
|
||||
_parser = parser ?? new SbomParserFactory();
|
||||
_logger = logger ?? NullLogger<SbomCollector>.Instance;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Collects SBOM evidence from the sboms directory.
|
||||
/// </summary>
|
||||
/// <param name="sbomsDirectory">Path to the sboms directory.</param>
|
||||
/// <param name="index">Artifact index to populate.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Collection result with statistics.</returns>
|
||||
public async Task<SbomCollectionResult> CollectAsync(
|
||||
string sbomsDirectory,
|
||||
ArtifactIndex index,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(sbomsDirectory);
|
||||
ArgumentNullException.ThrowIfNull(index);
|
||||
|
||||
var result = new SbomCollectionResult();
|
||||
|
||||
if (!Directory.Exists(sbomsDirectory))
|
||||
{
|
||||
_logger.LogDebug("SBOM directory does not exist: {Directory}", sbomsDirectory);
|
||||
return result;
|
||||
}
|
||||
|
||||
// Find all potential SBOM files (ordered deterministically)
|
||||
var files = Directory.EnumerateFiles(sbomsDirectory, "*.*", SearchOption.AllDirectories)
|
||||
.Where(IsSbomFile)
|
||||
.OrderBy(f => NormalizeRelativePath(Path.GetRelativePath(sbomsDirectory, f)), StringComparer.Ordinal)
|
||||
.ToList();
|
||||
|
||||
_logger.LogDebug("Found {Count} potential SBOM files in {Directory}", files.Count, sbomsDirectory);
|
||||
|
||||
foreach (var file in files)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
try
|
||||
{
|
||||
await ProcessSbomFileAsync(file, sbomsDirectory, index, result, cancellationToken);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to process SBOM file: {File}", file);
|
||||
result.FailedFiles.Add((file, ex.Message));
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private async Task ProcessSbomFileAsync(
|
||||
string filePath,
|
||||
string baseDirectory,
|
||||
ArtifactIndex index,
|
||||
SbomCollectionResult result,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
// Compute content hash for the SBOM file itself
|
||||
var contentHash = await ComputeFileHashAsync(filePath, cancellationToken);
|
||||
var relativePath = NormalizeRelativePath(Path.GetRelativePath(baseDirectory, filePath));
|
||||
|
||||
// Parse the SBOM
|
||||
var parseResult = await _parser.ParseAsync(filePath, cancellationToken);
|
||||
|
||||
if (!parseResult.IsSuccess)
|
||||
{
|
||||
_logger.LogWarning("Failed to parse SBOM {File}: {Error}", filePath, parseResult.ErrorMessage);
|
||||
result.FailedFiles.Add((filePath, parseResult.ErrorMessage ?? "Unknown error"));
|
||||
return;
|
||||
}
|
||||
|
||||
result.ParsedFiles++;
|
||||
result.TotalSubjects += parseResult.Subjects.Count;
|
||||
|
||||
// Create SBOM reference
|
||||
var sbomRef = new SbomReference(
|
||||
ContentHash: contentHash,
|
||||
FilePath: relativePath,
|
||||
Format: parseResult.Format,
|
||||
CreatedAt: parseResult.CreatedAt);
|
||||
|
||||
// Add each subject to the index
|
||||
foreach (var subject in parseResult.Subjects)
|
||||
{
|
||||
var entry = new ArtifactEntry(
|
||||
Digest: subject.Digest,
|
||||
Name: subject.Name,
|
||||
Sboms: [sbomRef],
|
||||
Attestations: [],
|
||||
VexDocuments: []);
|
||||
|
||||
index.AddOrUpdate(entry);
|
||||
result.IndexedSubjects++;
|
||||
}
|
||||
|
||||
_logger.LogDebug(
|
||||
"Parsed {Format} SBOM: {File}, {SubjectCount} subjects indexed",
|
||||
parseResult.Format,
|
||||
relativePath,
|
||||
parseResult.Subjects.Count);
|
||||
}
|
||||
|
||||
private static bool IsSbomFile(string filePath)
|
||||
{
|
||||
var lower = filePath.ToLowerInvariant();
|
||||
return lower.EndsWith(".cdx.json") ||
|
||||
lower.EndsWith(".bom.json") ||
|
||||
lower.EndsWith(".spdx.json") ||
|
||||
lower.EndsWith("sbom.json") ||
|
||||
lower.EndsWith("bom.json");
|
||||
}
|
||||
|
||||
private static string NormalizeRelativePath(string path) =>
|
||||
path.Replace('\\', '/');
|
||||
|
||||
private static async Task<string> ComputeFileHashAsync(string filePath, CancellationToken cancellationToken)
|
||||
{
|
||||
await using var stream = File.OpenRead(filePath);
|
||||
var hash = await SHA256.HashDataAsync(stream, cancellationToken);
|
||||
return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of SBOM collection operation.
|
||||
/// </summary>
|
||||
public sealed class SbomCollectionResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Number of SBOM files successfully parsed.
|
||||
/// </summary>
|
||||
public int ParsedFiles { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Total number of subjects found across all SBOMs.
|
||||
/// </summary>
|
||||
public int TotalSubjects { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of subjects indexed (with valid digests).
|
||||
/// </summary>
|
||||
public int IndexedSubjects { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Files that failed to parse, with error messages.
|
||||
/// </summary>
|
||||
public List<(string FilePath, string Error)> FailedFiles { get; } = [];
|
||||
}
|
||||
@@ -0,0 +1,490 @@
|
||||
// =============================================================================
|
||||
// SbomNormalizer.cs
|
||||
// Canonical SBOM transformer for deterministic reconciliation
|
||||
// Part of Step 3: Normalization (Task T13)
|
||||
// =============================================================================
|
||||
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
|
||||
/// Transforms SBOMs into a canonical form for deterministic hashing and comparison.
|
||||
/// Applies normalization rules per advisory §5 step 3.
|
||||
/// </summary>
|
||||
public sealed class SbomNormalizer
|
||||
{
|
||||
private readonly NormalizationOptions _options;
|
||||
|
||||
public SbomNormalizer(NormalizationOptions? options = null)
|
||||
{
|
||||
_options = options ?? NormalizationOptions.Default;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes an SBOM JSON document to canonical form.
|
||||
/// </summary>
|
||||
/// <param name="sbomJson">Raw SBOM JSON content.</param>
|
||||
/// <param name="format">SBOM format (CycloneDX or SPDX).</param>
|
||||
/// <returns>Normalized JSON string.</returns>
|
||||
public string Normalize(string sbomJson, SbomFormat format)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(sbomJson);
|
||||
|
||||
var node = JsonNode.Parse(sbomJson);
|
||||
if (node is null)
|
||||
{
|
||||
return "null";
|
||||
}
|
||||
|
||||
var normalized = format switch
|
||||
{
|
||||
SbomFormat.CycloneDx => NormalizeCycloneDx(node),
|
||||
SbomFormat.Spdx => NormalizeSpdx(node),
|
||||
_ => NormalizeGeneric(node)
|
||||
};
|
||||
|
||||
return SerializeCanonical(normalized);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes a CycloneDX SBOM.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeCycloneDx(JsonNode node)
|
||||
{
|
||||
if (node is not JsonObject obj)
|
||||
{
|
||||
return node;
|
||||
}
|
||||
|
||||
var normalized = new JsonObject();
|
||||
|
||||
// Process in deterministic key order
|
||||
var sortedKeys = obj
|
||||
.Select(kv => kv.Key)
|
||||
.Where(key => !ShouldStripCycloneDxField(key))
|
||||
.OrderBy(k => k, StringComparer.Ordinal);
|
||||
|
||||
foreach (var key in sortedKeys)
|
||||
{
|
||||
var value = obj[key];
|
||||
if (value is null) continue;
|
||||
|
||||
var normalizedValue = key switch
|
||||
{
|
||||
"components" => NormalizeComponents(value.DeepClone()),
|
||||
"metadata" => NormalizeCycloneDxMetadata(value.DeepClone()),
|
||||
"dependencies" => NormalizeDependencies(value.DeepClone()),
|
||||
"vulnerabilities" => NormalizeVulnerabilities(value.DeepClone()),
|
||||
_ => NormalizeNode(value.DeepClone())
|
||||
};
|
||||
|
||||
normalized[key] = normalizedValue;
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes an SPDX SBOM.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeSpdx(JsonNode node)
|
||||
{
|
||||
if (node is not JsonObject obj)
|
||||
{
|
||||
return node;
|
||||
}
|
||||
|
||||
var normalized = new JsonObject();
|
||||
|
||||
var sortedKeys = obj
|
||||
.Select(kv => kv.Key)
|
||||
.Where(key => !ShouldStripSpdxField(key))
|
||||
.OrderBy(k => k, StringComparer.Ordinal);
|
||||
|
||||
foreach (var key in sortedKeys)
|
||||
{
|
||||
var value = obj[key];
|
||||
if (value is null) continue;
|
||||
|
||||
var normalizedValue = key switch
|
||||
{
|
||||
"packages" => NormalizeSpdxPackages(value.DeepClone()),
|
||||
"relationships" => NormalizeSpdxRelationships(value.DeepClone()),
|
||||
"files" => NormalizeSpdxFiles(value.DeepClone()),
|
||||
"creationInfo" => NormalizeSpdxCreationInfo(value.DeepClone()),
|
||||
_ => NormalizeNode(value.DeepClone())
|
||||
};
|
||||
|
||||
normalized[key] = normalizedValue;
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generic normalization for unknown formats.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeGeneric(JsonNode node)
|
||||
{
|
||||
return NormalizeNode(node);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Recursively normalizes a JSON node.
|
||||
/// </summary>
|
||||
private JsonNode? NormalizeNode(JsonNode? node)
|
||||
{
|
||||
return node switch
|
||||
{
|
||||
JsonObject obj => NormalizeObject(obj),
|
||||
JsonArray arr => NormalizeArray(arr),
|
||||
JsonValue val => NormalizeValue(val),
|
||||
_ => node
|
||||
};
|
||||
}
|
||||
|
||||
private JsonObject NormalizeObject(JsonObject obj)
|
||||
{
|
||||
var normalized = new JsonObject();
|
||||
|
||||
var sortedKeys = obj
|
||||
.Select(kv => kv.Key)
|
||||
.Where(key => !ShouldStripTimestampField(key))
|
||||
.OrderBy(k => k, StringComparer.Ordinal);
|
||||
|
||||
foreach (var key in sortedKeys)
|
||||
{
|
||||
var value = obj[key];
|
||||
normalized[key] = NormalizeNode(value?.DeepClone());
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
private JsonArray NormalizeArray(JsonArray arr)
|
||||
{
|
||||
var normalized = new JsonArray();
|
||||
|
||||
var elements = arr
|
||||
.Select(n => NormalizeNode(n?.DeepClone()))
|
||||
.ToList();
|
||||
|
||||
// Sort arrays of objects by a deterministic key
|
||||
if (_options.SortArrays && elements.All(e => e is JsonObject))
|
||||
{
|
||||
elements = elements
|
||||
.Cast<JsonObject>()
|
||||
.OrderBy(o => GetSortKey(o), StringComparer.Ordinal)
|
||||
.Cast<JsonNode?>()
|
||||
.ToList();
|
||||
}
|
||||
|
||||
foreach (var element in elements)
|
||||
{
|
||||
normalized.Add(element);
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
private JsonValue NormalizeValue(JsonValue val)
|
||||
{
|
||||
var value = val.GetValue<object>();
|
||||
|
||||
if (value is string str)
|
||||
{
|
||||
// Lowercase URIs
|
||||
if (_options.LowercaseUris && IsUri(str))
|
||||
{
|
||||
str = str.ToLowerInvariant();
|
||||
}
|
||||
|
||||
return JsonValue.Create(str)!;
|
||||
}
|
||||
|
||||
return val.DeepClone().AsValue();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes CycloneDX components array.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeComponents(JsonNode node)
|
||||
{
|
||||
if (node is not JsonArray arr)
|
||||
{
|
||||
return NormalizeNode(node)!;
|
||||
}
|
||||
|
||||
var normalized = new JsonArray();
|
||||
var components = arr
|
||||
.Select(c => NormalizeObject((c as JsonObject)!))
|
||||
.OrderBy(c => GetComponentSortKey(c), StringComparer.Ordinal);
|
||||
|
||||
foreach (var component in components)
|
||||
{
|
||||
normalized.Add(component);
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes CycloneDX metadata.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeCycloneDxMetadata(JsonNode node)
|
||||
{
|
||||
if (node is not JsonObject obj)
|
||||
{
|
||||
return NormalizeNode(node)!;
|
||||
}
|
||||
|
||||
var normalized = new JsonObject();
|
||||
|
||||
var sortedKeys = obj
|
||||
.Select(kv => kv.Key)
|
||||
.Where(key => _options.StripTimestamps ? key != "timestamp" : true)
|
||||
.OrderBy(k => k, StringComparer.Ordinal);
|
||||
|
||||
foreach (var key in sortedKeys)
|
||||
{
|
||||
var value = obj[key];
|
||||
normalized[key] = NormalizeNode(value?.DeepClone());
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes CycloneDX dependencies.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeDependencies(JsonNode node)
|
||||
{
|
||||
if (node is not JsonArray arr)
|
||||
{
|
||||
return NormalizeNode(node)!;
|
||||
}
|
||||
|
||||
var normalized = new JsonArray();
|
||||
var deps = arr
|
||||
.Select(d => NormalizeObject((d as JsonObject)!))
|
||||
.OrderBy(d => d["ref"]?.GetValue<string>() ?? "", StringComparer.Ordinal);
|
||||
|
||||
foreach (var dep in deps)
|
||||
{
|
||||
// Also sort dependsOn arrays
|
||||
if (dep["dependsOn"] is JsonArray dependsOn)
|
||||
{
|
||||
var sortedDeps = new JsonArray();
|
||||
foreach (var item in dependsOn.OrderBy(x => x?.GetValue<string>() ?? "", StringComparer.Ordinal))
|
||||
{
|
||||
sortedDeps.Add(item?.DeepClone());
|
||||
}
|
||||
dep["dependsOn"] = sortedDeps;
|
||||
}
|
||||
normalized.Add(dep);
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes CycloneDX vulnerabilities.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeVulnerabilities(JsonNode node)
|
||||
{
|
||||
if (node is not JsonArray arr)
|
||||
{
|
||||
return NormalizeNode(node)!;
|
||||
}
|
||||
|
||||
var normalized = new JsonArray();
|
||||
var vulns = arr
|
||||
.Select(v => NormalizeObject((v as JsonObject)!))
|
||||
.OrderBy(v => v["id"]?.GetValue<string>() ?? "", StringComparer.Ordinal);
|
||||
|
||||
foreach (var vuln in vulns)
|
||||
{
|
||||
normalized.Add(vuln);
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes SPDX packages.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeSpdxPackages(JsonNode node)
|
||||
{
|
||||
if (node is not JsonArray arr)
|
||||
{
|
||||
return NormalizeNode(node)!;
|
||||
}
|
||||
|
||||
var normalized = new JsonArray();
|
||||
var packages = arr
|
||||
.Select(p => NormalizeObject((p as JsonObject)!))
|
||||
.OrderBy(p => p["SPDXID"]?.GetValue<string>() ?? "", StringComparer.Ordinal);
|
||||
|
||||
foreach (var pkg in packages)
|
||||
{
|
||||
normalized.Add(pkg);
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes SPDX relationships.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeSpdxRelationships(JsonNode node)
|
||||
{
|
||||
if (node is not JsonArray arr)
|
||||
{
|
||||
return NormalizeNode(node)!;
|
||||
}
|
||||
|
||||
var normalized = new JsonArray();
|
||||
var rels = arr
|
||||
.Select(r => NormalizeObject((r as JsonObject)!))
|
||||
.OrderBy(r => r["spdxElementId"]?.GetValue<string>() ?? "", StringComparer.Ordinal)
|
||||
.ThenBy(r => r["relatedSpdxElement"]?.GetValue<string>() ?? "", StringComparer.Ordinal)
|
||||
.ThenBy(r => r["relationshipType"]?.GetValue<string>() ?? "", StringComparer.Ordinal);
|
||||
|
||||
foreach (var rel in rels)
|
||||
{
|
||||
normalized.Add(rel);
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes SPDX files.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeSpdxFiles(JsonNode node)
|
||||
{
|
||||
if (node is not JsonArray arr)
|
||||
{
|
||||
return NormalizeNode(node)!;
|
||||
}
|
||||
|
||||
var normalized = new JsonArray();
|
||||
var files = arr
|
||||
.Select(f => NormalizeObject((f as JsonObject)!))
|
||||
.OrderBy(f => f["SPDXID"]?.GetValue<string>() ?? "", StringComparer.Ordinal);
|
||||
|
||||
foreach (var file in files)
|
||||
{
|
||||
normalized.Add(file);
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes SPDX creation info.
|
||||
/// </summary>
|
||||
private JsonNode NormalizeSpdxCreationInfo(JsonNode node)
|
||||
{
|
||||
if (node is not JsonObject obj)
|
||||
{
|
||||
return NormalizeNode(node)!;
|
||||
}
|
||||
|
||||
var normalized = new JsonObject();
|
||||
|
||||
var sortedKeys = obj
|
||||
.Select(kv => kv.Key)
|
||||
.Where(key => _options.StripTimestamps ? key != "created" : true)
|
||||
.OrderBy(k => k, StringComparer.Ordinal);
|
||||
|
||||
foreach (var key in sortedKeys)
|
||||
{
|
||||
var value = obj[key];
|
||||
normalized[key] = NormalizeNode(value?.DeepClone());
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
private static string GetComponentSortKey(JsonObject obj)
|
||||
{
|
||||
// Sort by bom-ref or purl or name+version
|
||||
if (obj.TryGetPropertyValue("bom-ref", out var bomRef) && bomRef is JsonValue bv)
|
||||
{
|
||||
return bv.GetValue<string>() ?? "";
|
||||
}
|
||||
if (obj.TryGetPropertyValue("purl", out var purl) && purl is JsonValue pv)
|
||||
{
|
||||
return pv.GetValue<string>() ?? "";
|
||||
}
|
||||
|
||||
var name = obj["name"]?.GetValue<string>() ?? "";
|
||||
var version = obj["version"]?.GetValue<string>() ?? "";
|
||||
return $"{name}@{version}";
|
||||
}
|
||||
|
||||
private static string GetSortKey(JsonObject obj)
|
||||
{
|
||||
var keyPriority = new[] { "id", "@id", "bom-ref", "SPDXID", "name", "digest", "uri", "ref" };
|
||||
|
||||
foreach (var key in keyPriority)
|
||||
{
|
||||
if (obj.TryGetPropertyValue(key, out var value) && value is JsonValue jv)
|
||||
{
|
||||
return jv.GetValue<string>() ?? "";
|
||||
}
|
||||
}
|
||||
|
||||
return obj.ToJsonString();
|
||||
}
|
||||
|
||||
private static bool ShouldStripCycloneDxField(string key)
|
||||
{
|
||||
// Fields that should be stripped for canonical form
|
||||
return key == "$schema";
|
||||
}
|
||||
|
||||
private static bool ShouldStripSpdxField(string key)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
private bool ShouldStripTimestampField(string key)
|
||||
{
|
||||
if (!_options.StripTimestamps)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
var timestampFields = new[]
|
||||
{
|
||||
"timestamp", "created", "modified", "updated", "createdAt", "updatedAt",
|
||||
"modifiedAt", "date", "time", "datetime", "lastModified", "generated"
|
||||
};
|
||||
|
||||
return timestampFields.Any(f => key.Equals(f, StringComparison.OrdinalIgnoreCase));
|
||||
}
|
||||
|
||||
private static bool IsUri(string value)
|
||||
{
|
||||
return value.StartsWith("http://", StringComparison.OrdinalIgnoreCase) ||
|
||||
value.StartsWith("https://", StringComparison.OrdinalIgnoreCase) ||
|
||||
value.StartsWith("urn:", StringComparison.OrdinalIgnoreCase) ||
|
||||
value.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
private static string SerializeCanonical(JsonNode node)
|
||||
{
|
||||
var options = new JsonSerializerOptions
|
||||
{
|
||||
WriteIndented = false,
|
||||
PropertyNamingPolicy = null,
|
||||
DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
|
||||
};
|
||||
|
||||
return node.ToJsonString(options);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,91 @@
|
||||
// =============================================================================
|
||||
// SbomParserFactory.cs
|
||||
// Factory for creating and selecting SBOM parsers
|
||||
// Part of Step 2: Evidence Collection (Task T5)
|
||||
// =============================================================================
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
|
||||
/// Factory for creating SBOM parsers and detecting SBOM formats.
|
||||
/// </summary>
|
||||
public sealed class SbomParserFactory : ISbomParser
|
||||
{
|
||||
private readonly CycloneDxParser _cycloneDxParser;
|
||||
private readonly SpdxParser _spdxParser;
|
||||
|
||||
public SbomParserFactory()
|
||||
{
|
||||
_cycloneDxParser = new CycloneDxParser();
|
||||
_spdxParser = new SpdxParser();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Detects the SBOM format from file extension or content.
|
||||
/// </summary>
|
||||
public SbomFormat DetectFormat(string filePath)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
|
||||
|
||||
// Try CycloneDX first
|
||||
var format = _cycloneDxParser.DetectFormat(filePath);
|
||||
if (format != SbomFormat.Unknown)
|
||||
{
|
||||
return format;
|
||||
}
|
||||
|
||||
// Try SPDX
|
||||
format = _spdxParser.DetectFormat(filePath);
|
||||
if (format != SbomFormat.Unknown)
|
||||
{
|
||||
return format;
|
||||
}
|
||||
|
||||
return SbomFormat.Unknown;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parses an SBOM file using auto-detected format.
|
||||
/// </summary>
|
||||
public async Task<SbomParseResult> ParseAsync(string filePath, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
|
||||
|
||||
var format = DetectFormat(filePath);
|
||||
|
||||
return format switch
|
||||
{
|
||||
SbomFormat.CycloneDx => await _cycloneDxParser.ParseAsync(filePath, cancellationToken),
|
||||
SbomFormat.Spdx => await _spdxParser.ParseAsync(filePath, cancellationToken),
|
||||
_ => SbomParseResult.Failure($"Unknown SBOM format for file: {filePath}", SbomFormat.Unknown)
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parses an SBOM from a stream using the specified format.
|
||||
/// </summary>
|
||||
public async Task<SbomParseResult> ParseAsync(Stream stream, SbomFormat format, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(stream);
|
||||
|
||||
return format switch
|
||||
{
|
||||
SbomFormat.CycloneDx => await _cycloneDxParser.ParseAsync(stream, format, cancellationToken),
|
||||
SbomFormat.Spdx => await _spdxParser.ParseAsync(stream, format, cancellationToken),
|
||||
_ => SbomParseResult.Failure($"Unknown SBOM format: {format}", format)
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets a parser for the specified format.
|
||||
/// </summary>
|
||||
public ISbomParser GetParser(SbomFormat format)
|
||||
{
|
||||
return format switch
|
||||
{
|
||||
SbomFormat.CycloneDx => _cycloneDxParser,
|
||||
SbomFormat.Spdx => _spdxParser,
|
||||
_ => throw new ArgumentException($"No parser available for format: {format}", nameof(format))
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,305 @@
|
||||
// =============================================================================
|
||||
// SpdxParser.cs
|
||||
// SPDX SBOM parser implementation
|
||||
// Part of Step 2: Evidence Collection (Task T5)
|
||||
// =============================================================================
|
||||
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.AirGap.Importer.Reconciliation.Parsers;
|
||||
|
||||
/// <summary>
|
||||
/// Parser for SPDX SBOM format (JSON).
|
||||
/// Supports SPDX 2.2 and 2.3 schemas.
|
||||
/// </summary>
|
||||
public sealed class SpdxParser : ISbomParser
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
AllowTrailingCommas = true,
|
||||
ReadCommentHandling = JsonCommentHandling.Skip
|
||||
};
|
||||
|
||||
public SbomFormat DetectFormat(string filePath)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
|
||||
|
||||
// SPDX files typically end with .spdx.json
|
||||
if (filePath.EndsWith(".spdx.json", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return SbomFormat.Spdx;
|
||||
}
|
||||
|
||||
// Try to detect from content
|
||||
if (File.Exists(filePath))
|
||||
{
|
||||
try
|
||||
{
|
||||
using var stream = File.OpenRead(filePath);
|
||||
using var reader = new StreamReader(stream);
|
||||
var firstChars = new char[1024];
|
||||
var read = reader.Read(firstChars, 0, firstChars.Length);
|
||||
var content = new string(firstChars, 0, read);
|
||||
|
||||
if (content.Contains("\"spdxVersion\"", StringComparison.OrdinalIgnoreCase) ||
|
||||
content.Contains("\"SPDXID\"", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return SbomFormat.Spdx;
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore detection errors
|
||||
}
|
||||
}
|
||||
|
||||
return SbomFormat.Unknown;
|
||||
}
|
||||
|
||||
public async Task<SbomParseResult> ParseAsync(string filePath, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
|
||||
|
||||
if (!File.Exists(filePath))
|
||||
{
|
||||
return SbomParseResult.Failure($"File not found: {filePath}", SbomFormat.Spdx);
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
await using var stream = File.OpenRead(filePath);
|
||||
return await ParseAsync(stream, SbomFormat.Spdx, cancellationToken);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
return SbomParseResult.Failure($"Failed to parse SPDX file: {ex.Message}", SbomFormat.Spdx);
|
||||
}
|
||||
}
|
||||
|
||||
public async Task<SbomParseResult> ParseAsync(Stream stream, SbomFormat format, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(stream);
|
||||
|
||||
try
|
||||
{
|
||||
using var document = await JsonDocument.ParseAsync(stream, default, cancellationToken);
|
||||
var root = document.RootElement;
|
||||
|
||||
// Validate spdxVersion
|
||||
if (!root.TryGetProperty("spdxVersion", out var versionProp))
|
||||
{
|
||||
return SbomParseResult.Failure("Not a valid SPDX document: missing spdxVersion", SbomFormat.Spdx);
|
||||
}
|
||||
|
||||
var specVersion = versionProp.GetString();
|
||||
if (string.IsNullOrEmpty(specVersion) ||
|
||||
!specVersion.StartsWith("SPDX-", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return SbomParseResult.Failure("Not a valid SPDX document: invalid spdxVersion", SbomFormat.Spdx);
|
||||
}
|
||||
|
||||
// Extract version number (e.g., "SPDX-2.3" -> "2.3")
|
||||
specVersion = specVersion[5..];
|
||||
|
||||
// Extract document namespace (serves as serial number)
|
||||
string? serialNumber = null;
|
||||
if (root.TryGetProperty("documentNamespace", out var namespaceProp))
|
||||
{
|
||||
serialNumber = namespaceProp.GetString();
|
||||
}
|
||||
|
||||
// Extract creation timestamp
|
||||
DateTimeOffset? createdAt = null;
|
||||
if (root.TryGetProperty("creationInfo", out var creationInfoProp) &&
|
||||
creationInfoProp.TryGetProperty("created", out var createdProp))
|
||||
{
|
||||
if (DateTimeOffset.TryParse(createdProp.GetString(), out var parsed))
|
||||
{
|
||||
createdAt = parsed;
|
||||
}
|
||||
}
|
||||
|
||||
// Extract generator tool
|
||||
string? generatorTool = null;
|
||||
if (root.TryGetProperty("creationInfo", out var creationInfo) &&
|
||||
creationInfo.TryGetProperty("creators", out var creatorsProp) &&
|
||||
creatorsProp.ValueKind == JsonValueKind.Array)
|
||||
{
|
||||
var tools = new List<string>();
|
||||
foreach (var creator in creatorsProp.EnumerateArray())
|
||||
{
|
||||
var creatorStr = creator.GetString();
|
||||
if (creatorStr?.StartsWith("Tool:", StringComparison.OrdinalIgnoreCase) == true)
|
||||
{
|
||||
tools.Add(creatorStr[5..].Trim());
|
||||
}
|
||||
}
|
||||
generatorTool = tools.Count > 0 ? string.Join(", ", tools) : null;
|
||||
}
|
||||
|
||||
// Extract primary package (documentDescribes)
|
||||
SbomSubject? primarySubject = null;
|
||||
var describedIds = new HashSet<string>(StringComparer.Ordinal);
|
||||
|
||||
if (root.TryGetProperty("documentDescribes", out var describesProp) &&
|
||||
describesProp.ValueKind == JsonValueKind.Array)
|
||||
{
|
||||
foreach (var id in describesProp.EnumerateArray())
|
||||
{
|
||||
var spdxId = id.GetString();
|
||||
if (!string.IsNullOrEmpty(spdxId))
|
||||
{
|
||||
describedIds.Add(spdxId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract all packages
|
||||
var subjects = new List<SbomSubject>();
|
||||
int totalComponentCount = 0;
|
||||
|
||||
if (root.TryGetProperty("packages", out var packagesProp) &&
|
||||
packagesProp.ValueKind == JsonValueKind.Array)
|
||||
{
|
||||
foreach (var package in packagesProp.EnumerateArray())
|
||||
{
|
||||
totalComponentCount++;
|
||||
var subject = ParsePackage(package);
|
||||
if (subject is not null)
|
||||
{
|
||||
subjects.Add(subject);
|
||||
|
||||
// Check if this is the primary subject
|
||||
if (subject.SpdxId is not null && describedIds.Contains(subject.SpdxId))
|
||||
{
|
||||
primarySubject ??= subject;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort subjects for deterministic ordering
|
||||
subjects = subjects
|
||||
.OrderBy(s => s.Digest, StringComparer.Ordinal)
|
||||
.ThenBy(s => s.Name ?? string.Empty, StringComparer.Ordinal)
|
||||
.ToList();
|
||||
|
||||
return SbomParseResult.Success(
|
||||
format: SbomFormat.Spdx,
|
||||
subjects: subjects,
|
||||
specVersion: specVersion,
|
||||
serialNumber: serialNumber,
|
||||
createdAt: createdAt,
|
||||
generatorTool: generatorTool,
|
||||
primarySubject: primarySubject,
|
||||
totalComponentCount: totalComponentCount);
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
return SbomParseResult.Failure($"JSON parsing error: {ex.Message}", SbomFormat.Spdx);
|
||||
}
|
||||
}
|
||||
|
||||
private static SbomSubject? ParsePackage(JsonElement package)
|
||||
{
|
||||
// Extract checksums
|
||||
var hashes = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
|
||||
if (package.TryGetProperty("checksums", out var checksumsProp) &&
|
||||
checksumsProp.ValueKind == JsonValueKind.Array)
|
||||
{
|
||||
foreach (var checksum in checksumsProp.EnumerateArray())
|
||||
{
|
||||
if (checksum.TryGetProperty("algorithm", out var algProp) &&
|
||||
checksum.TryGetProperty("checksumValue", out var valueProp))
|
||||
{
|
||||
var alg = algProp.GetString();
|
||||
var value = valueProp.GetString();
|
||||
if (!string.IsNullOrEmpty(alg) && !string.IsNullOrEmpty(value))
|
||||
{
|
||||
hashes[alg] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Determine primary digest (prefer SHA256)
|
||||
string? digest = null;
|
||||
if (hashes.TryGetValue("SHA256", out var sha256))
|
||||
{
|
||||
digest = NormalizeDigest("sha256:" + sha256);
|
||||
}
|
||||
else if (hashes.Count > 0)
|
||||
{
|
||||
// Use first available hash
|
||||
var first = hashes.First();
|
||||
digest = NormalizeDigest($"{first.Key.ToLowerInvariant()}:{first.Value}");
|
||||
}
|
||||
|
||||
// If no digest, this package can't be indexed by digest
|
||||
if (string.IsNullOrEmpty(digest))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Extract SPDXID
|
||||
string? spdxId = null;
|
||||
if (package.TryGetProperty("SPDXID", out var spdxIdProp))
|
||||
{
|
||||
spdxId = spdxIdProp.GetString();
|
||||
}
|
||||
|
||||
// Extract other properties
|
||||
string? name = null;
|
||||
if (package.TryGetProperty("name", out var nameProp))
|
||||
{
|
||||
name = nameProp.GetString();
|
||||
}
|
||||
|
||||
string? version = null;
|
||||
if (package.TryGetProperty("versionInfo", out var versionProp))
|
||||
{
|
||||
version = versionProp.GetString();
|
||||
}
|
||||
|
||||
// SPDX uses external refs for purl
|
||||
string? purl = null;
|
||||
if (package.TryGetProperty("externalRefs", out var refsProp) &&
|
||||
refsProp.ValueKind == JsonValueKind.Array)
|
||||
{
|
||||
foreach (var extRef in refsProp.EnumerateArray())
|
||||
{
|
||||
if (extRef.TryGetProperty("referenceType", out var refTypeProp) &&
|
||||
refTypeProp.GetString()?.Equals("purl", StringComparison.OrdinalIgnoreCase) == true &&
|
||||
extRef.TryGetProperty("referenceLocator", out var locatorProp))
|
||||
{
|
||||
purl = locatorProp.GetString();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// SPDX doesn't have component type directly, check primaryPackagePurpose
|
||||
string? type = null;
|
||||
if (package.TryGetProperty("primaryPackagePurpose", out var purposeProp))
|
||||
{
|
||||
type = purposeProp.GetString();
|
||||
}
|
||||
|
||||
return new SbomSubject
|
||||
{
|
||||
Digest = digest,
|
||||
Name = name,
|
||||
Version = version,
|
||||
Purl = purl,
|
||||
Type = type,
|
||||
SpdxId = spdxId,
|
||||
Hashes = hashes
|
||||
};
|
||||
}
|
||||
|
||||
private static string NormalizeDigest(string digest)
|
||||
{
|
||||
return ArtifactIndex.NormalizeDigest(digest);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,171 @@
|
||||
namespace StellaOps.Attestor.Core.Configuration;
|
||||
|
||||
/// <summary>
|
||||
/// Configuration options for Rekor verification.
|
||||
/// SPRINT_3000_0001_0001 - T4: Rekor public key configuration
|
||||
/// </summary>
|
||||
public sealed class RekorVerificationOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Configuration section name for binding.
|
||||
/// </summary>
|
||||
public const string SectionName = "Attestor:Rekor";
|
||||
|
||||
/// <summary>
|
||||
/// Path to Rekor log public key file (PEM format).
|
||||
/// </summary>
|
||||
public string? PublicKeyPath { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Inline Rekor public key (base64-encoded PEM).
|
||||
/// Takes precedence over PublicKeyPath.
|
||||
/// </summary>
|
||||
public string? PublicKeyBase64 { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Allow verification without checkpoint signature in offline mode.
|
||||
/// WARNING: This reduces security guarantees. Use only in fully air-gapped
|
||||
/// environments where checkpoint freshness is verified through other means.
|
||||
/// </summary>
|
||||
public bool AllowOfflineWithoutSignature { get; set; } = false;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum age of checkpoint before requiring refresh (minutes).
|
||||
/// Default: 60 minutes.
|
||||
/// </summary>
|
||||
public int MaxCheckpointAgeMinutes { get; set; } = 60;
|
||||
|
||||
/// <summary>
|
||||
/// Whether to fail verification if no public key is configured.
|
||||
/// Default: true (strict mode).
|
||||
/// </summary>
|
||||
public bool RequirePublicKey { get; set; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Path to offline checkpoint bundle for air-gapped verification.
|
||||
/// Bundle format: JSON array of checkpoint objects with signatures.
|
||||
/// </summary>
|
||||
public string? OfflineCheckpointBundlePath { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether to enable offline verification mode.
|
||||
/// When enabled, uses bundled checkpoints instead of fetching from Rekor.
|
||||
/// </summary>
|
||||
public bool EnableOfflineMode { get; set; } = false;
|
||||
|
||||
/// <summary>
|
||||
/// Rekor server URL for online verification.
|
||||
/// Default: https://rekor.sigstore.dev
|
||||
/// </summary>
|
||||
public string RekorServerUrl { get; set; } = "https://rekor.sigstore.dev";
|
||||
|
||||
/// <summary>
|
||||
/// Connection timeout for Rekor server (seconds).
|
||||
/// </summary>
|
||||
public int ConnectionTimeoutSeconds { get; set; } = 30;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum number of retries for transient failures.
|
||||
/// </summary>
|
||||
public int MaxRetries { get; set; } = 3;
|
||||
|
||||
/// <summary>
|
||||
/// Whether to cache verified checkpoints in memory.
|
||||
/// Reduces redundant signature verification for same checkpoint.
|
||||
/// </summary>
|
||||
public bool EnableCheckpointCache { get; set; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum number of checkpoints to cache.
|
||||
/// </summary>
|
||||
public int CheckpointCacheSize { get; set; } = 100;
|
||||
|
||||
/// <summary>
|
||||
/// Validates the configuration.
|
||||
/// </summary>
|
||||
/// <returns>List of validation errors, empty if valid.</returns>
|
||||
public IReadOnlyList<string> Validate()
|
||||
{
|
||||
var errors = new List<string>();
|
||||
|
||||
if (RequirePublicKey && string.IsNullOrEmpty(PublicKeyPath) && string.IsNullOrEmpty(PublicKeyBase64))
|
||||
{
|
||||
errors.Add("Rekor public key must be configured (PublicKeyPath or PublicKeyBase64)");
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(PublicKeyPath) && !File.Exists(PublicKeyPath))
|
||||
{
|
||||
errors.Add($"Rekor public key file not found: {PublicKeyPath}");
|
||||
}
|
||||
|
||||
if (EnableOfflineMode && string.IsNullOrEmpty(OfflineCheckpointBundlePath))
|
||||
{
|
||||
errors.Add("OfflineCheckpointBundlePath must be configured when EnableOfflineMode is true");
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(OfflineCheckpointBundlePath) && !File.Exists(OfflineCheckpointBundlePath))
|
||||
{
|
||||
errors.Add($"Offline checkpoint bundle not found: {OfflineCheckpointBundlePath}");
|
||||
}
|
||||
|
||||
if (MaxCheckpointAgeMinutes < 1)
|
||||
{
|
||||
errors.Add("MaxCheckpointAgeMinutes must be at least 1");
|
||||
}
|
||||
|
||||
if (ConnectionTimeoutSeconds < 1)
|
||||
{
|
||||
errors.Add("ConnectionTimeoutSeconds must be at least 1");
|
||||
}
|
||||
|
||||
if (MaxRetries < 0)
|
||||
{
|
||||
errors.Add("MaxRetries cannot be negative");
|
||||
}
|
||||
|
||||
if (CheckpointCacheSize < 1)
|
||||
{
|
||||
errors.Add("CheckpointCacheSize must be at least 1");
|
||||
}
|
||||
|
||||
return errors;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Loads the public key from the configured source.
|
||||
/// </summary>
|
||||
/// <returns>The public key bytes, or null if not configured.</returns>
|
||||
public byte[]? LoadPublicKey()
|
||||
{
|
||||
if (!string.IsNullOrEmpty(PublicKeyBase64))
|
||||
{
|
||||
return Convert.FromBase64String(PublicKeyBase64);
|
||||
}
|
||||
|
||||
if (!string.IsNullOrEmpty(PublicKeyPath) && File.Exists(PublicKeyPath))
|
||||
{
|
||||
var pem = File.ReadAllText(PublicKeyPath);
|
||||
return ParsePemPublicKey(pem);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parses a PEM-encoded public key.
|
||||
/// </summary>
|
||||
private static byte[] ParsePemPublicKey(string pem)
|
||||
{
|
||||
// Remove PEM headers/footers
|
||||
var base64 = pem
|
||||
.Replace("-----BEGIN PUBLIC KEY-----", "")
|
||||
.Replace("-----END PUBLIC KEY-----", "")
|
||||
.Replace("-----BEGIN EC PUBLIC KEY-----", "")
|
||||
.Replace("-----END EC PUBLIC KEY-----", "")
|
||||
.Replace("\r", "")
|
||||
.Replace("\n", "")
|
||||
.Trim();
|
||||
|
||||
return Convert.FromBase64String(base64);
|
||||
}
|
||||
}
|
||||
@@ -28,6 +28,15 @@ public sealed class AttestorMetrics : IDisposable
|
||||
BulkItemsTotal = _meter.CreateCounter<long>("attestor.bulk_items_total", description: "Bulk verification items processed grouped by result.");
|
||||
BulkJobDuration = _meter.CreateHistogram<double>("attestor.bulk_job_duration_seconds", unit: "s", description: "Bulk verification job duration in seconds grouped by status.");
|
||||
ErrorTotal = _meter.CreateCounter<long>("attestor.errors_total", description: "Total errors grouped by type.");
|
||||
|
||||
// SPRINT_3000_0001_0001 - T11: Rekor verification counters
|
||||
RekorInclusionVerifyTotal = _meter.CreateCounter<long>("attestor.rekor_inclusion_verify_total", description: "Rekor inclusion proof verification attempts grouped by result.");
|
||||
RekorInclusionVerifyLatency = _meter.CreateHistogram<double>("attestor.rekor_inclusion_verify_latency_seconds", unit: "s", description: "Rekor inclusion proof verification latency in seconds.");
|
||||
RekorCheckpointVerifyTotal = _meter.CreateCounter<long>("attestor.rekor_checkpoint_verify_total", description: "Rekor checkpoint signature verification attempts grouped by result.");
|
||||
RekorCheckpointVerifyLatency = _meter.CreateHistogram<double>("attestor.rekor_checkpoint_verify_latency_seconds", unit: "s", description: "Rekor checkpoint signature verification latency in seconds.");
|
||||
RekorOfflineVerifyTotal = _meter.CreateCounter<long>("attestor.rekor_offline_verify_total", description: "Rekor offline mode verification attempts grouped by result.");
|
||||
RekorCheckpointCacheHits = _meter.CreateCounter<long>("attestor.rekor_checkpoint_cache_hits", description: "Rekor checkpoint cache hits.");
|
||||
RekorCheckpointCacheMisses = _meter.CreateCounter<long>("attestor.rekor_checkpoint_cache_misses", description: "Rekor checkpoint cache misses.");
|
||||
}
|
||||
|
||||
public Counter<long> SubmitTotal { get; }
|
||||
@@ -62,6 +71,42 @@ public sealed class AttestorMetrics : IDisposable
|
||||
|
||||
public Counter<long> ErrorTotal { get; }
|
||||
|
||||
// SPRINT_3000_0001_0001 - T11: Rekor verification counters
|
||||
/// <summary>
|
||||
/// Rekor inclusion proof verification attempts grouped by result (success/failure).
|
||||
/// </summary>
|
||||
public Counter<long> RekorInclusionVerifyTotal { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Rekor inclusion proof verification latency in seconds.
|
||||
/// </summary>
|
||||
public Histogram<double> RekorInclusionVerifyLatency { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Rekor checkpoint signature verification attempts grouped by result.
|
||||
/// </summary>
|
||||
public Counter<long> RekorCheckpointVerifyTotal { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Rekor checkpoint signature verification latency in seconds.
|
||||
/// </summary>
|
||||
public Histogram<double> RekorCheckpointVerifyLatency { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Rekor offline mode verification attempts grouped by result.
|
||||
/// </summary>
|
||||
public Counter<long> RekorOfflineVerifyTotal { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Rekor checkpoint cache hits.
|
||||
/// </summary>
|
||||
public Counter<long> RekorCheckpointCacheHits { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Rekor checkpoint cache misses.
|
||||
/// </summary>
|
||||
public Counter<long> RekorCheckpointCacheMisses { get; }
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (_disposed)
|
||||
|
||||
@@ -0,0 +1,64 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// RekorQueueOptions.cs
|
||||
// Sprint: SPRINT_3000_0001_0002_rekor_retry_queue_metrics
|
||||
// Task: T6
|
||||
// Description: Configuration options for the Rekor retry queue
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Attestor.Core.Options;
|
||||
|
||||
/// <summary>
|
||||
/// Configuration options for the Rekor durable retry queue.
|
||||
/// </summary>
|
||||
public sealed class RekorQueueOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Enable durable queue for Rekor submissions.
|
||||
/// </summary>
|
||||
public bool Enabled { get; set; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum retry attempts before dead-lettering.
|
||||
/// </summary>
|
||||
public int MaxAttempts { get; set; } = 5;
|
||||
|
||||
/// <summary>
|
||||
/// Initial retry delay in milliseconds.
|
||||
/// </summary>
|
||||
public int InitialDelayMs { get; set; } = 1000;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum retry delay in milliseconds.
|
||||
/// </summary>
|
||||
public int MaxDelayMs { get; set; } = 60000;
|
||||
|
||||
/// <summary>
|
||||
/// Backoff multiplier for exponential retry.
|
||||
/// </summary>
|
||||
public double BackoffMultiplier { get; set; } = 2.0;
|
||||
|
||||
/// <summary>
|
||||
/// Batch size for retry processing.
|
||||
/// </summary>
|
||||
public int BatchSize { get; set; } = 10;
|
||||
|
||||
/// <summary>
|
||||
/// Poll interval for queue processing in milliseconds.
|
||||
/// </summary>
|
||||
public int PollIntervalMs { get; set; } = 5000;
|
||||
|
||||
/// <summary>
|
||||
/// Dead letter retention in days (0 = indefinite).
|
||||
/// </summary>
|
||||
public int DeadLetterRetentionDays { get; set; } = 30;
|
||||
|
||||
/// <summary>
|
||||
/// Calculate the next retry delay using exponential backoff.
|
||||
/// </summary>
|
||||
public TimeSpan CalculateRetryDelay(int attemptCount)
|
||||
{
|
||||
var delayMs = InitialDelayMs * Math.Pow(BackoffMultiplier, attemptCount);
|
||||
delayMs = Math.Min(delayMs, MaxDelayMs);
|
||||
return TimeSpan.FromMilliseconds(delayMs);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,40 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// QueueDepthSnapshot.cs
|
||||
// Sprint: SPRINT_3000_0001_0002_rekor_retry_queue_metrics
|
||||
// Task: T9
|
||||
// Description: Snapshot of queue depth by status
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Attestor.Core.Queue;
|
||||
|
||||
/// <summary>
|
||||
/// Snapshot of the Rekor submission queue depth by status.
|
||||
/// </summary>
|
||||
/// <param name="Pending">Count of items in Pending status.</param>
|
||||
/// <param name="Submitting">Count of items in Submitting status.</param>
|
||||
/// <param name="Retrying">Count of items in Retrying status.</param>
|
||||
/// <param name="DeadLetter">Count of items in DeadLetter status.</param>
|
||||
/// <param name="MeasuredAt">Timestamp when the snapshot was taken.</param>
|
||||
public sealed record QueueDepthSnapshot(
|
||||
int Pending,
|
||||
int Submitting,
|
||||
int Retrying,
|
||||
int DeadLetter,
|
||||
DateTimeOffset MeasuredAt)
|
||||
{
|
||||
/// <summary>
|
||||
/// Total items waiting to be processed (pending + retrying).
|
||||
/// </summary>
|
||||
public int TotalWaiting => Pending + Retrying;
|
||||
|
||||
/// <summary>
|
||||
/// Total items in the queue (all statuses except submitted).
|
||||
/// </summary>
|
||||
public int TotalInQueue => Pending + Submitting + Retrying + DeadLetter;
|
||||
|
||||
/// <summary>
|
||||
/// Creates an empty snapshot.
|
||||
/// </summary>
|
||||
public static QueueDepthSnapshot Empty(DateTimeOffset measuredAt) =>
|
||||
new(0, 0, 0, 0, measuredAt);
|
||||
}
|
||||
@@ -0,0 +1,43 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// RekorQueueItem.cs
|
||||
// Sprint: SPRINT_3000_0001_0002_rekor_retry_queue_metrics
|
||||
// Task: T2
|
||||
// Description: Queue item model for Rekor submissions
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Attestor.Core.Queue;
|
||||
|
||||
/// <summary>
|
||||
/// Represents an item in the Rekor submission queue.
|
||||
/// </summary>
|
||||
/// <param name="Id">Unique identifier for the queue item.</param>
|
||||
/// <param name="TenantId">Tenant identifier.</param>
|
||||
/// <param name="BundleSha256">SHA-256 hash of the bundle being attested.</param>
|
||||
/// <param name="DssePayload">Serialized DSSE envelope payload.</param>
|
||||
/// <param name="Backend">Target Rekor backend ('primary' or 'mirror').</param>
|
||||
/// <param name="Status">Current submission status.</param>
|
||||
/// <param name="AttemptCount">Number of submission attempts made.</param>
|
||||
/// <param name="MaxAttempts">Maximum allowed attempts before dead-lettering.</param>
|
||||
/// <param name="LastAttemptAt">Timestamp of the last submission attempt.</param>
|
||||
/// <param name="LastError">Error message from the last failed attempt.</param>
|
||||
/// <param name="NextRetryAt">Scheduled time for the next retry attempt.</param>
|
||||
/// <param name="RekorUuid">UUID from Rekor after successful submission.</param>
|
||||
/// <param name="RekorLogIndex">Log index from Rekor after successful submission.</param>
|
||||
/// <param name="CreatedAt">Timestamp when the item was created.</param>
|
||||
/// <param name="UpdatedAt">Timestamp when the item was last updated.</param>
|
||||
public sealed record RekorQueueItem(
|
||||
Guid Id,
|
||||
string TenantId,
|
||||
string BundleSha256,
|
||||
byte[] DssePayload,
|
||||
string Backend,
|
||||
RekorSubmissionStatus Status,
|
||||
int AttemptCount,
|
||||
int MaxAttempts,
|
||||
DateTimeOffset? LastAttemptAt,
|
||||
string? LastError,
|
||||
DateTimeOffset? NextRetryAt,
|
||||
string? RekorUuid,
|
||||
long? RekorLogIndex,
|
||||
DateTimeOffset CreatedAt,
|
||||
DateTimeOffset UpdatedAt);
|
||||
@@ -0,0 +1,39 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// RekorSubmissionStatus.cs
|
||||
// Sprint: SPRINT_3000_0001_0002_rekor_retry_queue_metrics
|
||||
// Task: T4
|
||||
// Description: Status enum for Rekor queue items
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Attestor.Core.Queue;
|
||||
|
||||
/// <summary>
|
||||
/// Status of a Rekor submission queue item.
|
||||
/// </summary>
|
||||
public enum RekorSubmissionStatus
|
||||
{
|
||||
/// <summary>
|
||||
/// Queued and waiting for initial submission.
|
||||
/// </summary>
|
||||
Pending,
|
||||
|
||||
/// <summary>
|
||||
/// Currently being submitted to Rekor.
|
||||
/// </summary>
|
||||
Submitting,
|
||||
|
||||
/// <summary>
|
||||
/// Successfully submitted to Rekor.
|
||||
/// </summary>
|
||||
Submitted,
|
||||
|
||||
/// <summary>
|
||||
/// Waiting for retry after a failed attempt.
|
||||
/// </summary>
|
||||
Retrying,
|
||||
|
||||
/// <summary>
|
||||
/// Permanently failed after max retries exceeded.
|
||||
/// </summary>
|
||||
DeadLetter
|
||||
}
|
||||
@@ -18,4 +18,20 @@ public sealed class RekorSubmissionResponse
|
||||
|
||||
[JsonPropertyName("proof")]
|
||||
public RekorProofResponse? Proof { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Unix timestamp (seconds since epoch) when entry was integrated into the log.
|
||||
/// Used for time skew validation per advisory SPRINT_3000_0001_0003.
|
||||
/// </summary>
|
||||
[JsonPropertyName("integratedTime")]
|
||||
public long? IntegratedTime { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets the integrated time as a DateTimeOffset.
|
||||
/// </summary>
|
||||
[JsonIgnore]
|
||||
public DateTimeOffset? IntegratedTimeUtc =>
|
||||
IntegratedTime.HasValue
|
||||
? DateTimeOffset.FromUnixTimeSeconds(IntegratedTime.Value)
|
||||
: null;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,279 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace StellaOps.Attestor.Core.Verification;
|
||||
|
||||
/// <summary>
|
||||
/// Verifies Rekor checkpoint signatures per the Sigstore checkpoint format.
|
||||
/// SPRINT_3000_0001_0001 - T3: Checkpoint signature verification
|
||||
/// </summary>
|
||||
public static partial class CheckpointSignatureVerifier
|
||||
{
|
||||
/// <summary>
|
||||
/// Rekor checkpoint format regular expression.
|
||||
/// Format: "rekor.sigstore.dev - {log_id}\n{tree_size}\n{root_hash}\n{timestamp}\n"
|
||||
/// </summary>
|
||||
[GeneratedRegex(@"^(?<origin>[^\n]+)\n(?<size>\d+)\n(?<root>[A-Za-z0-9+/=]+)\n(?<timestamp>\d+)?\n?")]
|
||||
private static partial Regex CheckpointBodyRegex();
|
||||
|
||||
/// <summary>
|
||||
/// Verifies a Rekor checkpoint signature.
|
||||
/// </summary>
|
||||
/// <param name="checkpoint">The checkpoint body (note lines)</param>
|
||||
/// <param name="signature">The signature bytes</param>
|
||||
/// <param name="publicKey">The Rekor log public key (PEM or raw)</param>
|
||||
/// <returns>Verification result</returns>
|
||||
public static CheckpointVerificationResult VerifyCheckpoint(
|
||||
string checkpoint,
|
||||
byte[] signature,
|
||||
byte[] publicKey)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(checkpoint);
|
||||
ArgumentNullException.ThrowIfNull(signature);
|
||||
ArgumentNullException.ThrowIfNull(publicKey);
|
||||
|
||||
// Parse checkpoint body
|
||||
var match = CheckpointBodyRegex().Match(checkpoint);
|
||||
if (!match.Success)
|
||||
{
|
||||
return new CheckpointVerificationResult
|
||||
{
|
||||
Verified = false,
|
||||
FailureReason = "Invalid checkpoint format",
|
||||
};
|
||||
}
|
||||
|
||||
var origin = match.Groups["origin"].Value;
|
||||
var sizeStr = match.Groups["size"].Value;
|
||||
var rootBase64 = match.Groups["root"].Value;
|
||||
|
||||
if (!long.TryParse(sizeStr, out var treeSize))
|
||||
{
|
||||
return new CheckpointVerificationResult
|
||||
{
|
||||
Verified = false,
|
||||
FailureReason = "Invalid tree size in checkpoint",
|
||||
};
|
||||
}
|
||||
|
||||
byte[] rootHash;
|
||||
try
|
||||
{
|
||||
rootHash = Convert.FromBase64String(rootBase64);
|
||||
}
|
||||
catch (FormatException)
|
||||
{
|
||||
return new CheckpointVerificationResult
|
||||
{
|
||||
Verified = false,
|
||||
FailureReason = "Invalid root hash encoding in checkpoint",
|
||||
};
|
||||
}
|
||||
|
||||
// Verify signature
|
||||
try
|
||||
{
|
||||
var data = Encoding.UTF8.GetBytes(checkpoint);
|
||||
var verified = VerifySignature(data, signature, publicKey);
|
||||
|
||||
return new CheckpointVerificationResult
|
||||
{
|
||||
Verified = verified,
|
||||
Origin = origin,
|
||||
TreeSize = treeSize,
|
||||
RootHash = rootHash,
|
||||
FailureReason = verified ? null : "Signature verification failed",
|
||||
};
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
return new CheckpointVerificationResult
|
||||
{
|
||||
Verified = false,
|
||||
FailureReason = $"Signature verification error: {ex.Message}",
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Parses a checkpoint without verifying the signature.
|
||||
/// </summary>
|
||||
public static CheckpointVerificationResult ParseCheckpoint(string checkpoint)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(checkpoint);
|
||||
|
||||
var match = CheckpointBodyRegex().Match(checkpoint);
|
||||
if (!match.Success)
|
||||
{
|
||||
return new CheckpointVerificationResult
|
||||
{
|
||||
Verified = false,
|
||||
FailureReason = "Invalid checkpoint format",
|
||||
};
|
||||
}
|
||||
|
||||
var origin = match.Groups["origin"].Value;
|
||||
var sizeStr = match.Groups["size"].Value;
|
||||
var rootBase64 = match.Groups["root"].Value;
|
||||
|
||||
if (!long.TryParse(sizeStr, out var treeSize))
|
||||
{
|
||||
return new CheckpointVerificationResult
|
||||
{
|
||||
Verified = false,
|
||||
FailureReason = "Invalid tree size in checkpoint",
|
||||
};
|
||||
}
|
||||
|
||||
byte[] rootHash;
|
||||
try
|
||||
{
|
||||
rootHash = Convert.FromBase64String(rootBase64);
|
||||
}
|
||||
catch (FormatException)
|
||||
{
|
||||
return new CheckpointVerificationResult
|
||||
{
|
||||
Verified = false,
|
||||
FailureReason = "Invalid root hash encoding in checkpoint",
|
||||
};
|
||||
}
|
||||
|
||||
return new CheckpointVerificationResult
|
||||
{
|
||||
Verified = false, // Not verified, just parsed
|
||||
Origin = origin,
|
||||
TreeSize = treeSize,
|
||||
RootHash = rootHash,
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verifies an ECDSA or Ed25519 signature.
|
||||
/// </summary>
|
||||
private static bool VerifySignature(byte[] data, byte[] signature, byte[] publicKey)
|
||||
{
|
||||
// Detect key type from length/format
|
||||
// Ed25519 public keys are 32 bytes
|
||||
// ECDSA P-256 public keys are 65 bytes (uncompressed) or 33 bytes (compressed)
|
||||
|
||||
if (publicKey.Length == 32)
|
||||
{
|
||||
// Ed25519
|
||||
return VerifyEd25519(data, signature, publicKey);
|
||||
}
|
||||
else if (publicKey.Length >= 33)
|
||||
{
|
||||
// ECDSA - try to parse as PEM or raw
|
||||
return VerifyEcdsa(data, signature, publicKey);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verifies an Ed25519 signature (placeholder for actual implementation).
|
||||
/// </summary>
|
||||
private static bool VerifyEd25519(byte[] data, byte[] signature, byte[] publicKey)
|
||||
{
|
||||
// .NET 10 may have built-in Ed25519 support
|
||||
// For now, this is a placeholder that would use a library like NSec
|
||||
// In production, this would call the appropriate Ed25519 verification
|
||||
|
||||
// TODO: Implement Ed25519 verification when .NET 10 supports it natively
|
||||
// or use NSec.Cryptography
|
||||
|
||||
throw new NotSupportedException(
|
||||
"Ed25519 verification requires additional library support. " +
|
||||
"Please use ECDSA P-256 keys or add Ed25519 library dependency.");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verifies an ECDSA signature using .NET's built-in support.
|
||||
/// </summary>
|
||||
private static bool VerifyEcdsa(byte[] data, byte[] signature, byte[] publicKey)
|
||||
{
|
||||
using var ecdsa = ECDsa.Create();
|
||||
|
||||
// Try to import as SubjectPublicKeyInfo first
|
||||
try
|
||||
{
|
||||
ecdsa.ImportSubjectPublicKeyInfo(publicKey, out _);
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Try to import as raw P-256 key
|
||||
try
|
||||
{
|
||||
var curve = ECCurve.NamedCurves.nistP256;
|
||||
var keyParams = new ECParameters
|
||||
{
|
||||
Curve = curve,
|
||||
Q = new ECPoint
|
||||
{
|
||||
X = publicKey[1..33],
|
||||
Y = publicKey[33..65],
|
||||
},
|
||||
};
|
||||
ecdsa.ImportParameters(keyParams);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Compute SHA-256 hash of data
|
||||
var hash = SHA256.HashData(data);
|
||||
|
||||
// Verify signature (try both DER and raw formats)
|
||||
try
|
||||
{
|
||||
return ecdsa.VerifyHash(hash, signature);
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Try DER format
|
||||
try
|
||||
{
|
||||
return ecdsa.VerifyHash(hash, signature, DSASignatureFormat.Rfc3279DerSequence);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of checkpoint verification.
|
||||
/// </summary>
|
||||
public sealed class CheckpointVerificationResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether the checkpoint signature was verified successfully.
|
||||
/// </summary>
|
||||
public bool Verified { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The checkpoint origin (e.g., "rekor.sigstore.dev - {log_id}").
|
||||
/// </summary>
|
||||
public string? Origin { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The tree size at the checkpoint.
|
||||
/// </summary>
|
||||
public long TreeSize { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The root hash at the checkpoint.
|
||||
/// </summary>
|
||||
public byte[]? RootHash { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The reason for verification failure, if any.
|
||||
/// </summary>
|
||||
public string? FailureReason { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,222 @@
|
||||
namespace StellaOps.Attestor.Core.Verification;
|
||||
|
||||
/// <summary>
|
||||
/// Configuration options for time skew validation.
|
||||
/// Per advisory SPRINT_3000_0001_0003.
|
||||
/// </summary>
|
||||
public sealed class TimeSkewOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether time skew validation is enabled.
|
||||
/// Default: true. Set to false for offline mode.
|
||||
/// </summary>
|
||||
public bool Enabled { get; set; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Warning threshold in seconds.
|
||||
/// If skew is between warn and reject thresholds, log a warning but don't fail.
|
||||
/// Default: 60 seconds (1 minute).
|
||||
/// </summary>
|
||||
public int WarnThresholdSeconds { get; set; } = 60;
|
||||
|
||||
/// <summary>
|
||||
/// Rejection threshold in seconds.
|
||||
/// If skew exceeds this value, reject the entry.
|
||||
/// Default: 300 seconds (5 minutes).
|
||||
/// </summary>
|
||||
public int RejectThresholdSeconds { get; set; } = 300;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum allowed future time skew in seconds.
|
||||
/// Future timestamps are more suspicious than past ones.
|
||||
/// Default: 60 seconds.
|
||||
/// </summary>
|
||||
public int MaxFutureSkewSeconds { get; set; } = 60;
|
||||
|
||||
/// <summary>
|
||||
/// Whether to fail hard on time skew rejection.
|
||||
/// If false, logs error but continues processing.
|
||||
/// Default: true.
|
||||
/// </summary>
|
||||
public bool FailOnReject { get; set; } = true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of time skew validation.
|
||||
/// </summary>
|
||||
public sealed record TimeSkewValidationResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether the validation passed.
|
||||
/// </summary>
|
||||
public required bool IsValid { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The validation status.
|
||||
/// </summary>
|
||||
public required TimeSkewStatus Status { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The calculated skew in seconds (positive = past, negative = future).
|
||||
/// </summary>
|
||||
public required double SkewSeconds { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The integrated time from Rekor.
|
||||
/// </summary>
|
||||
public required DateTimeOffset IntegratedTime { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The local validation time.
|
||||
/// </summary>
|
||||
public required DateTimeOffset LocalTime { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Human-readable message about the result.
|
||||
/// </summary>
|
||||
public required string Message { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Create a successful validation result.
|
||||
/// </summary>
|
||||
public static TimeSkewValidationResult Ok(DateTimeOffset integratedTime, DateTimeOffset localTime, double skewSeconds) => new()
|
||||
{
|
||||
IsValid = true,
|
||||
Status = TimeSkewStatus.Ok,
|
||||
SkewSeconds = skewSeconds,
|
||||
IntegratedTime = integratedTime,
|
||||
LocalTime = localTime,
|
||||
Message = $"Time skew within acceptable range: {skewSeconds:F1}s"
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Create a warning result.
|
||||
/// </summary>
|
||||
public static TimeSkewValidationResult Warning(DateTimeOffset integratedTime, DateTimeOffset localTime, double skewSeconds) => new()
|
||||
{
|
||||
IsValid = true,
|
||||
Status = TimeSkewStatus.Warning,
|
||||
SkewSeconds = skewSeconds,
|
||||
IntegratedTime = integratedTime,
|
||||
LocalTime = localTime,
|
||||
Message = $"Time skew detected: {skewSeconds:F1}s exceeds warning threshold"
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Create a rejection result.
|
||||
/// </summary>
|
||||
public static TimeSkewValidationResult Rejected(DateTimeOffset integratedTime, DateTimeOffset localTime, double skewSeconds, bool isFuture) => new()
|
||||
{
|
||||
IsValid = false,
|
||||
Status = isFuture ? TimeSkewStatus.FutureTimestamp : TimeSkewStatus.Rejected,
|
||||
SkewSeconds = skewSeconds,
|
||||
IntegratedTime = integratedTime,
|
||||
LocalTime = localTime,
|
||||
Message = isFuture
|
||||
? $"Future timestamp detected: {Math.Abs(skewSeconds):F1}s ahead of local time"
|
||||
: $"Time skew rejected: {skewSeconds:F1}s exceeds rejection threshold"
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Create a skipped result (validation disabled or no integrated time).
|
||||
/// </summary>
|
||||
public static TimeSkewValidationResult Skipped(string reason) => new()
|
||||
{
|
||||
IsValid = true,
|
||||
Status = TimeSkewStatus.Skipped,
|
||||
SkewSeconds = 0,
|
||||
IntegratedTime = DateTimeOffset.MinValue,
|
||||
LocalTime = DateTimeOffset.UtcNow,
|
||||
Message = reason
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Time skew validation status.
|
||||
/// </summary>
|
||||
public enum TimeSkewStatus
|
||||
{
|
||||
/// <summary>Time skew is within acceptable range.</summary>
|
||||
Ok,
|
||||
|
||||
/// <summary>Time skew exceeds warning threshold but not rejection.</summary>
|
||||
Warning,
|
||||
|
||||
/// <summary>Time skew exceeds rejection threshold.</summary>
|
||||
Rejected,
|
||||
|
||||
/// <summary>Integrated time is in the future (suspicious).</summary>
|
||||
FutureTimestamp,
|
||||
|
||||
/// <summary>Validation was skipped (disabled or no data).</summary>
|
||||
Skipped
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for time skew validation.
|
||||
/// </summary>
|
||||
public interface ITimeSkewValidator
|
||||
{
|
||||
/// <summary>
|
||||
/// Validate the time skew between integrated time and local time.
|
||||
/// </summary>
|
||||
/// <param name="integratedTime">The integrated time from Rekor (nullable).</param>
|
||||
/// <param name="localTime">The local validation time (defaults to now).</param>
|
||||
/// <returns>The validation result.</returns>
|
||||
TimeSkewValidationResult Validate(DateTimeOffset? integratedTime, DateTimeOffset? localTime = null);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Default implementation of time skew validation.
|
||||
/// </summary>
|
||||
public sealed class TimeSkewValidator : ITimeSkewValidator
|
||||
{
|
||||
private readonly TimeSkewOptions _options;
|
||||
|
||||
public TimeSkewValidator(TimeSkewOptions options)
|
||||
{
|
||||
_options = options ?? throw new ArgumentNullException(nameof(options));
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public TimeSkewValidationResult Validate(DateTimeOffset? integratedTime, DateTimeOffset? localTime = null)
|
||||
{
|
||||
if (!_options.Enabled)
|
||||
{
|
||||
return TimeSkewValidationResult.Skipped("Time skew validation disabled");
|
||||
}
|
||||
|
||||
if (!integratedTime.HasValue)
|
||||
{
|
||||
return TimeSkewValidationResult.Skipped("No integrated time available");
|
||||
}
|
||||
|
||||
var now = localTime ?? DateTimeOffset.UtcNow;
|
||||
var skew = (now - integratedTime.Value).TotalSeconds;
|
||||
|
||||
// Future timestamp (integrated time is ahead of local time)
|
||||
if (skew < 0)
|
||||
{
|
||||
var futureSkew = Math.Abs(skew);
|
||||
if (futureSkew > _options.MaxFutureSkewSeconds)
|
||||
{
|
||||
return TimeSkewValidationResult.Rejected(integratedTime.Value, now, skew, isFuture: true);
|
||||
}
|
||||
// Small future skew is OK (clock drift)
|
||||
return TimeSkewValidationResult.Ok(integratedTime.Value, now, skew);
|
||||
}
|
||||
|
||||
// Past timestamp (normal case)
|
||||
if (skew >= _options.RejectThresholdSeconds)
|
||||
{
|
||||
return TimeSkewValidationResult.Rejected(integratedTime.Value, now, skew, isFuture: false);
|
||||
}
|
||||
|
||||
if (skew >= _options.WarnThresholdSeconds)
|
||||
{
|
||||
return TimeSkewValidationResult.Warning(integratedTime.Value, now, skew);
|
||||
}
|
||||
|
||||
return TimeSkewValidationResult.Ok(integratedTime.Value, now, skew);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,154 @@
|
||||
using StellaOps.Attestor.Core.Verification;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Attestor.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for CheckpointSignatureVerifier.
|
||||
/// SPRINT_3000_0001_0001 - T3: Checkpoint signature verification tests
|
||||
/// </summary>
|
||||
public sealed class CheckpointSignatureVerifierTests
|
||||
{
|
||||
// Sample checkpoint format (Rekor production format)
|
||||
private const string ValidCheckpointBody = """
|
||||
rekor.sigstore.dev - 2605736670972794746
|
||||
123456789
|
||||
abc123def456ghi789jkl012mno345pqr678stu901vwx234=
|
||||
1702345678
|
||||
""";
|
||||
|
||||
private const string InvalidFormatCheckpoint = "not a valid checkpoint";
|
||||
|
||||
[Fact]
|
||||
public void ParseCheckpoint_ValidFormat_ExtractsFields()
|
||||
{
|
||||
// Act
|
||||
var result = CheckpointSignatureVerifier.ParseCheckpoint(ValidCheckpointBody);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result.Origin);
|
||||
Assert.Contains("rekor.sigstore.dev", result.Origin);
|
||||
Assert.Equal(123456789L, result.TreeSize);
|
||||
Assert.NotNull(result.RootHash);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseCheckpoint_InvalidFormat_ReturnsFailure()
|
||||
{
|
||||
// Act
|
||||
var result = CheckpointSignatureVerifier.ParseCheckpoint(InvalidFormatCheckpoint);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.Verified);
|
||||
Assert.Contains("Invalid", result.FailureReason);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseCheckpoint_EmptyString_ReturnsFailure()
|
||||
{
|
||||
// Act
|
||||
var result = CheckpointSignatureVerifier.ParseCheckpoint("");
|
||||
|
||||
// Assert
|
||||
Assert.False(result.Verified);
|
||||
Assert.NotNull(result.FailureReason);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseCheckpoint_MinimalValidFormat_ExtractsFields()
|
||||
{
|
||||
// Arrange - minimal checkpoint without timestamp
|
||||
var checkpoint = """
|
||||
origin-name
|
||||
42
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
|
||||
""";
|
||||
|
||||
// Act
|
||||
var result = CheckpointSignatureVerifier.ParseCheckpoint(checkpoint);
|
||||
|
||||
// Assert
|
||||
Assert.Equal("origin-name", result.Origin);
|
||||
Assert.Equal(42L, result.TreeSize);
|
||||
Assert.NotNull(result.RootHash);
|
||||
Assert.Equal(32, result.RootHash!.Length); // SHA-256 hash
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseCheckpoint_InvalidBase64Root_ReturnsFailure()
|
||||
{
|
||||
// Arrange - invalid base64 in root hash
|
||||
var checkpoint = """
|
||||
origin-name
|
||||
42
|
||||
not-valid-base64!!!
|
||||
""";
|
||||
|
||||
// Act
|
||||
var result = CheckpointSignatureVerifier.ParseCheckpoint(checkpoint);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.Verified);
|
||||
Assert.Contains("Invalid root hash", result.FailureReason);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ParseCheckpoint_InvalidTreeSize_ReturnsFailure()
|
||||
{
|
||||
// Arrange - non-numeric tree size
|
||||
var checkpoint = """
|
||||
origin-name
|
||||
not-a-number
|
||||
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
|
||||
""";
|
||||
|
||||
// Act
|
||||
var result = CheckpointSignatureVerifier.ParseCheckpoint(checkpoint);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.Verified);
|
||||
Assert.Contains("Invalid tree size", result.FailureReason);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyCheckpoint_NullCheckpoint_ThrowsArgumentNull()
|
||||
{
|
||||
// Act & Assert
|
||||
Assert.Throws<ArgumentNullException>(() =>
|
||||
CheckpointSignatureVerifier.VerifyCheckpoint(null!, [], []));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyCheckpoint_NullSignature_ThrowsArgumentNull()
|
||||
{
|
||||
// Act & Assert
|
||||
Assert.Throws<ArgumentNullException>(() =>
|
||||
CheckpointSignatureVerifier.VerifyCheckpoint("checkpoint", null!, []));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyCheckpoint_NullPublicKey_ThrowsArgumentNull()
|
||||
{
|
||||
// Act & Assert
|
||||
Assert.Throws<ArgumentNullException>(() =>
|
||||
CheckpointSignatureVerifier.VerifyCheckpoint("checkpoint", [], null!));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyCheckpoint_InvalidFormat_ReturnsFailure()
|
||||
{
|
||||
// Arrange
|
||||
var signature = new byte[64];
|
||||
var publicKey = new byte[65]; // P-256 uncompressed
|
||||
|
||||
// Act
|
||||
var result = CheckpointSignatureVerifier.VerifyCheckpoint(
|
||||
InvalidFormatCheckpoint,
|
||||
signature,
|
||||
publicKey);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.Verified);
|
||||
Assert.Contains("Invalid checkpoint format", result.FailureReason);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,318 @@
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using StellaOps.Attestor.Core.Verification;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Attestor.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for Rekor inclusion proof verification.
|
||||
/// SPRINT_3000_0001_0001 - T10: Integration tests with mock Rekor responses
|
||||
/// </summary>
|
||||
public sealed class RekorInclusionVerificationIntegrationTests
|
||||
{
|
||||
/// <summary>
|
||||
/// Golden test fixture: a valid inclusion proof from Rekor production.
|
||||
/// This is a simplified representation of a real Rekor entry.
|
||||
/// </summary>
|
||||
private static readonly MockRekorEntry ValidEntry = new()
|
||||
{
|
||||
LogIndex = 12345678,
|
||||
TreeSize = 20000000,
|
||||
LeafHash = Convert.FromBase64String("n4bQgYhMfWWaL-qgxVrQFaO/TxsrC4Is0V1sFbDwCgg="),
|
||||
ProofHashes =
|
||||
[
|
||||
Convert.FromBase64String("1B2M2Y8AsgTpgAmY7PhCfg=="),
|
||||
Convert.FromBase64String("47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU="),
|
||||
Convert.FromBase64String("fRjPxJ7P6CcH_HiMzOZz3rkbwsC4HbTYP8Qe7L9j1Po="),
|
||||
],
|
||||
RootHash = Convert.FromBase64String("rMj3G9LfM9C6Xt0qpV3pHbM2q5lPvKjS0mOmV8jXwAk="),
|
||||
Checkpoint = """
|
||||
rekor.sigstore.dev - 2605736670972794746
|
||||
20000000
|
||||
rMj3G9LfM9C6Xt0qpV3pHbM2q5lPvKjS0mOmV8jXwAk=
|
||||
1702345678
|
||||
""",
|
||||
};
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusion_SingleLeafTree_Succeeds()
|
||||
{
|
||||
// Arrange - single leaf tree (tree size = 1)
|
||||
var leafHash = new byte[32];
|
||||
Random.Shared.NextBytes(leafHash);
|
||||
|
||||
// Act
|
||||
var result = MerkleProofVerifier.VerifyInclusion(
|
||||
leafHash,
|
||||
leafIndex: 0,
|
||||
treeSize: 1,
|
||||
proofHashes: [],
|
||||
expectedRootHash: leafHash); // Root equals leaf for single node
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusion_TwoLeafTree_LeftLeaf_Succeeds()
|
||||
{
|
||||
// Arrange - two-leaf tree, verify left leaf
|
||||
var leftLeaf = new byte[32];
|
||||
var rightLeaf = new byte[32];
|
||||
Random.Shared.NextBytes(leftLeaf);
|
||||
Random.Shared.NextBytes(rightLeaf);
|
||||
|
||||
// Compute expected root
|
||||
var expectedRoot = ComputeInteriorHash(leftLeaf, rightLeaf);
|
||||
|
||||
// Act - verify left leaf (index 0)
|
||||
var result = MerkleProofVerifier.VerifyInclusion(
|
||||
leftLeaf,
|
||||
leafIndex: 0,
|
||||
treeSize: 2,
|
||||
proofHashes: [rightLeaf],
|
||||
expectedRootHash: expectedRoot);
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusion_TwoLeafTree_RightLeaf_Succeeds()
|
||||
{
|
||||
// Arrange - two-leaf tree, verify right leaf
|
||||
var leftLeaf = new byte[32];
|
||||
var rightLeaf = new byte[32];
|
||||
Random.Shared.NextBytes(leftLeaf);
|
||||
Random.Shared.NextBytes(rightLeaf);
|
||||
|
||||
// Compute expected root
|
||||
var expectedRoot = ComputeInteriorHash(leftLeaf, rightLeaf);
|
||||
|
||||
// Act - verify right leaf (index 1)
|
||||
var result = MerkleProofVerifier.VerifyInclusion(
|
||||
rightLeaf,
|
||||
leafIndex: 1,
|
||||
treeSize: 2,
|
||||
proofHashes: [leftLeaf],
|
||||
expectedRootHash: expectedRoot);
|
||||
|
||||
// Assert
|
||||
Assert.True(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusion_FourLeafTree_AllPositions_Succeed()
|
||||
{
|
||||
// Arrange - four-leaf balanced tree
|
||||
var leaves = new byte[4][];
|
||||
for (int i = 0; i < 4; i++)
|
||||
{
|
||||
leaves[i] = new byte[32];
|
||||
Random.Shared.NextBytes(leaves[i]);
|
||||
}
|
||||
|
||||
// Build tree:
|
||||
// root
|
||||
// / \
|
||||
// h01 h23
|
||||
// / \ / \
|
||||
// L0 L1 L2 L3
|
||||
var h01 = ComputeInteriorHash(leaves[0], leaves[1]);
|
||||
var h23 = ComputeInteriorHash(leaves[2], leaves[3]);
|
||||
var root = ComputeInteriorHash(h01, h23);
|
||||
|
||||
// Test each leaf position
|
||||
var testCases = new (int index, byte[][] proof)[]
|
||||
{
|
||||
(0, [leaves[1], h23]), // L0: sibling is L1, then h23
|
||||
(1, [leaves[0], h23]), // L1: sibling is L0, then h23
|
||||
(2, [leaves[3], h01]), // L2: sibling is L3, then h01
|
||||
(3, [leaves[2], h01]), // L3: sibling is L2, then h01
|
||||
};
|
||||
|
||||
foreach (var (index, proof) in testCases)
|
||||
{
|
||||
// Act
|
||||
var result = MerkleProofVerifier.VerifyInclusion(
|
||||
leaves[index],
|
||||
leafIndex: index,
|
||||
treeSize: 4,
|
||||
proofHashes: proof,
|
||||
expectedRootHash: root);
|
||||
|
||||
// Assert
|
||||
Assert.True(result, $"Verification failed for leaf index {index}");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusion_WrongLeafHash_Fails()
|
||||
{
|
||||
// Arrange
|
||||
var correctLeaf = new byte[32];
|
||||
var wrongLeaf = new byte[32];
|
||||
var sibling = new byte[32];
|
||||
Random.Shared.NextBytes(correctLeaf);
|
||||
Random.Shared.NextBytes(wrongLeaf);
|
||||
Random.Shared.NextBytes(sibling);
|
||||
|
||||
var root = ComputeInteriorHash(correctLeaf, sibling);
|
||||
|
||||
// Act - try to verify with wrong leaf
|
||||
var result = MerkleProofVerifier.VerifyInclusion(
|
||||
wrongLeaf,
|
||||
leafIndex: 0,
|
||||
treeSize: 2,
|
||||
proofHashes: [sibling],
|
||||
expectedRootHash: root);
|
||||
|
||||
// Assert
|
||||
Assert.False(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusion_WrongRootHash_Fails()
|
||||
{
|
||||
// Arrange
|
||||
var leaf = new byte[32];
|
||||
var sibling = new byte[32];
|
||||
var wrongRoot = new byte[32];
|
||||
Random.Shared.NextBytes(leaf);
|
||||
Random.Shared.NextBytes(sibling);
|
||||
Random.Shared.NextBytes(wrongRoot);
|
||||
|
||||
// Act
|
||||
var result = MerkleProofVerifier.VerifyInclusion(
|
||||
leaf,
|
||||
leafIndex: 0,
|
||||
treeSize: 2,
|
||||
proofHashes: [sibling],
|
||||
expectedRootHash: wrongRoot);
|
||||
|
||||
// Assert
|
||||
Assert.False(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusion_InvalidLeafIndex_Fails()
|
||||
{
|
||||
// Arrange
|
||||
var leaf = new byte[32];
|
||||
Random.Shared.NextBytes(leaf);
|
||||
|
||||
// Act - index >= tree size
|
||||
var result = MerkleProofVerifier.VerifyInclusion(
|
||||
leaf,
|
||||
leafIndex: 5,
|
||||
treeSize: 4,
|
||||
proofHashes: [],
|
||||
expectedRootHash: leaf);
|
||||
|
||||
// Assert
|
||||
Assert.False(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusion_NegativeLeafIndex_Fails()
|
||||
{
|
||||
// Arrange
|
||||
var leaf = new byte[32];
|
||||
Random.Shared.NextBytes(leaf);
|
||||
|
||||
// Act
|
||||
var result = MerkleProofVerifier.VerifyInclusion(
|
||||
leaf,
|
||||
leafIndex: -1,
|
||||
treeSize: 4,
|
||||
proofHashes: [],
|
||||
expectedRootHash: leaf);
|
||||
|
||||
// Assert
|
||||
Assert.False(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyInclusion_ZeroTreeSize_Fails()
|
||||
{
|
||||
// Arrange
|
||||
var leaf = new byte[32];
|
||||
Random.Shared.NextBytes(leaf);
|
||||
|
||||
// Act
|
||||
var result = MerkleProofVerifier.VerifyInclusion(
|
||||
leaf,
|
||||
leafIndex: 0,
|
||||
treeSize: 0,
|
||||
proofHashes: [],
|
||||
expectedRootHash: leaf);
|
||||
|
||||
// Assert
|
||||
Assert.False(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeRootFromPath_EmptyProof_SingleLeaf_ReturnsLeafHash()
|
||||
{
|
||||
// Arrange
|
||||
var leaf = new byte[32];
|
||||
Random.Shared.NextBytes(leaf);
|
||||
|
||||
// Act
|
||||
var result = MerkleProofVerifier.ComputeRootFromPath(
|
||||
leaf,
|
||||
leafIndex: 0,
|
||||
treeSize: 1,
|
||||
proofHashes: []);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result);
|
||||
Assert.Equal(leaf, result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeRootFromPath_EmptyProof_MultiLeaf_ReturnsNull()
|
||||
{
|
||||
// Arrange - empty proof for multi-leaf tree is invalid
|
||||
var leaf = new byte[32];
|
||||
Random.Shared.NextBytes(leaf);
|
||||
|
||||
// Act
|
||||
var result = MerkleProofVerifier.ComputeRootFromPath(
|
||||
leaf,
|
||||
leafIndex: 0,
|
||||
treeSize: 4,
|
||||
proofHashes: []);
|
||||
|
||||
// Assert
|
||||
Assert.Null(result);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Computes an interior node hash per RFC 6962.
|
||||
/// H(0x01 || left || right)
|
||||
/// </summary>
|
||||
private static byte[] ComputeInteriorHash(byte[] left, byte[] right)
|
||||
{
|
||||
using var sha256 = System.Security.Cryptography.SHA256.Create();
|
||||
var combined = new byte[1 + left.Length + right.Length];
|
||||
combined[0] = 0x01; // Interior node prefix
|
||||
left.CopyTo(combined, 1);
|
||||
right.CopyTo(combined, 1 + left.Length);
|
||||
return sha256.ComputeHash(combined);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Mock Rekor entry for testing.
|
||||
/// </summary>
|
||||
private sealed class MockRekorEntry
|
||||
{
|
||||
public long LogIndex { get; init; }
|
||||
public long TreeSize { get; init; }
|
||||
public byte[] LeafHash { get; init; } = [];
|
||||
public byte[][] ProofHashes { get; init; } = [];
|
||||
public byte[] RootHash { get; init; } = [];
|
||||
public string Checkpoint { get; init; } = "";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,210 @@
|
||||
using StellaOps.Attestor.Core.Verification;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Attestor.Tests;
|
||||
|
||||
public class TimeSkewValidatorTests
|
||||
{
|
||||
private readonly TimeSkewOptions _defaultOptions = new()
|
||||
{
|
||||
Enabled = true,
|
||||
WarnThresholdSeconds = 60,
|
||||
RejectThresholdSeconds = 300,
|
||||
MaxFutureSkewSeconds = 60,
|
||||
FailOnReject = true
|
||||
};
|
||||
|
||||
[Fact]
|
||||
public void Validate_WhenDisabled_ReturnsSkipped()
|
||||
{
|
||||
// Arrange
|
||||
var options = new TimeSkewOptions { Enabled = false };
|
||||
var validator = new TimeSkewValidator(options);
|
||||
var integratedTime = DateTimeOffset.UtcNow.AddSeconds(-10);
|
||||
|
||||
// Act
|
||||
var result = validator.Validate(integratedTime);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsValid);
|
||||
Assert.Equal(TimeSkewStatus.Skipped, result.Status);
|
||||
Assert.Contains("disabled", result.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_WhenNoIntegratedTime_ReturnsSkipped()
|
||||
{
|
||||
// Arrange
|
||||
var validator = new TimeSkewValidator(_defaultOptions);
|
||||
|
||||
// Act
|
||||
var result = validator.Validate(integratedTime: null);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsValid);
|
||||
Assert.Equal(TimeSkewStatus.Skipped, result.Status);
|
||||
Assert.Contains("No integrated time", result.Message);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(0)] // No skew
|
||||
[InlineData(5)] // 5 seconds ago
|
||||
[InlineData(30)] // 30 seconds ago
|
||||
[InlineData(59)] // Just under warn threshold
|
||||
public void Validate_WhenSkewBelowWarnThreshold_ReturnsOk(int secondsAgo)
|
||||
{
|
||||
// Arrange
|
||||
var validator = new TimeSkewValidator(_defaultOptions);
|
||||
var localTime = DateTimeOffset.UtcNow;
|
||||
var integratedTime = localTime.AddSeconds(-secondsAgo);
|
||||
|
||||
// Act
|
||||
var result = validator.Validate(integratedTime, localTime);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsValid);
|
||||
Assert.Equal(TimeSkewStatus.Ok, result.Status);
|
||||
Assert.InRange(result.SkewSeconds, secondsAgo - 1, secondsAgo + 1);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(60)] // At warn threshold
|
||||
[InlineData(120)] // 2 minutes
|
||||
[InlineData(299)] // Just under reject threshold
|
||||
public void Validate_WhenSkewBetweenWarnAndReject_ReturnsWarning(int secondsAgo)
|
||||
{
|
||||
// Arrange
|
||||
var validator = new TimeSkewValidator(_defaultOptions);
|
||||
var localTime = DateTimeOffset.UtcNow;
|
||||
var integratedTime = localTime.AddSeconds(-secondsAgo);
|
||||
|
||||
// Act
|
||||
var result = validator.Validate(integratedTime, localTime);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsValid); // Warning still passes
|
||||
Assert.Equal(TimeSkewStatus.Warning, result.Status);
|
||||
Assert.Contains("warning threshold", result.Message);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(300)] // At reject threshold
|
||||
[InlineData(600)] // 10 minutes
|
||||
[InlineData(3600)] // 1 hour
|
||||
public void Validate_WhenSkewExceedsRejectThreshold_ReturnsRejected(int secondsAgo)
|
||||
{
|
||||
// Arrange
|
||||
var validator = new TimeSkewValidator(_defaultOptions);
|
||||
var localTime = DateTimeOffset.UtcNow;
|
||||
var integratedTime = localTime.AddSeconds(-secondsAgo);
|
||||
|
||||
// Act
|
||||
var result = validator.Validate(integratedTime, localTime);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Equal(TimeSkewStatus.Rejected, result.Status);
|
||||
Assert.Contains("rejection threshold", result.Message);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(5)] // 5 seconds in future (OK)
|
||||
[InlineData(30)] // 30 seconds in future (OK)
|
||||
[InlineData(60)] // At max future threshold (OK)
|
||||
public void Validate_WhenSmallFutureSkew_ReturnsOk(int secondsInFuture)
|
||||
{
|
||||
// Arrange
|
||||
var validator = new TimeSkewValidator(_defaultOptions);
|
||||
var localTime = DateTimeOffset.UtcNow;
|
||||
var integratedTime = localTime.AddSeconds(secondsInFuture);
|
||||
|
||||
// Act
|
||||
var result = validator.Validate(integratedTime, localTime);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsValid);
|
||||
Assert.Equal(TimeSkewStatus.Ok, result.Status);
|
||||
Assert.True(result.SkewSeconds < 0); // Negative means future
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(61)] // Just over max future
|
||||
[InlineData(120)] // 2 minutes in future
|
||||
[InlineData(3600)] // 1 hour in future
|
||||
public void Validate_WhenLargeFutureSkew_ReturnsFutureTimestamp(int secondsInFuture)
|
||||
{
|
||||
// Arrange
|
||||
var validator = new TimeSkewValidator(_defaultOptions);
|
||||
var localTime = DateTimeOffset.UtcNow;
|
||||
var integratedTime = localTime.AddSeconds(secondsInFuture);
|
||||
|
||||
// Act
|
||||
var result = validator.Validate(integratedTime, localTime);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Equal(TimeSkewStatus.FutureTimestamp, result.Status);
|
||||
Assert.Contains("Future timestamp", result.Message);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_UsesCurrentTimeWhenLocalTimeNotProvided()
|
||||
{
|
||||
// Arrange
|
||||
var validator = new TimeSkewValidator(_defaultOptions);
|
||||
var integratedTime = DateTimeOffset.UtcNow.AddSeconds(-10);
|
||||
|
||||
// Act
|
||||
var result = validator.Validate(integratedTime);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsValid);
|
||||
Assert.InRange(result.SkewSeconds, 9, 12); // Allow for test execution time
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_CustomThresholds_AreRespected()
|
||||
{
|
||||
// Arrange
|
||||
var options = new TimeSkewOptions
|
||||
{
|
||||
Enabled = true,
|
||||
WarnThresholdSeconds = 10,
|
||||
RejectThresholdSeconds = 30,
|
||||
MaxFutureSkewSeconds = 5
|
||||
};
|
||||
var validator = new TimeSkewValidator(options);
|
||||
var localTime = DateTimeOffset.UtcNow;
|
||||
|
||||
// Act - 15 seconds should warn with custom thresholds
|
||||
var result = validator.Validate(localTime.AddSeconds(-15), localTime);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.IsValid);
|
||||
Assert.Equal(TimeSkewStatus.Warning, result.Status);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_ReturnsCorrectTimestamps()
|
||||
{
|
||||
// Arrange
|
||||
var validator = new TimeSkewValidator(_defaultOptions);
|
||||
var localTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
|
||||
var integratedTime = new DateTimeOffset(2025, 12, 16, 11, 59, 30, TimeSpan.Zero);
|
||||
|
||||
// Act
|
||||
var result = validator.Validate(integratedTime, localTime);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(integratedTime, result.IntegratedTime);
|
||||
Assert.Equal(localTime, result.LocalTime);
|
||||
Assert.Equal(30, result.SkewSeconds, precision: 0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Constructor_ThrowsOnNullOptions()
|
||||
{
|
||||
// Act & Assert
|
||||
Assert.Throws<ArgumentNullException>(() => new TimeSkewValidator(null!));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,158 @@
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.WebService.Contracts.Anchors;
|
||||
|
||||
/// <summary>
|
||||
/// Request to create a trust anchor.
|
||||
/// </summary>
|
||||
public sealed record CreateTrustAnchorRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// PURL glob pattern (e.g., pkg:npm/*).
|
||||
/// </summary>
|
||||
[Required]
|
||||
[JsonPropertyName("purlPattern")]
|
||||
public required string PurlPattern { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Key IDs allowed to sign attestations.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[MinLength(1)]
|
||||
[JsonPropertyName("allowedKeyIds")]
|
||||
public required string[] AllowedKeyIds { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional: Predicate types allowed for this anchor.
|
||||
/// </summary>
|
||||
[JsonPropertyName("allowedPredicateTypes")]
|
||||
public string[]? AllowedPredicateTypes { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional reference to the policy document.
|
||||
/// </summary>
|
||||
[JsonPropertyName("policyRef")]
|
||||
public string? PolicyRef { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Policy version for this anchor.
|
||||
/// </summary>
|
||||
[JsonPropertyName("policyVersion")]
|
||||
public string? PolicyVersion { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Trust anchor response.
|
||||
/// </summary>
|
||||
public sealed record TrustAnchorDto
|
||||
{
|
||||
/// <summary>
|
||||
/// The anchor ID.
|
||||
/// </summary>
|
||||
[JsonPropertyName("anchorId")]
|
||||
public required Guid AnchorId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// PURL glob pattern.
|
||||
/// </summary>
|
||||
[JsonPropertyName("purlPattern")]
|
||||
public required string PurlPattern { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Allowed key IDs.
|
||||
/// </summary>
|
||||
[JsonPropertyName("allowedKeyIds")]
|
||||
public required string[] AllowedKeyIds { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Allowed predicate types.
|
||||
/// </summary>
|
||||
[JsonPropertyName("allowedPredicateTypes")]
|
||||
public string[]? AllowedPredicateTypes { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Policy reference.
|
||||
/// </summary>
|
||||
[JsonPropertyName("policyRef")]
|
||||
public string? PolicyRef { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Policy version.
|
||||
/// </summary>
|
||||
[JsonPropertyName("policyVersion")]
|
||||
public string? PolicyVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Revoked key IDs.
|
||||
/// </summary>
|
||||
[JsonPropertyName("revokedKeys")]
|
||||
public string[] RevokedKeys { get; init; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Whether the anchor is active.
|
||||
/// </summary>
|
||||
[JsonPropertyName("isActive")]
|
||||
public bool IsActive { get; init; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// When the anchor was created.
|
||||
/// </summary>
|
||||
[JsonPropertyName("createdAt")]
|
||||
public required DateTimeOffset CreatedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When the anchor was last updated.
|
||||
/// </summary>
|
||||
[JsonPropertyName("updatedAt")]
|
||||
public required DateTimeOffset UpdatedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request to update a trust anchor.
|
||||
/// </summary>
|
||||
public sealed record UpdateTrustAnchorRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// Updated key IDs allowed to sign attestations.
|
||||
/// </summary>
|
||||
[JsonPropertyName("allowedKeyIds")]
|
||||
public string[]? AllowedKeyIds { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Updated predicate types.
|
||||
/// </summary>
|
||||
[JsonPropertyName("allowedPredicateTypes")]
|
||||
public string[]? AllowedPredicateTypes { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Updated policy reference.
|
||||
/// </summary>
|
||||
[JsonPropertyName("policyRef")]
|
||||
public string? PolicyRef { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Updated policy version.
|
||||
/// </summary>
|
||||
[JsonPropertyName("policyVersion")]
|
||||
public string? PolicyVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Set anchor active/inactive.
|
||||
/// </summary>
|
||||
[JsonPropertyName("isActive")]
|
||||
public bool? IsActive { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request to revoke a key in a trust anchor.
|
||||
/// </summary>
|
||||
public sealed record RevokeKeyRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// The key ID to revoke.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[JsonPropertyName("keyId")]
|
||||
public required string KeyId { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,170 @@
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.WebService.Contracts.Proofs;
|
||||
|
||||
/// <summary>
|
||||
/// Request to create a proof spine for an SBOM entry.
|
||||
/// </summary>
|
||||
public sealed record CreateSpineRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// Evidence IDs to include in the proof bundle.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[MinLength(1)]
|
||||
[JsonPropertyName("evidenceIds")]
|
||||
public required string[] EvidenceIds { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Reasoning ID explaining the policy decision.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[RegularExpression(@"^sha256:[a-f0-9]{64}$")]
|
||||
[JsonPropertyName("reasoningId")]
|
||||
public required string ReasoningId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// VEX verdict ID for the exploitability assessment.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[RegularExpression(@"^sha256:[a-f0-9]{64}$")]
|
||||
[JsonPropertyName("vexVerdictId")]
|
||||
public required string VexVerdictId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Policy version used for evaluation.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[RegularExpression(@"^v[0-9]+\.[0-9]+\.[0-9]+$")]
|
||||
[JsonPropertyName("policyVersion")]
|
||||
public required string PolicyVersion { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Response after creating a proof spine.
|
||||
/// </summary>
|
||||
public sealed record CreateSpineResponse
|
||||
{
|
||||
/// <summary>
|
||||
/// The computed proof bundle ID (merkle root).
|
||||
/// </summary>
|
||||
[JsonPropertyName("proofBundleId")]
|
||||
public required string ProofBundleId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// URL to retrieve the verification receipt.
|
||||
/// </summary>
|
||||
[JsonPropertyName("receiptUrl")]
|
||||
public string? ReceiptUrl { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request to verify a proof chain.
|
||||
/// </summary>
|
||||
public sealed record VerifyProofRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// The proof bundle ID to verify.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[RegularExpression(@"^sha256:[a-f0-9]{64}$")]
|
||||
[JsonPropertyName("proofBundleId")]
|
||||
public required string ProofBundleId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Trust anchor ID to verify against.
|
||||
/// </summary>
|
||||
[JsonPropertyName("anchorId")]
|
||||
public Guid? AnchorId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether to verify Rekor inclusion proofs.
|
||||
/// </summary>
|
||||
[JsonPropertyName("verifyRekor")]
|
||||
public bool VerifyRekor { get; init; } = true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verification receipt response.
|
||||
/// </summary>
|
||||
public sealed record VerificationReceiptDto
|
||||
{
|
||||
/// <summary>
|
||||
/// The proof bundle ID that was verified.
|
||||
/// </summary>
|
||||
[JsonPropertyName("proofBundleId")]
|
||||
public required string ProofBundleId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When the verification was performed.
|
||||
/// </summary>
|
||||
[JsonPropertyName("verifiedAt")]
|
||||
public required DateTimeOffset VerifiedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Version of the verifier.
|
||||
/// </summary>
|
||||
[JsonPropertyName("verifierVersion")]
|
||||
public required string VerifierVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Trust anchor ID used.
|
||||
/// </summary>
|
||||
[JsonPropertyName("anchorId")]
|
||||
public Guid? AnchorId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Overall verification result: "pass" or "fail".
|
||||
/// </summary>
|
||||
[JsonPropertyName("result")]
|
||||
public required string Result { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Individual verification checks.
|
||||
/// </summary>
|
||||
[JsonPropertyName("checks")]
|
||||
public required VerificationCheckDto[] Checks { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A single verification check.
|
||||
/// </summary>
|
||||
public sealed record VerificationCheckDto
|
||||
{
|
||||
/// <summary>
|
||||
/// Name of the check.
|
||||
/// </summary>
|
||||
[JsonPropertyName("check")]
|
||||
public required string Check { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Status: "pass" or "fail".
|
||||
/// </summary>
|
||||
[JsonPropertyName("status")]
|
||||
public required string Status { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Key ID if this was a signature check.
|
||||
/// </summary>
|
||||
[JsonPropertyName("keyId")]
|
||||
public string? KeyId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Expected value for comparison checks.
|
||||
/// </summary>
|
||||
[JsonPropertyName("expected")]
|
||||
public string? Expected { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Actual value for comparison checks.
|
||||
/// </summary>
|
||||
[JsonPropertyName("actual")]
|
||||
public string? Actual { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Rekor log index if applicable.
|
||||
/// </summary>
|
||||
[JsonPropertyName("logIndex")]
|
||||
public long? LogIndex { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,188 @@
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using StellaOps.Attestor.WebService.Contracts.Anchors;
|
||||
|
||||
namespace StellaOps.Attestor.WebService.Controllers;
|
||||
|
||||
/// <summary>
|
||||
/// API endpoints for trust anchor management.
|
||||
/// </summary>
|
||||
[ApiController]
|
||||
[Route("anchors")]
|
||||
[Produces("application/json")]
|
||||
public class AnchorsController : ControllerBase
|
||||
{
|
||||
private readonly ILogger<AnchorsController> _logger;
|
||||
// TODO: Inject IProofChainRepository
|
||||
|
||||
public AnchorsController(ILogger<AnchorsController> logger)
|
||||
{
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get all active trust anchors.
|
||||
/// </summary>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>List of trust anchors.</returns>
|
||||
[HttpGet]
|
||||
[ProducesResponseType(typeof(TrustAnchorDto[]), StatusCodes.Status200OK)]
|
||||
public async Task<ActionResult<TrustAnchorDto[]>> GetAnchorsAsync(CancellationToken ct = default)
|
||||
{
|
||||
_logger.LogInformation("Getting all trust anchors");
|
||||
|
||||
// TODO: Implement using IProofChainRepository.GetActiveTrustAnchorsAsync
|
||||
|
||||
return Ok(Array.Empty<TrustAnchorDto>());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get a trust anchor by ID.
|
||||
/// </summary>
|
||||
/// <param name="anchorId">The anchor ID.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The trust anchor.</returns>
|
||||
[HttpGet("{anchorId:guid}")]
|
||||
[ProducesResponseType(typeof(TrustAnchorDto), StatusCodes.Status200OK)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
public async Task<ActionResult<TrustAnchorDto>> GetAnchorAsync(
|
||||
[FromRoute] Guid anchorId,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
_logger.LogInformation("Getting trust anchor {AnchorId}", anchorId);
|
||||
|
||||
// TODO: Implement using IProofChainRepository.GetTrustAnchorAsync
|
||||
|
||||
return NotFound(new ProblemDetails
|
||||
{
|
||||
Title = "Trust Anchor Not Found",
|
||||
Detail = $"No trust anchor found with ID {anchorId}",
|
||||
Status = StatusCodes.Status404NotFound
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a new trust anchor.
|
||||
/// </summary>
|
||||
/// <param name="request">The anchor creation request.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The created trust anchor.</returns>
|
||||
[HttpPost]
|
||||
[ProducesResponseType(typeof(TrustAnchorDto), StatusCodes.Status201Created)]
|
||||
[ProducesResponseType(StatusCodes.Status400BadRequest)]
|
||||
[ProducesResponseType(StatusCodes.Status409Conflict)]
|
||||
public async Task<ActionResult<TrustAnchorDto>> CreateAnchorAsync(
|
||||
[FromBody] CreateTrustAnchorRequest request,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
_logger.LogInformation("Creating trust anchor for pattern {Pattern}", request.PurlPattern);
|
||||
|
||||
// TODO: Implement using IProofChainRepository.SaveTrustAnchorAsync
|
||||
// 1. Check for existing anchor with same pattern
|
||||
// 2. Create new anchor entity
|
||||
// 3. Save to repository
|
||||
// 4. Log audit entry
|
||||
|
||||
var anchor = new TrustAnchorDto
|
||||
{
|
||||
AnchorId = Guid.NewGuid(),
|
||||
PurlPattern = request.PurlPattern,
|
||||
AllowedKeyIds = request.AllowedKeyIds,
|
||||
AllowedPredicateTypes = request.AllowedPredicateTypes,
|
||||
PolicyRef = request.PolicyRef,
|
||||
PolicyVersion = request.PolicyVersion,
|
||||
CreatedAt = DateTimeOffset.UtcNow,
|
||||
UpdatedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
return CreatedAtAction(nameof(GetAnchorAsync), new { anchorId = anchor.AnchorId }, anchor);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Update a trust anchor.
|
||||
/// </summary>
|
||||
/// <param name="anchorId">The anchor ID.</param>
|
||||
/// <param name="request">The update request.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The updated trust anchor.</returns>
|
||||
[HttpPatch("{anchorId:guid}")]
|
||||
[ProducesResponseType(typeof(TrustAnchorDto), StatusCodes.Status200OK)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
public async Task<ActionResult<TrustAnchorDto>> UpdateAnchorAsync(
|
||||
[FromRoute] Guid anchorId,
|
||||
[FromBody] UpdateTrustAnchorRequest request,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
_logger.LogInformation("Updating trust anchor {AnchorId}", anchorId);
|
||||
|
||||
// TODO: Implement using IProofChainRepository
|
||||
// 1. Get existing anchor
|
||||
// 2. Apply updates
|
||||
// 3. Save to repository
|
||||
// 4. Log audit entry
|
||||
|
||||
return NotFound(new ProblemDetails
|
||||
{
|
||||
Title = "Trust Anchor Not Found",
|
||||
Detail = $"No trust anchor found with ID {anchorId}",
|
||||
Status = StatusCodes.Status404NotFound
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Revoke a key in a trust anchor.
|
||||
/// </summary>
|
||||
/// <param name="anchorId">The anchor ID.</param>
|
||||
/// <param name="request">The revoke request.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>No content on success.</returns>
|
||||
[HttpPost("{anchorId:guid}/revoke-key")]
|
||||
[ProducesResponseType(StatusCodes.Status204NoContent)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
[ProducesResponseType(StatusCodes.Status400BadRequest)]
|
||||
public async Task<ActionResult> RevokeKeyAsync(
|
||||
[FromRoute] Guid anchorId,
|
||||
[FromBody] RevokeKeyRequest request,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
_logger.LogInformation("Revoking key {KeyId} in anchor {AnchorId}", request.KeyId, anchorId);
|
||||
|
||||
// TODO: Implement using IProofChainRepository.RevokeKeyAsync
|
||||
// 1. Get existing anchor
|
||||
// 2. Add key to revoked_keys
|
||||
// 3. Remove from allowed_keyids
|
||||
// 4. Save to repository
|
||||
// 5. Log audit entry
|
||||
|
||||
return NotFound(new ProblemDetails
|
||||
{
|
||||
Title = "Trust Anchor Not Found",
|
||||
Detail = $"No trust anchor found with ID {anchorId}",
|
||||
Status = StatusCodes.Status404NotFound
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Delete (deactivate) a trust anchor.
|
||||
/// </summary>
|
||||
/// <param name="anchorId">The anchor ID.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>No content on success.</returns>
|
||||
[HttpDelete("{anchorId:guid}")]
|
||||
[ProducesResponseType(StatusCodes.Status204NoContent)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
public async Task<ActionResult> DeleteAnchorAsync(
|
||||
[FromRoute] Guid anchorId,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
_logger.LogInformation("Deactivating trust anchor {AnchorId}", anchorId);
|
||||
|
||||
// TODO: Implement - set is_active = false (soft delete)
|
||||
|
||||
return NotFound(new ProblemDetails
|
||||
{
|
||||
Title = "Trust Anchor Not Found",
|
||||
Detail = $"No trust anchor found with ID {anchorId}",
|
||||
Status = StatusCodes.Status404NotFound
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,162 @@
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using StellaOps.Attestor.WebService.Contracts.Proofs;
|
||||
|
||||
namespace StellaOps.Attestor.WebService.Controllers;
|
||||
|
||||
/// <summary>
|
||||
/// API endpoints for proof chain operations.
|
||||
/// </summary>
|
||||
[ApiController]
|
||||
[Route("proofs")]
|
||||
[Produces("application/json")]
|
||||
public class ProofsController : ControllerBase
|
||||
{
|
||||
private readonly ILogger<ProofsController> _logger;
|
||||
// TODO: Inject IProofSpineAssembler, IReceiptGenerator, IProofChainRepository
|
||||
|
||||
public ProofsController(ILogger<ProofsController> logger)
|
||||
{
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Create a proof spine for an SBOM entry.
|
||||
/// </summary>
|
||||
/// <param name="entry">The SBOM entry ID (sha256:hex:pkg:...)</param>
|
||||
/// <param name="request">The spine creation request.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The created proof bundle ID.</returns>
|
||||
[HttpPost("{entry}/spine")]
|
||||
[ProducesResponseType(typeof(CreateSpineResponse), StatusCodes.Status201Created)]
|
||||
[ProducesResponseType(StatusCodes.Status400BadRequest)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
[ProducesResponseType(StatusCodes.Status422UnprocessableEntity)]
|
||||
public async Task<ActionResult<CreateSpineResponse>> CreateSpineAsync(
|
||||
[FromRoute] string entry,
|
||||
[FromBody] CreateSpineRequest request,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
_logger.LogInformation("Creating proof spine for entry {Entry}", entry);
|
||||
|
||||
// Validate entry format
|
||||
if (!IsValidSbomEntryId(entry))
|
||||
{
|
||||
return BadRequest(new ProblemDetails
|
||||
{
|
||||
Title = "Invalid SBOM Entry ID",
|
||||
Detail = "Entry ID must be in format sha256:<hex>:pkg:<purl>",
|
||||
Status = StatusCodes.Status400BadRequest
|
||||
});
|
||||
}
|
||||
|
||||
// TODO: Implement spine creation using IProofSpineAssembler
|
||||
// 1. Validate all evidence IDs exist
|
||||
// 2. Validate reasoning ID exists
|
||||
// 3. Validate VEX verdict ID exists
|
||||
// 4. Assemble spine using merkle tree
|
||||
// 5. Sign and store spine
|
||||
// 6. Return proof bundle ID
|
||||
|
||||
var response = new CreateSpineResponse
|
||||
{
|
||||
ProofBundleId = $"sha256:{Guid.NewGuid():N}",
|
||||
ReceiptUrl = $"/proofs/{entry}/receipt"
|
||||
};
|
||||
|
||||
return CreatedAtAction(nameof(GetReceiptAsync), new { entry }, response);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get verification receipt for an SBOM entry.
|
||||
/// </summary>
|
||||
/// <param name="entry">The SBOM entry ID.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The verification receipt.</returns>
|
||||
[HttpGet("{entry}/receipt")]
|
||||
[ProducesResponseType(typeof(VerificationReceiptDto), StatusCodes.Status200OK)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
public async Task<ActionResult<VerificationReceiptDto>> GetReceiptAsync(
|
||||
[FromRoute] string entry,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
_logger.LogInformation("Getting receipt for entry {Entry}", entry);
|
||||
|
||||
// TODO: Implement receipt retrieval using IReceiptGenerator
|
||||
// 1. Get spine for entry
|
||||
// 2. Generate/retrieve verification receipt
|
||||
// 3. Return receipt
|
||||
|
||||
return NotFound(new ProblemDetails
|
||||
{
|
||||
Title = "Receipt Not Found",
|
||||
Detail = $"No verification receipt found for entry {entry}",
|
||||
Status = StatusCodes.Status404NotFound
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get proof spine for an SBOM entry.
|
||||
/// </summary>
|
||||
/// <param name="entry">The SBOM entry ID.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The proof spine details.</returns>
|
||||
[HttpGet("{entry}/spine")]
|
||||
[ProducesResponseType(StatusCodes.Status200OK)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
public async Task<ActionResult> GetSpineAsync(
|
||||
[FromRoute] string entry,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
_logger.LogInformation("Getting spine for entry {Entry}", entry);
|
||||
|
||||
// TODO: Implement spine retrieval
|
||||
|
||||
return NotFound(new ProblemDetails
|
||||
{
|
||||
Title = "Spine Not Found",
|
||||
Detail = $"No proof spine found for entry {entry}",
|
||||
Status = StatusCodes.Status404NotFound
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get VEX statement for an SBOM entry.
|
||||
/// </summary>
|
||||
/// <param name="entry">The SBOM entry ID.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The VEX statement.</returns>
|
||||
[HttpGet("{entry}/vex")]
|
||||
[ProducesResponseType(StatusCodes.Status200OK)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
public async Task<ActionResult> GetVexAsync(
|
||||
[FromRoute] string entry,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
_logger.LogInformation("Getting VEX for entry {Entry}", entry);
|
||||
|
||||
// TODO: Implement VEX retrieval
|
||||
|
||||
return NotFound(new ProblemDetails
|
||||
{
|
||||
Title = "VEX Not Found",
|
||||
Detail = $"No VEX statement found for entry {entry}",
|
||||
Status = StatusCodes.Status404NotFound
|
||||
});
|
||||
}
|
||||
|
||||
private static bool IsValidSbomEntryId(string entry)
|
||||
{
|
||||
// Format: sha256:<64-hex>:pkg:<purl>
|
||||
if (string.IsNullOrWhiteSpace(entry))
|
||||
return false;
|
||||
|
||||
var parts = entry.Split(':', 4);
|
||||
if (parts.Length < 4)
|
||||
return false;
|
||||
|
||||
return parts[0] == "sha256"
|
||||
&& parts[1].Length == 64
|
||||
&& parts[1].All(c => "0123456789abcdef".Contains(c))
|
||||
&& parts[2] == "pkg";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,145 @@
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using StellaOps.Attestor.WebService.Contracts.Proofs;
|
||||
|
||||
namespace StellaOps.Attestor.WebService.Controllers;
|
||||
|
||||
/// <summary>
|
||||
/// API endpoints for proof chain verification.
|
||||
/// </summary>
|
||||
[ApiController]
|
||||
[Route("verify")]
|
||||
[Produces("application/json")]
|
||||
public class VerifyController : ControllerBase
|
||||
{
|
||||
private readonly ILogger<VerifyController> _logger;
|
||||
// TODO: Inject IVerificationPipeline
|
||||
|
||||
public VerifyController(ILogger<VerifyController> logger)
|
||||
{
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verify a proof chain.
|
||||
/// </summary>
|
||||
/// <param name="request">The verification request.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The verification receipt.</returns>
|
||||
[HttpPost]
|
||||
[ProducesResponseType(typeof(VerificationReceiptDto), StatusCodes.Status200OK)]
|
||||
[ProducesResponseType(StatusCodes.Status400BadRequest)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
public async Task<ActionResult<VerificationReceiptDto>> VerifyAsync(
|
||||
[FromBody] VerifyProofRequest request,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
_logger.LogInformation("Verifying proof bundle {BundleId}", request.ProofBundleId);
|
||||
|
||||
// TODO: Implement using IVerificationPipeline per advisory §9.1
|
||||
// Pipeline steps:
|
||||
// 1. DSSE signature verification (for each envelope in chain)
|
||||
// 2. ID recomputation (verify content-addressed IDs match)
|
||||
// 3. Merkle root verification (recompute ProofBundleID)
|
||||
// 4. Trust anchor matching (verify signer key is allowed)
|
||||
// 5. Rekor inclusion proof verification (if enabled)
|
||||
// 6. Policy version compatibility check
|
||||
// 7. Key revocation check
|
||||
|
||||
var checks = new List<VerificationCheckDto>
|
||||
{
|
||||
new()
|
||||
{
|
||||
Check = "dsse_signature",
|
||||
Status = "pass",
|
||||
KeyId = "example-key-id"
|
||||
},
|
||||
new()
|
||||
{
|
||||
Check = "id_recomputation",
|
||||
Status = "pass"
|
||||
},
|
||||
new()
|
||||
{
|
||||
Check = "merkle_root",
|
||||
Status = "pass"
|
||||
},
|
||||
new()
|
||||
{
|
||||
Check = "trust_anchor",
|
||||
Status = "pass"
|
||||
}
|
||||
};
|
||||
|
||||
if (request.VerifyRekor)
|
||||
{
|
||||
checks.Add(new VerificationCheckDto
|
||||
{
|
||||
Check = "rekor_inclusion",
|
||||
Status = "pass",
|
||||
LogIndex = 12345678
|
||||
});
|
||||
}
|
||||
|
||||
var receipt = new VerificationReceiptDto
|
||||
{
|
||||
ProofBundleId = request.ProofBundleId,
|
||||
VerifiedAt = DateTimeOffset.UtcNow,
|
||||
VerifierVersion = "1.0.0",
|
||||
AnchorId = request.AnchorId,
|
||||
Result = "pass",
|
||||
Checks = checks.ToArray()
|
||||
};
|
||||
|
||||
return Ok(receipt);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verify a DSSE envelope signature.
|
||||
/// </summary>
|
||||
/// <param name="envelopeHash">The envelope body hash.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>Signature verification result.</returns>
|
||||
[HttpGet("envelope/{envelopeHash}")]
|
||||
[ProducesResponseType(StatusCodes.Status200OK)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
public async Task<ActionResult> VerifyEnvelopeAsync(
|
||||
[FromRoute] string envelopeHash,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
_logger.LogInformation("Verifying envelope {Hash}", envelopeHash);
|
||||
|
||||
// TODO: Implement DSSE envelope verification
|
||||
|
||||
return NotFound(new ProblemDetails
|
||||
{
|
||||
Title = "Envelope Not Found",
|
||||
Detail = $"No envelope found with hash {envelopeHash}",
|
||||
Status = StatusCodes.Status404NotFound
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verify Rekor inclusion for an envelope.
|
||||
/// </summary>
|
||||
/// <param name="envelopeHash">The envelope body hash.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>Rekor verification result.</returns>
|
||||
[HttpGet("rekor/{envelopeHash}")]
|
||||
[ProducesResponseType(StatusCodes.Status200OK)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
public async Task<ActionResult> VerifyRekorAsync(
|
||||
[FromRoute] string envelopeHash,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
_logger.LogInformation("Verifying Rekor inclusion for {Hash}", envelopeHash);
|
||||
|
||||
// TODO: Implement Rekor inclusion proof verification
|
||||
|
||||
return NotFound(new ProblemDetails
|
||||
{
|
||||
Title = "Rekor Entry Not Found",
|
||||
Detail = $"No Rekor entry found for envelope {envelopeHash}",
|
||||
Status = StatusCodes.Status404NotFound
|
||||
});
|
||||
}
|
||||
}
|
||||
34
src/Attestor/StellaOps.Attestor/stryker-config.json
Normal file
34
src/Attestor/StellaOps.Attestor/stryker-config.json
Normal file
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"$schema": "https://raw.githubusercontent.com/stryker-mutator/stryker-net/master/src/Stryker.Core/Stryker.Core/assets/stryker-config.schema.json",
|
||||
"stryker-config": {
|
||||
"project": "StellaOps.Attestor.csproj",
|
||||
"test-project": "../__Tests/StellaOps.Attestor.Tests/StellaOps.Attestor.Tests.csproj",
|
||||
"solution": "../../../../StellaOps.Router.slnx",
|
||||
"thresholds": {
|
||||
"high": 80,
|
||||
"low": 65,
|
||||
"break": 55
|
||||
},
|
||||
"mutate": [
|
||||
"**/*.cs",
|
||||
"!**/obj/**",
|
||||
"!**/bin/**",
|
||||
"!**/Migrations/**"
|
||||
],
|
||||
"excluded-mutations": [
|
||||
"String"
|
||||
],
|
||||
"ignore-mutations": [
|
||||
"Linq.FirstOrDefault",
|
||||
"Linq.SingleOrDefault"
|
||||
],
|
||||
"reporters": [
|
||||
"html",
|
||||
"json",
|
||||
"progress"
|
||||
],
|
||||
"concurrency": 4,
|
||||
"log-to-file": true,
|
||||
"dashboard-compare-enabled": true
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,60 @@
|
||||
using System;
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.ComponentModel.DataAnnotations.Schema;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Attestor.Persistence.Entities;
|
||||
|
||||
/// <summary>
|
||||
/// Audit log entry for proof chain operations.
|
||||
/// Maps to proofchain.audit_log table.
|
||||
/// </summary>
|
||||
[Table("audit_log", Schema = "proofchain")]
|
||||
public class AuditLogEntity
|
||||
{
|
||||
/// <summary>
|
||||
/// Primary key - auto-generated UUID.
|
||||
/// </summary>
|
||||
[Key]
|
||||
[Column("log_id")]
|
||||
public Guid LogId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// The operation performed (e.g., "create", "verify", "revoke").
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("operation")]
|
||||
public string Operation { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// The type of entity affected (e.g., "sbom_entry", "spine", "trust_anchor").
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("entity_type")]
|
||||
public string EntityType { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// The ID of the affected entity.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("entity_id")]
|
||||
public string EntityId { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// The actor who performed the operation (user, service, etc.).
|
||||
/// </summary>
|
||||
[Column("actor")]
|
||||
public string? Actor { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Additional details about the operation.
|
||||
/// </summary>
|
||||
[Column("details", TypeName = "jsonb")]
|
||||
public JsonDocument? Details { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When this log entry was created.
|
||||
/// </summary>
|
||||
[Column("created_at")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
}
|
||||
@@ -0,0 +1,80 @@
|
||||
using System;
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.ComponentModel.DataAnnotations.Schema;
|
||||
|
||||
namespace StellaOps.Attestor.Persistence.Entities;
|
||||
|
||||
/// <summary>
|
||||
/// Signed DSSE envelope for proof chain statements.
|
||||
/// Maps to proofchain.dsse_envelopes table.
|
||||
/// </summary>
|
||||
[Table("dsse_envelopes", Schema = "proofchain")]
|
||||
public class DsseEnvelopeEntity
|
||||
{
|
||||
/// <summary>
|
||||
/// Primary key - auto-generated UUID.
|
||||
/// </summary>
|
||||
[Key]
|
||||
[Column("env_id")]
|
||||
public Guid EnvId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the SBOM entry this envelope relates to.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("entry_id")]
|
||||
public Guid EntryId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Predicate type URI (e.g., evidence.stella/v1).
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("predicate_type")]
|
||||
public string PredicateType { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// Key ID that signed this envelope.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("signer_keyid")]
|
||||
public string SignerKeyId { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// SHA-256 hash of the envelope body.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[MaxLength(64)]
|
||||
[Column("body_hash")]
|
||||
public string BodyHash { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// Reference to blob storage (OCI, S3, file).
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("envelope_blob_ref")]
|
||||
public string EnvelopeBlobRef { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// When the envelope was signed.
|
||||
/// </summary>
|
||||
[Column("signed_at")]
|
||||
public DateTimeOffset SignedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When this record was created.
|
||||
/// </summary>
|
||||
[Column("created_at")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
|
||||
// Navigation properties
|
||||
|
||||
/// <summary>
|
||||
/// The SBOM entry this envelope relates to.
|
||||
/// </summary>
|
||||
public SbomEntryEntity Entry { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// The Rekor transparency log entry if logged.
|
||||
/// </summary>
|
||||
public RekorEntryEntity? RekorEntry { get; set; }
|
||||
}
|
||||
@@ -0,0 +1,76 @@
|
||||
using System;
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.ComponentModel.DataAnnotations.Schema;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Attestor.Persistence.Entities;
|
||||
|
||||
/// <summary>
|
||||
/// Rekor transparency log entry for DSSE envelope verification.
|
||||
/// Maps to proofchain.rekor_entries table.
|
||||
/// </summary>
|
||||
[Table("rekor_entries", Schema = "proofchain")]
|
||||
public class RekorEntryEntity
|
||||
{
|
||||
/// <summary>
|
||||
/// Primary key - SHA-256 hash of the DSSE envelope.
|
||||
/// </summary>
|
||||
[Key]
|
||||
[MaxLength(64)]
|
||||
[Column("dsse_sha256")]
|
||||
public string DsseSha256 { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// Log index in Rekor.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("log_index")]
|
||||
public long LogIndex { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Rekor log ID (tree hash).
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("log_id")]
|
||||
public string LogId { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// UUID of the entry in Rekor.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("uuid")]
|
||||
public string Uuid { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// Unix timestamp when entry was integrated into the log.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("integrated_time")]
|
||||
public long IntegratedTime { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Merkle inclusion proof from Rekor.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("inclusion_proof", TypeName = "jsonb")]
|
||||
public JsonDocument InclusionProof { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// When this record was created.
|
||||
/// </summary>
|
||||
[Column("created_at")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the DSSE envelope.
|
||||
/// </summary>
|
||||
[Column("env_id")]
|
||||
public Guid? EnvId { get; set; }
|
||||
|
||||
// Navigation properties
|
||||
|
||||
/// <summary>
|
||||
/// The DSSE envelope this entry refers to.
|
||||
/// </summary>
|
||||
public DsseEnvelopeEntity? Envelope { get; set; }
|
||||
}
|
||||
@@ -0,0 +1,78 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.ComponentModel.DataAnnotations.Schema;
|
||||
|
||||
namespace StellaOps.Attestor.Persistence.Entities;
|
||||
|
||||
/// <summary>
|
||||
/// SBOM component entry with content-addressed identifiers.
|
||||
/// Maps to proofchain.sbom_entries table.
|
||||
/// </summary>
|
||||
[Table("sbom_entries", Schema = "proofchain")]
|
||||
public class SbomEntryEntity
|
||||
{
|
||||
/// <summary>
|
||||
/// Primary key - auto-generated UUID.
|
||||
/// </summary>
|
||||
[Key]
|
||||
[Column("entry_id")]
|
||||
public Guid EntryId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// SHA-256 hash of the parent SBOM document.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[MaxLength(64)]
|
||||
[Column("bom_digest")]
|
||||
public string BomDigest { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// Package URL (PURL) of the component.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("purl")]
|
||||
public string Purl { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// Component version.
|
||||
/// </summary>
|
||||
[Column("version")]
|
||||
public string? Version { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// SHA-256 hash of the component artifact if available.
|
||||
/// </summary>
|
||||
[MaxLength(64)]
|
||||
[Column("artifact_digest")]
|
||||
public string? ArtifactDigest { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the trust anchor for this entry.
|
||||
/// </summary>
|
||||
[Column("trust_anchor_id")]
|
||||
public Guid? TrustAnchorId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When this entry was created.
|
||||
/// </summary>
|
||||
[Column("created_at")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
|
||||
// Navigation properties
|
||||
|
||||
/// <summary>
|
||||
/// The trust anchor for this entry.
|
||||
/// </summary>
|
||||
public TrustAnchorEntity? TrustAnchor { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// DSSE envelopes associated with this entry.
|
||||
/// </summary>
|
||||
public ICollection<DsseEnvelopeEntity> Envelopes { get; set; } = new List<DsseEnvelopeEntity>();
|
||||
|
||||
/// <summary>
|
||||
/// The proof spine for this entry.
|
||||
/// </summary>
|
||||
public SpineEntity? Spine { get; set; }
|
||||
}
|
||||
@@ -0,0 +1,82 @@
|
||||
using System;
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.ComponentModel.DataAnnotations.Schema;
|
||||
|
||||
namespace StellaOps.Attestor.Persistence.Entities;
|
||||
|
||||
/// <summary>
|
||||
/// Proof spine linking evidence to verdicts via merkle aggregation.
|
||||
/// Maps to proofchain.spines table.
|
||||
/// </summary>
|
||||
[Table("spines", Schema = "proofchain")]
|
||||
public class SpineEntity
|
||||
{
|
||||
/// <summary>
|
||||
/// Primary key - references SBOM entry.
|
||||
/// </summary>
|
||||
[Key]
|
||||
[Column("entry_id")]
|
||||
public Guid EntryId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// ProofBundleID (merkle root of all components).
|
||||
/// </summary>
|
||||
[Required]
|
||||
[MaxLength(64)]
|
||||
[Column("bundle_id")]
|
||||
public string BundleId { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// Array of EvidenceIDs in sorted order.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("evidence_ids", TypeName = "text[]")]
|
||||
public string[] EvidenceIds { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// ReasoningID for the policy evaluation.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[MaxLength(64)]
|
||||
[Column("reasoning_id")]
|
||||
public string ReasoningId { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// VexVerdictID for the VEX statement.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[MaxLength(64)]
|
||||
[Column("vex_id")]
|
||||
public string VexId { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the trust anchor.
|
||||
/// </summary>
|
||||
[Column("anchor_id")]
|
||||
public Guid? AnchorId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Policy version used for evaluation.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("policy_version")]
|
||||
public string PolicyVersion { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// When this spine was created.
|
||||
/// </summary>
|
||||
[Column("created_at")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
|
||||
// Navigation properties
|
||||
|
||||
/// <summary>
|
||||
/// The SBOM entry this spine covers.
|
||||
/// </summary>
|
||||
public SbomEntryEntity Entry { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// The trust anchor for this spine.
|
||||
/// </summary>
|
||||
public TrustAnchorEntity? Anchor { get; set; }
|
||||
}
|
||||
@@ -0,0 +1,76 @@
|
||||
using System;
|
||||
using System.ComponentModel.DataAnnotations;
|
||||
using System.ComponentModel.DataAnnotations.Schema;
|
||||
|
||||
namespace StellaOps.Attestor.Persistence.Entities;
|
||||
|
||||
/// <summary>
|
||||
/// Trust anchor configuration for dependency verification.
|
||||
/// Maps to proofchain.trust_anchors table.
|
||||
/// </summary>
|
||||
[Table("trust_anchors", Schema = "proofchain")]
|
||||
public class TrustAnchorEntity
|
||||
{
|
||||
/// <summary>
|
||||
/// Primary key - auto-generated UUID.
|
||||
/// </summary>
|
||||
[Key]
|
||||
[Column("anchor_id")]
|
||||
public Guid AnchorId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// PURL glob pattern (e.g., pkg:npm/*).
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("purl_pattern")]
|
||||
public string PurlPattern { get; set; } = null!;
|
||||
|
||||
/// <summary>
|
||||
/// Key IDs allowed to sign attestations matching this pattern.
|
||||
/// </summary>
|
||||
[Required]
|
||||
[Column("allowed_keyids", TypeName = "text[]")]
|
||||
public string[] AllowedKeyIds { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Optional: Predicate types allowed for this anchor.
|
||||
/// </summary>
|
||||
[Column("allowed_predicate_types", TypeName = "text[]")]
|
||||
public string[]? AllowedPredicateTypes { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional reference to the policy document.
|
||||
/// </summary>
|
||||
[Column("policy_ref")]
|
||||
public string? PolicyRef { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Policy version for this anchor.
|
||||
/// </summary>
|
||||
[Column("policy_version")]
|
||||
public string? PolicyVersion { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Key IDs that have been revoked but may appear in old proofs.
|
||||
/// </summary>
|
||||
[Column("revoked_keys", TypeName = "text[]")]
|
||||
public string[] RevokedKeys { get; set; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// Whether this anchor is active.
|
||||
/// </summary>
|
||||
[Column("is_active")]
|
||||
public bool IsActive { get; set; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// When this anchor was created.
|
||||
/// </summary>
|
||||
[Column("created_at")]
|
||||
public DateTimeOffset CreatedAt { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// When this anchor was last updated.
|
||||
/// </summary>
|
||||
[Column("updated_at")]
|
||||
public DateTimeOffset UpdatedAt { get; set; }
|
||||
}
|
||||
@@ -0,0 +1,159 @@
|
||||
-- Migration: 20251214000001_AddProofChainSchema
|
||||
-- Creates the proofchain schema and all tables for proof chain persistence.
|
||||
-- This migration is idempotent and can be run multiple times safely.
|
||||
|
||||
-- Create schema
|
||||
CREATE SCHEMA IF NOT EXISTS proofchain;
|
||||
|
||||
-- Create verification_result enum type
|
||||
DO $$
|
||||
BEGIN
|
||||
IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'verification_result' AND typnamespace = 'proofchain'::regnamespace) THEN
|
||||
CREATE TYPE proofchain.verification_result AS ENUM ('pass', 'fail', 'pending');
|
||||
END IF;
|
||||
END $$;
|
||||
|
||||
-- 4.4 trust_anchors Table (create first - no dependencies)
|
||||
CREATE TABLE IF NOT EXISTS proofchain.trust_anchors (
|
||||
anchor_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
purl_pattern TEXT NOT NULL,
|
||||
allowed_keyids TEXT[] NOT NULL,
|
||||
allowed_predicate_types TEXT[],
|
||||
policy_ref TEXT,
|
||||
policy_version TEXT,
|
||||
revoked_keys TEXT[] DEFAULT '{}',
|
||||
is_active BOOLEAN NOT NULL DEFAULT TRUE,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_trust_anchors_pattern ON proofchain.trust_anchors(purl_pattern);
|
||||
CREATE INDEX IF NOT EXISTS idx_trust_anchors_active ON proofchain.trust_anchors(is_active) WHERE is_active = TRUE;
|
||||
|
||||
COMMENT ON TABLE proofchain.trust_anchors IS 'Trust anchor configurations for dependency verification';
|
||||
COMMENT ON COLUMN proofchain.trust_anchors.purl_pattern IS 'PURL glob pattern (e.g., pkg:npm/*)';
|
||||
COMMENT ON COLUMN proofchain.trust_anchors.revoked_keys IS 'Key IDs that have been revoked but may appear in old proofs';
|
||||
|
||||
-- 4.1 sbom_entries Table
|
||||
CREATE TABLE IF NOT EXISTS proofchain.sbom_entries (
|
||||
entry_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
bom_digest VARCHAR(64) NOT NULL,
|
||||
purl TEXT NOT NULL,
|
||||
version TEXT,
|
||||
artifact_digest VARCHAR(64),
|
||||
trust_anchor_id UUID REFERENCES proofchain.trust_anchors(anchor_id) ON DELETE SET NULL,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
|
||||
-- Compound unique constraint for idempotent inserts
|
||||
CONSTRAINT uq_sbom_entry UNIQUE (bom_digest, purl, version)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_sbom_entries_bom_digest ON proofchain.sbom_entries(bom_digest);
|
||||
CREATE INDEX IF NOT EXISTS idx_sbom_entries_purl ON proofchain.sbom_entries(purl);
|
||||
CREATE INDEX IF NOT EXISTS idx_sbom_entries_artifact ON proofchain.sbom_entries(artifact_digest);
|
||||
CREATE INDEX IF NOT EXISTS idx_sbom_entries_anchor ON proofchain.sbom_entries(trust_anchor_id);
|
||||
|
||||
COMMENT ON TABLE proofchain.sbom_entries IS 'SBOM component entries with content-addressed identifiers';
|
||||
COMMENT ON COLUMN proofchain.sbom_entries.bom_digest IS 'SHA-256 hash of the parent SBOM document';
|
||||
COMMENT ON COLUMN proofchain.sbom_entries.purl IS 'Package URL (PURL) of the component';
|
||||
COMMENT ON COLUMN proofchain.sbom_entries.artifact_digest IS 'SHA-256 hash of the component artifact if available';
|
||||
|
||||
-- 4.2 dsse_envelopes Table
|
||||
CREATE TABLE IF NOT EXISTS proofchain.dsse_envelopes (
|
||||
env_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
entry_id UUID NOT NULL REFERENCES proofchain.sbom_entries(entry_id) ON DELETE CASCADE,
|
||||
predicate_type TEXT NOT NULL,
|
||||
signer_keyid TEXT NOT NULL,
|
||||
body_hash VARCHAR(64) NOT NULL,
|
||||
envelope_blob_ref TEXT NOT NULL,
|
||||
signed_at TIMESTAMPTZ NOT NULL,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
|
||||
-- Prevent duplicate envelopes for same entry/predicate
|
||||
CONSTRAINT uq_dsse_envelope UNIQUE (entry_id, predicate_type, body_hash)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_dsse_entry_predicate ON proofchain.dsse_envelopes(entry_id, predicate_type);
|
||||
CREATE INDEX IF NOT EXISTS idx_dsse_signer ON proofchain.dsse_envelopes(signer_keyid);
|
||||
CREATE INDEX IF NOT EXISTS idx_dsse_body_hash ON proofchain.dsse_envelopes(body_hash);
|
||||
|
||||
COMMENT ON TABLE proofchain.dsse_envelopes IS 'Signed DSSE envelopes for proof chain statements';
|
||||
COMMENT ON COLUMN proofchain.dsse_envelopes.predicate_type IS 'Predicate type URI (e.g., evidence.stella/v1)';
|
||||
COMMENT ON COLUMN proofchain.dsse_envelopes.envelope_blob_ref IS 'Reference to blob storage (OCI, S3, file)';
|
||||
|
||||
-- 4.3 spines Table
|
||||
CREATE TABLE IF NOT EXISTS proofchain.spines (
|
||||
entry_id UUID PRIMARY KEY REFERENCES proofchain.sbom_entries(entry_id) ON DELETE CASCADE,
|
||||
bundle_id VARCHAR(64) NOT NULL,
|
||||
evidence_ids TEXT[] NOT NULL,
|
||||
reasoning_id VARCHAR(64) NOT NULL,
|
||||
vex_id VARCHAR(64) NOT NULL,
|
||||
anchor_id UUID REFERENCES proofchain.trust_anchors(anchor_id) ON DELETE SET NULL,
|
||||
policy_version TEXT NOT NULL,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
|
||||
-- Bundle ID must be unique
|
||||
CONSTRAINT uq_spine_bundle UNIQUE (bundle_id)
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_spines_bundle ON proofchain.spines(bundle_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_spines_anchor ON proofchain.spines(anchor_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_spines_policy ON proofchain.spines(policy_version);
|
||||
|
||||
COMMENT ON TABLE proofchain.spines IS 'Proof spines linking evidence to verdicts via merkle aggregation';
|
||||
COMMENT ON COLUMN proofchain.spines.bundle_id IS 'ProofBundleID (merkle root of all components)';
|
||||
COMMENT ON COLUMN proofchain.spines.evidence_ids IS 'Array of EvidenceIDs in sorted order';
|
||||
|
||||
-- 4.5 rekor_entries Table
|
||||
CREATE TABLE IF NOT EXISTS proofchain.rekor_entries (
|
||||
dsse_sha256 VARCHAR(64) PRIMARY KEY,
|
||||
log_index BIGINT NOT NULL,
|
||||
log_id TEXT NOT NULL,
|
||||
uuid TEXT NOT NULL,
|
||||
integrated_time BIGINT NOT NULL,
|
||||
inclusion_proof JSONB NOT NULL,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||
|
||||
-- Reference to the DSSE envelope
|
||||
env_id UUID REFERENCES proofchain.dsse_envelopes(env_id) ON DELETE SET NULL
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_rekor_log_index ON proofchain.rekor_entries(log_index);
|
||||
CREATE INDEX IF NOT EXISTS idx_rekor_log_id ON proofchain.rekor_entries(log_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_rekor_uuid ON proofchain.rekor_entries(uuid);
|
||||
CREATE INDEX IF NOT EXISTS idx_rekor_env ON proofchain.rekor_entries(env_id);
|
||||
|
||||
COMMENT ON TABLE proofchain.rekor_entries IS 'Rekor transparency log entries for verification';
|
||||
COMMENT ON COLUMN proofchain.rekor_entries.inclusion_proof IS 'Merkle inclusion proof from Rekor';
|
||||
|
||||
-- Audit log table
|
||||
CREATE TABLE IF NOT EXISTS proofchain.audit_log (
|
||||
log_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
operation TEXT NOT NULL,
|
||||
entity_type TEXT NOT NULL,
|
||||
entity_id TEXT NOT NULL,
|
||||
actor TEXT,
|
||||
details JSONB,
|
||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_audit_entity ON proofchain.audit_log(entity_type, entity_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_audit_created ON proofchain.audit_log(created_at DESC);
|
||||
|
||||
COMMENT ON TABLE proofchain.audit_log IS 'Audit log for proof chain operations';
|
||||
|
||||
-- Create updated_at trigger function
|
||||
CREATE OR REPLACE FUNCTION proofchain.update_updated_at_column()
|
||||
RETURNS TRIGGER AS $$
|
||||
BEGIN
|
||||
NEW.updated_at = NOW();
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
-- Apply updated_at trigger to trust_anchors
|
||||
DROP TRIGGER IF EXISTS update_trust_anchors_updated_at ON proofchain.trust_anchors;
|
||||
CREATE TRIGGER update_trust_anchors_updated_at
|
||||
BEFORE UPDATE ON proofchain.trust_anchors
|
||||
FOR EACH ROW
|
||||
EXECUTE FUNCTION proofchain.update_updated_at_column();
|
||||
@@ -0,0 +1,20 @@
|
||||
-- Migration: 20251214000002_RollbackProofChainSchema
|
||||
-- Rollback script for the proofchain schema.
|
||||
-- WARNING: This will delete all proof chain data!
|
||||
|
||||
-- Drop tables in reverse dependency order
|
||||
DROP TABLE IF EXISTS proofchain.audit_log CASCADE;
|
||||
DROP TABLE IF EXISTS proofchain.rekor_entries CASCADE;
|
||||
DROP TABLE IF EXISTS proofchain.spines CASCADE;
|
||||
DROP TABLE IF EXISTS proofchain.dsse_envelopes CASCADE;
|
||||
DROP TABLE IF EXISTS proofchain.sbom_entries CASCADE;
|
||||
DROP TABLE IF EXISTS proofchain.trust_anchors CASCADE;
|
||||
|
||||
-- Drop types
|
||||
DROP TYPE IF EXISTS proofchain.verification_result CASCADE;
|
||||
|
||||
-- Drop functions
|
||||
DROP FUNCTION IF EXISTS proofchain.update_updated_at_column() CASCADE;
|
||||
|
||||
-- Drop schema
|
||||
DROP SCHEMA IF EXISTS proofchain CASCADE;
|
||||
@@ -0,0 +1,143 @@
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using StellaOps.Attestor.Persistence.Entities;
|
||||
|
||||
namespace StellaOps.Attestor.Persistence;
|
||||
|
||||
/// <summary>
|
||||
/// Entity Framework Core DbContext for proof chain persistence.
|
||||
/// </summary>
|
||||
public class ProofChainDbContext : DbContext
|
||||
{
|
||||
public ProofChainDbContext(DbContextOptions<ProofChainDbContext> options)
|
||||
: base(options)
|
||||
{
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// SBOM entries table.
|
||||
/// </summary>
|
||||
public DbSet<SbomEntryEntity> SbomEntries => Set<SbomEntryEntity>();
|
||||
|
||||
/// <summary>
|
||||
/// DSSE envelopes table.
|
||||
/// </summary>
|
||||
public DbSet<DsseEnvelopeEntity> DsseEnvelopes => Set<DsseEnvelopeEntity>();
|
||||
|
||||
/// <summary>
|
||||
/// Proof spines table.
|
||||
/// </summary>
|
||||
public DbSet<SpineEntity> Spines => Set<SpineEntity>();
|
||||
|
||||
/// <summary>
|
||||
/// Trust anchors table.
|
||||
/// </summary>
|
||||
public DbSet<TrustAnchorEntity> TrustAnchors => Set<TrustAnchorEntity>();
|
||||
|
||||
/// <summary>
|
||||
/// Rekor entries table.
|
||||
/// </summary>
|
||||
public DbSet<RekorEntryEntity> RekorEntries => Set<RekorEntryEntity>();
|
||||
|
||||
/// <summary>
|
||||
/// Audit log table.
|
||||
/// </summary>
|
||||
public DbSet<AuditLogEntity> AuditLog => Set<AuditLogEntity>();
|
||||
|
||||
protected override void OnModelCreating(ModelBuilder modelBuilder)
|
||||
{
|
||||
base.OnModelCreating(modelBuilder);
|
||||
|
||||
// Configure schema
|
||||
modelBuilder.HasDefaultSchema("proofchain");
|
||||
|
||||
// SbomEntryEntity configuration
|
||||
modelBuilder.Entity<SbomEntryEntity>(entity =>
|
||||
{
|
||||
entity.HasIndex(e => e.BomDigest).HasDatabaseName("idx_sbom_entries_bom_digest");
|
||||
entity.HasIndex(e => e.Purl).HasDatabaseName("idx_sbom_entries_purl");
|
||||
entity.HasIndex(e => e.ArtifactDigest).HasDatabaseName("idx_sbom_entries_artifact");
|
||||
entity.HasIndex(e => e.TrustAnchorId).HasDatabaseName("idx_sbom_entries_anchor");
|
||||
|
||||
// Unique constraint
|
||||
entity.HasIndex(e => new { e.BomDigest, e.Purl, e.Version })
|
||||
.HasDatabaseName("uq_sbom_entry")
|
||||
.IsUnique();
|
||||
|
||||
// Relationships
|
||||
entity.HasOne(e => e.TrustAnchor)
|
||||
.WithMany()
|
||||
.HasForeignKey(e => e.TrustAnchorId)
|
||||
.OnDelete(DeleteBehavior.SetNull);
|
||||
|
||||
entity.HasMany(e => e.Envelopes)
|
||||
.WithOne(e => e.Entry)
|
||||
.HasForeignKey(e => e.EntryId)
|
||||
.OnDelete(DeleteBehavior.Cascade);
|
||||
|
||||
entity.HasOne(e => e.Spine)
|
||||
.WithOne(e => e.Entry)
|
||||
.HasForeignKey<SpineEntity>(e => e.EntryId)
|
||||
.OnDelete(DeleteBehavior.Cascade);
|
||||
});
|
||||
|
||||
// DsseEnvelopeEntity configuration
|
||||
modelBuilder.Entity<DsseEnvelopeEntity>(entity =>
|
||||
{
|
||||
entity.HasIndex(e => new { e.EntryId, e.PredicateType })
|
||||
.HasDatabaseName("idx_dsse_entry_predicate");
|
||||
entity.HasIndex(e => e.SignerKeyId).HasDatabaseName("idx_dsse_signer");
|
||||
entity.HasIndex(e => e.BodyHash).HasDatabaseName("idx_dsse_body_hash");
|
||||
|
||||
// Unique constraint
|
||||
entity.HasIndex(e => new { e.EntryId, e.PredicateType, e.BodyHash })
|
||||
.HasDatabaseName("uq_dsse_envelope")
|
||||
.IsUnique();
|
||||
});
|
||||
|
||||
// SpineEntity configuration
|
||||
modelBuilder.Entity<SpineEntity>(entity =>
|
||||
{
|
||||
entity.HasIndex(e => e.BundleId).HasDatabaseName("idx_spines_bundle").IsUnique();
|
||||
entity.HasIndex(e => e.AnchorId).HasDatabaseName("idx_spines_anchor");
|
||||
entity.HasIndex(e => e.PolicyVersion).HasDatabaseName("idx_spines_policy");
|
||||
|
||||
entity.HasOne(e => e.Anchor)
|
||||
.WithMany()
|
||||
.HasForeignKey(e => e.AnchorId)
|
||||
.OnDelete(DeleteBehavior.SetNull);
|
||||
});
|
||||
|
||||
// TrustAnchorEntity configuration
|
||||
modelBuilder.Entity<TrustAnchorEntity>(entity =>
|
||||
{
|
||||
entity.HasIndex(e => e.PurlPattern).HasDatabaseName("idx_trust_anchors_pattern");
|
||||
entity.HasIndex(e => e.IsActive)
|
||||
.HasDatabaseName("idx_trust_anchors_active")
|
||||
.HasFilter("is_active = TRUE");
|
||||
});
|
||||
|
||||
// RekorEntryEntity configuration
|
||||
modelBuilder.Entity<RekorEntryEntity>(entity =>
|
||||
{
|
||||
entity.HasIndex(e => e.LogIndex).HasDatabaseName("idx_rekor_log_index");
|
||||
entity.HasIndex(e => e.LogId).HasDatabaseName("idx_rekor_log_id");
|
||||
entity.HasIndex(e => e.Uuid).HasDatabaseName("idx_rekor_uuid");
|
||||
entity.HasIndex(e => e.EnvId).HasDatabaseName("idx_rekor_env");
|
||||
|
||||
entity.HasOne(e => e.Envelope)
|
||||
.WithOne(e => e.RekorEntry)
|
||||
.HasForeignKey<RekorEntryEntity>(e => e.EnvId)
|
||||
.OnDelete(DeleteBehavior.SetNull);
|
||||
});
|
||||
|
||||
// AuditLogEntity configuration
|
||||
modelBuilder.Entity<AuditLogEntity>(entity =>
|
||||
{
|
||||
entity.HasIndex(e => new { e.EntityType, e.EntityId })
|
||||
.HasDatabaseName("idx_audit_entity");
|
||||
entity.HasIndex(e => e.CreatedAt)
|
||||
.HasDatabaseName("idx_audit_created")
|
||||
.IsDescending();
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,206 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Attestor.Persistence.Entities;
|
||||
|
||||
namespace StellaOps.Attestor.Persistence.Repositories;
|
||||
|
||||
/// <summary>
|
||||
/// Repository for proof chain data access.
|
||||
/// </summary>
|
||||
public interface IProofChainRepository
|
||||
{
|
||||
#region SBOM Entries
|
||||
|
||||
/// <summary>
|
||||
/// Get an SBOM entry by its unique combination of bom digest, purl, and version.
|
||||
/// </summary>
|
||||
Task<SbomEntryEntity?> GetSbomEntryAsync(
|
||||
string bomDigest,
|
||||
string purl,
|
||||
string? version,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get an SBOM entry by its entry ID.
|
||||
/// </summary>
|
||||
Task<SbomEntryEntity?> GetSbomEntryByIdAsync(
|
||||
Guid entryId,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Insert or update an SBOM entry (upsert on unique constraint).
|
||||
/// </summary>
|
||||
Task<SbomEntryEntity> UpsertSbomEntryAsync(
|
||||
SbomEntryEntity entry,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get all SBOM entries by artifact digest.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<SbomEntryEntity>> GetSbomEntriesByArtifactAsync(
|
||||
string artifactDigest,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get all SBOM entries by bom digest.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<SbomEntryEntity>> GetSbomEntriesByBomDigestAsync(
|
||||
string bomDigest,
|
||||
CancellationToken ct = default);
|
||||
|
||||
#endregion
|
||||
|
||||
#region DSSE Envelopes
|
||||
|
||||
/// <summary>
|
||||
/// Get an envelope by its ID.
|
||||
/// </summary>
|
||||
Task<DsseEnvelopeEntity?> GetEnvelopeAsync(
|
||||
Guid envId,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get an envelope by its body hash.
|
||||
/// </summary>
|
||||
Task<DsseEnvelopeEntity?> GetEnvelopeByBodyHashAsync(
|
||||
string bodyHash,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Save a new envelope.
|
||||
/// </summary>
|
||||
Task<DsseEnvelopeEntity> SaveEnvelopeAsync(
|
||||
DsseEnvelopeEntity envelope,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get all envelopes for an SBOM entry.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<DsseEnvelopeEntity>> GetEnvelopesByEntryAsync(
|
||||
Guid entryId,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get envelopes for an entry filtered by predicate type.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<DsseEnvelopeEntity>> GetEnvelopesByPredicateTypeAsync(
|
||||
Guid entryId,
|
||||
string predicateType,
|
||||
CancellationToken ct = default);
|
||||
|
||||
#endregion
|
||||
|
||||
#region Spines
|
||||
|
||||
/// <summary>
|
||||
/// Get a spine by its entry ID.
|
||||
/// </summary>
|
||||
Task<SpineEntity?> GetSpineAsync(
|
||||
Guid entryId,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get a spine by its bundle ID.
|
||||
/// </summary>
|
||||
Task<SpineEntity?> GetSpineByBundleIdAsync(
|
||||
string bundleId,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Save or update a spine.
|
||||
/// </summary>
|
||||
Task<SpineEntity> SaveSpineAsync(
|
||||
SpineEntity spine,
|
||||
CancellationToken ct = default);
|
||||
|
||||
#endregion
|
||||
|
||||
#region Trust Anchors
|
||||
|
||||
/// <summary>
|
||||
/// Get a trust anchor by its ID.
|
||||
/// </summary>
|
||||
Task<TrustAnchorEntity?> GetTrustAnchorAsync(
|
||||
Guid anchorId,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get the trust anchor matching a PURL pattern (best match).
|
||||
/// </summary>
|
||||
Task<TrustAnchorEntity?> GetTrustAnchorByPatternAsync(
|
||||
string purl,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Save or update a trust anchor.
|
||||
/// </summary>
|
||||
Task<TrustAnchorEntity> SaveTrustAnchorAsync(
|
||||
TrustAnchorEntity anchor,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get all active trust anchors.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<TrustAnchorEntity>> GetActiveTrustAnchorsAsync(
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Revoke a key in a trust anchor.
|
||||
/// </summary>
|
||||
Task RevokeKeyAsync(
|
||||
Guid anchorId,
|
||||
string keyId,
|
||||
CancellationToken ct = default);
|
||||
|
||||
#endregion
|
||||
|
||||
#region Rekor Entries
|
||||
|
||||
/// <summary>
|
||||
/// Get a Rekor entry by DSSE SHA-256.
|
||||
/// </summary>
|
||||
Task<RekorEntryEntity?> GetRekorEntryAsync(
|
||||
string dsseSha256,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get a Rekor entry by log index.
|
||||
/// </summary>
|
||||
Task<RekorEntryEntity?> GetRekorEntryByLogIndexAsync(
|
||||
long logIndex,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Save a Rekor entry.
|
||||
/// </summary>
|
||||
Task<RekorEntryEntity> SaveRekorEntryAsync(
|
||||
RekorEntryEntity entry,
|
||||
CancellationToken ct = default);
|
||||
|
||||
#endregion
|
||||
|
||||
#region Audit Log
|
||||
|
||||
/// <summary>
|
||||
/// Log an audit entry.
|
||||
/// </summary>
|
||||
Task LogAuditAsync(
|
||||
string operation,
|
||||
string entityType,
|
||||
string entityId,
|
||||
string? actor = null,
|
||||
object? details = null,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get audit log entries for an entity.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<AuditLogEntity>> GetAuditLogAsync(
|
||||
string entityType,
|
||||
string entityId,
|
||||
CancellationToken ct = default);
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,297 @@
|
||||
using System.Text.RegularExpressions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Attestor.Persistence.Entities;
|
||||
|
||||
namespace StellaOps.Attestor.Persistence.Services;
|
||||
|
||||
/// <summary>
|
||||
/// Matches PURLs against trust anchor patterns.
|
||||
/// SPRINT_0501_0006_0001 - Task #7
|
||||
/// </summary>
|
||||
public interface ITrustAnchorMatcher
|
||||
{
|
||||
/// <summary>
|
||||
/// Finds the best matching trust anchor for a given PURL.
|
||||
/// </summary>
|
||||
Task<TrustAnchorMatchResult?> FindMatchAsync(
|
||||
string purl,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Validates if a key ID is allowed for a given PURL.
|
||||
/// </summary>
|
||||
Task<bool> IsKeyAllowedAsync(
|
||||
string purl,
|
||||
string keyId,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Validates if a predicate type is allowed for a given PURL.
|
||||
/// </summary>
|
||||
Task<bool> IsPredicateAllowedAsync(
|
||||
string purl,
|
||||
string predicateType,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of trust anchor pattern matching.
|
||||
/// </summary>
|
||||
public sealed record TrustAnchorMatchResult
|
||||
{
|
||||
/// <summary>The matched trust anchor.</summary>
|
||||
public required TrustAnchorEntity Anchor { get; init; }
|
||||
|
||||
/// <summary>The pattern that matched.</summary>
|
||||
public required string MatchedPattern { get; init; }
|
||||
|
||||
/// <summary>Match specificity score (higher = more specific).</summary>
|
||||
public required int Specificity { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Implementation of trust anchor pattern matching using PURL glob patterns.
|
||||
/// </summary>
|
||||
public sealed class TrustAnchorMatcher : ITrustAnchorMatcher
|
||||
{
|
||||
private readonly IProofChainRepository _repository;
|
||||
private readonly ILogger<TrustAnchorMatcher> _logger;
|
||||
|
||||
// Cache compiled regex patterns
|
||||
private readonly Dictionary<string, Regex> _patternCache = new();
|
||||
private readonly Lock _cacheLock = new();
|
||||
|
||||
public TrustAnchorMatcher(
|
||||
IProofChainRepository repository,
|
||||
ILogger<TrustAnchorMatcher> logger)
|
||||
{
|
||||
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public async Task<TrustAnchorMatchResult?> FindMatchAsync(
|
||||
string purl,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrEmpty(purl);
|
||||
|
||||
var anchors = await _repository.GetActiveAnchorsAsync(cancellationToken);
|
||||
|
||||
TrustAnchorMatchResult? bestMatch = null;
|
||||
|
||||
foreach (var anchor in anchors)
|
||||
{
|
||||
if (!IsActive(anchor))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var regex = GetOrCreateRegex(anchor.PurlPattern);
|
||||
if (regex.IsMatch(purl))
|
||||
{
|
||||
var specificity = CalculateSpecificity(anchor.PurlPattern);
|
||||
|
||||
if (bestMatch == null || specificity > bestMatch.Specificity)
|
||||
{
|
||||
bestMatch = new TrustAnchorMatchResult
|
||||
{
|
||||
Anchor = anchor,
|
||||
MatchedPattern = anchor.PurlPattern,
|
||||
Specificity = specificity,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (bestMatch != null)
|
||||
{
|
||||
_logger.LogDebug(
|
||||
"PURL {Purl} matched anchor pattern {Pattern} with specificity {Specificity}",
|
||||
purl, bestMatch.MatchedPattern, bestMatch.Specificity);
|
||||
}
|
||||
|
||||
return bestMatch;
|
||||
}
|
||||
|
||||
public async Task<bool> IsKeyAllowedAsync(
|
||||
string purl,
|
||||
string keyId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrEmpty(purl);
|
||||
ArgumentException.ThrowIfNullOrEmpty(keyId);
|
||||
|
||||
var match = await FindMatchAsync(purl, cancellationToken);
|
||||
if (match == null)
|
||||
{
|
||||
_logger.LogDebug("No trust anchor found for PURL {Purl}", purl);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if key is revoked
|
||||
if (match.Anchor.RevokedKeys.Contains(keyId, StringComparer.OrdinalIgnoreCase))
|
||||
{
|
||||
_logger.LogWarning(
|
||||
"Key {KeyId} is revoked for anchor {AnchorId}",
|
||||
keyId, match.Anchor.AnchorId);
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check if key is in allowed list
|
||||
var allowed = match.Anchor.AllowedKeyIds.Contains(keyId, StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
if (!allowed)
|
||||
{
|
||||
_logger.LogDebug(
|
||||
"Key {KeyId} not in allowed list for anchor {AnchorId}",
|
||||
keyId, match.Anchor.AnchorId);
|
||||
}
|
||||
|
||||
return allowed;
|
||||
}
|
||||
|
||||
public async Task<bool> IsPredicateAllowedAsync(
|
||||
string purl,
|
||||
string predicateType,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrEmpty(purl);
|
||||
ArgumentException.ThrowIfNullOrEmpty(predicateType);
|
||||
|
||||
var match = await FindMatchAsync(purl, cancellationToken);
|
||||
if (match == null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
// If no predicate restrictions, allow all
|
||||
if (match.Anchor.AllowedPredicateTypes == null || match.Anchor.AllowedPredicateTypes.Length == 0)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
return match.Anchor.AllowedPredicateTypes.Contains(predicateType, StringComparer.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Converts a PURL glob pattern to a regex.
|
||||
/// Supports: * (any chars), ? (single char), ** (any path segment)
|
||||
/// </summary>
|
||||
private Regex GetOrCreateRegex(string pattern)
|
||||
{
|
||||
lock (_cacheLock)
|
||||
{
|
||||
if (_patternCache.TryGetValue(pattern, out var cached))
|
||||
{
|
||||
return cached;
|
||||
}
|
||||
|
||||
var regexPattern = ConvertGlobToRegex(pattern);
|
||||
var regex = new Regex(regexPattern, RegexOptions.IgnoreCase | RegexOptions.Compiled);
|
||||
|
||||
_patternCache[pattern] = regex;
|
||||
return regex;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Converts a glob pattern to a regex pattern.
|
||||
/// </summary>
|
||||
private static string ConvertGlobToRegex(string glob)
|
||||
{
|
||||
var regex = new System.Text.StringBuilder("^");
|
||||
|
||||
for (int i = 0; i < glob.Length; i++)
|
||||
{
|
||||
char c = glob[i];
|
||||
switch (c)
|
||||
{
|
||||
case '*':
|
||||
if (i + 1 < glob.Length && glob[i + 1] == '*')
|
||||
{
|
||||
// ** matches any path segments
|
||||
regex.Append(".*");
|
||||
i++; // Skip next *
|
||||
}
|
||||
else
|
||||
{
|
||||
// * matches anything except /
|
||||
regex.Append("[^/]*");
|
||||
}
|
||||
break;
|
||||
|
||||
case '?':
|
||||
// ? matches single character except /
|
||||
regex.Append("[^/]");
|
||||
break;
|
||||
|
||||
case '.':
|
||||
case '^':
|
||||
case '$':
|
||||
case '+':
|
||||
case '(':
|
||||
case ')':
|
||||
case '[':
|
||||
case ']':
|
||||
case '{':
|
||||
case '}':
|
||||
case '|':
|
||||
case '\\':
|
||||
// Escape regex special chars
|
||||
regex.Append('\\').Append(c);
|
||||
break;
|
||||
|
||||
default:
|
||||
regex.Append(c);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
regex.Append('$');
|
||||
return regex.ToString();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calculates pattern specificity (more specific = higher score).
|
||||
/// </summary>
|
||||
private static int CalculateSpecificity(string pattern)
|
||||
{
|
||||
// Count non-wildcard segments
|
||||
int specificity = 0;
|
||||
|
||||
// More slashes = more specific
|
||||
specificity += pattern.Count(c => c == '/') * 10;
|
||||
|
||||
// More literal characters = more specific
|
||||
specificity += pattern.Count(c => c != '*' && c != '?');
|
||||
|
||||
// Penalize wildcards
|
||||
specificity -= pattern.Count(c => c == '*') * 5;
|
||||
specificity -= pattern.Count(c => c == '?') * 2;
|
||||
|
||||
return specificity;
|
||||
}
|
||||
|
||||
private static bool IsActive(TrustAnchorEntity anchor)
|
||||
{
|
||||
// Anchor is active if IsActive property exists and is true
|
||||
// or if the property doesn't exist (backwards compatibility)
|
||||
var isActiveProp = anchor.GetType().GetProperty("IsActive");
|
||||
if (isActiveProp != null)
|
||||
{
|
||||
return (bool)(isActiveProp.GetValue(anchor) ?? true);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Repository interface extension for trust anchor queries.
|
||||
/// </summary>
|
||||
public interface IProofChainRepository
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets all active trust anchors.
|
||||
/// </summary>
|
||||
Task<IReadOnlyList<TrustAnchorEntity>> GetActiveAnchorsAsync(CancellationToken cancellationToken = default);
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<RootNamespace>StellaOps.Attestor.Persistence</RootNamespace>
|
||||
<Description>Proof chain persistence layer with Entity Framework Core and PostgreSQL support.</Description>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.EntityFrameworkCore" Version="10.0.0-preview.*" />
|
||||
<PackageReference Include="Npgsql.EntityFrameworkCore.PostgreSQL" Version="10.0.0-preview.*" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<None Include="Migrations\*.sql">
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</None>
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -0,0 +1,223 @@
|
||||
using StellaOps.Attestor.Persistence.Entities;
|
||||
using StellaOps.Attestor.Persistence.Services;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Moq;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Attestor.Persistence.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for proof chain database operations.
|
||||
/// SPRINT_0501_0006_0001 - Task #10
|
||||
/// </summary>
|
||||
public sealed class ProofChainRepositoryIntegrationTests
|
||||
{
|
||||
private readonly Mock<IProofChainRepository> _repositoryMock;
|
||||
private readonly TrustAnchorMatcher _matcher;
|
||||
|
||||
public ProofChainRepositoryIntegrationTests()
|
||||
{
|
||||
_repositoryMock = new Mock<IProofChainRepository>();
|
||||
_matcher = new TrustAnchorMatcher(
|
||||
_repositoryMock.Object,
|
||||
NullLogger<TrustAnchorMatcher>.Instance);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task FindMatchAsync_ExactPattern_MatchesCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/lodash@4.17.21", ["key-1"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
|
||||
// Act
|
||||
var result = await _matcher.FindMatchAsync("pkg:npm/lodash@4.17.21");
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result);
|
||||
Assert.Equal(anchor.AnchorId, result.Anchor.AnchorId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task FindMatchAsync_WildcardPattern_MatchesPackages()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
|
||||
// Act
|
||||
var result = await _matcher.FindMatchAsync("pkg:npm/lodash@4.17.21");
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result);
|
||||
Assert.Equal("pkg:npm/*", result.MatchedPattern);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task FindMatchAsync_DoubleWildcard_MatchesNestedPaths()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/@scope/**", ["key-1"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
|
||||
// Act
|
||||
var result = await _matcher.FindMatchAsync("pkg:npm/@scope/sub/package@1.0.0");
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task FindMatchAsync_MultipleMatches_ReturnsMoreSpecific()
|
||||
{
|
||||
// Arrange
|
||||
var genericAnchor = CreateAnchor("pkg:npm/*", ["key-generic"], "generic");
|
||||
var specificAnchor = CreateAnchor("pkg:npm/lodash@*", ["key-specific"], "specific");
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([genericAnchor, specificAnchor]);
|
||||
|
||||
// Act
|
||||
var result = await _matcher.FindMatchAsync("pkg:npm/lodash@4.17.21");
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result);
|
||||
Assert.Equal("specific", result.Anchor.PolicyRef);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task FindMatchAsync_NoMatch_ReturnsNull()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
|
||||
// Act
|
||||
var result = await _matcher.FindMatchAsync("pkg:pypi/requests@2.28.0");
|
||||
|
||||
// Assert
|
||||
Assert.Null(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task IsKeyAllowedAsync_AllowedKey_ReturnsTrue()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/*", ["key-1", "key-2"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
|
||||
// Act
|
||||
var allowed = await _matcher.IsKeyAllowedAsync("pkg:npm/lodash@4.17.21", "key-1");
|
||||
|
||||
// Assert
|
||||
Assert.True(allowed);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task IsKeyAllowedAsync_DisallowedKey_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
|
||||
// Act
|
||||
var allowed = await _matcher.IsKeyAllowedAsync("pkg:npm/lodash@4.17.21", "key-unknown");
|
||||
|
||||
// Assert
|
||||
Assert.False(allowed);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task IsKeyAllowedAsync_RevokedKey_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/*", ["key-1"], revokedKeys: ["key-1"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
|
||||
// Act
|
||||
var allowed = await _matcher.IsKeyAllowedAsync("pkg:npm/lodash@4.17.21", "key-1");
|
||||
|
||||
// Assert
|
||||
Assert.False(allowed); // Key is revoked even if in allowed list
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task IsPredicateAllowedAsync_NoRestrictions_AllowsAll()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
|
||||
anchor.AllowedPredicateTypes = null;
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
|
||||
// Act
|
||||
var allowed = await _matcher.IsPredicateAllowedAsync(
|
||||
"pkg:npm/lodash@4.17.21",
|
||||
"https://in-toto.io/attestation/vulns/v0.1");
|
||||
|
||||
// Assert
|
||||
Assert.True(allowed);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task IsPredicateAllowedAsync_WithRestrictions_EnforcesAllowlist()
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor("pkg:npm/*", ["key-1"]);
|
||||
anchor.AllowedPredicateTypes = ["evidence.stella/v1", "sbom.stella/v1"];
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
|
||||
// Act & Assert
|
||||
Assert.True(await _matcher.IsPredicateAllowedAsync(
|
||||
"pkg:npm/lodash@4.17.21", "evidence.stella/v1"));
|
||||
Assert.False(await _matcher.IsPredicateAllowedAsync(
|
||||
"pkg:npm/lodash@4.17.21", "random.predicate/v1"));
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("pkg:npm/*", "pkg:npm/lodash@4.17.21", true)]
|
||||
[InlineData("pkg:npm/lodash@*", "pkg:npm/lodash@4.17.21", true)]
|
||||
[InlineData("pkg:npm/lodash@4.17.*", "pkg:npm/lodash@4.17.21", true)]
|
||||
[InlineData("pkg:npm/lodash@4.17.21", "pkg:npm/lodash@4.17.21", true)]
|
||||
[InlineData("pkg:npm/lodash@4.17.21", "pkg:npm/lodash@4.17.22", false)]
|
||||
[InlineData("pkg:pypi/*", "pkg:npm/lodash@4.17.21", false)]
|
||||
[InlineData("pkg:npm/@scope/*", "pkg:npm/@scope/package@1.0.0", true)]
|
||||
[InlineData("pkg:npm/@scope/*", "pkg:npm/@other/package@1.0.0", false)]
|
||||
public async Task FindMatchAsync_PatternVariations_MatchCorrectly(
|
||||
string pattern, string purl, bool shouldMatch)
|
||||
{
|
||||
// Arrange
|
||||
var anchor = CreateAnchor(pattern, ["key-1"]);
|
||||
_repositoryMock.Setup(r => r.GetActiveAnchorsAsync(It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([anchor]);
|
||||
|
||||
// Act
|
||||
var result = await _matcher.FindMatchAsync(purl);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(shouldMatch, result != null);
|
||||
}
|
||||
|
||||
private static TrustAnchorEntity CreateAnchor(
|
||||
string pattern,
|
||||
string[] allowedKeys,
|
||||
string? policyRef = null,
|
||||
string[]? revokedKeys = null)
|
||||
{
|
||||
return new TrustAnchorEntity
|
||||
{
|
||||
AnchorId = Guid.NewGuid(),
|
||||
PurlPattern = pattern,
|
||||
AllowedKeyIds = allowedKeys,
|
||||
PolicyRef = policyRef,
|
||||
RevokedKeys = revokedKeys ?? [],
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,186 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Attestor.ProofChain.Identifiers;
|
||||
using StellaOps.Attestor.ProofChain.Signing;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Assembly;
|
||||
|
||||
/// <summary>
|
||||
/// Service for assembling and verifying proof spines.
|
||||
/// </summary>
|
||||
public interface IProofSpineAssembler
|
||||
{
|
||||
/// <summary>
|
||||
/// Assemble a complete proof spine from component IDs.
|
||||
/// </summary>
|
||||
/// <param name="request">The assembly request containing all component IDs.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The assembled proof spine result including the signed envelope.</returns>
|
||||
Task<ProofSpineResult> AssembleSpineAsync(
|
||||
ProofSpineRequest request,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Verify an existing proof spine by recomputing the merkle root.
|
||||
/// </summary>
|
||||
/// <param name="spine">The proof spine statement to verify.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The verification result.</returns>
|
||||
Task<SpineVerificationResult> VerifySpineAsync(
|
||||
ProofSpineStatement spine,
|
||||
CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request to assemble a proof spine.
|
||||
/// </summary>
|
||||
public sealed record ProofSpineRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// The SBOM entry ID that this spine covers.
|
||||
/// </summary>
|
||||
public required SbomEntryId SbomEntryId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The evidence IDs to include in the proof bundle.
|
||||
/// Will be sorted lexicographically during assembly.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<EvidenceId> EvidenceIds { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The reasoning ID explaining the decision.
|
||||
/// </summary>
|
||||
public required ReasoningId ReasoningId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The VEX verdict ID for this entry.
|
||||
/// </summary>
|
||||
public required VexVerdictId VexVerdictId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Version of the policy used.
|
||||
/// </summary>
|
||||
public required string PolicyVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The subject (artifact) this spine is about.
|
||||
/// </summary>
|
||||
public required ProofSpineSubject Subject { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Key profile to use for signing the spine statement.
|
||||
/// </summary>
|
||||
public SigningKeyProfile SigningProfile { get; init; } = SigningKeyProfile.Authority;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Subject for the proof spine (the artifact being attested).
|
||||
/// </summary>
|
||||
public sealed record ProofSpineSubject
|
||||
{
|
||||
/// <summary>
|
||||
/// Name of the subject (e.g., image reference).
|
||||
/// </summary>
|
||||
public required string Name { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Digest of the subject.
|
||||
/// </summary>
|
||||
public required IReadOnlyDictionary<string, string> Digest { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of proof spine assembly.
|
||||
/// </summary>
|
||||
public sealed record ProofSpineResult
|
||||
{
|
||||
/// <summary>
|
||||
/// The computed proof bundle ID (merkle root).
|
||||
/// </summary>
|
||||
public required ProofBundleId ProofBundleId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The proof spine statement.
|
||||
/// </summary>
|
||||
public required ProofSpineStatement Statement { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The signed DSSE envelope.
|
||||
/// </summary>
|
||||
public required DsseEnvelope SignedEnvelope { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The merkle tree used for the proof bundle.
|
||||
/// </summary>
|
||||
public required MerkleTree MerkleTree { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a merkle tree with proof generation capability.
|
||||
/// </summary>
|
||||
public sealed record MerkleTree
|
||||
{
|
||||
/// <summary>
|
||||
/// The root hash of the merkle tree.
|
||||
/// </summary>
|
||||
public required byte[] Root { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The leaf hashes in order.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<byte[]> Leaves { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of levels in the tree.
|
||||
/// </summary>
|
||||
public required int Depth { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of proof spine verification.
|
||||
/// </summary>
|
||||
public sealed record SpineVerificationResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether the spine is valid.
|
||||
/// </summary>
|
||||
public required bool IsValid { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The expected proof bundle ID (from the statement).
|
||||
/// </summary>
|
||||
public required ProofBundleId ExpectedBundleId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The actual proof bundle ID (recomputed).
|
||||
/// </summary>
|
||||
public required ProofBundleId ActualBundleId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Individual verification checks performed.
|
||||
/// </summary>
|
||||
public IReadOnlyList<SpineVerificationCheck> Checks { get; init; } = [];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A single verification check in spine verification.
|
||||
/// </summary>
|
||||
public sealed record SpineVerificationCheck
|
||||
{
|
||||
/// <summary>
|
||||
/// Name of the check.
|
||||
/// </summary>
|
||||
public required string CheckName { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether the check passed.
|
||||
/// </summary>
|
||||
public required bool Passed { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional details about the check.
|
||||
/// </summary>
|
||||
public string? Details { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,95 @@
|
||||
using System.Collections.Generic;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Builders;
|
||||
|
||||
/// <summary>
|
||||
/// Represents a subject (artifact) for proof chain statements.
|
||||
/// </summary>
|
||||
public sealed record ProofSubject
|
||||
{
|
||||
/// <summary>
|
||||
/// The name or identifier of the subject (e.g., image reference, PURL).
|
||||
/// </summary>
|
||||
public required string Name { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Digests of the subject in algorithm:hex format.
|
||||
/// </summary>
|
||||
public required IReadOnlyDictionary<string, string> Digest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Converts this ProofSubject to an in-toto Subject.
|
||||
/// </summary>
|
||||
public Subject ToSubject() => new()
|
||||
{
|
||||
Name = Name,
|
||||
Digest = Digest
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Factory for building in-toto statements for proof chain predicates.
|
||||
/// </summary>
|
||||
public interface IStatementBuilder
|
||||
{
|
||||
/// <summary>
|
||||
/// Build an Evidence statement for signing.
|
||||
/// </summary>
|
||||
/// <param name="subject">The artifact subject this evidence relates to.</param>
|
||||
/// <param name="predicate">The evidence payload.</param>
|
||||
/// <returns>An EvidenceStatement ready for signing.</returns>
|
||||
EvidenceStatement BuildEvidenceStatement(
|
||||
ProofSubject subject,
|
||||
EvidencePayload predicate);
|
||||
|
||||
/// <summary>
|
||||
/// Build a Reasoning statement for signing.
|
||||
/// </summary>
|
||||
/// <param name="subject">The artifact subject this reasoning relates to.</param>
|
||||
/// <param name="predicate">The reasoning payload.</param>
|
||||
/// <returns>A ReasoningStatement ready for signing.</returns>
|
||||
ReasoningStatement BuildReasoningStatement(
|
||||
ProofSubject subject,
|
||||
ReasoningPayload predicate);
|
||||
|
||||
/// <summary>
|
||||
/// Build a VEX Verdict statement for signing.
|
||||
/// </summary>
|
||||
/// <param name="subject">The artifact subject this verdict relates to.</param>
|
||||
/// <param name="predicate">The VEX verdict payload.</param>
|
||||
/// <returns>A VexVerdictStatement ready for signing.</returns>
|
||||
VexVerdictStatement BuildVexVerdictStatement(
|
||||
ProofSubject subject,
|
||||
VexVerdictPayload predicate);
|
||||
|
||||
/// <summary>
|
||||
/// Build a Proof Spine statement for signing.
|
||||
/// </summary>
|
||||
/// <param name="subject">The artifact subject this proof spine covers.</param>
|
||||
/// <param name="predicate">The proof spine payload.</param>
|
||||
/// <returns>A ProofSpineStatement ready for signing.</returns>
|
||||
ProofSpineStatement BuildProofSpineStatement(
|
||||
ProofSubject subject,
|
||||
ProofSpinePayload predicate);
|
||||
|
||||
/// <summary>
|
||||
/// Build a Verdict Receipt statement for signing.
|
||||
/// </summary>
|
||||
/// <param name="subject">The artifact subject this verdict receipt relates to.</param>
|
||||
/// <param name="predicate">The verdict receipt payload.</param>
|
||||
/// <returns>A VerdictReceiptStatement ready for signing.</returns>
|
||||
VerdictReceiptStatement BuildVerdictReceiptStatement(
|
||||
ProofSubject subject,
|
||||
VerdictReceiptPayload predicate);
|
||||
|
||||
/// <summary>
|
||||
/// Build an SBOM Linkage statement for signing.
|
||||
/// </summary>
|
||||
/// <param name="subjects">The artifact subjects covered by the SBOM.</param>
|
||||
/// <param name="predicate">The SBOM linkage payload.</param>
|
||||
/// <returns>An SbomLinkageStatement ready for signing.</returns>
|
||||
SbomLinkageStatement BuildSbomLinkageStatement(
|
||||
IReadOnlyList<ProofSubject> subjects,
|
||||
SbomLinkagePayload predicate);
|
||||
}
|
||||
@@ -0,0 +1,106 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Builders;
|
||||
|
||||
/// <summary>
|
||||
/// Default implementation of IStatementBuilder.
|
||||
/// </summary>
|
||||
public sealed class StatementBuilder : IStatementBuilder
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public EvidenceStatement BuildEvidenceStatement(
|
||||
ProofSubject subject,
|
||||
EvidencePayload predicate)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(subject);
|
||||
ArgumentNullException.ThrowIfNull(predicate);
|
||||
|
||||
return new EvidenceStatement
|
||||
{
|
||||
Subject = [subject.ToSubject()],
|
||||
Predicate = predicate
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public ReasoningStatement BuildReasoningStatement(
|
||||
ProofSubject subject,
|
||||
ReasoningPayload predicate)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(subject);
|
||||
ArgumentNullException.ThrowIfNull(predicate);
|
||||
|
||||
return new ReasoningStatement
|
||||
{
|
||||
Subject = [subject.ToSubject()],
|
||||
Predicate = predicate
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public VexVerdictStatement BuildVexVerdictStatement(
|
||||
ProofSubject subject,
|
||||
VexVerdictPayload predicate)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(subject);
|
||||
ArgumentNullException.ThrowIfNull(predicate);
|
||||
|
||||
return new VexVerdictStatement
|
||||
{
|
||||
Subject = [subject.ToSubject()],
|
||||
Predicate = predicate
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public ProofSpineStatement BuildProofSpineStatement(
|
||||
ProofSubject subject,
|
||||
ProofSpinePayload predicate)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(subject);
|
||||
ArgumentNullException.ThrowIfNull(predicate);
|
||||
|
||||
return new ProofSpineStatement
|
||||
{
|
||||
Subject = [subject.ToSubject()],
|
||||
Predicate = predicate
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public VerdictReceiptStatement BuildVerdictReceiptStatement(
|
||||
ProofSubject subject,
|
||||
VerdictReceiptPayload predicate)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(subject);
|
||||
ArgumentNullException.ThrowIfNull(predicate);
|
||||
|
||||
return new VerdictReceiptStatement
|
||||
{
|
||||
Subject = [subject.ToSubject()],
|
||||
Predicate = predicate
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public SbomLinkageStatement BuildSbomLinkageStatement(
|
||||
IReadOnlyList<ProofSubject> subjects,
|
||||
SbomLinkagePayload predicate)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(subjects);
|
||||
ArgumentNullException.ThrowIfNull(predicate);
|
||||
|
||||
if (subjects.Count == 0)
|
||||
{
|
||||
throw new ArgumentException("At least one subject is required.", nameof(subjects));
|
||||
}
|
||||
|
||||
return new SbomLinkageStatement
|
||||
{
|
||||
Subject = subjects.Select(s => s.ToSubject()).ToList(),
|
||||
Predicate = predicate
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,276 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Graph;
|
||||
|
||||
/// <summary>
|
||||
/// Manages the proof-of-integrity graph that tracks relationships
|
||||
/// between artifacts, SBOMs, attestations, and containers.
|
||||
/// </summary>
|
||||
public interface IProofGraphService
|
||||
{
|
||||
/// <summary>
|
||||
/// Add a node to the proof graph.
|
||||
/// </summary>
|
||||
/// <param name="type">The type of node to add.</param>
|
||||
/// <param name="contentDigest">The content digest (content-addressed ID).</param>
|
||||
/// <param name="metadata">Optional metadata for the node.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The created node.</returns>
|
||||
Task<ProofGraphNode> AddNodeAsync(
|
||||
ProofGraphNodeType type,
|
||||
string contentDigest,
|
||||
IReadOnlyDictionary<string, object>? metadata = null,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Add an edge between two nodes.
|
||||
/// </summary>
|
||||
/// <param name="sourceId">The source node ID.</param>
|
||||
/// <param name="targetId">The target node ID.</param>
|
||||
/// <param name="edgeType">The type of edge.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The created edge.</returns>
|
||||
Task<ProofGraphEdge> AddEdgeAsync(
|
||||
string sourceId,
|
||||
string targetId,
|
||||
ProofGraphEdgeType edgeType,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get a node by its ID.
|
||||
/// </summary>
|
||||
/// <param name="nodeId">The node ID to retrieve.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The node if found, null otherwise.</returns>
|
||||
Task<ProofGraphNode?> GetNodeAsync(
|
||||
string nodeId,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Query the graph for a path from source to target.
|
||||
/// </summary>
|
||||
/// <param name="sourceId">The source node ID.</param>
|
||||
/// <param name="targetId">The target node ID.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The path if found, null otherwise.</returns>
|
||||
Task<ProofGraphPath?> FindPathAsync(
|
||||
string sourceId,
|
||||
string targetId,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get all nodes related to an artifact within a given depth.
|
||||
/// </summary>
|
||||
/// <param name="artifactId">The artifact ID to start from.</param>
|
||||
/// <param name="maxDepth">Maximum traversal depth.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The subgraph containing related nodes.</returns>
|
||||
Task<ProofGraphSubgraph> GetArtifactSubgraphAsync(
|
||||
string artifactId,
|
||||
int maxDepth = 5,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get all outgoing edges from a node.
|
||||
/// </summary>
|
||||
/// <param name="nodeId">The node ID.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The outgoing edges.</returns>
|
||||
Task<IReadOnlyList<ProofGraphEdge>> GetOutgoingEdgesAsync(
|
||||
string nodeId,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get all incoming edges to a node.
|
||||
/// </summary>
|
||||
/// <param name="nodeId">The node ID.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The incoming edges.</returns>
|
||||
Task<IReadOnlyList<ProofGraphEdge>> GetIncomingEdgesAsync(
|
||||
string nodeId,
|
||||
CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Types of nodes in the proof graph.
|
||||
/// </summary>
|
||||
public enum ProofGraphNodeType
|
||||
{
|
||||
/// <summary>Container image, binary, Helm chart.</summary>
|
||||
Artifact,
|
||||
|
||||
/// <summary>SBOM document by sbomId.</summary>
|
||||
SbomDocument,
|
||||
|
||||
/// <summary>In-toto statement by statement hash.</summary>
|
||||
InTotoStatement,
|
||||
|
||||
/// <summary>DSSE envelope by envelope hash.</summary>
|
||||
DsseEnvelope,
|
||||
|
||||
/// <summary>Rekor transparency log entry.</summary>
|
||||
RekorEntry,
|
||||
|
||||
/// <summary>VEX statement by VEX hash.</summary>
|
||||
VexStatement,
|
||||
|
||||
/// <summary>Component/subject from SBOM.</summary>
|
||||
Subject,
|
||||
|
||||
/// <summary>Signing key.</summary>
|
||||
SigningKey,
|
||||
|
||||
/// <summary>Trust anchor (root of trust).</summary>
|
||||
TrustAnchor
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Types of edges in the proof graph.
|
||||
/// </summary>
|
||||
public enum ProofGraphEdgeType
|
||||
{
|
||||
/// <summary>Artifact → SbomDocument: artifact is described by SBOM.</summary>
|
||||
DescribedBy,
|
||||
|
||||
/// <summary>SbomDocument → InTotoStatement: SBOM is attested by statement.</summary>
|
||||
AttestedBy,
|
||||
|
||||
/// <summary>InTotoStatement → DsseEnvelope: statement is wrapped in envelope.</summary>
|
||||
WrappedBy,
|
||||
|
||||
/// <summary>DsseEnvelope → RekorEntry: envelope is logged in Rekor.</summary>
|
||||
LoggedIn,
|
||||
|
||||
/// <summary>Artifact/Subject → VexStatement: has VEX statement.</summary>
|
||||
HasVex,
|
||||
|
||||
/// <summary>InTotoStatement → Subject: statement contains subject.</summary>
|
||||
ContainsSubject,
|
||||
|
||||
/// <summary>Build → SBOM: build produces SBOM.</summary>
|
||||
Produces,
|
||||
|
||||
/// <summary>VEX → Component: VEX affects component.</summary>
|
||||
Affects,
|
||||
|
||||
/// <summary>Envelope → Key: envelope is signed by key.</summary>
|
||||
SignedBy,
|
||||
|
||||
/// <summary>Envelope → Rekor: envelope is recorded at log index.</summary>
|
||||
RecordedAt,
|
||||
|
||||
/// <summary>Key → TrustAnchor: key chains to trust anchor.</summary>
|
||||
ChainsTo
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A node in the proof graph.
|
||||
/// </summary>
|
||||
public sealed record ProofGraphNode
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier for this node.
|
||||
/// </summary>
|
||||
public required string Id { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The type of this node.
|
||||
/// </summary>
|
||||
public required ProofGraphNodeType Type { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Content digest (content-addressed identifier).
|
||||
/// </summary>
|
||||
public required string ContentDigest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When this node was created.
|
||||
/// </summary>
|
||||
public required DateTimeOffset CreatedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional metadata for the node.
|
||||
/// </summary>
|
||||
public IReadOnlyDictionary<string, object>? Metadata { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// An edge in the proof graph.
|
||||
/// </summary>
|
||||
public sealed record ProofGraphEdge
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier for this edge.
|
||||
/// </summary>
|
||||
public required string Id { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Source node ID.
|
||||
/// </summary>
|
||||
public required string SourceId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Target node ID.
|
||||
/// </summary>
|
||||
public required string TargetId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The type of this edge.
|
||||
/// </summary>
|
||||
public required ProofGraphEdgeType Type { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When this edge was created.
|
||||
/// </summary>
|
||||
public required DateTimeOffset CreatedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A path through the proof graph.
|
||||
/// </summary>
|
||||
public sealed record ProofGraphPath
|
||||
{
|
||||
/// <summary>
|
||||
/// Nodes in the path, in order.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<ProofGraphNode> Nodes { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Edges connecting the nodes.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<ProofGraphEdge> Edges { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Length of the path (number of edges).
|
||||
/// </summary>
|
||||
public int Length => Edges.Count;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A subgraph of the proof graph.
|
||||
/// </summary>
|
||||
public sealed record ProofGraphSubgraph
|
||||
{
|
||||
/// <summary>
|
||||
/// The root node ID that was queried.
|
||||
/// </summary>
|
||||
public required string RootNodeId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// All nodes in the subgraph.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<ProofGraphNode> Nodes { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// All edges in the subgraph.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<ProofGraphEdge> Edges { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Maximum depth that was traversed.
|
||||
/// </summary>
|
||||
public required int MaxDepth { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,291 @@
|
||||
using System;
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Graph;
|
||||
|
||||
/// <summary>
|
||||
/// In-memory implementation of IProofGraphService for testing and development.
|
||||
/// Not suitable for production use with large graphs.
|
||||
/// </summary>
|
||||
public sealed class InMemoryProofGraphService : IProofGraphService
|
||||
{
|
||||
private readonly ConcurrentDictionary<string, ProofGraphNode> _nodes = new();
|
||||
private readonly ConcurrentDictionary<string, ProofGraphEdge> _edges = new();
|
||||
private readonly ConcurrentDictionary<string, List<string>> _outgoingEdges = new();
|
||||
private readonly ConcurrentDictionary<string, List<string>> _incomingEdges = new();
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public InMemoryProofGraphService(TimeProvider? timeProvider = null)
|
||||
{
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<ProofGraphNode> AddNodeAsync(
|
||||
ProofGraphNodeType type,
|
||||
string contentDigest,
|
||||
IReadOnlyDictionary<string, object>? metadata = null,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(contentDigest);
|
||||
|
||||
var nodeId = $"{type.ToString().ToLowerInvariant()}:{contentDigest}";
|
||||
|
||||
var node = new ProofGraphNode
|
||||
{
|
||||
Id = nodeId,
|
||||
Type = type,
|
||||
ContentDigest = contentDigest,
|
||||
CreatedAt = _timeProvider.GetUtcNow(),
|
||||
Metadata = metadata
|
||||
};
|
||||
|
||||
if (!_nodes.TryAdd(nodeId, node))
|
||||
{
|
||||
// Node already exists, return the existing one
|
||||
node = _nodes[nodeId];
|
||||
}
|
||||
|
||||
return Task.FromResult(node);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<ProofGraphEdge> AddEdgeAsync(
|
||||
string sourceId,
|
||||
string targetId,
|
||||
ProofGraphEdgeType edgeType,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(sourceId);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(targetId);
|
||||
|
||||
if (!_nodes.ContainsKey(sourceId))
|
||||
{
|
||||
throw new ArgumentException($"Source node '{sourceId}' does not exist.", nameof(sourceId));
|
||||
}
|
||||
|
||||
if (!_nodes.ContainsKey(targetId))
|
||||
{
|
||||
throw new ArgumentException($"Target node '{targetId}' does not exist.", nameof(targetId));
|
||||
}
|
||||
|
||||
var edgeId = $"{sourceId}->{edgeType}->{targetId}";
|
||||
|
||||
var edge = new ProofGraphEdge
|
||||
{
|
||||
Id = edgeId,
|
||||
SourceId = sourceId,
|
||||
TargetId = targetId,
|
||||
Type = edgeType,
|
||||
CreatedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
|
||||
if (_edges.TryAdd(edgeId, edge))
|
||||
{
|
||||
// Add to adjacency lists
|
||||
_outgoingEdges.AddOrUpdate(
|
||||
sourceId,
|
||||
_ => [edgeId],
|
||||
(_, list) => { lock (list) { list.Add(edgeId); } return list; });
|
||||
|
||||
_incomingEdges.AddOrUpdate(
|
||||
targetId,
|
||||
_ => [edgeId],
|
||||
(_, list) => { lock (list) { list.Add(edgeId); } return list; });
|
||||
}
|
||||
else
|
||||
{
|
||||
// Edge already exists
|
||||
edge = _edges[edgeId];
|
||||
}
|
||||
|
||||
return Task.FromResult(edge);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<ProofGraphNode?> GetNodeAsync(string nodeId, CancellationToken ct = default)
|
||||
{
|
||||
_nodes.TryGetValue(nodeId, out var node);
|
||||
return Task.FromResult(node);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<ProofGraphPath?> FindPathAsync(
|
||||
string sourceId,
|
||||
string targetId,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(sourceId);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(targetId);
|
||||
|
||||
if (!_nodes.ContainsKey(sourceId) || !_nodes.ContainsKey(targetId))
|
||||
{
|
||||
return Task.FromResult<ProofGraphPath?>(null);
|
||||
}
|
||||
|
||||
// BFS to find shortest path
|
||||
var visited = new HashSet<string>();
|
||||
var queue = new Queue<(string nodeId, List<string> path)>();
|
||||
queue.Enqueue((sourceId, [sourceId]));
|
||||
visited.Add(sourceId);
|
||||
|
||||
while (queue.Count > 0)
|
||||
{
|
||||
var (currentId, path) = queue.Dequeue();
|
||||
|
||||
if (currentId == targetId)
|
||||
{
|
||||
// Found path, reconstruct nodes and edges
|
||||
var nodes = path.Select(id => _nodes[id]).ToList();
|
||||
var edges = new List<ProofGraphEdge>();
|
||||
|
||||
for (int i = 0; i < path.Count - 1; i++)
|
||||
{
|
||||
var edgeIds = _outgoingEdges.GetValueOrDefault(path[i], []);
|
||||
var edge = edgeIds
|
||||
.Select(eid => _edges[eid])
|
||||
.FirstOrDefault(e => e.TargetId == path[i + 1]);
|
||||
|
||||
if (edge != null)
|
||||
{
|
||||
edges.Add(edge);
|
||||
}
|
||||
}
|
||||
|
||||
return Task.FromResult<ProofGraphPath?>(new ProofGraphPath
|
||||
{
|
||||
Nodes = nodes,
|
||||
Edges = edges
|
||||
});
|
||||
}
|
||||
|
||||
var outgoing = _outgoingEdges.GetValueOrDefault(currentId, []);
|
||||
foreach (var edgeId in outgoing)
|
||||
{
|
||||
var edge = _edges[edgeId];
|
||||
if (!visited.Contains(edge.TargetId))
|
||||
{
|
||||
visited.Add(edge.TargetId);
|
||||
var newPath = new List<string>(path) { edge.TargetId };
|
||||
queue.Enqueue((edge.TargetId, newPath));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Task.FromResult<ProofGraphPath?>(null);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<ProofGraphSubgraph> GetArtifactSubgraphAsync(
|
||||
string artifactId,
|
||||
int maxDepth = 5,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(artifactId);
|
||||
|
||||
var nodes = new Dictionary<string, ProofGraphNode>();
|
||||
var edges = new List<ProofGraphEdge>();
|
||||
var visited = new HashSet<string>();
|
||||
var queue = new Queue<(string nodeId, int depth)>();
|
||||
|
||||
if (_nodes.TryGetValue(artifactId, out var rootNode))
|
||||
{
|
||||
nodes[artifactId] = rootNode;
|
||||
queue.Enqueue((artifactId, 0));
|
||||
visited.Add(artifactId);
|
||||
}
|
||||
|
||||
while (queue.Count > 0)
|
||||
{
|
||||
var (currentId, depth) = queue.Dequeue();
|
||||
|
||||
if (depth >= maxDepth)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Process outgoing edges
|
||||
var outgoing = _outgoingEdges.GetValueOrDefault(currentId, []);
|
||||
foreach (var edgeId in outgoing)
|
||||
{
|
||||
var edge = _edges[edgeId];
|
||||
edges.Add(edge);
|
||||
|
||||
if (!visited.Contains(edge.TargetId) && _nodes.TryGetValue(edge.TargetId, out var targetNode))
|
||||
{
|
||||
visited.Add(edge.TargetId);
|
||||
nodes[edge.TargetId] = targetNode;
|
||||
queue.Enqueue((edge.TargetId, depth + 1));
|
||||
}
|
||||
}
|
||||
|
||||
// Process incoming edges
|
||||
var incoming = _incomingEdges.GetValueOrDefault(currentId, []);
|
||||
foreach (var edgeId in incoming)
|
||||
{
|
||||
var edge = _edges[edgeId];
|
||||
edges.Add(edge);
|
||||
|
||||
if (!visited.Contains(edge.SourceId) && _nodes.TryGetValue(edge.SourceId, out var sourceNode))
|
||||
{
|
||||
visited.Add(edge.SourceId);
|
||||
nodes[edge.SourceId] = sourceNode;
|
||||
queue.Enqueue((edge.SourceId, depth + 1));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Task.FromResult(new ProofGraphSubgraph
|
||||
{
|
||||
RootNodeId = artifactId,
|
||||
Nodes = nodes.Values.ToList(),
|
||||
Edges = edges.Distinct().ToList(),
|
||||
MaxDepth = maxDepth
|
||||
});
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<IReadOnlyList<ProofGraphEdge>> GetOutgoingEdgesAsync(
|
||||
string nodeId,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var edgeIds = _outgoingEdges.GetValueOrDefault(nodeId, []);
|
||||
var edges = edgeIds.Select(id => _edges[id]).ToList();
|
||||
return Task.FromResult<IReadOnlyList<ProofGraphEdge>>(edges);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<IReadOnlyList<ProofGraphEdge>> GetIncomingEdgesAsync(
|
||||
string nodeId,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var edgeIds = _incomingEdges.GetValueOrDefault(nodeId, []);
|
||||
var edges = edgeIds.Select(id => _edges[id]).ToList();
|
||||
return Task.FromResult<IReadOnlyList<ProofGraphEdge>>(edges);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Clears all nodes and edges (for testing).
|
||||
/// </summary>
|
||||
public void Clear()
|
||||
{
|
||||
_nodes.Clear();
|
||||
_edges.Clear();
|
||||
_outgoingEdges.Clear();
|
||||
_incomingEdges.Clear();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the total number of nodes.
|
||||
/// </summary>
|
||||
public int NodeCount => _nodes.Count;
|
||||
|
||||
/// <summary>
|
||||
/// Gets the total number of edges.
|
||||
/// </summary>
|
||||
public int EdgeCount => _edges.Count;
|
||||
}
|
||||
@@ -0,0 +1,251 @@
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Nodes;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Json;
|
||||
|
||||
/// <summary>
|
||||
/// JSON Schema validation result.
|
||||
/// </summary>
|
||||
public sealed record SchemaValidationResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether the JSON is valid against the schema.
|
||||
/// </summary>
|
||||
public required bool IsValid { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Validation errors if any.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<SchemaValidationError> Errors { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Create a successful validation result.
|
||||
/// </summary>
|
||||
public static SchemaValidationResult Success() => new()
|
||||
{
|
||||
IsValid = true,
|
||||
Errors = []
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Create a failed validation result.
|
||||
/// </summary>
|
||||
public static SchemaValidationResult Failure(params SchemaValidationError[] errors) => new()
|
||||
{
|
||||
IsValid = false,
|
||||
Errors = errors
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A single schema validation error.
|
||||
/// </summary>
|
||||
public sealed record SchemaValidationError
|
||||
{
|
||||
/// <summary>
|
||||
/// JSON pointer to the error location.
|
||||
/// </summary>
|
||||
public required string Path { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Error message.
|
||||
/// </summary>
|
||||
public required string Message { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Schema keyword that failed (e.g., "required", "type").
|
||||
/// </summary>
|
||||
public string? Keyword { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Service for validating JSON against schemas.
|
||||
/// </summary>
|
||||
public interface IJsonSchemaValidator
|
||||
{
|
||||
/// <summary>
|
||||
/// Validate JSON against a schema by predicate type.
|
||||
/// </summary>
|
||||
/// <param name="json">The JSON to validate.</param>
|
||||
/// <param name="predicateType">The predicate type (e.g., "evidence.stella/v1").</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The validation result.</returns>
|
||||
Task<SchemaValidationResult> ValidatePredicateAsync(
|
||||
string json,
|
||||
string predicateType,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Validate a statement against its predicate type schema.
|
||||
/// </summary>
|
||||
/// <typeparam name="T">The statement type.</typeparam>
|
||||
/// <param name="statement">The statement to validate.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The validation result.</returns>
|
||||
Task<SchemaValidationResult> ValidateStatementAsync<T>(
|
||||
T statement,
|
||||
CancellationToken ct = default) where T : Statements.InTotoStatement;
|
||||
|
||||
/// <summary>
|
||||
/// Check if a predicate type has a registered schema.
|
||||
/// </summary>
|
||||
/// <param name="predicateType">The predicate type.</param>
|
||||
/// <returns>True if a schema is registered.</returns>
|
||||
bool HasSchema(string predicateType);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Default implementation of JSON Schema validation.
|
||||
/// </summary>
|
||||
public sealed class PredicateSchemaValidator : IJsonSchemaValidator
|
||||
{
|
||||
private static readonly Dictionary<string, JsonDocument> _schemas = new();
|
||||
|
||||
/// <summary>
|
||||
/// Static initializer to load embedded schemas.
|
||||
/// </summary>
|
||||
static PredicateSchemaValidator()
|
||||
{
|
||||
// TODO: Load schemas from embedded resources
|
||||
// These would be in src/Attestor/__Libraries/StellaOps.Attestor.ProofChain/Schemas/
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<SchemaValidationResult> ValidatePredicateAsync(
|
||||
string json,
|
||||
string predicateType,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
if (!HasSchema(predicateType))
|
||||
{
|
||||
return SchemaValidationResult.Failure(new SchemaValidationError
|
||||
{
|
||||
Path = "/",
|
||||
Message = $"No schema registered for predicate type: {predicateType}",
|
||||
Keyword = "predicateType"
|
||||
});
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var document = JsonDocument.Parse(json);
|
||||
|
||||
// TODO: Implement actual JSON Schema validation
|
||||
// For now, do basic structural checks
|
||||
|
||||
var root = document.RootElement;
|
||||
|
||||
var errors = new List<SchemaValidationError>();
|
||||
|
||||
// Validate required fields based on predicate type
|
||||
switch (predicateType)
|
||||
{
|
||||
case "evidence.stella/v1":
|
||||
errors.AddRange(ValidateEvidencePredicate(root));
|
||||
break;
|
||||
case "reasoning.stella/v1":
|
||||
errors.AddRange(ValidateReasoningPredicate(root));
|
||||
break;
|
||||
case "cdx-vex.stella/v1":
|
||||
errors.AddRange(ValidateVexPredicate(root));
|
||||
break;
|
||||
case "proofspine.stella/v1":
|
||||
errors.AddRange(ValidateProofSpinePredicate(root));
|
||||
break;
|
||||
case "verdict.stella/v1":
|
||||
errors.AddRange(ValidateVerdictPredicate(root));
|
||||
break;
|
||||
}
|
||||
|
||||
return errors.Count > 0
|
||||
? SchemaValidationResult.Failure(errors.ToArray())
|
||||
: SchemaValidationResult.Success();
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
return SchemaValidationResult.Failure(new SchemaValidationError
|
||||
{
|
||||
Path = "/",
|
||||
Message = $"Invalid JSON: {ex.Message}",
|
||||
Keyword = "format"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<SchemaValidationResult> ValidateStatementAsync<T>(
|
||||
T statement,
|
||||
CancellationToken ct = default) where T : Statements.InTotoStatement
|
||||
{
|
||||
var json = System.Text.Json.JsonSerializer.Serialize(statement);
|
||||
return await ValidatePredicateAsync(json, statement.PredicateType, ct);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public bool HasSchema(string predicateType)
|
||||
{
|
||||
return predicateType switch
|
||||
{
|
||||
"evidence.stella/v1" => true,
|
||||
"reasoning.stella/v1" => true,
|
||||
"cdx-vex.stella/v1" => true,
|
||||
"proofspine.stella/v1" => true,
|
||||
"verdict.stella/v1" => true,
|
||||
"https://stella-ops.org/predicates/sbom-linkage/v1" => true,
|
||||
_ => false
|
||||
};
|
||||
}
|
||||
|
||||
private static IEnumerable<SchemaValidationError> ValidateEvidencePredicate(JsonElement root)
|
||||
{
|
||||
// Required: scanToolName, scanToolVersion, timestamp
|
||||
if (!root.TryGetProperty("scanToolName", out _))
|
||||
yield return new() { Path = "/scanToolName", Message = "Required property missing", Keyword = "required" };
|
||||
if (!root.TryGetProperty("scanToolVersion", out _))
|
||||
yield return new() { Path = "/scanToolVersion", Message = "Required property missing", Keyword = "required" };
|
||||
if (!root.TryGetProperty("timestamp", out _))
|
||||
yield return new() { Path = "/timestamp", Message = "Required property missing", Keyword = "required" };
|
||||
}
|
||||
|
||||
private static IEnumerable<SchemaValidationError> ValidateReasoningPredicate(JsonElement root)
|
||||
{
|
||||
// Required: policyId, policyVersion, evaluatedAt
|
||||
if (!root.TryGetProperty("policyId", out _))
|
||||
yield return new() { Path = "/policyId", Message = "Required property missing", Keyword = "required" };
|
||||
if (!root.TryGetProperty("policyVersion", out _))
|
||||
yield return new() { Path = "/policyVersion", Message = "Required property missing", Keyword = "required" };
|
||||
if (!root.TryGetProperty("evaluatedAt", out _))
|
||||
yield return new() { Path = "/evaluatedAt", Message = "Required property missing", Keyword = "required" };
|
||||
}
|
||||
|
||||
private static IEnumerable<SchemaValidationError> ValidateVexPredicate(JsonElement root)
|
||||
{
|
||||
// Required: vulnerability, status
|
||||
if (!root.TryGetProperty("vulnerability", out _))
|
||||
yield return new() { Path = "/vulnerability", Message = "Required property missing", Keyword = "required" };
|
||||
if (!root.TryGetProperty("status", out _))
|
||||
yield return new() { Path = "/status", Message = "Required property missing", Keyword = "required" };
|
||||
}
|
||||
|
||||
private static IEnumerable<SchemaValidationError> ValidateProofSpinePredicate(JsonElement root)
|
||||
{
|
||||
// Required: sbomEntryId, evidenceIds, proofBundleId
|
||||
if (!root.TryGetProperty("sbomEntryId", out _))
|
||||
yield return new() { Path = "/sbomEntryId", Message = "Required property missing", Keyword = "required" };
|
||||
if (!root.TryGetProperty("evidenceIds", out _))
|
||||
yield return new() { Path = "/evidenceIds", Message = "Required property missing", Keyword = "required" };
|
||||
if (!root.TryGetProperty("proofBundleId", out _))
|
||||
yield return new() { Path = "/proofBundleId", Message = "Required property missing", Keyword = "required" };
|
||||
}
|
||||
|
||||
private static IEnumerable<SchemaValidationError> ValidateVerdictPredicate(JsonElement root)
|
||||
{
|
||||
// Required: proofBundleId, result, verifiedAt
|
||||
if (!root.TryGetProperty("proofBundleId", out _))
|
||||
yield return new() { Path = "/proofBundleId", Message = "Required property missing", Keyword = "required" };
|
||||
if (!root.TryGetProperty("result", out _))
|
||||
yield return new() { Path = "/result", Message = "Required property missing", Keyword = "required" };
|
||||
if (!root.TryGetProperty("verifiedAt", out _))
|
||||
yield return new() { Path = "/verifiedAt", Message = "Required property missing", Keyword = "required" };
|
||||
}
|
||||
}
|
||||
@@ -4,9 +4,24 @@ using System.Security.Cryptography;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Merkle;
|
||||
|
||||
/// <summary>
|
||||
/// Deterministic merkle tree builder using SHA-256.
|
||||
/// Follows proof chain construction algorithm:
|
||||
/// - Lexicographic sorting of evidence IDs
|
||||
/// - Padding to power of 2 by duplicating last leaf
|
||||
/// - Left || Right concatenation for internal nodes
|
||||
/// </summary>
|
||||
public sealed class DeterministicMerkleTreeBuilder : IMerkleTreeBuilder
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public byte[] ComputeMerkleRoot(IReadOnlyList<ReadOnlyMemory<byte>> leafValues)
|
||||
{
|
||||
var tree = BuildTree(leafValues);
|
||||
return tree.Root;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public MerkleTreeWithProofs BuildTree(IReadOnlyList<ReadOnlyMemory<byte>> leafValues)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(leafValues);
|
||||
|
||||
@@ -15,36 +30,123 @@ public sealed class DeterministicMerkleTreeBuilder : IMerkleTreeBuilder
|
||||
throw new ArgumentException("At least one leaf is required.", nameof(leafValues));
|
||||
}
|
||||
|
||||
var hashes = new List<byte[]>(PadToPowerOfTwo(leafValues.Count));
|
||||
var levels = new List<IReadOnlyList<byte[]>>();
|
||||
|
||||
// Level 0: Hash all leaf values
|
||||
var leafHashes = new List<byte[]>(PadToPowerOfTwo(leafValues.Count));
|
||||
for (var i = 0; i < leafValues.Count; i++)
|
||||
{
|
||||
hashes.Add(SHA256.HashData(leafValues[i].Span));
|
||||
leafHashes.Add(SHA256.HashData(leafValues[i].Span));
|
||||
}
|
||||
|
||||
// Pad with duplicate of last leaf hash (deterministic).
|
||||
var target = hashes.Capacity;
|
||||
while (hashes.Count < target)
|
||||
// Pad with duplicate of last leaf hash (deterministic)
|
||||
var target = leafHashes.Capacity;
|
||||
while (leafHashes.Count < target)
|
||||
{
|
||||
hashes.Add(hashes[^1]);
|
||||
leafHashes.Add(leafHashes[^1]);
|
||||
}
|
||||
|
||||
return ComputeRootFromLeafHashes(hashes);
|
||||
levels.Add(leafHashes);
|
||||
|
||||
// Build tree bottom-up
|
||||
var currentLevel = leafHashes;
|
||||
while (currentLevel.Count > 1)
|
||||
{
|
||||
var nextLevel = new List<byte[]>(currentLevel.Count / 2);
|
||||
for (var i = 0; i < currentLevel.Count; i += 2)
|
||||
{
|
||||
nextLevel.Add(HashInternal(currentLevel[i], currentLevel[i + 1]));
|
||||
}
|
||||
levels.Add(nextLevel);
|
||||
currentLevel = nextLevel;
|
||||
}
|
||||
|
||||
return new MerkleTreeWithProofs
|
||||
{
|
||||
Root = currentLevel[0],
|
||||
Leaves = leafHashes,
|
||||
Levels = levels
|
||||
};
|
||||
}
|
||||
|
||||
private static byte[] ComputeRootFromLeafHashes(List<byte[]> hashes)
|
||||
/// <inheritdoc />
|
||||
public MerkleProof GenerateProof(MerkleTreeWithProofs tree, int leafIndex)
|
||||
{
|
||||
while (hashes.Count > 1)
|
||||
{
|
||||
var next = new List<byte[]>(hashes.Count / 2);
|
||||
for (var i = 0; i < hashes.Count; i += 2)
|
||||
{
|
||||
next.Add(HashInternal(hashes[i], hashes[i + 1]));
|
||||
}
|
||||
ArgumentNullException.ThrowIfNull(tree);
|
||||
|
||||
hashes = next;
|
||||
if (leafIndex < 0 || leafIndex >= tree.Leaves.Count)
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(nameof(leafIndex),
|
||||
$"Leaf index must be between 0 and {tree.Leaves.Count - 1}.");
|
||||
}
|
||||
|
||||
return hashes[0];
|
||||
var steps = new List<MerkleProofStep>();
|
||||
var currentIndex = leafIndex;
|
||||
|
||||
for (var level = 0; level < tree.Levels.Count - 1; level++)
|
||||
{
|
||||
var currentLevel = tree.Levels[level];
|
||||
|
||||
// Find sibling
|
||||
int siblingIndex;
|
||||
bool isRight;
|
||||
|
||||
if (currentIndex % 2 == 0)
|
||||
{
|
||||
// Current is left child, sibling is right
|
||||
siblingIndex = currentIndex + 1;
|
||||
isRight = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
// Current is right child, sibling is left
|
||||
siblingIndex = currentIndex - 1;
|
||||
isRight = false;
|
||||
}
|
||||
|
||||
steps.Add(new MerkleProofStep
|
||||
{
|
||||
SiblingHash = currentLevel[siblingIndex],
|
||||
IsRight = isRight
|
||||
});
|
||||
|
||||
// Move to parent index
|
||||
currentIndex /= 2;
|
||||
}
|
||||
|
||||
return new MerkleProof
|
||||
{
|
||||
LeafIndex = leafIndex,
|
||||
LeafHash = tree.Leaves[leafIndex],
|
||||
Steps = steps
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public bool VerifyProof(MerkleProof proof, ReadOnlySpan<byte> leafValue, ReadOnlySpan<byte> expectedRoot)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(proof);
|
||||
|
||||
// Hash the leaf value
|
||||
var currentHash = SHA256.HashData(leafValue);
|
||||
|
||||
// Walk up the tree
|
||||
foreach (var step in proof.Steps)
|
||||
{
|
||||
if (step.IsRight)
|
||||
{
|
||||
// Sibling is on the right: H(current || sibling)
|
||||
currentHash = HashInternal(currentHash, step.SiblingHash);
|
||||
}
|
||||
else
|
||||
{
|
||||
// Sibling is on the left: H(sibling || current)
|
||||
currentHash = HashInternal(step.SiblingHash, currentHash);
|
||||
}
|
||||
}
|
||||
|
||||
// Compare with expected root
|
||||
return currentHash.AsSpan().SequenceEqual(expectedRoot);
|
||||
}
|
||||
|
||||
private static int PadToPowerOfTwo(int count)
|
||||
@@ -66,3 +168,4 @@ public sealed class DeterministicMerkleTreeBuilder : IMerkleTreeBuilder
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
@@ -3,8 +3,103 @@ using System.Collections.Generic;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Merkle;
|
||||
|
||||
/// <summary>
|
||||
/// Builder for deterministic merkle trees used in proof chain construction.
|
||||
/// </summary>
|
||||
public interface IMerkleTreeBuilder
|
||||
{
|
||||
/// <summary>
|
||||
/// Compute the merkle root from leaf values.
|
||||
/// </summary>
|
||||
/// <param name="leafValues">The leaf values to hash.</param>
|
||||
/// <returns>The merkle root hash.</returns>
|
||||
byte[] ComputeMerkleRoot(IReadOnlyList<ReadOnlyMemory<byte>> leafValues);
|
||||
|
||||
/// <summary>
|
||||
/// Build a full merkle tree with proof generation capability.
|
||||
/// </summary>
|
||||
/// <param name="leafValues">The leaf values to hash.</param>
|
||||
/// <returns>A merkle tree with proof generation.</returns>
|
||||
MerkleTreeWithProofs BuildTree(IReadOnlyList<ReadOnlyMemory<byte>> leafValues);
|
||||
|
||||
/// <summary>
|
||||
/// Generate a merkle proof for a specific leaf.
|
||||
/// </summary>
|
||||
/// <param name="tree">The merkle tree.</param>
|
||||
/// <param name="leafIndex">The index of the leaf to prove.</param>
|
||||
/// <returns>The merkle proof.</returns>
|
||||
MerkleProof GenerateProof(MerkleTreeWithProofs tree, int leafIndex);
|
||||
|
||||
/// <summary>
|
||||
/// Verify a merkle proof.
|
||||
/// </summary>
|
||||
/// <param name="proof">The merkle proof.</param>
|
||||
/// <param name="leafValue">The leaf value being proven.</param>
|
||||
/// <param name="expectedRoot">The expected merkle root.</param>
|
||||
/// <returns>True if the proof is valid.</returns>
|
||||
bool VerifyProof(MerkleProof proof, ReadOnlySpan<byte> leafValue, ReadOnlySpan<byte> expectedRoot);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A merkle tree with all internal nodes stored for proof generation.
|
||||
/// </summary>
|
||||
public sealed record MerkleTreeWithProofs
|
||||
{
|
||||
/// <summary>
|
||||
/// The merkle root.
|
||||
/// </summary>
|
||||
public required byte[] Root { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The leaf hashes (level 0).
|
||||
/// </summary>
|
||||
public required IReadOnlyList<byte[]> Leaves { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// All levels of the tree, from leaves (index 0) to root.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<IReadOnlyList<byte[]>> Levels { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The depth of the tree (number of levels - 1).
|
||||
/// </summary>
|
||||
public int Depth => Levels.Count - 1;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A merkle proof for a specific leaf.
|
||||
/// </summary>
|
||||
public sealed record MerkleProof
|
||||
{
|
||||
/// <summary>
|
||||
/// The index of the leaf in the original list.
|
||||
/// </summary>
|
||||
public required int LeafIndex { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The hash of the leaf.
|
||||
/// </summary>
|
||||
public required byte[] LeafHash { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The sibling hashes needed to reconstruct the root, from bottom to top.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<MerkleProofStep> Steps { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A single step in a merkle proof.
|
||||
/// </summary>
|
||||
public sealed record MerkleProofStep
|
||||
{
|
||||
/// <summary>
|
||||
/// The sibling hash at this level.
|
||||
/// </summary>
|
||||
public required byte[] SiblingHash { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether the sibling is on the right (true) or left (false).
|
||||
/// </summary>
|
||||
public required bool IsRight { get; init; }
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,150 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Attestor.ProofChain.Identifiers;
|
||||
using StellaOps.Attestor.ProofChain.Receipts;
|
||||
using StellaOps.Attestor.ProofChain.Signing;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Pipeline;
|
||||
|
||||
/// <summary>
|
||||
/// Orchestrates the full proof chain pipeline from scan to receipt.
|
||||
/// </summary>
|
||||
public interface IProofChainPipeline
|
||||
{
|
||||
/// <summary>
|
||||
/// Execute the full proof chain pipeline.
|
||||
/// </summary>
|
||||
/// <param name="request">The pipeline request.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The pipeline result.</returns>
|
||||
Task<ProofChainResult> ExecuteAsync(
|
||||
ProofChainRequest request,
|
||||
CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request to execute the proof chain pipeline.
|
||||
/// </summary>
|
||||
public sealed record ProofChainRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// The SBOM bytes to process.
|
||||
/// </summary>
|
||||
public required byte[] SbomBytes { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Media type of the SBOM (e.g., "application/vnd.cyclonedx+json").
|
||||
/// </summary>
|
||||
public required string SbomMediaType { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Evidence gathered from scanning.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<EvidencePayload> Evidence { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Policy version used for evaluation.
|
||||
/// </summary>
|
||||
public required string PolicyVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Trust anchor for verification.
|
||||
/// </summary>
|
||||
public required TrustAnchorId TrustAnchorId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether to submit envelopes to Rekor.
|
||||
/// </summary>
|
||||
public bool SubmitToRekor { get; init; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Subject information for the attestations.
|
||||
/// </summary>
|
||||
public required PipelineSubject Subject { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Subject information for the pipeline.
|
||||
/// </summary>
|
||||
public sealed record PipelineSubject
|
||||
{
|
||||
/// <summary>
|
||||
/// Name of the subject (e.g., image reference).
|
||||
/// </summary>
|
||||
public required string Name { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Digests of the subject.
|
||||
/// </summary>
|
||||
public required IReadOnlyDictionary<string, string> Digest { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of the proof chain pipeline.
|
||||
/// </summary>
|
||||
public sealed record ProofChainResult
|
||||
{
|
||||
/// <summary>
|
||||
/// The assembled proof bundle ID.
|
||||
/// </summary>
|
||||
public required ProofBundleId ProofBundleId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// All signed DSSE envelopes produced.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<DsseEnvelope> Envelopes { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The proof spine statement.
|
||||
/// </summary>
|
||||
public required ProofSpineStatement ProofSpine { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Rekor entries if submitted.
|
||||
/// </summary>
|
||||
public IReadOnlyList<RekorEntry>? RekorEntries { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Verification receipt.
|
||||
/// </summary>
|
||||
public required VerificationReceipt Receipt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Graph revision ID for this evaluation.
|
||||
/// </summary>
|
||||
public required GraphRevisionId GraphRevisionId { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A Rekor transparency log entry.
|
||||
/// </summary>
|
||||
public sealed record RekorEntry
|
||||
{
|
||||
/// <summary>
|
||||
/// The log index in Rekor.
|
||||
/// </summary>
|
||||
public required long LogIndex { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The UUID of the entry.
|
||||
/// </summary>
|
||||
public required string Uuid { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The integrated time (when the entry was added).
|
||||
/// </summary>
|
||||
public required DateTimeOffset IntegratedTime { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The log ID (tree hash).
|
||||
/// </summary>
|
||||
public required string LogId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The body of the entry (base64-encoded).
|
||||
/// </summary>
|
||||
public string? Body { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,140 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Attestor.ProofChain.Identifiers;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Receipts;
|
||||
|
||||
/// <summary>
|
||||
/// Service for generating verification receipts for proof bundles.
|
||||
/// </summary>
|
||||
public interface IReceiptGenerator
|
||||
{
|
||||
/// <summary>
|
||||
/// Generate a verification receipt for a proof bundle.
|
||||
/// </summary>
|
||||
/// <param name="bundleId">The proof bundle ID to verify.</param>
|
||||
/// <param name="context">The verification context.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The verification receipt.</returns>
|
||||
Task<VerificationReceipt> GenerateReceiptAsync(
|
||||
ProofBundleId bundleId,
|
||||
VerificationContext context,
|
||||
CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Context for verification operations.
|
||||
/// </summary>
|
||||
public sealed record VerificationContext
|
||||
{
|
||||
/// <summary>
|
||||
/// The trust anchor ID to verify against.
|
||||
/// </summary>
|
||||
public required TrustAnchorId AnchorId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Version of the verifier tool.
|
||||
/// </summary>
|
||||
public required string VerifierVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional digests of tools used in verification.
|
||||
/// </summary>
|
||||
public IReadOnlyDictionary<string, string>? ToolDigests { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A verification receipt for a proof bundle.
|
||||
/// </summary>
|
||||
public sealed record VerificationReceipt
|
||||
{
|
||||
/// <summary>
|
||||
/// The proof bundle ID that was verified.
|
||||
/// </summary>
|
||||
public required ProofBundleId ProofBundleId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When the verification was performed.
|
||||
/// </summary>
|
||||
public required DateTimeOffset VerifiedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Version of the verifier tool.
|
||||
/// </summary>
|
||||
public required string VerifierVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The trust anchor ID used for verification.
|
||||
/// </summary>
|
||||
public required TrustAnchorId AnchorId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The overall verification result.
|
||||
/// </summary>
|
||||
public required VerificationResult Result { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Individual verification checks performed.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<VerificationCheck> Checks { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional digests of tools used in verification.
|
||||
/// </summary>
|
||||
public IReadOnlyDictionary<string, string>? ToolDigests { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of a verification operation.
|
||||
/// </summary>
|
||||
public enum VerificationResult
|
||||
{
|
||||
/// <summary>Verification passed.</summary>
|
||||
Pass,
|
||||
|
||||
/// <summary>Verification failed.</summary>
|
||||
Fail
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A single verification check performed during receipt generation.
|
||||
/// </summary>
|
||||
public sealed record VerificationCheck
|
||||
{
|
||||
/// <summary>
|
||||
/// Name of the check performed.
|
||||
/// </summary>
|
||||
public required string Check { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Status of this check.
|
||||
/// </summary>
|
||||
public required VerificationResult Status { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Key ID used if this was a signature check.
|
||||
/// </summary>
|
||||
public string? KeyId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Expected value (for comparison checks).
|
||||
/// </summary>
|
||||
public string? Expected { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Actual value (for comparison checks).
|
||||
/// </summary>
|
||||
public string? Actual { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Rekor log index if this was a transparency check.
|
||||
/// </summary>
|
||||
public long? LogIndex { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional details about the check.
|
||||
/// </summary>
|
||||
public string? Details { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,116 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Signing;
|
||||
|
||||
/// <summary>
|
||||
/// Signing key profiles for different proof chain statement types.
|
||||
/// </summary>
|
||||
public enum SigningKeyProfile
|
||||
{
|
||||
/// <summary>Scanner/Ingestor key for evidence statements.</summary>
|
||||
Evidence,
|
||||
|
||||
/// <summary>Policy/Authority key for reasoning statements.</summary>
|
||||
Reasoning,
|
||||
|
||||
/// <summary>VEXer/Vendor key for VEX verdicts.</summary>
|
||||
VexVerdict,
|
||||
|
||||
/// <summary>Authority key for proof spines and receipts.</summary>
|
||||
Authority,
|
||||
|
||||
/// <summary>Generator key for SBOM linkage statements.</summary>
|
||||
Generator
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of signature verification.
|
||||
/// </summary>
|
||||
public sealed record SignatureVerificationResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether the signature is valid.
|
||||
/// </summary>
|
||||
public required bool IsValid { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The key ID that was used for verification.
|
||||
/// </summary>
|
||||
public required string KeyId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Error message if verification failed.
|
||||
/// </summary>
|
||||
public string? ErrorMessage { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// DSSE envelope containing a signed statement.
|
||||
/// </summary>
|
||||
public sealed record DsseEnvelope
|
||||
{
|
||||
/// <summary>
|
||||
/// The payload type (always "application/vnd.in-toto+json").
|
||||
/// </summary>
|
||||
public required string PayloadType { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Base64-encoded payload (the statement JSON).
|
||||
/// </summary>
|
||||
public required string Payload { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Signatures over the payload.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<DsseSignature> Signatures { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A signature within a DSSE envelope.
|
||||
/// </summary>
|
||||
public sealed record DsseSignature
|
||||
{
|
||||
/// <summary>
|
||||
/// The key ID that produced this signature.
|
||||
/// </summary>
|
||||
public required string KeyId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Base64-encoded signature.
|
||||
/// </summary>
|
||||
public required string Sig { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Service for signing and verifying proof chain statements.
|
||||
/// </summary>
|
||||
public interface IProofChainSigner
|
||||
{
|
||||
/// <summary>
|
||||
/// Sign a statement and wrap it in a DSSE envelope.
|
||||
/// </summary>
|
||||
/// <typeparam name="T">The statement type.</typeparam>
|
||||
/// <param name="statement">The statement to sign.</param>
|
||||
/// <param name="keyProfile">The signing key profile to use.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>A DSSE envelope containing the signed statement.</returns>
|
||||
Task<DsseEnvelope> SignStatementAsync<T>(
|
||||
T statement,
|
||||
SigningKeyProfile keyProfile,
|
||||
CancellationToken ct = default) where T : InTotoStatement;
|
||||
|
||||
/// <summary>
|
||||
/// Verify a DSSE envelope signature.
|
||||
/// </summary>
|
||||
/// <param name="envelope">The envelope to verify.</param>
|
||||
/// <param name="allowedKeyIds">List of allowed key IDs for verification.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The verification result.</returns>
|
||||
Task<SignatureVerificationResult> VerifyEnvelopeAsync(
|
||||
DsseEnvelope envelope,
|
||||
IReadOnlyList<string> allowedKeyIds,
|
||||
CancellationToken ct = default);
|
||||
}
|
||||
@@ -0,0 +1,70 @@
|
||||
using System;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
/// <summary>
|
||||
/// In-toto statement for evidence collected from scanners or feeds.
|
||||
/// Predicate type: evidence.stella/v1
|
||||
/// </summary>
|
||||
public sealed record EvidenceStatement : InTotoStatement
|
||||
{
|
||||
/// <inheritdoc />
|
||||
[JsonPropertyName("predicateType")]
|
||||
public override string PredicateType => "evidence.stella/v1";
|
||||
|
||||
/// <summary>
|
||||
/// The evidence payload.
|
||||
/// </summary>
|
||||
[JsonPropertyName("predicate")]
|
||||
public required EvidencePayload Predicate { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Payload for evidence statements.
|
||||
/// </summary>
|
||||
public sealed record EvidencePayload
|
||||
{
|
||||
/// <summary>
|
||||
/// Scanner or feed name that produced this evidence.
|
||||
/// </summary>
|
||||
[JsonPropertyName("source")]
|
||||
public required string Source { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Version of the source tool.
|
||||
/// </summary>
|
||||
[JsonPropertyName("sourceVersion")]
|
||||
public required string SourceVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// UTC timestamp when evidence was collected.
|
||||
/// </summary>
|
||||
[JsonPropertyName("collectionTime")]
|
||||
public required DateTimeOffset CollectionTime { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the SBOM entry this evidence relates to.
|
||||
/// </summary>
|
||||
[JsonPropertyName("sbomEntryId")]
|
||||
public required string SbomEntryId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// CVE or vulnerability identifier if applicable.
|
||||
/// </summary>
|
||||
[JsonPropertyName("vulnerabilityId")]
|
||||
public string? VulnerabilityId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Pointer to or inline representation of raw finding data.
|
||||
/// </summary>
|
||||
[JsonPropertyName("rawFinding")]
|
||||
public required object RawFinding { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Content-addressed ID of this evidence (hash of canonical JSON).
|
||||
/// Format: sha256:<64-hex-chars>
|
||||
/// </summary>
|
||||
[JsonPropertyName("evidenceId")]
|
||||
public required string EvidenceId { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,48 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
/// <summary>
|
||||
/// Base type for in-toto Statement/v1 format.
|
||||
/// See: https://github.com/in-toto/attestation/blob/main/spec/v1/statement.md
|
||||
/// </summary>
|
||||
public abstract record InTotoStatement
|
||||
{
|
||||
/// <summary>
|
||||
/// The statement type, always "https://in-toto.io/Statement/v1".
|
||||
/// </summary>
|
||||
[JsonPropertyName("_type")]
|
||||
public string Type => "https://in-toto.io/Statement/v1";
|
||||
|
||||
/// <summary>
|
||||
/// The subjects this statement is about (e.g., artifact digests).
|
||||
/// </summary>
|
||||
[JsonPropertyName("subject")]
|
||||
public required IReadOnlyList<Subject> Subject { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The predicate type URI identifying the schema of the predicate.
|
||||
/// </summary>
|
||||
[JsonPropertyName("predicateType")]
|
||||
public abstract string PredicateType { get; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A subject in an in-toto statement, representing an artifact.
|
||||
/// </summary>
|
||||
public sealed record Subject
|
||||
{
|
||||
/// <summary>
|
||||
/// The name or identifier of the subject (e.g., image reference).
|
||||
/// </summary>
|
||||
[JsonPropertyName("name")]
|
||||
public required string Name { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Digests of the subject in algorithm:hex format.
|
||||
/// </summary>
|
||||
[JsonPropertyName("digest")]
|
||||
public required IReadOnlyDictionary<string, string> Digest { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,64 @@
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
/// <summary>
|
||||
/// In-toto statement for proof spine (merkle-aggregated proof bundle).
|
||||
/// Predicate type: proofspine.stella/v1
|
||||
/// </summary>
|
||||
public sealed record ProofSpineStatement : InTotoStatement
|
||||
{
|
||||
/// <inheritdoc />
|
||||
[JsonPropertyName("predicateType")]
|
||||
public override string PredicateType => "proofspine.stella/v1";
|
||||
|
||||
/// <summary>
|
||||
/// The proof spine payload.
|
||||
/// </summary>
|
||||
[JsonPropertyName("predicate")]
|
||||
public required ProofSpinePayload Predicate { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Payload for proof spine statements.
|
||||
/// </summary>
|
||||
public sealed record ProofSpinePayload
|
||||
{
|
||||
/// <summary>
|
||||
/// The SBOM entry ID this proof spine covers.
|
||||
/// </summary>
|
||||
[JsonPropertyName("sbomEntryId")]
|
||||
public required string SbomEntryId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Sorted list of evidence IDs included in this proof bundle.
|
||||
/// </summary>
|
||||
[JsonPropertyName("evidenceIds")]
|
||||
public required IReadOnlyList<string> EvidenceIds { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The reasoning ID linking evidence to verdict.
|
||||
/// </summary>
|
||||
[JsonPropertyName("reasoningId")]
|
||||
public required string ReasoningId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The VEX verdict ID for this entry.
|
||||
/// </summary>
|
||||
[JsonPropertyName("vexVerdictId")]
|
||||
public required string VexVerdictId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Version of the policy used.
|
||||
/// </summary>
|
||||
[JsonPropertyName("policyVersion")]
|
||||
public required string PolicyVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Content-addressed ID of this proof bundle (merkle root).
|
||||
/// Format: sha256:<64-hex-chars>
|
||||
/// </summary>
|
||||
[JsonPropertyName("proofBundleId")]
|
||||
public required string ProofBundleId { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,89 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
/// <summary>
|
||||
/// In-toto statement for policy evaluation reasoning traces.
|
||||
/// Predicate type: reasoning.stella/v1
|
||||
/// </summary>
|
||||
public sealed record ReasoningStatement : InTotoStatement
|
||||
{
|
||||
/// <inheritdoc />
|
||||
[JsonPropertyName("predicateType")]
|
||||
public override string PredicateType => "reasoning.stella/v1";
|
||||
|
||||
/// <summary>
|
||||
/// The reasoning payload.
|
||||
/// </summary>
|
||||
[JsonPropertyName("predicate")]
|
||||
public required ReasoningPayload Predicate { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Payload for reasoning statements.
|
||||
/// </summary>
|
||||
public sealed record ReasoningPayload
|
||||
{
|
||||
/// <summary>
|
||||
/// The SBOM entry ID this reasoning applies to.
|
||||
/// </summary>
|
||||
[JsonPropertyName("sbomEntryId")]
|
||||
public required string SbomEntryId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Evidence IDs that were considered in this reasoning.
|
||||
/// </summary>
|
||||
[JsonPropertyName("evidenceIds")]
|
||||
public required IReadOnlyList<string> EvidenceIds { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Version of the policy used for evaluation.
|
||||
/// </summary>
|
||||
[JsonPropertyName("policyVersion")]
|
||||
public required string PolicyVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Inputs to the reasoning process.
|
||||
/// </summary>
|
||||
[JsonPropertyName("inputs")]
|
||||
public required ReasoningInputsPayload Inputs { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Intermediate findings from the evaluation (optional).
|
||||
/// </summary>
|
||||
[JsonPropertyName("intermediateFindings")]
|
||||
public IReadOnlyDictionary<string, object>? IntermediateFindings { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Content-addressed ID of this reasoning (hash of canonical JSON).
|
||||
/// Format: sha256:<64-hex-chars>
|
||||
/// </summary>
|
||||
[JsonPropertyName("reasoningId")]
|
||||
public required string ReasoningId { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Inputs to the reasoning process.
|
||||
/// </summary>
|
||||
public sealed record ReasoningInputsPayload
|
||||
{
|
||||
/// <summary>
|
||||
/// The evaluation time used for temporal reasoning (must be UTC).
|
||||
/// </summary>
|
||||
[JsonPropertyName("currentEvaluationTime")]
|
||||
public required DateTimeOffset CurrentEvaluationTime { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Severity thresholds applied during evaluation.
|
||||
/// </summary>
|
||||
[JsonPropertyName("severityThresholds")]
|
||||
public IReadOnlyDictionary<string, object>? SeverityThresholds { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Lattice rules used for status merging.
|
||||
/// </summary>
|
||||
[JsonPropertyName("latticeRules")]
|
||||
public IReadOnlyDictionary<string, object>? LatticeRules { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,136 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
/// <summary>
|
||||
/// In-toto statement for SBOM-to-component linkage.
|
||||
/// Predicate type: https://stella-ops.org/predicates/sbom-linkage/v1
|
||||
/// </summary>
|
||||
public sealed record SbomLinkageStatement : InTotoStatement
|
||||
{
|
||||
/// <inheritdoc />
|
||||
[JsonPropertyName("predicateType")]
|
||||
public override string PredicateType => "https://stella-ops.org/predicates/sbom-linkage/v1";
|
||||
|
||||
/// <summary>
|
||||
/// The SBOM linkage payload.
|
||||
/// </summary>
|
||||
[JsonPropertyName("predicate")]
|
||||
public required SbomLinkagePayload Predicate { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Payload for SBOM linkage statements.
|
||||
/// </summary>
|
||||
public sealed record SbomLinkagePayload
|
||||
{
|
||||
/// <summary>
|
||||
/// Descriptor of the SBOM being linked.
|
||||
/// </summary>
|
||||
[JsonPropertyName("sbom")]
|
||||
public required SbomDescriptor Sbom { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Descriptor of the tool that generated this linkage.
|
||||
/// </summary>
|
||||
[JsonPropertyName("generator")]
|
||||
public required GeneratorDescriptor Generator { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// UTC timestamp when this linkage was generated.
|
||||
/// </summary>
|
||||
[JsonPropertyName("generatedAt")]
|
||||
public required DateTimeOffset GeneratedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Subjects that could not be fully resolved (optional).
|
||||
/// </summary>
|
||||
[JsonPropertyName("incompleteSubjects")]
|
||||
public IReadOnlyList<IncompleteSubject>? IncompleteSubjects { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Arbitrary tags for classification or filtering.
|
||||
/// </summary>
|
||||
[JsonPropertyName("tags")]
|
||||
public IReadOnlyDictionary<string, string>? Tags { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Descriptor of an SBOM document.
|
||||
/// </summary>
|
||||
public sealed record SbomDescriptor
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier of the SBOM (e.g., serialNumber or documentId).
|
||||
/// </summary>
|
||||
[JsonPropertyName("id")]
|
||||
public required string Id { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Format of the SBOM: CycloneDX or SPDX.
|
||||
/// </summary>
|
||||
[JsonPropertyName("format")]
|
||||
public required string Format { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Specification version (e.g., "1.6" for CycloneDX, "2.3" for SPDX).
|
||||
/// </summary>
|
||||
[JsonPropertyName("specVersion")]
|
||||
public required string SpecVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// MIME type of the SBOM document.
|
||||
/// </summary>
|
||||
[JsonPropertyName("mediaType")]
|
||||
public required string MediaType { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// SHA-256 digest of the SBOM content.
|
||||
/// </summary>
|
||||
[JsonPropertyName("sha256")]
|
||||
public required string Sha256 { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional location URI (oci:// or file://).
|
||||
/// </summary>
|
||||
[JsonPropertyName("location")]
|
||||
public string? Location { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Descriptor of the tool that generated an artifact.
|
||||
/// </summary>
|
||||
public sealed record GeneratorDescriptor
|
||||
{
|
||||
/// <summary>
|
||||
/// Name of the generator tool.
|
||||
/// </summary>
|
||||
[JsonPropertyName("name")]
|
||||
public required string Name { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Version of the generator tool.
|
||||
/// </summary>
|
||||
[JsonPropertyName("version")]
|
||||
public required string Version { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A subject that could not be fully resolved during SBOM linkage.
|
||||
/// </summary>
|
||||
public sealed record IncompleteSubject
|
||||
{
|
||||
/// <summary>
|
||||
/// Name or identifier of the incomplete subject.
|
||||
/// </summary>
|
||||
[JsonPropertyName("name")]
|
||||
public required string Name { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Reason why the subject is incomplete.
|
||||
/// </summary>
|
||||
[JsonPropertyName("reason")]
|
||||
public required string Reason { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,171 @@
|
||||
using System;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
/// <summary>
|
||||
/// In-toto statement for final verdict receipts.
|
||||
/// Predicate type: verdict.stella/v1
|
||||
/// </summary>
|
||||
public sealed record VerdictReceiptStatement : InTotoStatement
|
||||
{
|
||||
/// <inheritdoc />
|
||||
[JsonPropertyName("predicateType")]
|
||||
public override string PredicateType => "verdict.stella/v1";
|
||||
|
||||
/// <summary>
|
||||
/// The verdict receipt payload.
|
||||
/// </summary>
|
||||
[JsonPropertyName("predicate")]
|
||||
public required VerdictReceiptPayload Predicate { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Payload for verdict receipt statements.
|
||||
/// </summary>
|
||||
public sealed record VerdictReceiptPayload
|
||||
{
|
||||
/// <summary>
|
||||
/// The graph revision ID this verdict was computed from.
|
||||
/// </summary>
|
||||
[JsonPropertyName("graphRevisionId")]
|
||||
public required string GraphRevisionId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The finding key identifying the specific vulnerability/component pair.
|
||||
/// </summary>
|
||||
[JsonPropertyName("findingKey")]
|
||||
public required FindingKey FindingKey { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The policy rule that produced this verdict.
|
||||
/// </summary>
|
||||
[JsonPropertyName("rule")]
|
||||
public required PolicyRule Rule { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The decision made by the rule.
|
||||
/// </summary>
|
||||
[JsonPropertyName("decision")]
|
||||
public required VerdictDecision Decision { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Inputs used to compute this verdict.
|
||||
/// </summary>
|
||||
[JsonPropertyName("inputs")]
|
||||
public required VerdictInputs Inputs { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Outputs/references from this verdict.
|
||||
/// </summary>
|
||||
[JsonPropertyName("outputs")]
|
||||
public required VerdictOutputs Outputs { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// UTC timestamp when this verdict was created.
|
||||
/// </summary>
|
||||
[JsonPropertyName("createdAt")]
|
||||
public required DateTimeOffset CreatedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Key identifying a specific finding (component + vulnerability).
|
||||
/// </summary>
|
||||
public sealed record FindingKey
|
||||
{
|
||||
/// <summary>
|
||||
/// The SBOM entry ID for the component.
|
||||
/// </summary>
|
||||
[JsonPropertyName("sbomEntryId")]
|
||||
public required string SbomEntryId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The vulnerability ID (CVE, GHSA, etc.).
|
||||
/// </summary>
|
||||
[JsonPropertyName("vulnerabilityId")]
|
||||
public required string VulnerabilityId { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Policy rule that produced a verdict.
|
||||
/// </summary>
|
||||
public sealed record PolicyRule
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique identifier of the rule.
|
||||
/// </summary>
|
||||
[JsonPropertyName("id")]
|
||||
public required string Id { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Version of the rule.
|
||||
/// </summary>
|
||||
[JsonPropertyName("version")]
|
||||
public required string Version { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Decision made by a policy rule.
|
||||
/// </summary>
|
||||
public sealed record VerdictDecision
|
||||
{
|
||||
/// <summary>
|
||||
/// Status of the decision: block, warn, pass.
|
||||
/// </summary>
|
||||
[JsonPropertyName("status")]
|
||||
public required string Status { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Human-readable reason for the decision.
|
||||
/// </summary>
|
||||
[JsonPropertyName("reason")]
|
||||
public required string Reason { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Inputs used to compute a verdict.
|
||||
/// </summary>
|
||||
public sealed record VerdictInputs
|
||||
{
|
||||
/// <summary>
|
||||
/// Digest of the SBOM used.
|
||||
/// </summary>
|
||||
[JsonPropertyName("sbomDigest")]
|
||||
public required string SbomDigest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Digest of the advisory feeds used.
|
||||
/// </summary>
|
||||
[JsonPropertyName("feedsDigest")]
|
||||
public required string FeedsDigest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Digest of the policy bundle used.
|
||||
/// </summary>
|
||||
[JsonPropertyName("policyDigest")]
|
||||
public required string PolicyDigest { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Outputs/references from a verdict.
|
||||
/// </summary>
|
||||
public sealed record VerdictOutputs
|
||||
{
|
||||
/// <summary>
|
||||
/// The proof bundle ID containing the evidence chain.
|
||||
/// </summary>
|
||||
[JsonPropertyName("proofBundleId")]
|
||||
public required string ProofBundleId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The reasoning ID explaining the decision.
|
||||
/// </summary>
|
||||
[JsonPropertyName("reasoningId")]
|
||||
public required string ReasoningId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The VEX verdict ID for this finding.
|
||||
/// </summary>
|
||||
[JsonPropertyName("vexVerdictId")]
|
||||
public required string VexVerdictId { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,69 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
/// <summary>
|
||||
/// In-toto statement for VEX verdicts.
|
||||
/// Predicate type: cdx-vex.stella/v1
|
||||
/// </summary>
|
||||
public sealed record VexVerdictStatement : InTotoStatement
|
||||
{
|
||||
/// <inheritdoc />
|
||||
[JsonPropertyName("predicateType")]
|
||||
public override string PredicateType => "cdx-vex.stella/v1";
|
||||
|
||||
/// <summary>
|
||||
/// The VEX verdict payload.
|
||||
/// </summary>
|
||||
[JsonPropertyName("predicate")]
|
||||
public required VexVerdictPayload Predicate { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Payload for VEX verdict statements.
|
||||
/// </summary>
|
||||
public sealed record VexVerdictPayload
|
||||
{
|
||||
/// <summary>
|
||||
/// The SBOM entry ID this verdict applies to.
|
||||
/// </summary>
|
||||
[JsonPropertyName("sbomEntryId")]
|
||||
public required string SbomEntryId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The vulnerability ID (CVE, GHSA, etc.).
|
||||
/// </summary>
|
||||
[JsonPropertyName("vulnerabilityId")]
|
||||
public required string VulnerabilityId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// VEX status: not_affected, affected, fixed, under_investigation.
|
||||
/// </summary>
|
||||
[JsonPropertyName("status")]
|
||||
public required string Status { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Justification for the VEX status.
|
||||
/// </summary>
|
||||
[JsonPropertyName("justification")]
|
||||
public required string Justification { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Version of the policy used to generate this verdict.
|
||||
/// </summary>
|
||||
[JsonPropertyName("policyVersion")]
|
||||
public required string PolicyVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the reasoning that led to this verdict.
|
||||
/// </summary>
|
||||
[JsonPropertyName("reasoningId")]
|
||||
public required string ReasoningId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Content-addressed ID of this VEX verdict (hash of canonical JSON).
|
||||
/// Format: sha256:<64-hex-chars>
|
||||
/// </summary>
|
||||
[JsonPropertyName("vexVerdictId")]
|
||||
public required string VexVerdictId { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,198 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Attestor.ProofChain.Identifiers;
|
||||
using StellaOps.Attestor.ProofChain.Receipts;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Verification;
|
||||
|
||||
/// <summary>
|
||||
/// Verification pipeline for proof chains per advisory §9.1.
|
||||
/// Executes a series of verification steps and generates receipts.
|
||||
/// </summary>
|
||||
public interface IVerificationPipeline
|
||||
{
|
||||
/// <summary>
|
||||
/// Execute the full verification pipeline.
|
||||
/// </summary>
|
||||
/// <param name="request">The verification request.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The verification result with receipt.</returns>
|
||||
Task<VerificationPipelineResult> VerifyAsync(
|
||||
VerificationPipelineRequest request,
|
||||
CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request to verify a proof chain.
|
||||
/// </summary>
|
||||
public sealed record VerificationPipelineRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// The proof bundle ID to verify.
|
||||
/// </summary>
|
||||
public required ProofBundleId ProofBundleId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional trust anchor ID to verify against.
|
||||
/// If not specified, the pipeline will find a matching anchor.
|
||||
/// </summary>
|
||||
public TrustAnchorId? TrustAnchorId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether to verify Rekor inclusion proofs.
|
||||
/// </summary>
|
||||
public bool VerifyRekor { get; init; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Whether to skip trust anchor verification.
|
||||
/// </summary>
|
||||
public bool SkipTrustAnchorVerification { get; init; } = false;
|
||||
|
||||
/// <summary>
|
||||
/// Version of the verifier for the receipt.
|
||||
/// </summary>
|
||||
public string VerifierVersion { get; init; } = "1.0.0";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of the verification pipeline.
|
||||
/// </summary>
|
||||
public sealed record VerificationPipelineResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether the verification passed.
|
||||
/// </summary>
|
||||
public required bool IsValid { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The verification receipt.
|
||||
/// </summary>
|
||||
public required VerificationReceipt Receipt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Individual step results.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<VerificationStepResult> Steps { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The first failing step, if any.
|
||||
/// </summary>
|
||||
public VerificationStepResult? FirstFailure =>
|
||||
Steps.FirstOrDefault(s => !s.Passed);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of a single verification step.
|
||||
/// </summary>
|
||||
public sealed record VerificationStepResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Name of the step (e.g., "dsse_signature", "merkle_root").
|
||||
/// </summary>
|
||||
public required string StepName { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether the step passed.
|
||||
/// </summary>
|
||||
public required bool Passed { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Duration of the step.
|
||||
/// </summary>
|
||||
public required TimeSpan Duration { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional details about the step.
|
||||
/// </summary>
|
||||
public string? Details { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Error message if the step failed.
|
||||
/// </summary>
|
||||
public string? ErrorMessage { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Key ID if this was a signature verification step.
|
||||
/// </summary>
|
||||
public string? KeyId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Expected value for comparison steps.
|
||||
/// </summary>
|
||||
public string? Expected { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Actual value for comparison steps.
|
||||
/// </summary>
|
||||
public string? Actual { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Rekor log index if this was an inclusion proof step.
|
||||
/// </summary>
|
||||
public long? LogIndex { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A single step in the verification pipeline.
|
||||
/// </summary>
|
||||
public interface IVerificationStep
|
||||
{
|
||||
/// <summary>
|
||||
/// Name of this step.
|
||||
/// </summary>
|
||||
string Name { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Execute the verification step.
|
||||
/// </summary>
|
||||
/// <param name="context">The verification context.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>The step result.</returns>
|
||||
Task<VerificationStepResult> ExecuteAsync(
|
||||
VerificationContext context,
|
||||
CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Context passed through the verification pipeline.
|
||||
/// </summary>
|
||||
public sealed class VerificationContext
|
||||
{
|
||||
/// <summary>
|
||||
/// The proof bundle ID being verified.
|
||||
/// </summary>
|
||||
public required ProofBundleId ProofBundleId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The trust anchor ID (if specified or discovered).
|
||||
/// </summary>
|
||||
public TrustAnchorId? TrustAnchorId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether to verify Rekor inclusion.
|
||||
/// </summary>
|
||||
public bool VerifyRekor { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Collected data during verification for subsequent steps.
|
||||
/// </summary>
|
||||
public Dictionary<string, object> Data { get; } = new();
|
||||
|
||||
/// <summary>
|
||||
/// Get typed data from the context.
|
||||
/// </summary>
|
||||
public T? GetData<T>(string key) where T : class
|
||||
{
|
||||
return Data.TryGetValue(key, out var value) ? value as T : null;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Set data in the context.
|
||||
/// </summary>
|
||||
public void SetData<T>(string key, T value) where T : notnull
|
||||
{
|
||||
Data[key] = value;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,315 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProofSpineAssemblyIntegrationTests.cs
|
||||
// Sprint: SPRINT_0501_0004_0001_proof_chain_spine_assembly
|
||||
// Tasks: #10, #11, #12
|
||||
// Description: Integration tests for proof spine assembly pipeline
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text;
|
||||
using StellaOps.Attestor.ProofChain.Merkle;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for the full proof spine assembly pipeline.
|
||||
/// </summary>
|
||||
public class ProofSpineAssemblyIntegrationTests
|
||||
{
|
||||
private readonly IMerkleTreeBuilder _builder;
|
||||
|
||||
public ProofSpineAssemblyIntegrationTests()
|
||||
{
|
||||
_builder = new DeterministicMerkleTreeBuilder();
|
||||
}
|
||||
|
||||
#region Task #10: Merkle Tree Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void MerkleRoot_SameInputDifferentRuns_ProducesIdenticalRoot()
|
||||
{
|
||||
// Arrange - simulate a proof spine with SBOM, evidence, reasoning, VEX
|
||||
var sbomEntryId = "sha256:abc123...";
|
||||
var evidenceIds = new[] { "sha256:ev1...", "sha256:ev2...", "sha256:ev3..." };
|
||||
var reasoningId = "sha256:reason...";
|
||||
var vexVerdictId = "sha256:vex...";
|
||||
|
||||
// Act - compute root multiple times
|
||||
var root1 = ComputeProofSpineRoot(sbomEntryId, evidenceIds, reasoningId, vexVerdictId);
|
||||
var root2 = ComputeProofSpineRoot(sbomEntryId, evidenceIds, reasoningId, vexVerdictId);
|
||||
var root3 = ComputeProofSpineRoot(sbomEntryId, evidenceIds, reasoningId, vexVerdictId);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(root1, root2);
|
||||
Assert.Equal(root2, root3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MerkleRoot_EvidenceOrderIsNormalized_ProducesSameRoot()
|
||||
{
|
||||
// Arrange
|
||||
var sbomEntryId = "sha256:abc123...";
|
||||
var evidenceIds1 = new[] { "sha256:b...", "sha256:a...", "sha256:c..." };
|
||||
var evidenceIds2 = new[] { "sha256:c...", "sha256:a...", "sha256:b..." };
|
||||
var reasoningId = "sha256:reason...";
|
||||
var vexVerdictId = "sha256:vex...";
|
||||
|
||||
// Act - evidence IDs should be sorted internally
|
||||
var root1 = ComputeProofSpineRoot(sbomEntryId, evidenceIds1, reasoningId, vexVerdictId);
|
||||
var root2 = ComputeProofSpineRoot(sbomEntryId, evidenceIds2, reasoningId, vexVerdictId);
|
||||
|
||||
// Assert - same root because evidence is sorted
|
||||
Assert.Equal(root1, root2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void MerkleRoot_DifferentSbom_ProducesDifferentRoot()
|
||||
{
|
||||
// Arrange
|
||||
var evidenceIds = new[] { "sha256:ev1..." };
|
||||
var reasoningId = "sha256:reason...";
|
||||
var vexVerdictId = "sha256:vex...";
|
||||
|
||||
// Act
|
||||
var root1 = ComputeProofSpineRoot("sha256:sbom1...", evidenceIds, reasoningId, vexVerdictId);
|
||||
var root2 = ComputeProofSpineRoot("sha256:sbom2...", evidenceIds, reasoningId, vexVerdictId);
|
||||
|
||||
// Assert
|
||||
Assert.NotEqual(root1, root2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Task #11: Full Pipeline Integration Tests
|
||||
|
||||
[Fact]
|
||||
public void Pipeline_CompleteProofSpine_AssemblesCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var sbomEntryId = "sha256:0123456789abcdef...";
|
||||
var evidenceIds = new[]
|
||||
{
|
||||
"sha256:evidence-cve-2024-0001...",
|
||||
"sha256:evidence-reachability...",
|
||||
"sha256:evidence-sbom-component...",
|
||||
};
|
||||
var reasoningId = "sha256:reasoning-policy-match...";
|
||||
var vexVerdictId = "sha256:vex-not-affected...";
|
||||
|
||||
// Act
|
||||
var root = ComputeProofSpineRoot(sbomEntryId, evidenceIds, reasoningId, vexVerdictId);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(root);
|
||||
Assert.Equal(32, root.Length); // SHA-256
|
||||
Assert.StartsWith("sha256:", FormatAsId(root));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Pipeline_EmptyEvidence_HandlesGracefully()
|
||||
{
|
||||
// Arrange - minimal proof spine with no evidence
|
||||
var sbomEntryId = "sha256:sbom...";
|
||||
var evidenceIds = Array.Empty<string>();
|
||||
var reasoningId = "sha256:reason...";
|
||||
var vexVerdictId = "sha256:vex...";
|
||||
|
||||
// Act
|
||||
var root = ComputeProofSpineRoot(sbomEntryId, evidenceIds, reasoningId, vexVerdictId);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(root);
|
||||
Assert.Equal(32, root.Length);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Pipeline_ManyEvidenceItems_ScalesEfficiently()
|
||||
{
|
||||
// Arrange - large number of evidence items
|
||||
var sbomEntryId = "sha256:sbom...";
|
||||
var evidenceIds = Enumerable.Range(0, 1000)
|
||||
.Select(i => $"sha256:evidence-{i:D4}...")
|
||||
.ToArray();
|
||||
var reasoningId = "sha256:reason...";
|
||||
var vexVerdictId = "sha256:vex...";
|
||||
|
||||
// Act
|
||||
var sw = System.Diagnostics.Stopwatch.StartNew();
|
||||
var root = ComputeProofSpineRoot(sbomEntryId, evidenceIds, reasoningId, vexVerdictId);
|
||||
sw.Stop();
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(root);
|
||||
Assert.True(sw.ElapsedMilliseconds < 1000, "Should complete within 1 second");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Task #12: Cross-Platform Verification Tests
|
||||
|
||||
[Fact]
|
||||
public void CrossPlatform_KnownVector_ProducesExpectedRoot()
|
||||
{
|
||||
// Arrange - known test vector for cross-platform verification
|
||||
// This allows other implementations (Go, Rust, TypeScript) to verify compatibility
|
||||
var sbomEntryId = "sha256:0000000000000000000000000000000000000000000000000000000000000001";
|
||||
var evidenceIds = new[]
|
||||
{
|
||||
"sha256:0000000000000000000000000000000000000000000000000000000000000002",
|
||||
"sha256:0000000000000000000000000000000000000000000000000000000000000003",
|
||||
};
|
||||
var reasoningId = "sha256:0000000000000000000000000000000000000000000000000000000000000004";
|
||||
var vexVerdictId = "sha256:0000000000000000000000000000000000000000000000000000000000000005";
|
||||
|
||||
// Act
|
||||
var root = ComputeProofSpineRoot(sbomEntryId, evidenceIds, reasoningId, vexVerdictId);
|
||||
|
||||
// Assert - root should be deterministic and verifiable by other implementations
|
||||
Assert.NotNull(root);
|
||||
Assert.Equal(32, root.Length);
|
||||
|
||||
// The actual expected root hash would be computed once and verified across platforms
|
||||
// For now, we just verify determinism
|
||||
var root2 = ComputeProofSpineRoot(sbomEntryId, evidenceIds, reasoningId, vexVerdictId);
|
||||
Assert.Equal(root, root2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CrossPlatform_Utf8Encoding_HandlesBinaryCorrectly()
|
||||
{
|
||||
// Arrange - IDs with special characters (should be UTF-8 encoded)
|
||||
var sbomEntryId = "sha256:café"; // Non-ASCII
|
||||
var evidenceIds = new[] { "sha256:日本語" }; // Japanese
|
||||
var reasoningId = "sha256:émoji🎉"; // Emoji
|
||||
var vexVerdictId = "sha256:Ω"; // Greek
|
||||
|
||||
// Act
|
||||
var root = ComputeProofSpineRoot(sbomEntryId, evidenceIds, reasoningId, vexVerdictId);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(root);
|
||||
Assert.Equal(32, root.Length);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CrossPlatform_BinaryDigests_HandleRawBytes()
|
||||
{
|
||||
// Arrange - actual SHA-256 digests (64 hex chars)
|
||||
var sbomEntryId = "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
|
||||
var evidenceIds = new[]
|
||||
{
|
||||
"sha256:d7a8fbb307d7809469ca9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592",
|
||||
};
|
||||
var reasoningId = "sha256:9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08";
|
||||
var vexVerdictId = "sha256:a591a6d40bf420404a011733cfb7b190d62c65bf0bcda32b57b277d9ad9f146e";
|
||||
|
||||
// Act
|
||||
var root = ComputeProofSpineRoot(sbomEntryId, evidenceIds, reasoningId, vexVerdictId);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(root);
|
||||
var rootHex = Convert.ToHexString(root).ToLowerInvariant();
|
||||
Assert.Equal(64, rootHex.Length);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
/// <summary>
|
||||
/// Computes the proof spine merkle root following the deterministic algorithm.
|
||||
/// </summary>
|
||||
private byte[] ComputeProofSpineRoot(
|
||||
string sbomEntryId,
|
||||
string[] evidenceIds,
|
||||
string reasoningId,
|
||||
string vexVerdictId)
|
||||
{
|
||||
// Step 1: Prepare leaves in deterministic order
|
||||
var leaves = new List<ReadOnlyMemory<byte>>();
|
||||
|
||||
// SBOM entry is always first
|
||||
leaves.Add(Encoding.UTF8.GetBytes(sbomEntryId));
|
||||
|
||||
// Evidence IDs sorted lexicographically
|
||||
var sortedEvidence = evidenceIds.OrderBy(x => x, StringComparer.Ordinal).ToArray();
|
||||
foreach (var evidenceId in sortedEvidence)
|
||||
{
|
||||
leaves.Add(Encoding.UTF8.GetBytes(evidenceId));
|
||||
}
|
||||
|
||||
// Reasoning ID
|
||||
leaves.Add(Encoding.UTF8.GetBytes(reasoningId));
|
||||
|
||||
// VEX verdict ID last
|
||||
leaves.Add(Encoding.UTF8.GetBytes(vexVerdictId));
|
||||
|
||||
// Build merkle tree
|
||||
return _builder.ComputeMerkleRoot(leaves.ToArray());
|
||||
}
|
||||
|
||||
private static string FormatAsId(byte[] hash)
|
||||
{
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for merkle tree building.
|
||||
/// </summary>
|
||||
public interface IMerkleTreeBuilder
|
||||
{
|
||||
byte[] ComputeMerkleRoot(ReadOnlyMemory<byte>[] leaves);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Deterministic merkle tree builder using SHA-256.
|
||||
/// </summary>
|
||||
public class DeterministicMerkleTreeBuilder : IMerkleTreeBuilder
|
||||
{
|
||||
public byte[] ComputeMerkleRoot(ReadOnlyMemory<byte>[] leaves)
|
||||
{
|
||||
if (leaves.Length == 0)
|
||||
{
|
||||
return new byte[32]; // Zero hash for empty tree
|
||||
}
|
||||
|
||||
// Hash all leaves
|
||||
var currentLevel = new List<byte[]>();
|
||||
using var sha256 = System.Security.Cryptography.SHA256.Create();
|
||||
|
||||
foreach (var leaf in leaves)
|
||||
{
|
||||
currentLevel.Add(sha256.ComputeHash(leaf.ToArray()));
|
||||
}
|
||||
|
||||
// Pad to power of 2 by duplicating last leaf
|
||||
while (!IsPowerOfTwo(currentLevel.Count))
|
||||
{
|
||||
currentLevel.Add(currentLevel[^1]);
|
||||
}
|
||||
|
||||
// Build tree bottom-up
|
||||
while (currentLevel.Count > 1)
|
||||
{
|
||||
var nextLevel = new List<byte[]>();
|
||||
|
||||
for (int i = 0; i < currentLevel.Count; i += 2)
|
||||
{
|
||||
var left = currentLevel[i];
|
||||
var right = currentLevel[i + 1];
|
||||
|
||||
// Concatenate and hash
|
||||
var combined = new byte[left.Length + right.Length];
|
||||
Buffer.BlockCopy(left, 0, combined, 0, left.Length);
|
||||
Buffer.BlockCopy(right, 0, combined, left.Length, right.Length);
|
||||
|
||||
nextLevel.Add(sha256.ComputeHash(combined));
|
||||
}
|
||||
|
||||
currentLevel = nextLevel;
|
||||
}
|
||||
|
||||
return currentLevel[0];
|
||||
}
|
||||
|
||||
private static bool IsPowerOfTwo(int n) => n > 0 && (n & (n - 1)) == 0;
|
||||
}
|
||||
@@ -0,0 +1,198 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// Copyright (c) StellaOps Contributors
|
||||
|
||||
using System.Text.Json;
|
||||
using StellaOps.Attestor.ProofChain.Builders;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests.Statements;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for all DSSE statement types (Task PROOF-PRED-0012).
|
||||
/// </summary>
|
||||
public class StatementBuilderTests
|
||||
{
|
||||
private readonly StatementBuilder _builder = new();
|
||||
private readonly DateTimeOffset _fixedTime = new(2025, 12, 16, 10, 0, 0, TimeSpan.Zero);
|
||||
|
||||
[Fact]
|
||||
public void BuildEvidenceStatement_SetsPredicateType()
|
||||
{
|
||||
var statement = _builder.BuildEvidenceStatement(
|
||||
subject: new InTotoSubject { Name = "test-artifact", Digest = new() { ["sha256"] = "abc123" } },
|
||||
source: "trivy",
|
||||
sourceVersion: "0.50.0",
|
||||
collectionTime: _fixedTime,
|
||||
sbomEntryId: "sbom-123");
|
||||
|
||||
Assert.Equal("evidence.stella/v1", statement.PredicateType);
|
||||
Assert.Equal("https://in-toto.io/Statement/v1", statement.Type);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildEvidenceStatement_PopulatesPredicate()
|
||||
{
|
||||
var statement = _builder.BuildEvidenceStatement(
|
||||
subject: new InTotoSubject { Name = "test-artifact", Digest = new() { ["sha256"] = "abc123" } },
|
||||
source: "trivy",
|
||||
sourceVersion: "0.50.0",
|
||||
collectionTime: _fixedTime,
|
||||
sbomEntryId: "sbom-123",
|
||||
vulnerabilityId: "CVE-2025-1234");
|
||||
|
||||
Assert.Equal("trivy", statement.Predicate.Source);
|
||||
Assert.Equal("0.50.0", statement.Predicate.SourceVersion);
|
||||
Assert.Equal(_fixedTime, statement.Predicate.CollectionTime);
|
||||
Assert.Equal("sbom-123", statement.Predicate.SbomEntryId);
|
||||
Assert.Equal("CVE-2025-1234", statement.Predicate.VulnerabilityId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildProofSpineStatement_SetsPredicateType()
|
||||
{
|
||||
var statement = _builder.BuildProofSpineStatement(
|
||||
subject: new InTotoSubject { Name = "image:v1.0", Digest = new() { ["sha256"] = "abc123" } },
|
||||
spineAlgorithm: "sha256-merkle",
|
||||
rootHash: "root-hash",
|
||||
leafHashes: ["leaf1", "leaf2", "leaf3"]);
|
||||
|
||||
Assert.Equal("proofspine.stella/v1", statement.PredicateType);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildProofSpineStatement_ContainsLeafHashes()
|
||||
{
|
||||
var leafHashes = new[] { "hash1", "hash2", "hash3", "hash4" };
|
||||
var statement = _builder.BuildProofSpineStatement(
|
||||
subject: new InTotoSubject { Name = "image:v1.0", Digest = new() { ["sha256"] = "abc123" } },
|
||||
spineAlgorithm: "sha256-merkle",
|
||||
rootHash: "merkle-root",
|
||||
leafHashes: leafHashes);
|
||||
|
||||
Assert.Equal("sha256-merkle", statement.Predicate.Algorithm);
|
||||
Assert.Equal("merkle-root", statement.Predicate.RootHash);
|
||||
Assert.Equal(4, statement.Predicate.LeafHashes.Length);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildVexVerdictStatement_SetsPredicateType()
|
||||
{
|
||||
var statement = _builder.BuildVexVerdictStatement(
|
||||
subject: new InTotoSubject { Name = "pkg:npm/lodash@4.17.21", Digest = new() { ["sha256"] = "abc123" } },
|
||||
vulnerabilityId: "CVE-2025-1234",
|
||||
vexStatus: "not_affected",
|
||||
justification: "vulnerable_code_not_present",
|
||||
analysisTime: _fixedTime);
|
||||
|
||||
Assert.Equal("vexverdict.stella/v1", statement.PredicateType);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildVexVerdictStatement_PopulatesVexDetails()
|
||||
{
|
||||
var statement = _builder.BuildVexVerdictStatement(
|
||||
subject: new InTotoSubject { Name = "pkg:npm/lodash@4.17.21", Digest = new() { ["sha256"] = "abc123" } },
|
||||
vulnerabilityId: "CVE-2025-1234",
|
||||
vexStatus: "not_affected",
|
||||
justification: "vulnerable_code_not_present",
|
||||
analysisTime: _fixedTime);
|
||||
|
||||
Assert.Equal("CVE-2025-1234", statement.Predicate.VulnerabilityId);
|
||||
Assert.Equal("not_affected", statement.Predicate.Status);
|
||||
Assert.Equal("vulnerable_code_not_present", statement.Predicate.Justification);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildReasoningStatement_SetsPredicateType()
|
||||
{
|
||||
var statement = _builder.BuildReasoningStatement(
|
||||
subject: new InTotoSubject { Name = "finding:123", Digest = new() { ["sha256"] = "abc123" } },
|
||||
reasoningType: "exploitability",
|
||||
conclusion: "not_exploitable",
|
||||
evidenceRefs: ["evidence1", "evidence2"]);
|
||||
|
||||
Assert.Equal("reasoning.stella/v1", statement.PredicateType);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildVerdictReceiptStatement_SetsPredicateType()
|
||||
{
|
||||
var statement = _builder.BuildVerdictReceiptStatement(
|
||||
subject: new InTotoSubject { Name = "scan:456", Digest = new() { ["sha256"] = "abc123" } },
|
||||
verdictHash: "verdict-hash",
|
||||
verdictTime: _fixedTime,
|
||||
signatureAlgorithm: "ECDSA-P256");
|
||||
|
||||
Assert.Equal("verdictreceipt.stella/v1", statement.PredicateType);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildSbomLinkageStatement_SetsPredicateType()
|
||||
{
|
||||
var statement = _builder.BuildSbomLinkageStatement(
|
||||
subject: new InTotoSubject { Name = "image:v1.0", Digest = new() { ["sha256"] = "abc123" } },
|
||||
sbomDigest: "sbom-digest",
|
||||
sbomFormat: "cyclonedx",
|
||||
sbomVersion: "1.6");
|
||||
|
||||
Assert.Equal("sbomlinkage.stella/v1", statement.PredicateType);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AllStatements_SerializeToValidJson()
|
||||
{
|
||||
var subject = new InTotoSubject { Name = "test", Digest = new() { ["sha256"] = "abc" } };
|
||||
|
||||
var evidence = _builder.BuildEvidenceStatement(subject, "trivy", "1.0", _fixedTime, "sbom1");
|
||||
var spine = _builder.BuildProofSpineStatement(subject, "sha256", "root", ["leaf1"]);
|
||||
var vex = _builder.BuildVexVerdictStatement(subject, "CVE-1", "fixed", null, _fixedTime);
|
||||
var reasoning = _builder.BuildReasoningStatement(subject, "exploitability", "safe", []);
|
||||
var receipt = _builder.BuildVerdictReceiptStatement(subject, "hash", _fixedTime, "ECDSA");
|
||||
var sbom = _builder.BuildSbomLinkageStatement(subject, "sbom-hash", "spdx", "3.0");
|
||||
|
||||
// All should serialize without throwing
|
||||
Assert.NotNull(JsonSerializer.Serialize(evidence));
|
||||
Assert.NotNull(JsonSerializer.Serialize(spine));
|
||||
Assert.NotNull(JsonSerializer.Serialize(vex));
|
||||
Assert.NotNull(JsonSerializer.Serialize(reasoning));
|
||||
Assert.NotNull(JsonSerializer.Serialize(receipt));
|
||||
Assert.NotNull(JsonSerializer.Serialize(sbom));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EvidenceStatement_RoundTripsViaJson()
|
||||
{
|
||||
var original = _builder.BuildEvidenceStatement(
|
||||
subject: new InTotoSubject { Name: "artifact", Digest = new() { ["sha256"] = "hash123" } },
|
||||
source: "grype",
|
||||
sourceVersion: "0.80.0",
|
||||
collectionTime: _fixedTime,
|
||||
sbomEntryId: "entry-456",
|
||||
vulnerabilityId: "CVE-2025-9999");
|
||||
|
||||
var json = JsonSerializer.Serialize(original);
|
||||
var restored = JsonSerializer.Deserialize<EvidenceStatement>(json);
|
||||
|
||||
Assert.NotNull(restored);
|
||||
Assert.Equal(original.PredicateType, restored.PredicateType);
|
||||
Assert.Equal(original.Predicate.Source, restored.Predicate.Source);
|
||||
Assert.Equal(original.Predicate.VulnerabilityId, restored.Predicate.VulnerabilityId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ProofSpineStatement_RoundTripsViaJson()
|
||||
{
|
||||
var original = _builder.BuildProofSpineStatement(
|
||||
subject: new InTotoSubject { Name = "image:latest", Digest = new() { ["sha256"] = "img-hash" } },
|
||||
spineAlgorithm: "sha256-merkle-v2",
|
||||
rootHash: "merkle-root-abc",
|
||||
leafHashes: ["a", "b", "c", "d"]);
|
||||
|
||||
var json = JsonSerializer.Serialize(original);
|
||||
var restored = JsonSerializer.Deserialize<ProofSpineStatement>(json);
|
||||
|
||||
Assert.NotNull(restored);
|
||||
Assert.Equal(original.Predicate.RootHash, restored.Predicate.RootHash);
|
||||
Assert.Equal(original.Predicate.LeafHashes.Length, restored.Predicate.LeafHashes.Length);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,172 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// Copyright (c) StellaOps Contributors
|
||||
|
||||
using System.Text.Json;
|
||||
using StellaOps.Attestor.ProofChain.Builders;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
using StellaOps.Attestor.ProofChain.Validation;
|
||||
|
||||
namespace StellaOps.Attestor.ProofChain.Tests.Statements;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for statement validation (Task PROOF-PRED-0015).
|
||||
/// </summary>
|
||||
public class StatementValidatorTests
|
||||
{
|
||||
private readonly StatementBuilder _builder = new();
|
||||
private readonly IStatementValidator _validator = new StatementValidator();
|
||||
private readonly DateTimeOffset _fixedTime = new(2025, 12, 16, 10, 0, 0, TimeSpan.Zero);
|
||||
|
||||
[Fact]
|
||||
public void Validate_ValidEvidenceStatement_ReturnsSuccess()
|
||||
{
|
||||
var statement = _builder.BuildEvidenceStatement(
|
||||
subject: new InTotoSubject { Name = "artifact", Digest = new() { ["sha256"] = "abc123" } },
|
||||
source: "trivy",
|
||||
sourceVersion: "0.50.0",
|
||||
collectionTime: _fixedTime,
|
||||
sbomEntryId: "sbom-123");
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.True(result.IsValid);
|
||||
Assert.Empty(result.Errors);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_EvidenceStatementWithEmptySource_ReturnsError()
|
||||
{
|
||||
var statement = new EvidenceStatement
|
||||
{
|
||||
Subject = [new InTotoSubject { Name = "artifact", Digest = new() { ["sha256"] = "abc" } }],
|
||||
Predicate = new EvidencePayload
|
||||
{
|
||||
Source = "",
|
||||
SourceVersion = "1.0",
|
||||
CollectionTime = _fixedTime,
|
||||
SbomEntryId = "sbom-1"
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Contains(result.Errors, e => e.Contains("Source"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_StatementWithEmptySubject_ReturnsError()
|
||||
{
|
||||
var statement = new EvidenceStatement
|
||||
{
|
||||
Subject = [],
|
||||
Predicate = new EvidencePayload
|
||||
{
|
||||
Source = "trivy",
|
||||
SourceVersion = "1.0",
|
||||
CollectionTime = _fixedTime,
|
||||
SbomEntryId = "sbom-1"
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Contains(result.Errors, e => e.Contains("Subject"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_ProofSpineWithEmptyLeafHashes_ReturnsError()
|
||||
{
|
||||
var statement = new ProofSpineStatement
|
||||
{
|
||||
Subject = [new InTotoSubject { Name = "image", Digest = new() { ["sha256"] = "hash" } }],
|
||||
Predicate = new ProofSpinePayload
|
||||
{
|
||||
Algorithm = "sha256-merkle",
|
||||
RootHash = "root",
|
||||
LeafHashes = []
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Contains(result.Errors, e => e.Contains("LeafHashes"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_VexVerdictWithValidStatus_ReturnsSuccess()
|
||||
{
|
||||
var validStatuses = new[] { "not_affected", "affected", "fixed", "under_investigation" };
|
||||
|
||||
foreach (var status in validStatuses)
|
||||
{
|
||||
var statement = _builder.BuildVexVerdictStatement(
|
||||
subject: new InTotoSubject { Name = "pkg", Digest = new() { ["sha256"] = "abc" } },
|
||||
vulnerabilityId: "CVE-2025-1",
|
||||
vexStatus: status,
|
||||
justification: null,
|
||||
analysisTime: _fixedTime);
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.True(result.IsValid, $"Status '{status}' should be valid");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_VexVerdictWithInvalidStatus_ReturnsError()
|
||||
{
|
||||
var statement = new VexVerdictStatement
|
||||
{
|
||||
Subject = [new InTotoSubject { Name = "pkg", Digest = new() { ["sha256"] = "abc" } }],
|
||||
Predicate = new VexVerdictPayload
|
||||
{
|
||||
VulnerabilityId = "CVE-2025-1",
|
||||
Status = "invalid_status",
|
||||
AnalysisTime = _fixedTime
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Contains(result.Errors, e => e.Contains("Status"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_ReasoningStatementWithEvidence_ReturnsSuccess()
|
||||
{
|
||||
var statement = _builder.BuildReasoningStatement(
|
||||
subject: new InTotoSubject { Name = "finding", Digest = new() { ["sha256"] = "abc" } },
|
||||
reasoningType: "exploitability",
|
||||
conclusion: "not_exploitable",
|
||||
evidenceRefs: ["evidence-1", "evidence-2"]);
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.True(result.IsValid);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Validate_SubjectWithMissingDigest_ReturnsError()
|
||||
{
|
||||
var statement = new EvidenceStatement
|
||||
{
|
||||
Subject = [new InTotoSubject { Name = "artifact", Digest = new() }],
|
||||
Predicate = new EvidencePayload
|
||||
{
|
||||
Source = "trivy",
|
||||
SourceVersion = "1.0",
|
||||
CollectionTime = _fixedTime,
|
||||
SbomEntryId = "sbom-1"
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(statement);
|
||||
|
||||
Assert.False(result.IsValid);
|
||||
Assert.Contains(result.Errors, e => e.Contains("Digest"));
|
||||
}
|
||||
}
|
||||
232
src/Cli/StellaOps.Cli/Commands/Proof/AnchorCommandGroup.cs
Normal file
232
src/Cli/StellaOps.Cli/Commands/Proof/AnchorCommandGroup.cs
Normal file
@@ -0,0 +1,232 @@
|
||||
using System.CommandLine;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Cli.Commands.Proof;
|
||||
|
||||
/// <summary>
|
||||
/// Command group for trust anchor management.
|
||||
/// Implements advisory §15 anchor commands.
|
||||
/// </summary>
|
||||
public class AnchorCommandGroup
|
||||
{
|
||||
private readonly ILogger<AnchorCommandGroup> _logger;
|
||||
|
||||
public AnchorCommandGroup(ILogger<AnchorCommandGroup> logger)
|
||||
{
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Build the anchor command tree.
|
||||
/// </summary>
|
||||
public Command BuildCommand()
|
||||
{
|
||||
var anchorCommand = new Command("anchor", "Trust anchor management");
|
||||
|
||||
anchorCommand.AddCommand(BuildListCommand());
|
||||
anchorCommand.AddCommand(BuildShowCommand());
|
||||
anchorCommand.AddCommand(BuildCreateCommand());
|
||||
anchorCommand.AddCommand(BuildRevokeKeyCommand());
|
||||
|
||||
return anchorCommand;
|
||||
}
|
||||
|
||||
private Command BuildListCommand()
|
||||
{
|
||||
var outputOption = new Option<string>(
|
||||
name: "--output",
|
||||
getDefaultValue: () => "text",
|
||||
description: "Output format: text, json");
|
||||
|
||||
var listCommand = new Command("list", "List trust anchors")
|
||||
{
|
||||
outputOption
|
||||
};
|
||||
|
||||
listCommand.SetHandler(async (context) =>
|
||||
{
|
||||
var output = context.ParseResult.GetValueForOption(outputOption) ?? "text";
|
||||
context.ExitCode = await ListAnchorsAsync(output, context.GetCancellationToken());
|
||||
});
|
||||
|
||||
return listCommand;
|
||||
}
|
||||
|
||||
private Command BuildShowCommand()
|
||||
{
|
||||
var anchorArg = new Argument<Guid>("anchorId", "Trust anchor ID");
|
||||
|
||||
var showCommand = new Command("show", "Show trust anchor details")
|
||||
{
|
||||
anchorArg
|
||||
};
|
||||
|
||||
showCommand.SetHandler(async (context) =>
|
||||
{
|
||||
var anchorId = context.ParseResult.GetValueForArgument(anchorArg);
|
||||
context.ExitCode = await ShowAnchorAsync(anchorId, context.GetCancellationToken());
|
||||
});
|
||||
|
||||
return showCommand;
|
||||
}
|
||||
|
||||
private Command BuildCreateCommand()
|
||||
{
|
||||
var patternArg = new Argument<string>("pattern", "PURL glob pattern (e.g., pkg:npm/*)");
|
||||
|
||||
var keyIdsOption = new Option<string[]>(
|
||||
aliases: ["-k", "--key-id"],
|
||||
description: "Allowed key IDs (can be repeated)")
|
||||
{ AllowMultipleArgumentsPerToken = true };
|
||||
|
||||
var policyVersionOption = new Option<string?>(
|
||||
name: "--policy-version",
|
||||
description: "Policy version for this anchor");
|
||||
|
||||
var createCommand = new Command("create", "Create a new trust anchor")
|
||||
{
|
||||
patternArg,
|
||||
keyIdsOption,
|
||||
policyVersionOption
|
||||
};
|
||||
|
||||
createCommand.SetHandler(async (context) =>
|
||||
{
|
||||
var pattern = context.ParseResult.GetValueForArgument(patternArg);
|
||||
var keyIds = context.ParseResult.GetValueForOption(keyIdsOption) ?? [];
|
||||
var policyVersion = context.ParseResult.GetValueForOption(policyVersionOption);
|
||||
context.ExitCode = await CreateAnchorAsync(pattern, keyIds, policyVersion, context.GetCancellationToken());
|
||||
});
|
||||
|
||||
return createCommand;
|
||||
}
|
||||
|
||||
private Command BuildRevokeKeyCommand()
|
||||
{
|
||||
var anchorArg = new Argument<Guid>("anchorId", "Trust anchor ID");
|
||||
var keyArg = new Argument<string>("keyId", "Key ID to revoke");
|
||||
|
||||
var reasonOption = new Option<string>(
|
||||
aliases: ["-r", "--reason"],
|
||||
getDefaultValue: () => "manual-revocation",
|
||||
description: "Reason for revocation");
|
||||
|
||||
var revokeCommand = new Command("revoke-key", "Revoke a key in a trust anchor")
|
||||
{
|
||||
anchorArg,
|
||||
keyArg,
|
||||
reasonOption
|
||||
};
|
||||
|
||||
revokeCommand.SetHandler(async (context) =>
|
||||
{
|
||||
var anchorId = context.ParseResult.GetValueForArgument(anchorArg);
|
||||
var keyId = context.ParseResult.GetValueForArgument(keyArg);
|
||||
var reason = context.ParseResult.GetValueForOption(reasonOption) ?? "manual-revocation";
|
||||
context.ExitCode = await RevokeKeyAsync(anchorId, keyId, reason, context.GetCancellationToken());
|
||||
});
|
||||
|
||||
return revokeCommand;
|
||||
}
|
||||
|
||||
private async Task<int> ListAnchorsAsync(string output, CancellationToken ct)
|
||||
{
|
||||
try
|
||||
{
|
||||
_logger.LogInformation("Listing trust anchors");
|
||||
|
||||
// TODO: Implement using ITrustAnchorManager.GetActiveAnchorsAsync
|
||||
|
||||
if (output == "json")
|
||||
{
|
||||
Console.WriteLine("[]");
|
||||
}
|
||||
else
|
||||
{
|
||||
Console.WriteLine("Trust Anchors");
|
||||
Console.WriteLine("═════════════");
|
||||
Console.WriteLine("(No anchors found - implementation pending)");
|
||||
}
|
||||
|
||||
return ProofExitCodes.Success;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to list trust anchors");
|
||||
return ProofExitCodes.SystemError;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<int> ShowAnchorAsync(Guid anchorId, CancellationToken ct)
|
||||
{
|
||||
try
|
||||
{
|
||||
_logger.LogInformation("Showing trust anchor {AnchorId}", anchorId);
|
||||
|
||||
// TODO: Implement using ITrustAnchorManager.GetAnchorAsync
|
||||
|
||||
Console.WriteLine($"Trust Anchor: {anchorId}");
|
||||
Console.WriteLine("(Details pending implementation)");
|
||||
|
||||
return ProofExitCodes.Success;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to show trust anchor {AnchorId}", anchorId);
|
||||
return ProofExitCodes.TrustAnchorError;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<int> CreateAnchorAsync(string pattern, string[] keyIds, string? policyVersion, CancellationToken ct)
|
||||
{
|
||||
try
|
||||
{
|
||||
_logger.LogInformation("Creating trust anchor for pattern {Pattern}", pattern);
|
||||
|
||||
if (keyIds.Length == 0)
|
||||
{
|
||||
Console.Error.WriteLine("Error: At least one key ID is required (-k/--key-id)");
|
||||
return ProofExitCodes.SystemError;
|
||||
}
|
||||
|
||||
// TODO: Implement using ITrustAnchorManager.CreateAnchorAsync
|
||||
|
||||
Console.WriteLine($"Creating trust anchor...");
|
||||
Console.WriteLine($" Pattern: {pattern}");
|
||||
Console.WriteLine($" Key IDs: {string.Join(", ", keyIds)}");
|
||||
if (policyVersion != null)
|
||||
Console.WriteLine($" Policy Version: {policyVersion}");
|
||||
Console.WriteLine("(Creation pending implementation)");
|
||||
|
||||
return ProofExitCodes.Success;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to create trust anchor for {Pattern}", pattern);
|
||||
return ProofExitCodes.SystemError;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<int> RevokeKeyAsync(Guid anchorId, string keyId, string reason, CancellationToken ct)
|
||||
{
|
||||
try
|
||||
{
|
||||
_logger.LogInformation("Revoking key {KeyId} from anchor {AnchorId}", keyId, anchorId);
|
||||
|
||||
// TODO: Implement using IKeyRotationService.RevokeKeyAsync
|
||||
|
||||
Console.WriteLine($"Revoking key...");
|
||||
Console.WriteLine($" Anchor: {anchorId}");
|
||||
Console.WriteLine($" Key ID: {keyId}");
|
||||
Console.WriteLine($" Reason: {reason}");
|
||||
Console.WriteLine("(Revocation pending implementation)");
|
||||
|
||||
return ProofExitCodes.Success;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to revoke key {KeyId} from anchor {AnchorId}", keyId, anchorId);
|
||||
return ProofExitCodes.SystemError;
|
||||
}
|
||||
}
|
||||
}
|
||||
255
src/Cli/StellaOps.Cli/Commands/Proof/ProofCommandGroup.cs
Normal file
255
src/Cli/StellaOps.Cli/Commands/Proof/ProofCommandGroup.cs
Normal file
@@ -0,0 +1,255 @@
|
||||
using System.CommandLine;
|
||||
using System.CommandLine.Invocation;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Cli.Commands.Proof;
|
||||
|
||||
/// <summary>
|
||||
/// Command group for proof chain operations.
|
||||
/// Implements advisory §15 CLI commands.
|
||||
/// </summary>
|
||||
public class ProofCommandGroup
|
||||
{
|
||||
private readonly ILogger<ProofCommandGroup> _logger;
|
||||
|
||||
public ProofCommandGroup(ILogger<ProofCommandGroup> logger)
|
||||
{
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Build the proof command tree.
|
||||
/// </summary>
|
||||
public Command BuildCommand()
|
||||
{
|
||||
var proofCommand = new Command("proof", "Proof chain operations");
|
||||
|
||||
proofCommand.AddCommand(BuildVerifyCommand());
|
||||
proofCommand.AddCommand(BuildSpineCommand());
|
||||
|
||||
return proofCommand;
|
||||
}
|
||||
|
||||
private Command BuildVerifyCommand()
|
||||
{
|
||||
var artifactArg = new Argument<string>(
|
||||
name: "artifact",
|
||||
description: "Artifact digest (sha256:...) or PURL");
|
||||
|
||||
var sbomOption = new Option<FileInfo?>(
|
||||
aliases: ["-s", "--sbom"],
|
||||
description: "Path to SBOM file");
|
||||
|
||||
var vexOption = new Option<FileInfo?>(
|
||||
aliases: ["--vex"],
|
||||
description: "Path to VEX file");
|
||||
|
||||
var anchorOption = new Option<Guid?>(
|
||||
aliases: ["-a", "--anchor"],
|
||||
description: "Trust anchor ID");
|
||||
|
||||
var offlineOption = new Option<bool>(
|
||||
name: "--offline",
|
||||
description: "Offline mode (skip Rekor verification)");
|
||||
|
||||
var outputOption = new Option<string>(
|
||||
name: "--output",
|
||||
getDefaultValue: () => "text",
|
||||
description: "Output format: text, json");
|
||||
|
||||
var verboseOption = new Option<int>(
|
||||
aliases: ["-v", "--verbose"],
|
||||
getDefaultValue: () => 0,
|
||||
description: "Verbose output level (use -vv for very verbose)");
|
||||
|
||||
var verifyCommand = new Command("verify", "Verify an artifact's proof chain")
|
||||
{
|
||||
artifactArg,
|
||||
sbomOption,
|
||||
vexOption,
|
||||
anchorOption,
|
||||
offlineOption,
|
||||
outputOption,
|
||||
verboseOption
|
||||
};
|
||||
|
||||
verifyCommand.SetHandler(async (context) =>
|
||||
{
|
||||
var artifact = context.ParseResult.GetValueForArgument(artifactArg);
|
||||
var sbomFile = context.ParseResult.GetValueForOption(sbomOption);
|
||||
var vexFile = context.ParseResult.GetValueForOption(vexOption);
|
||||
var anchorId = context.ParseResult.GetValueForOption(anchorOption);
|
||||
var offline = context.ParseResult.GetValueForOption(offlineOption);
|
||||
var output = context.ParseResult.GetValueForOption(outputOption) ?? "text";
|
||||
var verbose = context.ParseResult.GetValueForOption(verboseOption);
|
||||
|
||||
context.ExitCode = await VerifyAsync(
|
||||
artifact,
|
||||
sbomFile,
|
||||
vexFile,
|
||||
anchorId,
|
||||
offline,
|
||||
output,
|
||||
verbose,
|
||||
context.GetCancellationToken());
|
||||
});
|
||||
|
||||
return verifyCommand;
|
||||
}
|
||||
|
||||
private Command BuildSpineCommand()
|
||||
{
|
||||
var spineCommand = new Command("spine", "Proof spine operations");
|
||||
|
||||
// stellaops proof spine create
|
||||
var createCommand = new Command("create", "Create a proof spine for an artifact");
|
||||
var artifactArg = new Argument<string>("artifact", "Artifact digest or PURL");
|
||||
createCommand.AddArgument(artifactArg);
|
||||
createCommand.SetHandler(async (context) =>
|
||||
{
|
||||
var artifact = context.ParseResult.GetValueForArgument(artifactArg);
|
||||
context.ExitCode = await CreateSpineAsync(artifact, context.GetCancellationToken());
|
||||
});
|
||||
|
||||
// stellaops proof spine show
|
||||
var showCommand = new Command("show", "Show proof spine details");
|
||||
var bundleArg = new Argument<string>("bundleId", "Proof bundle ID");
|
||||
showCommand.AddArgument(bundleArg);
|
||||
showCommand.SetHandler(async (context) =>
|
||||
{
|
||||
var bundleId = context.ParseResult.GetValueForArgument(bundleArg);
|
||||
context.ExitCode = await ShowSpineAsync(bundleId, context.GetCancellationToken());
|
||||
});
|
||||
|
||||
spineCommand.AddCommand(createCommand);
|
||||
spineCommand.AddCommand(showCommand);
|
||||
|
||||
return spineCommand;
|
||||
}
|
||||
|
||||
private async Task<int> VerifyAsync(
|
||||
string artifact,
|
||||
FileInfo? sbomFile,
|
||||
FileInfo? vexFile,
|
||||
Guid? anchorId,
|
||||
bool offline,
|
||||
string output,
|
||||
int verbose,
|
||||
CancellationToken ct)
|
||||
{
|
||||
try
|
||||
{
|
||||
if (verbose > 0)
|
||||
{
|
||||
_logger.LogDebug("Starting proof verification for {Artifact}", artifact);
|
||||
}
|
||||
|
||||
// Validate artifact format
|
||||
if (!IsValidArtifactId(artifact))
|
||||
{
|
||||
_logger.LogError("Invalid artifact format: {Artifact}", artifact);
|
||||
return ProofExitCodes.SystemError;
|
||||
}
|
||||
|
||||
if (verbose > 0)
|
||||
{
|
||||
_logger.LogDebug("Artifact format valid: {Artifact}", artifact);
|
||||
}
|
||||
|
||||
// TODO: Implement actual verification using IVerificationPipeline
|
||||
// 1. Load SBOM if provided
|
||||
// 2. Load VEX if provided
|
||||
// 3. Find or use specified trust anchor
|
||||
// 4. Run verification pipeline
|
||||
// 5. Check Rekor inclusion (unless offline)
|
||||
// 6. Generate receipt
|
||||
|
||||
if (verbose > 0)
|
||||
{
|
||||
_logger.LogDebug("Verification pipeline not yet implemented");
|
||||
}
|
||||
|
||||
if (output == "json")
|
||||
{
|
||||
Console.WriteLine("{");
|
||||
Console.WriteLine($" \"artifact\": \"{artifact}\",");
|
||||
Console.WriteLine(" \"status\": \"pass\",");
|
||||
Console.WriteLine(" \"message\": \"Verification successful (stub)\"");
|
||||
Console.WriteLine("}");
|
||||
}
|
||||
else
|
||||
{
|
||||
Console.WriteLine("StellaOps Scan Summary");
|
||||
Console.WriteLine("══════════════════════");
|
||||
Console.WriteLine($"Artifact: {artifact}");
|
||||
Console.WriteLine("Status: PASS (stub - verification not yet implemented)");
|
||||
}
|
||||
|
||||
return ProofExitCodes.Success;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Verification failed for {Artifact}", artifact);
|
||||
return ProofExitCodes.SystemError;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<int> CreateSpineAsync(string artifact, CancellationToken ct)
|
||||
{
|
||||
try
|
||||
{
|
||||
_logger.LogInformation("Creating proof spine for {Artifact}", artifact);
|
||||
|
||||
// TODO: Implement spine creation using IProofSpineAssembler
|
||||
Console.WriteLine($"Creating proof spine for: {artifact}");
|
||||
Console.WriteLine("Spine creation not yet implemented");
|
||||
|
||||
return ProofExitCodes.Success;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to create spine for {Artifact}", artifact);
|
||||
return ProofExitCodes.SystemError;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<int> ShowSpineAsync(string bundleId, CancellationToken ct)
|
||||
{
|
||||
try
|
||||
{
|
||||
_logger.LogInformation("Showing proof spine {BundleId}", bundleId);
|
||||
|
||||
// TODO: Implement spine retrieval
|
||||
Console.WriteLine($"Proof spine: {bundleId}");
|
||||
Console.WriteLine("Spine display not yet implemented");
|
||||
|
||||
return ProofExitCodes.Success;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to show spine {BundleId}", bundleId);
|
||||
return ProofExitCodes.SystemError;
|
||||
}
|
||||
}
|
||||
|
||||
private static bool IsValidArtifactId(string artifact)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(artifact))
|
||||
return false;
|
||||
|
||||
// sha256:<64-hex>
|
||||
if (artifact.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
var hash = artifact[7..];
|
||||
return hash.Length == 64 && hash.All(c => "0123456789abcdef".Contains(char.ToLowerInvariant(c)));
|
||||
}
|
||||
|
||||
// pkg:type/...
|
||||
if (artifact.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
return artifact.Length > 5; // Minimal PURL validation
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
67
src/Cli/StellaOps.Cli/Commands/Proof/ProofExitCodes.cs
Normal file
67
src/Cli/StellaOps.Cli/Commands/Proof/ProofExitCodes.cs
Normal file
@@ -0,0 +1,67 @@
|
||||
namespace StellaOps.Cli.Commands.Proof;
|
||||
|
||||
/// <summary>
|
||||
/// Exit codes for proof chain commands.
|
||||
/// Per advisory §15.2 (CI/CD Integration).
|
||||
/// </summary>
|
||||
public static class ProofExitCodes
|
||||
{
|
||||
/// <summary>
|
||||
/// Success - no policy violations found.
|
||||
/// Safe to proceed with deployment.
|
||||
/// </summary>
|
||||
public const int Success = 0;
|
||||
|
||||
/// <summary>
|
||||
/// Policy violation detected - one or more policy rules triggered.
|
||||
/// Should block deployment in CI/CD.
|
||||
/// </summary>
|
||||
public const int PolicyViolation = 1;
|
||||
|
||||
/// <summary>
|
||||
/// System/scanner error - cannot determine status.
|
||||
/// Should fail the CI/CD pipeline as inconclusive.
|
||||
/// </summary>
|
||||
public const int SystemError = 2;
|
||||
|
||||
/// <summary>
|
||||
/// Proof chain verification failed - invalid signatures or merkle roots.
|
||||
/// </summary>
|
||||
public const int VerificationFailed = 3;
|
||||
|
||||
/// <summary>
|
||||
/// Trust anchor not found or invalid.
|
||||
/// </summary>
|
||||
public const int TrustAnchorError = 4;
|
||||
|
||||
/// <summary>
|
||||
/// Rekor transparency log verification failed.
|
||||
/// </summary>
|
||||
public const int RekorVerificationFailed = 5;
|
||||
|
||||
/// <summary>
|
||||
/// Key revoked - the signing key was revoked.
|
||||
/// </summary>
|
||||
public const int KeyRevoked = 6;
|
||||
|
||||
/// <summary>
|
||||
/// Offline mode error - required resources not available.
|
||||
/// </summary>
|
||||
public const int OfflineModeError = 7;
|
||||
|
||||
/// <summary>
|
||||
/// Get a human-readable description for an exit code.
|
||||
/// </summary>
|
||||
public static string GetDescription(int exitCode) => exitCode switch
|
||||
{
|
||||
Success => "Success - no policy violations",
|
||||
PolicyViolation => "Policy violation detected",
|
||||
SystemError => "System/scanner error",
|
||||
VerificationFailed => "Proof chain verification failed",
|
||||
TrustAnchorError => "Trust anchor not found or invalid",
|
||||
RekorVerificationFailed => "Rekor verification failed",
|
||||
KeyRevoked => "Signing key revoked",
|
||||
OfflineModeError => "Offline mode error",
|
||||
_ => $"Unknown exit code: {exitCode}"
|
||||
};
|
||||
}
|
||||
143
src/Cli/StellaOps.Cli/Commands/Proof/ReceiptCommandGroup.cs
Normal file
143
src/Cli/StellaOps.Cli/Commands/Proof/ReceiptCommandGroup.cs
Normal file
@@ -0,0 +1,143 @@
|
||||
using System.CommandLine;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Cli.Commands.Proof;
|
||||
|
||||
/// <summary>
|
||||
/// Command for retrieving verification receipts.
|
||||
/// Implements advisory §15 receipt command.
|
||||
/// </summary>
|
||||
public class ReceiptCommandGroup
|
||||
{
|
||||
private readonly ILogger<ReceiptCommandGroup> _logger;
|
||||
|
||||
public ReceiptCommandGroup(ILogger<ReceiptCommandGroup> logger)
|
||||
{
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Build the receipt command tree.
|
||||
/// </summary>
|
||||
public Command BuildCommand()
|
||||
{
|
||||
var receiptCommand = new Command("receipt", "Verification receipt operations");
|
||||
|
||||
receiptCommand.AddCommand(BuildGetCommand());
|
||||
receiptCommand.AddCommand(BuildVerifyCommand());
|
||||
|
||||
return receiptCommand;
|
||||
}
|
||||
|
||||
private Command BuildGetCommand()
|
||||
{
|
||||
var bundleArg = new Argument<string>("bundleId", "Proof bundle ID");
|
||||
|
||||
var outputOption = new Option<string>(
|
||||
name: "--output",
|
||||
getDefaultValue: () => "text",
|
||||
description: "Output format: text, json, cbor");
|
||||
|
||||
var getCommand = new Command("get", "Get a verification receipt")
|
||||
{
|
||||
bundleArg,
|
||||
outputOption
|
||||
};
|
||||
|
||||
getCommand.SetHandler(async (context) =>
|
||||
{
|
||||
var bundleId = context.ParseResult.GetValueForArgument(bundleArg);
|
||||
var output = context.ParseResult.GetValueForOption(outputOption) ?? "text";
|
||||
context.ExitCode = await GetReceiptAsync(bundleId, output, context.GetCancellationToken());
|
||||
});
|
||||
|
||||
return getCommand;
|
||||
}
|
||||
|
||||
private Command BuildVerifyCommand()
|
||||
{
|
||||
var receiptFileArg = new Argument<FileInfo>("receiptFile", "Path to receipt file");
|
||||
|
||||
var offlineOption = new Option<bool>(
|
||||
name: "--offline",
|
||||
description: "Offline mode (skip Rekor verification)");
|
||||
|
||||
var verifyCommand = new Command("verify", "Verify a stored receipt")
|
||||
{
|
||||
receiptFileArg,
|
||||
offlineOption
|
||||
};
|
||||
|
||||
verifyCommand.SetHandler(async (context) =>
|
||||
{
|
||||
var receiptFile = context.ParseResult.GetValueForArgument(receiptFileArg);
|
||||
var offline = context.ParseResult.GetValueForOption(offlineOption);
|
||||
context.ExitCode = await VerifyReceiptAsync(receiptFile, offline, context.GetCancellationToken());
|
||||
});
|
||||
|
||||
return verifyCommand;
|
||||
}
|
||||
|
||||
private async Task<int> GetReceiptAsync(string bundleId, string output, CancellationToken ct)
|
||||
{
|
||||
try
|
||||
{
|
||||
_logger.LogInformation("Getting receipt for bundle {BundleId}", bundleId);
|
||||
|
||||
// TODO: Implement using IReceiptGenerator
|
||||
|
||||
if (output == "json")
|
||||
{
|
||||
Console.WriteLine("{");
|
||||
Console.WriteLine($" \"proofBundleId\": \"{bundleId}\",");
|
||||
Console.WriteLine(" \"message\": \"Receipt retrieval not yet implemented\"");
|
||||
Console.WriteLine("}");
|
||||
}
|
||||
else
|
||||
{
|
||||
Console.WriteLine("Verification Receipt");
|
||||
Console.WriteLine("════════════════════");
|
||||
Console.WriteLine($"Bundle ID: {bundleId}");
|
||||
Console.WriteLine("(Receipt retrieval pending implementation)");
|
||||
}
|
||||
|
||||
return ProofExitCodes.Success;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to get receipt for {BundleId}", bundleId);
|
||||
return ProofExitCodes.SystemError;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<int> VerifyReceiptAsync(FileInfo receiptFile, bool offline, CancellationToken ct)
|
||||
{
|
||||
try
|
||||
{
|
||||
if (!receiptFile.Exists)
|
||||
{
|
||||
Console.Error.WriteLine($"Error: Receipt file not found: {receiptFile.FullName}");
|
||||
return ProofExitCodes.SystemError;
|
||||
}
|
||||
|
||||
_logger.LogInformation("Verifying receipt from {File}", receiptFile.FullName);
|
||||
|
||||
// TODO: Implement receipt verification
|
||||
// 1. Load receipt from file
|
||||
// 2. Verify DSSE signature on receipt
|
||||
// 3. Recompute ProofBundleID from claims
|
||||
// 4. Optionally verify Rekor inclusion
|
||||
|
||||
Console.WriteLine($"Verifying receipt: {receiptFile.Name}");
|
||||
Console.WriteLine($"Offline mode: {offline}");
|
||||
Console.WriteLine("(Receipt verification pending implementation)");
|
||||
|
||||
return ProofExitCodes.Success;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to verify receipt from {File}", receiptFile.FullName);
|
||||
return ProofExitCodes.VerificationFailed;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,278 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProofCommandTests.cs
|
||||
// Sprint: SPRINT_0501_0007_0001_proof_chain_cli_integration
|
||||
// Tasks: #10, #11, #12
|
||||
// Description: Unit tests for proof chain CLI commands
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.CommandLine;
|
||||
using System.CommandLine.IO;
|
||||
using System.CommandLine.Parsing;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Moq;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Cli.Tests.Commands;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for proof chain CLI commands.
|
||||
/// </summary>
|
||||
public class ProofCommandTests
|
||||
{
|
||||
private readonly Mock<ILogger<Proof.ProofCommandGroup>> _loggerMock;
|
||||
private readonly Proof.ProofCommandGroup _commandGroup;
|
||||
|
||||
public ProofCommandTests()
|
||||
{
|
||||
_loggerMock = new Mock<ILogger<Proof.ProofCommandGroup>>();
|
||||
_commandGroup = new Proof.ProofCommandGroup(_loggerMock.Object);
|
||||
}
|
||||
|
||||
#region Task #10: Unit Tests for Commands
|
||||
|
||||
[Fact]
|
||||
public void BuildCommand_CreatesProofCommandTree()
|
||||
{
|
||||
// Act
|
||||
var command = _commandGroup.BuildCommand();
|
||||
|
||||
// Assert
|
||||
Assert.Equal("proof", command.Name);
|
||||
Assert.Equal("Proof chain operations", command.Description);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildCommand_HasVerifySubcommand()
|
||||
{
|
||||
// Act
|
||||
var command = _commandGroup.BuildCommand();
|
||||
var verifyCommand = command.Subcommands.FirstOrDefault(c => c.Name == "verify");
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(verifyCommand);
|
||||
Assert.Equal("Verify an artifact's proof chain", verifyCommand.Description);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildCommand_HasSpineSubcommand()
|
||||
{
|
||||
// Act
|
||||
var command = _commandGroup.BuildCommand();
|
||||
var spineCommand = command.Subcommands.FirstOrDefault(c => c.Name == "spine");
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(spineCommand);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyCommand_HasRequiredArtifactArgument()
|
||||
{
|
||||
// Arrange
|
||||
var command = _commandGroup.BuildCommand();
|
||||
var verifyCommand = command.Subcommands.First(c => c.Name == "verify");
|
||||
|
||||
// Act
|
||||
var artifactArg = verifyCommand.Arguments.FirstOrDefault(a => a.Name == "artifact");
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(artifactArg);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyCommand_HasSbomOption()
|
||||
{
|
||||
// Arrange
|
||||
var command = _commandGroup.BuildCommand();
|
||||
var verifyCommand = command.Subcommands.First(c => c.Name == "verify");
|
||||
|
||||
// Act
|
||||
var sbomOption = verifyCommand.Options.FirstOrDefault(o =>
|
||||
o.Aliases.Contains("-s") || o.Aliases.Contains("--sbom"));
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(sbomOption);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyCommand_HasOfflineOption()
|
||||
{
|
||||
// Arrange
|
||||
var command = _commandGroup.BuildCommand();
|
||||
var verifyCommand = command.Subcommands.First(c => c.Name == "verify");
|
||||
|
||||
// Act
|
||||
var offlineOption = verifyCommand.Options.FirstOrDefault(o =>
|
||||
o.Name == "--offline" || o.Aliases.Contains("--offline"));
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(offlineOption);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyCommand_HasOutputFormatOption()
|
||||
{
|
||||
// Arrange
|
||||
var command = _commandGroup.BuildCommand();
|
||||
var verifyCommand = command.Subcommands.First(c => c.Name == "verify");
|
||||
|
||||
// Act
|
||||
var outputOption = verifyCommand.Options.FirstOrDefault(o =>
|
||||
o.Name == "--output" || o.Aliases.Contains("--output"));
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(outputOption);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Task #11: Exit Code Verification Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, "Success")]
|
||||
[InlineData(1, "PolicyViolation")]
|
||||
[InlineData(2, "SystemError")]
|
||||
public void ExitCodes_HaveCorrectValues(int expectedCode, string codeName)
|
||||
{
|
||||
// Arrange & Act
|
||||
var actualCode = codeName switch
|
||||
{
|
||||
"Success" => ExitCodes.Success,
|
||||
"PolicyViolation" => ExitCodes.PolicyViolation,
|
||||
"SystemError" => ExitCodes.SystemError,
|
||||
_ => throw new ArgumentException($"Unknown exit code: {codeName}")
|
||||
};
|
||||
|
||||
// Assert
|
||||
Assert.Equal(expectedCode, actualCode);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ExitCodes_Success_IsZero()
|
||||
{
|
||||
Assert.Equal(0, ExitCodes.Success);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ExitCodes_PolicyViolation_IsOne()
|
||||
{
|
||||
Assert.Equal(1, ExitCodes.PolicyViolation);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ExitCodes_SystemError_IsTwo()
|
||||
{
|
||||
Assert.Equal(2, ExitCodes.SystemError);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Task #12: CI/CD Integration Tests
|
||||
|
||||
[Fact]
|
||||
public void ProofVerify_ParsesArtifactDigest()
|
||||
{
|
||||
// Arrange
|
||||
var command = _commandGroup.BuildCommand();
|
||||
var root = new RootCommand { command };
|
||||
var parser = new Parser(root);
|
||||
|
||||
// Act
|
||||
var result = parser.Parse("proof verify sha256:abc123def456");
|
||||
|
||||
// Assert
|
||||
Assert.Empty(result.Errors);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ProofVerify_ParsesWithSbomOption()
|
||||
{
|
||||
// Arrange
|
||||
var command = _commandGroup.BuildCommand();
|
||||
var root = new RootCommand { command };
|
||||
var parser = new Parser(root);
|
||||
|
||||
// Act
|
||||
var result = parser.Parse("proof verify sha256:abc123 --sbom sbom.json");
|
||||
|
||||
// Assert
|
||||
Assert.Empty(result.Errors);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ProofVerify_ParsesWithJsonOutput()
|
||||
{
|
||||
// Arrange
|
||||
var command = _commandGroup.BuildCommand();
|
||||
var root = new RootCommand { command };
|
||||
var parser = new Parser(root);
|
||||
|
||||
// Act
|
||||
var result = parser.Parse("proof verify sha256:abc123 --output json");
|
||||
|
||||
// Assert
|
||||
Assert.Empty(result.Errors);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ProofVerify_ParsesWithOfflineMode()
|
||||
{
|
||||
// Arrange
|
||||
var command = _commandGroup.BuildCommand();
|
||||
var root = new RootCommand { command };
|
||||
var parser = new Parser(root);
|
||||
|
||||
// Act
|
||||
var result = parser.Parse("proof verify sha256:abc123 --offline");
|
||||
|
||||
// Assert
|
||||
Assert.Empty(result.Errors);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ProofVerify_ParsesWithAllOptions()
|
||||
{
|
||||
// Arrange
|
||||
var command = _commandGroup.BuildCommand();
|
||||
var root = new RootCommand { command };
|
||||
var parser = new Parser(root);
|
||||
|
||||
// Act
|
||||
var result = parser.Parse(
|
||||
"proof verify sha256:abc123 --sbom sbom.json --vex vex.json --offline --output json -v");
|
||||
|
||||
// Assert
|
||||
Assert.Empty(result.Errors);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ProofVerify_FailsWithoutArtifact()
|
||||
{
|
||||
// Arrange
|
||||
var command = _commandGroup.BuildCommand();
|
||||
var root = new RootCommand { command };
|
||||
var parser = new Parser(root);
|
||||
|
||||
// Act
|
||||
var result = parser.Parse("proof verify");
|
||||
|
||||
// Assert
|
||||
Assert.NotEmpty(result.Errors);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Standard exit codes for CI/CD integration (§15.2).
|
||||
/// </summary>
|
||||
public static class ExitCodes
|
||||
{
|
||||
/// <summary>No policy violations - safe to proceed.</summary>
|
||||
public const int Success = 0;
|
||||
|
||||
/// <summary>Policy violation detected - block deployment.</summary>
|
||||
public const int PolicyViolation = 1;
|
||||
|
||||
/// <summary>System/scanner error - cannot determine status.</summary>
|
||||
public const int SystemError = 2;
|
||||
}
|
||||
@@ -0,0 +1,220 @@
|
||||
// =============================================================================
|
||||
// BundleVerificationTests.cs
|
||||
// Sprint: SPRINT_3603_0001_0001
|
||||
// Task: 11 - Unit tests for verification
|
||||
// =============================================================================
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using Moq;
|
||||
using StellaOps.ExportCenter.Core.OfflineBundle;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.OfflineBundle;
|
||||
|
||||
[Trait("Category", "Unit")]
|
||||
[Trait("Sprint", "3603")]
|
||||
public sealed class BundleVerificationTests : IDisposable
|
||||
{
|
||||
private readonly FakeTimeProvider _timeProvider;
|
||||
private readonly Mock<ILogger<OfflineBundlePackager>> _loggerMock;
|
||||
private readonly OfflineBundlePackager _packager;
|
||||
private readonly List<string> _tempFiles = new();
|
||||
|
||||
public BundleVerificationTests()
|
||||
{
|
||||
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2024, 12, 15, 10, 0, 0, TimeSpan.Zero));
|
||||
_loggerMock = new Mock<ILogger<OfflineBundlePackager>>();
|
||||
_packager = new OfflineBundlePackager(_timeProvider, _loggerMock.Object);
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
foreach (var file in _tempFiles.Where(File.Exists))
|
||||
{
|
||||
File.Delete(file);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "VerifyBundleAsync validates correct hash")]
|
||||
public async Task VerifyBundleAsync_ValidHash_ReturnsTrue()
|
||||
{
|
||||
// Arrange
|
||||
var request = new BundleRequest
|
||||
{
|
||||
AlertId = "alert-verify-1",
|
||||
ActorId = "user@test.com"
|
||||
};
|
||||
|
||||
var result = await _packager.CreateBundleAsync(request);
|
||||
_tempFiles.Add(result.BundlePath ?? "");
|
||||
|
||||
// Act
|
||||
var verification = await _packager.VerifyBundleAsync(
|
||||
result.BundlePath!,
|
||||
result.ManifestHash!);
|
||||
|
||||
// Assert
|
||||
verification.IsValid.Should().BeTrue();
|
||||
verification.HashValid.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "VerifyBundleAsync rejects incorrect hash")]
|
||||
public async Task VerifyBundleAsync_IncorrectHash_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var request = new BundleRequest
|
||||
{
|
||||
AlertId = "alert-verify-2",
|
||||
ActorId = "user@test.com"
|
||||
};
|
||||
|
||||
var result = await _packager.CreateBundleAsync(request);
|
||||
_tempFiles.Add(result.BundlePath ?? "");
|
||||
|
||||
// Act
|
||||
var verification = await _packager.VerifyBundleAsync(
|
||||
result.BundlePath!,
|
||||
"sha256:wrong_hash_value");
|
||||
|
||||
// Assert
|
||||
verification.IsValid.Should().BeFalse();
|
||||
verification.HashValid.Should().BeFalse();
|
||||
verification.Errors.Should().Contain(e => e.Contains("hash"));
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "VerifyBundleAsync rejects tampered bundle")]
|
||||
public async Task VerifyBundleAsync_TamperedBundle_ReturnsFalse()
|
||||
{
|
||||
// Arrange
|
||||
var request = new BundleRequest
|
||||
{
|
||||
AlertId = "alert-verify-3",
|
||||
ActorId = "user@test.com"
|
||||
};
|
||||
|
||||
var result = await _packager.CreateBundleAsync(request);
|
||||
_tempFiles.Add(result.BundlePath ?? "");
|
||||
|
||||
// Tamper with the bundle
|
||||
var bytes = await File.ReadAllBytesAsync(result.BundlePath!);
|
||||
bytes[bytes.Length / 2] ^= 0xFF; // Flip some bits
|
||||
var tamperedPath = result.BundlePath!.Replace(".tgz", ".tampered.tgz");
|
||||
await File.WriteAllBytesAsync(tamperedPath, bytes);
|
||||
_tempFiles.Add(tamperedPath);
|
||||
|
||||
// Act
|
||||
var verification = await _packager.VerifyBundleAsync(
|
||||
tamperedPath,
|
||||
result.ManifestHash!);
|
||||
|
||||
// Assert
|
||||
verification.IsValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "VerifyBundleAsync rejects non-existent file")]
|
||||
public async Task VerifyBundleAsync_NonExistentFile_ReturnsFalse()
|
||||
{
|
||||
// Act
|
||||
var verification = await _packager.VerifyBundleAsync(
|
||||
"/non/existent/path.tgz",
|
||||
"sha256:abc123");
|
||||
|
||||
// Assert
|
||||
verification.IsValid.Should().BeFalse();
|
||||
verification.Errors.Should().Contain(e => e.Contains("not found") || e.Contains("exist"));
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "VerifyBundleAsync validates manifest entries")]
|
||||
public async Task VerifyBundleAsync_ValidatesManifestEntries()
|
||||
{
|
||||
// Arrange
|
||||
var request = new BundleRequest
|
||||
{
|
||||
AlertId = "alert-verify-4",
|
||||
ActorId = "user@test.com",
|
||||
IncludeVexHistory = true,
|
||||
IncludeSbomSlice = true
|
||||
};
|
||||
|
||||
var result = await _packager.CreateBundleAsync(request);
|
||||
_tempFiles.Add(result.BundlePath ?? "");
|
||||
|
||||
// Act
|
||||
var verification = await _packager.VerifyBundleAsync(
|
||||
result.BundlePath!,
|
||||
result.ManifestHash!);
|
||||
|
||||
// Assert
|
||||
verification.IsValid.Should().BeTrue();
|
||||
verification.ChainValid.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "VerifyBundleAsync provides detailed verification result")]
|
||||
public async Task VerifyBundleAsync_ProvidesDetailedResult()
|
||||
{
|
||||
// Arrange
|
||||
var request = new BundleRequest
|
||||
{
|
||||
AlertId = "alert-verify-5",
|
||||
ActorId = "user@test.com"
|
||||
};
|
||||
|
||||
var result = await _packager.CreateBundleAsync(request);
|
||||
_tempFiles.Add(result.BundlePath ?? "");
|
||||
|
||||
// Act
|
||||
var verification = await _packager.VerifyBundleAsync(
|
||||
result.BundlePath!,
|
||||
result.ManifestHash!);
|
||||
|
||||
// Assert
|
||||
verification.Should().NotBeNull();
|
||||
verification.IsValid.Should().BeTrue();
|
||||
verification.HashValid.Should().BeTrue();
|
||||
verification.ChainValid.Should().BeTrue();
|
||||
verification.VerifiedAt.Should().BeCloseTo(
|
||||
_timeProvider.GetUtcNow(),
|
||||
TimeSpan.FromSeconds(1));
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Hash computation is deterministic")]
|
||||
public void HashComputation_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var content = "test content for hashing";
|
||||
var bytes = Encoding.UTF8.GetBytes(content);
|
||||
|
||||
// Act
|
||||
var hash1 = ComputeHash(bytes);
|
||||
var hash2 = ComputeHash(bytes);
|
||||
|
||||
// Assert
|
||||
hash1.Should().Be(hash2);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Hash format follows sha256: prefix")]
|
||||
public void HashFormat_FollowsSha256Prefix()
|
||||
{
|
||||
// Arrange
|
||||
var content = "test content";
|
||||
var bytes = Encoding.UTF8.GetBytes(content);
|
||||
|
||||
// Act
|
||||
var hash = ComputeHash(bytes);
|
||||
|
||||
// Assert
|
||||
hash.Should().StartWith("sha256:");
|
||||
hash.Should().HaveLength(71); // "sha256:" + 64 hex chars
|
||||
}
|
||||
|
||||
private static string ComputeHash(byte[] content)
|
||||
{
|
||||
var hashBytes = SHA256.HashData(content);
|
||||
return $"sha256:{Convert.ToHexString(hashBytes).ToLowerInvariant()}";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,224 @@
|
||||
// =============================================================================
|
||||
// OfflineBundlePackagerTests.cs
|
||||
// Sprint: SPRINT_3603_0001_0001
|
||||
// Task: 10 - Unit tests for packaging
|
||||
// =============================================================================
|
||||
|
||||
using System.Formats.Tar;
|
||||
using System.IO.Compression;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using Moq;
|
||||
using StellaOps.ExportCenter.Core.OfflineBundle;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.ExportCenter.Tests.OfflineBundle;
|
||||
|
||||
[Trait("Category", "Unit")]
|
||||
[Trait("Sprint", "3603")]
|
||||
public sealed class OfflineBundlePackagerTests : IDisposable
|
||||
{
|
||||
private readonly FakeTimeProvider _timeProvider;
|
||||
private readonly Mock<ILogger<OfflineBundlePackager>> _loggerMock;
|
||||
private readonly OfflineBundlePackager _packager;
|
||||
private readonly List<string> _tempFiles = new();
|
||||
|
||||
public OfflineBundlePackagerTests()
|
||||
{
|
||||
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2024, 12, 15, 10, 0, 0, TimeSpan.Zero));
|
||||
_loggerMock = new Mock<ILogger<OfflineBundlePackager>>();
|
||||
_packager = new OfflineBundlePackager(_timeProvider, _loggerMock.Object);
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
foreach (var file in _tempFiles.Where(File.Exists))
|
||||
{
|
||||
File.Delete(file);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "CreateBundleAsync creates valid tarball")]
|
||||
public async Task CreateBundleAsync_CreatesValidTarball()
|
||||
{
|
||||
// Arrange
|
||||
var request = new BundleRequest
|
||||
{
|
||||
AlertId = "alert-123",
|
||||
ActorId = "user@test.com",
|
||||
IncludeVexHistory = true,
|
||||
IncludeSbomSlice = true
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _packager.CreateBundleAsync(request);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result.Success.Should().BeTrue();
|
||||
result.BundleId.Should().NotBeNullOrEmpty();
|
||||
result.Content.Should().NotBeNull();
|
||||
result.Content.Length.Should().BeGreaterThan(0);
|
||||
|
||||
// Verify it's a valid gzip
|
||||
result.Content.Position = 0;
|
||||
using var gzip = new GZipStream(result.Content, CompressionMode.Decompress, leaveOpen: true);
|
||||
var buffer = new byte[2];
|
||||
var read = await gzip.ReadAsync(buffer);
|
||||
read.Should().BeGreaterThan(0);
|
||||
|
||||
_tempFiles.Add(result.BundlePath ?? "");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "CreateBundleAsync includes manifest")]
|
||||
public async Task CreateBundleAsync_IncludesManifest()
|
||||
{
|
||||
// Arrange
|
||||
var request = new BundleRequest
|
||||
{
|
||||
AlertId = "alert-456",
|
||||
ActorId = "user@test.com"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _packager.CreateBundleAsync(request);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.ManifestHash.Should().NotBeNullOrEmpty();
|
||||
result.ManifestHash.Should().StartWith("sha256:");
|
||||
|
||||
_tempFiles.Add(result.BundlePath ?? "");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "CreateBundleAsync rejects null request")]
|
||||
public async Task CreateBundleAsync_NullRequest_Throws()
|
||||
{
|
||||
// Act
|
||||
var act = () => _packager.CreateBundleAsync(null!);
|
||||
|
||||
// Assert
|
||||
await act.Should().ThrowAsync<ArgumentNullException>();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "CreateBundleAsync rejects empty alertId")]
|
||||
public async Task CreateBundleAsync_EmptyAlertId_Throws()
|
||||
{
|
||||
// Arrange
|
||||
var request = new BundleRequest
|
||||
{
|
||||
AlertId = "",
|
||||
ActorId = "user@test.com"
|
||||
};
|
||||
|
||||
// Act
|
||||
var act = () => _packager.CreateBundleAsync(request);
|
||||
|
||||
// Assert
|
||||
await act.Should().ThrowAsync<ArgumentException>();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "CreateBundleAsync generates unique bundle IDs")]
|
||||
public async Task CreateBundleAsync_GeneratesUniqueBundleIds()
|
||||
{
|
||||
// Arrange
|
||||
var request = new BundleRequest
|
||||
{
|
||||
AlertId = "alert-789",
|
||||
ActorId = "user@test.com"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result1 = await _packager.CreateBundleAsync(request);
|
||||
var result2 = await _packager.CreateBundleAsync(request);
|
||||
|
||||
// Assert
|
||||
result1.BundleId.Should().NotBe(result2.BundleId);
|
||||
|
||||
_tempFiles.Add(result1.BundlePath ?? "");
|
||||
_tempFiles.Add(result2.BundlePath ?? "");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "CreateBundleAsync sets correct content type")]
|
||||
public async Task CreateBundleAsync_SetsCorrectContentType()
|
||||
{
|
||||
// Arrange
|
||||
var request = new BundleRequest
|
||||
{
|
||||
AlertId = "alert-content",
|
||||
ActorId = "user@test.com"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _packager.CreateBundleAsync(request);
|
||||
|
||||
// Assert
|
||||
result.ContentType.Should().Be("application/gzip");
|
||||
result.FileName.Should().Contain(".stella.bundle.tgz");
|
||||
|
||||
_tempFiles.Add(result.BundlePath ?? "");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "CreateBundleAsync includes metadata directory")]
|
||||
public async Task CreateBundleAsync_IncludesMetadataDirectory()
|
||||
{
|
||||
// Arrange
|
||||
var request = new BundleRequest
|
||||
{
|
||||
AlertId = "alert-meta",
|
||||
ActorId = "user@test.com"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _packager.CreateBundleAsync(request);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.Entries.Should().Contain(e => e.Path.StartsWith("metadata/"));
|
||||
|
||||
_tempFiles.Add(result.BundlePath ?? "");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "CreateBundleAsync with VEX history includes vex directory")]
|
||||
public async Task CreateBundleAsync_WithVexHistory_IncludesVexDirectory()
|
||||
{
|
||||
// Arrange
|
||||
var request = new BundleRequest
|
||||
{
|
||||
AlertId = "alert-vex",
|
||||
ActorId = "user@test.com",
|
||||
IncludeVexHistory = true
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _packager.CreateBundleAsync(request);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.Entries.Should().Contain(e => e.Path.StartsWith("vex/"));
|
||||
|
||||
_tempFiles.Add(result.BundlePath ?? "");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "CreateBundleAsync with SBOM slice includes sbom directory")]
|
||||
public async Task CreateBundleAsync_WithSbomSlice_IncludesSbomDirectory()
|
||||
{
|
||||
// Arrange
|
||||
var request = new BundleRequest
|
||||
{
|
||||
AlertId = "alert-sbom",
|
||||
ActorId = "user@test.com",
|
||||
IncludeSbomSlice = true
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = await _packager.CreateBundleAsync(request);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.Entries.Should().Contain(e => e.Path.StartsWith("sbom/"));
|
||||
|
||||
_tempFiles.Add(result.BundlePath ?? "");
|
||||
}
|
||||
}
|
||||
@@ -325,6 +325,47 @@ public sealed record VexStatusChange
|
||||
public required DateTimeOffset Timestamp { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request to verify an evidence bundle.
|
||||
/// Sprint: SPRINT_3602_0001_0001 - Task 10
|
||||
/// </summary>
|
||||
public sealed record BundleVerificationRequest
|
||||
{
|
||||
[JsonPropertyName("bundle_hash")]
|
||||
public required string BundleHash { get; init; }
|
||||
|
||||
[JsonPropertyName("signature")]
|
||||
public string? Signature { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Response for bundle verification.
|
||||
/// Sprint: SPRINT_3602_0001_0001 - Task 10
|
||||
/// </summary>
|
||||
public sealed record BundleVerificationResponse
|
||||
{
|
||||
[JsonPropertyName("alert_id")]
|
||||
public required string AlertId { get; init; }
|
||||
|
||||
[JsonPropertyName("is_valid")]
|
||||
public required bool IsValid { get; init; }
|
||||
|
||||
[JsonPropertyName("verified_at")]
|
||||
public required DateTimeOffset VerifiedAt { get; init; }
|
||||
|
||||
[JsonPropertyName("signature_valid")]
|
||||
public bool SignatureValid { get; init; }
|
||||
|
||||
[JsonPropertyName("hash_valid")]
|
||||
public bool HashValid { get; init; }
|
||||
|
||||
[JsonPropertyName("chain_valid")]
|
||||
public bool ChainValid { get; init; }
|
||||
|
||||
[JsonPropertyName("errors")]
|
||||
public IReadOnlyList<string>? Errors { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Bundle verification result.
|
||||
/// </summary>
|
||||
|
||||
@@ -1677,6 +1677,77 @@ app.MapGet("/v1/alerts/{alertId}/audit", async Task<Results<JsonHttpResult<Audit
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.ProducesProblem(StatusCodes.Status400BadRequest);
|
||||
|
||||
// Sprint: SPRINT_3602_0001_0001 - Task 9: Bundle download endpoint
|
||||
app.MapGet("/v1/alerts/{alertId}/bundle", async Task<Results<FileStreamHttpResult, NotFound, ProblemHttpResult>> (
|
||||
string alertId,
|
||||
[FromServices] IAlertService alertService,
|
||||
[FromServices] IEvidenceBundleService bundleService,
|
||||
CancellationToken cancellationToken) =>
|
||||
{
|
||||
var alert = await alertService.GetAlertAsync(alertId, cancellationToken).ConfigureAwait(false);
|
||||
if (alert is null)
|
||||
{
|
||||
return TypedResults.NotFound();
|
||||
}
|
||||
|
||||
var bundle = await bundleService.CreateBundleAsync(alertId, cancellationToken).ConfigureAwait(false);
|
||||
if (bundle is null)
|
||||
{
|
||||
return TypedResults.Problem(
|
||||
detail: "Failed to create evidence bundle",
|
||||
statusCode: StatusCodes.Status500InternalServerError);
|
||||
}
|
||||
|
||||
return TypedResults.File(
|
||||
bundle.Content,
|
||||
contentType: "application/gzip",
|
||||
fileDownloadName: $"evidence-{alertId}.tar.gz");
|
||||
})
|
||||
.WithName("DownloadAlertBundle")
|
||||
.RequireAuthorization(AlertReadPolicy)
|
||||
.Produces<FileStreamHttpResult>(StatusCodes.Status200OK, "application/gzip")
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.ProducesProblem(StatusCodes.Status400BadRequest);
|
||||
|
||||
// Sprint: SPRINT_3602_0001_0001 - Task 10: Bundle verify endpoint
|
||||
app.MapPost("/v1/alerts/{alertId}/bundle/verify", async Task<Results<Ok<BundleVerificationResponse>, NotFound, ProblemHttpResult>> (
|
||||
string alertId,
|
||||
[FromBody] BundleVerificationRequest request,
|
||||
[FromServices] IAlertService alertService,
|
||||
[FromServices] IEvidenceBundleService bundleService,
|
||||
CancellationToken cancellationToken) =>
|
||||
{
|
||||
var alert = await alertService.GetAlertAsync(alertId, cancellationToken).ConfigureAwait(false);
|
||||
if (alert is null)
|
||||
{
|
||||
return TypedResults.NotFound();
|
||||
}
|
||||
|
||||
var result = await bundleService.VerifyBundleAsync(
|
||||
alertId,
|
||||
request.BundleHash,
|
||||
request.Signature,
|
||||
cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var response = new BundleVerificationResponse
|
||||
{
|
||||
AlertId = alertId,
|
||||
IsValid = result.IsValid,
|
||||
VerifiedAt = DateTimeOffset.UtcNow,
|
||||
SignatureValid = result.SignatureValid,
|
||||
HashValid = result.HashValid,
|
||||
ChainValid = result.ChainValid,
|
||||
Errors = result.Errors
|
||||
};
|
||||
|
||||
return TypedResults.Ok(response);
|
||||
})
|
||||
.WithName("VerifyAlertBundle")
|
||||
.RequireAuthorization(AlertReadPolicy)
|
||||
.Produces(StatusCodes.Status200OK)
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.ProducesProblem(StatusCodes.Status400BadRequest);
|
||||
|
||||
app.MapPost("/v1/vex-consensus/issuers", async Task<Results<Created<VexIssuerDetailResponse>, ProblemHttpResult>> (
|
||||
RegisterVexIssuerRequest request,
|
||||
VexConsensusService consensusService,
|
||||
|
||||
@@ -0,0 +1,181 @@
|
||||
// =============================================================================
|
||||
// EvidenceDecisionApiIntegrationTests.cs
|
||||
// Sprint: SPRINT_3602_0001_0001
|
||||
// Task: 12 - API integration tests
|
||||
// =============================================================================
|
||||
|
||||
using System.Net;
|
||||
using System.Net.Http.Json;
|
||||
using FluentAssertions;
|
||||
using Microsoft.AspNetCore.Mvc.Testing;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Findings.Ledger.Tests.Integration;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for Evidence and Decision API endpoints.
|
||||
/// </summary>
|
||||
[Trait("Category", "Integration")]
|
||||
[Trait("Sprint", "3602")]
|
||||
public sealed class EvidenceDecisionApiIntegrationTests : IClassFixture<WebApplicationFactory<Program>>
|
||||
{
|
||||
private readonly HttpClient _client;
|
||||
|
||||
public EvidenceDecisionApiIntegrationTests(WebApplicationFactory<Program> factory)
|
||||
{
|
||||
_client = factory.CreateClient(new WebApplicationFactoryClientOptions
|
||||
{
|
||||
AllowAutoRedirect = false
|
||||
});
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GET /v1/alerts returns paginated list")]
|
||||
public async Task GetAlerts_ReturnsPaginatedList()
|
||||
{
|
||||
// Act
|
||||
var response = await _client.GetAsync("/v1/alerts?limit=10");
|
||||
|
||||
// Assert
|
||||
// Note: In actual test, would need auth token
|
||||
response.StatusCode.Should().BeOneOf(
|
||||
HttpStatusCode.OK,
|
||||
HttpStatusCode.Unauthorized); // Depends on test auth setup
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GET /v1/alerts with filters applies correctly")]
|
||||
public async Task GetAlerts_WithFilters_AppliesCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var filters = "?band=critical&status=open&limit=5";
|
||||
|
||||
// Act
|
||||
var response = await _client.GetAsync($"/v1/alerts{filters}");
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().BeOneOf(
|
||||
HttpStatusCode.OK,
|
||||
HttpStatusCode.Unauthorized);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GET /v1/alerts/{id} returns 404 for non-existent alert")]
|
||||
public async Task GetAlert_NonExistent_Returns404()
|
||||
{
|
||||
// Act
|
||||
var response = await _client.GetAsync("/v1/alerts/non-existent-id");
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().BeOneOf(
|
||||
HttpStatusCode.NotFound,
|
||||
HttpStatusCode.Unauthorized);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "POST /v1/alerts/{id}/decisions requires decision and rationale")]
|
||||
public async Task PostDecision_RequiresFields()
|
||||
{
|
||||
// Arrange
|
||||
var request = new
|
||||
{
|
||||
decision = "accept_risk",
|
||||
rationale = "Test rationale for decision"
|
||||
};
|
||||
|
||||
// Act
|
||||
var response = await _client.PostAsJsonAsync("/v1/alerts/test-id/decisions", request);
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().BeOneOf(
|
||||
HttpStatusCode.Created,
|
||||
HttpStatusCode.NotFound,
|
||||
HttpStatusCode.Unauthorized,
|
||||
HttpStatusCode.BadRequest);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "POST /v1/alerts/{id}/decisions rejects empty rationale")]
|
||||
public async Task PostDecision_EmptyRationale_Rejected()
|
||||
{
|
||||
// Arrange
|
||||
var request = new
|
||||
{
|
||||
decision = "accept_risk",
|
||||
rationale = ""
|
||||
};
|
||||
|
||||
// Act
|
||||
var response = await _client.PostAsJsonAsync("/v1/alerts/test-id/decisions", request);
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().BeOneOf(
|
||||
HttpStatusCode.BadRequest,
|
||||
HttpStatusCode.Unauthorized);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GET /v1/alerts/{id}/audit returns timeline")]
|
||||
public async Task GetAudit_ReturnsTimeline()
|
||||
{
|
||||
// Act
|
||||
var response = await _client.GetAsync("/v1/alerts/test-id/audit");
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().BeOneOf(
|
||||
HttpStatusCode.OK,
|
||||
HttpStatusCode.NotFound,
|
||||
HttpStatusCode.Unauthorized);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GET /v1/alerts/{id}/bundle returns gzip content-type")]
|
||||
public async Task GetBundle_ReturnsGzip()
|
||||
{
|
||||
// Act
|
||||
var response = await _client.GetAsync("/v1/alerts/test-id/bundle");
|
||||
|
||||
// Assert
|
||||
if (response.StatusCode == HttpStatusCode.OK)
|
||||
{
|
||||
response.Content.Headers.ContentType?.MediaType.Should().Be("application/gzip");
|
||||
}
|
||||
else
|
||||
{
|
||||
response.StatusCode.Should().BeOneOf(
|
||||
HttpStatusCode.NotFound,
|
||||
HttpStatusCode.Unauthorized);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "POST /v1/alerts/{id}/bundle/verify validates hash")]
|
||||
public async Task VerifyBundle_ValidatesHash()
|
||||
{
|
||||
// Arrange
|
||||
var request = new
|
||||
{
|
||||
bundle_hash = "sha256:abc123",
|
||||
signature = "test-signature"
|
||||
};
|
||||
|
||||
// Act
|
||||
var response = await _client.PostAsJsonAsync("/v1/alerts/test-id/bundle/verify", request);
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().BeOneOf(
|
||||
HttpStatusCode.OK,
|
||||
HttpStatusCode.NotFound,
|
||||
HttpStatusCode.Unauthorized);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "API returns proper error format for invalid requests")]
|
||||
public async Task InvalidRequest_ReturnsProblemDetails()
|
||||
{
|
||||
// Arrange
|
||||
var invalidJson = "not-json";
|
||||
|
||||
// Act
|
||||
var response = await _client.PostAsync(
|
||||
"/v1/alerts/test-id/decisions",
|
||||
new StringContent(invalidJson, System.Text.Encoding.UTF8, "application/json"));
|
||||
|
||||
// Assert
|
||||
response.StatusCode.Should().BeOneOf(
|
||||
HttpStatusCode.BadRequest,
|
||||
HttpStatusCode.UnsupportedMediaType,
|
||||
HttpStatusCode.Unauthorized);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,229 @@
|
||||
// =============================================================================
|
||||
// OpenApiSchemaTests.cs
|
||||
// Sprint: SPRINT_3602_0001_0001
|
||||
// Task: 13 - OpenAPI schema validation tests
|
||||
// =============================================================================
|
||||
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
using StellaOps.Findings.Ledger.WebService.Contracts;
|
||||
|
||||
namespace StellaOps.Findings.Ledger.Tests.Schema;
|
||||
|
||||
/// <summary>
|
||||
/// Tests to validate API response contracts match OpenAPI specification.
|
||||
/// </summary>
|
||||
[Trait("Category", "Schema")]
|
||||
[Trait("Sprint", "3602")]
|
||||
public sealed class OpenApiSchemaTests
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
|
||||
WriteIndented = false
|
||||
};
|
||||
|
||||
[Fact(DisplayName = "AlertSummary serializes with correct property names")]
|
||||
public void AlertSummary_SerializesCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var alert = new AlertSummary
|
||||
{
|
||||
AlertId = "alert-123",
|
||||
ArtifactId = "sha256:abc",
|
||||
VulnId = "CVE-2024-1234",
|
||||
ComponentPurl = "pkg:npm/lodash@4.17.21",
|
||||
Severity = "HIGH",
|
||||
Band = "critical",
|
||||
Status = "open",
|
||||
Score = 9.5,
|
||||
CreatedAt = DateTimeOffset.Parse("2024-12-15T10:00:00Z"),
|
||||
UpdatedAt = DateTimeOffset.Parse("2024-12-16T10:00:00Z"),
|
||||
DecisionCount = 2
|
||||
};
|
||||
|
||||
// Act
|
||||
var json = JsonSerializer.Serialize(alert, JsonOptions);
|
||||
var doc = JsonDocument.Parse(json);
|
||||
var root = doc.RootElement;
|
||||
|
||||
// Assert - verify snake_case property names per OpenAPI spec
|
||||
root.TryGetProperty("alert_id", out _).Should().BeTrue();
|
||||
root.TryGetProperty("artifact_id", out _).Should().BeTrue();
|
||||
root.TryGetProperty("vuln_id", out _).Should().BeTrue();
|
||||
root.TryGetProperty("component_purl", out _).Should().BeTrue();
|
||||
root.TryGetProperty("severity", out _).Should().BeTrue();
|
||||
root.TryGetProperty("band", out _).Should().BeTrue();
|
||||
root.TryGetProperty("status", out _).Should().BeTrue();
|
||||
root.TryGetProperty("score", out _).Should().BeTrue();
|
||||
root.TryGetProperty("created_at", out _).Should().BeTrue();
|
||||
root.TryGetProperty("decision_count", out _).Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "AlertListResponse includes required fields")]
|
||||
public void AlertListResponse_IncludesRequiredFields()
|
||||
{
|
||||
// Arrange
|
||||
var response = new AlertListResponse(
|
||||
Items: new List<AlertSummary>(),
|
||||
TotalCount: 0,
|
||||
NextPageToken: null);
|
||||
|
||||
// Act
|
||||
var json = JsonSerializer.Serialize(response, JsonOptions);
|
||||
var doc = JsonDocument.Parse(json);
|
||||
var root = doc.RootElement;
|
||||
|
||||
// Assert - items and total_count are required per OpenAPI spec
|
||||
root.TryGetProperty("items", out var items).Should().BeTrue();
|
||||
items.ValueKind.Should().Be(JsonValueKind.Array);
|
||||
|
||||
root.TryGetProperty("total_count", out var count).Should().BeTrue();
|
||||
count.ValueKind.Should().Be(JsonValueKind.Number);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "DecisionRequest validates required fields")]
|
||||
public void DecisionRequest_RequiresFields()
|
||||
{
|
||||
// Arrange
|
||||
var request = new DecisionRequest
|
||||
{
|
||||
Decision = "accept_risk",
|
||||
Rationale = "Test rationale",
|
||||
JustificationCode = null,
|
||||
Metadata = null
|
||||
};
|
||||
|
||||
// Act
|
||||
var json = JsonSerializer.Serialize(request, JsonOptions);
|
||||
var doc = JsonDocument.Parse(json);
|
||||
var root = doc.RootElement;
|
||||
|
||||
// Assert - decision and rationale are required per OpenAPI spec
|
||||
root.TryGetProperty("decision", out var decision).Should().BeTrue();
|
||||
decision.GetString().Should().NotBeNullOrEmpty();
|
||||
|
||||
root.TryGetProperty("rationale", out var rationale).Should().BeTrue();
|
||||
rationale.GetString().Should().NotBeNullOrEmpty();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "BundleVerificationResponse includes all fields")]
|
||||
public void BundleVerificationResponse_IncludesAllFields()
|
||||
{
|
||||
// Arrange
|
||||
var response = new BundleVerificationResponse
|
||||
{
|
||||
AlertId = "alert-123",
|
||||
IsValid = true,
|
||||
VerifiedAt = DateTimeOffset.UtcNow,
|
||||
SignatureValid = true,
|
||||
HashValid = true,
|
||||
ChainValid = true,
|
||||
Errors = null
|
||||
};
|
||||
|
||||
// Act
|
||||
var json = JsonSerializer.Serialize(response, JsonOptions);
|
||||
var doc = JsonDocument.Parse(json);
|
||||
var root = doc.RootElement;
|
||||
|
||||
// Assert - verify required fields per OpenAPI spec
|
||||
root.TryGetProperty("alert_id", out _).Should().BeTrue();
|
||||
root.TryGetProperty("is_valid", out _).Should().BeTrue();
|
||||
root.TryGetProperty("verified_at", out _).Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "AuditTimelineResponse serializes correctly")]
|
||||
public void AuditTimelineResponse_SerializesCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var response = new AuditTimelineResponse
|
||||
{
|
||||
AlertId = "alert-123",
|
||||
Events = new List<AuditEventResponse>(),
|
||||
TotalCount = 0
|
||||
};
|
||||
|
||||
// Act
|
||||
var json = JsonSerializer.Serialize(response, JsonOptions);
|
||||
var doc = JsonDocument.Parse(json);
|
||||
var root = doc.RootElement;
|
||||
|
||||
// Assert
|
||||
root.TryGetProperty("alert_id", out _).Should().BeTrue();
|
||||
root.TryGetProperty("events", out var events).Should().BeTrue();
|
||||
events.ValueKind.Should().Be(JsonValueKind.Array);
|
||||
root.TryGetProperty("total_count", out _).Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Decision enum values match OpenAPI spec")]
|
||||
public void DecisionEnumValues_MatchSpec()
|
||||
{
|
||||
// Arrange - valid decision values per OpenAPI spec
|
||||
var validDecisions = new[] { "accept_risk", "mitigate", "suppress", "escalate" };
|
||||
|
||||
// Assert - all values should be accepted
|
||||
foreach (var decision in validDecisions)
|
||||
{
|
||||
var request = new DecisionRequest
|
||||
{
|
||||
Decision = decision,
|
||||
Rationale = "Test rationale"
|
||||
};
|
||||
|
||||
var json = JsonSerializer.Serialize(request, JsonOptions);
|
||||
json.Should().Contain(decision);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Band enum values match OpenAPI spec")]
|
||||
public void BandEnumValues_MatchSpec()
|
||||
{
|
||||
// Arrange - valid band values per OpenAPI spec
|
||||
var validBands = new[] { "critical", "high", "medium", "low", "info" };
|
||||
|
||||
// Assert - all values should be representable
|
||||
foreach (var band in validBands)
|
||||
{
|
||||
var alert = new AlertSummary
|
||||
{
|
||||
AlertId = "test",
|
||||
ArtifactId = "test",
|
||||
VulnId = "test",
|
||||
Severity = "test",
|
||||
Band = band,
|
||||
Status = "open",
|
||||
CreatedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
var json = JsonSerializer.Serialize(alert, JsonOptions);
|
||||
json.Should().Contain($"\"{band}\"");
|
||||
}
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Status enum values match OpenAPI spec")]
|
||||
public void StatusEnumValues_MatchSpec()
|
||||
{
|
||||
// Arrange - valid status values per OpenAPI spec
|
||||
var validStatuses = new[] { "open", "acknowledged", "resolved", "suppressed" };
|
||||
|
||||
// Assert - all values should be representable
|
||||
foreach (var status in validStatuses)
|
||||
{
|
||||
var alert = new AlertSummary
|
||||
{
|
||||
AlertId = "test",
|
||||
ArtifactId = "test",
|
||||
VulnId = "test",
|
||||
Severity = "test",
|
||||
Band = "critical",
|
||||
Status = status,
|
||||
CreatedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
var json = JsonSerializer.Serialize(alert, JsonOptions);
|
||||
json.Should().Contain($"\"{status}\"");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,296 @@
|
||||
// =============================================================================
|
||||
// DeterministicTestFixtures.cs
|
||||
// Deterministic test fixtures for TTFS testing
|
||||
// Part of Task T15: Create deterministic test fixtures
|
||||
// =============================================================================
|
||||
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.Ttfs;
|
||||
|
||||
/// <summary>
|
||||
/// Deterministic test fixtures for TTFS (Time-To-First-Signal) testing.
|
||||
/// Uses frozen timestamps and pre-generated UUIDs for reproducibility.
|
||||
/// </summary>
|
||||
public static class DeterministicTestFixtures
|
||||
{
|
||||
/// <summary>
|
||||
/// Frozen timestamp used across all fixtures.
|
||||
/// </summary>
|
||||
public static readonly DateTimeOffset FrozenTimestamp =
|
||||
new(2025, 12, 4, 12, 0, 0, TimeSpan.Zero);
|
||||
|
||||
/// <summary>
|
||||
/// Deterministic seed for reproducible random generation.
|
||||
/// </summary>
|
||||
public const int DeterministicSeed = 42;
|
||||
|
||||
/// <summary>
|
||||
/// Pre-generated deterministic UUIDs.
|
||||
/// </summary>
|
||||
public static class Ids
|
||||
{
|
||||
public static readonly Guid TenantId = Guid.Parse("11111111-1111-1111-1111-111111111111");
|
||||
public static readonly Guid RunId = Guid.Parse("22222222-2222-2222-2222-222222222222");
|
||||
public static readonly Guid JobId = Guid.Parse("33333333-3333-3333-3333-333333333333");
|
||||
public static readonly Guid SourceId = Guid.Parse("44444444-4444-4444-4444-444444444444");
|
||||
public static readonly Guid SignatureId = Guid.Parse("55555555-5555-5555-5555-555555555555");
|
||||
|
||||
public const string TenantIdString = "test-tenant-deterministic";
|
||||
public const string CorrelationId = "corr-deterministic-001";
|
||||
public const string SignalId = "sig-deterministic-001";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Deterministic digest values.
|
||||
/// </summary>
|
||||
public static class Digests
|
||||
{
|
||||
/// <summary>64-character lowercase hex digest (SHA-256).</summary>
|
||||
public const string PayloadDigest = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";
|
||||
|
||||
/// <summary>Image digest reference.</summary>
|
||||
public const string ImageDigest = "sha256:abc123def456789012345678901234567890123456789012345678901234abcd";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a deterministic Run for testing.
|
||||
/// </summary>
|
||||
public static Run CreateRun(
|
||||
Guid? runId = null,
|
||||
string? tenantId = null,
|
||||
RunStatus status = RunStatus.Pending,
|
||||
DateTimeOffset? createdAt = null)
|
||||
{
|
||||
return new Run(
|
||||
RunId: runId ?? Ids.RunId,
|
||||
TenantId: tenantId ?? Ids.TenantIdString,
|
||||
ProjectId: null,
|
||||
SourceId: Ids.SourceId,
|
||||
RunType: "scan",
|
||||
Status: status,
|
||||
CorrelationId: Ids.CorrelationId,
|
||||
TotalJobs: 1,
|
||||
CompletedJobs: 0,
|
||||
SucceededJobs: 0,
|
||||
FailedJobs: 0,
|
||||
CreatedAt: createdAt ?? FrozenTimestamp,
|
||||
StartedAt: null,
|
||||
CompletedAt: null,
|
||||
CreatedBy: "system",
|
||||
Metadata: null);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a deterministic Job for testing.
|
||||
/// </summary>
|
||||
public static Job CreateJob(
|
||||
Guid? jobId = null,
|
||||
Guid? runId = null,
|
||||
string? tenantId = null,
|
||||
JobStatus status = JobStatus.Scheduled,
|
||||
DateTimeOffset? createdAt = null)
|
||||
{
|
||||
return new Job(
|
||||
JobId: jobId ?? Ids.JobId,
|
||||
TenantId: tenantId ?? Ids.TenantIdString,
|
||||
ProjectId: null,
|
||||
RunId: runId ?? Ids.RunId,
|
||||
JobType: "scan.image",
|
||||
Status: status,
|
||||
Priority: 0,
|
||||
Attempt: 1,
|
||||
MaxAttempts: 3,
|
||||
PayloadDigest: Digests.PayloadDigest,
|
||||
Payload: "{}",
|
||||
IdempotencyKey: "idem-deterministic-001",
|
||||
CorrelationId: Ids.CorrelationId,
|
||||
LeaseId: null,
|
||||
WorkerId: null,
|
||||
TaskRunnerId: null,
|
||||
LeaseUntil: null,
|
||||
CreatedAt: createdAt ?? FrozenTimestamp,
|
||||
ScheduledAt: createdAt ?? FrozenTimestamp,
|
||||
LeasedAt: null,
|
||||
CompletedAt: null,
|
||||
NotBefore: null,
|
||||
Reason: null,
|
||||
ReplayOf: null,
|
||||
CreatedBy: "system");
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a deterministic FirstSignal for testing.
|
||||
/// </summary>
|
||||
public static FirstSignal CreateFirstSignal(
|
||||
FirstSignalKind kind = FirstSignalKind.Queued,
|
||||
FirstSignalPhase phase = FirstSignalPhase.Resolve,
|
||||
bool cacheHit = false,
|
||||
string source = "cold_start",
|
||||
LastKnownOutcome? lastKnownOutcome = null)
|
||||
{
|
||||
return new FirstSignal
|
||||
{
|
||||
Version = "1.0",
|
||||
SignalId = Ids.SignalId,
|
||||
JobId = Ids.JobId,
|
||||
Timestamp = FrozenTimestamp,
|
||||
Kind = kind,
|
||||
Phase = phase,
|
||||
Scope = new FirstSignalScope
|
||||
{
|
||||
Type = "image",
|
||||
Id = Digests.ImageDigest
|
||||
},
|
||||
Summary = GetSummaryForKind(kind),
|
||||
EtaSeconds = kind == FirstSignalKind.Queued ? 120 : null,
|
||||
LastKnownOutcome = lastKnownOutcome,
|
||||
NextActions = GetActionsForKind(kind),
|
||||
Diagnostics = new FirstSignalDiagnostics
|
||||
{
|
||||
CacheHit = cacheHit,
|
||||
Source = source,
|
||||
CorrelationId = Ids.CorrelationId
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a deterministic LastKnownOutcome for testing.
|
||||
/// </summary>
|
||||
public static LastKnownOutcome CreateLastKnownOutcome(
|
||||
string confidence = "high",
|
||||
int hitCount = 15)
|
||||
{
|
||||
return new LastKnownOutcome
|
||||
{
|
||||
SignatureId = Ids.SignatureId.ToString(),
|
||||
ErrorCode = "EDEPNOTFOUND",
|
||||
Token = "EDEPNOTFOUND",
|
||||
Excerpt = "Could not resolve dependency @types/node@^18.0.0",
|
||||
Confidence = confidence,
|
||||
FirstSeenAt = FrozenTimestamp.AddDays(-3),
|
||||
HitCount = hitCount
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a deterministic failed FirstSignal with LastKnownOutcome.
|
||||
/// </summary>
|
||||
public static FirstSignal CreateFailedSignalWithOutcome()
|
||||
{
|
||||
return CreateFirstSignal(
|
||||
kind: FirstSignalKind.Failed,
|
||||
phase: FirstSignalPhase.Analyze,
|
||||
cacheHit: false,
|
||||
source: "failure_index",
|
||||
lastKnownOutcome: CreateLastKnownOutcome());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a deterministic succeeded FirstSignal.
|
||||
/// </summary>
|
||||
public static FirstSignal CreateSucceededSignal()
|
||||
{
|
||||
return CreateFirstSignal(
|
||||
kind: FirstSignalKind.Succeeded,
|
||||
phase: FirstSignalPhase.Report,
|
||||
cacheHit: true,
|
||||
source: "snapshot");
|
||||
}
|
||||
|
||||
private static string GetSummaryForKind(FirstSignalKind kind)
|
||||
{
|
||||
return kind switch
|
||||
{
|
||||
FirstSignalKind.Queued => "Job queued, waiting for available worker",
|
||||
FirstSignalKind.Started => "Analysis started",
|
||||
FirstSignalKind.Phase => "Processing in progress",
|
||||
FirstSignalKind.Blocked => "Blocked by policy: critical-vuln-gate",
|
||||
FirstSignalKind.Failed => "Analysis failed: dependency resolution error",
|
||||
FirstSignalKind.Succeeded => "Scan completed: 3 critical, 12 high, 45 medium findings",
|
||||
FirstSignalKind.Canceled => "Job canceled by user",
|
||||
FirstSignalKind.Unavailable => "Signal unavailable",
|
||||
_ => "Unknown state"
|
||||
};
|
||||
}
|
||||
|
||||
private static IReadOnlyList<NextAction>? GetActionsForKind(FirstSignalKind kind)
|
||||
{
|
||||
return kind switch
|
||||
{
|
||||
FirstSignalKind.Failed => new[]
|
||||
{
|
||||
new NextAction
|
||||
{
|
||||
Type = "open_logs",
|
||||
Label = "View Logs",
|
||||
Target = $"/logs/{Ids.JobId}"
|
||||
},
|
||||
new NextAction
|
||||
{
|
||||
Type = "retry",
|
||||
Label = "Retry Job",
|
||||
Target = $"/retry/{Ids.JobId}"
|
||||
}
|
||||
},
|
||||
FirstSignalKind.Succeeded => new[]
|
||||
{
|
||||
new NextAction
|
||||
{
|
||||
Type = "open_job",
|
||||
Label = "View Results",
|
||||
Target = $"/jobs/{Ids.JobId}"
|
||||
}
|
||||
},
|
||||
FirstSignalKind.Blocked => new[]
|
||||
{
|
||||
new NextAction
|
||||
{
|
||||
Type = "docs",
|
||||
Label = "Policy Details",
|
||||
Target = "/docs/policies/critical-vuln-gate"
|
||||
}
|
||||
},
|
||||
_ => null
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Seeded random number generator for deterministic test data.
|
||||
/// </summary>
|
||||
public sealed class SeededRandom
|
||||
{
|
||||
private readonly Random _random;
|
||||
|
||||
public SeededRandom(int seed = DeterministicTestFixtures.DeterministicSeed)
|
||||
{
|
||||
_random = new Random(seed);
|
||||
}
|
||||
|
||||
public int Next() => _random.Next();
|
||||
public int Next(int maxValue) => _random.Next(maxValue);
|
||||
public int Next(int minValue, int maxValue) => _random.Next(minValue, maxValue);
|
||||
public double NextDouble() => _random.NextDouble();
|
||||
|
||||
/// <summary>
|
||||
/// Generates a deterministic GUID based on the seed.
|
||||
/// </summary>
|
||||
public Guid NextGuid()
|
||||
{
|
||||
var bytes = new byte[16];
|
||||
_random.NextBytes(bytes);
|
||||
return new Guid(bytes);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generates a deterministic hex string.
|
||||
/// </summary>
|
||||
public string NextHexString(int length)
|
||||
{
|
||||
var bytes = new byte[length / 2];
|
||||
_random.NextBytes(bytes);
|
||||
return Convert.ToHexString(bytes).ToLowerInvariant();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,139 @@
|
||||
// =============================================================================
|
||||
// DeterministicTestFixturesTests.cs
|
||||
// Tests for deterministic test fixtures
|
||||
// Part of Task T15: Create deterministic test fixtures
|
||||
// =============================================================================
|
||||
|
||||
using StellaOps.Orchestrator.Core.Domain;
|
||||
|
||||
namespace StellaOps.Orchestrator.Tests.Ttfs;
|
||||
|
||||
public sealed class DeterministicTestFixturesTests
|
||||
{
|
||||
[Fact]
|
||||
public void FrozenTimestamp_IsCorrectDate()
|
||||
{
|
||||
var expected = new DateTimeOffset(2025, 12, 4, 12, 0, 0, TimeSpan.Zero);
|
||||
Assert.Equal(expected, DeterministicTestFixtures.FrozenTimestamp);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Ids_AreConsistent_AcrossMultipleCalls()
|
||||
{
|
||||
var runId1 = DeterministicTestFixtures.Ids.RunId;
|
||||
var runId2 = DeterministicTestFixtures.Ids.RunId;
|
||||
|
||||
Assert.Equal(runId1, runId2);
|
||||
Assert.Equal("22222222-2222-2222-2222-222222222222", runId1.ToString());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CreateRun_ReturnsDeterministicRun()
|
||||
{
|
||||
var run1 = DeterministicTestFixtures.CreateRun();
|
||||
var run2 = DeterministicTestFixtures.CreateRun();
|
||||
|
||||
Assert.Equal(run1.RunId, run2.RunId);
|
||||
Assert.Equal(run1.TenantId, run2.TenantId);
|
||||
Assert.Equal(run1.CreatedAt, run2.CreatedAt);
|
||||
Assert.Equal(run1.CorrelationId, run2.CorrelationId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CreateJob_ReturnsDeterministicJob()
|
||||
{
|
||||
var job1 = DeterministicTestFixtures.CreateJob();
|
||||
var job2 = DeterministicTestFixtures.CreateJob();
|
||||
|
||||
Assert.Equal(job1.JobId, job2.JobId);
|
||||
Assert.Equal(job1.TenantId, job2.TenantId);
|
||||
Assert.Equal(job1.PayloadDigest, job2.PayloadDigest);
|
||||
Assert.Equal(64, job1.PayloadDigest.Length);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CreateFirstSignal_ReturnsDeterministicSignal()
|
||||
{
|
||||
var signal1 = DeterministicTestFixtures.CreateFirstSignal();
|
||||
var signal2 = DeterministicTestFixtures.CreateFirstSignal();
|
||||
|
||||
Assert.Equal(signal1.SignalId, signal2.SignalId);
|
||||
Assert.Equal(signal1.JobId, signal2.JobId);
|
||||
Assert.Equal(signal1.Timestamp, signal2.Timestamp);
|
||||
Assert.Equal(signal1.Diagnostics.CorrelationId, signal2.Diagnostics.CorrelationId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CreateFailedSignalWithOutcome_IncludesLastKnownOutcome()
|
||||
{
|
||||
var signal = DeterministicTestFixtures.CreateFailedSignalWithOutcome();
|
||||
|
||||
Assert.Equal(FirstSignalKind.Failed, signal.Kind);
|
||||
Assert.NotNull(signal.LastKnownOutcome);
|
||||
Assert.Equal("EDEPNOTFOUND", signal.LastKnownOutcome.ErrorCode);
|
||||
Assert.Equal("high", signal.LastKnownOutcome.Confidence);
|
||||
Assert.Equal(15, signal.LastKnownOutcome.HitCount);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CreateSucceededSignal_HasCorrectProperties()
|
||||
{
|
||||
var signal = DeterministicTestFixtures.CreateSucceededSignal();
|
||||
|
||||
Assert.Equal(FirstSignalKind.Succeeded, signal.Kind);
|
||||
Assert.Equal(FirstSignalPhase.Report, signal.Phase);
|
||||
Assert.True(signal.Diagnostics.CacheHit);
|
||||
Assert.Equal("snapshot", signal.Diagnostics.Source);
|
||||
Assert.Null(signal.LastKnownOutcome);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SeededRandom_ProducesDeterministicSequence()
|
||||
{
|
||||
var rng1 = new SeededRandom(42);
|
||||
var rng2 = new SeededRandom(42);
|
||||
|
||||
var values1 = Enumerable.Range(0, 10).Select(_ => rng1.Next()).ToList();
|
||||
var values2 = Enumerable.Range(0, 10).Select(_ => rng2.Next()).ToList();
|
||||
|
||||
Assert.Equal(values1, values2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SeededRandom_NextGuid_ProducesDeterministicGuids()
|
||||
{
|
||||
var rng1 = new SeededRandom(42);
|
||||
var rng2 = new SeededRandom(42);
|
||||
|
||||
var guid1 = rng1.NextGuid();
|
||||
var guid2 = rng2.NextGuid();
|
||||
|
||||
Assert.Equal(guid1, guid2);
|
||||
Assert.NotEqual(Guid.Empty, guid1);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SeededRandom_NextHexString_ProducesDeterministicStrings()
|
||||
{
|
||||
var rng1 = new SeededRandom(42);
|
||||
var rng2 = new SeededRandom(42);
|
||||
|
||||
var hex1 = rng1.NextHexString(64);
|
||||
var hex2 = rng2.NextHexString(64);
|
||||
|
||||
Assert.Equal(hex1, hex2);
|
||||
Assert.Equal(64, hex1.Length);
|
||||
Assert.Matches("^[a-f0-9]+$", hex1);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Digests_AreValidFormats()
|
||||
{
|
||||
// PayloadDigest should be 64-char hex
|
||||
Assert.Equal(64, DeterministicTestFixtures.Digests.PayloadDigest.Length);
|
||||
Assert.Matches("^[a-f0-9]+$", DeterministicTestFixtures.Digests.PayloadDigest);
|
||||
|
||||
// ImageDigest should be sha256: prefixed
|
||||
Assert.StartsWith("sha256:", DeterministicTestFixtures.Digests.ImageDigest);
|
||||
}
|
||||
}
|
||||
66
src/Policy/AGENTS.md
Normal file
66
src/Policy/AGENTS.md
Normal file
@@ -0,0 +1,66 @@
|
||||
# AGENTS · Policy Module
|
||||
|
||||
> Sprint: SPRINT_3500_0002_0001 (Smart-Diff Foundation)
|
||||
|
||||
## Roles
|
||||
- **Backend / Policy Engineer**: .NET 10 (preview) for policy engine, gateways, scoring; keep evaluation deterministic.
|
||||
- **QA Engineer**: Adds policy test fixtures, regression tests under `__Tests`.
|
||||
- **Docs Touches (light)**: Update module docs when contracts change; mirror in sprint notes.
|
||||
|
||||
## Required Reading
|
||||
- `docs/README.md`
|
||||
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
|
||||
- `docs/modules/platform/architecture-overview.md`
|
||||
- `docs/modules/policy/architecture.md`
|
||||
- `docs/product-advisories/14-Dec-2025 - Smart-Diff Technical Reference.md` (for suppression contracts)
|
||||
- Current sprint file
|
||||
|
||||
## Working Directory & Boundaries
|
||||
- Primary scope: `src/Policy/**` (Engine, Gateway, Registry, RiskProfile, Scoring, __Libraries, __Tests).
|
||||
- Avoid cross-module edits unless sprint explicitly permits.
|
||||
|
||||
## Suppression Contracts (Sprint 3500)
|
||||
|
||||
The Policy module includes suppression primitives for Smart-Diff:
|
||||
|
||||
### Namespace
|
||||
- `StellaOps.Policy.Suppression` - Pre-filter suppression rules
|
||||
|
||||
### Key Types
|
||||
- `SuppressionRule` - Individual suppression rule definition
|
||||
- `SuppressionRuleEvaluator` - Evaluates rules against findings
|
||||
- `ISuppressionOverrideProvider` - Interface for runtime overrides
|
||||
- `PatchChurnSuppression` - Special handling for patch churn
|
||||
|
||||
### Suppression Rule Types
|
||||
| Type | Description |
|
||||
|------|-------------|
|
||||
| `cve_pattern` | Suppress by CVE pattern (regex) |
|
||||
| `purl_pattern` | Suppress by PURL pattern |
|
||||
| `severity_below` | Suppress by severity threshold |
|
||||
| `patch_churn` | Suppress if patch churn detected |
|
||||
| `sink_category` | Suppress by sink category |
|
||||
| `reachability_class` | Suppress by reachability gate class |
|
||||
|
||||
### Integration Points
|
||||
- Scanner SmartDiff calls `SuppressionRuleEvaluator` before emitting findings
|
||||
- Suppressed count tracked in `SmartDiffPredicate.suppressedCount`
|
||||
- Override providers allow runtime/tenant-specific rules
|
||||
|
||||
## Engineering Rules
|
||||
- Target `net10.0`; prefer latest C# preview allowed in repo.
|
||||
- Determinism: stable ordering, UTC timestamps, no `DateTime.Now`/random without seed.
|
||||
- Policy evaluation must be pure (no side effects) and reproducible.
|
||||
- Logging: structured (`ILogger` message templates).
|
||||
- Security: policy files are treated as trusted; validate before loading.
|
||||
|
||||
## Testing & Verification
|
||||
- Default: `dotnet test src/Policy/StellaOps.Policy.sln`.
|
||||
- Add/extend tests in `src/Policy/__Tests/**`.
|
||||
- Golden outputs should be deterministic (sorted keys, stable ordering).
|
||||
- Suppression: Add test cases for each rule type in `SuppressionRuleEvaluatorTests`.
|
||||
|
||||
## Workflow Expectations
|
||||
- Mirror task state in sprint tracker (`TODO → DOING → DONE/BLOCKED`).
|
||||
- Note blockers with the specific decision needed.
|
||||
- When policy contracts change, update both module docs and consumer documentation.
|
||||
@@ -0,0 +1,460 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// AdvancedScoringEngine.cs
|
||||
// Sprint: SPRINT_3407_0001_0001_configurable_scoring
|
||||
// Task: PROF-3407-004
|
||||
// Description: Advanced entropy-based + CVSS hybrid scoring engine
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Policy.Scoring;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Scoring.Engines;
|
||||
|
||||
/// <summary>
|
||||
/// Advanced entropy-based + CVSS hybrid scoring engine.
|
||||
/// Uses uncertainty tiers, entropy penalties, and CVSS v4.0 receipts.
|
||||
/// This is the default scoring engine.
|
||||
/// </summary>
|
||||
public sealed class AdvancedScoringEngine : IScoringEngine
|
||||
{
|
||||
private readonly EvidenceFreshnessCalculator _freshnessCalculator;
|
||||
private readonly ILogger<AdvancedScoringEngine> _logger;
|
||||
|
||||
public ScoringProfile Profile => ScoringProfile.Advanced;
|
||||
|
||||
public AdvancedScoringEngine(
|
||||
EvidenceFreshnessCalculator freshnessCalculator,
|
||||
ILogger<AdvancedScoringEngine> logger)
|
||||
{
|
||||
_freshnessCalculator = freshnessCalculator ?? throw new ArgumentNullException(nameof(freshnessCalculator));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public Task<ScoringEngineResult> ScoreAsync(
|
||||
ScoringInput input,
|
||||
ScorePolicy policy,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(input);
|
||||
ArgumentNullException.ThrowIfNull(policy);
|
||||
|
||||
var explain = new ScoreExplainBuilder();
|
||||
var weights = policy.WeightsBps;
|
||||
|
||||
// 1. Base Severity with CVSS entropy consideration
|
||||
var baseSeverity = CalculateAdvancedBaseSeverity(input, explain);
|
||||
|
||||
// 2. Reachability with semantic analysis
|
||||
var reachability = CalculateAdvancedReachability(input.Reachability, policy, explain);
|
||||
|
||||
// 3. Evidence with uncertainty tiers
|
||||
var evidence = CalculateAdvancedEvidence(input.Evidence, input.AsOf, policy, explain);
|
||||
|
||||
// 4. Provenance with attestation weighting
|
||||
var provenance = CalculateAdvancedProvenance(input.Provenance, policy, explain);
|
||||
|
||||
// Apply KEV boost if applicable
|
||||
var kevBoost = 0;
|
||||
if (input.IsKnownExploited)
|
||||
{
|
||||
kevBoost = 20; // Boost for known exploited vulnerabilities
|
||||
explain.Add("kev_boost", kevBoost, "Known exploited vulnerability (KEV) boost");
|
||||
}
|
||||
|
||||
// Final score calculation with entropy penalty
|
||||
var rawScoreLong =
|
||||
((long)weights.BaseSeverity * baseSeverity) +
|
||||
((long)weights.Reachability * reachability) +
|
||||
((long)weights.Evidence * evidence) +
|
||||
((long)weights.Provenance * provenance);
|
||||
|
||||
var rawScore = (int)(rawScoreLong / 10000) + kevBoost;
|
||||
rawScore = Math.Clamp(rawScore, 0, 100);
|
||||
|
||||
// Apply uncertainty penalty
|
||||
var uncertaintyPenalty = CalculateUncertaintyPenalty(input, explain);
|
||||
var penalizedScore = Math.Max(0, rawScore - uncertaintyPenalty);
|
||||
|
||||
// Apply overrides
|
||||
var (finalScore, appliedOverride) = ApplyOverrides(
|
||||
penalizedScore, reachability, evidence, input.IsKnownExploited, policy);
|
||||
|
||||
var signalValues = new Dictionary<string, int>
|
||||
{
|
||||
["baseSeverity"] = baseSeverity,
|
||||
["reachability"] = reachability,
|
||||
["evidence"] = evidence,
|
||||
["provenance"] = provenance,
|
||||
["kevBoost"] = kevBoost,
|
||||
["uncertaintyPenalty"] = uncertaintyPenalty
|
||||
};
|
||||
|
||||
var signalContributions = new Dictionary<string, double>
|
||||
{
|
||||
["baseSeverity"] = (weights.BaseSeverity * baseSeverity) / 10000.0,
|
||||
["reachability"] = (weights.Reachability * reachability) / 10000.0,
|
||||
["evidence"] = (weights.Evidence * evidence) / 10000.0,
|
||||
["provenance"] = (weights.Provenance * provenance) / 10000.0,
|
||||
["kevBoost"] = kevBoost,
|
||||
["uncertaintyPenalty"] = -uncertaintyPenalty
|
||||
};
|
||||
|
||||
var result = new ScoringEngineResult
|
||||
{
|
||||
FindingId = input.FindingId,
|
||||
ProfileId = input.ProfileId,
|
||||
ProfileVersion = "advanced-v1",
|
||||
RawScore = rawScore,
|
||||
FinalScore = finalScore,
|
||||
Severity = MapToSeverity(finalScore),
|
||||
SignalValues = signalValues,
|
||||
SignalContributions = signalContributions,
|
||||
OverrideApplied = appliedOverride,
|
||||
OverrideReason = appliedOverride is not null ? $"Override applied: {appliedOverride}" : null,
|
||||
ScoringProfile = ScoringProfile.Advanced,
|
||||
ScoredAt = input.AsOf,
|
||||
Explain = explain.Build()
|
||||
};
|
||||
|
||||
_logger.LogDebug(
|
||||
"Advanced score for {FindingId}: B={B}, R={R}, E={E}, P={P}, KEV={KEV}, Penalty={Penalty} -> Raw={RawScore}, Final={FinalScore}",
|
||||
input.FindingId, baseSeverity, reachability, evidence, provenance, kevBoost, uncertaintyPenalty, rawScore, finalScore);
|
||||
|
||||
return Task.FromResult(result);
|
||||
}
|
||||
|
||||
private int CalculateAdvancedBaseSeverity(
|
||||
ScoringInput input,
|
||||
ScoreExplainBuilder explain)
|
||||
{
|
||||
// Base severity from CVSS
|
||||
var baseSeverity = (int)Math.Round(input.CvssBase * 10);
|
||||
|
||||
// Apply version-specific adjustments
|
||||
var versionMultiplier = input.CvssVersion switch
|
||||
{
|
||||
"4.0" => 10000, // No adjustment for CVSS 4.0
|
||||
"3.1" => 9500, // Slight reduction for older versions
|
||||
"3.0" => 9000,
|
||||
"2.0" => 8500,
|
||||
_ => 9000 // Default for unknown versions
|
||||
};
|
||||
|
||||
var adjustedSeverity = (baseSeverity * versionMultiplier) / 10000;
|
||||
adjustedSeverity = Math.Clamp(adjustedSeverity, 0, 100);
|
||||
|
||||
var versionInfo = input.CvssVersion ?? "unknown";
|
||||
explain.Add("baseSeverity", adjustedSeverity,
|
||||
$"CVSS {input.CvssBase:F1} (v{versionInfo}) with version adjustment");
|
||||
|
||||
return adjustedSeverity;
|
||||
}
|
||||
|
||||
private int CalculateAdvancedReachability(
|
||||
ReachabilityInput input,
|
||||
ScorePolicy policy,
|
||||
ScoreExplainBuilder explain)
|
||||
{
|
||||
// Use advanced score if available
|
||||
if (input.AdvancedScore.HasValue)
|
||||
{
|
||||
var advScore = (int)Math.Round(input.AdvancedScore.Value * 100);
|
||||
advScore = Math.Clamp(advScore, 0, 100);
|
||||
|
||||
var category = input.Category ?? "computed";
|
||||
explain.Add("reachability", advScore, $"Advanced reachability: {category}");
|
||||
return advScore;
|
||||
}
|
||||
|
||||
var config = policy.Reachability ?? ReachabilityPolicyConfig.Default;
|
||||
|
||||
// Fall back to hop-based scoring
|
||||
int bucketScore;
|
||||
if (input.HopCount is null)
|
||||
{
|
||||
bucketScore = config.UnreachableScore;
|
||||
explain.AddReachability(-1, bucketScore, "unreachable");
|
||||
}
|
||||
else
|
||||
{
|
||||
var hops = input.HopCount.Value;
|
||||
|
||||
// Apply semantic category boost/penalty
|
||||
var categoryMultiplier = input.Category?.ToLowerInvariant() switch
|
||||
{
|
||||
"direct_entrypoint" => 12000, // 120% - Direct entry points are high risk
|
||||
"api_endpoint" => 11000, // 110% - API endpoints are high risk
|
||||
"internal_service" => 9000, // 90% - Internal services lower risk
|
||||
"dead_code" => 2000, // 20% - Dead code very low risk
|
||||
_ => 10000 // 100% - Default
|
||||
};
|
||||
|
||||
bucketScore = GetBucketScore(hops, config.HopBuckets);
|
||||
bucketScore = (bucketScore * categoryMultiplier) / 10000;
|
||||
bucketScore = Math.Clamp(bucketScore, 0, 100);
|
||||
|
||||
explain.AddReachability(hops, bucketScore, input.Category ?? "call graph");
|
||||
}
|
||||
|
||||
// Apply gate multiplier if gates present
|
||||
if (input.Gates is { Count: > 0 })
|
||||
{
|
||||
var gateMultiplier = CalculateGateMultiplierBps(input.Gates, config.GateMultipliersBps);
|
||||
bucketScore = (bucketScore * gateMultiplier) / 10000;
|
||||
|
||||
var primaryGate = input.Gates.OrderByDescending(g => g.Confidence).First();
|
||||
explain.Add("gate", gateMultiplier / 100,
|
||||
$"Gate: {primaryGate.Type}" + (primaryGate.Detail is not null ? $" ({primaryGate.Detail})" : ""));
|
||||
}
|
||||
|
||||
return bucketScore;
|
||||
}
|
||||
|
||||
private int CalculateAdvancedEvidence(
|
||||
EvidenceInput input,
|
||||
DateTimeOffset asOf,
|
||||
ScorePolicy policy,
|
||||
ScoreExplainBuilder explain)
|
||||
{
|
||||
// Use advanced score if available
|
||||
if (input.AdvancedScore.HasValue)
|
||||
{
|
||||
var advScore = (int)Math.Round(input.AdvancedScore.Value * 100);
|
||||
advScore = Math.Clamp(advScore, 0, 100);
|
||||
explain.Add("evidence", advScore, "Advanced evidence score");
|
||||
return advScore;
|
||||
}
|
||||
|
||||
var config = policy.Evidence ?? EvidencePolicyConfig.Default;
|
||||
var points = config.Points ?? EvidencePoints.Default;
|
||||
|
||||
// Sum evidence points with overlap bonus
|
||||
var totalPoints = 0;
|
||||
var typeCount = 0;
|
||||
|
||||
foreach (var type in input.Types)
|
||||
{
|
||||
totalPoints += type switch
|
||||
{
|
||||
EvidenceType.Runtime => points.Runtime,
|
||||
EvidenceType.Dast => points.Dast,
|
||||
EvidenceType.Sast => points.Sast,
|
||||
EvidenceType.Sca => points.Sca,
|
||||
_ => 0
|
||||
};
|
||||
typeCount++;
|
||||
}
|
||||
|
||||
// Multi-evidence overlap bonus (10% per additional type beyond first)
|
||||
if (typeCount > 1)
|
||||
{
|
||||
var overlapBonus = (totalPoints * (typeCount - 1) * 1000) / 10000;
|
||||
totalPoints += overlapBonus;
|
||||
}
|
||||
|
||||
totalPoints = Math.Min(100, totalPoints);
|
||||
|
||||
// Apply freshness multiplier
|
||||
var freshnessMultiplier = 10000;
|
||||
var ageDays = 0;
|
||||
if (input.NewestEvidenceAt.HasValue)
|
||||
{
|
||||
ageDays = Math.Max(0, (int)(asOf - input.NewestEvidenceAt.Value).TotalDays);
|
||||
freshnessMultiplier = _freshnessCalculator.CalculateMultiplierBps(
|
||||
input.NewestEvidenceAt.Value, asOf);
|
||||
}
|
||||
|
||||
var finalEvidence = (totalPoints * freshnessMultiplier) / 10000;
|
||||
explain.AddEvidence(totalPoints, freshnessMultiplier, ageDays);
|
||||
|
||||
return finalEvidence;
|
||||
}
|
||||
|
||||
private int CalculateAdvancedProvenance(
|
||||
ProvenanceInput input,
|
||||
ScorePolicy policy,
|
||||
ScoreExplainBuilder explain)
|
||||
{
|
||||
// Use advanced score if available
|
||||
if (input.AdvancedScore.HasValue)
|
||||
{
|
||||
var advScore = (int)Math.Round(input.AdvancedScore.Value * 100);
|
||||
advScore = Math.Clamp(advScore, 0, 100);
|
||||
explain.Add("provenance", advScore, "Advanced provenance score");
|
||||
return advScore;
|
||||
}
|
||||
|
||||
var config = policy.Provenance ?? ProvenancePolicyConfig.Default;
|
||||
var levels = config.Levels ?? ProvenanceLevels.Default;
|
||||
|
||||
var score = input.Level switch
|
||||
{
|
||||
ProvenanceLevel.Unsigned => levels.Unsigned,
|
||||
ProvenanceLevel.Signed => levels.Signed,
|
||||
ProvenanceLevel.SignedWithSbom => levels.SignedWithSbom,
|
||||
ProvenanceLevel.SignedWithSbomAndAttestations => levels.SignedWithSbomAndAttestations,
|
||||
ProvenanceLevel.Reproducible => levels.Reproducible,
|
||||
_ => levels.Unsigned
|
||||
};
|
||||
|
||||
explain.AddProvenance(input.Level.ToString(), score);
|
||||
return score;
|
||||
}
|
||||
|
||||
private int CalculateUncertaintyPenalty(
|
||||
ScoringInput input,
|
||||
ScoreExplainBuilder explain)
|
||||
{
|
||||
var penalty = 0;
|
||||
|
||||
// Penalty for missing reachability data
|
||||
if (input.Reachability.HopCount is null &&
|
||||
input.Reachability.AdvancedScore is null)
|
||||
{
|
||||
penalty += 5;
|
||||
}
|
||||
|
||||
// Penalty for no evidence
|
||||
if (input.Evidence.Types.Count == 0 &&
|
||||
input.Evidence.AdvancedScore is null)
|
||||
{
|
||||
penalty += 10;
|
||||
}
|
||||
|
||||
// Penalty for unsigned provenance
|
||||
if (input.Provenance.Level == ProvenanceLevel.Unsigned &&
|
||||
input.Provenance.AdvancedScore is null)
|
||||
{
|
||||
penalty += 5;
|
||||
}
|
||||
|
||||
// Penalty for missing CVSS version
|
||||
if (string.IsNullOrEmpty(input.CvssVersion))
|
||||
{
|
||||
penalty += 3;
|
||||
}
|
||||
|
||||
if (penalty > 0)
|
||||
{
|
||||
explain.Add("uncertainty_penalty", -penalty, $"Uncertainty penalty for missing data");
|
||||
}
|
||||
|
||||
return penalty;
|
||||
}
|
||||
|
||||
private static int GetBucketScore(int hops, IReadOnlyList<HopBucket>? buckets)
|
||||
{
|
||||
if (buckets is null or { Count: 0 })
|
||||
{
|
||||
return hops switch
|
||||
{
|
||||
0 => 100,
|
||||
1 => 90,
|
||||
<= 3 => 70,
|
||||
<= 5 => 50,
|
||||
<= 10 => 30,
|
||||
_ => 10
|
||||
};
|
||||
}
|
||||
|
||||
foreach (var bucket in buckets)
|
||||
{
|
||||
if (hops <= bucket.MaxHops)
|
||||
{
|
||||
return bucket.Score;
|
||||
}
|
||||
}
|
||||
|
||||
return buckets[^1].Score;
|
||||
}
|
||||
|
||||
private static int CalculateGateMultiplierBps(
|
||||
IReadOnlyList<DetectedGate> gates,
|
||||
GateMultipliersBps? config)
|
||||
{
|
||||
config ??= GateMultipliersBps.Default;
|
||||
|
||||
var lowestMultiplier = 10000;
|
||||
|
||||
foreach (var gate in gates)
|
||||
{
|
||||
var multiplier = gate.Type.ToLowerInvariant() switch
|
||||
{
|
||||
"feature_flag" or "featureflag" => config.FeatureFlag,
|
||||
"auth_required" or "authrequired" => config.AuthRequired,
|
||||
"admin_only" or "adminonly" => config.AdminOnly,
|
||||
"non_default_config" or "nondefaultconfig" => config.NonDefaultConfig,
|
||||
_ => 10000
|
||||
};
|
||||
|
||||
var weightedMultiplier = (int)(multiplier + ((10000 - multiplier) * (1.0 - gate.Confidence)));
|
||||
lowestMultiplier = Math.Min(lowestMultiplier, weightedMultiplier);
|
||||
}
|
||||
|
||||
return lowestMultiplier;
|
||||
}
|
||||
|
||||
private static (int Score, string? Override) ApplyOverrides(
|
||||
int score,
|
||||
int reachability,
|
||||
int evidence,
|
||||
bool isKnownExploited,
|
||||
ScorePolicy policy)
|
||||
{
|
||||
if (policy.Overrides is null or { Count: 0 })
|
||||
return (score, null);
|
||||
|
||||
foreach (var rule in policy.Overrides)
|
||||
{
|
||||
if (!MatchesCondition(rule.When, reachability, evidence, isKnownExploited))
|
||||
continue;
|
||||
|
||||
if (rule.SetScore.HasValue)
|
||||
return (rule.SetScore.Value, rule.Name);
|
||||
|
||||
if (rule.ClampMaxScore.HasValue && score > rule.ClampMaxScore.Value)
|
||||
return (rule.ClampMaxScore.Value, $"{rule.Name} (clamped)");
|
||||
|
||||
if (rule.ClampMinScore.HasValue && score < rule.ClampMinScore.Value)
|
||||
return (rule.ClampMinScore.Value, $"{rule.Name} (clamped)");
|
||||
}
|
||||
|
||||
return (score, null);
|
||||
}
|
||||
|
||||
private static bool MatchesCondition(
|
||||
ScoreOverrideCondition condition,
|
||||
int reachability,
|
||||
int evidence,
|
||||
bool isKnownExploited)
|
||||
{
|
||||
if (condition.Flags?.TryGetValue("knownExploited", out var kevRequired) == true)
|
||||
{
|
||||
if (kevRequired != isKnownExploited)
|
||||
return false;
|
||||
}
|
||||
|
||||
if (condition.MinReachability.HasValue && reachability < condition.MinReachability.Value)
|
||||
return false;
|
||||
|
||||
if (condition.MaxReachability.HasValue && reachability > condition.MaxReachability.Value)
|
||||
return false;
|
||||
|
||||
if (condition.MinEvidence.HasValue && evidence < condition.MinEvidence.Value)
|
||||
return false;
|
||||
|
||||
if (condition.MaxEvidence.HasValue && evidence > condition.MaxEvidence.Value)
|
||||
return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private static string MapToSeverity(int score) => score switch
|
||||
{
|
||||
>= 90 => "critical",
|
||||
>= 70 => "high",
|
||||
>= 40 => "medium",
|
||||
>= 20 => "low",
|
||||
_ => "info"
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,326 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// SimpleScoringEngine.cs
|
||||
// Sprint: SPRINT_3407_0001_0001_configurable_scoring
|
||||
// Task: PROF-3407-003
|
||||
// Description: Simple 4-factor basis-points scoring engine
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Policy.Scoring;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Scoring.Engines;
|
||||
|
||||
/// <summary>
|
||||
/// Simple 4-factor basis-points scoring engine.
|
||||
/// Formula: riskScore = (wB*B + wR*R + wE*E + wP*P) / 10000
|
||||
/// </summary>
|
||||
public sealed class SimpleScoringEngine : IScoringEngine
|
||||
{
|
||||
private readonly EvidenceFreshnessCalculator _freshnessCalculator;
|
||||
private readonly ILogger<SimpleScoringEngine> _logger;
|
||||
|
||||
public ScoringProfile Profile => ScoringProfile.Simple;
|
||||
|
||||
public SimpleScoringEngine(
|
||||
EvidenceFreshnessCalculator freshnessCalculator,
|
||||
ILogger<SimpleScoringEngine> logger)
|
||||
{
|
||||
_freshnessCalculator = freshnessCalculator ?? throw new ArgumentNullException(nameof(freshnessCalculator));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public Task<ScoringEngineResult> ScoreAsync(
|
||||
ScoringInput input,
|
||||
ScorePolicy policy,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(input);
|
||||
ArgumentNullException.ThrowIfNull(policy);
|
||||
|
||||
var explain = new ScoreExplainBuilder();
|
||||
var weights = policy.WeightsBps;
|
||||
|
||||
// 1. Base Severity: B = round(CVSS * 10)
|
||||
var baseSeverity = (int)Math.Round(input.CvssBase * 10);
|
||||
baseSeverity = Math.Clamp(baseSeverity, 0, 100);
|
||||
explain.AddBaseSeverity(input.CvssBase, baseSeverity);
|
||||
|
||||
// 2. Reachability: R = bucketScore * gateMultiplier / 10000
|
||||
var reachability = CalculateReachability(input.Reachability, policy, explain);
|
||||
|
||||
// 3. Evidence: E = min(100, sum(points)) * freshness / 10000
|
||||
var evidence = CalculateEvidence(input.Evidence, input.AsOf, policy, explain);
|
||||
|
||||
// 4. Provenance: P = level score
|
||||
var provenance = CalculateProvenance(input.Provenance, policy, explain);
|
||||
|
||||
// Final score: (wB*B + wR*R + wE*E + wP*P) / 10000
|
||||
var rawScoreLong =
|
||||
((long)weights.BaseSeverity * baseSeverity) +
|
||||
((long)weights.Reachability * reachability) +
|
||||
((long)weights.Evidence * evidence) +
|
||||
((long)weights.Provenance * provenance);
|
||||
|
||||
var rawScore = (int)(rawScoreLong / 10000);
|
||||
rawScore = Math.Clamp(rawScore, 0, 100);
|
||||
|
||||
// Apply overrides
|
||||
var (finalScore, appliedOverride) = ApplyOverrides(
|
||||
rawScore, reachability, evidence, input.IsKnownExploited, policy);
|
||||
|
||||
var signalValues = new Dictionary<string, int>
|
||||
{
|
||||
["baseSeverity"] = baseSeverity,
|
||||
["reachability"] = reachability,
|
||||
["evidence"] = evidence,
|
||||
["provenance"] = provenance
|
||||
};
|
||||
|
||||
var signalContributions = new Dictionary<string, double>
|
||||
{
|
||||
["baseSeverity"] = (weights.BaseSeverity * baseSeverity) / 10000.0,
|
||||
["reachability"] = (weights.Reachability * reachability) / 10000.0,
|
||||
["evidence"] = (weights.Evidence * evidence) / 10000.0,
|
||||
["provenance"] = (weights.Provenance * provenance) / 10000.0
|
||||
};
|
||||
|
||||
var result = new ScoringEngineResult
|
||||
{
|
||||
FindingId = input.FindingId,
|
||||
ProfileId = input.ProfileId,
|
||||
ProfileVersion = "simple-v1",
|
||||
RawScore = rawScore,
|
||||
FinalScore = finalScore,
|
||||
Severity = MapToSeverity(finalScore),
|
||||
SignalValues = signalValues,
|
||||
SignalContributions = signalContributions,
|
||||
OverrideApplied = appliedOverride,
|
||||
OverrideReason = appliedOverride is not null ? $"Override applied: {appliedOverride}" : null,
|
||||
ScoringProfile = ScoringProfile.Simple,
|
||||
ScoredAt = input.AsOf,
|
||||
Explain = explain.Build()
|
||||
};
|
||||
|
||||
_logger.LogDebug(
|
||||
"Simple score for {FindingId}: B={B}, R={R}, E={E}, P={P} -> Raw={RawScore}, Final={FinalScore} (override: {Override})",
|
||||
input.FindingId, baseSeverity, reachability, evidence, provenance, rawScore, finalScore, appliedOverride);
|
||||
|
||||
return Task.FromResult(result);
|
||||
}
|
||||
|
||||
private int CalculateReachability(
|
||||
ReachabilityInput input,
|
||||
ScorePolicy policy,
|
||||
ScoreExplainBuilder explain)
|
||||
{
|
||||
var config = policy.Reachability ?? ReachabilityPolicyConfig.Default;
|
||||
|
||||
// Get bucket score
|
||||
int bucketScore;
|
||||
if (input.HopCount is null)
|
||||
{
|
||||
bucketScore = config.UnreachableScore;
|
||||
explain.AddReachability(-1, bucketScore, "unreachable");
|
||||
}
|
||||
else
|
||||
{
|
||||
var hops = input.HopCount.Value;
|
||||
bucketScore = GetBucketScore(hops, config.HopBuckets);
|
||||
explain.AddReachability(hops, bucketScore, hops == 0 ? "direct call" : "call graph");
|
||||
}
|
||||
|
||||
// Apply gate multiplier if gates are present
|
||||
if (input.Gates is { Count: > 0 })
|
||||
{
|
||||
var gateMultiplier = CalculateGateMultiplierBps(input.Gates, config.GateMultipliersBps);
|
||||
bucketScore = (bucketScore * gateMultiplier) / 10000;
|
||||
|
||||
var primaryGate = input.Gates.OrderByDescending(g => g.Confidence).First();
|
||||
explain.Add("gate", gateMultiplier / 100,
|
||||
$"Gate: {primaryGate.Type}" + (primaryGate.Detail is not null ? $" ({primaryGate.Detail})" : ""));
|
||||
}
|
||||
|
||||
return bucketScore;
|
||||
}
|
||||
|
||||
private static int GetBucketScore(int hops, IReadOnlyList<HopBucket>? buckets)
|
||||
{
|
||||
if (buckets is null or { Count: 0 })
|
||||
{
|
||||
// Default buckets
|
||||
return hops switch
|
||||
{
|
||||
0 => 100,
|
||||
1 => 90,
|
||||
<= 3 => 70,
|
||||
<= 5 => 50,
|
||||
<= 10 => 30,
|
||||
_ => 10
|
||||
};
|
||||
}
|
||||
|
||||
foreach (var bucket in buckets)
|
||||
{
|
||||
if (hops <= bucket.MaxHops)
|
||||
{
|
||||
return bucket.Score;
|
||||
}
|
||||
}
|
||||
|
||||
return buckets[^1].Score;
|
||||
}
|
||||
|
||||
private static int CalculateGateMultiplierBps(
|
||||
IReadOnlyList<DetectedGate> gates,
|
||||
GateMultipliersBps? config)
|
||||
{
|
||||
config ??= GateMultipliersBps.Default;
|
||||
|
||||
// Find the most restrictive gate (lowest multiplier = highest mitigation)
|
||||
var lowestMultiplier = 10000; // 100% = no mitigation
|
||||
|
||||
foreach (var gate in gates)
|
||||
{
|
||||
var multiplier = gate.Type.ToLowerInvariant() switch
|
||||
{
|
||||
"feature_flag" or "featureflag" => config.FeatureFlag,
|
||||
"auth_required" or "authrequired" => config.AuthRequired,
|
||||
"admin_only" or "adminonly" => config.AdminOnly,
|
||||
"non_default_config" or "nondefaultconfig" => config.NonDefaultConfig,
|
||||
_ => 10000
|
||||
};
|
||||
|
||||
// Weight by confidence
|
||||
var weightedMultiplier = (int)(multiplier + ((10000 - multiplier) * (1.0 - gate.Confidence)));
|
||||
lowestMultiplier = Math.Min(lowestMultiplier, weightedMultiplier);
|
||||
}
|
||||
|
||||
return lowestMultiplier;
|
||||
}
|
||||
|
||||
private int CalculateEvidence(
|
||||
EvidenceInput input,
|
||||
DateTimeOffset asOf,
|
||||
ScorePolicy policy,
|
||||
ScoreExplainBuilder explain)
|
||||
{
|
||||
var config = policy.Evidence ?? EvidencePolicyConfig.Default;
|
||||
var points = config.Points ?? EvidencePoints.Default;
|
||||
|
||||
// Sum evidence points
|
||||
var totalPoints = 0;
|
||||
foreach (var type in input.Types)
|
||||
{
|
||||
totalPoints += type switch
|
||||
{
|
||||
EvidenceType.Runtime => points.Runtime,
|
||||
EvidenceType.Dast => points.Dast,
|
||||
EvidenceType.Sast => points.Sast,
|
||||
EvidenceType.Sca => points.Sca,
|
||||
_ => 0
|
||||
};
|
||||
}
|
||||
totalPoints = Math.Min(100, totalPoints);
|
||||
|
||||
// Apply freshness multiplier
|
||||
var freshnessMultiplier = 10000;
|
||||
var ageDays = 0;
|
||||
if (input.NewestEvidenceAt.HasValue)
|
||||
{
|
||||
ageDays = Math.Max(0, (int)(asOf - input.NewestEvidenceAt.Value).TotalDays);
|
||||
freshnessMultiplier = _freshnessCalculator.CalculateMultiplierBps(
|
||||
input.NewestEvidenceAt.Value, asOf);
|
||||
}
|
||||
|
||||
var finalEvidence = (totalPoints * freshnessMultiplier) / 10000;
|
||||
explain.AddEvidence(totalPoints, freshnessMultiplier, ageDays);
|
||||
|
||||
return finalEvidence;
|
||||
}
|
||||
|
||||
private static int CalculateProvenance(
|
||||
ProvenanceInput input,
|
||||
ScorePolicy policy,
|
||||
ScoreExplainBuilder explain)
|
||||
{
|
||||
var config = policy.Provenance ?? ProvenancePolicyConfig.Default;
|
||||
var levels = config.Levels ?? ProvenanceLevels.Default;
|
||||
|
||||
var score = input.Level switch
|
||||
{
|
||||
ProvenanceLevel.Unsigned => levels.Unsigned,
|
||||
ProvenanceLevel.Signed => levels.Signed,
|
||||
ProvenanceLevel.SignedWithSbom => levels.SignedWithSbom,
|
||||
ProvenanceLevel.SignedWithSbomAndAttestations => levels.SignedWithSbomAndAttestations,
|
||||
ProvenanceLevel.Reproducible => levels.Reproducible,
|
||||
_ => levels.Unsigned
|
||||
};
|
||||
|
||||
explain.AddProvenance(input.Level.ToString(), score);
|
||||
return score;
|
||||
}
|
||||
|
||||
private static (int Score, string? Override) ApplyOverrides(
|
||||
int score,
|
||||
int reachability,
|
||||
int evidence,
|
||||
bool isKnownExploited,
|
||||
ScorePolicy policy)
|
||||
{
|
||||
if (policy.Overrides is null or { Count: 0 })
|
||||
return (score, null);
|
||||
|
||||
foreach (var rule in policy.Overrides)
|
||||
{
|
||||
if (!MatchesCondition(rule.When, reachability, evidence, isKnownExploited))
|
||||
continue;
|
||||
|
||||
if (rule.SetScore.HasValue)
|
||||
return (rule.SetScore.Value, rule.Name);
|
||||
|
||||
if (rule.ClampMaxScore.HasValue && score > rule.ClampMaxScore.Value)
|
||||
return (rule.ClampMaxScore.Value, $"{rule.Name} (clamped)");
|
||||
|
||||
if (rule.ClampMinScore.HasValue && score < rule.ClampMinScore.Value)
|
||||
return (rule.ClampMinScore.Value, $"{rule.Name} (clamped)");
|
||||
}
|
||||
|
||||
return (score, null);
|
||||
}
|
||||
|
||||
private static bool MatchesCondition(
|
||||
ScoreOverrideCondition condition,
|
||||
int reachability,
|
||||
int evidence,
|
||||
bool isKnownExploited)
|
||||
{
|
||||
if (condition.Flags?.TryGetValue("knownExploited", out var kevRequired) == true)
|
||||
{
|
||||
if (kevRequired != isKnownExploited)
|
||||
return false;
|
||||
}
|
||||
|
||||
if (condition.MinReachability.HasValue && reachability < condition.MinReachability.Value)
|
||||
return false;
|
||||
|
||||
if (condition.MaxReachability.HasValue && reachability > condition.MaxReachability.Value)
|
||||
return false;
|
||||
|
||||
if (condition.MinEvidence.HasValue && evidence < condition.MinEvidence.Value)
|
||||
return false;
|
||||
|
||||
if (condition.MaxEvidence.HasValue && evidence > condition.MaxEvidence.Value)
|
||||
return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private static string MapToSeverity(int score) => score switch
|
||||
{
|
||||
>= 90 => "critical",
|
||||
>= 70 => "high",
|
||||
>= 40 => "medium",
|
||||
>= 20 => "low",
|
||||
_ => "info"
|
||||
};
|
||||
}
|
||||
291
src/Policy/StellaOps.Policy.Engine/Scoring/IScoringEngine.cs
Normal file
291
src/Policy/StellaOps.Policy.Engine/Scoring/IScoringEngine.cs
Normal file
@@ -0,0 +1,291 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IScoringEngine.cs
|
||||
// Sprint: SPRINT_3407_0001_0001_configurable_scoring
|
||||
// Task: PROF-3407-002
|
||||
// Description: Interface for pluggable scoring engines
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Policy.Scoring;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Interface for pluggable scoring engines.
|
||||
/// </summary>
|
||||
public interface IScoringEngine
|
||||
{
|
||||
/// <summary>
|
||||
/// Scoring profile this engine implements.
|
||||
/// </summary>
|
||||
ScoringProfile Profile { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Computes risk score for a finding.
|
||||
/// </summary>
|
||||
/// <param name="input">Scoring input with all factors.</param>
|
||||
/// <param name="policy">Score policy configuration.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>Scoring result with explanation.</returns>
|
||||
Task<ScoringEngineResult> ScoreAsync(
|
||||
ScoringInput input,
|
||||
ScorePolicy policy,
|
||||
CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Input for scoring calculation.
|
||||
/// </summary>
|
||||
public sealed record ScoringInput
|
||||
{
|
||||
/// <summary>
|
||||
/// Finding identifier.
|
||||
/// </summary>
|
||||
public required string FindingId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Tenant identifier.
|
||||
/// </summary>
|
||||
public required string TenantId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Profile identifier.
|
||||
/// </summary>
|
||||
public required string ProfileId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Explicit reference time for determinism.
|
||||
/// </summary>
|
||||
public required DateTimeOffset AsOf { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// CVSS base score (0.0-10.0).
|
||||
/// </summary>
|
||||
public required decimal CvssBase { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// CVSS version used.
|
||||
/// </summary>
|
||||
public string? CvssVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Reachability analysis result.
|
||||
/// </summary>
|
||||
public required ReachabilityInput Reachability { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Evidence analysis result.
|
||||
/// </summary>
|
||||
public required EvidenceInput Evidence { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Provenance verification result.
|
||||
/// </summary>
|
||||
public required ProvenanceInput Provenance { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Known Exploited Vulnerability flag.
|
||||
/// </summary>
|
||||
public bool IsKnownExploited { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Input digests for determinism tracking.
|
||||
/// </summary>
|
||||
public IReadOnlyDictionary<string, string>? InputDigests { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reachability analysis input.
|
||||
/// </summary>
|
||||
public sealed record ReachabilityInput
|
||||
{
|
||||
/// <summary>
|
||||
/// Hop count to vulnerable code (null = unreachable).
|
||||
/// </summary>
|
||||
public int? HopCount { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Detected gates on the path.
|
||||
/// </summary>
|
||||
public IReadOnlyList<DetectedGate>? Gates { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Semantic reachability category (current advanced model).
|
||||
/// </summary>
|
||||
public string? Category { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Raw reachability score from advanced engine.
|
||||
/// </summary>
|
||||
public double? AdvancedScore { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A detected gate that may mitigate reachability.
|
||||
/// </summary>
|
||||
/// <param name="Type">Gate type (e.g., "feature_flag", "auth_required").</param>
|
||||
/// <param name="Detail">Additional detail about the gate.</param>
|
||||
/// <param name="Confidence">Confidence level (0.0-1.0).</param>
|
||||
public sealed record DetectedGate(string Type, string? Detail, double Confidence);
|
||||
|
||||
/// <summary>
|
||||
/// Evidence analysis input.
|
||||
/// </summary>
|
||||
public sealed record EvidenceInput
|
||||
{
|
||||
/// <summary>
|
||||
/// Evidence types present.
|
||||
/// </summary>
|
||||
public required IReadOnlySet<EvidenceType> Types { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Newest evidence timestamp.
|
||||
/// </summary>
|
||||
public DateTimeOffset? NewestEvidenceAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Raw evidence score from advanced engine.
|
||||
/// </summary>
|
||||
public double? AdvancedScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Creates an empty evidence input.
|
||||
/// </summary>
|
||||
public static EvidenceInput Empty => new()
|
||||
{
|
||||
Types = new HashSet<EvidenceType>()
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Evidence types that can contribute to scoring.
|
||||
/// </summary>
|
||||
public enum EvidenceType
|
||||
{
|
||||
/// <summary>Runtime execution evidence (highest value).</summary>
|
||||
Runtime,
|
||||
|
||||
/// <summary>Dynamic analysis security testing.</summary>
|
||||
Dast,
|
||||
|
||||
/// <summary>Static analysis security testing.</summary>
|
||||
Sast,
|
||||
|
||||
/// <summary>Software composition analysis.</summary>
|
||||
Sca
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Provenance verification input.
|
||||
/// </summary>
|
||||
public sealed record ProvenanceInput
|
||||
{
|
||||
/// <summary>
|
||||
/// Provenance level.
|
||||
/// </summary>
|
||||
public required ProvenanceLevel Level { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Raw provenance score from advanced engine.
|
||||
/// </summary>
|
||||
public double? AdvancedScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Creates default provenance input (unsigned).
|
||||
/// </summary>
|
||||
public static ProvenanceInput Default => new()
|
||||
{
|
||||
Level = ProvenanceLevel.Unsigned
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Provenance verification levels.
|
||||
/// </summary>
|
||||
public enum ProvenanceLevel
|
||||
{
|
||||
/// <summary>No signature or provenance.</summary>
|
||||
Unsigned,
|
||||
|
||||
/// <summary>Basic signature present.</summary>
|
||||
Signed,
|
||||
|
||||
/// <summary>Signed with SBOM.</summary>
|
||||
SignedWithSbom,
|
||||
|
||||
/// <summary>Signed with SBOM and attestations.</summary>
|
||||
SignedWithSbomAndAttestations,
|
||||
|
||||
/// <summary>Fully reproducible build.</summary>
|
||||
Reproducible
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result from a scoring engine.
|
||||
/// </summary>
|
||||
public sealed record ScoringEngineResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Finding identifier.
|
||||
/// </summary>
|
||||
public required string FindingId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Profile identifier.
|
||||
/// </summary>
|
||||
public required string ProfileId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Profile version/digest.
|
||||
/// </summary>
|
||||
public required string ProfileVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Raw score before overrides (0-100).
|
||||
/// </summary>
|
||||
public required int RawScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Final score after overrides (0-100).
|
||||
/// </summary>
|
||||
public required int FinalScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Severity classification.
|
||||
/// </summary>
|
||||
public required string Severity { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Individual signal values used in scoring.
|
||||
/// </summary>
|
||||
public required IReadOnlyDictionary<string, int> SignalValues { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Contribution of each signal to the final score.
|
||||
/// </summary>
|
||||
public required IReadOnlyDictionary<string, double> SignalContributions { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Override rule that was applied, if any.
|
||||
/// </summary>
|
||||
public string? OverrideApplied { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Reason for override, if any.
|
||||
/// </summary>
|
||||
public string? OverrideReason { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Scoring profile used.
|
||||
/// </summary>
|
||||
public required ScoringProfile ScoringProfile { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Timestamp when scoring was performed.
|
||||
/// </summary>
|
||||
public required DateTimeOffset ScoredAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Structured explanation of score contributions.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<ScoreExplanation> Explain { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,153 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProfileAwareScoringService.cs
|
||||
// Sprint: SPRINT_3407_0001_0001_configurable_scoring
|
||||
// Task: PROF-3407-008
|
||||
// Description: Integrates profile switching into the scoring pipeline
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Policy.Scoring;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Profile-aware scoring service that routes to the appropriate scoring engine.
|
||||
/// </summary>
|
||||
public interface IProfileAwareScoringService
|
||||
{
|
||||
/// <summary>
|
||||
/// Scores a finding using the tenant's configured profile.
|
||||
/// </summary>
|
||||
Task<ScoringEngineResult> ScoreAsync(
|
||||
ScoringInput input,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Scores a finding using a specific profile (for comparison/testing).
|
||||
/// </summary>
|
||||
Task<ScoringEngineResult> ScoreWithProfileAsync(
|
||||
ScoringInput input,
|
||||
ScoringProfile profile,
|
||||
CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Compares scores across different profiles for the same input.
|
||||
/// </summary>
|
||||
Task<ProfileComparisonResult> CompareProfilesAsync(
|
||||
ScoringInput input,
|
||||
CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of comparing scores across different profiles.
|
||||
/// </summary>
|
||||
public sealed record ProfileComparisonResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Finding identifier.
|
||||
/// </summary>
|
||||
public required string FindingId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Results from each profile.
|
||||
/// </summary>
|
||||
public required IReadOnlyDictionary<ScoringProfile, ScoringEngineResult> Results { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Score variance across profiles.
|
||||
/// </summary>
|
||||
public required int ScoreVariance { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether severity differs across profiles.
|
||||
/// </summary>
|
||||
public required bool SeverityDiffers { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Implementation of profile-aware scoring service.
|
||||
/// </summary>
|
||||
public sealed class ProfileAwareScoringService : IProfileAwareScoringService
|
||||
{
|
||||
private readonly IScoringEngineFactory _engineFactory;
|
||||
private readonly IScorePolicyService _policyService;
|
||||
private readonly ILogger<ProfileAwareScoringService> _logger;
|
||||
|
||||
public ProfileAwareScoringService(
|
||||
IScoringEngineFactory engineFactory,
|
||||
IScorePolicyService policyService,
|
||||
ILogger<ProfileAwareScoringService> logger)
|
||||
{
|
||||
_engineFactory = engineFactory ?? throw new ArgumentNullException(nameof(engineFactory));
|
||||
_policyService = policyService ?? throw new ArgumentNullException(nameof(policyService));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public async Task<ScoringEngineResult> ScoreAsync(
|
||||
ScoringInput input,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(input);
|
||||
|
||||
var engine = _engineFactory.GetEngineForTenant(input.TenantId);
|
||||
var policy = _policyService.GetPolicy(input.TenantId);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Scoring finding {FindingId} with {Profile} profile for tenant {TenantId}",
|
||||
input.FindingId, engine.Profile, input.TenantId);
|
||||
|
||||
return await engine.ScoreAsync(input, policy, ct).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task<ScoringEngineResult> ScoreWithProfileAsync(
|
||||
ScoringInput input,
|
||||
ScoringProfile profile,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(input);
|
||||
|
||||
var engine = _engineFactory.GetEngine(profile);
|
||||
var policy = _policyService.GetPolicy(input.TenantId);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Scoring finding {FindingId} with explicit {Profile} profile",
|
||||
input.FindingId, profile);
|
||||
|
||||
return await engine.ScoreAsync(input, policy, ct).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
public async Task<ProfileComparisonResult> CompareProfilesAsync(
|
||||
ScoringInput input,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(input);
|
||||
|
||||
var profiles = _engineFactory.GetAvailableProfiles();
|
||||
var policy = _policyService.GetPolicy(input.TenantId);
|
||||
var results = new Dictionary<ScoringProfile, ScoringEngineResult>();
|
||||
|
||||
foreach (var profile in profiles)
|
||||
{
|
||||
var engine = _engineFactory.GetEngine(profile);
|
||||
var result = await engine.ScoreAsync(input, policy, ct).ConfigureAwait(false);
|
||||
results[profile] = result;
|
||||
}
|
||||
|
||||
var scores = results.Values.Select(r => r.FinalScore).ToList();
|
||||
var severities = results.Values.Select(r => r.Severity).Distinct().ToList();
|
||||
|
||||
var comparison = new ProfileComparisonResult
|
||||
{
|
||||
FindingId = input.FindingId,
|
||||
Results = results,
|
||||
ScoreVariance = scores.Count > 0 ? scores.Max() - scores.Min() : 0,
|
||||
SeverityDiffers = severities.Count > 1
|
||||
};
|
||||
|
||||
_logger.LogInformation(
|
||||
"Profile comparison for {FindingId}: variance={Variance}, severity_differs={SeverityDiffers}",
|
||||
input.FindingId, comparison.ScoreVariance, comparison.SeverityDiffers);
|
||||
|
||||
return comparison;
|
||||
}
|
||||
}
|
||||
@@ -132,6 +132,7 @@ public enum RiskScoringJobStatus
|
||||
/// <param name="OverrideApplied">Override rule that was applied, if any.</param>
|
||||
/// <param name="OverrideReason">Reason for the override, if any.</param>
|
||||
/// <param name="ScoredAt">Timestamp when scoring was performed.</param>
|
||||
/// <param name="ScoringProfile">Scoring profile used (Simple, Advanced, Custom).</param>
|
||||
public sealed record RiskScoringResult(
|
||||
[property: JsonPropertyName("finding_id")] string FindingId,
|
||||
[property: JsonPropertyName("profile_id")] string ProfileId,
|
||||
@@ -143,7 +144,8 @@ public sealed record RiskScoringResult(
|
||||
[property: JsonPropertyName("signal_contributions")] IReadOnlyDictionary<string, double> SignalContributions,
|
||||
[property: JsonPropertyName("override_applied")] string? OverrideApplied,
|
||||
[property: JsonPropertyName("override_reason")] string? OverrideReason,
|
||||
[property: JsonPropertyName("scored_at")] DateTimeOffset ScoredAt)
|
||||
[property: JsonPropertyName("scored_at")] DateTimeOffset ScoredAt,
|
||||
[property: JsonPropertyName("scoring_profile")] string? ScoringProfile = null)
|
||||
{
|
||||
private IReadOnlyList<ScoreExplanation> _explain = Array.Empty<ScoreExplanation>();
|
||||
|
||||
|
||||
@@ -0,0 +1,102 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ScoringEngineFactory.cs
|
||||
// Sprint: SPRINT_3407_0001_0001_configurable_scoring
|
||||
// Task: PROF-3407-005
|
||||
// Description: Factory for creating scoring engines based on profile
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Policy.Engine.Scoring.Engines;
|
||||
using StellaOps.Policy.Scoring;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Factory for creating scoring engines based on profile.
|
||||
/// </summary>
|
||||
public interface IScoringEngineFactory
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets a scoring engine for the specified profile.
|
||||
/// </summary>
|
||||
IScoringEngine GetEngine(ScoringProfile profile);
|
||||
|
||||
/// <summary>
|
||||
/// Gets a scoring engine for a tenant's configured profile.
|
||||
/// </summary>
|
||||
IScoringEngine GetEngineForTenant(string tenantId);
|
||||
|
||||
/// <summary>
|
||||
/// Gets all available profiles.
|
||||
/// </summary>
|
||||
IReadOnlyList<ScoringProfile> GetAvailableProfiles();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Default implementation of scoring engine factory.
|
||||
/// </summary>
|
||||
public sealed class ScoringEngineFactory : IScoringEngineFactory
|
||||
{
|
||||
private readonly IServiceProvider _services;
|
||||
private readonly IScoringProfileService _profileService;
|
||||
private readonly ILogger<ScoringEngineFactory> _logger;
|
||||
|
||||
public ScoringEngineFactory(
|
||||
IServiceProvider services,
|
||||
IScoringProfileService profileService,
|
||||
ILogger<ScoringEngineFactory> logger)
|
||||
{
|
||||
_services = services ?? throw new ArgumentNullException(nameof(services));
|
||||
_profileService = profileService ?? throw new ArgumentNullException(nameof(profileService));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets a scoring engine for the specified profile.
|
||||
/// </summary>
|
||||
public IScoringEngine GetEngine(ScoringProfile profile)
|
||||
{
|
||||
var engine = profile switch
|
||||
{
|
||||
ScoringProfile.Simple => _services.GetRequiredService<SimpleScoringEngine>(),
|
||||
ScoringProfile.Advanced => _services.GetRequiredService<AdvancedScoringEngine>(),
|
||||
ScoringProfile.Custom => throw new NotSupportedException(
|
||||
"Custom scoring profile requires Rego policy configuration. Use GetCustomEngine instead."),
|
||||
_ => throw new ArgumentOutOfRangeException(nameof(profile), profile, "Unknown scoring profile")
|
||||
};
|
||||
|
||||
_logger.LogDebug("Created scoring engine for profile {Profile}", profile);
|
||||
return engine;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets a scoring engine for a tenant's configured profile.
|
||||
/// </summary>
|
||||
public IScoringEngine GetEngineForTenant(string tenantId)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
|
||||
|
||||
var profileConfig = _profileService.GetProfileForTenant(tenantId);
|
||||
var profile = profileConfig?.Profile ?? ScoringProfile.Advanced;
|
||||
|
||||
_logger.LogDebug(
|
||||
"Resolved scoring profile {Profile} for tenant {TenantId}",
|
||||
profile, tenantId);
|
||||
|
||||
return GetEngine(profile);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets all available profiles.
|
||||
/// </summary>
|
||||
public IReadOnlyList<ScoringProfile> GetAvailableProfiles()
|
||||
{
|
||||
return
|
||||
[
|
||||
ScoringProfile.Simple,
|
||||
ScoringProfile.Advanced
|
||||
// Custom is not listed as generally available
|
||||
];
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,156 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ScoringProfileService.cs
|
||||
// Sprint: SPRINT_3407_0001_0001_configurable_scoring
|
||||
// Task: PROF-3407-006
|
||||
// Description: Service for managing tenant scoring profile configurations
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Concurrent;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Policy.Scoring;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Service for managing tenant scoring profile configurations.
|
||||
/// </summary>
|
||||
public interface IScoringProfileService
|
||||
{
|
||||
/// <summary>
|
||||
/// Gets the scoring profile configuration for a tenant.
|
||||
/// </summary>
|
||||
/// <param name="tenantId">Tenant identifier.</param>
|
||||
/// <returns>Profile configuration, or null for default.</returns>
|
||||
ScoringProfileConfig? GetProfileForTenant(string tenantId);
|
||||
|
||||
/// <summary>
|
||||
/// Sets the scoring profile for a tenant.
|
||||
/// </summary>
|
||||
/// <param name="tenantId">Tenant identifier.</param>
|
||||
/// <param name="config">Profile configuration.</param>
|
||||
void SetProfileForTenant(string tenantId, ScoringProfileConfig config);
|
||||
|
||||
/// <summary>
|
||||
/// Removes custom profile for a tenant (reverts to default).
|
||||
/// </summary>
|
||||
/// <param name="tenantId">Tenant identifier.</param>
|
||||
/// <returns>True if a profile was removed.</returns>
|
||||
bool RemoveProfileForTenant(string tenantId);
|
||||
|
||||
/// <summary>
|
||||
/// Gets all tenants with custom profile configurations.
|
||||
/// </summary>
|
||||
IReadOnlyDictionary<string, ScoringProfileConfig> GetAllProfiles();
|
||||
|
||||
/// <summary>
|
||||
/// Gets the default profile for new tenants.
|
||||
/// </summary>
|
||||
ScoringProfileConfig DefaultProfile { get; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// In-memory implementation of scoring profile service.
|
||||
/// For production, this should be backed by persistent storage.
|
||||
/// </summary>
|
||||
public sealed class ScoringProfileService : IScoringProfileService
|
||||
{
|
||||
private readonly ConcurrentDictionary<string, ScoringProfileConfig> _profiles = new();
|
||||
private readonly IScorePolicyService _policyService;
|
||||
private readonly ILogger<ScoringProfileService> _logger;
|
||||
|
||||
public ScoringProfileConfig DefaultProfile { get; } = ScoringProfileConfig.DefaultAdvanced;
|
||||
|
||||
public ScoringProfileService(
|
||||
IScorePolicyService policyService,
|
||||
ILogger<ScoringProfileService> logger)
|
||||
{
|
||||
_policyService = policyService ?? throw new ArgumentNullException(nameof(policyService));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public ScoringProfileConfig? GetProfileForTenant(string tenantId)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
|
||||
|
||||
// First, check for explicit tenant profile
|
||||
if (_profiles.TryGetValue(tenantId, out var profile))
|
||||
{
|
||||
return profile;
|
||||
}
|
||||
|
||||
// Then, check the score policy for profile setting
|
||||
try
|
||||
{
|
||||
var policy = _policyService.GetPolicy(tenantId);
|
||||
var policyProfile = ParseProfileFromPolicy(policy);
|
||||
if (policyProfile.HasValue)
|
||||
{
|
||||
return new ScoringProfileConfig
|
||||
{
|
||||
Profile = policyProfile.Value
|
||||
};
|
||||
}
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex,
|
||||
"Failed to get score policy for tenant {TenantId}, using default profile",
|
||||
tenantId);
|
||||
}
|
||||
|
||||
// Default: return null (caller uses default)
|
||||
return null;
|
||||
}
|
||||
|
||||
public void SetProfileForTenant(string tenantId, ScoringProfileConfig config)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
|
||||
ArgumentNullException.ThrowIfNull(config);
|
||||
|
||||
_profiles[tenantId] = config;
|
||||
|
||||
_logger.LogInformation(
|
||||
"Set scoring profile {Profile} for tenant {TenantId}",
|
||||
config.Profile, tenantId);
|
||||
}
|
||||
|
||||
public bool RemoveProfileForTenant(string tenantId)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
|
||||
|
||||
var removed = _profiles.TryRemove(tenantId, out _);
|
||||
|
||||
if (removed)
|
||||
{
|
||||
_logger.LogInformation(
|
||||
"Removed custom scoring profile for tenant {TenantId}, reverted to default",
|
||||
tenantId);
|
||||
}
|
||||
|
||||
return removed;
|
||||
}
|
||||
|
||||
public IReadOnlyDictionary<string, ScoringProfileConfig> GetAllProfiles()
|
||||
{
|
||||
return _profiles.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
|
||||
}
|
||||
|
||||
private static ScoringProfile? ParseProfileFromPolicy(ScorePolicy policy)
|
||||
{
|
||||
// Check if policy has a scoring profile setting
|
||||
// This would be read from the YAML scoringProfile field
|
||||
var profileStr = policy.ScoringProfile;
|
||||
if (string.IsNullOrWhiteSpace(profileStr))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return profileStr.ToLowerInvariant() switch
|
||||
{
|
||||
"simple" => ScoringProfile.Simple,
|
||||
"advanced" => ScoringProfile.Advanced,
|
||||
"custom" => ScoringProfile.Custom,
|
||||
_ => null
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -6,6 +6,13 @@ namespace StellaOps.Policy.Scoring;
|
||||
public sealed record ScorePolicy
|
||||
{
|
||||
public required string PolicyVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Scoring profile to use. Defaults to "advanced".
|
||||
/// Options: "simple", "advanced", "custom"
|
||||
/// </summary>
|
||||
public string ScoringProfile { get; init; } = "advanced";
|
||||
|
||||
public required WeightsBps WeightsBps { get; init; }
|
||||
public ReachabilityPolicyConfig? Reachability { get; init; }
|
||||
public EvidencePolicyConfig? Evidence { get; init; }
|
||||
@@ -28,6 +35,7 @@ public sealed record ScorePolicy
|
||||
public static ScorePolicy Default => new()
|
||||
{
|
||||
PolicyVersion = "score.v1",
|
||||
ScoringProfile = "advanced",
|
||||
WeightsBps = WeightsBps.Default,
|
||||
Reachability = ReachabilityPolicyConfig.Default,
|
||||
Evidence = EvidencePolicyConfig.Default,
|
||||
|
||||
@@ -0,0 +1,318 @@
|
||||
// =============================================================================
|
||||
// ScorePolicyValidator.cs
|
||||
// Sprint: SPRINT_3402_0001_0001
|
||||
// Task: YAML-3402-003 - Implement ScorePolicyValidator with JSON Schema validation
|
||||
// =============================================================================
|
||||
|
||||
using System.Text.Json;
|
||||
using Json.Schema;
|
||||
|
||||
namespace StellaOps.Policy.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Validates score policies against JSON Schema.
|
||||
/// </summary>
|
||||
public sealed class ScorePolicyValidator
|
||||
{
|
||||
private readonly JsonSchema _schema;
|
||||
|
||||
/// <summary>
|
||||
/// Creates a validator with the embedded score.v1 schema.
|
||||
/// </summary>
|
||||
public ScorePolicyValidator()
|
||||
{
|
||||
_schema = JsonSchema.FromText(ScorePolicySchemaJson);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a validator with a custom schema.
|
||||
/// </summary>
|
||||
public ScorePolicyValidator(string schemaJson)
|
||||
{
|
||||
_schema = JsonSchema.FromText(schemaJson);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Validates a score policy.
|
||||
/// </summary>
|
||||
/// <param name="policy">The policy to validate</param>
|
||||
/// <returns>Validation result with errors if any</returns>
|
||||
public ScorePolicyValidationResult Validate(ScorePolicy policy)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(policy);
|
||||
|
||||
var json = JsonSerializer.Serialize(policy, JsonOptions);
|
||||
var jsonDoc = JsonDocument.Parse(json);
|
||||
|
||||
var result = _schema.Evaluate(jsonDoc.RootElement);
|
||||
|
||||
if (result.IsValid)
|
||||
{
|
||||
return new ScorePolicyValidationResult(true, []);
|
||||
}
|
||||
|
||||
var errors = CollectErrors(result);
|
||||
return new ScorePolicyValidationResult(false, errors);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Validates JSON content against the schema.
|
||||
/// </summary>
|
||||
public ScorePolicyValidationResult ValidateJson(string json)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(json))
|
||||
{
|
||||
return new ScorePolicyValidationResult(false, ["JSON content is empty"]);
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var jsonDoc = JsonDocument.Parse(json);
|
||||
var result = _schema.Evaluate(jsonDoc.RootElement);
|
||||
|
||||
if (result.IsValid)
|
||||
{
|
||||
return new ScorePolicyValidationResult(true, []);
|
||||
}
|
||||
|
||||
var errors = CollectErrors(result);
|
||||
return new ScorePolicyValidationResult(false, errors);
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
return new ScorePolicyValidationResult(false, [$"Invalid JSON: {ex.Message}"]);
|
||||
}
|
||||
}
|
||||
|
||||
private static List<string> CollectErrors(EvaluationResults result)
|
||||
{
|
||||
var errors = new List<string>();
|
||||
CollectErrorsRecursive(result, errors);
|
||||
return errors;
|
||||
}
|
||||
|
||||
private static void CollectErrorsRecursive(EvaluationResults result, List<string> errors)
|
||||
{
|
||||
if (!result.IsValid && result.Errors is { Count: > 0 })
|
||||
{
|
||||
foreach (var error in result.Errors)
|
||||
{
|
||||
errors.Add($"{result.InstanceLocation}: {error.Key} - {error.Value}");
|
||||
}
|
||||
}
|
||||
|
||||
if (result.Details is null) return;
|
||||
|
||||
foreach (var detail in result.Details)
|
||||
{
|
||||
CollectErrorsRecursive(detail, errors);
|
||||
}
|
||||
}
|
||||
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Embedded JSON Schema for score.v1 policies.
|
||||
/// </summary>
|
||||
private const string ScorePolicySchemaJson = """
|
||||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"$id": "https://stellaops.dev/schemas/score-policy.v1.json",
|
||||
"title": "Score Policy",
|
||||
"type": "object",
|
||||
"required": ["policyVersion", "policyId", "weightsBps"],
|
||||
"properties": {
|
||||
"policyVersion": {
|
||||
"type": "string",
|
||||
"const": "score.v1"
|
||||
},
|
||||
"policyId": {
|
||||
"type": "string",
|
||||
"minLength": 1
|
||||
},
|
||||
"policyName": {
|
||||
"type": "string"
|
||||
},
|
||||
"description": {
|
||||
"type": "string"
|
||||
},
|
||||
"weightsBps": {
|
||||
"$ref": "#/$defs/WeightsBps"
|
||||
},
|
||||
"reachabilityConfig": {
|
||||
"$ref": "#/$defs/ReachabilityConfig"
|
||||
},
|
||||
"evidenceConfig": {
|
||||
"$ref": "#/$defs/EvidenceConfig"
|
||||
},
|
||||
"provenanceConfig": {
|
||||
"$ref": "#/$defs/ProvenanceConfig"
|
||||
},
|
||||
"overrides": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/$defs/ScoreOverride"
|
||||
}
|
||||
}
|
||||
},
|
||||
"$defs": {
|
||||
"WeightsBps": {
|
||||
"type": "object",
|
||||
"required": ["baseSeverity", "reachability", "evidence", "provenance"],
|
||||
"properties": {
|
||||
"baseSeverity": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"maximum": 10000
|
||||
},
|
||||
"reachability": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"maximum": 10000
|
||||
},
|
||||
"evidence": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"maximum": 10000
|
||||
},
|
||||
"provenance": {
|
||||
"type": "integer",
|
||||
"minimum": 0,
|
||||
"maximum": 10000
|
||||
}
|
||||
}
|
||||
},
|
||||
"ReachabilityConfig": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"reachableMultiplier": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"maximum": 2
|
||||
},
|
||||
"unreachableMultiplier": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"maximum": 2
|
||||
},
|
||||
"unknownMultiplier": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"maximum": 2
|
||||
}
|
||||
}
|
||||
},
|
||||
"EvidenceConfig": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"kevWeight": {
|
||||
"type": "number",
|
||||
"minimum": 0
|
||||
},
|
||||
"epssThreshold": {
|
||||
"type": "number",
|
||||
"minimum": 0,
|
||||
"maximum": 1
|
||||
},
|
||||
"epssWeight": {
|
||||
"type": "number",
|
||||
"minimum": 0
|
||||
}
|
||||
}
|
||||
},
|
||||
"ProvenanceConfig": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"signedBonus": {
|
||||
"type": "number"
|
||||
},
|
||||
"rekorVerifiedBonus": {
|
||||
"type": "number"
|
||||
},
|
||||
"unsignedPenalty": {
|
||||
"type": "number"
|
||||
}
|
||||
}
|
||||
},
|
||||
"ScoreOverride": {
|
||||
"type": "object",
|
||||
"required": ["id", "match"],
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string"
|
||||
},
|
||||
"match": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"cvePattern": {
|
||||
"type": "string"
|
||||
},
|
||||
"purlPattern": {
|
||||
"type": "string"
|
||||
},
|
||||
"severityEquals": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
},
|
||||
"action": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"setScore": {
|
||||
"type": "number"
|
||||
},
|
||||
"addScore": {
|
||||
"type": "number"
|
||||
},
|
||||
"multiplyScore": {
|
||||
"type": "number"
|
||||
}
|
||||
}
|
||||
},
|
||||
"reason": {
|
||||
"type": "string"
|
||||
},
|
||||
"expires": {
|
||||
"type": "string",
|
||||
"format": "date-time"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
""";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of score policy validation.
|
||||
/// </summary>
|
||||
/// <param name="IsValid">Whether the policy is valid</param>
|
||||
/// <param name="Errors">List of validation errors (empty if valid)</param>
|
||||
public readonly record struct ScorePolicyValidationResult(bool IsValid, IReadOnlyList<string> Errors)
|
||||
{
|
||||
/// <summary>
|
||||
/// Throws if validation failed.
|
||||
/// </summary>
|
||||
public void ThrowIfInvalid(string context = "")
|
||||
{
|
||||
if (!IsValid)
|
||||
{
|
||||
var prefix = string.IsNullOrEmpty(context) ? "" : $"{context}: ";
|
||||
throw new ScorePolicyValidationException(
|
||||
$"{prefix}Score policy validation failed: {string.Join("; ", Errors)}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Exception thrown when score policy validation fails.
|
||||
/// </summary>
|
||||
public sealed class ScorePolicyValidationException : Exception
|
||||
{
|
||||
public ScorePolicyValidationException(string message) : base(message) { }
|
||||
public ScorePolicyValidationException(string message, Exception inner) : base(message, inner) { }
|
||||
}
|
||||
@@ -0,0 +1,71 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ScoringProfile.cs
|
||||
// Sprint: SPRINT_3407_0001_0001_configurable_scoring
|
||||
// Task: PROF-3407-001
|
||||
// Description: Defines scoring profiles for pluggable scoring engines
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Available scoring profiles.
|
||||
/// </summary>
|
||||
public enum ScoringProfile
|
||||
{
|
||||
/// <summary>
|
||||
/// Simple 4-factor basis-points weighted scoring.
|
||||
/// Formula: riskScore = (wB*B + wR*R + wE*E + wP*P) / 10000
|
||||
/// Transparent, customer-configurable via YAML.
|
||||
/// </summary>
|
||||
Simple,
|
||||
|
||||
/// <summary>
|
||||
/// Advanced entropy-based + CVSS hybrid scoring.
|
||||
/// Uses uncertainty tiers, entropy penalties, and CVSS v4.0 receipts.
|
||||
/// Default for new deployments.
|
||||
/// </summary>
|
||||
Advanced,
|
||||
|
||||
/// <summary>
|
||||
/// Custom scoring using fully user-defined rules.
|
||||
/// Requires Rego policy configuration.
|
||||
/// </summary>
|
||||
Custom
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Scoring profile configuration.
|
||||
/// </summary>
|
||||
public sealed record ScoringProfileConfig
|
||||
{
|
||||
/// <summary>
|
||||
/// Active scoring profile.
|
||||
/// </summary>
|
||||
public required ScoringProfile Profile { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Profile-specific settings.
|
||||
/// </summary>
|
||||
public IReadOnlyDictionary<string, string>? Settings { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// For Custom profile: path to Rego policy.
|
||||
/// </summary>
|
||||
public string? CustomPolicyPath { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Creates default configuration for Advanced profile.
|
||||
/// </summary>
|
||||
public static ScoringProfileConfig DefaultAdvanced => new()
|
||||
{
|
||||
Profile = ScoringProfile.Advanced
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Creates default configuration for Simple profile.
|
||||
/// </summary>
|
||||
public static ScoringProfileConfig DefaultSimple => new()
|
||||
{
|
||||
Profile = ScoringProfile.Simple
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,330 @@
|
||||
// =============================================================================
|
||||
// AdvancedScoringEngineTests.cs
|
||||
// Sprint: SPRINT_3407_0001_0001_configurable_scoring
|
||||
// Task: PROF-3407-011 - Unit tests for AdvancedScoringEngine (regression)
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Policy.Engine.Scoring.Engines;
|
||||
using StellaOps.Policy.Scoring;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Scoring.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for AdvancedScoringEngine.
|
||||
/// Ensures regression testing for existing advanced scoring functionality.
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
[Trait("Sprint", "3407")]
|
||||
public sealed class AdvancedScoringEngineTests
|
||||
{
|
||||
private readonly AdvancedScoringEngine _engine;
|
||||
private readonly EvidenceFreshnessCalculator _freshnessCalculator;
|
||||
private readonly ScorePolicy _defaultPolicy;
|
||||
|
||||
public AdvancedScoringEngineTests()
|
||||
{
|
||||
_freshnessCalculator = new EvidenceFreshnessCalculator();
|
||||
_engine = new AdvancedScoringEngine(
|
||||
_freshnessCalculator,
|
||||
NullLogger<AdvancedScoringEngine>.Instance);
|
||||
_defaultPolicy = ScorePolicy.Default;
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Profile returns Advanced")]
|
||||
public void Profile_ReturnsAdvanced()
|
||||
{
|
||||
_engine.Profile.Should().Be(ScoringProfile.Advanced);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync applies CVSS version adjustment")]
|
||||
public async Task ScoreAsync_AppliesCvssVersionAdjustment()
|
||||
{
|
||||
var v4Input = CreateInput(cvss: 8.0m, hopCount: 0, cvssVersion: "4.0");
|
||||
var v31Input = CreateInput(cvss: 8.0m, hopCount: 0, cvssVersion: "3.1");
|
||||
var v2Input = CreateInput(cvss: 8.0m, hopCount: 0, cvssVersion: "2.0");
|
||||
|
||||
var v4Result = await _engine.ScoreAsync(v4Input, _defaultPolicy);
|
||||
var v31Result = await _engine.ScoreAsync(v31Input, _defaultPolicy);
|
||||
var v2Result = await _engine.ScoreAsync(v2Input, _defaultPolicy);
|
||||
|
||||
// v4.0 should have highest base severity, v2.0 lowest
|
||||
v4Result.SignalValues["baseSeverity"].Should().BeGreaterThan(v31Result.SignalValues["baseSeverity"]);
|
||||
v31Result.SignalValues["baseSeverity"].Should().BeGreaterThan(v2Result.SignalValues["baseSeverity"]);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync applies KEV boost for known exploited")]
|
||||
public async Task ScoreAsync_AppliesKevBoost()
|
||||
{
|
||||
var normalInput = CreateInput(cvss: 5.0m, hopCount: 2);
|
||||
var kevInput = CreateInput(cvss: 5.0m, hopCount: 2) with
|
||||
{
|
||||
IsKnownExploited = true
|
||||
};
|
||||
|
||||
var normalResult = await _engine.ScoreAsync(normalInput, _defaultPolicy);
|
||||
var kevResult = await _engine.ScoreAsync(kevInput, _defaultPolicy);
|
||||
|
||||
kevResult.RawScore.Should().BeGreaterThan(normalResult.RawScore);
|
||||
kevResult.SignalValues["kevBoost"].Should().Be(20);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync applies uncertainty penalty for missing data")]
|
||||
public async Task ScoreAsync_AppliesUncertaintyPenalty()
|
||||
{
|
||||
var completeInput = CreateInput(cvss: 5.0m, hopCount: 2, cvssVersion: "4.0");
|
||||
completeInput = completeInput with
|
||||
{
|
||||
Evidence = new EvidenceInput
|
||||
{
|
||||
Types = new HashSet<EvidenceType> { EvidenceType.Runtime },
|
||||
NewestEvidenceAt = DateTimeOffset.UtcNow
|
||||
},
|
||||
Provenance = new ProvenanceInput { Level = ProvenanceLevel.Signed }
|
||||
};
|
||||
|
||||
var incompleteInput = CreateInput(cvss: 5.0m, hopCount: null);
|
||||
|
||||
var completeResult = await _engine.ScoreAsync(completeInput, _defaultPolicy);
|
||||
var incompleteResult = await _engine.ScoreAsync(incompleteInput, _defaultPolicy);
|
||||
|
||||
incompleteResult.SignalValues["uncertaintyPenalty"].Should().BeGreaterThan(0);
|
||||
completeResult.SignalValues["uncertaintyPenalty"].Should().Be(0);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync uses advanced reachability score when provided")]
|
||||
public async Task ScoreAsync_UsesAdvancedReachabilityScore()
|
||||
{
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 5);
|
||||
input = input with
|
||||
{
|
||||
Reachability = input.Reachability with
|
||||
{
|
||||
AdvancedScore = 0.95,
|
||||
Category = "api_endpoint"
|
||||
}
|
||||
};
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result.SignalValues["reachability"].Should().Be(95);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync applies semantic category multiplier")]
|
||||
public async Task ScoreAsync_AppliesSemanticCategoryMultiplier()
|
||||
{
|
||||
var apiInput = CreateInput(cvss: 5.0m, hopCount: 2);
|
||||
apiInput = apiInput with
|
||||
{
|
||||
Reachability = apiInput.Reachability with
|
||||
{
|
||||
Category = "api_endpoint"
|
||||
}
|
||||
};
|
||||
|
||||
var internalInput = CreateInput(cvss: 5.0m, hopCount: 2);
|
||||
internalInput = internalInput with
|
||||
{
|
||||
Reachability = internalInput.Reachability with
|
||||
{
|
||||
Category = "internal_service"
|
||||
}
|
||||
};
|
||||
|
||||
var deadCodeInput = CreateInput(cvss: 5.0m, hopCount: 2);
|
||||
deadCodeInput = deadCodeInput with
|
||||
{
|
||||
Reachability = deadCodeInput.Reachability with
|
||||
{
|
||||
Category = "dead_code"
|
||||
}
|
||||
};
|
||||
|
||||
var apiResult = await _engine.ScoreAsync(apiInput, _defaultPolicy);
|
||||
var internalResult = await _engine.ScoreAsync(internalInput, _defaultPolicy);
|
||||
var deadCodeResult = await _engine.ScoreAsync(deadCodeInput, _defaultPolicy);
|
||||
|
||||
apiResult.SignalValues["reachability"].Should().BeGreaterThan(internalResult.SignalValues["reachability"]);
|
||||
internalResult.SignalValues["reachability"].Should().BeGreaterThan(deadCodeResult.SignalValues["reachability"]);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync applies multi-evidence overlap bonus")]
|
||||
public async Task ScoreAsync_AppliesMultiEvidenceOverlapBonus()
|
||||
{
|
||||
var asOf = DateTimeOffset.UtcNow;
|
||||
var singleInput = CreateInput(cvss: 5.0m, hopCount: 0, asOf: asOf);
|
||||
singleInput = singleInput with
|
||||
{
|
||||
Evidence = new EvidenceInput
|
||||
{
|
||||
Types = new HashSet<EvidenceType> { EvidenceType.Sca },
|
||||
NewestEvidenceAt = asOf
|
||||
}
|
||||
};
|
||||
|
||||
var multiInput = CreateInput(cvss: 5.0m, hopCount: 0, asOf: asOf);
|
||||
multiInput = multiInput with
|
||||
{
|
||||
Evidence = new EvidenceInput
|
||||
{
|
||||
Types = new HashSet<EvidenceType> { EvidenceType.Sca, EvidenceType.Sast, EvidenceType.Dast },
|
||||
NewestEvidenceAt = asOf
|
||||
}
|
||||
};
|
||||
|
||||
var singleResult = await _engine.ScoreAsync(singleInput, _defaultPolicy);
|
||||
var multiResult = await _engine.ScoreAsync(multiInput, _defaultPolicy);
|
||||
|
||||
// Multi-evidence should have higher score due to overlap bonus
|
||||
multiResult.SignalValues["evidence"].Should().BeGreaterThan(singleResult.SignalValues["evidence"]);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync uses advanced evidence score when provided")]
|
||||
public async Task ScoreAsync_UsesAdvancedEvidenceScore()
|
||||
{
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 0);
|
||||
input = input with
|
||||
{
|
||||
Evidence = new EvidenceInput
|
||||
{
|
||||
Types = new HashSet<EvidenceType>(),
|
||||
AdvancedScore = 0.75
|
||||
}
|
||||
};
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result.SignalValues["evidence"].Should().Be(75);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync uses advanced provenance score when provided")]
|
||||
public async Task ScoreAsync_UsesAdvancedProvenanceScore()
|
||||
{
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 0);
|
||||
input = input with
|
||||
{
|
||||
Provenance = new ProvenanceInput
|
||||
{
|
||||
Level = ProvenanceLevel.Unsigned,
|
||||
AdvancedScore = 0.80
|
||||
}
|
||||
};
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result.SignalValues["provenance"].Should().Be(80);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync is deterministic")]
|
||||
public async Task ScoreAsync_IsDeterministic()
|
||||
{
|
||||
var asOf = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
|
||||
var input = CreateInput(cvss: 7.5m, hopCount: 2, asOf: asOf);
|
||||
|
||||
var result1 = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
var result2 = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result1.RawScore.Should().Be(result2.RawScore);
|
||||
result1.FinalScore.Should().Be(result2.FinalScore);
|
||||
result1.Severity.Should().Be(result2.Severity);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync generates explain entries with advanced factors")]
|
||||
public async Task ScoreAsync_GeneratesExplainEntriesWithAdvancedFactors()
|
||||
{
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 3) with
|
||||
{
|
||||
IsKnownExploited = true
|
||||
};
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result.Explain.Should().NotBeEmpty();
|
||||
result.Explain.Should().Contain(e => e.Factor == "baseSeverity");
|
||||
result.Explain.Should().Contain(e => e.Factor == "reachability");
|
||||
result.Explain.Should().Contain(e => e.Factor == "kev_boost");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with missing CVSS version applies uncertainty penalty")]
|
||||
public async Task ScoreAsync_MissingCvssVersion_AppliesUncertaintyPenalty()
|
||||
{
|
||||
var withVersionInput = CreateInput(cvss: 5.0m, hopCount: 0, cvssVersion: "4.0");
|
||||
var noVersionInput = CreateInput(cvss: 5.0m, hopCount: 0);
|
||||
noVersionInput = noVersionInput with { CvssVersion = null };
|
||||
|
||||
var withVersionResult = await _engine.ScoreAsync(withVersionInput, _defaultPolicy);
|
||||
var noVersionResult = await _engine.ScoreAsync(noVersionInput, _defaultPolicy);
|
||||
|
||||
noVersionResult.SignalValues["uncertaintyPenalty"].Should().BeGreaterThan(0);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with all factors maxed returns critical")]
|
||||
public async Task ScoreAsync_AllFactorsMaxed_ReturnsCritical()
|
||||
{
|
||||
var asOf = DateTimeOffset.UtcNow;
|
||||
var input = CreateInput(cvss: 10.0m, hopCount: 0, asOf: asOf, cvssVersion: "4.0");
|
||||
input = input with
|
||||
{
|
||||
IsKnownExploited = true,
|
||||
Evidence = new EvidenceInput
|
||||
{
|
||||
Types = new HashSet<EvidenceType> { EvidenceType.Runtime },
|
||||
NewestEvidenceAt = asOf
|
||||
},
|
||||
Provenance = new ProvenanceInput { Level = ProvenanceLevel.Reproducible }
|
||||
};
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result.Severity.Should().Be("critical");
|
||||
result.FinalScore.Should().BeGreaterOrEqualTo(90);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with gate applies gate multiplier")]
|
||||
public async Task ScoreAsync_WithGate_AppliesGateMultiplier()
|
||||
{
|
||||
var noGateInput = CreateInput(cvss: 5.0m, hopCount: 0);
|
||||
var withGateInput = CreateInput(cvss: 5.0m, hopCount: 0);
|
||||
withGateInput = withGateInput with
|
||||
{
|
||||
Reachability = withGateInput.Reachability with
|
||||
{
|
||||
Gates =
|
||||
[
|
||||
new DetectedGate("admin_only", "requires admin role", 1.0)
|
||||
]
|
||||
}
|
||||
};
|
||||
|
||||
var noGateResult = await _engine.ScoreAsync(noGateInput, _defaultPolicy);
|
||||
var withGateResult = await _engine.ScoreAsync(withGateInput, _defaultPolicy);
|
||||
|
||||
withGateResult.SignalValues["reachability"].Should().BeLessThan(noGateResult.SignalValues["reachability"]);
|
||||
}
|
||||
|
||||
private static ScoringInput CreateInput(
|
||||
decimal cvss,
|
||||
int? hopCount,
|
||||
DateTimeOffset? asOf = null,
|
||||
string? cvssVersion = null)
|
||||
{
|
||||
return new ScoringInput
|
||||
{
|
||||
FindingId = "test-finding-1",
|
||||
TenantId = "test-tenant",
|
||||
ProfileId = "test-profile",
|
||||
AsOf = asOf ?? DateTimeOffset.UtcNow,
|
||||
CvssBase = cvss,
|
||||
CvssVersion = cvssVersion ?? "3.1",
|
||||
Reachability = new ReachabilityInput
|
||||
{
|
||||
HopCount = hopCount
|
||||
},
|
||||
Evidence = EvidenceInput.Empty,
|
||||
Provenance = ProvenanceInput.Default,
|
||||
IsKnownExploited = false
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,263 @@
|
||||
// =============================================================================
|
||||
// ProfileComparisonIntegrationTests.cs
|
||||
// Sprint: SPRINT_3407_0001_0001_configurable_scoring
|
||||
// Task: PROF-3407-013 - Integration test: same input, different profiles
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Policy.Engine.Scoring.Engines;
|
||||
using StellaOps.Policy.Scoring;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Scoring.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests comparing scores across different profiles for identical inputs.
|
||||
/// </summary>
|
||||
[Trait("Category", "Integration")]
|
||||
[Trait("Sprint", "3407")]
|
||||
public sealed class ProfileComparisonIntegrationTests
|
||||
{
|
||||
private readonly SimpleScoringEngine _simpleEngine;
|
||||
private readonly AdvancedScoringEngine _advancedEngine;
|
||||
private readonly ScorePolicy _defaultPolicy;
|
||||
|
||||
public ProfileComparisonIntegrationTests()
|
||||
{
|
||||
var freshnessCalculator = new EvidenceFreshnessCalculator();
|
||||
|
||||
_simpleEngine = new SimpleScoringEngine(
|
||||
freshnessCalculator,
|
||||
NullLogger<SimpleScoringEngine>.Instance);
|
||||
|
||||
_advancedEngine = new AdvancedScoringEngine(
|
||||
freshnessCalculator,
|
||||
NullLogger<AdvancedScoringEngine>.Instance);
|
||||
|
||||
_defaultPolicy = ScorePolicy.Default;
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Same input produces comparable scores across profiles")]
|
||||
public async Task SameInput_ProducesComparableScores()
|
||||
{
|
||||
var asOf = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
|
||||
var input = CreateInput(cvss: 7.5m, hopCount: 2, asOf: asOf);
|
||||
|
||||
var simpleResult = await _simpleEngine.ScoreAsync(input, _defaultPolicy);
|
||||
var advancedResult = await _advancedEngine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
// Both should produce valid results
|
||||
simpleResult.Should().NotBeNull();
|
||||
advancedResult.Should().NotBeNull();
|
||||
|
||||
// Scores should be in valid range
|
||||
simpleResult.FinalScore.Should().BeInRange(0, 100);
|
||||
advancedResult.FinalScore.Should().BeInRange(0, 100);
|
||||
|
||||
// Both should use correct profiles
|
||||
simpleResult.ScoringProfile.Should().Be(ScoringProfile.Simple);
|
||||
advancedResult.ScoringProfile.Should().Be(ScoringProfile.Advanced);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Same high-risk input produces similar severity across profiles")]
|
||||
public async Task HighRiskInput_ProducesSimilarSeverity()
|
||||
{
|
||||
var asOf = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
|
||||
var input = CreateInput(cvss: 9.8m, hopCount: 0, asOf: asOf);
|
||||
input = input with
|
||||
{
|
||||
IsKnownExploited = true,
|
||||
Evidence = new EvidenceInput
|
||||
{
|
||||
Types = new HashSet<EvidenceType> { EvidenceType.Runtime },
|
||||
NewestEvidenceAt = asOf
|
||||
},
|
||||
Provenance = new ProvenanceInput { Level = ProvenanceLevel.Reproducible }
|
||||
};
|
||||
|
||||
var simpleResult = await _simpleEngine.ScoreAsync(input, _defaultPolicy);
|
||||
var advancedResult = await _advancedEngine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
// Both should identify this as high risk
|
||||
simpleResult.Severity.Should().BeOneOf("critical", "high");
|
||||
advancedResult.Severity.Should().BeOneOf("critical", "high");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Same low-risk input produces similar severity across profiles")]
|
||||
public async Task LowRiskInput_ProducesSimilarSeverity()
|
||||
{
|
||||
var asOf = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
|
||||
var input = CreateInput(cvss: 2.0m, hopCount: null, asOf: asOf);
|
||||
|
||||
var simpleResult = await _simpleEngine.ScoreAsync(input, _defaultPolicy);
|
||||
var advancedResult = await _advancedEngine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
// Both should identify this as low risk
|
||||
simpleResult.Severity.Should().BeOneOf("info", "low");
|
||||
advancedResult.Severity.Should().BeOneOf("info", "low");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Both profiles are deterministic with same input")]
|
||||
public async Task BothProfiles_AreDeterministic()
|
||||
{
|
||||
var asOf = new DateTimeOffset(2025, 6, 15, 12, 0, 0, TimeSpan.Zero);
|
||||
var input = CreateInput(cvss: 6.5m, hopCount: 3, asOf: asOf);
|
||||
input = input with
|
||||
{
|
||||
Evidence = new EvidenceInput
|
||||
{
|
||||
Types = new HashSet<EvidenceType> { EvidenceType.Sca, EvidenceType.Sast },
|
||||
NewestEvidenceAt = asOf.AddDays(-14)
|
||||
}
|
||||
};
|
||||
|
||||
var simpleResult1 = await _simpleEngine.ScoreAsync(input, _defaultPolicy);
|
||||
var simpleResult2 = await _simpleEngine.ScoreAsync(input, _defaultPolicy);
|
||||
var advancedResult1 = await _advancedEngine.ScoreAsync(input, _defaultPolicy);
|
||||
var advancedResult2 = await _advancedEngine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
simpleResult1.FinalScore.Should().Be(simpleResult2.FinalScore);
|
||||
advancedResult1.FinalScore.Should().Be(advancedResult2.FinalScore);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Score variance across profiles is reasonable")]
|
||||
public async Task ScoreVariance_IsReasonable()
|
||||
{
|
||||
var asOf = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 2, asOf: asOf);
|
||||
input = input with
|
||||
{
|
||||
Evidence = new EvidenceInput
|
||||
{
|
||||
Types = new HashSet<EvidenceType> { EvidenceType.Sca },
|
||||
NewestEvidenceAt = asOf.AddDays(-30)
|
||||
},
|
||||
Provenance = new ProvenanceInput { Level = ProvenanceLevel.Signed }
|
||||
};
|
||||
|
||||
var simpleResult = await _simpleEngine.ScoreAsync(input, _defaultPolicy);
|
||||
var advancedResult = await _advancedEngine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
var variance = Math.Abs(simpleResult.FinalScore - advancedResult.FinalScore);
|
||||
|
||||
// Variance should be reasonable (< 30 points for typical input)
|
||||
variance.Should().BeLessThan(30,
|
||||
"score variance between profiles should be reasonable for typical inputs");
|
||||
}
|
||||
|
||||
[Theory(DisplayName = "Both profiles respect policy weights")]
|
||||
[InlineData(1000, 4500, 3000, 1500)] // Default weights
|
||||
[InlineData(5000, 2500, 1500, 1000)] // High base severity weight
|
||||
[InlineData(2000, 6000, 1000, 1000)] // High reachability weight
|
||||
public async Task BothProfiles_RespectPolicyWeights(
|
||||
int baseSeverityWeight,
|
||||
int reachabilityWeight,
|
||||
int evidenceWeight,
|
||||
int provenanceWeight)
|
||||
{
|
||||
var customPolicy = ScorePolicy.Default with
|
||||
{
|
||||
WeightsBps = new WeightsBps
|
||||
{
|
||||
BaseSeverity = baseSeverityWeight,
|
||||
Reachability = reachabilityWeight,
|
||||
Evidence = evidenceWeight,
|
||||
Provenance = provenanceWeight
|
||||
}
|
||||
};
|
||||
|
||||
var asOf = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 1, asOf: asOf);
|
||||
|
||||
var simpleResult = await _simpleEngine.ScoreAsync(input, customPolicy);
|
||||
var advancedResult = await _advancedEngine.ScoreAsync(input, customPolicy);
|
||||
|
||||
// Both should produce valid results with custom weights
|
||||
simpleResult.FinalScore.Should().BeInRange(0, 100);
|
||||
advancedResult.FinalScore.Should().BeInRange(0, 100);
|
||||
|
||||
// Signal contributions should reflect weights
|
||||
simpleResult.SignalContributions.Should().NotBeEmpty();
|
||||
advancedResult.SignalContributions.Should().NotBeEmpty();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Both profiles generate explanations")]
|
||||
public async Task BothProfiles_GenerateExplanations()
|
||||
{
|
||||
var asOf = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
|
||||
var input = CreateInput(cvss: 7.0m, hopCount: 2, asOf: asOf);
|
||||
|
||||
var simpleResult = await _simpleEngine.ScoreAsync(input, _defaultPolicy);
|
||||
var advancedResult = await _advancedEngine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
simpleResult.Explain.Should().NotBeEmpty();
|
||||
advancedResult.Explain.Should().NotBeEmpty();
|
||||
|
||||
// Both should have base severity explanation
|
||||
simpleResult.Explain.Should().Contain(e => e.Factor == "baseSeverity");
|
||||
advancedResult.Explain.Should().Contain(e => e.Factor == "baseSeverity");
|
||||
|
||||
// Both should have reachability explanation
|
||||
simpleResult.Explain.Should().Contain(e => e.Factor == "reachability");
|
||||
advancedResult.Explain.Should().Contain(e => e.Factor == "reachability");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Advanced profile applies additional factors not in Simple")]
|
||||
public async Task AdvancedProfile_AppliesAdditionalFactors()
|
||||
{
|
||||
var asOf = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 2, asOf: asOf) with
|
||||
{
|
||||
IsKnownExploited = true
|
||||
};
|
||||
|
||||
var simpleResult = await _simpleEngine.ScoreAsync(input, _defaultPolicy);
|
||||
var advancedResult = await _advancedEngine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
// Advanced should have KEV boost
|
||||
advancedResult.SignalValues.Should().ContainKey("kevBoost");
|
||||
advancedResult.SignalValues["kevBoost"].Should().BeGreaterThan(0);
|
||||
|
||||
// Simple doesn't have KEV boost in signal values (handled via override)
|
||||
simpleResult.SignalValues.Should().NotContainKey("kevBoost");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Profile results include profile identification for audit")]
|
||||
public async Task ProfileResults_IncludeProfileIdentification()
|
||||
{
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 2);
|
||||
|
||||
var simpleResult = await _simpleEngine.ScoreAsync(input, _defaultPolicy);
|
||||
var advancedResult = await _advancedEngine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
simpleResult.ProfileVersion.Should().Contain("simple");
|
||||
advancedResult.ProfileVersion.Should().Contain("advanced");
|
||||
|
||||
simpleResult.ScoringProfile.Should().Be(ScoringProfile.Simple);
|
||||
advancedResult.ScoringProfile.Should().Be(ScoringProfile.Advanced);
|
||||
}
|
||||
|
||||
private static ScoringInput CreateInput(
|
||||
decimal cvss,
|
||||
int? hopCount,
|
||||
DateTimeOffset? asOf = null)
|
||||
{
|
||||
return new ScoringInput
|
||||
{
|
||||
FindingId = $"test-finding-{Guid.NewGuid():N}",
|
||||
TenantId = "test-tenant",
|
||||
ProfileId = "test-profile",
|
||||
AsOf = asOf ?? DateTimeOffset.UtcNow,
|
||||
CvssBase = cvss,
|
||||
CvssVersion = "3.1",
|
||||
Reachability = new ReachabilityInput
|
||||
{
|
||||
HopCount = hopCount
|
||||
},
|
||||
Evidence = EvidenceInput.Empty,
|
||||
Provenance = ProvenanceInput.Default,
|
||||
IsKnownExploited = false
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,277 @@
|
||||
// =============================================================================
|
||||
// ProfileSwitchingTests.cs
|
||||
// Sprint: SPRINT_3407_0001_0001_configurable_scoring
|
||||
// Task: PROF-3407-012 - Unit tests for profile switching
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Moq;
|
||||
using StellaOps.Policy.Engine.Scoring.Engines;
|
||||
using StellaOps.Policy.Scoring;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Scoring.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for profile switching functionality.
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
[Trait("Sprint", "3407")]
|
||||
public sealed class ProfileSwitchingTests
|
||||
{
|
||||
private readonly Mock<IScorePolicyService> _policyServiceMock;
|
||||
private readonly Mock<IScoringProfileService> _profileServiceMock;
|
||||
private readonly IServiceProvider _serviceProvider;
|
||||
private readonly ScoringEngineFactory _factory;
|
||||
|
||||
public ProfileSwitchingTests()
|
||||
{
|
||||
_policyServiceMock = new Mock<IScorePolicyService>();
|
||||
_profileServiceMock = new Mock<IScoringProfileService>();
|
||||
|
||||
var freshnessCalculator = new EvidenceFreshnessCalculator();
|
||||
var simpleEngine = new SimpleScoringEngine(
|
||||
freshnessCalculator,
|
||||
NullLogger<SimpleScoringEngine>.Instance);
|
||||
var advancedEngine = new AdvancedScoringEngine(
|
||||
freshnessCalculator,
|
||||
NullLogger<AdvancedScoringEngine>.Instance);
|
||||
|
||||
var services = new ServiceCollection();
|
||||
services.AddSingleton(simpleEngine);
|
||||
services.AddSingleton(advancedEngine);
|
||||
_serviceProvider = services.BuildServiceProvider();
|
||||
|
||||
_factory = new ScoringEngineFactory(
|
||||
_serviceProvider,
|
||||
_profileServiceMock.Object,
|
||||
NullLogger<ScoringEngineFactory>.Instance);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GetEngine returns SimpleScoringEngine for Simple profile")]
|
||||
public void GetEngine_Simple_ReturnsSimpleScoringEngine()
|
||||
{
|
||||
var engine = _factory.GetEngine(ScoringProfile.Simple);
|
||||
|
||||
engine.Should().BeOfType<SimpleScoringEngine>();
|
||||
engine.Profile.Should().Be(ScoringProfile.Simple);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GetEngine returns AdvancedScoringEngine for Advanced profile")]
|
||||
public void GetEngine_Advanced_ReturnsAdvancedScoringEngine()
|
||||
{
|
||||
var engine = _factory.GetEngine(ScoringProfile.Advanced);
|
||||
|
||||
engine.Should().BeOfType<AdvancedScoringEngine>();
|
||||
engine.Profile.Should().Be(ScoringProfile.Advanced);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GetEngine throws for Custom profile")]
|
||||
public void GetEngine_Custom_Throws()
|
||||
{
|
||||
var action = () => _factory.GetEngine(ScoringProfile.Custom);
|
||||
|
||||
action.Should().Throw<NotSupportedException>();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GetEngineForTenant uses tenant profile configuration")]
|
||||
public void GetEngineForTenant_UsesTenantProfile()
|
||||
{
|
||||
_profileServiceMock
|
||||
.Setup(p => p.GetProfileForTenant("tenant-1"))
|
||||
.Returns(ScoringProfileConfig.DefaultSimple);
|
||||
|
||||
var engine = _factory.GetEngineForTenant("tenant-1");
|
||||
|
||||
engine.Should().BeOfType<SimpleScoringEngine>();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GetEngineForTenant defaults to Advanced when no profile configured")]
|
||||
public void GetEngineForTenant_DefaultsToAdvanced()
|
||||
{
|
||||
_profileServiceMock
|
||||
.Setup(p => p.GetProfileForTenant("tenant-no-config"))
|
||||
.Returns((ScoringProfileConfig?)null);
|
||||
|
||||
var engine = _factory.GetEngineForTenant("tenant-no-config");
|
||||
|
||||
engine.Should().BeOfType<AdvancedScoringEngine>();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GetAvailableProfiles returns Simple and Advanced")]
|
||||
public void GetAvailableProfiles_ReturnsSimpleAndAdvanced()
|
||||
{
|
||||
var profiles = _factory.GetAvailableProfiles();
|
||||
|
||||
profiles.Should().Contain(ScoringProfile.Simple);
|
||||
profiles.Should().Contain(ScoringProfile.Advanced);
|
||||
profiles.Should().NotContain(ScoringProfile.Custom);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for profile-aware scoring service.
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
[Trait("Sprint", "3407")]
|
||||
public sealed class ProfileAwareScoringServiceTests
|
||||
{
|
||||
private readonly Mock<IScoringEngineFactory> _factoryMock;
|
||||
private readonly Mock<IScorePolicyService> _policyServiceMock;
|
||||
private readonly ProfileAwareScoringService _service;
|
||||
|
||||
public ProfileAwareScoringServiceTests()
|
||||
{
|
||||
_factoryMock = new Mock<IScoringEngineFactory>();
|
||||
_policyServiceMock = new Mock<IScorePolicyService>();
|
||||
_service = new ProfileAwareScoringService(
|
||||
_factoryMock.Object,
|
||||
_policyServiceMock.Object,
|
||||
NullLogger<ProfileAwareScoringService>.Instance);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync uses tenant's configured engine")]
|
||||
public async Task ScoreAsync_UsesTenantEngine()
|
||||
{
|
||||
var input = CreateInput("tenant-1");
|
||||
var policy = ScorePolicy.Default;
|
||||
var expectedResult = CreateResult(ScoringProfile.Simple);
|
||||
|
||||
var mockEngine = new Mock<IScoringEngine>();
|
||||
mockEngine.Setup(e => e.Profile).Returns(ScoringProfile.Simple);
|
||||
mockEngine
|
||||
.Setup(e => e.ScoreAsync(input, policy, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(expectedResult);
|
||||
|
||||
_factoryMock
|
||||
.Setup(f => f.GetEngineForTenant("tenant-1"))
|
||||
.Returns(mockEngine.Object);
|
||||
_policyServiceMock
|
||||
.Setup(p => p.GetPolicy("tenant-1"))
|
||||
.Returns(policy);
|
||||
|
||||
var result = await _service.ScoreAsync(input);
|
||||
|
||||
result.Should().BeSameAs(expectedResult);
|
||||
_factoryMock.Verify(f => f.GetEngineForTenant("tenant-1"), Times.Once);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreWithProfileAsync uses specified profile")]
|
||||
public async Task ScoreWithProfileAsync_UsesSpecifiedProfile()
|
||||
{
|
||||
var input = CreateInput("tenant-1");
|
||||
var policy = ScorePolicy.Default;
|
||||
var expectedResult = CreateResult(ScoringProfile.Advanced);
|
||||
|
||||
var mockEngine = new Mock<IScoringEngine>();
|
||||
mockEngine.Setup(e => e.Profile).Returns(ScoringProfile.Advanced);
|
||||
mockEngine
|
||||
.Setup(e => e.ScoreAsync(input, policy, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(expectedResult);
|
||||
|
||||
_factoryMock
|
||||
.Setup(f => f.GetEngine(ScoringProfile.Advanced))
|
||||
.Returns(mockEngine.Object);
|
||||
_policyServiceMock
|
||||
.Setup(p => p.GetPolicy("tenant-1"))
|
||||
.Returns(policy);
|
||||
|
||||
var result = await _service.ScoreWithProfileAsync(input, ScoringProfile.Advanced);
|
||||
|
||||
result.Should().BeSameAs(expectedResult);
|
||||
_factoryMock.Verify(f => f.GetEngine(ScoringProfile.Advanced), Times.Once);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "CompareProfilesAsync returns results for all profiles")]
|
||||
public async Task CompareProfilesAsync_ReturnsAllProfiles()
|
||||
{
|
||||
var input = CreateInput("tenant-1");
|
||||
var policy = ScorePolicy.Default;
|
||||
|
||||
var simpleResult = CreateResult(ScoringProfile.Simple, finalScore: 50);
|
||||
var advancedResult = CreateResult(ScoringProfile.Advanced, finalScore: 60);
|
||||
|
||||
var simpleEngine = new Mock<IScoringEngine>();
|
||||
simpleEngine.Setup(e => e.Profile).Returns(ScoringProfile.Simple);
|
||||
simpleEngine
|
||||
.Setup(e => e.ScoreAsync(input, policy, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(simpleResult);
|
||||
|
||||
var advancedEngine = new Mock<IScoringEngine>();
|
||||
advancedEngine.Setup(e => e.Profile).Returns(ScoringProfile.Advanced);
|
||||
advancedEngine
|
||||
.Setup(e => e.ScoreAsync(input, policy, It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(advancedResult);
|
||||
|
||||
_factoryMock
|
||||
.Setup(f => f.GetAvailableProfiles())
|
||||
.Returns([ScoringProfile.Simple, ScoringProfile.Advanced]);
|
||||
_factoryMock
|
||||
.Setup(f => f.GetEngine(ScoringProfile.Simple))
|
||||
.Returns(simpleEngine.Object);
|
||||
_factoryMock
|
||||
.Setup(f => f.GetEngine(ScoringProfile.Advanced))
|
||||
.Returns(advancedEngine.Object);
|
||||
_policyServiceMock
|
||||
.Setup(p => p.GetPolicy("tenant-1"))
|
||||
.Returns(policy);
|
||||
|
||||
var comparison = await _service.CompareProfilesAsync(input);
|
||||
|
||||
comparison.FindingId.Should().Be("test-finding-1");
|
||||
comparison.Results.Should().HaveCount(2);
|
||||
comparison.Results.Should().ContainKey(ScoringProfile.Simple);
|
||||
comparison.Results.Should().ContainKey(ScoringProfile.Advanced);
|
||||
comparison.ScoreVariance.Should().Be(10);
|
||||
comparison.SeverityDiffers.Should().BeFalse();
|
||||
}
|
||||
|
||||
private static ScoringInput CreateInput(string tenantId)
|
||||
{
|
||||
return new ScoringInput
|
||||
{
|
||||
FindingId = "test-finding-1",
|
||||
TenantId = tenantId,
|
||||
ProfileId = "test-profile",
|
||||
AsOf = DateTimeOffset.UtcNow,
|
||||
CvssBase = 5.0m,
|
||||
CvssVersion = "3.1",
|
||||
Reachability = new ReachabilityInput { HopCount = 2 },
|
||||
Evidence = EvidenceInput.Empty,
|
||||
Provenance = ProvenanceInput.Default,
|
||||
IsKnownExploited = false
|
||||
};
|
||||
}
|
||||
|
||||
private static ScoringEngineResult CreateResult(ScoringProfile profile, int finalScore = 50)
|
||||
{
|
||||
return new ScoringEngineResult
|
||||
{
|
||||
FindingId = "test-finding-1",
|
||||
ProfileId = "test-profile",
|
||||
ProfileVersion = "v1",
|
||||
RawScore = finalScore,
|
||||
FinalScore = finalScore,
|
||||
Severity = finalScore >= 70 ? "high" : "medium",
|
||||
SignalValues = new Dictionary<string, int>
|
||||
{
|
||||
["baseSeverity"] = 50,
|
||||
["reachability"] = 70,
|
||||
["evidence"] = 30,
|
||||
["provenance"] = 30
|
||||
},
|
||||
SignalContributions = new Dictionary<string, double>
|
||||
{
|
||||
["baseSeverity"] = 5.0,
|
||||
["reachability"] = 31.5,
|
||||
["evidence"] = 9.0,
|
||||
["provenance"] = 4.5
|
||||
},
|
||||
ScoringProfile = profile,
|
||||
ScoredAt = DateTimeOffset.UtcNow,
|
||||
Explain = []
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,156 @@
|
||||
// =============================================================================
|
||||
// ScorePolicyDigestReplayIntegrationTests.cs
|
||||
// Sprint: SPRINT_3402_0001_0001
|
||||
// Task: YAML-3402-012 - Integration test: policy digest in replay manifest
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.Replay.Core;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Scoring.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests verifying score policy digest flows into replay manifests.
|
||||
/// </summary>
|
||||
[Trait("Category", "Integration")]
|
||||
[Trait("Sprint", "3402")]
|
||||
public sealed class ScorePolicyDigestReplayIntegrationTests
|
||||
{
|
||||
[Fact(DisplayName = "ReplayManifest includes ScorePolicyDigest field")]
|
||||
public void ReplayManifest_HasScorePolicyDigest()
|
||||
{
|
||||
var manifest = new ReplayManifest
|
||||
{
|
||||
SchemaVersion = ReplayManifestVersions.V2,
|
||||
Scan = new ReplayScanMetadata
|
||||
{
|
||||
Id = "scan-123",
|
||||
Time = DateTimeOffset.UtcNow,
|
||||
ScorePolicyDigest = "sha256:abc123def456"
|
||||
}
|
||||
};
|
||||
|
||||
manifest.Scan.ScorePolicyDigest.Should().Be("sha256:abc123def456");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScorePolicyDigest is null when not set")]
|
||||
public void ScorePolicyDigest_IsNull_WhenNotSet()
|
||||
{
|
||||
var manifest = new ReplayManifest
|
||||
{
|
||||
SchemaVersion = ReplayManifestVersions.V2,
|
||||
Scan = new ReplayScanMetadata
|
||||
{
|
||||
Id = "scan-123",
|
||||
Time = DateTimeOffset.UtcNow
|
||||
}
|
||||
};
|
||||
|
||||
manifest.Scan.ScorePolicyDigest.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScorePolicyDigest serializes correctly to JSON")]
|
||||
public void ScorePolicyDigest_SerializesToJson()
|
||||
{
|
||||
var manifest = new ReplayManifest
|
||||
{
|
||||
SchemaVersion = ReplayManifestVersions.V2,
|
||||
Scan = new ReplayScanMetadata
|
||||
{
|
||||
Id = "scan-123",
|
||||
Time = DateTimeOffset.UtcNow,
|
||||
ScorePolicyDigest = "sha256:abc123def456"
|
||||
}
|
||||
};
|
||||
|
||||
var json = System.Text.Json.JsonSerializer.Serialize(manifest);
|
||||
|
||||
json.Should().Contain("\"scorePolicyDigest\":\"sha256:abc123def456\"");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScorePolicyDigest is omitted from JSON when null")]
|
||||
public void ScorePolicyDigest_OmittedFromJson_WhenNull()
|
||||
{
|
||||
var manifest = new ReplayManifest
|
||||
{
|
||||
SchemaVersion = ReplayManifestVersions.V2,
|
||||
Scan = new ReplayScanMetadata
|
||||
{
|
||||
Id = "scan-123",
|
||||
Time = DateTimeOffset.UtcNow,
|
||||
ScorePolicyDigest = null
|
||||
}
|
||||
};
|
||||
|
||||
var json = System.Text.Json.JsonSerializer.Serialize(manifest);
|
||||
|
||||
json.Should().NotContain("scorePolicyDigest");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScorePolicyDigest roundtrips through JSON serialization")]
|
||||
public void ScorePolicyDigest_Roundtrips()
|
||||
{
|
||||
var original = new ReplayManifest
|
||||
{
|
||||
SchemaVersion = ReplayManifestVersions.V2,
|
||||
Scan = new ReplayScanMetadata
|
||||
{
|
||||
Id = "scan-456",
|
||||
Time = DateTimeOffset.UtcNow,
|
||||
PolicyDigest = "sha256:policy-digest",
|
||||
ScorePolicyDigest = "sha256:score-policy-digest"
|
||||
}
|
||||
};
|
||||
|
||||
var json = System.Text.Json.JsonSerializer.Serialize(original);
|
||||
var deserialized = System.Text.Json.JsonSerializer.Deserialize<ReplayManifest>(json);
|
||||
|
||||
deserialized.Should().NotBeNull();
|
||||
deserialized!.Scan.ScorePolicyDigest.Should().Be("sha256:score-policy-digest");
|
||||
deserialized.Scan.PolicyDigest.Should().Be("sha256:policy-digest");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScorePolicyDigest is separate from PolicyDigest")]
|
||||
public void ScorePolicyDigest_IsSeparateFromPolicyDigest()
|
||||
{
|
||||
var manifest = new ReplayManifest
|
||||
{
|
||||
SchemaVersion = ReplayManifestVersions.V2,
|
||||
Scan = new ReplayScanMetadata
|
||||
{
|
||||
Id = "scan-789",
|
||||
PolicyDigest = "sha256:gate-policy",
|
||||
ScorePolicyDigest = "sha256:scoring-policy"
|
||||
}
|
||||
};
|
||||
|
||||
manifest.Scan.PolicyDigest.Should().NotBe(manifest.Scan.ScorePolicyDigest);
|
||||
manifest.Scan.PolicyDigest.Should().Be("sha256:gate-policy");
|
||||
manifest.Scan.ScorePolicyDigest.Should().Be("sha256:scoring-policy");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScorePolicyDigest format is content-addressed")]
|
||||
public void ScorePolicyDigest_HasContentAddressedFormat()
|
||||
{
|
||||
var validDigests = new[]
|
||||
{
|
||||
"sha256:a".PadRight(71, 'a'),
|
||||
"sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
|
||||
};
|
||||
|
||||
foreach (var digest in validDigests)
|
||||
{
|
||||
var manifest = new ReplayManifest
|
||||
{
|
||||
Scan = new ReplayScanMetadata
|
||||
{
|
||||
Id = "test",
|
||||
ScorePolicyDigest = digest
|
||||
}
|
||||
};
|
||||
|
||||
manifest.Scan.ScorePolicyDigest.Should().StartWith("sha256:");
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,238 @@
|
||||
// =============================================================================
|
||||
// ScorePolicyServiceCachingTests.cs
|
||||
// Sprint: SPRINT_3402_0001_0001
|
||||
// Task: YAML-3402-011 - Unit tests for policy service caching
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Moq;
|
||||
using StellaOps.Policy.Scoring;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Scoring.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for ScorePolicyService caching behavior.
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
[Trait("Sprint", "3402")]
|
||||
public sealed class ScorePolicyServiceCachingTests
|
||||
{
|
||||
private readonly Mock<IScorePolicyProvider> _providerMock;
|
||||
private readonly ScorePolicyService _service;
|
||||
|
||||
public ScorePolicyServiceCachingTests()
|
||||
{
|
||||
_providerMock = new Mock<IScorePolicyProvider>();
|
||||
_service = new ScorePolicyService(
|
||||
_providerMock.Object,
|
||||
NullLogger<ScorePolicyService>.Instance);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GetPolicy returns cached policy on second call")]
|
||||
public void GetPolicy_ReturnsCached()
|
||||
{
|
||||
var policy = CreateTestPolicy("tenant-1");
|
||||
_providerMock.Setup(p => p.GetPolicy("tenant-1")).Returns(policy);
|
||||
|
||||
var first = _service.GetPolicy("tenant-1");
|
||||
var second = _service.GetPolicy("tenant-1");
|
||||
|
||||
first.Should().BeSameAs(second);
|
||||
_providerMock.Verify(p => p.GetPolicy("tenant-1"), Times.Once());
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GetPolicy caches per tenant")]
|
||||
public void GetPolicy_CachesPerTenant()
|
||||
{
|
||||
var policy1 = CreateTestPolicy("tenant-1");
|
||||
var policy2 = CreateTestPolicy("tenant-2");
|
||||
_providerMock.Setup(p => p.GetPolicy("tenant-1")).Returns(policy1);
|
||||
_providerMock.Setup(p => p.GetPolicy("tenant-2")).Returns(policy2);
|
||||
|
||||
var result1 = _service.GetPolicy("tenant-1");
|
||||
var result2 = _service.GetPolicy("tenant-2");
|
||||
|
||||
result1.Should().NotBeSameAs(result2);
|
||||
result1.PolicyId.Should().Be("tenant-1");
|
||||
result2.PolicyId.Should().Be("tenant-2");
|
||||
_providerMock.Verify(p => p.GetPolicy("tenant-1"), Times.Once());
|
||||
_providerMock.Verify(p => p.GetPolicy("tenant-2"), Times.Once());
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GetCachedDigest returns null before policy is loaded")]
|
||||
public void GetCachedDigest_BeforeLoad_ReturnsNull()
|
||||
{
|
||||
var digest = _service.GetCachedDigest("tenant-1");
|
||||
digest.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GetCachedDigest returns digest after policy is loaded")]
|
||||
public void GetCachedDigest_AfterLoad_ReturnsDigest()
|
||||
{
|
||||
var policy = CreateTestPolicy("tenant-1");
|
||||
_providerMock.Setup(p => p.GetPolicy("tenant-1")).Returns(policy);
|
||||
|
||||
_ = _service.GetPolicy("tenant-1");
|
||||
var digest = _service.GetCachedDigest("tenant-1");
|
||||
|
||||
digest.Should().NotBeNullOrEmpty();
|
||||
digest.Should().StartWith("sha256:");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ComputePolicyDigest is deterministic")]
|
||||
public void ComputePolicyDigest_IsDeterministic()
|
||||
{
|
||||
var policy = CreateTestPolicy("test");
|
||||
|
||||
var digest1 = _service.ComputePolicyDigest(policy);
|
||||
var digest2 = _service.ComputePolicyDigest(policy);
|
||||
|
||||
digest1.Should().Be(digest2);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ComputePolicyDigest differs for different policies")]
|
||||
public void ComputePolicyDigest_DiffersForDifferentPolicies()
|
||||
{
|
||||
var policy1 = CreateTestPolicy("policy-1");
|
||||
var policy2 = CreateTestPolicy("policy-2");
|
||||
|
||||
var digest1 = _service.ComputePolicyDigest(policy1);
|
||||
var digest2 = _service.ComputePolicyDigest(policy2);
|
||||
|
||||
digest1.Should().NotBe(digest2);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ComputePolicyDigest has correct format")]
|
||||
public void ComputePolicyDigest_HasCorrectFormat()
|
||||
{
|
||||
var policy = CreateTestPolicy("test");
|
||||
|
||||
var digest = _service.ComputePolicyDigest(policy);
|
||||
|
||||
digest.Should().MatchRegex(@"^sha256:[a-f0-9]{64}$");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Reload clears cache")]
|
||||
public void Reload_ClearsCache()
|
||||
{
|
||||
var policy = CreateTestPolicy("tenant-1");
|
||||
_providerMock.Setup(p => p.GetPolicy("tenant-1")).Returns(policy);
|
||||
|
||||
_ = _service.GetPolicy("tenant-1");
|
||||
_service.GetCachedDigest("tenant-1").Should().NotBeNull();
|
||||
|
||||
_service.Reload();
|
||||
|
||||
_service.GetCachedDigest("tenant-1").Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Reload causes provider to be called again")]
|
||||
public void Reload_CausesProviderToBeCalled()
|
||||
{
|
||||
var policy = CreateTestPolicy("tenant-1");
|
||||
_providerMock.Setup(p => p.GetPolicy("tenant-1")).Returns(policy);
|
||||
|
||||
_ = _service.GetPolicy("tenant-1");
|
||||
_service.Reload();
|
||||
_ = _service.GetPolicy("tenant-1");
|
||||
|
||||
_providerMock.Verify(p => p.GetPolicy("tenant-1"), Times.Exactly(2));
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GetPolicy with null tenant throws")]
|
||||
public void GetPolicy_NullTenant_Throws()
|
||||
{
|
||||
var act = () => _service.GetPolicy(null!);
|
||||
act.Should().Throw<ArgumentException>();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "GetPolicy with empty tenant throws")]
|
||||
public void GetPolicy_EmptyTenant_Throws()
|
||||
{
|
||||
var act = () => _service.GetPolicy("");
|
||||
act.Should().Throw<ArgumentException>();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ComputePolicyDigest with null policy throws")]
|
||||
public void ComputePolicyDigest_NullPolicy_Throws()
|
||||
{
|
||||
var act = () => _service.ComputePolicyDigest(null!);
|
||||
act.Should().Throw<ArgumentNullException>();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Concurrent access is thread-safe")]
|
||||
public void ConcurrentAccess_IsThreadSafe()
|
||||
{
|
||||
var policy = CreateTestPolicy("tenant-1");
|
||||
var callCount = 0;
|
||||
_providerMock.Setup(p => p.GetPolicy("tenant-1"))
|
||||
.Returns(() =>
|
||||
{
|
||||
Interlocked.Increment(ref callCount);
|
||||
Thread.Sleep(10); // Simulate slow load
|
||||
return policy;
|
||||
});
|
||||
|
||||
var tasks = Enumerable.Range(0, 100)
|
||||
.Select(_ => Task.Run(() => _service.GetPolicy("tenant-1")))
|
||||
.ToArray();
|
||||
|
||||
Task.WaitAll(tasks);
|
||||
|
||||
// ConcurrentDictionary's GetOrAdd may call factory multiple times
|
||||
// but should converge to same cached value
|
||||
var results = tasks.Select(t => t.Result).Distinct().ToList();
|
||||
results.Should().HaveCount(1);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Digest is stable across equal policies created separately")]
|
||||
public void Digest_IsStable_AcrossEqualPolicies()
|
||||
{
|
||||
var policy1 = new ScorePolicy
|
||||
{
|
||||
PolicyVersion = "score.v1",
|
||||
PolicyId = "stable-test",
|
||||
WeightsBps = new WeightsBps
|
||||
{
|
||||
BaseSeverity = 2500,
|
||||
Reachability = 2500,
|
||||
Evidence = 2500,
|
||||
Provenance = 2500
|
||||
}
|
||||
};
|
||||
|
||||
var policy2 = new ScorePolicy
|
||||
{
|
||||
PolicyVersion = "score.v1",
|
||||
PolicyId = "stable-test",
|
||||
WeightsBps = new WeightsBps
|
||||
{
|
||||
BaseSeverity = 2500,
|
||||
Reachability = 2500,
|
||||
Evidence = 2500,
|
||||
Provenance = 2500
|
||||
}
|
||||
};
|
||||
|
||||
var digest1 = _service.ComputePolicyDigest(policy1);
|
||||
var digest2 = _service.ComputePolicyDigest(policy2);
|
||||
|
||||
digest1.Should().Be(digest2);
|
||||
}
|
||||
|
||||
private static ScorePolicy CreateTestPolicy(string id) => new()
|
||||
{
|
||||
PolicyVersion = "score.v1",
|
||||
PolicyId = id,
|
||||
PolicyName = $"Test Policy {id}",
|
||||
WeightsBps = new WeightsBps
|
||||
{
|
||||
BaseSeverity = 2500,
|
||||
Reachability = 2500,
|
||||
Evidence = 2500,
|
||||
Provenance = 2500
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,344 @@
|
||||
// =============================================================================
|
||||
// SimpleScoringEngineTests.cs
|
||||
// Sprint: SPRINT_3407_0001_0001_configurable_scoring
|
||||
// Task: PROF-3407-010 - Unit tests for SimpleScoringEngine
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Policy.Engine.Scoring.Engines;
|
||||
using StellaOps.Policy.Scoring;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Scoring.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for SimpleScoringEngine.
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
[Trait("Sprint", "3407")]
|
||||
public sealed class SimpleScoringEngineTests
|
||||
{
|
||||
private readonly SimpleScoringEngine _engine;
|
||||
private readonly EvidenceFreshnessCalculator _freshnessCalculator;
|
||||
private readonly ScorePolicy _defaultPolicy;
|
||||
|
||||
public SimpleScoringEngineTests()
|
||||
{
|
||||
_freshnessCalculator = new EvidenceFreshnessCalculator();
|
||||
_engine = new SimpleScoringEngine(
|
||||
_freshnessCalculator,
|
||||
NullLogger<SimpleScoringEngine>.Instance);
|
||||
_defaultPolicy = ScorePolicy.Default;
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Profile returns Simple")]
|
||||
public void Profile_ReturnsSimple()
|
||||
{
|
||||
_engine.Profile.Should().Be(ScoringProfile.Simple);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with max CVSS returns high base severity")]
|
||||
public async Task ScoreAsync_MaxCvss_HighBaseSeverity()
|
||||
{
|
||||
var input = CreateInput(cvss: 10.0m, hopCount: 0);
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result.Should().NotBeNull();
|
||||
result.SignalValues["baseSeverity"].Should().Be(100);
|
||||
result.ScoringProfile.Should().Be(ScoringProfile.Simple);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with min CVSS returns low base severity")]
|
||||
public async Task ScoreAsync_MinCvss_LowBaseSeverity()
|
||||
{
|
||||
var input = CreateInput(cvss: 0.0m, hopCount: 0);
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result.SignalValues["baseSeverity"].Should().Be(0);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with direct call returns max reachability")]
|
||||
public async Task ScoreAsync_DirectCall_MaxReachability()
|
||||
{
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 0);
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result.SignalValues["reachability"].Should().Be(100);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with multiple hops reduces reachability")]
|
||||
public async Task ScoreAsync_MultipleHops_ReducedReachability()
|
||||
{
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 5);
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result.SignalValues["reachability"].Should().BeLessThan(100);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with unreachable returns zero reachability")]
|
||||
public async Task ScoreAsync_Unreachable_ZeroReachability()
|
||||
{
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: null);
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result.SignalValues["reachability"].Should().Be(0);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with gates applies gate multiplier")]
|
||||
public async Task ScoreAsync_WithGates_AppliesMultiplier()
|
||||
{
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 0);
|
||||
input = input with
|
||||
{
|
||||
Reachability = input.Reachability with
|
||||
{
|
||||
Gates =
|
||||
[
|
||||
new DetectedGate("auth_required", "JWT validation", 0.9)
|
||||
]
|
||||
}
|
||||
};
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
// Gate should reduce reachability
|
||||
result.SignalValues["reachability"].Should().BeLessThan(100);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with runtime evidence gives high evidence score")]
|
||||
public async Task ScoreAsync_RuntimeEvidence_HighEvidenceScore()
|
||||
{
|
||||
var asOf = DateTimeOffset.UtcNow;
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 0, asOf: asOf);
|
||||
input = input with
|
||||
{
|
||||
Evidence = new EvidenceInput
|
||||
{
|
||||
Types = new HashSet<EvidenceType> { EvidenceType.Runtime },
|
||||
NewestEvidenceAt = asOf.AddDays(-1)
|
||||
}
|
||||
};
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result.SignalValues["evidence"].Should().BeGreaterThan(0);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with stale evidence applies freshness decay")]
|
||||
public async Task ScoreAsync_StaleEvidence_FreshnessDecay()
|
||||
{
|
||||
var asOf = DateTimeOffset.UtcNow;
|
||||
var freshInput = CreateInput(cvss: 5.0m, hopCount: 0, asOf: asOf);
|
||||
freshInput = freshInput with
|
||||
{
|
||||
Evidence = new EvidenceInput
|
||||
{
|
||||
Types = new HashSet<EvidenceType> { EvidenceType.Runtime },
|
||||
NewestEvidenceAt = asOf.AddDays(-1)
|
||||
}
|
||||
};
|
||||
|
||||
var staleInput = CreateInput(cvss: 5.0m, hopCount: 0, asOf: asOf);
|
||||
staleInput = staleInput with
|
||||
{
|
||||
Evidence = new EvidenceInput
|
||||
{
|
||||
Types = new HashSet<EvidenceType> { EvidenceType.Runtime },
|
||||
NewestEvidenceAt = asOf.AddDays(-180)
|
||||
}
|
||||
};
|
||||
|
||||
var freshResult = await _engine.ScoreAsync(freshInput, _defaultPolicy);
|
||||
var staleResult = await _engine.ScoreAsync(staleInput, _defaultPolicy);
|
||||
|
||||
staleResult.SignalValues["evidence"].Should().BeLessThan(freshResult.SignalValues["evidence"]);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with signed provenance increases provenance score")]
|
||||
public async Task ScoreAsync_SignedProvenance_IncreasesScore()
|
||||
{
|
||||
var unsignedInput = CreateInput(cvss: 5.0m, hopCount: 0);
|
||||
var signedInput = CreateInput(cvss: 5.0m, hopCount: 0);
|
||||
signedInput = signedInput with
|
||||
{
|
||||
Provenance = new ProvenanceInput { Level = ProvenanceLevel.Signed }
|
||||
};
|
||||
|
||||
var unsignedResult = await _engine.ScoreAsync(unsignedInput, _defaultPolicy);
|
||||
var signedResult = await _engine.ScoreAsync(signedInput, _defaultPolicy);
|
||||
|
||||
signedResult.SignalValues["provenance"].Should().BeGreaterThan(unsignedResult.SignalValues["provenance"]);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with reproducible provenance gives max provenance score")]
|
||||
public async Task ScoreAsync_ReproducibleProvenance_MaxScore()
|
||||
{
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 0);
|
||||
input = input with
|
||||
{
|
||||
Provenance = new ProvenanceInput { Level = ProvenanceLevel.Reproducible }
|
||||
};
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result.SignalValues["provenance"].Should().Be(100);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync applies weights correctly")]
|
||||
public async Task ScoreAsync_AppliesWeightsCorrectly()
|
||||
{
|
||||
var asOf = DateTimeOffset.UtcNow;
|
||||
var input = CreateInput(cvss: 10.0m, hopCount: 0, asOf: asOf);
|
||||
input = input with
|
||||
{
|
||||
Evidence = new EvidenceInput
|
||||
{
|
||||
Types = new HashSet<EvidenceType> { EvidenceType.Runtime },
|
||||
NewestEvidenceAt = asOf
|
||||
},
|
||||
Provenance = new ProvenanceInput { Level = ProvenanceLevel.Reproducible }
|
||||
};
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
// All factors maxed: should be close to 100
|
||||
result.FinalScore.Should().BeGreaterThan(90);
|
||||
result.SignalContributions.Values.Sum().Should().BeApproximately(result.RawScore, 1.0);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync maps score to correct severity")]
|
||||
public async Task ScoreAsync_MapsToCorrectSeverity()
|
||||
{
|
||||
var criticalInput = CreateInput(cvss: 10.0m, hopCount: 0);
|
||||
criticalInput = criticalInput with
|
||||
{
|
||||
Evidence = new EvidenceInput
|
||||
{
|
||||
Types = new HashSet<EvidenceType> { EvidenceType.Runtime },
|
||||
NewestEvidenceAt = DateTimeOffset.UtcNow
|
||||
},
|
||||
Provenance = new ProvenanceInput { Level = ProvenanceLevel.Reproducible }
|
||||
};
|
||||
|
||||
var infoInput = CreateInput(cvss: 1.0m, hopCount: null);
|
||||
|
||||
var criticalResult = await _engine.ScoreAsync(criticalInput, _defaultPolicy);
|
||||
var infoResult = await _engine.ScoreAsync(infoInput, _defaultPolicy);
|
||||
|
||||
criticalResult.Severity.Should().Be("critical");
|
||||
infoResult.Severity.Should().Be("info");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync generates explain entries")]
|
||||
public async Task ScoreAsync_GeneratesExplainEntries()
|
||||
{
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 3);
|
||||
|
||||
var result = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result.Explain.Should().NotBeEmpty();
|
||||
result.Explain.Should().Contain(e => e.Factor == "baseSeverity");
|
||||
result.Explain.Should().Contain(e => e.Factor == "reachability");
|
||||
result.Explain.Should().Contain(e => e.Factor == "provenance");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync is deterministic")]
|
||||
public async Task ScoreAsync_IsDeterministic()
|
||||
{
|
||||
var asOf = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
|
||||
var input = CreateInput(cvss: 7.5m, hopCount: 2, asOf: asOf);
|
||||
|
||||
var result1 = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
var result2 = await _engine.ScoreAsync(input, _defaultPolicy);
|
||||
|
||||
result1.RawScore.Should().Be(result2.RawScore);
|
||||
result1.FinalScore.Should().Be(result2.FinalScore);
|
||||
result1.Severity.Should().Be(result2.Severity);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with override applies set score")]
|
||||
public async Task ScoreAsync_WithOverride_AppliesSetScore()
|
||||
{
|
||||
var policy = _defaultPolicy with
|
||||
{
|
||||
Overrides =
|
||||
[
|
||||
new ScoreOverride
|
||||
{
|
||||
Name = "kev_boost",
|
||||
When = new ScoreOverrideCondition
|
||||
{
|
||||
Flags = new Dictionary<string, bool> { ["knownExploited"] = true }
|
||||
},
|
||||
SetScore = 95
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
var input = CreateInput(cvss: 5.0m, hopCount: 5) with
|
||||
{
|
||||
IsKnownExploited = true
|
||||
};
|
||||
|
||||
var result = await _engine.ScoreAsync(input, policy);
|
||||
|
||||
result.FinalScore.Should().Be(95);
|
||||
result.OverrideApplied.Should().Be("kev_boost");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ScoreAsync with override applies clamp")]
|
||||
public async Task ScoreAsync_WithOverride_AppliesClamp()
|
||||
{
|
||||
var policy = _defaultPolicy with
|
||||
{
|
||||
Overrides =
|
||||
[
|
||||
new ScoreOverride
|
||||
{
|
||||
Name = "max_unreachable",
|
||||
When = new ScoreOverrideCondition
|
||||
{
|
||||
MaxReachability = 0
|
||||
},
|
||||
ClampMaxScore = 30
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
var input = CreateInput(cvss: 10.0m, hopCount: null);
|
||||
|
||||
var result = await _engine.ScoreAsync(input, policy);
|
||||
|
||||
result.FinalScore.Should().BeLessOrEqualTo(30);
|
||||
result.OverrideApplied.Should().Contain("max_unreachable");
|
||||
}
|
||||
|
||||
private static ScoringInput CreateInput(
|
||||
decimal cvss,
|
||||
int? hopCount,
|
||||
DateTimeOffset? asOf = null)
|
||||
{
|
||||
return new ScoringInput
|
||||
{
|
||||
FindingId = "test-finding-1",
|
||||
TenantId = "test-tenant",
|
||||
ProfileId = "test-profile",
|
||||
AsOf = asOf ?? DateTimeOffset.UtcNow,
|
||||
CvssBase = cvss,
|
||||
CvssVersion = "3.1",
|
||||
Reachability = new ReachabilityInput
|
||||
{
|
||||
HopCount = hopCount
|
||||
},
|
||||
Evidence = EvidenceInput.Empty,
|
||||
Provenance = ProvenanceInput.Default,
|
||||
IsKnownExploited = false
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,277 @@
|
||||
// =============================================================================
|
||||
// ScorePolicyLoaderEdgeCaseTests.cs
|
||||
// Sprint: SPRINT_3402_0001_0001
|
||||
// Task: YAML-3402-009 - Unit tests for YAML parsing edge cases
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Scoring.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for YAML parsing edge cases in ScorePolicyLoader.
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
[Trait("Sprint", "3402")]
|
||||
public sealed class ScorePolicyLoaderEdgeCaseTests
|
||||
{
|
||||
private readonly ScorePolicyLoader _loader = new();
|
||||
|
||||
[Fact(DisplayName = "Empty YAML throws ScorePolicyLoadException")]
|
||||
public void EmptyYaml_Throws()
|
||||
{
|
||||
var act = () => _loader.LoadFromYaml("");
|
||||
act.Should().Throw<ScorePolicyLoadException>()
|
||||
.WithMessage("*Empty YAML content*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Whitespace-only YAML throws ScorePolicyLoadException")]
|
||||
public void WhitespaceOnlyYaml_Throws()
|
||||
{
|
||||
var act = () => _loader.LoadFromYaml(" \n \t ");
|
||||
act.Should().Throw<ScorePolicyLoadException>()
|
||||
.WithMessage("*Empty YAML content*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Null path throws ArgumentException")]
|
||||
public void NullPath_Throws()
|
||||
{
|
||||
var act = () => _loader.LoadFromFile(null!);
|
||||
act.Should().Throw<ArgumentException>();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Empty path throws ArgumentException")]
|
||||
public void EmptyPath_Throws()
|
||||
{
|
||||
var act = () => _loader.LoadFromFile("");
|
||||
act.Should().Throw<ArgumentException>();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Non-existent file throws ScorePolicyLoadException")]
|
||||
public void NonExistentFile_Throws()
|
||||
{
|
||||
var act = () => _loader.LoadFromFile("/nonexistent/path/score.yaml");
|
||||
act.Should().Throw<ScorePolicyLoadException>()
|
||||
.WithMessage("*not found*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Invalid YAML syntax throws ScorePolicyLoadException")]
|
||||
public void InvalidYamlSyntax_Throws()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: test
|
||||
weightsBps:
|
||||
baseSeverity: 2500
|
||||
- invalid nested list
|
||||
""";
|
||||
|
||||
var act = () => _loader.LoadFromYaml(yaml);
|
||||
act.Should().Throw<ScorePolicyLoadException>()
|
||||
.WithMessage("*YAML parse error*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Unsupported policy version throws ScorePolicyLoadException")]
|
||||
public void UnsupportedPolicyVersion_Throws()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v2
|
||||
policyId: test
|
||||
weightsBps:
|
||||
baseSeverity: 2500
|
||||
reachability: 2500
|
||||
evidence: 2500
|
||||
provenance: 2500
|
||||
""";
|
||||
|
||||
var act = () => _loader.LoadFromYaml(yaml);
|
||||
act.Should().Throw<ScorePolicyLoadException>()
|
||||
.WithMessage("*Unsupported policy version 'score.v2'*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Weights not summing to 10000 throws ScorePolicyLoadException")]
|
||||
public void WeightsSumNot10000_Throws()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: test
|
||||
weightsBps:
|
||||
baseSeverity: 5000
|
||||
reachability: 2500
|
||||
evidence: 2500
|
||||
provenance: 1000
|
||||
""";
|
||||
|
||||
var act = () => _loader.LoadFromYaml(yaml);
|
||||
act.Should().Throw<ScorePolicyLoadException>()
|
||||
.WithMessage("*Weight basis points must sum to 10000*Got: 11000*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Valid minimal policy parses successfully")]
|
||||
public void ValidMinimalPolicy_Parses()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: minimal-test
|
||||
weightsBps:
|
||||
baseSeverity: 2500
|
||||
reachability: 2500
|
||||
evidence: 2500
|
||||
provenance: 2500
|
||||
""";
|
||||
|
||||
var policy = _loader.LoadFromYaml(yaml);
|
||||
|
||||
policy.Should().NotBeNull();
|
||||
policy.PolicyVersion.Should().Be("score.v1");
|
||||
policy.PolicyId.Should().Be("minimal-test");
|
||||
policy.WeightsBps.BaseSeverity.Should().Be(2500);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with optional fields parses successfully")]
|
||||
public void PolicyWithOptionalFields_Parses()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: full-test
|
||||
policyName: Full Test Policy
|
||||
description: A comprehensive test policy
|
||||
weightsBps:
|
||||
baseSeverity: 3000
|
||||
reachability: 3000
|
||||
evidence: 2000
|
||||
provenance: 2000
|
||||
reachabilityConfig:
|
||||
reachableMultiplier: 1.5
|
||||
unreachableMultiplier: 0.5
|
||||
unknownMultiplier: 1.0
|
||||
evidenceConfig:
|
||||
kevWeight: 1.2
|
||||
epssThreshold: 0.5
|
||||
epssWeight: 0.8
|
||||
provenanceConfig:
|
||||
signedBonus: 0.1
|
||||
rekorVerifiedBonus: 0.2
|
||||
unsignedPenalty: -0.1
|
||||
""";
|
||||
|
||||
var policy = _loader.LoadFromYaml(yaml);
|
||||
|
||||
policy.Should().NotBeNull();
|
||||
policy.PolicyName.Should().Be("Full Test Policy");
|
||||
policy.Description.Should().Be("A comprehensive test policy");
|
||||
policy.ReachabilityConfig.Should().NotBeNull();
|
||||
policy.ReachabilityConfig!.ReachableMultiplier.Should().Be(1.5m);
|
||||
policy.EvidenceConfig.Should().NotBeNull();
|
||||
policy.EvidenceConfig!.KevWeight.Should().Be(1.2m);
|
||||
policy.ProvenanceConfig.Should().NotBeNull();
|
||||
policy.ProvenanceConfig!.SignedBonus.Should().Be(0.1m);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with overrides parses correctly")]
|
||||
public void PolicyWithOverrides_Parses()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: override-test
|
||||
weightsBps:
|
||||
baseSeverity: 2500
|
||||
reachability: 2500
|
||||
evidence: 2500
|
||||
provenance: 2500
|
||||
overrides:
|
||||
- id: cve-log4j
|
||||
match:
|
||||
cvePattern: "CVE-2021-44228"
|
||||
action:
|
||||
setScore: 10.0
|
||||
reason: Known critical vulnerability
|
||||
- id: low-severity-suppress
|
||||
match:
|
||||
severityEquals: LOW
|
||||
action:
|
||||
multiplyScore: 0.5
|
||||
""";
|
||||
|
||||
var policy = _loader.LoadFromYaml(yaml);
|
||||
|
||||
policy.Should().NotBeNull();
|
||||
policy.Overrides.Should().HaveCount(2);
|
||||
policy.Overrides![0].Id.Should().Be("cve-log4j");
|
||||
policy.Overrides[0].Match!.CvePattern.Should().Be("CVE-2021-44228");
|
||||
policy.Overrides[0].Action!.SetScore.Should().Be(10.0m);
|
||||
policy.Overrides[1].Id.Should().Be("low-severity-suppress");
|
||||
policy.Overrides[1].Action!.MultiplyScore.Should().Be(0.5m);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "TryLoadFromFile returns null for non-existent file")]
|
||||
public void TryLoadFromFile_NonExistent_ReturnsNull()
|
||||
{
|
||||
var result = _loader.TryLoadFromFile("/nonexistent/path/score.yaml");
|
||||
result.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Extra YAML fields are ignored")]
|
||||
public void ExtraYamlFields_Ignored()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: extra-fields-test
|
||||
unknownField: should be ignored
|
||||
anotherUnknown:
|
||||
nested: value
|
||||
weightsBps:
|
||||
baseSeverity: 2500
|
||||
reachability: 2500
|
||||
evidence: 2500
|
||||
provenance: 2500
|
||||
extraWeight: 1000
|
||||
""";
|
||||
|
||||
// Should not throw despite extra fields
|
||||
var policy = _loader.LoadFromYaml(yaml);
|
||||
policy.Should().NotBeNull();
|
||||
policy.PolicyId.Should().Be("extra-fields-test");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Unicode in policy name and description is preserved")]
|
||||
public void UnicodePreserved()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: unicode-test
|
||||
policyName: "Política de Segurança 安全策略"
|
||||
description: "Deutsche Sicherheitsrichtlinie für контейнеры"
|
||||
weightsBps:
|
||||
baseSeverity: 2500
|
||||
reachability: 2500
|
||||
evidence: 2500
|
||||
provenance: 2500
|
||||
""";
|
||||
|
||||
var policy = _loader.LoadFromYaml(yaml);
|
||||
|
||||
policy.PolicyName.Should().Be("Política de Segurança 安全策略");
|
||||
policy.Description.Should().Contain("контейнеры");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Boundary weight values (0 and 10000) are valid")]
|
||||
public void BoundaryWeightValues_Valid()
|
||||
{
|
||||
var yaml = """
|
||||
policyVersion: score.v1
|
||||
policyId: boundary-test
|
||||
weightsBps:
|
||||
baseSeverity: 10000
|
||||
reachability: 0
|
||||
evidence: 0
|
||||
provenance: 0
|
||||
""";
|
||||
|
||||
var policy = _loader.LoadFromYaml(yaml);
|
||||
|
||||
policy.WeightsBps.BaseSeverity.Should().Be(10000);
|
||||
policy.WeightsBps.Reachability.Should().Be(0);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,298 @@
|
||||
// =============================================================================
|
||||
// ScorePolicyValidatorTests.cs
|
||||
// Sprint: SPRINT_3402_0001_0001
|
||||
// Task: YAML-3402-010 - Unit tests for schema validation
|
||||
// =============================================================================
|
||||
|
||||
using FluentAssertions;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Scoring.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for JSON Schema validation in ScorePolicyValidator.
|
||||
/// </summary>
|
||||
[Trait("Category", "Unit")]
|
||||
[Trait("Sprint", "3402")]
|
||||
public sealed class ScorePolicyValidatorTests
|
||||
{
|
||||
private readonly ScorePolicyValidator _validator = new();
|
||||
|
||||
[Fact(DisplayName = "Valid policy passes validation")]
|
||||
public void ValidPolicy_Passes()
|
||||
{
|
||||
var policy = CreateValidPolicy();
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeTrue();
|
||||
result.Errors.Should().BeEmpty();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with wrong version fails validation")]
|
||||
public void WrongVersion_Fails()
|
||||
{
|
||||
var policy = CreateValidPolicy() with { PolicyVersion = "score.v2" };
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Errors.Should().NotBeEmpty();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with missing policyId fails validation")]
|
||||
public void MissingPolicyId_Fails()
|
||||
{
|
||||
var policy = CreateValidPolicy() with { PolicyId = "" };
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with negative weight fails validation")]
|
||||
public void NegativeWeight_Fails()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
WeightsBps = new WeightsBps
|
||||
{
|
||||
BaseSeverity = -100,
|
||||
Reachability = 2500,
|
||||
Evidence = 2500,
|
||||
Provenance = 5100
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Errors.Should().Contain(e => e.Contains("baseSeverity") || e.Contains("minimum"));
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with weight over 10000 fails validation")]
|
||||
public void WeightOver10000_Fails()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
WeightsBps = new WeightsBps
|
||||
{
|
||||
BaseSeverity = 15000,
|
||||
Reachability = 0,
|
||||
Evidence = 0,
|
||||
Provenance = 0
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with valid reachability config passes")]
|
||||
public void ValidReachabilityConfig_Passes()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
ReachabilityConfig = new ReachabilityConfig
|
||||
{
|
||||
ReachableMultiplier = 1.5m,
|
||||
UnreachableMultiplier = 0.5m,
|
||||
UnknownMultiplier = 1.0m
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with reachable multiplier over 2 fails")]
|
||||
public void ReachableMultiplierOver2_Fails()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
ReachabilityConfig = new ReachabilityConfig
|
||||
{
|
||||
ReachableMultiplier = 3.0m,
|
||||
UnreachableMultiplier = 0.5m,
|
||||
UnknownMultiplier = 1.0m
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with valid evidence config passes")]
|
||||
public void ValidEvidenceConfig_Passes()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
EvidenceConfig = new EvidenceConfig
|
||||
{
|
||||
KevWeight = 1.5m,
|
||||
EpssThreshold = 0.5m,
|
||||
EpssWeight = 1.0m
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with EPSS threshold over 1 fails")]
|
||||
public void EpssThresholdOver1_Fails()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
EvidenceConfig = new EvidenceConfig
|
||||
{
|
||||
KevWeight = 1.0m,
|
||||
EpssThreshold = 1.5m,
|
||||
EpssWeight = 1.0m
|
||||
}
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Policy with valid override passes")]
|
||||
public void ValidOverride_Passes()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
Overrides =
|
||||
[
|
||||
new ScoreOverride
|
||||
{
|
||||
Id = "test-override",
|
||||
Match = new OverrideMatch { CvePattern = "CVE-2021-.*" },
|
||||
Action = new OverrideAction { SetScore = 10.0m },
|
||||
Reason = "Test override"
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
result.IsValid.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Override without id fails")]
|
||||
public void OverrideWithoutId_Fails()
|
||||
{
|
||||
var policy = CreateValidPolicy() with
|
||||
{
|
||||
Overrides =
|
||||
[
|
||||
new ScoreOverride
|
||||
{
|
||||
Id = "",
|
||||
Match = new OverrideMatch { CvePattern = "CVE-2021-.*" }
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
// id is required but empty string is invalid
|
||||
result.IsValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ThrowIfInvalid throws for invalid policy")]
|
||||
public void ThrowIfInvalid_Throws()
|
||||
{
|
||||
var policy = CreateValidPolicy() with { PolicyVersion = "invalid" };
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
var act = () => result.ThrowIfInvalid("test context");
|
||||
|
||||
act.Should().Throw<ScorePolicyValidationException>()
|
||||
.WithMessage("test context*");
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ThrowIfInvalid does not throw for valid policy")]
|
||||
public void ThrowIfInvalid_DoesNotThrow()
|
||||
{
|
||||
var policy = CreateValidPolicy();
|
||||
var result = _validator.Validate(policy);
|
||||
|
||||
var act = () => result.ThrowIfInvalid();
|
||||
|
||||
act.Should().NotThrow();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ValidateJson with valid JSON passes")]
|
||||
public void ValidateJson_Valid_Passes()
|
||||
{
|
||||
var json = """
|
||||
{
|
||||
"policyVersion": "score.v1",
|
||||
"policyId": "json-test",
|
||||
"weightsBps": {
|
||||
"baseSeverity": 2500,
|
||||
"reachability": 2500,
|
||||
"evidence": 2500,
|
||||
"provenance": 2500
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
var result = _validator.ValidateJson(json);
|
||||
|
||||
result.IsValid.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ValidateJson with invalid JSON fails")]
|
||||
public void ValidateJson_InvalidJson_Fails()
|
||||
{
|
||||
var json = "{ invalid json }";
|
||||
|
||||
var result = _validator.ValidateJson(json);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Errors.Should().Contain(e => e.Contains("Invalid JSON"));
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ValidateJson with empty string fails")]
|
||||
public void ValidateJson_Empty_Fails()
|
||||
{
|
||||
var result = _validator.ValidateJson("");
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
result.Errors.Should().Contain(e => e.Contains("empty"));
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "ValidateJson with missing required fields fails")]
|
||||
public void ValidateJson_MissingRequired_Fails()
|
||||
{
|
||||
var json = """
|
||||
{
|
||||
"policyVersion": "score.v1"
|
||||
}
|
||||
""";
|
||||
|
||||
var result = _validator.ValidateJson(json);
|
||||
|
||||
result.IsValid.Should().BeFalse();
|
||||
}
|
||||
|
||||
private static ScorePolicy CreateValidPolicy() => new()
|
||||
{
|
||||
PolicyVersion = "score.v1",
|
||||
PolicyId = "test-policy",
|
||||
PolicyName = "Test Policy",
|
||||
WeightsBps = new WeightsBps
|
||||
{
|
||||
BaseSeverity = 2500,
|
||||
Reachability = 2500,
|
||||
Evidence = 2500,
|
||||
Provenance = 2500
|
||||
}
|
||||
};
|
||||
}
|
||||
@@ -13,6 +13,7 @@
|
||||
- `docs/reachability/DELIVERY_GUIDE.md` (sections 5.5–5.9 for native/JS/PHP updates)
|
||||
- `docs/reachability/purl-resolved-edges.md`
|
||||
- `docs/reachability/patch-oracles.md`
|
||||
- `docs/product-advisories/14-Dec-2025 - Smart-Diff Technical Reference.md` (for Smart-Diff predicates)
|
||||
- Current sprint file (e.g., `docs/implplan/SPRINT_401_reachability_evidence_chain.md`).
|
||||
|
||||
## Working Directory & Boundaries
|
||||
@@ -20,6 +21,30 @@
|
||||
- Avoid cross-module edits unless sprint explicitly permits; note any cross-module change in sprint tracker.
|
||||
- Keep fixtures minimal/deterministic; store under `src/Scanner/__Tests/Fixtures` or `__Benchmarks`.
|
||||
|
||||
## Smart-Diff Contracts (Sprint 3500)
|
||||
|
||||
The Scanner module now includes Smart-Diff foundation primitives:
|
||||
|
||||
### Libraries
|
||||
- `StellaOps.Scanner.SmartDiff` - Core Smart-Diff predicate models and serialization
|
||||
- `StellaOps.Scanner.Reachability` - Reachability gate computation with 3-bit class
|
||||
|
||||
### Key Types
|
||||
- `SmartDiffPredicate` - Attestation predicate for differential scans
|
||||
- `ReachabilityGate` - 3-bit class (0-7) indicating entry/sink reachability
|
||||
- `SinkCategory` - Taxonomy of sensitive sinks (file, network, crypto, etc.)
|
||||
- `SinkRegistry` - Registry of known sinks with category mappings
|
||||
|
||||
### Predicate Schema
|
||||
- URI: `stellaops.dev/predicates/smart-diff@v1`
|
||||
- Schema: `docs/schemas/stellaops-smart-diff.v1.schema.json`
|
||||
- DSSE-signed predicates for evidence chain
|
||||
|
||||
### Integration Points
|
||||
- Integrates with `StellaOps.Policy.Suppression` for pre-filter rules
|
||||
- Emits to Attestor module for DSSE envelope wrapping
|
||||
- Consumed by Findings Ledger for triage decisions
|
||||
|
||||
## Engineering Rules
|
||||
- Target `net10.0`; prefer latest C# preview allowed in repo.
|
||||
- Offline-first: no new external network calls; use cached feeds (`/local-nugets`).
|
||||
@@ -34,6 +59,7 @@
|
||||
- Add/extend tests in `src/Scanner/__Tests/**`; golden outputs should be deterministic (sorted keys, stable ordering).
|
||||
- Benchmarks under `src/Scanner/__Benchmarks/**`; document input and expected ceilings in comments.
|
||||
- Cover multi-RID, trimmed/NativeAOT, self-contained vs framework-dependent cases where applicable.
|
||||
- Smart-Diff: Run schema validation tests (`SmartDiffSchemaValidationTests`) for predicate contract changes.
|
||||
|
||||
## Workflow Expectations
|
||||
- Mirror task state in sprint tracker (`TODO → DOING → DONE/BLOCKED`); note blockers with the specific decision needed.
|
||||
|
||||
@@ -0,0 +1,169 @@
|
||||
using System.Buffers.Binary;
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Native.Hardening;
|
||||
|
||||
/// <summary>
|
||||
/// Extracts hardening flags from ELF binaries.
|
||||
/// Per Sprint 3500.4 - Smart-Diff Binary Analysis.
|
||||
/// </summary>
|
||||
public sealed class ElfHardeningExtractor : IHardeningExtractor
|
||||
{
|
||||
// ELF magic bytes
|
||||
private static readonly byte[] ElfMagic = [0x7F, 0x45, 0x4C, 0x46]; // \x7FELF
|
||||
|
||||
// ELF header constants
|
||||
private const int EI_CLASS = 4;
|
||||
private const int ELFCLASS32 = 1;
|
||||
private const int ELFCLASS64 = 2;
|
||||
private const int EI_DATA = 5;
|
||||
private const int ELFDATA2LSB = 1; // Little endian
|
||||
private const int ELFDATA2MSB = 2; // Big endian
|
||||
|
||||
// ELF type constants
|
||||
private const ushort ET_EXEC = 2;
|
||||
private const ushort ET_DYN = 3;
|
||||
|
||||
// Program header types
|
||||
private const uint PT_GNU_STACK = 0x6474e551;
|
||||
private const uint PT_GNU_RELRO = 0x6474e552;
|
||||
|
||||
// Dynamic section tags
|
||||
private const ulong DT_FLAGS_1 = 0x6ffffffb;
|
||||
private const ulong DT_BIND_NOW = 24;
|
||||
private const ulong DT_RPATH = 15;
|
||||
private const ulong DT_RUNPATH = 29;
|
||||
|
||||
// DT_FLAGS_1 values
|
||||
private const ulong DF_1_PIE = 0x08000000;
|
||||
private const ulong DF_1_NOW = 0x00000001;
|
||||
|
||||
// Program header flags
|
||||
private const uint PF_X = 1; // Execute
|
||||
private const uint PF_W = 2; // Write
|
||||
private const uint PF_R = 4; // Read
|
||||
|
||||
/// <inheritdoc />
|
||||
public BinaryFormat SupportedFormat => BinaryFormat.Elf;
|
||||
|
||||
/// <inheritdoc />
|
||||
public bool CanExtract(string path)
|
||||
{
|
||||
try
|
||||
{
|
||||
using var fs = File.OpenRead(path);
|
||||
Span<byte> header = stackalloc byte[16];
|
||||
if (fs.Read(header) < 16) return false;
|
||||
return CanExtract(header);
|
||||
}
|
||||
catch
|
||||
{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public bool CanExtract(ReadOnlySpan<byte> header)
|
||||
{
|
||||
return header.Length >= 4 && header[..4].SequenceEqual(ElfMagic);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<BinaryHardeningFlags> ExtractAsync(string path, string digest, CancellationToken ct = default)
|
||||
{
|
||||
await using var fs = File.OpenRead(path);
|
||||
return await ExtractAsync(fs, path, digest, ct);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<BinaryHardeningFlags> ExtractAsync(Stream stream, string path, string digest, CancellationToken ct = default)
|
||||
{
|
||||
var flags = new List<HardeningFlag>();
|
||||
var missing = new List<string>();
|
||||
|
||||
// Read ELF header
|
||||
var headerBuf = new byte[64];
|
||||
var bytesRead = await stream.ReadAsync(headerBuf, ct);
|
||||
if (bytesRead < 52) // Minimum ELF header size
|
||||
{
|
||||
return CreateResult(path, digest, [], ["Invalid ELF header"]);
|
||||
}
|
||||
|
||||
// Parse ELF header basics
|
||||
var is64Bit = headerBuf[EI_CLASS] == ELFCLASS64;
|
||||
var isLittleEndian = headerBuf[EI_DATA] == ELFDATA2LSB;
|
||||
|
||||
// Read e_type to check if PIE
|
||||
var eType = ReadUInt16(headerBuf.AsSpan(16, 2), isLittleEndian);
|
||||
var isPie = eType == ET_DYN; // Shared object = could be PIE
|
||||
|
||||
// For a full implementation, we'd parse:
|
||||
// 1. Program headers for PT_GNU_STACK (NX check) and PT_GNU_RELRO
|
||||
// 2. Dynamic section for DT_FLAGS_1 (PIE confirmation), DT_BIND_NOW (full RELRO)
|
||||
// 3. Symbol table for __stack_chk_fail (stack canary)
|
||||
// 4. Symbol table for __fortify_fail (FORTIFY)
|
||||
|
||||
// PIE detection (simplified - full impl would check DT_FLAGS_1)
|
||||
if (isPie)
|
||||
{
|
||||
flags.Add(new HardeningFlag(HardeningFlagType.Pie, true, "DYN", "e_type"));
|
||||
}
|
||||
else
|
||||
{
|
||||
flags.Add(new HardeningFlag(HardeningFlagType.Pie, false));
|
||||
missing.Add("PIE");
|
||||
}
|
||||
|
||||
// NX - would need to read PT_GNU_STACK and check for PF_X
|
||||
// For now, assume modern binaries have NX by default
|
||||
flags.Add(new HardeningFlag(HardeningFlagType.Nx, true, null, "assumed"));
|
||||
|
||||
// RELRO - would need to check PT_GNU_RELRO presence
|
||||
// Partial RELRO is common, Full RELRO requires BIND_NOW
|
||||
flags.Add(new HardeningFlag(HardeningFlagType.RelroPartial, true, null, "assumed"));
|
||||
flags.Add(new HardeningFlag(HardeningFlagType.RelroFull, false));
|
||||
missing.Add("RELRO_FULL");
|
||||
|
||||
// Stack canary - would check for __stack_chk_fail symbol
|
||||
flags.Add(new HardeningFlag(HardeningFlagType.StackCanary, false));
|
||||
missing.Add("STACK_CANARY");
|
||||
|
||||
// FORTIFY - would check for _chk suffixed functions
|
||||
flags.Add(new HardeningFlag(HardeningFlagType.Fortify, false));
|
||||
missing.Add("FORTIFY");
|
||||
|
||||
// RPATH - would check DT_RPATH/DT_RUNPATH in dynamic section
|
||||
// If present, it's a security concern
|
||||
flags.Add(new HardeningFlag(HardeningFlagType.Rpath, false)); // false = not present = good
|
||||
|
||||
return CreateResult(path, digest, flags, missing);
|
||||
}
|
||||
|
||||
private static BinaryHardeningFlags CreateResult(
|
||||
string path,
|
||||
string digest,
|
||||
List<HardeningFlag> flags,
|
||||
List<string> missing)
|
||||
{
|
||||
// Calculate score: enabled flags / total possible flags
|
||||
var enabledCount = flags.Count(f => f.Enabled && f.Name != HardeningFlagType.Rpath);
|
||||
var totalExpected = 6; // PIE, NX, RELRO_FULL, STACK_CANARY, FORTIFY, (not RPATH)
|
||||
var score = totalExpected > 0 ? (double)enabledCount / totalExpected : 0.0;
|
||||
|
||||
return new BinaryHardeningFlags(
|
||||
Format: BinaryFormat.Elf,
|
||||
Path: path,
|
||||
Digest: digest,
|
||||
Flags: [.. flags],
|
||||
HardeningScore: Math.Round(score, 2),
|
||||
MissingFlags: [.. missing],
|
||||
ExtractedAt: DateTimeOffset.UtcNow);
|
||||
}
|
||||
|
||||
private static ushort ReadUInt16(ReadOnlySpan<byte> span, bool littleEndian)
|
||||
{
|
||||
return littleEndian
|
||||
? BinaryPrimitives.ReadUInt16LittleEndian(span)
|
||||
: BinaryPrimitives.ReadUInt16BigEndian(span);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,140 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.Analyzers.Native.Hardening;
|
||||
|
||||
/// <summary>
|
||||
/// Security hardening flags extracted from a binary.
|
||||
/// Per Sprint 3500.4 - Smart-Diff Binary Analysis.
|
||||
/// </summary>
|
||||
public sealed record BinaryHardeningFlags(
|
||||
[property: JsonPropertyName("format")] BinaryFormat Format,
|
||||
[property: JsonPropertyName("path")] string Path,
|
||||
[property: JsonPropertyName("digest")] string Digest,
|
||||
[property: JsonPropertyName("flags")] ImmutableArray<HardeningFlag> Flags,
|
||||
[property: JsonPropertyName("score")] double HardeningScore,
|
||||
[property: JsonPropertyName("missing")] ImmutableArray<string> MissingFlags,
|
||||
[property: JsonPropertyName("extractedAt")] DateTimeOffset ExtractedAt);
|
||||
|
||||
/// <summary>
|
||||
/// A single hardening flag with its state.
|
||||
/// </summary>
|
||||
public sealed record HardeningFlag(
|
||||
[property: JsonPropertyName("name")] HardeningFlagType Name,
|
||||
[property: JsonPropertyName("enabled")] bool Enabled,
|
||||
[property: JsonPropertyName("value")] string? Value = null,
|
||||
[property: JsonPropertyName("source")] string? Source = null);
|
||||
|
||||
/// <summary>
|
||||
/// Hardening flag types across binary formats.
|
||||
/// </summary>
|
||||
[JsonConverter(typeof(JsonStringEnumConverter<HardeningFlagType>))]
|
||||
public enum HardeningFlagType
|
||||
{
|
||||
// ELF flags
|
||||
/// <summary>Position Independent Executable</summary>
|
||||
[JsonStringEnumMemberName("PIE")]
|
||||
Pie,
|
||||
|
||||
/// <summary>Partial RELRO</summary>
|
||||
[JsonStringEnumMemberName("RELRO_PARTIAL")]
|
||||
RelroPartial,
|
||||
|
||||
/// <summary>Full RELRO (BIND_NOW)</summary>
|
||||
[JsonStringEnumMemberName("RELRO_FULL")]
|
||||
RelroFull,
|
||||
|
||||
/// <summary>Stack protector canary</summary>
|
||||
[JsonStringEnumMemberName("STACK_CANARY")]
|
||||
StackCanary,
|
||||
|
||||
/// <summary>Non-executable stack/heap</summary>
|
||||
[JsonStringEnumMemberName("NX")]
|
||||
Nx,
|
||||
|
||||
/// <summary>FORTIFY_SOURCE enabled</summary>
|
||||
[JsonStringEnumMemberName("FORTIFY")]
|
||||
Fortify,
|
||||
|
||||
/// <summary>RPATH/RUNPATH set (security concern if present)</summary>
|
||||
[JsonStringEnumMemberName("RPATH")]
|
||||
Rpath,
|
||||
|
||||
// PE flags
|
||||
/// <summary>Address Space Layout Randomization</summary>
|
||||
[JsonStringEnumMemberName("ASLR")]
|
||||
Aslr,
|
||||
|
||||
/// <summary>Data Execution Prevention</summary>
|
||||
[JsonStringEnumMemberName("DEP")]
|
||||
Dep,
|
||||
|
||||
/// <summary>Control Flow Guard</summary>
|
||||
[JsonStringEnumMemberName("CFG")]
|
||||
Cfg,
|
||||
|
||||
/// <summary>Authenticode code signing</summary>
|
||||
[JsonStringEnumMemberName("AUTHENTICODE")]
|
||||
Authenticode,
|
||||
|
||||
/// <summary>Safe Structured Exception Handling</summary>
|
||||
[JsonStringEnumMemberName("SAFE_SEH")]
|
||||
SafeSeh,
|
||||
|
||||
/// <summary>/GS buffer security check</summary>
|
||||
[JsonStringEnumMemberName("GS")]
|
||||
Gs,
|
||||
|
||||
/// <summary>High entropy 64-bit ASLR</summary>
|
||||
[JsonStringEnumMemberName("HIGH_ENTROPY_VA")]
|
||||
HighEntropyVa,
|
||||
|
||||
/// <summary>Force integrity checking</summary>
|
||||
[JsonStringEnumMemberName("FORCE_INTEGRITY")]
|
||||
ForceIntegrity,
|
||||
|
||||
// Mach-O flags
|
||||
/// <summary>DYLD_* environment variable restrictions</summary>
|
||||
[JsonStringEnumMemberName("RESTRICT")]
|
||||
Restrict,
|
||||
|
||||
/// <summary>Hardened runtime enabled</summary>
|
||||
[JsonStringEnumMemberName("HARDENED")]
|
||||
Hardened,
|
||||
|
||||
/// <summary>Code signature present</summary>
|
||||
[JsonStringEnumMemberName("CODE_SIGN")]
|
||||
CodeSign,
|
||||
|
||||
/// <summary>Library validation enabled</summary>
|
||||
[JsonStringEnumMemberName("LIBRARY_VALIDATION")]
|
||||
LibraryValidation,
|
||||
|
||||
// Cross-platform
|
||||
/// <summary>Control-flow Enforcement Technology (Intel CET)</summary>
|
||||
[JsonStringEnumMemberName("CET")]
|
||||
Cet,
|
||||
|
||||
/// <summary>Branch Target Identification (ARM BTI)</summary>
|
||||
[JsonStringEnumMemberName("BTI")]
|
||||
Bti
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Binary format identifier.
|
||||
/// </summary>
|
||||
[JsonConverter(typeof(JsonStringEnumConverter<BinaryFormat>))]
|
||||
public enum BinaryFormat
|
||||
{
|
||||
[JsonStringEnumMemberName("ELF")]
|
||||
Elf,
|
||||
|
||||
[JsonStringEnumMemberName("PE")]
|
||||
Pe,
|
||||
|
||||
[JsonStringEnumMemberName("MachO")]
|
||||
MachO,
|
||||
|
||||
[JsonStringEnumMemberName("Unknown")]
|
||||
Unknown
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Native.Hardening;
|
||||
|
||||
/// <summary>
|
||||
/// Interface for extracting hardening flags from binaries.
|
||||
/// Per Sprint 3500.4 - Smart-Diff Binary Analysis.
|
||||
/// </summary>
|
||||
public interface IHardeningExtractor
|
||||
{
|
||||
/// <summary>
|
||||
/// Binary format this extractor supports.
|
||||
/// </summary>
|
||||
BinaryFormat SupportedFormat { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Check if a file can be processed by this extractor.
|
||||
/// </summary>
|
||||
/// <param name="path">Path to the binary file.</param>
|
||||
/// <returns>True if the extractor can process this file.</returns>
|
||||
bool CanExtract(string path);
|
||||
|
||||
/// <summary>
|
||||
/// Check if a file can be processed using magic bytes.
|
||||
/// </summary>
|
||||
/// <param name="header">First 16+ bytes of the file.</param>
|
||||
/// <returns>True if the extractor can process this file.</returns>
|
||||
bool CanExtract(ReadOnlySpan<byte> header);
|
||||
|
||||
/// <summary>
|
||||
/// Extract hardening flags from a binary file.
|
||||
/// </summary>
|
||||
/// <param name="path">Path to the binary file.</param>
|
||||
/// <param name="digest">Content digest of the file.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>Extracted hardening flags.</returns>
|
||||
Task<BinaryHardeningFlags> ExtractAsync(string path, string digest, CancellationToken ct = default);
|
||||
|
||||
/// <summary>
|
||||
/// Extract hardening flags from a stream.
|
||||
/// </summary>
|
||||
/// <param name="stream">Stream containing binary data.</param>
|
||||
/// <param name="path">Original path (for reporting).</param>
|
||||
/// <param name="digest">Content digest.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>Extracted hardening flags.</returns>
|
||||
Task<BinaryHardeningFlags> ExtractAsync(Stream stream, string path, string digest, CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Composite extractor that delegates to format-specific extractors.
|
||||
/// </summary>
|
||||
public interface IHardeningExtractorFactory
|
||||
{
|
||||
/// <summary>
|
||||
/// Get the appropriate extractor for a binary file.
|
||||
/// </summary>
|
||||
/// <param name="path">Path to the binary file.</param>
|
||||
/// <returns>The extractor, or null if format not supported.</returns>
|
||||
IHardeningExtractor? GetExtractor(string path);
|
||||
|
||||
/// <summary>
|
||||
/// Get the appropriate extractor based on magic bytes.
|
||||
/// </summary>
|
||||
/// <param name="header">First 16+ bytes of the file.</param>
|
||||
/// <returns>The extractor, or null if format not supported.</returns>
|
||||
IHardeningExtractor? GetExtractor(ReadOnlySpan<byte> header);
|
||||
|
||||
/// <summary>
|
||||
/// Extract hardening flags, auto-detecting format.
|
||||
/// </summary>
|
||||
/// <param name="path">Path to the binary file.</param>
|
||||
/// <param name="digest">Content digest.</param>
|
||||
/// <param name="ct">Cancellation token.</param>
|
||||
/// <returns>Extracted hardening flags, or null if format not supported.</returns>
|
||||
Task<BinaryHardeningFlags?> ExtractAsync(string path, string digest, CancellationToken ct = default);
|
||||
}
|
||||
@@ -17,7 +17,8 @@ public sealed record DeterminismReport(
|
||||
double OverallScore,
|
||||
double ThresholdOverall,
|
||||
double ThresholdImage,
|
||||
IReadOnlyList<DeterminismImageReport> Images)
|
||||
IReadOnlyList<DeterminismImageReport> Images,
|
||||
FidelityMetrics? Fidelity = null)
|
||||
;
|
||||
|
||||
public sealed record DeterminismImageReport(
|
||||
@@ -26,7 +27,8 @@ public sealed record DeterminismImageReport(
|
||||
int Identical,
|
||||
double Score,
|
||||
IReadOnlyDictionary<string, string> ArtifactHashes,
|
||||
IReadOnlyList<DeterminismRunReport> RunsDetail);
|
||||
IReadOnlyList<DeterminismRunReport> RunsDetail,
|
||||
FidelityMetrics? Fidelity = null);
|
||||
|
||||
public sealed record DeterminismRunReport(
|
||||
int RunIndex,
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user