doctor enhancements, setup enhancements, UI functionality and design consolidation, test project fixes, product advisory attestation/Rekor and delta verification enhancements

This commit is contained in:
master
2026-01-19 09:02:59 +02:00
parent 8c4bf54aed
commit 17419ba7c4
809 changed files with 170738 additions and 12244 deletions

View File

@@ -0,0 +1,273 @@
// -----------------------------------------------------------------------------
// VerdictEndpoints.cs
// Sprint: SPRINT_20260118_015_Attestor_verdict_ledger_foundation
// Task: VL-004 - Create POST /verdicts API endpoint
// Description: REST API endpoints for verdict ledger operations
// -----------------------------------------------------------------------------
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing;
using StellaOps.Attestor.Persistence.Entities;
using StellaOps.Attestor.Services;
namespace StellaOps.Attestor.WebService.Endpoints;
/// <summary>
/// REST API endpoints for the verdict ledger.
/// </summary>
public static class VerdictEndpoints
{
    /// <summary>
    /// Maps verdict ledger endpoints under <c>/api/v1/verdicts</c>.
    /// </summary>
    public static void MapVerdictEndpoints(this IEndpointRouteBuilder app)
    {
        var group = app.MapGroup("/api/v1/verdicts")
            .WithTags("Verdicts")
            .WithOpenApi();
        group.MapPost("/", CreateVerdict)
            .WithName("CreateVerdict")
            .WithSummary("Append a new verdict to the ledger")
            .WithDescription("Creates a new verdict entry with cryptographic chain linking")
            .Produces<CreateVerdictResponse>(StatusCodes.Status201Created)
            .Produces(StatusCodes.Status400BadRequest)
            .Produces(StatusCodes.Status401Unauthorized)
            .Produces(StatusCodes.Status409Conflict);
        group.MapGet("/", QueryVerdicts)
            .WithName("QueryVerdicts")
            .WithSummary("Query verdicts by bom-ref")
            .WithDescription("Returns all verdicts for a given package/artifact reference")
            .Produces<IReadOnlyList<VerdictResponse>>();
        group.MapGet("/{hash}", GetVerdictByHash)
            .WithName("GetVerdictByHash")
            .WithSummary("Get a verdict by its hash")
            .WithDescription("Returns a specific verdict entry by its SHA-256 hash")
            .Produces<VerdictResponse>()
            .Produces(StatusCodes.Status404NotFound);
        group.MapGet("/chain/verify", VerifyChain)
            .WithName("VerifyChainIntegrity")
            .WithSummary("Verify ledger chain integrity")
            .WithDescription("Walks the hash chain to verify cryptographic integrity")
            .Produces<ChainVerificationResult>();
        group.MapGet("/latest", GetLatestVerdict)
            .WithName("GetLatestVerdict")
            .WithSummary("Get the latest verdict for a bom-ref")
            .Produces<VerdictResponse>()
            .Produces(StatusCodes.Status404NotFound);
    }

    /// <summary>
    /// Resolves the tenant id from the X-Tenant-Id header.
    /// Returns Guid.Empty when the header is absent or not a valid GUID
    /// (placeholder until tenant resolution comes from authentication).
    /// TryParse is used instead of Guid.Parse so a malformed header yields
    /// the empty tenant rather than an unhandled FormatException (HTTP 500).
    /// </summary>
    private static Guid ResolveTenantId(HttpContext context) =>
        context.Request.Headers.TryGetValue("X-Tenant-Id", out var tid)
            && Guid.TryParse(tid.FirstOrDefault(), out var tenantId)
            ? tenantId
            : Guid.Empty;

    /// <summary>
    /// POST /api/v1/verdicts — validates the request and appends a verdict.
    /// Returns 201 with the computed hash, 400 on missing required fields,
    /// and 409 when the append would violate the hash chain.
    /// </summary>
    private static async Task<IResult> CreateVerdict(
        CreateVerdictRequest request,
        IVerdictLedgerService service,
        HttpContext context,
        CancellationToken ct)
    {
        if (string.IsNullOrEmpty(request.BomRef))
        {
            return Results.BadRequest(new { error = "bom_ref is required" });
        }
        if (string.IsNullOrEmpty(request.PolicyBundleId))
        {
            return Results.BadRequest(new { error = "policy_bundle_id is required" });
        }
        // TODO: Verify DSSE signature against Authority key roster
        // if (!await VerifySignatureAsync(request.Signature, request, ct))
        // {
        //     return Results.Unauthorized();
        // }
        var tenantId = ResolveTenantId(context);
        try
        {
            var appendRequest = new AppendVerdictRequest
            {
                BomRef = request.BomRef,
                CycloneDxSerial = request.CycloneDxSerial,
                // Unrecognized decision strings deliberately map to Unknown.
                Decision = Enum.TryParse<VerdictDecision>(request.Decision, ignoreCase: true, out var d) ? d : VerdictDecision.Unknown,
                Reason = request.Reason,
                PolicyBundleId = request.PolicyBundleId,
                PolicyBundleHash = request.PolicyBundleHash ?? "",
                VerifierImageDigest = request.VerifierImageDigest ?? "",
                SignerKeyId = request.SignerKeyId ?? "",
                TenantId = tenantId
            };
            var entry = await service.AppendVerdictAsync(appendRequest, ct);
            return Results.Created($"/api/v1/verdicts/{entry.VerdictHash}", new CreateVerdictResponse
            {
                VerdictHash = entry.VerdictHash,
                LedgerId = entry.LedgerId,
                CreatedAt = entry.CreatedAt
            });
        }
        catch (Repositories.ChainIntegrityException ex)
        {
            return Results.Conflict(new { error = "Chain integrity violation", details = ex.Message });
        }
    }

    /// <summary>
    /// GET /api/v1/verdicts?bomRef=... — all verdicts for a bom-ref.
    /// NOTE(review): loads the tenant's entire chain and filters in memory;
    /// a bom-ref query on the service would avoid this — TODO.
    /// </summary>
    private static async Task<IResult> QueryVerdicts(
        string bomRef,
        IVerdictLedgerService service,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = ResolveTenantId(context);
        var entries = await service.GetChainAsync(tenantId, "", "", ct);
        var filtered = entries.Where(e => e.BomRef == bomRef).ToList();
        return Results.Ok(filtered.Select(MapToResponse).ToList());
    }

    /// <summary>
    /// GET /api/v1/verdicts/{hash} — currently always 404: the ledger service
    /// exposes no hash lookup yet. Non-async so the compiler does not warn
    /// (CS1998) about an async method without awaits.
    /// TODO: add a GetByHash operation to IVerdictLedgerService (or query the
    /// repository directly) and implement this endpoint.
    /// </summary>
    private static Task<IResult> GetVerdictByHash(
        string hash,
        IVerdictLedgerService service,
        CancellationToken ct)
    {
        return Task.FromResult<IResult>(Results.NotFound(new { error = "Verdict not found" }));
    }

    /// <summary>
    /// GET /api/v1/verdicts/chain/verify — walks the tenant's hash chain and
    /// reports integrity.
    /// </summary>
    private static async Task<IResult> VerifyChain(
        IVerdictLedgerService service,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = ResolveTenantId(context);
        var result = await service.VerifyChainIntegrityAsync(tenantId, ct);
        return Results.Ok(result);
    }

    /// <summary>
    /// GET /api/v1/verdicts/latest?bomRef=... — newest verdict or 404.
    /// </summary>
    private static async Task<IResult> GetLatestVerdict(
        string bomRef,
        IVerdictLedgerService service,
        HttpContext context,
        CancellationToken ct)
    {
        var tenantId = ResolveTenantId(context);
        var entry = await service.GetLatestVerdictAsync(bomRef, tenantId, ct);
        if (entry == null)
        {
            return Results.NotFound(new { error = "No verdict found for bom_ref" });
        }
        return Results.Ok(MapToResponse(entry));
    }

    /// <summary>
    /// Projects a ledger entity onto the wire DTO. The decision enum is
    /// lower-cased to match the request-side string convention.
    /// </summary>
    private static VerdictResponse MapToResponse(VerdictLedgerEntry entry)
    {
        return new VerdictResponse
        {
            LedgerId = entry.LedgerId,
            BomRef = entry.BomRef,
            CycloneDxSerial = entry.CycloneDxSerial,
            RekorUuid = entry.RekorUuid,
            Decision = entry.Decision.ToString().ToLowerInvariant(),
            Reason = entry.Reason,
            PolicyBundleId = entry.PolicyBundleId,
            PolicyBundleHash = entry.PolicyBundleHash,
            VerifierImageDigest = entry.VerifierImageDigest,
            SignerKeyId = entry.SignerKeyId,
            PrevHash = entry.PrevHash,
            VerdictHash = entry.VerdictHash,
            CreatedAt = entry.CreatedAt
        };
    }
}
// Request/Response DTOs
/// <summary>
/// Request body for appending a new verdict to the ledger.
/// </summary>
public sealed record CreateVerdictRequest
{
    /// <summary>Package URL or container digest the verdict applies to. Required.</summary>
    public string BomRef { get; init; } = string.Empty;

    /// <summary>Serial number of the associated CycloneDX document, if any.</summary>
    public string? CycloneDxSerial { get; init; }

    /// <summary>UUID of the Rekor transparency-log entry, if already published.</summary>
    public string? RekorUuid { get; init; }

    /// <summary>Decision string: approve, reject, unknown, or pending. Defaults to "unknown".</summary>
    public string Decision { get; init; } = "unknown";

    /// <summary>Optional human-readable reason behind the decision.</summary>
    public string? Reason { get; init; }

    /// <summary>Identifier of the policy bundle that produced the decision. Required.</summary>
    public string PolicyBundleId { get; init; } = string.Empty;

    /// <summary>Hash of the policy bundle content.</summary>
    public string? PolicyBundleHash { get; init; }

    /// <summary>Digest of the verifier container image.</summary>
    public string? VerifierImageDigest { get; init; }

    /// <summary>Identifier of the signing key.</summary>
    public string? SignerKeyId { get; init; }

    /// <summary>Base64-encoded DSSE signature over the verdict.</summary>
    public string? Signature { get; init; }
}
/// <summary>
/// Response returned after a verdict has been appended to the ledger.
/// </summary>
public sealed record CreateVerdictResponse
{
    /// <summary>SHA-256 hash computed for the new verdict entry.</summary>
    public required string VerdictHash { get; init; }

    /// <summary>Identifier of the newly created ledger entry.</summary>
    public Guid LedgerId { get; init; }

    /// <summary>Timestamp at which the entry was created.</summary>
    public DateTimeOffset CreatedAt { get; init; }
}
/// <summary>
/// Read model returned for a single verdict ledger entry.
/// </summary>
public sealed record VerdictResponse
{
    /// <summary>Ledger entry identifier.</summary>
    public Guid LedgerId { get; init; }

    /// <summary>Package URL or container digest the verdict applies to.</summary>
    public string BomRef { get; init; } = string.Empty;

    /// <summary>Serial number of the associated CycloneDX document.</summary>
    public string? CycloneDxSerial { get; init; }

    /// <summary>UUID of the Rekor transparency-log entry, if published.</summary>
    public string? RekorUuid { get; init; }

    /// <summary>Lower-cased decision string (e.g. "approve").</summary>
    public string Decision { get; init; } = "unknown";

    /// <summary>Reason behind the decision, if recorded.</summary>
    public string? Reason { get; init; }

    /// <summary>Identifier of the policy bundle that produced the decision.</summary>
    public string PolicyBundleId { get; init; } = string.Empty;

    /// <summary>Hash of the policy bundle content.</summary>
    public string PolicyBundleHash { get; init; } = string.Empty;

    /// <summary>Digest of the verifier container image.</summary>
    public string VerifierImageDigest { get; init; } = string.Empty;

    /// <summary>Identifier of the signing key.</summary>
    public string SignerKeyId { get; init; } = string.Empty;

    /// <summary>Hash of the previous entry in the chain (null for the first entry).</summary>
    public string? PrevHash { get; init; }

    /// <summary>SHA-256 hash of this entry.</summary>
    public string VerdictHash { get; init; } = string.Empty;

    /// <summary>Timestamp at which the entry was created.</summary>
    public DateTimeOffset CreatedAt { get; init; }
}

View File

@@ -2,18 +2,38 @@ using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Attestor.Core.Validation;
using Xunit;
using Xunit.Abstractions;
namespace StellaOps.Attestor.Core.Tests.Validation;
public sealed class PredicateSchemaValidatorTests
{
private readonly PredicateSchemaValidator _validator;
private readonly ITestOutputHelper _output;
public PredicateSchemaValidatorTests()
public PredicateSchemaValidatorTests(ITestOutputHelper output)
{
_output = output;
_validator = new PredicateSchemaValidator(NullLogger<PredicateSchemaValidator>.Instance);
}
[Fact]
public void EmbeddedResources_DeltaSchemas_ArePresent()
{
var assembly = typeof(PredicateSchemaValidator).Assembly;
var resourceNames = assembly.GetManifestResourceNames();
_output.WriteLine($"Assembly: {assembly.FullName}");
_output.WriteLine($"Found {resourceNames.Length} resources:");
foreach (var name in resourceNames)
{
_output.WriteLine($" - {name}");
}
Assert.Contains(resourceNames, n => n.Contains("vex-delta"));
Assert.Contains(resourceNames, n => n.Contains("sbom-delta"));
}
[Fact]
public void Validate_MissingSbomSchema_ReturnsSkip()
{

View File

@@ -0,0 +1,291 @@
// -----------------------------------------------------------------------------
// VerificationReportPredicate.cs
// Sprint: SPRINT_20260118_030_Evidence_replay_runner
// Task: TASK-030-001 - Define Verification Report Predicate Type
// Description: DSSE predicate type for signed verification reports
// -----------------------------------------------------------------------------
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.Core.Predicates;
/// <summary>
/// DSSE predicate for verification reports.
/// Predicate type: https://stellaops.dev/attestation/verification-report/v1
/// </summary>
public sealed record VerificationReportPredicate
{
    /// <summary>
    /// Predicate type URI. (A const field is never serialized by
    /// System.Text.Json, so no [JsonIgnore] is needed — the attribute the
    /// original carried here was inert.)
    /// </summary>
    public const string PredicateType = "https://stellaops.dev/attestation/verification-report/v1";

    /// <summary>Unique report ID.</summary>
    [JsonPropertyName("reportId")]
    public required string ReportId { get; init; }

    /// <summary>When the report was generated (UTC).</summary>
    [JsonPropertyName("generatedAt")]
    public required DateTimeOffset GeneratedAt { get; init; }

    /// <summary>Tool that generated the report.</summary>
    [JsonPropertyName("generator")]
    public required GeneratorInfo Generator { get; init; }

    /// <summary>Subject being verified.</summary>
    [JsonPropertyName("subject")]
    public required VerificationSubject Subject { get; init; }

    /// <summary>Verification steps with results.</summary>
    [JsonPropertyName("verificationSteps")]
    public required IReadOnlyList<VerificationStep> VerificationSteps { get; init; }

    /// <summary>Overall verification result.</summary>
    [JsonPropertyName("overallResult")]
    public required OverallVerificationResult OverallResult { get; init; }

    /// <summary>Trust chain information, when available.</summary>
    [JsonPropertyName("trustChain")]
    public TrustChainInfo? TrustChain { get; init; }

    /// <summary>Replay mode used; defaults to "full".</summary>
    [JsonPropertyName("replayMode")]
    public string ReplayMode { get; init; } = "full";
}
/// <summary>
/// Identifies the tool that produced a verification report.
/// </summary>
public sealed record GeneratorInfo
{
    /// <summary>Name of the generating tool.</summary>
    [JsonPropertyName("tool")]
    public required string Tool { get; init; }

    /// <summary>Version of the generating tool.</summary>
    [JsonPropertyName("version")]
    public required string Version { get; init; }

    /// <summary>Optional details about the host that ran the tool.</summary>
    [JsonPropertyName("hostInfo")]
    public HostInfo? HostInfo { get; init; }
}
/// <summary>
/// Describes the machine a verification tool ran on.
/// </summary>
public sealed record HostInfo
{
    /// <summary>Operating system name.</summary>
    [JsonPropertyName("os")]
    public string? Os { get; init; }

    /// <summary>CPU architecture.</summary>
    [JsonPropertyName("arch")]
    public string? Arch { get; init; }

    /// <summary>Machine hostname (redacted in production).</summary>
    [JsonPropertyName("hostname")]
    public string? Hostname { get; init; }
}
/// <summary>
/// Identifies the artifact or evidence bundle a verification report covers.
/// </summary>
public sealed record VerificationSubject
{
    /// <summary>Identifier of the evidence bundle.</summary>
    [JsonPropertyName("bundleId")]
    public string? BundleId { get; init; }

    /// <summary>SHA-256 digest of the evidence bundle.</summary>
    [JsonPropertyName("bundleDigest")]
    public string? BundleDigest { get; init; }

    /// <summary>Digest of the artifact itself.</summary>
    [JsonPropertyName("artifactDigest")]
    public string? ArtifactDigest { get; init; }

    /// <summary>Artifact name or reference.</summary>
    [JsonPropertyName("artifactName")]
    public string? ArtifactName { get; init; }

    /// <summary>Serial number of the associated SBOM.</summary>
    [JsonPropertyName("sbomSerialNumber")]
    public string? SbomSerialNumber { get; init; }
}
/// <summary>
/// One step of the verification procedure together with its outcome.
/// </summary>
public sealed record VerificationStep
{
    /// <summary>Ordinal position of the step.</summary>
    [JsonPropertyName("step")]
    public required int Step { get; init; }

    /// <summary>Human-readable step name.</summary>
    [JsonPropertyName("name")]
    public required string Name { get; init; }

    /// <summary>Outcome of the step.</summary>
    [JsonPropertyName("status")]
    public required VerificationStepStatus Status { get; init; }

    /// <summary>Wall-clock duration of the step, in milliseconds.</summary>
    [JsonPropertyName("durationMs")]
    public long DurationMs { get; init; }

    /// <summary>Free-form details about what was checked.</summary>
    [JsonPropertyName("details")]
    public string? Details { get; init; }

    /// <summary>Issues discovered while executing the step, if any.</summary>
    [JsonPropertyName("issues")]
    public IReadOnlyList<VerificationIssue>? Issues { get; init; }
}
/// <summary>
/// Verification step status.
/// </summary>
/// <remarks>
/// NOTE(review): System.Text.Json's JsonStringEnumConverter does not honor
/// [JsonPropertyName] on enum members, so these values serialize as "Passed",
/// "Failed", etc. — not the lowercase names declared below. If lowercase wire
/// output is required, apply a camel-case naming policy on the converter or
/// use .NET 9's [JsonStringEnumMemberName] — TODO confirm intended format.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum VerificationStepStatus
{
/// <summary>Step passed.</summary>
[JsonPropertyName("passed")]
Passed,
/// <summary>Step failed.</summary>
[JsonPropertyName("failed")]
Failed,
/// <summary>Step passed with warnings.</summary>
[JsonPropertyName("warning")]
Warning,
/// <summary>Step was skipped.</summary>
[JsonPropertyName("skipped")]
Skipped
}
/// <summary>
/// A problem discovered while executing a verification step.
/// </summary>
public sealed record VerificationIssue
{
    /// <summary>How serious the issue is.</summary>
    [JsonPropertyName("severity")]
    public required IssueSeverity Severity { get; init; }

    /// <summary>Machine-readable issue code.</summary>
    [JsonPropertyName("code")]
    public required string Code { get; init; }

    /// <summary>Human-readable description of the issue.</summary>
    [JsonPropertyName("message")]
    public required string Message { get; init; }

    /// <summary>Suggested remediation, when one is known.</summary>
    [JsonPropertyName("remediation")]
    public string? Remediation { get; init; }
}
/// <summary>
/// Issue severity level.
/// </summary>
/// <remarks>
/// NOTE(review): as with VerificationStepStatus, JsonStringEnumConverter
/// ignores [JsonPropertyName] on enum members — these serialize as "Info",
/// "Warning", "Error" rather than lowercase; confirm the intended wire format.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum IssueSeverity
{
/// <summary>Informational.</summary>
[JsonPropertyName("info")]
Info,
/// <summary>Warning.</summary>
[JsonPropertyName("warning")]
Warning,
/// <summary>Error.</summary>
[JsonPropertyName("error")]
Error
}
/// <summary>
/// Aggregate outcome of an entire verification run.
/// </summary>
public sealed record OverallVerificationResult
{
    /// <summary>Combined status across all steps.</summary>
    [JsonPropertyName("status")]
    public required VerificationStepStatus Status { get; init; }

    /// <summary>One-line summary of the run.</summary>
    [JsonPropertyName("summary")]
    public required string Summary { get; init; }

    /// <summary>Total wall-clock verification time, in milliseconds.</summary>
    [JsonPropertyName("totalDurationMs")]
    public long TotalDurationMs { get; init; }

    /// <summary>How many steps passed.</summary>
    [JsonPropertyName("passedSteps")]
    public int PassedSteps { get; init; }

    /// <summary>How many steps failed.</summary>
    [JsonPropertyName("failedSteps")]
    public int FailedSteps { get; init; }

    /// <summary>How many steps completed with warnings.</summary>
    [JsonPropertyName("warningSteps")]
    public int WarningSteps { get; init; }

    /// <summary>How many steps were skipped.</summary>
    [JsonPropertyName("skippedSteps")]
    public int SkippedSteps { get; init; }
}
/// <summary>
/// Trust-chain details backing a verification report.
/// </summary>
public sealed record TrustChainInfo
{
    /// <summary>Description of the root of trust.</summary>
    [JsonPropertyName("rootOfTrust")]
    public string? RootOfTrust { get; init; }

    /// <summary>Whether the Rekor transparency log entry was verified.</summary>
    [JsonPropertyName("rekorVerified")]
    public bool RekorVerified { get; init; }

    /// <summary>Index of the entry in the Rekor log, when known.</summary>
    [JsonPropertyName("rekorLogIndex")]
    public long? RekorLogIndex { get; init; }

    /// <summary>Whether the timestamp authority response was verified.</summary>
    [JsonPropertyName("tsaVerified")]
    public bool TsaVerified { get; init; }

    /// <summary>Timestamp issued by the TSA.</summary>
    [JsonPropertyName("timestamp")]
    public DateTimeOffset? Timestamp { get; init; }

    /// <summary>Identity of the signer.</summary>
    [JsonPropertyName("signerIdentity")]
    public string? SignerIdentity { get; init; }
}

View File

@@ -0,0 +1,335 @@
// -----------------------------------------------------------------------------
// RekorKeyPinRegistry.cs
// Sprint: SPRINT_20260118_030_Attestor_rekor_trust_root_validation
// Task: TRV-002 - Implement Key Pinning Registry
// Description: Key pinning registry for Rekor public key validation
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
namespace StellaOps.Attestor.Core.TrustRoot;
/// <summary>
/// Registry for pinned Rekor public keys.
/// Validates that Rekor keys are trusted before using them for verification.
/// </summary>
public interface IRekorKeyPinRegistry
{
/// <summary>
/// Checks if a public key is trusted for a given Rekor instance: the key must
/// be registered for the (normalized) URL, not revoked, and within its
/// validity window at the current UTC time.
/// </summary>
bool IsKeyTrusted(byte[] publicKey, string rekorUrl);
/// <summary>
/// Gets trusted keys for a Rekor instance, excluding revoked and
/// out-of-validity-window keys.
/// </summary>
IReadOnlyList<TrustedKey> GetTrustedKeys(string rekorUrl);
/// <summary>
/// Adds a trusted key (runtime configuration). Replaces any existing key
/// with the same fingerprint for the same instance.
/// </summary>
void AddTrustedKey(TrustedKey key);
/// <summary>
/// Revokes a key by fingerprint. Revocation applies across all instances
/// and takes precedence over registration.
/// </summary>
void RevokeKey(string fingerprint);
}
/// <summary>
/// Default implementation of <see cref="IRekorKeyPinRegistry"/>.
/// Thread-safe via a reader/writer lock; ships with the public Sigstore keys
/// and accepts additional private-instance keys through <see cref="RekorKeyPinOptions"/>.
/// </summary>
public sealed class RekorKeyPinRegistry : IRekorKeyPinRegistry
{
    /// <summary>
    /// Production Sigstore Rekor public key (ECDSA P-256 SPKI — matches the
    /// KeyType.Ecdsa registration below; the original comment said Ed25519,
    /// which contradicted the code).
    /// Fetched from https://rekor.sigstore.dev/api/v1/log/publicKey
    /// </summary>
    private static readonly byte[] SigstoreRekorPublicKey = Convert.FromBase64String(
        "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwr" +
        "kBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==");

    private static readonly string SigstoreRekorFingerprint = ComputeFingerprint(SigstoreRekorPublicKey);

    // Trusted keys grouped by normalized Rekor URL.
    private readonly Dictionary<string, List<TrustedKey>> _trustedKeys = new();
    private readonly HashSet<string> _revokedFingerprints = new();
    // NOTE(review): ReaderWriterLockSlim is IDisposable but this class is not;
    // fine for an app-lifetime singleton — confirm intended lifetime.
    private readonly ReaderWriterLockSlim _lock = new();

    /// <summary>
    /// Creates a new key pin registry seeded with the default Sigstore keys,
    /// plus any private-instance keys and revocations from <paramref name="options"/>.
    /// </summary>
    public RekorKeyPinRegistry(RekorKeyPinOptions? options = null)
    {
        // Add production Sigstore Rekor key.
        AddBuiltinKey(new TrustedKey
        {
            Fingerprint = SigstoreRekorFingerprint,
            PublicKey = SigstoreRekorPublicKey,
            RekorUrl = "https://rekor.sigstore.dev",
            KeyType = KeyType.Ecdsa,
            Description = "Sigstore Production Rekor",
            ValidFrom = new DateTimeOffset(2022, 1, 1, 0, 0, 0, TimeSpan.Zero),
            ValidUntil = null // No expiration for production key
        });
        // Add staging key.
        // NOTE(review): a placeholder fingerprint with an empty key can never
        // match a computed fingerprint, so this entry is inert until a real
        // staging key is supplied — confirm whether it should be populated.
        AddBuiltinKey(new TrustedKey
        {
            Fingerprint = "staging-placeholder",
            PublicKey = [],
            RekorUrl = "https://rekor.sigstage.dev",
            KeyType = KeyType.Ecdsa,
            Description = "Sigstore Staging Rekor",
            ValidFrom = DateTimeOffset.MinValue,
            ValidUntil = null
        });
        // Add configured private-instance keys.
        if (options?.PrivateRekorKeys != null)
        {
            foreach (var keyConfig in options.PrivateRekorKeys)
            {
                var publicKey = Convert.FromBase64String(keyConfig.PublicKeyBase64);
                AddTrustedKey(new TrustedKey
                {
                    Fingerprint = ComputeFingerprint(publicKey),
                    PublicKey = publicKey,
                    RekorUrl = keyConfig.RekorUrl,
                    KeyType = keyConfig.KeyType,
                    Description = keyConfig.Description,
                    ValidFrom = keyConfig.ValidFrom,
                    ValidUntil = keyConfig.ValidUntil
                });
            }
        }
        // Seed revocations (no lock needed: constructor is single-threaded).
        if (options?.RevokedFingerprints != null)
        {
            foreach (var fp in options.RevokedFingerprints)
            {
                _revokedFingerprints.Add(fp);
            }
        }
    }

    /// <inheritdoc />
    public bool IsKeyTrusted(byte[] publicKey, string rekorUrl)
    {
        ArgumentNullException.ThrowIfNull(publicKey);
        ArgumentException.ThrowIfNullOrEmpty(rekorUrl);
        var fingerprint = ComputeFingerprint(publicKey);
        _lock.EnterReadLock();
        try
        {
            // Revocation takes precedence over any registration.
            if (_revokedFingerprints.Contains(fingerprint))
            {
                return false;
            }
            var normalizedUrl = NormalizeUrl(rekorUrl);
            if (!_trustedKeys.TryGetValue(normalizedUrl, out var keys))
            {
                return false;
            }
            var now = DateTimeOffset.UtcNow;
            return keys.Any(k =>
                k.Fingerprint == fingerprint &&
                k.ValidFrom <= now &&
                (!k.ValidUntil.HasValue || k.ValidUntil.Value > now));
        }
        finally
        {
            _lock.ExitReadLock();
        }
    }

    /// <inheritdoc />
    public IReadOnlyList<TrustedKey> GetTrustedKeys(string rekorUrl)
    {
        ArgumentException.ThrowIfNullOrEmpty(rekorUrl);
        var normalizedUrl = NormalizeUrl(rekorUrl);
        _lock.EnterReadLock();
        try
        {
            if (_trustedKeys.TryGetValue(normalizedUrl, out var keys))
            {
                var now = DateTimeOffset.UtcNow;
                return keys
                    .Where(k => !_revokedFingerprints.Contains(k.Fingerprint) &&
                                k.ValidFrom <= now &&
                                (!k.ValidUntil.HasValue || k.ValidUntil.Value > now))
                    .ToList();
            }
            return [];
        }
        finally
        {
            _lock.ExitReadLock();
        }
    }

    /// <inheritdoc />
    public void AddTrustedKey(TrustedKey key)
    {
        ArgumentNullException.ThrowIfNull(key);
        var normalizedUrl = NormalizeUrl(key.RekorUrl);
        _lock.EnterWriteLock();
        try
        {
            if (!_trustedKeys.TryGetValue(normalizedUrl, out var keys))
            {
                keys = new List<TrustedKey>();
                _trustedKeys[normalizedUrl] = keys;
            }
            // Replace any existing key with the same fingerprint.
            keys.RemoveAll(k => k.Fingerprint == key.Fingerprint);
            keys.Add(key);
        }
        finally
        {
            _lock.ExitWriteLock();
        }
    }

    /// <inheritdoc />
    public void RevokeKey(string fingerprint)
    {
        ArgumentException.ThrowIfNullOrEmpty(fingerprint);
        _lock.EnterWriteLock();
        try
        {
            _revokedFingerprints.Add(fingerprint);
        }
        finally
        {
            _lock.ExitWriteLock();
        }
    }

    // Constructor-only helper: no locking, no fingerprint de-duplication.
    private void AddBuiltinKey(TrustedKey key)
    {
        var normalizedUrl = NormalizeUrl(key.RekorUrl);
        if (!_trustedKeys.TryGetValue(normalizedUrl, out var keys))
        {
            keys = new List<TrustedKey>();
            _trustedKeys[normalizedUrl] = keys;
        }
        keys.Add(key);
    }

    // Remove trailing slashes and lowercase so lookups are scheme/host-case insensitive.
    private static string NormalizeUrl(string url)
    {
        return url.TrimEnd('/').ToLowerInvariant();
    }

    /// <summary>
    /// Computes the lowercase-hex SHA-256 fingerprint of the DER-encoded SPKI
    /// (Subject Public Key Info). Uses the one-shot SHA256.HashData instead of
    /// allocating and disposing a hasher instance.
    /// </summary>
    public static string ComputeFingerprint(byte[] publicKey)
    {
        ArgumentNullException.ThrowIfNull(publicKey);
        return Convert.ToHexString(SHA256.HashData(publicKey)).ToLowerInvariant();
    }
}
/// <summary>
/// A single pinned Rekor key together with its validity window.
/// </summary>
public sealed record TrustedKey
{
    /// <summary>Lowercase-hex SHA-256 fingerprint of the DER-encoded SPKI.</summary>
    public required string Fingerprint { get; init; }

    /// <summary>DER-encoded public key bytes.</summary>
    public required byte[] PublicKey { get; init; }

    /// <summary>URL of the Rekor instance this key belongs to.</summary>
    public required string RekorUrl { get; init; }

    /// <summary>Key algorithm; ECDSA by default.</summary>
    public KeyType KeyType { get; init; } = KeyType.Ecdsa;

    /// <summary>Optional human-readable description.</summary>
    public string? Description { get; init; }

    /// <summary>Start of the key's validity window.</summary>
    public DateTimeOffset ValidFrom { get; init; }

    /// <summary>End of the key's validity window; null means no expiration.</summary>
    public DateTimeOffset? ValidUntil { get; init; }
}
/// <summary>
/// Signature algorithm of a pinned key.
/// </summary>
public enum KeyType
{
    /// <summary>ECDSA over P-256 or P-384.</summary>
    Ecdsa,

    /// <summary>EdDSA over Curve25519 (Ed25519).</summary>
    Ed25519,

    /// <summary>RSA.</summary>
    Rsa
}
/// <summary>
/// Bindable configuration for Rekor key pinning.
/// </summary>
public sealed record RekorKeyPinOptions
{
    /// <summary>Name of the configuration section this record binds to.</summary>
    public const string SectionName = "Attestor:RekorKeyPinning";

    /// <summary>Keys for private (non-Sigstore) Rekor instances.</summary>
    public IReadOnlyList<PrivateRekorKeyConfig>? PrivateRekorKeys { get; init; }

    /// <summary>Fingerprints of keys that must never be trusted.</summary>
    public IReadOnlyList<string>? RevokedFingerprints { get; init; }
}
/// <summary>
/// Configuration entry describing one private Rekor instance key.
/// </summary>
public sealed record PrivateRekorKeyConfig
{
    /// <summary>URL of the private Rekor instance.</summary>
    public required string RekorUrl { get; init; }

    /// <summary>Public key, base64-encoded DER.</summary>
    public required string PublicKeyBase64 { get; init; }

    /// <summary>Key algorithm; ECDSA by default.</summary>
    public KeyType KeyType { get; init; } = KeyType.Ecdsa;

    /// <summary>Optional human-readable description.</summary>
    public string? Description { get; init; }

    /// <summary>Start of the validity window; unbounded by default.</summary>
    public DateTimeOffset ValidFrom { get; init; } = DateTimeOffset.MinValue;

    /// <summary>End of the validity window; null means no expiration.</summary>
    public DateTimeOffset? ValidUntil { get; init; }
}

View File

@@ -0,0 +1,399 @@
// -----------------------------------------------------------------------------
// VerdictRekorPublisher.cs
// Sprint: SPRINT_20260118_016_Attestor_rekor_publishing_path
// Task: RP-003 - Create VerdictRekorPublisher service
// Description: Orchestrates verdict publishing to Rekor transparency log
// -----------------------------------------------------------------------------
using System.Threading.Channels;
namespace StellaOps.Attestor.Rekor;
/// <summary>
/// Orchestrates verdict publishing to Rekor transparency log.
/// Handles signing, submission, and proof verification; deferred or
/// circuit-broken requests are parked on a bounded in-memory channel.
/// </summary>
public sealed class VerdictRekorPublisher : IVerdictRekorPublisher
{
    private readonly IRekorClient _rekorClient;
    private readonly ISignerClient? _signerClient;          // optional: envelopes are unsigned without it
    private readonly IVerdictLedgerService? _ledgerService; // optional: ledger back-reference is skipped without it
    private readonly VerdictRekorPublisherOptions _options;
    private readonly Channel<VerdictPublishRequest> _publishQueue;

    /// <summary>
    /// Creates a new verdict Rekor publisher.
    /// </summary>
    /// <param name="rekorClient">Required transport to the Rekor log.</param>
    /// <param name="signerClient">Optional DSSE signer.</param>
    /// <param name="ledgerService">Optional verdict ledger to annotate with Rekor UUIDs.</param>
    /// <param name="options">Publisher options; defaults apply when null.</param>
    public VerdictRekorPublisher(
        IRekorClient rekorClient,
        ISignerClient? signerClient = null,
        IVerdictLedgerService? ledgerService = null,
        VerdictRekorPublisherOptions? options = null)
    {
        _rekorClient = rekorClient ?? throw new ArgumentNullException(nameof(rekorClient));
        _signerClient = signerClient;
        _ledgerService = ledgerService;
        _options = options ?? new VerdictRekorPublisherOptions();
        _publishQueue = Channel.CreateBounded<VerdictPublishRequest>(
            new BoundedChannelOptions(_options.QueueCapacity)
            {
                // Writers wait when the queue is full rather than dropping requests.
                FullMode = BoundedChannelFullMode.Wait
            });
    }

    /// <inheritdoc />
    public async Task<VerdictPublishResult> PublishAsync(
        VerdictPublishRequest request,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        try
        {
            // 1. Build (and optionally sign) the DSSE envelope.
            var envelope = await BuildEnvelopeAsync(request, ct);
            // 2. Submit to Rekor.
            var submission = await _rekorClient.SubmitAsync(envelope, ct);
            // 3. Optionally verify the inclusion proof right away.
            if (submission.InclusionProof != null && _options.VerifyImmediately)
            {
                var verified = await _rekorClient.VerifyInclusionAsync(
                    submission.LogIndex,
                    submission.InclusionProof,
                    ct);
                if (!verified)
                {
                    return VerdictPublishResult.Failed(
                        "Inclusion proof verification failed",
                        submission.Uuid);
                }
            }
            // 4. Record the Rekor UUID on the originating ledger entry when possible.
            // TryParse (not Guid.Parse) so a malformed ledger id cannot throw
            // AFTER a successful submission and report the publish as Failed,
            // losing the Rekor UUID.
            if (_ledgerService != null && Guid.TryParse(request.VerdictLedgerId, out var ledgerId))
            {
                await UpdateLedgerWithRekorUuidAsync(ledgerId, submission.Uuid, ct);
            }
            return VerdictPublishResult.Success(
                submission.Uuid,
                submission.LogIndex,
                submission.IntegratedTime);
        }
        catch (OperationCanceledException)
        {
            // Cancellation is not a publish failure; propagate it so callers
            // can distinguish shutdown from a genuine Rekor error.
            throw;
        }
        catch (RekorCircuitOpenException ex)
        {
            // Circuit breaker open: optionally park the request for retry.
            if (_options.QueueOnCircuitOpen)
            {
                await _publishQueue.Writer.WriteAsync(request, ct);
                return VerdictPublishResult.Queued(ex.Message);
            }
            return VerdictPublishResult.Failed(ex.Message);
        }
        catch (Exception ex)
        {
            // Deliberate catch-all: publishing is best-effort; the caller gets
            // the failure as a result rather than an exception.
            return VerdictPublishResult.Failed(ex.Message);
        }
    }

    /// <inheritdoc />
    public async Task<VerdictPublishResult> PublishDeferredAsync(
        VerdictPublishRequest request,
        CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        await _publishQueue.Writer.WriteAsync(request, ct);
        return VerdictPublishResult.Queued("Deferred for background processing");
    }

    /// <inheritdoc />
    public IAsyncEnumerable<VerdictPublishRequest> GetPendingAsync(CancellationToken ct = default)
    {
        return _publishQueue.Reader.ReadAllAsync(ct);
    }

    /// <summary>
    /// Builds the DSSE envelope for a verdict. Signs the payload when a signer
    /// is configured; otherwise the envelope carries an empty signature list.
    /// </summary>
    private async Task<DsseEnvelope> BuildEnvelopeAsync(
        VerdictPublishRequest request,
        CancellationToken ct)
    {
        var payload = new VerdictPayload
        {
            VerdictHash = request.VerdictHash,
            Decision = request.Decision,
            BomRef = request.BomRef,
            PolicyBundleHash = request.PolicyBundleHash,
            Timestamp = request.Timestamp ?? DateTimeOffset.UtcNow
        };
        var payloadBytes = System.Text.Json.JsonSerializer.SerializeToUtf8Bytes(payload);
        var payloadBase64 = Convert.ToBase64String(payloadBytes);
        byte[]? signature = null;
        string? keyId = null;
        if (_signerClient != null)
        {
            var signResult = await _signerClient.SignAsync(payloadBytes, ct);
            signature = signResult.Signature;
            keyId = signResult.KeyId;
        }
        return new DsseEnvelope
        {
            PayloadType = "application/vnd.stellaops.verdict+json",
            Payload = payloadBase64,
            Signatures = signature != null
                ? [new DsseSignature { KeyId = keyId, Sig = Convert.ToBase64String(signature) }]
                : []
        };
    }

    /// <summary>
    /// Writes the Rekor UUID back onto the ledger entry.
    /// NOTE(review): still a stub — the ledger service interface exposes no
    /// suitable update operation yet; confirm and implement.
    /// </summary>
    private async Task UpdateLedgerWithRekorUuidAsync(
        Guid ledgerId,
        string rekorUuid,
        CancellationToken ct)
    {
        await Task.CompletedTask;
    }
}
/// <summary>
/// Interface for verdict Rekor publishing.
/// </summary>
public interface IVerdictRekorPublisher
{
/// <summary>
/// Publishes a verdict to Rekor immediately. Failures are reported through
/// the returned result rather than exceptions.
/// </summary>
Task<VerdictPublishResult> PublishAsync(VerdictPublishRequest request, CancellationToken ct = default);
/// <summary>
/// Queues a verdict for deferred publishing by a background consumer of
/// <see cref="GetPendingAsync"/>.
/// </summary>
Task<VerdictPublishResult> PublishDeferredAsync(VerdictPublishRequest request, CancellationToken ct = default);
/// <summary>
/// Gets pending publish requests as an async stream (consumes the queue).
/// </summary>
IAsyncEnumerable<VerdictPublishRequest> GetPendingAsync(CancellationToken ct = default);
}
/// <summary>
/// Input describing one verdict to be published to the Rekor log.
/// </summary>
public sealed record VerdictPublishRequest
{
    /// <summary>Optional id of the originating verdict ledger entry.</summary>
    public string? VerdictLedgerId { get; init; }

    /// <summary>Hash of the verdict being published.</summary>
    public required string VerdictHash { get; init; }

    /// <summary>Decision string (e.g. "approve").</summary>
    public required string Decision { get; init; }

    /// <summary>Package URL or container digest the verdict applies to.</summary>
    public required string BomRef { get; init; }

    /// <summary>Hash of the policy bundle that produced the decision.</summary>
    public required string PolicyBundleHash { get; init; }

    /// <summary>Verdict timestamp; the publisher substitutes the current UTC time when null.</summary>
    public DateTimeOffset? Timestamp { get; init; }
}
/// <summary>
/// Outcome of an attempt to publish a verdict to Rekor.
/// </summary>
public sealed record VerdictPublishResult
{
    /// <summary>How the publish attempt ended.</summary>
    public required VerdictPublishStatus Status { get; init; }

    /// <summary>Rekor entry UUID, set when publishing succeeded (and on some failures).</summary>
    public string? RekorUuid { get; init; }

    /// <summary>Index of the entry in the Rekor log, set on success.</summary>
    public long? LogIndex { get; init; }

    /// <summary>Time the entry was integrated into the log, set on success.</summary>
    public DateTimeOffset? IntegratedTime { get; init; }

    /// <summary>Error text on failure; also carries the informational message for queued results.</summary>
    public string? ErrorMessage { get; init; }

    /// <summary>Builds a result for a successful Rekor submission.</summary>
    public static VerdictPublishResult Success(string rekorUuid, long logIndex, DateTimeOffset integratedTime) => new()
    {
        Status = VerdictPublishStatus.Published,
        RekorUuid = rekorUuid,
        LogIndex = logIndex,
        IntegratedTime = integratedTime
    };

    /// <summary>Builds a result for a request parked for later processing.</summary>
    public static VerdictPublishResult Queued(string message) => new()
    {
        Status = VerdictPublishStatus.Queued,
        ErrorMessage = message
    };

    /// <summary>Builds a result for a failed publish attempt.</summary>
    public static VerdictPublishResult Failed(string message, string? rekorUuid = null) => new()
    {
        Status = VerdictPublishStatus.Failed,
        RekorUuid = rekorUuid,
        ErrorMessage = message
    };
}
/// <summary>
/// Status of a Rekor publish attempt.
/// </summary>
public enum VerdictPublishStatus
{
    /// <summary>Successfully published to Rekor.</summary>
    Published,
    /// <summary>Queued for later publishing.</summary>
    Queued,
    /// <summary>Publishing failed and was not queued.</summary>
    Failed
}
/// <summary>
/// Options for verdict Rekor publisher.
/// </summary>
public sealed record VerdictRekorPublisherOptions
{
    /// <summary>Maximum number of deferred submissions held in the queue. Default: 1000.</summary>
    public int QueueCapacity { get; init; } = 1000;
    /// <summary>Whether to verify inclusion immediately after submission. Default: true.</summary>
    public bool VerifyImmediately { get; init; } = true;
    /// <summary>Whether to queue submissions when the Rekor circuit breaker is open. Default: true.</summary>
    public bool QueueOnCircuitOpen { get; init; } = true;
}
/// <summary>
/// Exception thrown when the Rekor circuit breaker is open and calls are rejected.
/// </summary>
public sealed class RekorCircuitOpenException : Exception
{
    /// <summary>Creates a new exception with the given message.</summary>
    public RekorCircuitOpenException(string message) : base(message) { }

    /// <summary>
    /// Creates a new exception with the given message and the underlying cause
    /// (e.g. the transient failure that tripped the breaker).
    /// </summary>
    public RekorCircuitOpenException(string message, Exception innerException)
        : base(message, innerException) { }
}
// Supporting types
/// <summary>DSSE (Dead Simple Signing Envelope) carrying a signed payload.</summary>
public sealed record DsseEnvelope
{
    /// <summary>Payload media type (the envelope's payloadType field).</summary>
    public required string PayloadType { get; init; }
    /// <summary>Base64-encoded payload.</summary>
    public required string Payload { get; init; }
    /// <summary>Signatures over the payload; empty when unsigned.</summary>
    public IReadOnlyList<DsseSignature> Signatures { get; init; } = [];
}
/// <summary>A single signature within a DSSE envelope.</summary>
public sealed record DsseSignature
{
    /// <summary>Identifier of the signing key; optional.</summary>
    public string? KeyId { get; init; }
    /// <summary>Base64-encoded signature.</summary>
    public required string Sig { get; init; }
}
/// <summary>Verdict payload for Rekor submission.</summary>
public sealed record VerdictPayload
{
    /// <summary>Hash of the verdict ledger entry.</summary>
    public required string VerdictHash { get; init; }
    /// <summary>Decision value (e.g. approve/reject).</summary>
    public required string Decision { get; init; }
    /// <summary>Package URL or container digest reference.</summary>
    public required string BomRef { get; init; }
    /// <summary>Hash of the policy bundle used for the decision.</summary>
    public required string PolicyBundleHash { get; init; }
    /// <summary>When the verdict was made.</summary>
    public required DateTimeOffset Timestamp { get; init; }
}
/// <summary>Client for the Rekor transparency log API.</summary>
public interface IRekorClient
{
    /// <summary>Submits a DSSE envelope to Rekor for inclusion in the log.</summary>
    Task<RekorSubmissionResult> SubmitAsync(DsseEnvelope envelope, CancellationToken ct = default);
    /// <summary>
    /// Verifies an inclusion proof for a log entry.
    /// NOTE(review): <c>proof</c> is untyped — presumably the InclusionProof from
    /// <see cref="RekorSubmissionResult"/>; confirm and consider a typed model.
    /// </summary>
    Task<bool> VerifyInclusionAsync(long logIndex, object proof, CancellationToken ct = default);
}
/// <summary>Result of a successful Rekor submission.</summary>
public sealed record RekorSubmissionResult
{
    /// <summary>Rekor entry UUID.</summary>
    public required string Uuid { get; init; }
    /// <summary>Index of the entry in the transparency log.</summary>
    public required long LogIndex { get; init; }
    /// <summary>Time the entry was integrated into the log.</summary>
    public required DateTimeOffset IntegratedTime { get; init; }
    /// <summary>Inclusion proof returned by Rekor, if any (untyped).</summary>
    public object? InclusionProof { get; init; }
}
/// <summary>Client for the signing service.</summary>
public interface ISignerClient
{
    /// <summary>Signs the given raw bytes and returns the signature plus the key ID used.</summary>
    Task<SignResult> SignAsync(byte[] data, CancellationToken ct = default);
}
/// <summary>Result of a signing operation.</summary>
public sealed record SignResult
{
    /// <summary>Raw signature bytes.</summary>
    public required byte[] Signature { get; init; }
    /// <summary>Identifier of the key that produced the signature.</summary>
    public required string KeyId { get; init; }
}
/// <summary>
/// Verdict ledger service marker.
/// NOTE(review): the member definitions live in the VerdictLedger module; this
/// empty declaration exists only so this assembly can reference the service type.
/// </summary>
public interface IVerdictLedgerService
{
    // Interface defined in VerdictLedger module
}

View File

@@ -0,0 +1,233 @@
// -----------------------------------------------------------------------------
// RekorCircuitBreakerPolicy.cs
// Sprint: SPRINT_20260118_016_Attestor_rekor_publishing_path
// Task: RP-004 - Add circuit breaker for Rekor availability
// Description: Polly-based circuit breaker for Rekor API calls
// -----------------------------------------------------------------------------
namespace StellaOps.Attestor.Infrastructure.Resilience;
/// <summary>
/// Circuit breaker policy for Rekor API calls.
/// Uses Polly patterns for HTTP resilience.
/// States: Closed (normal flow) -> Open (rejecting) after
/// <see cref="RekorCircuitBreakerOptions.FailureThreshold"/> consecutive transient failures ->
/// HalfOpen (probing) after <see cref="RekorCircuitBreakerOptions.BreakDuration"/> elapses ->
/// Closed again after <see cref="RekorCircuitBreakerOptions.SuccessThreshold"/> consecutive successes.
/// </summary>
public sealed class RekorCircuitBreakerPolicy
{
    private readonly RekorCircuitBreakerOptions _options;
    private CircuitState _state = CircuitState.Closed;
    private int _failureCount;            // consecutive transient failures
    private int _halfOpenSuccessCount;    // consecutive probe successes while half-open
    private DateTimeOffset _lastFailure;  // time of most recent failure (diagnostics)
    private DateTimeOffset _circuitOpenedAt;
    private readonly object _lock = new();
    /// <summary>
    /// Current circuit state. Reading the state also promotes Open -> HalfOpen
    /// once the break duration has elapsed.
    /// </summary>
    public CircuitState State
    {
        get
        {
            lock (_lock)
            {
                UpdateState();
                return _state;
            }
        }
    }
    /// <summary>
    /// Creates a new circuit breaker policy.
    /// </summary>
    /// <param name="options">Breaker thresholds; defaults apply when null.</param>
    public RekorCircuitBreakerPolicy(RekorCircuitBreakerOptions? options = null)
    {
        _options = options ?? new RekorCircuitBreakerOptions();
    }
    /// <summary>
    /// Executes an action with circuit breaker protection.
    /// </summary>
    /// <exception cref="RekorCircuitOpenException">Thrown when the circuit is open.</exception>
    public async Task<T> ExecuteAsync<T>(
        Func<CancellationToken, Task<T>> action,
        CancellationToken ct = default)
    {
        lock (_lock)
        {
            UpdateState();
            if (_state == CircuitState.Open)
            {
                throw new RekorCircuitOpenException(
                    $"Circuit is open. Retry after {_circuitOpenedAt.Add(_options.BreakDuration) - DateTimeOffset.UtcNow}");
            }
        }
        try
        {
            var result = await action(ct);
            OnSuccess();
            return result;
        }
        catch (Exception ex) when (IsTransientException(ex))
        {
            // Only transient (network/timeout) failures count toward opening the
            // circuit; any other exception passes through without affecting state.
            OnFailure();
            throw;
        }
    }
    /// <summary>
    /// Records a successful call. While half-open, the circuit closes only after
    /// <see cref="RekorCircuitBreakerOptions.SuccessThreshold"/> consecutive successes.
    /// </summary>
    public void OnSuccess()
    {
        lock (_lock)
        {
            _failureCount = 0;
            if (_state == CircuitState.HalfOpen)
            {
                _halfOpenSuccessCount++;
                if (_halfOpenSuccessCount >= _options.SuccessThreshold)
                {
                    _state = CircuitState.Closed;
                    _halfOpenSuccessCount = 0;
                }
            }
        }
    }
    /// <summary>
    /// Records a failed call. A failure while half-open reopens the circuit
    /// immediately; while closed, the failure threshold must be crossed first.
    /// </summary>
    public void OnFailure()
    {
        lock (_lock)
        {
            _failureCount++;
            _lastFailure = DateTimeOffset.UtcNow;
            if (_state == CircuitState.HalfOpen || _failureCount >= _options.FailureThreshold)
            {
                _state = CircuitState.Open;
                _circuitOpenedAt = DateTimeOffset.UtcNow;
                _halfOpenSuccessCount = 0;
            }
        }
    }
    /// <summary>
    /// Manually resets the circuit breaker to the closed state.
    /// </summary>
    public void Reset()
    {
        lock (_lock)
        {
            _state = CircuitState.Closed;
            _failureCount = 0;
            _halfOpenSuccessCount = 0;
        }
    }
    // Promotes Open -> HalfOpen once the break duration has elapsed.
    // Must be called while holding _lock.
    private void UpdateState()
    {
        if (_state == CircuitState.Open)
        {
            var elapsed = DateTimeOffset.UtcNow - _circuitOpenedAt;
            if (elapsed >= _options.BreakDuration)
            {
                _state = CircuitState.HalfOpen;
                _halfOpenSuccessCount = 0;
            }
        }
    }
    // Treats network, cancellation/timeout, and aggregate-wrapped variants as transient.
    private static bool IsTransientException(Exception ex)
    {
        return ex is HttpRequestException ||
               ex is TaskCanceledException ||
               ex is TimeoutException ||
               (ex is AggregateException ae && ae.InnerExceptions.Any(IsTransientException));
    }
}
/// <summary>
/// Circuit breaker state.
/// </summary>
public enum CircuitState
{
    /// <summary>Circuit is closed, requests flow normally.</summary>
    Closed,
    /// <summary>Circuit is open, requests are rejected until the break duration elapses.</summary>
    Open,
    /// <summary>Circuit is half-open, allowing probe requests to test recovery.</summary>
    HalfOpen
}
/// <summary>
/// Options for Rekor circuit breaker.
/// </summary>
public sealed record RekorCircuitBreakerOptions
{
    /// <summary>
    /// Number of consecutive failures before opening the circuit.
    /// Default: 5.
    /// </summary>
    public int FailureThreshold { get; init; } = 5;
    /// <summary>
    /// Duration the circuit stays open before transitioning to half-open.
    /// Default: 30 seconds.
    /// </summary>
    public TimeSpan BreakDuration { get; init; } = TimeSpan.FromSeconds(30);
    /// <summary>
    /// Number of successful calls in half-open state before closing.
    /// Default: 2.
    /// </summary>
    public int SuccessThreshold { get; init; } = 2;
    /// <summary>
    /// Timeout for individual requests.
    /// Default: 10 seconds.
    /// NOTE(review): not consumed by RekorCircuitBreakerPolicy itself — confirm
    /// it is applied where the HttpClient is configured.
    /// </summary>
    public TimeSpan RequestTimeout { get; init; } = TimeSpan.FromSeconds(10);
}
/// <summary>
/// Polly-compatible circuit breaker handler for HttpClient.
/// Surfaces 5xx responses as <see cref="HttpRequestException"/> so the breaker
/// counts them as transient failures.
/// </summary>
public sealed class RekorCircuitBreakerHandler : DelegatingHandler
{
    private readonly RekorCircuitBreakerPolicy _policy;
    /// <summary>
    /// Creates a new circuit breaker handler.
    /// </summary>
    /// <param name="policy">Shared breaker policy guarding the Rekor endpoint.</param>
    public RekorCircuitBreakerHandler(RekorCircuitBreakerPolicy policy)
    {
        _policy = policy ?? throw new ArgumentNullException(nameof(policy));
    }
    /// <inheritdoc />
    /// <exception cref="RekorCircuitOpenException">Thrown when the circuit is open.</exception>
    /// <exception cref="HttpRequestException">Thrown for 5xx server responses.</exception>
    protected override async Task<HttpResponseMessage> SendAsync(
        HttpRequestMessage request,
        CancellationToken ct)
    {
        return await _policy.ExecuteAsync(async token =>
        {
            var response = await base.SendAsync(request, token);
            // >= 500 already implies a non-success status code.
            if ((int)response.StatusCode >= 500)
            {
                // Dispose before throwing so the connection/body is not leaked.
                var statusCode = response.StatusCode;
                response.Dispose();
                throw new HttpRequestException(
                    $"Server error: {statusCode}");
            }
            return response;
        }, ct);
    }
}

View File

@@ -0,0 +1,446 @@
// -----------------------------------------------------------------------------
// TsaMultiProvider.cs
// Sprint: SPRINT_20260118_028_Attestor_rfc3161_tsa_client
// Tasks: TASK-028-001, TASK-028-002
// Description: Multi-provider RFC 3161 TSA client with fallback chain
// -----------------------------------------------------------------------------
namespace StellaOps.Attestor.Infrastructure.Timestamping;
/// <summary>
/// Multi-provider RFC 3161 Timestamp Authority client with fallback chain support.
/// </summary>
public interface IMultiProviderTsaClient
{
    /// <summary>
    /// Requests a timestamp token using the configured provider chain, falling
    /// back to the next provider on failure.
    /// </summary>
    /// <param name="data">Bytes to be timestamped.</param>
    /// <param name="options">Per-request options; optional.</param>
    /// <param name="ct">Cancellation token.</param>
    Task<TsaTimestampResult> TimestampAsync(byte[] data, TsaTimestampOptions? options = null, CancellationToken ct = default);
    /// <summary>
    /// Requests a timestamp token from a specific named provider (no fallback).
    /// </summary>
    Task<TsaTimestampResult> TimestampWithProviderAsync(string providerName, byte[] data, CancellationToken ct = default);
    /// <summary>
    /// Gets the names of all configured providers.
    /// </summary>
    IReadOnlyList<string> GetProviderNames();
}
/// <summary>
/// Default implementation of multi-provider TSA client.
/// Walks the configured fallback chain until one provider returns a valid token.
/// </summary>
public sealed class MultiProviderTsaClient : IMultiProviderTsaClient
{
    private readonly TsaMultiProviderOptions _options;
    private readonly HttpClient _httpClient;
    private readonly TimeProvider _timeProvider;
    /// <summary>
    /// Creates a new multi-provider TSA client.
    /// </summary>
    /// <param name="options">Provider chain configuration.</param>
    /// <param name="httpClient">HTTP client used for TSA requests (shared, not owned).</param>
    /// <param name="timeProvider">Clock abstraction; defaults to the system clock.</param>
    public MultiProviderTsaClient(
        TsaMultiProviderOptions options,
        HttpClient httpClient,
        TimeProvider? timeProvider = null)
    {
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }
    /// <inheritdoc />
    public async Task<TsaTimestampResult> TimestampAsync(
        byte[] data,
        TsaTimestampOptions? options = null,
        CancellationToken ct = default)
    {
        if (!_options.Enabled)
        {
            return TsaTimestampResult.Disabled();
        }
        // NOTE(review): per-request options (e.g. PreferredProvider) are not yet
        // consulted here — the configured fallback order is always used.
        var providerOrder = GetProviderOrder();
        var errors = new List<TsaProviderError>();
        foreach (var providerName in providerOrder)
        {
            ct.ThrowIfCancellationRequested();
            // Names in the fallback order without a matching config are skipped.
            if (!_options.Providers.TryGetValue(providerName, out var config))
            {
                continue;
            }
            var result = await TryTimestampAsync(providerName, config, data, ct);
            if (result.Success)
            {
                return result;
            }
            errors.Add(new TsaProviderError
            {
                ProviderName = providerName,
                Error = result.ErrorMessage ?? "Unknown error"
            });
        }
        // Every provider failed: hard failure only when a timestamp is mandatory.
        if (_options.RequireTimestamp)
        {
            return TsaTimestampResult.Failed(
                "All TSA providers failed",
                errors);
        }
        return TsaTimestampResult.Skipped("Timestamp not required and all providers failed", errors);
    }
    /// <inheritdoc />
    public async Task<TsaTimestampResult> TimestampWithProviderAsync(
        string providerName,
        byte[] data,
        CancellationToken ct = default)
    {
        if (!_options.Providers.TryGetValue(providerName, out var config))
        {
            return TsaTimestampResult.Failed($"Provider '{providerName}' not configured");
        }
        return await TryTimestampAsync(providerName, config, data, ct);
    }
    /// <inheritdoc />
    public IReadOnlyList<string> GetProviderNames()
    {
        return _options.Providers.Keys.ToList();
    }
    // Resolves the provider try-order: explicit FallbackOrder wins; otherwise the
    // default provider (when configured) followed by all remaining providers.
    private string[] GetProviderOrder()
    {
        if (_options.FallbackOrder.Length > 0)
        {
            return _options.FallbackOrder;
        }
        var order = new List<string>();
        if (!string.IsNullOrEmpty(_options.DefaultProvider) &&
            _options.Providers.ContainsKey(_options.DefaultProvider))
        {
            order.Add(_options.DefaultProvider);
        }
        order.AddRange(_options.Providers.Keys.Where(k => k != _options.DefaultProvider));
        return order.ToArray();
    }
    // Attempts a single provider; all provider-level failures are converted into
    // a failed result so the caller can fall through to the next provider.
    private async Task<TsaTimestampResult> TryTimestampAsync(
        string providerName,
        TsaProviderConfig config,
        byte[] data,
        CancellationToken ct)
    {
        try
        {
            // Generate timestamp request
            var request = BuildTimestampRequest(data, config);
            using var cts = CancellationTokenSource.CreateLinkedTokenSource(ct);
            cts.CancelAfter(TimeSpan.FromSeconds(config.TimeoutSeconds));
            // Send request
            using var httpRequest = new HttpRequestMessage(HttpMethod.Post, config.Url)
            {
                Content = new ByteArrayContent(request)
            };
            httpRequest.Content.Headers.ContentType = new("application/timestamp-query");
            using var response = await _httpClient.SendAsync(httpRequest, cts.Token);
            if (!response.IsSuccessStatusCode)
            {
                return TsaTimestampResult.Failed(
                    $"TSA returned {response.StatusCode}",
                    providerName: providerName);
            }
            var responseBytes = await response.Content.ReadAsByteArrayAsync(cts.Token);
            // Parse and validate response
            var parsedResponse = ParseTimestampResponse(responseBytes);
            if (!parsedResponse.Success)
            {
                return TsaTimestampResult.Failed(
                    parsedResponse.ErrorMessage ?? "Invalid response",
                    providerName: providerName);
            }
            return TsaTimestampResult.Succeeded(
                providerName,
                responseBytes,
                parsedResponse.Timestamp!.Value,
                parsedResponse.SerialNumber);
        }
        catch (OperationCanceledException) when (ct.IsCancellationRequested)
        {
            // Caller-requested cancellation must propagate, not be misreported
            // as a provider timeout.
            throw;
        }
        catch (OperationCanceledException)
        {
            // Only the linked per-provider timeout fired.
            return TsaTimestampResult.Failed(
                $"Request timed out after {config.TimeoutSeconds}s",
                providerName: providerName);
        }
        catch (Exception ex)
        {
            return TsaTimestampResult.Failed(
                ex.Message,
                providerName: providerName);
        }
    }
    // Builds an RFC 3161 TimeStampRequest.
    // NOTE(review): placeholder — emits only the SHA-256 message imprint; a real
    // implementation must build the full ASN.1 TimeStampReq (e.g. via BouncyCastle)
    // and honor config.PolicyOid.
    private static byte[] BuildTimestampRequest(byte[] data, TsaProviderConfig config)
    {
        return System.Security.Cryptography.SHA256.HashData(data);
    }
    // Parses an RFC 3161 TimeStampResponse.
    // NOTE(review): placeholder — does not decode the ASN.1 structure; it stamps
    // the current wall-clock time and uses the first 16 response bytes as a
    // pseudo serial number. Replace with real TST parsing before production use.
    private static ParsedTsaResponse ParseTimestampResponse(byte[] response)
    {
        if (response.Length < 16)
        {
            return new ParsedTsaResponse
            {
                Success = false,
                ErrorMessage = "Failed to parse response"
            };
        }
        try
        {
            return new ParsedTsaResponse
            {
                Success = true,
                Timestamp = DateTimeOffset.UtcNow,
                SerialNumber = Convert.ToHexString(response[..16])
            };
        }
        catch
        {
            return new ParsedTsaResponse
            {
                Success = false,
                ErrorMessage = "Failed to parse response"
            };
        }
    }
    // Internal carrier for the parsed TSA response fields.
    private sealed record ParsedTsaResponse
    {
        public bool Success { get; init; }
        public DateTimeOffset? Timestamp { get; init; }
        public string? SerialNumber { get; init; }
        public string? ErrorMessage { get; init; }
    }
}
/// <summary>
/// Multi-provider TSA configuration.
/// </summary>
public sealed record TsaMultiProviderOptions
{
    /// <summary>
    /// Configuration section name.
    /// </summary>
    public const string SectionName = "Timestamping";
    /// <summary>Whether timestamping is enabled. Default: true.</summary>
    public bool Enabled { get; init; } = true;
    /// <summary>Name of the provider tried first when no explicit fallback order is set.</summary>
    public string DefaultProvider { get; init; } = "freetsa";
    /// <summary>Provider configurations keyed by provider name.</summary>
    public Dictionary<string, TsaProviderConfig> Providers { get; init; } = new();
    /// <summary>Explicit fallback order for providers; empty means default-then-others.</summary>
    public string[] FallbackOrder { get; init; } = [];
    /// <summary>Whether a timestamp is required (fail the request if all providers fail).</summary>
    public bool RequireTimestamp { get; init; } = false;
}
/// <summary>
/// Per-provider TSA configuration.
/// </summary>
public sealed record TsaProviderConfig
{
    /// <summary>TSA endpoint URL.</summary>
    public required string Url { get; init; }
    /// <summary>Optional RFC 3161 policy OID to request.</summary>
    public string? PolicyOid { get; init; }
    /// <summary>Request timeout in seconds. Default: 30.</summary>
    public int TimeoutSeconds { get; init; } = 30;
    /// <summary>Path to the trust root certificate for validating TSA responses.</summary>
    public string? TrustRootPath { get; init; }
    /// <summary>Authentication configuration; null means unauthenticated.</summary>
    public TsaAuthenticationConfig? Authentication { get; init; }
}
/// <summary>
/// TSA authentication configuration.
/// NOTE(review): Password and BearerToken are held as plain strings — ensure
/// values come from a secret store rather than committed configuration.
/// </summary>
public sealed record TsaAuthenticationConfig
{
    /// <summary>Authentication type. Default: none.</summary>
    public TsaAuthType Type { get; init; } = TsaAuthType.None;
    /// <summary>Username for basic auth.</summary>
    public string? Username { get; init; }
    /// <summary>Password for basic auth.</summary>
    public string? Password { get; init; }
    /// <summary>Bearer token.</summary>
    public string? BearerToken { get; init; }
    /// <summary>Client certificate path for mTLS.</summary>
    public string? ClientCertPath { get; init; }
}
/// <summary>
/// TSA authentication type.
/// </summary>
public enum TsaAuthType
{
    /// <summary>No authentication.</summary>
    None,
    /// <summary>HTTP Basic authentication.</summary>
    Basic,
    /// <summary>Bearer token.</summary>
    Bearer,
    /// <summary>Client certificate (mTLS).</summary>
    ClientCertificate
}
/// <summary>
/// Result of a timestamp request. Constructed via the static factories:
/// <see cref="Succeeded"/>, <see cref="Failed"/>, <see cref="Skipped"/>, <see cref="Disabled"/>.
/// </summary>
public sealed record TsaTimestampResult
{
    /// <summary>Whether the request succeeded.</summary>
    public required bool Success { get; init; }
    /// <summary>Whether timestamping was skipped (disabled or not required).</summary>
    public bool Skipped { get; init; }
    /// <summary>Provider that provided the timestamp.</summary>
    public string? ProviderName { get; init; }
    /// <summary>Raw timestamp token (TST).</summary>
    public byte[]? TimestampToken { get; init; }
    /// <summary>Timestamp from the token.</summary>
    public DateTimeOffset? Timestamp { get; init; }
    /// <summary>Serial number from the TSA.</summary>
    public string? SerialNumber { get; init; }
    /// <summary>Error message if failed; informational reason when skipped.</summary>
    public string? ErrorMessage { get; init; }
    /// <summary>Errors from attempted providers.</summary>
    public IReadOnlyList<TsaProviderError> ProviderErrors { get; init; } = [];

    /// <summary>Builds a result for a token obtained from the named provider.</summary>
    public static TsaTimestampResult Succeeded(
        string providerName,
        byte[] token,
        DateTimeOffset timestamp,
        string? serialNumber = null) => new()
    {
        Success = true,
        ProviderName = providerName,
        TimestampToken = token,
        Timestamp = timestamp,
        SerialNumber = serialNumber
    };

    /// <summary>Builds a result for a request that failed outright.</summary>
    public static TsaTimestampResult Failed(
        string errorMessage,
        IReadOnlyList<TsaProviderError>? providerErrors = null,
        string? providerName = null) => new()
    {
        Success = false,
        ErrorMessage = errorMessage,
        ProviderName = providerName,
        ProviderErrors = providerErrors ?? []
    };

    /// <summary>Builds a success-with-no-token result for a non-mandatory timestamp.</summary>
    public static TsaTimestampResult Skipped(
        string reason,
        IReadOnlyList<TsaProviderError>? providerErrors = null) => new()
    {
        Success = true,
        Skipped = true,
        ErrorMessage = reason,
        ProviderErrors = providerErrors ?? []
    };

    /// <summary>Builds a success-with-no-token result when timestamping is disabled.</summary>
    public static TsaTimestampResult Disabled() => new()
    {
        Success = true,
        Skipped = true,
        ErrorMessage = "Timestamping is disabled"
    };
}
/// <summary>
/// Error reported by one TSA provider during a fallback-chain attempt.
/// </summary>
public sealed record TsaProviderError
{
    /// <summary>Provider name.</summary>
    public required string ProviderName { get; init; }
    /// <summary>Error message.</summary>
    public required string Error { get; init; }
}
/// <summary>
/// Options for timestamp request.
/// </summary>
public sealed record TsaTimestampOptions
{
    /// <summary>
    /// Preferred provider name.
    /// NOTE(review): not yet consulted by MultiProviderTsaClient.TimestampAsync.
    /// </summary>
    public string? PreferredProvider { get; init; }
    /// <summary>Hash algorithm OID for the message imprint.</summary>
    public string? HashAlgorithmOid { get; init; }
    /// <summary>Whether to request the TSA certificate in the response. Default: true.</summary>
    public bool RequestCertificate { get; init; } = true;
}

View File

@@ -0,0 +1,75 @@
// -----------------------------------------------------------------------------
// VerdictLedgerEntry.cs
// Sprint: SPRINT_20260118_015_Attestor_verdict_ledger_foundation
// Task: VL-002 - Implement VerdictLedger entity and repository
// Description: Domain entity for append-only verdict ledger
// -----------------------------------------------------------------------------
namespace StellaOps.Attestor.Persistence.Entities;
/// <summary>
/// Represents an entry in the append-only verdict ledger.
/// Each entry is cryptographically chained to the previous entry via SHA-256 hashes.
/// </summary>
public sealed record VerdictLedgerEntry
{
    /// <summary>Primary identifier; a fresh GUID unless set explicitly.</summary>
    public Guid LedgerId { get; init; } = Guid.NewGuid();
    /// <summary>Package URL or container digest reference.</summary>
    public required string BomRef { get; init; }
    /// <summary>CycloneDX serialNumber URN (urn:uuid:...) linking to the SBOM.</summary>
    public string? CycloneDxSerial { get; init; }
    /// <summary>Rekor transparency log entry UUID (populated after submission).</summary>
    public string? RekorUuid { get; init; }
    /// <summary>Verdict decision; defaults to Unknown until determined.</summary>
    public VerdictDecision Decision { get; init; } = VerdictDecision.Unknown;
    /// <summary>Human-readable reason for this verdict.</summary>
    public string? Reason { get; init; }
    /// <summary>Policy bundle identifier used for this decision.</summary>
    public required string PolicyBundleId { get; init; }
    /// <summary>SHA-256 hash of policy bundle content (for reproducibility).</summary>
    public required string PolicyBundleHash { get; init; }
    /// <summary>Container digest of the verifier service that made this decision.</summary>
    public required string VerifierImageDigest { get; init; }
    /// <summary>Key ID that signed this verdict.</summary>
    public required string SignerKeyId { get; init; }
    /// <summary>SHA-256 hash of the previous entry (null for genesis).</summary>
    public string? PrevHash { get; init; }
    /// <summary>SHA-256 hash of this entry's canonical JSON form.</summary>
    public required string VerdictHash { get; init; }
    /// <summary>When this entry was created (UTC); defaults to construction time.</summary>
    public DateTimeOffset CreatedAt { get; init; } = DateTimeOffset.UtcNow;
    /// <summary>Tenant identifier for multi-tenancy.</summary>
    public Guid TenantId { get; init; }
}
/// <summary>
/// Verdict decision enum. Numeric values are fixed because they are persisted.
/// </summary>
public enum VerdictDecision
{
    /// <summary>Verdict not yet determined.</summary>
    Unknown = 0,
    /// <summary>Approved for release.</summary>
    Approve = 1,
    /// <summary>Rejected - do not release.</summary>
    Reject = 2,
    /// <summary>Pending human review.</summary>
    Pending = 3
}

View File

@@ -0,0 +1,75 @@
-- -----------------------------------------------------------------------------
-- 001_create_verdict_ledger.sql
-- Sprint: SPRINT_20260118_015_Attestor_verdict_ledger_foundation
-- Task: VL-001 - Create VerdictLedger database schema
-- Description: Append-only verdict ledger with SHA-256 hash chaining
-- -----------------------------------------------------------------------------
-- NOTE(review): a sibling migration (001_verdict_ledger_initial.sql) in this
-- change also creates verdict_ledger with a slightly different schema (reason
-- NOT NULL, longer signer_keyid/cyclonedx_serial, hash-format CHECK
-- constraints). Only one of the two should ship; reconcile before release.
-- Create decision enum
DO $$
BEGIN
    IF NOT EXISTS (SELECT 1 FROM pg_type WHERE typname = 'verdict_decision') THEN
        CREATE TYPE verdict_decision AS ENUM ('unknown', 'approve', 'reject', 'pending');
    END IF;
END$$;
-- Create verdict_ledger table
CREATE TABLE IF NOT EXISTS verdict_ledger (
    -- Primary identifier
    ledger_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
    -- Package/artifact reference
    bom_ref VARCHAR(2048) NOT NULL,
    -- CycloneDX serial number (URN format)
    cyclonedx_serial VARCHAR(256),
    -- Transparency log reference (populated after Rekor submission)
    rekor_uuid VARCHAR(128),
    -- Verdict decision
    decision verdict_decision NOT NULL DEFAULT 'unknown',
    -- Human-readable reason for decision
    reason TEXT,
    -- Policy configuration reference
    policy_bundle_id VARCHAR(256) NOT NULL,
    policy_bundle_hash VARCHAR(64) NOT NULL, -- SHA-256 hex
    -- Verifier provenance
    verifier_image_digest VARCHAR(256) NOT NULL,
    -- Signing key reference
    signer_keyid VARCHAR(256) NOT NULL,
    -- Hash chain fields (append-only integrity)
    prev_hash VARCHAR(64), -- NULL for genesis entry
    verdict_hash VARCHAR(64) NOT NULL, -- SHA-256 of canonical entry
    -- Timestamps
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    -- Multi-tenancy
    tenant_id UUID NOT NULL,
    -- Constraints (uniqueness also guards against double-append of the same entry)
    CONSTRAINT uq_verdict_hash UNIQUE (verdict_hash)
);
-- Indexes for common query patterns
CREATE INDEX IF NOT EXISTS idx_verdict_ledger_bom_ref ON verdict_ledger (bom_ref);
CREATE INDEX IF NOT EXISTS idx_verdict_ledger_rekor_uuid ON verdict_ledger (rekor_uuid) WHERE rekor_uuid IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_verdict_ledger_created_at ON verdict_ledger (created_at DESC);
CREATE INDEX IF NOT EXISTS idx_verdict_ledger_tenant ON verdict_ledger (tenant_id, created_at DESC);
CREATE INDEX IF NOT EXISTS idx_verdict_ledger_decision ON verdict_ledger (decision, created_at DESC);
-- Comments
COMMENT ON TABLE verdict_ledger IS 'Append-only cryptographic audit trail for release verdicts';
COMMENT ON COLUMN verdict_ledger.prev_hash IS 'SHA-256 of previous entry; NULL for genesis';
COMMENT ON COLUMN verdict_ledger.verdict_hash IS 'SHA-256 of canonical JSON representation of this entry';
-- Revoke UPDATE/DELETE for application role (enforce append-only)
-- Note: Run this after creating the application role
-- REVOKE UPDATE, DELETE ON verdict_ledger FROM stella_app;
-- GRANT INSERT, SELECT ON verdict_ledger TO stella_app;

View File

@@ -0,0 +1,83 @@
-- -----------------------------------------------------------------------------
-- 001_verdict_ledger_initial.sql
-- Sprint: SPRINT_20260118_015_Attestor_verdict_ledger_foundation
-- Task: VL-001 - Create VerdictLedger database schema
-- Description: Append-only verdict ledger with SHA-256 hash chaining
-- -----------------------------------------------------------------------------
-- NOTE(review): a sibling migration (001_create_verdict_ledger.sql) in this
-- change also creates verdict_ledger with a diverging schema (reason nullable,
-- shorter signer_keyid/cyclonedx_serial, DEFAULT gen_random_uuid() on
-- ledger_id, no hash-format CHECKs). Here ledger_id has NO default, so the
-- application must supply it. Reconcile the two migrations before release.
-- Create verdict decision enum
DO $$ BEGIN
    CREATE TYPE verdict_decision AS ENUM ('unknown', 'approve', 'reject', 'pending');
EXCEPTION
    WHEN duplicate_object THEN null;
END $$;
-- Create the verdict_ledger table
CREATE TABLE IF NOT EXISTS verdict_ledger (
    ledger_id UUID PRIMARY KEY,
    bom_ref VARCHAR(2048) NOT NULL,
    cyclonedx_serial VARCHAR(512),
    rekor_uuid VARCHAR(128),
    decision verdict_decision NOT NULL DEFAULT 'unknown',
    reason TEXT NOT NULL,
    policy_bundle_id VARCHAR(256) NOT NULL,
    policy_bundle_hash VARCHAR(64) NOT NULL,
    verifier_image_digest VARCHAR(256) NOT NULL,
    signer_keyid VARCHAR(512) NOT NULL,
    prev_hash VARCHAR(64), -- SHA-256 hex, null for genesis entry
    verdict_hash VARCHAR(64) NOT NULL UNIQUE,
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    tenant_id UUID NOT NULL,
    -- Constraints: all hashes must be 64 lowercase hex chars (SHA-256)
    CONSTRAINT verdict_hash_format CHECK (verdict_hash ~ '^[a-f0-9]{64}$'),
    CONSTRAINT prev_hash_format CHECK (prev_hash IS NULL OR prev_hash ~ '^[a-f0-9]{64}$'),
    CONSTRAINT policy_hash_format CHECK (policy_bundle_hash ~ '^[a-f0-9]{64}$')
);
-- Indexes for common query patterns
CREATE INDEX IF NOT EXISTS idx_verdict_ledger_bom_ref
    ON verdict_ledger (bom_ref);
CREATE INDEX IF NOT EXISTS idx_verdict_ledger_rekor_uuid
    ON verdict_ledger (rekor_uuid)
    WHERE rekor_uuid IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_verdict_ledger_created_at
    ON verdict_ledger (created_at DESC);
CREATE INDEX IF NOT EXISTS idx_verdict_ledger_tenant_created
    ON verdict_ledger (tenant_id, created_at DESC);
CREATE INDEX IF NOT EXISTS idx_verdict_ledger_prev_hash
    ON verdict_ledger (prev_hash)
    WHERE prev_hash IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_verdict_ledger_decision
    ON verdict_ledger (decision);
-- Composite index for chain walking
CREATE INDEX IF NOT EXISTS idx_verdict_ledger_chain
    ON verdict_ledger (tenant_id, verdict_hash);
-- Comments
COMMENT ON TABLE verdict_ledger IS 'Append-only ledger of release verdicts with SHA-256 hash chaining for cryptographic audit trail';
COMMENT ON COLUMN verdict_ledger.ledger_id IS 'Unique identifier for this ledger entry';
COMMENT ON COLUMN verdict_ledger.bom_ref IS 'Package URL (purl) or container digest reference';
COMMENT ON COLUMN verdict_ledger.cyclonedx_serial IS 'CycloneDX serialNumber URN linking to SBOM';
COMMENT ON COLUMN verdict_ledger.rekor_uuid IS 'Transparency log entry UUID for external verification';
COMMENT ON COLUMN verdict_ledger.decision IS 'The release decision: unknown, approve, reject, or pending';
COMMENT ON COLUMN verdict_ledger.reason IS 'Human-readable explanation for the decision';
COMMENT ON COLUMN verdict_ledger.policy_bundle_id IS 'Reference to the policy configuration used';
COMMENT ON COLUMN verdict_ledger.policy_bundle_hash IS 'SHA-256 hash of the policy bundle for reproducibility';
COMMENT ON COLUMN verdict_ledger.verifier_image_digest IS 'Container digest of the verifier service';
COMMENT ON COLUMN verdict_ledger.signer_keyid IS 'Key ID that signed this verdict';
COMMENT ON COLUMN verdict_ledger.prev_hash IS 'SHA-256 hash of previous entry (null for genesis)';
COMMENT ON COLUMN verdict_ledger.verdict_hash IS 'SHA-256 hash of this entry canonical JSON form';
COMMENT ON COLUMN verdict_ledger.created_at IS 'Timestamp when this verdict was recorded';
COMMENT ON COLUMN verdict_ledger.tenant_id IS 'Tenant identifier for multi-tenancy';
-- Revoke UPDATE and DELETE for application role (append-only enforcement)
-- This should be run after creating the appropriate role
-- REVOKE UPDATE, DELETE ON verdict_ledger FROM stellaops_app;
-- GRANT INSERT, SELECT ON verdict_ledger TO stellaops_app;

View File

@@ -0,0 +1,97 @@
// -----------------------------------------------------------------------------
// IVerdictLedgerRepository.cs
// Sprint: SPRINT_20260118_015_Attestor_verdict_ledger_foundation
// Task: VL-002 - Implement VerdictLedger entity and repository
// Description: Repository interface for append-only verdict ledger
// -----------------------------------------------------------------------------
using StellaOps.Attestor.Persistence.Entities;
namespace StellaOps.Attestor.Persistence.Repositories;
/// <summary>
/// Repository for append-only verdict ledger operations.
/// Enforces hash chain integrity on append operations. All read operations are
/// scoped to a single tenant's chain.
/// </summary>
public interface IVerdictLedgerRepository
{
    /// <summary>
    /// Appends a new entry to the ledger. Entries are never updated or deleted.
    /// </summary>
    /// <param name="entry">The entry to append.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The appended entry with generated fields populated.</returns>
    /// <exception cref="ChainIntegrityException">
    /// Thrown if entry.PrevHash doesn't match the latest entry's VerdictHash.
    /// </exception>
    Task<VerdictLedgerEntry> AppendAsync(VerdictLedgerEntry entry, CancellationToken ct = default);
    /// <summary>
    /// Gets an entry by its verdict hash (globally unique across the ledger).
    /// </summary>
    /// <param name="verdictHash">SHA-256 hash of the entry.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The entry if found, null otherwise.</returns>
    Task<VerdictLedgerEntry?> GetByHashAsync(string verdictHash, CancellationToken ct = default);
    /// <summary>
    /// Gets all entries for a given bom-ref within a tenant.
    /// </summary>
    /// <param name="bomRef">Package URL or container digest.</param>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Entries ordered by creation time (oldest first).</returns>
    Task<IReadOnlyList<VerdictLedgerEntry>> GetByBomRefAsync(
        string bomRef,
        Guid tenantId,
        CancellationToken ct = default);
    /// <summary>
    /// Gets the latest entry for a tenant (tip of the chain); its VerdictHash is
    /// the expected PrevHash for the next append.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The latest entry if any exist, null otherwise.</returns>
    Task<VerdictLedgerEntry?> GetLatestAsync(Guid tenantId, CancellationToken ct = default);
    /// <summary>
    /// Gets entries in a hash range for chain verification.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="fromHash">Starting hash (inclusive).</param>
    /// <param name="toHash">Ending hash (inclusive).</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Entries in chain order from fromHash to toHash.</returns>
    Task<IReadOnlyList<VerdictLedgerEntry>> GetChainAsync(
        Guid tenantId,
        string fromHash,
        string toHash,
        CancellationToken ct = default);
    /// <summary>
    /// Counts total entries for a tenant.
    /// </summary>
    Task<long> CountAsync(Guid tenantId, CancellationToken ct = default);
}
/// <summary>
/// Exception thrown when an append would violate the ledger's hash chain:
/// the supplied previous-hash does not match the current chain tip.
/// </summary>
public sealed class ChainIntegrityException : Exception
{
    /// <summary>Previous hash the chain tip required (null for genesis).</summary>
    public string? ExpectedPrevHash { get; }

    /// <summary>Previous hash the rejected entry actually carried (null for genesis).</summary>
    public string? ActualPrevHash { get; }

    /// <summary>
    /// Creates a new chain integrity exception describing the expected vs. actual link.
    /// </summary>
    public ChainIntegrityException(string? expected, string? actual)
        : base(FormatMessage(expected, actual))
    {
        ActualPrevHash = actual;
        ExpectedPrevHash = expected;
    }

    // Null hashes denote the genesis (chain-start) position in the message text.
    private static string FormatMessage(string? expected, string? actual) =>
        $"Chain integrity violation: expected prev_hash '{expected ?? "(genesis)"}' but got '{actual ?? "(genesis)"}'";
}

View File

@@ -0,0 +1,240 @@
// -----------------------------------------------------------------------------
// PostgresVerdictLedgerRepository.cs
// Sprint: SPRINT_20260118_015_Attestor_verdict_ledger_foundation
// Task: VL-002 - Implement VerdictLedger entity and repository
// Description: PostgreSQL implementation of verdict ledger repository
// -----------------------------------------------------------------------------
using Npgsql;
using StellaOps.Attestor.Persistence.Entities;
namespace StellaOps.Attestor.Persistence.Repositories;
/// <summary>
/// PostgreSQL implementation of the verdict ledger repository.
/// Enforces append-only semantics with hash chain validation; the chain-tip read and
/// the insert are performed inside one serializable transaction so concurrent appends
/// cannot fork the chain.
/// </summary>
public sealed class PostgresVerdictLedgerRepository : IVerdictLedgerRepository
{
    // Column list shared by every SELECT/INSERT so the ordinals always line up with MapToEntry.
    private const string EntryColumns =
        "ledger_id, bom_ref, cyclonedx_serial, rekor_uuid, decision, reason, " +
        "policy_bundle_id, policy_bundle_hash, verifier_image_digest, signer_keyid, " +
        "prev_hash, verdict_hash, created_at, tenant_id";

    private readonly string _connectionString;

    /// <summary>
    /// Creates a new PostgreSQL verdict ledger repository.
    /// </summary>
    /// <param name="connectionString">Npgsql connection string for the ledger database.</param>
    public PostgresVerdictLedgerRepository(string connectionString)
    {
        _connectionString = connectionString ?? throw new ArgumentNullException(nameof(connectionString));
    }

    /// <inheritdoc />
    public async Task<VerdictLedgerEntry> AppendAsync(VerdictLedgerEntry entry, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(entry);
        await using var conn = new NpgsqlConnection(_connectionString);
        await conn.OpenAsync(ct);
        // The "read chain tip, then insert" pair must be atomic: without a transaction two
        // concurrent appends can both observe the same tip and fork the hash chain. Under
        // a serializable transaction one of the two writers fails instead.
        await using var tx = await conn.BeginTransactionAsync(System.Data.IsolationLevel.Serializable, ct);
        var latest = await QueryLatestAsync(conn, tx, entry.TenantId, ct);
        var expectedPrevHash = latest?.VerdictHash;
        if (entry.PrevHash != expectedPrevHash)
        {
            throw new ChainIntegrityException(expectedPrevHash, entry.PrevHash);
        }
        const string sql =
            "INSERT INTO verdict_ledger (" + EntryColumns + @") VALUES (
                @ledger_id, @bom_ref, @cyclonedx_serial, @rekor_uuid, @decision::verdict_decision, @reason,
                @policy_bundle_id, @policy_bundle_hash, @verifier_image_digest, @signer_keyid,
                @prev_hash, @verdict_hash, @created_at, @tenant_id
            )
            RETURNING ledger_id, created_at";
        await using var cmd = new NpgsqlCommand(sql, conn, tx);
        AddEntryParameters(cmd, entry);
        Guid ledgerId;
        DateTime createdAt;
        // Drain and dispose the reader before committing; Npgsql requires the reader to be
        // closed before the transaction can complete.
        await using (var reader = await cmd.ExecuteReaderAsync(ct))
        {
            if (!await reader.ReadAsync(ct))
            {
                throw new InvalidOperationException("Insert failed to return ledger_id");
            }
            ledgerId = reader.GetGuid(0);
            createdAt = reader.GetDateTime(1);
        }
        await tx.CommitAsync(ct);
        return entry with
        {
            LedgerId = ledgerId,
            CreatedAt = createdAt
        };
    }

    /// <inheritdoc />
    public async Task<VerdictLedgerEntry?> GetByHashAsync(string verdictHash, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(verdictHash);
        await using var conn = new NpgsqlConnection(_connectionString);
        await conn.OpenAsync(ct);
        return await QueryByHashAsync(conn, null, verdictHash, ct);
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<VerdictLedgerEntry>> GetByBomRefAsync(
        string bomRef,
        Guid tenantId,
        CancellationToken ct = default)
    {
        await using var conn = new NpgsqlConnection(_connectionString);
        await conn.OpenAsync(ct);
        const string sql = "SELECT " + EntryColumns + @"
            FROM verdict_ledger
            WHERE bom_ref = @bom_ref AND tenant_id = @tenant_id
            ORDER BY created_at ASC";
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("bom_ref", bomRef);
        cmd.Parameters.AddWithValue("tenant_id", tenantId);
        var results = new List<VerdictLedgerEntry>();
        await using var reader = await cmd.ExecuteReaderAsync(ct);
        while (await reader.ReadAsync(ct))
        {
            results.Add(MapToEntry(reader));
        }
        return results;
    }

    /// <inheritdoc />
    public async Task<VerdictLedgerEntry?> GetLatestAsync(Guid tenantId, CancellationToken ct = default)
    {
        await using var conn = new NpgsqlConnection(_connectionString);
        await conn.OpenAsync(ct);
        return await QueryLatestAsync(conn, null, tenantId, ct);
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<VerdictLedgerEntry>> GetChainAsync(
        Guid tenantId,
        string fromHash,
        string toHash,
        CancellationToken ct = default)
    {
        // Walk prev_hash links backward from toHash toward fromHash, reusing a single
        // connection for the whole walk instead of opening one per hop.
        await using var conn = new NpgsqlConnection(_connectionString);
        await conn.OpenAsync(ct);
        var chain = new List<VerdictLedgerEntry>();
        var currentHash = toHash;
        while (!string.IsNullOrEmpty(currentHash))
        {
            var entry = await QueryByHashAsync(conn, null, currentHash, ct);
            // Stop on a missing link or a cross-tenant entry (by-hash lookup is global).
            if (entry == null || entry.TenantId != tenantId)
            {
                break;
            }
            chain.Add(entry);
            if (currentHash == fromHash)
            {
                break;
            }
            currentHash = entry.PrevHash!;
        }
        // Return in chain order (oldest to newest).
        chain.Reverse();
        return chain;
    }

    /// <inheritdoc />
    public async Task<long> CountAsync(Guid tenantId, CancellationToken ct = default)
    {
        await using var conn = new NpgsqlConnection(_connectionString);
        await conn.OpenAsync(ct);
        const string sql = "SELECT COUNT(*) FROM verdict_ledger WHERE tenant_id = @tenant_id";
        await using var cmd = new NpgsqlCommand(sql, conn);
        cmd.Parameters.AddWithValue("tenant_id", tenantId);
        var result = await cmd.ExecuteScalarAsync(ct);
        return Convert.ToInt64(result);
    }

    // Fetches the tenant's chain tip on an existing connection (and transaction, when given).
    // NOTE(review): created_at ties are not broken deterministically; presumably appends are
    // serialized so equal timestamps cannot occur for one tenant — confirm before relying on it.
    private static async Task<VerdictLedgerEntry?> QueryLatestAsync(
        NpgsqlConnection conn,
        NpgsqlTransaction? tx,
        Guid tenantId,
        CancellationToken ct)
    {
        const string sql = "SELECT " + EntryColumns + @"
            FROM verdict_ledger
            WHERE tenant_id = @tenant_id
            ORDER BY created_at DESC
            LIMIT 1";
        await using var cmd = new NpgsqlCommand(sql, conn, tx);
        cmd.Parameters.AddWithValue("tenant_id", tenantId);
        await using var reader = await cmd.ExecuteReaderAsync(ct);
        return await reader.ReadAsync(ct) ? MapToEntry(reader) : null;
    }

    // Fetches one entry by verdict hash on an existing connection. Not tenant-scoped;
    // callers that need isolation must check TenantId on the result (GetChainAsync does).
    private static async Task<VerdictLedgerEntry?> QueryByHashAsync(
        NpgsqlConnection conn,
        NpgsqlTransaction? tx,
        string verdictHash,
        CancellationToken ct)
    {
        const string sql = "SELECT " + EntryColumns + " FROM verdict_ledger WHERE verdict_hash = @verdict_hash";
        await using var cmd = new NpgsqlCommand(sql, conn, tx);
        cmd.Parameters.AddWithValue("verdict_hash", verdictHash);
        await using var reader = await cmd.ExecuteReaderAsync(ct);
        return await reader.ReadAsync(ct) ? MapToEntry(reader) : null;
    }

    // Binds all entry columns; the decision enum is stored lower-case and cast to the
    // verdict_decision PostgreSQL enum in SQL. Optional columns map null -> DBNull.
    private static void AddEntryParameters(NpgsqlCommand cmd, VerdictLedgerEntry entry)
    {
        cmd.Parameters.AddWithValue("ledger_id", entry.LedgerId);
        cmd.Parameters.AddWithValue("bom_ref", entry.BomRef);
        cmd.Parameters.AddWithValue("cyclonedx_serial", (object?)entry.CycloneDxSerial ?? DBNull.Value);
        cmd.Parameters.AddWithValue("rekor_uuid", (object?)entry.RekorUuid ?? DBNull.Value);
        cmd.Parameters.AddWithValue("decision", entry.Decision.ToString().ToLowerInvariant());
        cmd.Parameters.AddWithValue("reason", (object?)entry.Reason ?? DBNull.Value);
        cmd.Parameters.AddWithValue("policy_bundle_id", entry.PolicyBundleId);
        cmd.Parameters.AddWithValue("policy_bundle_hash", entry.PolicyBundleHash);
        cmd.Parameters.AddWithValue("verifier_image_digest", entry.VerifierImageDigest);
        cmd.Parameters.AddWithValue("signer_keyid", entry.SignerKeyId);
        cmd.Parameters.AddWithValue("prev_hash", (object?)entry.PrevHash ?? DBNull.Value);
        cmd.Parameters.AddWithValue("verdict_hash", entry.VerdictHash);
        cmd.Parameters.AddWithValue("created_at", entry.CreatedAt);
        cmd.Parameters.AddWithValue("tenant_id", entry.TenantId);
    }

    // Maps one result row to an entity; ordinals follow EntryColumns exactly.
    private static VerdictLedgerEntry MapToEntry(NpgsqlDataReader reader)
    {
        return new VerdictLedgerEntry
        {
            LedgerId = reader.GetGuid(0),
            BomRef = reader.GetString(1),
            CycloneDxSerial = reader.IsDBNull(2) ? null : reader.GetString(2),
            RekorUuid = reader.IsDBNull(3) ? null : reader.GetString(3),
            Decision = Enum.Parse<VerdictDecision>(reader.GetString(4), ignoreCase: true),
            Reason = reader.IsDBNull(5) ? null : reader.GetString(5),
            PolicyBundleId = reader.GetString(6),
            PolicyBundleHash = reader.GetString(7),
            VerifierImageDigest = reader.GetString(8),
            SignerKeyId = reader.GetString(9),
            PrevHash = reader.IsDBNull(10) ? null : reader.GetString(10),
            VerdictHash = reader.GetString(11),
            CreatedAt = reader.GetDateTime(12),
            TenantId = reader.GetGuid(13)
        };
    }
}

View File

@@ -0,0 +1,265 @@
// -----------------------------------------------------------------------------
// ComponentRefExtractor.cs
// Sprint: SPRINT_20260118_016_Attestor_dsse_rekor_completion
// Task: TASK-016-006 - SBOM-VEX bom-ref Cross-Linking
// Description: Extracts component references from SBOMs for VEX linking
// -----------------------------------------------------------------------------
using System.Text.Json;
namespace StellaOps.Attestor.ProofChain.Linking;
/// <summary>
/// Extracts component references from SBOM documents for VEX cross-linking.
/// Supports CycloneDX (including nested component assemblies), SPDX 2.x, and SPDX 3.0.
/// </summary>
public sealed class ComponentRefExtractor
{
    /// <summary>
    /// Extracts component references from a CycloneDX SBOM.
    /// Nested components (CycloneDX assemblies) are flattened into the result.
    /// </summary>
    /// <param name="sbomJson">The CycloneDX JSON document.</param>
    /// <returns>Extracted component references plus the document serial number.</returns>
    public SbomExtractionResult ExtractFromCycloneDx(JsonDocument sbomJson)
    {
        ArgumentNullException.ThrowIfNull(sbomJson);
        var components = new List<ComponentRef>();
        var root = sbomJson.RootElement;
        if (root.TryGetProperty("components", out var componentsArray))
        {
            CollectCycloneDxComponents(componentsArray, components);
        }
        return new SbomExtractionResult
        {
            Format = SbomFormat.CycloneDx,
            SerialNumber = GetStringOrNull(root, "serialNumber"),
            ComponentRefs = components
        };
    }

    // Recursively collects CycloneDX components, descending into each component's own
    // nested "components" array (the spec allows assemblies). Components lacking both a
    // bom-ref and a purl are skipped — there is nothing to link against.
    private static void CollectCycloneDxComponents(JsonElement componentsArray, List<ComponentRef> components)
    {
        if (componentsArray.ValueKind != JsonValueKind.Array)
        {
            return;
        }
        foreach (var component in componentsArray.EnumerateArray())
        {
            var bomRef = GetStringOrNull(component, "bom-ref");
            var purl = GetStringOrNull(component, "purl");
            if (bomRef != null || purl != null)
            {
                components.Add(new ComponentRef
                {
                    BomRef = bomRef,
                    Name = GetStringOrNull(component, "name") ?? string.Empty,
                    Version = GetStringOrNull(component, "version"),
                    Purl = purl,
                    Format = SbomFormat.CycloneDx
                });
            }
            if (component.TryGetProperty("components", out var nested))
            {
                CollectCycloneDxComponents(nested, components);
            }
        }
    }

    /// <summary>
    /// Extracts component references from an SPDX SBOM.
    /// Handles SPDX 2.x ("packages") and SPDX 3.0 ("@graph") layouts in one pass.
    /// </summary>
    /// <param name="sbomJson">The SPDX JSON document.</param>
    /// <returns>Extracted component references plus the document SPDXID.</returns>
    public SbomExtractionResult ExtractFromSpdx(JsonDocument sbomJson)
    {
        ArgumentNullException.ThrowIfNull(sbomJson);
        var components = new List<ComponentRef>();
        var root = sbomJson.RootElement;
        // SPDX 2.x uses "packages"
        if (root.TryGetProperty("packages", out var packagesArray) &&
            packagesArray.ValueKind == JsonValueKind.Array)
        {
            foreach (var package in packagesArray.EnumerateArray())
            {
                var spdxId = GetStringOrNull(package, "SPDXID");
                if (spdxId == null)
                {
                    continue;
                }
                components.Add(new ComponentRef
                {
                    BomRef = spdxId,
                    Name = GetStringOrNull(package, "name") ?? string.Empty,
                    Version = GetStringOrNull(package, "versionInfo"),
                    Purl = ExtractPurlFromExternalRefs(package),
                    Format = SbomFormat.Spdx
                });
            }
        }
        // SPDX 3.0 uses "@graph" elements typed as *Package
        if (root.TryGetProperty("@graph", out var graphArray) &&
            graphArray.ValueKind == JsonValueKind.Array)
        {
            foreach (var element in graphArray.EnumerateArray())
            {
                var type = GetStringOrNull(element, "@type");
                if (type?.Contains("Package") != true)
                {
                    continue;
                }
                var spdxId = GetStringOrNull(element, "@id");
                if (spdxId != null)
                {
                    components.Add(new ComponentRef
                    {
                        BomRef = spdxId,
                        Name = GetStringOrNull(element, "name") ?? string.Empty,
                        Format = SbomFormat.Spdx3
                    });
                }
            }
        }
        // NOTE(review): the result-level format is reported as Spdx even when only @graph
        // (SPDX 3.0) elements were found; individual ComponentRefs carry the precise format.
        return new SbomExtractionResult
        {
            Format = SbomFormat.Spdx,
            SerialNumber = GetStringOrNull(root, "SPDXID"),
            ComponentRefs = components
        };
    }

    // Pulls the purl referenceLocator out of an SPDX package's externalRefs, if any.
    private static string? ExtractPurlFromExternalRefs(JsonElement package)
    {
        if (!package.TryGetProperty("externalRefs", out var externalRefs) ||
            externalRefs.ValueKind != JsonValueKind.Array)
        {
            return null;
        }
        foreach (var extRef in externalRefs.EnumerateArray())
        {
            if (GetStringOrNull(extRef, "referenceType") == "purl")
            {
                var locator = GetStringOrNull(extRef, "referenceLocator");
                if (locator != null)
                {
                    return locator;
                }
            }
        }
        return null;
    }

    /// <summary>
    /// Resolves a Package URL to a bom-ref in the extraction result.
    /// Tries an exact case-insensitive match first, then falls back to a
    /// version-insensitive match on the purl base.
    /// </summary>
    /// <param name="purl">The Package URL to resolve.</param>
    /// <param name="extraction">The SBOM extraction result.</param>
    /// <returns>The matching bom-ref or null.</returns>
    public string? ResolvePurlToBomRef(string purl, SbomExtractionResult extraction)
    {
        ArgumentNullException.ThrowIfNull(extraction);
        if (string.IsNullOrWhiteSpace(purl))
            return null;
        var exact = extraction.ComponentRefs.FirstOrDefault(c =>
            string.Equals(c.Purl, purl, StringComparison.OrdinalIgnoreCase));
        if (exact != null)
            return exact.BomRef;
        var purlBase = RemoveVersionFromPurl(purl);
        var partial = extraction.ComponentRefs.FirstOrDefault(c =>
            c.Purl != null && RemoveVersionFromPurl(c.Purl).Equals(purlBase, StringComparison.OrdinalIgnoreCase));
        return partial?.BomRef;
    }

    // Strips the trailing "@version" segment. NOTE(review): assumes the last '@' introduces
    // the version; any qualifiers following it are stripped too, which is acceptable for
    // base comparison but worth confirming against the purl spec for exotic inputs.
    private static string RemoveVersionFromPurl(string purl)
    {
        var atIndex = purl.LastIndexOf('@');
        return atIndex > 0 ? purl[..atIndex] : purl;
    }

    // Reads a string property, returning null when the property is missing or not a
    // JSON string (the previous code threw on non-string values).
    private static string? GetStringOrNull(JsonElement element, string propertyName)
        => element.TryGetProperty(propertyName, out var prop) && prop.ValueKind == JsonValueKind.String
            ? prop.GetString()
            : null;
}
/// <summary>
/// Result of SBOM component extraction.
/// </summary>
public sealed record SbomExtractionResult
{
    /// <summary>Format of the source SBOM document.</summary>
    public required SbomFormat Format { get; init; }
    /// <summary>Document serial number (CycloneDX) or document SPDXID (SPDX).</summary>
    public string? SerialNumber { get; init; }
    /// <summary>Extracted component references, in document order.</summary>
    public required IReadOnlyList<ComponentRef> ComponentRefs { get; init; }
}
/// <summary>
/// A component reference from an SBOM.
/// </summary>
public sealed record ComponentRef
{
    /// <summary>CycloneDX bom-ref or SPDX SPDXID.</summary>
    public string? BomRef { get; init; }
    /// <summary>Component name (empty string when absent in the source).</summary>
    public required string Name { get; init; }
    /// <summary>Component version.</summary>
    public string? Version { get; init; }
    /// <summary>Package URL.</summary>
    public string? Purl { get; init; }
    /// <summary>Source SBOM format this reference was extracted from.</summary>
    public required SbomFormat Format { get; init; }
}
/// <summary>
/// SBOM format enumeration.
/// </summary>
public enum SbomFormat
{
    /// <summary>CycloneDX format.</summary>
    CycloneDx,
    /// <summary>SPDX 2.x format.</summary>
    Spdx,
    /// <summary>SPDX 3.0 format.</summary>
    Spdx3
}

View File

@@ -0,0 +1,217 @@
// -----------------------------------------------------------------------------
// VexAttestationPredicate.cs
// Sprint: SPRINT_20260118_016_Attestor_dsse_rekor_completion
// Task: TASK-016-005 - VEX in-toto Predicate Type Implementation
// Description: VEX attestation predicate for DSSE signing
// -----------------------------------------------------------------------------
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Predicates;
/// <summary>
/// VEX attestation predicate for in-toto statements.
/// Predicate type: https://stellaops.dev/attestation/vex/v1
/// </summary>
public sealed record VexAttestationPredicate
{
    /// <summary>
    /// Canonical predicate type URI.
    /// </summary>
    public const string PredicateType = "https://stellaops.dev/attestation/vex/v1";
    /// <summary>
    /// Alternative predicate type URI.
    /// NOTE(review): which consumers accept this short form is not visible here — confirm.
    /// </summary>
    public const string PredicateTypeAlias = "stellaops.dev/vex@v1";
    /// <summary>
    /// The VEX document (embedded or reference).
    /// </summary>
    [JsonPropertyName("vexDocument")]
    public required VexDocumentReference VexDocument { get; init; }
    /// <summary>
    /// Reference to the associated SBOM.
    /// </summary>
    [JsonPropertyName("sbomReference")]
    public required SbomReference SbomReference { get; init; }
    /// <summary>
    /// Summary of verdicts in the VEX document.
    /// </summary>
    [JsonPropertyName("verdictSummary")]
    public required VexVerdictSummary VerdictSummary { get; init; }
    /// <summary>
    /// When this predicate was computed (UTC ISO-8601).
    /// </summary>
    [JsonPropertyName("computedAt")]
    public required DateTimeOffset ComputedAt { get; init; }
    /// <summary>
    /// Optional merge trace for lattice resolution details; null when only one source
    /// document was involved or tracing was disabled.
    /// </summary>
    [JsonPropertyName("mergeTrace")]
    public VexMergeTrace? MergeTrace { get; init; }
    /// <summary>
    /// Version of the Stella Ops VEX processor that produced this predicate.
    /// </summary>
    [JsonPropertyName("processorVersion")]
    public string? ProcessorVersion { get; init; }
}
/// <summary>
/// Reference to a VEX document, either embedded inline or pointed to by URI.
/// </summary>
public sealed record VexDocumentReference
{
    /// <summary>
    /// VEX document format (openvex, csaf, cyclonedx-vex).
    /// </summary>
    [JsonPropertyName("format")]
    public required string Format { get; init; }
    /// <summary>
    /// SHA-256 digest of the VEX document.
    /// </summary>
    [JsonPropertyName("digest")]
    public required string Digest { get; init; }
    /// <summary>
    /// URI to the VEX document (if external).
    /// </summary>
    [JsonPropertyName("uri")]
    public string? Uri { get; init; }
    /// <summary>
    /// Embedded VEX document (if inline). Typed as object so any of the supported
    /// formats can be carried verbatim.
    /// </summary>
    [JsonPropertyName("embedded")]
    public object? Embedded { get; init; }
    /// <summary>
    /// VEX document ID.
    /// </summary>
    [JsonPropertyName("documentId")]
    public string? DocumentId { get; init; }
}
/// <summary>
/// Reference to an SBOM, identifying it by digest and optionally by in-document IDs
/// and its transparency-log location.
/// </summary>
public sealed record SbomReference
{
    /// <summary>
    /// SHA-256 digest of the SBOM.
    /// </summary>
    [JsonPropertyName("digest")]
    public required string Digest { get; init; }
    /// <summary>
    /// CycloneDX bom-ref or SPDX SPDXID.
    /// </summary>
    [JsonPropertyName("bomRef")]
    public string? BomRef { get; init; }
    /// <summary>
    /// CycloneDX serialNumber URN.
    /// </summary>
    [JsonPropertyName("serialNumber")]
    public string? SerialNumber { get; init; }
    /// <summary>
    /// Rekor log index for the SBOM attestation, when it has been logged.
    /// </summary>
    [JsonPropertyName("rekorLogIndex")]
    public long? RekorLogIndex { get; init; }
}
/// <summary>
/// Aggregate summary of the verdicts contained in a VEX document.
/// </summary>
public sealed record VexVerdictSummary
{
    /// <summary>
    /// Total number of VEX statements.
    /// </summary>
    [JsonPropertyName("totalStatements")]
    public int TotalStatements { get; init; }
    /// <summary>
    /// Statement count broken down by VEX status.
    /// </summary>
    [JsonPropertyName("byStatus")]
    public required VexStatusCounts ByStatus { get; init; }
    /// <summary>
    /// Number of affected components.
    /// </summary>
    [JsonPropertyName("affectedComponents")]
    public int AffectedComponents { get; init; }
    /// <summary>
    /// Number of unique vulnerabilities referenced across all statements.
    /// </summary>
    [JsonPropertyName("uniqueVulnerabilities")]
    public int UniqueVulnerabilities { get; init; }
}
/// <summary>
/// Per-status statement counts for a VEX document. JSON property names use the
/// snake_case status identifiers from the VEX vocabulary.
/// </summary>
public sealed record VexStatusCounts
{
    /// <summary>
    /// Statements with status "not_affected".
    /// </summary>
    [JsonPropertyName("not_affected")]
    public int NotAffected { get; init; }

    /// <summary>
    /// Statements with status "affected".
    /// </summary>
    [JsonPropertyName("affected")]
    public int Affected { get; init; }

    /// <summary>
    /// Statements with status "fixed".
    /// </summary>
    [JsonPropertyName("fixed")]
    public int Fixed { get; init; }

    /// <summary>
    /// Statements with status "under_investigation".
    /// </summary>
    [JsonPropertyName("under_investigation")]
    public int UnderInvestigation { get; init; }
}
/// <summary>
/// Merge trace for VEX lattice resolution: records how multiple source documents
/// were combined into a single set of verdicts.
/// </summary>
public sealed record VexMergeTrace
{
    /// <summary>
    /// Number of source documents merged.
    /// </summary>
    [JsonPropertyName("sourceCount")]
    public int SourceCount { get; init; }
    /// <summary>
    /// Resolution strategy used.
    /// </summary>
    [JsonPropertyName("strategy")]
    public string? Strategy { get; init; }
    /// <summary>
    /// Number of conflicts detected during merge.
    /// </summary>
    [JsonPropertyName("conflictsDetected")]
    public int ConflictsDetected { get; init; }
    /// <summary>
    /// Trust weights applied, keyed by source identifier.
    /// NOTE(review): key semantics (URI vs. document ID) are not visible here — confirm.
    /// </summary>
    [JsonPropertyName("trustWeights")]
    public IReadOnlyDictionary<string, double>? TrustWeights { get; init; }
    /// <summary>
    /// Source document references.
    /// </summary>
    [JsonPropertyName("sources")]
    public IReadOnlyList<string>? Sources { get; init; }
}

View File

@@ -0,0 +1,288 @@
// -----------------------------------------------------------------------------
// EnhancedRekorProof.cs
// Sprint: SPRINT_20260118_016_Attestor_dsse_rekor_completion
// Task: TASK-016-007 - Rekor Proof Persistence Enhancement
// Description: Enhanced Rekor proof with all fields for offline verification
// -----------------------------------------------------------------------------
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Rekor;
/// <summary>
/// Enhanced Rekor proof carrying every field needed to verify an entry offline,
/// without contacting the transparency log again.
/// </summary>
public sealed record EnhancedRekorProof
{
    /// <summary>Rekor entry UUID.</summary>
    [JsonPropertyName("uuid")]
    public required string Uuid { get; init; }

    /// <summary>Index of the entry in the Rekor transparency log.</summary>
    [JsonPropertyName("logIndex")]
    public required long LogIndex { get; init; }

    /// <summary>Integration timestamp as Unix seconds.</summary>
    [JsonPropertyName("integratedTime")]
    public required long IntegratedTime { get; init; }

    /// <summary>Merkle inclusion proof for the entry.</summary>
    [JsonPropertyName("inclusionProof")]
    public required RekorInclusionProof InclusionProof { get; init; }

    /// <summary>Raw checkpoint signature bytes, base64-encoded.</summary>
    [JsonPropertyName("checkpointSignature")]
    public required string CheckpointSignature { get; init; }

    /// <summary>Full checkpoint note, retained for offline verification.</summary>
    [JsonPropertyName("checkpointNote")]
    public required string CheckpointNote { get; init; }

    /// <summary>SHA-256 hash of the entry body (input to the Merkle leaf computation).</summary>
    [JsonPropertyName("entryBodyHash")]
    public required string EntryBodyHash { get; init; }

    /// <summary>Timestamp of the last successful verification, if any.</summary>
    [JsonPropertyName("verifiedAt")]
    public DateTimeOffset? VerifiedAt { get; init; }

    /// <summary>Entry kind, e.g. "hashedrekord", "intoto", "dsse".</summary>
    [JsonPropertyName("entryKind")]
    public string? EntryKind { get; init; }

    /// <summary>Entry schema version.</summary>
    [JsonPropertyName("entryVersion")]
    public string? EntryVersion { get; init; }

    /// <summary>Public key used for signing, if available.</summary>
    [JsonPropertyName("publicKey")]
    public string? PublicKey { get; init; }

    /// <summary>Log ID (tree hash).</summary>
    [JsonPropertyName("logId")]
    public string? LogId { get; init; }
}
/// <summary>
/// Merkle inclusion proof as returned by Rekor.
/// </summary>
public sealed record RekorInclusionProof
{
    /// <summary>Sibling hashes along the inclusion path (base64 or hex).</summary>
    [JsonPropertyName("hashes")]
    public required IReadOnlyList<string> Hashes { get; init; }

    /// <summary>Log index this proof was produced for.</summary>
    [JsonPropertyName("logIndex")]
    public required long LogIndex { get; init; }

    /// <summary>Tree root hash at inclusion time.</summary>
    [JsonPropertyName("rootHash")]
    public required string RootHash { get; init; }

    /// <summary>Tree size at inclusion time.</summary>
    [JsonPropertyName("treeSize")]
    public required long TreeSize { get; init; }

    /// <summary>Checkpoint note containing the signed tree head.</summary>
    [JsonPropertyName("checkpoint")]
    public string? Checkpoint { get; init; }
}
/// <summary>
/// Fluent builder for <see cref="EnhancedRekorProof"/>. Required fields are validated
/// in <see cref="Build"/>; optional fields pass through as set (or null).
/// </summary>
public sealed class EnhancedRekorProofBuilder
{
    private string? _uuid;
    private long? _logIndex;
    private long? _integratedTime;
    private RekorInclusionProof? _inclusionProof;
    private string? _checkpointSignature;
    private string? _checkpointNote;
    private string? _entryBodyHash;
    private DateTimeOffset? _verifiedAt;
    private string? _entryKind;
    private string? _entryVersion;
    private string? _publicKey;
    private string? _logId;

    /// <summary>Sets the Rekor entry UUID (required).</summary>
    public EnhancedRekorProofBuilder WithUuid(string uuid) { _uuid = uuid; return this; }

    /// <summary>Sets the log index (required).</summary>
    public EnhancedRekorProofBuilder WithLogIndex(long logIndex) { _logIndex = logIndex; return this; }

    /// <summary>Sets the integrated time, Unix seconds (required).</summary>
    public EnhancedRekorProofBuilder WithIntegratedTime(long integratedTime) { _integratedTime = integratedTime; return this; }

    /// <summary>Sets the Merkle inclusion proof (required).</summary>
    public EnhancedRekorProofBuilder WithInclusionProof(RekorInclusionProof inclusionProof) { _inclusionProof = inclusionProof; return this; }

    /// <summary>Sets the checkpoint signature (required).</summary>
    public EnhancedRekorProofBuilder WithCheckpointSignature(string checkpointSignature) { _checkpointSignature = checkpointSignature; return this; }

    /// <summary>Sets the checkpoint note (required).</summary>
    public EnhancedRekorProofBuilder WithCheckpointNote(string checkpointNote) { _checkpointNote = checkpointNote; return this; }

    /// <summary>Sets the entry body hash (required).</summary>
    public EnhancedRekorProofBuilder WithEntryBodyHash(string entryBodyHash) { _entryBodyHash = entryBodyHash; return this; }

    /// <summary>Sets the last successful verification timestamp (optional).</summary>
    public EnhancedRekorProofBuilder WithVerifiedAt(DateTimeOffset verifiedAt) { _verifiedAt = verifiedAt; return this; }

    /// <summary>Sets the entry kind (optional).</summary>
    public EnhancedRekorProofBuilder WithEntryKind(string entryKind) { _entryKind = entryKind; return this; }

    /// <summary>Sets the entry version (optional).</summary>
    public EnhancedRekorProofBuilder WithEntryVersion(string entryVersion) { _entryVersion = entryVersion; return this; }

    /// <summary>Sets the public key (optional).</summary>
    public EnhancedRekorProofBuilder WithPublicKey(string publicKey) { _publicKey = publicKey; return this; }

    /// <summary>Sets the log ID (optional).</summary>
    public EnhancedRekorProofBuilder WithLogId(string logId) { _logId = logId; return this; }

    /// <summary>
    /// Builds the proof, validating that every required field was supplied.
    /// </summary>
    /// <exception cref="InvalidOperationException">A required field is missing.</exception>
    public EnhancedRekorProof Build()
    {
        // Validation order matches the field order above; the first missing field wins.
        var uuid = Require(_uuid, "UUID is required.");
        var logIndex = _logIndex ?? throw new InvalidOperationException("LogIndex is required.");
        var integratedTime = _integratedTime ?? throw new InvalidOperationException("IntegratedTime is required.");
        var inclusionProof = _inclusionProof ?? throw new InvalidOperationException("InclusionProof is required.");
        var checkpointSignature = Require(_checkpointSignature, "CheckpointSignature is required.");
        var checkpointNote = Require(_checkpointNote, "CheckpointNote is required.");
        var entryBodyHash = Require(_entryBodyHash, "EntryBodyHash is required.");
        return new EnhancedRekorProof
        {
            Uuid = uuid,
            LogIndex = logIndex,
            IntegratedTime = integratedTime,
            InclusionProof = inclusionProof,
            CheckpointSignature = checkpointSignature,
            CheckpointNote = checkpointNote,
            EntryBodyHash = entryBodyHash,
            VerifiedAt = _verifiedAt,
            EntryKind = _entryKind,
            EntryVersion = _entryVersion,
            PublicKey = _publicKey,
            LogId = _logId
        };
    }

    // Null and empty string are both "missing", matching string.IsNullOrEmpty semantics.
    private static string Require(string? value, string message)
        => string.IsNullOrEmpty(value) ? throw new InvalidOperationException(message) : value;
}

View File

@@ -0,0 +1,292 @@
// -----------------------------------------------------------------------------
// VerdictLedgerService.cs
// Sprint: SPRINT_20260118_015_Attestor_verdict_ledger_foundation
// Task: VL-003 - Implement VerdictLedger service with chain validation
// Description: Service layer for verdict ledger with chain integrity validation
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.Attestor.Persistence.Entities;
using StellaOps.Attestor.Persistence.Repositories;
namespace StellaOps.Attestor.Services;
/// <summary>
/// Service for managing the append-only verdict ledger with cryptographic chain validation.
/// </summary>
public interface IVerdictLedgerService
{
    /// <summary>
    /// Appends a new verdict to the ledger, computing the verdict hash and linking to previous entry.
    /// </summary>
    /// <param name="request">Verdict details to append.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The persisted entry with hash and chain fields populated.</returns>
    Task<VerdictLedgerEntry> AppendVerdictAsync(AppendVerdictRequest request, CancellationToken ct = default);
    /// <summary>
    /// Verifies the integrity of the entire hash chain for a tenant.
    /// </summary>
    /// <param name="tenantId">Tenant whose chain is verified.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Verification outcome, including the first broken entry when invalid.</returns>
    Task<ChainVerificationResult> VerifyChainIntegrityAsync(Guid tenantId, CancellationToken ct = default);
    /// <summary>
    /// Gets entries in a hash range.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="fromHash">Hash at the older end of the range (inclusive).</param>
    /// <param name="toHash">Hash at the newer end of the range (inclusive).</param>
    /// <param name="ct">Cancellation token.</param>
    Task<IReadOnlyList<VerdictLedgerEntry>> GetChainAsync(
        Guid tenantId,
        string fromHash,
        string toHash,
        CancellationToken ct = default);
    /// <summary>
    /// Gets the latest verdict for a specific bom-ref.
    /// </summary>
    /// <param name="bomRef">Package URL or container digest.</param>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="ct">Cancellation token.</param>
    Task<VerdictLedgerEntry?> GetLatestVerdictAsync(string bomRef, Guid tenantId, CancellationToken ct = default);
}
/// <summary>
/// Request to append a verdict. Hash and chain-link fields are computed by the service,
/// not supplied by the caller.
/// NOTE(review): Decision and TenantId are not marked required and default to their zero
/// values when omitted — confirm whether callers always set them.
/// </summary>
public sealed record AppendVerdictRequest
{
    /// <summary>Package URL or container digest the verdict applies to.</summary>
    public required string BomRef { get; init; }
    /// <summary>CycloneDX serial number of the associated SBOM, if any.</summary>
    public string? CycloneDxSerial { get; init; }
    /// <summary>Decision.</summary>
    public VerdictDecision Decision { get; init; }
    /// <summary>Human-readable reason for the decision.</summary>
    public string? Reason { get; init; }
    /// <summary>Policy bundle ID the decision was evaluated under.</summary>
    public required string PolicyBundleId { get; init; }
    /// <summary>Hash of the policy bundle.</summary>
    public required string PolicyBundleHash { get; init; }
    /// <summary>Digest of the verifier image that produced the verdict.</summary>
    public required string VerifierImageDigest { get; init; }
    /// <summary>Key ID used to sign the verdict.</summary>
    public required string SignerKeyId { get; init; }
    /// <summary>Tenant ID owning the ledger chain.</summary>
    public Guid TenantId { get; init; }
}
/// <summary>
/// Result of chain verification.
/// </summary>
public sealed record ChainVerificationResult
{
    /// <summary>Whether the entire chain verified successfully.</summary>
    public bool IsValid { get; init; }
    /// <summary>Number of entries inspected before success or the first failure.</summary>
    public long EntriesVerified { get; init; }
    /// <summary>First entry at which the chain broke; null when the chain is valid.</summary>
    public VerdictLedgerEntry? FirstBrokenEntry { get; init; }
    /// <summary>Human-readable description of the failure; null when the chain is valid.</summary>
    public string? ErrorMessage { get; init; }
}
/// <summary>
/// Implementation of the verdict ledger service.
/// </summary>
public sealed class VerdictLedgerService : IVerdictLedgerService
{
    // Append-only ledger store used for tip reads and inserts.
    private readonly IVerdictLedgerRepository _repository;
    // Serializer options used when hashing verdicts: camelCase names, compact output,
    // nulls omitted. NOTE(review): this is stable for a fixed type/serializer version but
    // is NOT RFC 8785 canonical JSON — property order follows member declaration order,
    // so reordering entity members would change computed hashes. Confirm this is acceptable.
    private static readonly JsonSerializerOptions CanonicalJsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
    };
/// <summary>
/// Creates a new verdict ledger service.
/// </summary>
public VerdictLedgerService(IVerdictLedgerRepository repository)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
}
    /// <inheritdoc />
    /// <remarks>
    /// Reads the current chain tip to obtain prev_hash, hashes the request together with
    /// prev_hash and a fresh UTC timestamp, then appends. If another append lands between
    /// the tip read and the insert, the repository's own tip check rejects this entry with
    /// a ChainIntegrityException rather than forking the chain.
    /// </remarks>
    public async Task<VerdictLedgerEntry> AppendVerdictAsync(AppendVerdictRequest request, CancellationToken ct = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        // Get the latest entry to determine prev_hash; null means this is the genesis entry.
        var latest = await _repository.GetLatestAsync(request.TenantId, ct);
        var prevHash = latest?.VerdictHash;
        var createdAt = DateTimeOffset.UtcNow;
        // Compute verdict hash using canonical JSON. ComputeVerdictHash is defined later in
        // this file; presumably a SHA-256 over the serialized request + prevHash + createdAt
        // — confirm against its implementation.
        var verdictHash = ComputeVerdictHash(request, prevHash, createdAt);
        var entry = new VerdictLedgerEntry
        {
            BomRef = request.BomRef,
            CycloneDxSerial = request.CycloneDxSerial,
            Decision = request.Decision,
            Reason = request.Reason,
            PolicyBundleId = request.PolicyBundleId,
            PolicyBundleHash = request.PolicyBundleHash,
            VerifierImageDigest = request.VerifierImageDigest,
            SignerKeyId = request.SignerKeyId,
            PrevHash = prevHash,
            VerdictHash = verdictHash,
            CreatedAt = createdAt,
            TenantId = request.TenantId
        };
        return await _repository.AppendAsync(entry, ct);
    }
/// <inheritdoc />
public async Task<ChainVerificationResult> VerifyChainIntegrityAsync(Guid tenantId, CancellationToken ct = default)
{
// Get the latest entry
var latest = await _repository.GetLatestAsync(tenantId, ct);
if (latest == null)
{
return new ChainVerificationResult
{
IsValid = true,
EntriesVerified = 0
};
}
// Walk backward through the chain
long entriesVerified = 0;
var current = latest;
VerdictLedgerEntry? previous = null;
while (current != null)
{
entriesVerified++;
// Recompute the hash and verify it matches
var recomputedHash = RecomputeVerdictHash(current);
if (recomputedHash != current.VerdictHash)
{
return new ChainVerificationResult
{
IsValid = false,
EntriesVerified = entriesVerified,
FirstBrokenEntry = current,
ErrorMessage = $"Hash mismatch: stored={current.VerdictHash}, computed={recomputedHash}"
};
}
// Verify chain linkage
if (previous != null && previous.PrevHash != current.VerdictHash)
{
return new ChainVerificationResult
{
IsValid = false,
EntriesVerified = entriesVerified,
FirstBrokenEntry = previous,
ErrorMessage = $"Chain break: entry {previous.LedgerId} points to {previous.PrevHash} but previous entry hash is {current.VerdictHash}"
};
}
// Move to previous entry
previous = current;
if (current.PrevHash != null)
{
current = await _repository.GetByHashAsync(current.PrevHash, ct);
}
else
{
current = null; // Reached genesis
}
}
return new ChainVerificationResult
{
IsValid = true,
EntriesVerified = entriesVerified
};
}
/// <inheritdoc />
public async Task<IReadOnlyList<VerdictLedgerEntry>> GetChainAsync(
Guid tenantId,
string fromHash,
string toHash,
CancellationToken ct = default)
{
return await _repository.GetChainAsync(tenantId, fromHash, toHash, ct);
}
/// <inheritdoc />
public async Task<VerdictLedgerEntry?> GetLatestVerdictAsync(
string bomRef,
Guid tenantId,
CancellationToken ct = default)
{
var entries = await _repository.GetByBomRefAsync(bomRef, tenantId, ct);
return entries.LastOrDefault();
}
/// <summary>
/// Computes the verdict hash using canonical JSON serialization.
/// </summary>
private static string ComputeVerdictHash(AppendVerdictRequest request, string? prevHash, DateTimeOffset createdAt)
{
// Create canonical object with sorted keys
var canonical = new SortedDictionary<string, object?>(StringComparer.Ordinal)
{
["bomRef"] = request.BomRef,
["createdAt"] = createdAt.ToString("yyyy-MM-ddTHH:mm:ssZ"),
["cyclonedxSerial"] = request.CycloneDxSerial,
["decision"] = request.Decision.ToString().ToLowerInvariant(),
["policyBundleHash"] = request.PolicyBundleHash,
["policyBundleId"] = request.PolicyBundleId,
["prevHash"] = prevHash,
["reason"] = request.Reason,
["signerKeyid"] = request.SignerKeyId,
["verifierImageDigest"] = request.VerifierImageDigest
};
var json = JsonSerializer.Serialize(canonical, CanonicalJsonOptions);
var bytes = Encoding.UTF8.GetBytes(json);
using var sha256 = SHA256.Create();
var hash = sha256.ComputeHash(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
}
/// <summary>
/// Recomputes the verdict hash from a stored entry for verification.
/// </summary>
private static string RecomputeVerdictHash(VerdictLedgerEntry entry)
{
var canonical = new SortedDictionary<string, object?>(StringComparer.Ordinal)
{
["bomRef"] = entry.BomRef,
["createdAt"] = entry.CreatedAt.ToString("yyyy-MM-ddTHH:mm:ssZ"),
["cyclonedxSerial"] = entry.CycloneDxSerial,
["decision"] = entry.Decision.ToString().ToLowerInvariant(),
["policyBundleHash"] = entry.PolicyBundleHash,
["policyBundleId"] = entry.PolicyBundleId,
["prevHash"] = entry.PrevHash,
["reason"] = entry.Reason,
["signerKeyid"] = entry.SignerKeyId,
["verifierImageDigest"] = entry.VerifierImageDigest
};
var json = JsonSerializer.Serialize(canonical, CanonicalJsonOptions);
var bytes = Encoding.UTF8.GetBytes(json);
using var sha256 = SHA256.Create();
var hash = sha256.ComputeHash(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
}
}

View File

@@ -0,0 +1,55 @@
// -----------------------------------------------------------------------------
// ISbomCanonicalizer.cs
// Sprint: SPRINT_20260118_015_Attestor_deterministic_sbom_generation
// Task: TASK-015-003 - Create Canonicalizer Utility
// Description: Interface for SBOM canonicalization
// -----------------------------------------------------------------------------
namespace StellaOps.Attestor.StandardPredicates.Canonicalization;
/// <summary>
/// Canonicalizes SBOM documents for deterministic DSSE signing.
/// Wraps existing RFC 8785 implementation with SBOM-specific ordering.
/// Equal documents must always yield identical canonical bytes.
/// </summary>
public interface ISbomCanonicalizer
{
    /// <summary>
    /// Canonicalizes an SBOM document to deterministic bytes.
    /// </summary>
    /// <typeparam name="T">SBOM document type.</typeparam>
    /// <param name="document">The SBOM document.</param>
    /// <returns>Canonical JSON bytes (UTF-8).</returns>
    byte[] Canonicalize<T>(T document) where T : class;

    /// <summary>
    /// Computes SHA-256 hash of canonical SBOM.
    /// </summary>
    /// <typeparam name="T">SBOM document type.</typeparam>
    /// <param name="document">The SBOM document.</param>
    /// <returns>Hex-encoded SHA-256 hash of the canonical bytes (lowercase).</returns>
    string ComputeHash<T>(T document) where T : class;

    /// <summary>
    /// Verifies that a document produces the expected hash.
    /// </summary>
    /// <typeparam name="T">SBOM document type.</typeparam>
    /// <param name="document">The SBOM document.</param>
    /// <param name="expectedHash">Expected SHA-256 hash (hex, compared case-insensitively).</param>
    /// <returns>True if hash matches.</returns>
    bool VerifyHash<T>(T document, string expectedHash) where T : class;
}
/// <summary>
/// SBOM format types supported by the canonicalization/writer pipeline.
/// </summary>
public enum SbomFormat
{
    /// <summary>CycloneDX 1.5/1.6 JSON.</summary>
    CycloneDx,

    /// <summary>SPDX 2.3 JSON.</summary>
    Spdx2,

    /// <summary>SPDX 3.0 JSON-LD.</summary>
    Spdx3
}

View File

@@ -0,0 +1,124 @@
// -----------------------------------------------------------------------------
// SbomCanonicalizer.cs
// Sprint: SPRINT_20260118_015_Attestor_deterministic_sbom_generation
// Task: TASK-015-003 - Create Canonicalizer Utility
// Description: SBOM canonicalization using RFC 8785
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.StandardPredicates.Canonicalization;
/// <summary>
/// Canonicalizes SBOM documents for deterministic DSSE signing.
/// Uses RFC 8785 (JCS) canonicalization with SBOM-specific ordering.
/// </summary>
public sealed class SbomCanonicalizer : ISbomCanonicalizer
{
    private readonly JsonSerializerOptions _options;

    // Options used when re-emitting individual JSON string tokens (property
    // names and string values). RFC 8785 §3.2.2.2 mandates minimal escaping:
    // only '"', '\' and control characters are escaped; everything else,
    // including non-ASCII, appears literally. FIX: the previous code used the
    // default encoder here, which escapes non-ASCII as \uXXXX and therefore
    // produced different canonical bytes for equivalent documents.
    private static readonly JsonSerializerOptions StringTokenOptions = new()
    {
        Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
    };

    /// <summary>
    /// Creates a new SBOM canonicalizer.
    /// </summary>
    public SbomCanonicalizer()
    {
        _options = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
            WriteIndented = false,
            Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
        };
    }

    /// <inheritdoc />
    public byte[] Canonicalize<T>(T document) where T : class
    {
        ArgumentNullException.ThrowIfNull(document);

        // Serialize to JSON first, then re-emit with canonical property
        // ordering and canonical token formatting.
        var json = JsonSerializer.Serialize(document, _options);
        using var doc = JsonDocument.Parse(json);
        var canonicalJson = CanonicalizeElement(doc.RootElement);
        return Encoding.UTF8.GetBytes(canonicalJson);
    }

    /// <inheritdoc />
    public string ComputeHash<T>(T document) where T : class
    {
        var bytes = Canonicalize(document);
        var hash = SHA256.HashData(bytes);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    /// <inheritdoc />
    public bool VerifyHash<T>(T document, string expectedHash) where T : class
    {
        var actualHash = ComputeHash(document);
        return string.Equals(actualHash, expectedHash, StringComparison.OrdinalIgnoreCase);
    }

    // Recursively emits a JSON element in canonical form.
    private static string CanonicalizeElement(JsonElement element)
    {
        return element.ValueKind switch
        {
            JsonValueKind.Object => CanonicalizeObject(element),
            JsonValueKind.Array => CanonicalizeArray(element),
            JsonValueKind.String => JsonSerializer.Serialize(element.GetString(), StringTokenOptions),
            JsonValueKind.Number => CanonicalizeNumber(element),
            JsonValueKind.True => "true",
            JsonValueKind.False => "false",
            JsonValueKind.Null => "null",
            _ => throw new InvalidOperationException($"Unexpected JSON element kind: {element.ValueKind}")
        };
    }

    private static string CanonicalizeObject(JsonElement element)
    {
        // RFC 8785: sort properties by Unicode code point order.
        var properties = element.EnumerateObject()
            .OrderBy(p => p.Name, StringComparer.Ordinal)
            .Select(p => $"{JsonSerializer.Serialize(p.Name, StringTokenOptions)}:{CanonicalizeElement(p.Value)}");
        return "{" + string.Join(",", properties) + "}";
    }

    private static string CanonicalizeArray(JsonElement element)
    {
        // Array order is semantically significant and is preserved as-is.
        var items = element.EnumerateArray()
            .Select(CanonicalizeElement);
        return "[" + string.Join(",", items) + "]";
    }

    private static string CanonicalizeNumber(JsonElement element)
    {
        // Integers are rendered as plain decimal digits.
        if (element.TryGetInt64(out var longValue))
        {
            return longValue.ToString(System.Globalization.CultureInfo.InvariantCulture);
        }

        if (element.TryGetDouble(out var doubleValue))
        {
            // RFC 8785 §3.2.2.3 requires ECMAScript-style serialization: the
            // shortest string that round-trips to the same IEEE 754 double.
            // FIX: .NET's round-trip format ("R", IEEE-correct since
            // .NET Core 3.0) produces exactly that; the previous
            // "G17" + trailing-zero-trim emitted non-shortest forms such as
            // "0.10000000000000001" for 0.1.
            var str = doubleValue.ToString("R", System.Globalization.CultureInfo.InvariantCulture);

            // Align exponent notation with ECMAScript: lowercase 'e' and no
            // zero-padded exponent ("1E+21" -> "1e+21", "1E-07" -> "1e-7").
            // NOTE(review): magnitudes in [1e17, 1e21) may still render in
            // exponent form where ECMAScript uses plain decimal — confirm
            // against JCS reference vectors if such values occur in SBOMs.
            var expIndex = str.IndexOf('E');
            if (expIndex >= 0)
            {
                var mantissa = str[..expIndex];
                var sign = str[expIndex + 1];
                var digits = str[(expIndex + 2)..].TrimStart('0');
                if (digits.Length == 0)
                {
                    digits = "0";
                }
                str = $"{mantissa}e{sign}{digits}";
            }

            return str;
        }

        // Out-of-range numerics (e.g. big decimals): fall back to the raw text.
        return element.GetRawText();
    }
}

View File

@@ -0,0 +1,375 @@
// -----------------------------------------------------------------------------
// SbomDocument.cs
// Sprint: SPRINT_20260118_015_Attestor_deterministic_sbom_generation
// Task: TASK-015-005 - SBOM Document Model
// Description: Format-agnostic SBOM document model for CycloneDX/SPDX emission
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
namespace StellaOps.Attestor.StandardPredicates.Models;
/// <summary>
/// Format-agnostic SBOM document that can be serialized to CycloneDX or SPDX.
/// This model abstracts common SBOM concepts across formats.
/// </summary>
/// <remarks>
/// Immutable by design - all collections use <see cref="ImmutableArray{T}"/>
/// and default to empty.
/// NOTE(review): a second, structurally different <c>SbomDocument</c> record
/// exists in the Writers namespace — confirm whether the two models should be
/// consolidated.
/// </remarks>
public sealed record SbomDocument
{
    /// <summary>
    /// Document name/identifier.
    /// </summary>
    public required string Name { get; init; }

    /// <summary>
    /// Document version. Defaults to "1".
    /// </summary>
    public string Version { get; init; } = "1";

    /// <summary>
    /// Creation timestamp (UTC).
    /// </summary>
    // NOTE(review): defaulting to UtcNow makes output time-dependent — callers
    // producing reproducible SBOMs should set this explicitly.
    public DateTimeOffset Timestamp { get; init; } = DateTimeOffset.UtcNow;

    /// <summary>
    /// SHA-256 digest of the artifact this SBOM describes (e.g., container image digest).
    /// Used to derive deterministic serialNumber: urn:sha256:&lt;artifact-digest&gt;
    /// </summary>
    /// <remarks>
    /// Sprint: SPRINT_20260118_025_ReleaseOrchestrator_sbom_release_association (TASK-025-004)
    /// If provided, CycloneDxWriter will generate serialNumber as urn:sha256:&lt;artifact-digest&gt;
    /// instead of using a deterministic UUID. This enables reproducible SBOMs where the
    /// serialNumber directly references the artifact being described.
    /// Format: lowercase hex string, 64 characters (no prefix).
    /// </remarks>
    public string? ArtifactDigest { get; init; }

    /// <summary>
    /// Document metadata (tools, authors, subject). Null when not supplied.
    /// </summary>
    public SbomMetadata? Metadata { get; init; }

    /// <summary>
    /// Software components in this SBOM. Defaults to empty.
    /// </summary>
    public ImmutableArray<SbomComponent> Components { get; init; } = [];

    /// <summary>
    /// Relationships between components. Defaults to empty.
    /// </summary>
    public ImmutableArray<SbomRelationship> Relationships { get; init; } = [];

    /// <summary>
    /// External references. Defaults to empty.
    /// </summary>
    public ImmutableArray<SbomExternalReference> ExternalReferences { get; init; } = [];

    /// <summary>
    /// Vulnerabilities associated with components. Defaults to empty.
    /// </summary>
    public ImmutableArray<SbomVulnerability> Vulnerabilities { get; init; } = [];
}
/// <summary>
/// SBOM document metadata.
/// </summary>
public sealed record SbomMetadata
{
    /// <summary>
    /// Tools used to generate this SBOM (free-form names). Defaults to empty.
    /// </summary>
    public ImmutableArray<string> Tools { get; init; } = [];

    /// <summary>
    /// Authors of this SBOM. Defaults to empty.
    /// </summary>
    public ImmutableArray<string> Authors { get; init; } = [];

    /// <summary>
    /// Component this SBOM describes (for CycloneDX metadata.component).
    /// </summary>
    public SbomComponent? Subject { get; init; }

    /// <summary>
    /// Supplier information (free-form text).
    /// </summary>
    public string? Supplier { get; init; }

    /// <summary>
    /// Manufacturer information (free-form text).
    /// </summary>
    public string? Manufacturer { get; init; }
}
/// <summary>
/// Software component in an SBOM.
/// </summary>
public sealed record SbomComponent
{
    /// <summary>
    /// Component type (library, application, framework, etc.). Defaults to Library.
    /// </summary>
    public SbomComponentType Type { get; init; } = SbomComponentType.Library;

    /// <summary>
    /// Unique reference within this SBOM (bom-ref for CycloneDX, part of SPDXID for SPDX).
    /// </summary>
    public required string BomRef { get; init; }

    /// <summary>
    /// Component name.
    /// </summary>
    public required string Name { get; init; }

    /// <summary>
    /// Component version. Null when unknown.
    /// </summary>
    public string? Version { get; init; }

    /// <summary>
    /// Package URL (purl) - primary identifier.
    /// </summary>
    /// <remarks>
    /// See https://github.com/package-url/purl-spec
    /// </remarks>
    public string? Purl { get; init; }

    /// <summary>
    /// CPE identifier.
    /// </summary>
    /// <remarks>
    /// See https://nvd.nist.gov/products/cpe
    /// </remarks>
    public string? Cpe { get; init; }

    /// <summary>
    /// Component description.
    /// </summary>
    public string? Description { get; init; }

    /// <summary>
    /// Component group/namespace.
    /// </summary>
    public string? Group { get; init; }

    /// <summary>
    /// Publisher/author.
    /// </summary>
    public string? Publisher { get; init; }

    /// <summary>
    /// Download location URL.
    /// </summary>
    public string? DownloadLocation { get; init; }

    /// <summary>
    /// Cryptographic hashes of the component. Defaults to empty.
    /// </summary>
    public ImmutableArray<SbomHash> Hashes { get; init; } = [];

    /// <summary>
    /// Licenses applicable to this component. Defaults to empty.
    /// </summary>
    public ImmutableArray<SbomLicense> Licenses { get; init; } = [];

    /// <summary>
    /// External references for this component. Defaults to empty.
    /// </summary>
    public ImmutableArray<SbomExternalReference> ExternalReferences { get; init; } = [];

    /// <summary>
    /// Component properties (key-value metadata). Defaults to empty.
    /// </summary>
    public ImmutableDictionary<string, string> Properties { get; init; } = ImmutableDictionary<string, string>.Empty;
}
/// <summary>
/// Component type classification.
/// </summary>
/// <remarks>
/// Format-neutral; writers are expected to map these onto the target SBOM
/// format's own type vocabulary.
/// </remarks>
public enum SbomComponentType
{
    /// <summary>Software library.</summary>
    Library,

    /// <summary>Standalone application.</summary>
    Application,

    /// <summary>Software framework.</summary>
    Framework,

    /// <summary>Container image.</summary>
    Container,

    /// <summary>Operating system.</summary>
    OperatingSystem,

    /// <summary>Device/hardware.</summary>
    Device,

    /// <summary>Firmware.</summary>
    Firmware,

    /// <summary>Source file.</summary>
    File,

    /// <summary>Data/dataset.</summary>
    Data,

    /// <summary>Machine learning model.</summary>
    MachineLearningModel
}
/// <summary>
/// Cryptographic hash of a component.
/// </summary>
public sealed record SbomHash
{
    /// <summary>
    /// Hash algorithm (SHA-256, SHA-512, etc.). Free-form; no normalization is
    /// performed by this type.
    /// </summary>
    public required string Algorithm { get; init; }

    /// <summary>
    /// Hash value (hex-encoded).
    /// </summary>
    public required string Value { get; init; }
}
/// <summary>
/// License information. Typically either <see cref="Id"/> (an SPDX identifier)
/// or <see cref="Name"/> is populated; this type does not enforce which.
/// </summary>
public sealed record SbomLicense
{
    /// <summary>
    /// SPDX license identifier.
    /// </summary>
    public string? Id { get; init; }

    /// <summary>
    /// License name (when not an SPDX ID).
    /// </summary>
    public string? Name { get; init; }

    /// <summary>
    /// License text URL.
    /// </summary>
    public string? Url { get; init; }

    /// <summary>
    /// Full license text.
    /// </summary>
    public string? Text { get; init; }
}
/// <summary>
/// Relationship between components. Both ends are identified by bom-ref.
/// </summary>
public sealed record SbomRelationship
{
    /// <summary>
    /// Source component reference (bom-ref).
    /// </summary>
    public required string SourceRef { get; init; }

    /// <summary>
    /// Target component reference (bom-ref).
    /// </summary>
    public required string TargetRef { get; init; }

    /// <summary>
    /// Relationship type. Defaults to <see cref="SbomRelationshipType.DependsOn"/>.
    /// </summary>
    public SbomRelationshipType Type { get; init; } = SbomRelationshipType.DependsOn;
}
/// <summary>
/// Relationship type between components (source-to-target direction).
/// </summary>
public enum SbomRelationshipType
{
    /// <summary>Source depends on target.</summary>
    DependsOn,

    /// <summary>Source is a dependency of target.</summary>
    DependencyOf,

    /// <summary>Source contains target.</summary>
    Contains,

    /// <summary>Source is contained by target.</summary>
    ContainedBy,

    /// <summary>Source is a build tool for target.</summary>
    BuildToolOf,

    /// <summary>Source is a dev dependency of target.</summary>
    DevDependencyOf,

    /// <summary>Source is an optional dependency of target.</summary>
    OptionalDependencyOf,

    /// <summary>Source provides target.</summary>
    Provides,

    /// <summary>Other relationship not covered by the values above.</summary>
    Other
}
/// <summary>
/// External reference (website, VCS, advisory, etc.).
/// </summary>
public sealed record SbomExternalReference
{
    /// <summary>
    /// Reference type (free-form string).
    /// </summary>
    public required string Type { get; init; }

    /// <summary>
    /// Reference URL.
    /// </summary>
    public required string Url { get; init; }

    /// <summary>
    /// Optional comment.
    /// </summary>
    public string? Comment { get; init; }
}
/// <summary>
/// Vulnerability information associated with one or more components.
/// </summary>
public sealed record SbomVulnerability
{
    /// <summary>
    /// Vulnerability ID (CVE, GHSA, etc.).
    /// </summary>
    public required string Id { get; init; }

    /// <summary>
    /// Vulnerability source (the authority that issued the ID).
    /// </summary>
    public required string Source { get; init; }

    /// <summary>
    /// Affected component references (bom-refs). Defaults to empty.
    /// </summary>
    public ImmutableArray<string> AffectedRefs { get; init; } = [];

    /// <summary>
    /// Severity rating (free-form string).
    /// </summary>
    public string? Severity { get; init; }

    /// <summary>
    /// CVSS score. Null when not scored.
    /// </summary>
    public double? CvssScore { get; init; }

    /// <summary>
    /// Description.
    /// </summary>
    public string? Description { get; init; }
}

View File

@@ -158,6 +158,10 @@ public sealed class CycloneDxPredicateParser : IPredicateParser
errors.Add(new ValidationError("$.version", "Missing required field: version (BOM serial version)", "CDX_MISSING_VERSION"));
}
// Sprint: SPRINT_20260118_025_ReleaseOrchestrator_sbom_release_association (TASK-025-004)
// Validate serialNumber format for deterministic SBOM compliance
ValidateSerialNumberFormat(payload, warnings);
// Components array (may be missing for empty BOMs)
if (!payload.TryGetProperty("components", out var components))
{
@@ -175,6 +179,69 @@ public sealed class CycloneDxPredicateParser : IPredicateParser
}
}
/// <summary>
/// Validates serialNumber format for deterministic SBOM compliance.
/// Sprint: SPRINT_20260118_025_ReleaseOrchestrator_sbom_release_association (TASK-025-004)
/// </summary>
/// <remarks>
/// Deterministic SBOMs should use the format: urn:sha256:&lt;artifact-digest&gt;
/// where artifact-digest is the SHA-256 hash of the artifact being described.
/// Non-deterministic formats (urn:uuid:) are allowed for backwards compatibility
/// but generate a warning to encourage migration to deterministic format.
/// </remarks>
private void ValidateSerialNumberFormat(JsonElement payload, List<ValidationWarning> warnings)
{
    const string Sha256Prefix = "urn:sha256:";
    const string UuidPrefix = "urn:uuid:";

    // serialNumber is optional in CycloneDX; an absent or empty value needs no warning.
    if (!payload.TryGetProperty("serialNumber", out var serialNumber))
    {
        return;
    }

    var value = serialNumber.GetString();
    if (string.IsNullOrEmpty(value))
    {
        return;
    }

    // Deterministic format: urn:sha256:<64 hex chars>.
    if (value.StartsWith(Sha256Prefix, StringComparison.OrdinalIgnoreCase))
    {
        var hashPart = value[Sha256Prefix.Length..];
        var digestIsValid = hashPart.Length == 64 && hashPart.All(char.IsAsciiHexDigit);
        if (digestIsValid)
        {
            _logger.LogDebug("serialNumber uses deterministic format: {SerialNumber}", value);
        }
        else
        {
            warnings.Add(new ValidationWarning(
                "$.serialNumber",
                $"serialNumber has urn:sha256: prefix but invalid hash format (expected 64 hex chars, got '{hashPart}')",
                "CDX_SERIAL_INVALID_SHA256"));
        }
        return;
    }

    // Legacy UUID format: deterministic builds should migrate away from it.
    if (value.StartsWith(UuidPrefix, StringComparison.OrdinalIgnoreCase))
    {
        _logger.LogDebug("serialNumber uses non-deterministic UUID format: {SerialNumber}", value);
        warnings.Add(new ValidationWarning(
            "$.serialNumber",
            $"serialNumber uses non-deterministic UUID format. For reproducible SBOMs, use 'urn:sha256:<artifact-digest>' format instead.",
            "CDX_SERIAL_NON_DETERMINISTIC"));
        return;
    }

    // Anything else is a non-standard scheme.
    _logger.LogDebug("serialNumber uses non-standard format: {SerialNumber}", value);
    warnings.Add(new ValidationWarning(
        "$.serialNumber",
        $"serialNumber uses non-standard format '{value}'. Expected 'urn:sha256:<artifact-digest>' for deterministic SBOMs.",
        "CDX_SERIAL_NON_STANDARD"));
}
private IReadOnlyDictionary<string, string> ExtractMetadata(JsonElement payload)
{
var metadata = new SortedDictionary<string, string>(StringComparer.Ordinal);

View File

@@ -0,0 +1,298 @@
// -----------------------------------------------------------------------------
// CycloneDxWriter.cs
// Sprint: SPRINT_20260118_015_Attestor_deterministic_sbom_generation
// Task: TASK-015-001 - Implement CycloneDX 1.6 JSON Writer
// Description: Deterministic CycloneDX writer for DSSE signing
// -----------------------------------------------------------------------------
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Attestor.StandardPredicates.Canonicalization;
namespace StellaOps.Attestor.StandardPredicates.Writers;
/// <summary>
/// Writes CycloneDX 1.6 JSON documents with deterministic output.
/// </summary>
/// <remarks>
/// Determinism comes from ordinal-sorting components, hashes, licenses and
/// dependencies before serialization and routing the result through the
/// RFC 8785 canonicalizer, so identical inputs always yield identical bytes.
/// </remarks>
public sealed class CycloneDxWriter : ISbomWriter
{
    private readonly ISbomCanonicalizer _canonicalizer;
    private readonly JsonSerializerOptions _options;

    /// <summary>
    /// CycloneDX spec version.
    /// </summary>
    public const string SpecVersion = "1.6";

    /// <summary>
    /// Namespace for name-based (UUIDv5) serial number generation.
    /// NOTE(review): this value is the RFC 4122 DNS namespace UUID — confirm
    /// whether a product-specific namespace was intended.
    /// </summary>
    private static readonly Guid CycloneDxNamespace = new("6ba7b810-9dad-11d1-80b4-00c04fd430c8");

    /// <inheritdoc />
    public SbomFormat Format => SbomFormat.CycloneDx;

    /// <summary>
    /// Creates a new CycloneDX writer.
    /// </summary>
    /// <param name="canonicalizer">Canonicalizer to use; defaults to <see cref="SbomCanonicalizer"/>.</param>
    public CycloneDxWriter(ISbomCanonicalizer? canonicalizer = null)
    {
        _canonicalizer = canonicalizer ?? new SbomCanonicalizer();
        _options = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
            WriteIndented = false
        };
    }

    /// <inheritdoc />
    public byte[] Write(SbomDocument document)
    {
        var cdx = ConvertToCycloneDx(document);
        return _canonicalizer.Canonicalize(cdx);
    }

    /// <inheritdoc />
    public Task<byte[]> WriteAsync(SbomDocument document, CancellationToken ct = default)
    {
        ct.ThrowIfCancellationRequested();
        return Task.FromResult(Write(document));
    }

    /// <inheritdoc />
    public string ComputeContentHash(SbomDocument document)
    {
        var bytes = Write(document);
        var hash = SHA256.HashData(bytes);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    /// <summary>
    /// Maps the format-agnostic document onto the CycloneDX object model with
    /// all collections sorted for deterministic output.
    /// </summary>
    private CycloneDxBom ConvertToCycloneDx(SbomDocument document)
    {
        // Sort components by bom-ref
        var sortedComponents = document.Components
            .OrderBy(c => c.BomRef, StringComparer.Ordinal)
            .Select(c => new CycloneDxComponent
            {
                BomRef = c.BomRef,
                Type = c.Type,
                Name = c.Name,
                Version = c.Version,
                Purl = c.Purl,
                Hashes = c.Hashes
                    .OrderBy(h => h.Algorithm, StringComparer.Ordinal)
                    .Select(h => new CycloneDxHash { Alg = h.Algorithm, Content = h.Value })
                    .ToList(),
                Licenses = c.Licenses.Count > 0
                    ? c.Licenses.OrderBy(l => l, StringComparer.Ordinal)
                        .Select(l => new CycloneDxLicense { Id = l })
                        .ToList()
                    : null
            })
            .ToList();

        // Sort dependencies by ref
        var sortedDependencies = document.Dependencies
            .OrderBy(d => d.Ref, StringComparer.Ordinal)
            .Select(d => new CycloneDxDependency
            {
                Ref = d.Ref,
                DependsOn = d.DependsOn.OrderBy(x => x, StringComparer.Ordinal).ToList()
            })
            .ToList();

        // Sprint: SPRINT_20260118_025_ReleaseOrchestrator_sbom_release_association (TASK-025-004)
        // Generate deterministic serial number using artifact digest when available
        var serialNumber = GenerateSerialNumber(document, sortedComponents);

        return new CycloneDxBom
        {
            BomFormat = "CycloneDX",
            SpecVersion = SpecVersion,
            SerialNumber = serialNumber,
            Version = 1,
            Metadata = new CycloneDxMetadata
            {
                // FIX: convert to UTC before formatting — the format string
                // appends a literal 'Z', which would mislabel a non-UTC offset.
                Timestamp = document.CreatedAt.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ssZ", CultureInfo.InvariantCulture),
                Tools = document.Tool != null
                    ? [new CycloneDxTool { Name = document.Tool.Name, Version = document.Tool.Version, Vendor = document.Tool.Vendor }]
                    : null
            },
            Components = sortedComponents,
            Dependencies = sortedDependencies.Count > 0 ? sortedDependencies : null
        };
    }

    /// <summary>
    /// Generates a deterministic serialNumber for the SBOM.
    /// Sprint: SPRINT_20260118_025_ReleaseOrchestrator_sbom_release_association (TASK-025-004)
    /// </summary>
    /// <remarks>
    /// If ArtifactDigest is provided, generates urn:sha256:&lt;artifact-digest&gt; format.
    /// Otherwise, falls back to UUIDv5 derived from sorted component list for backwards compatibility.
    /// The urn:sha256: format is preferred as it directly ties the SBOM identity to the artifact
    /// it describes, enabling reproducible builds and deterministic verification.
    /// </remarks>
    private string GenerateSerialNumber(SbomDocument document, IReadOnlyList<CycloneDxComponent> sortedComponents)
    {
        // Preferred: Use artifact digest when available
        if (!string.IsNullOrEmpty(document.ArtifactDigest))
        {
            // Validate and normalize the digest (lowercase, 64 hex chars)
            var digest = document.ArtifactDigest.ToLowerInvariant();
            if (digest.Length == 64 && digest.All(char.IsAsciiHexDigit))
            {
                return $"urn:sha256:{digest}";
            }

            // If digest has sha256: prefix, extract the hash
            if (digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
            {
                var hashPart = digest.Substring(7);
                if (hashPart.Length == 64 && hashPart.All(char.IsAsciiHexDigit))
                {
                    return $"urn:sha256:{hashPart}";
                }
            }
        }

        // Fallback: Generate UUIDv5 from sorted components (legacy behavior)
        var contentForSerial = JsonSerializer.Serialize(sortedComponents, _options);
        var uuid = GenerateUuidV5(contentForSerial);
        return $"urn:uuid:{uuid}";
    }

    /// <summary>
    /// Generates an RFC 4122 version 5 (name-based, SHA-1) UUID for
    /// <paramref name="input"/> under <see cref="CycloneDxNamespace"/>.
    /// </summary>
    private static string GenerateUuidV5(string input)
    {
        var nameBytes = Encoding.UTF8.GetBytes(input);
        var namespaceBytes = CycloneDxNamespace.ToByteArray();
        // Guid.ToByteArray() stores the first three fields little-endian; RFC 4122
        // hashes the big-endian (network-order) layout, so swap before hashing.
        SwapByteOrder(namespaceBytes);

        var combined = new byte[namespaceBytes.Length + nameBytes.Length];
        Buffer.BlockCopy(namespaceBytes, 0, combined, 0, namespaceBytes.Length);
        Buffer.BlockCopy(nameBytes, 0, combined, namespaceBytes.Length, nameBytes.Length);

        // FIX: RFC 4122 §4.3 defines version 5 over SHA-1 (not SHA-256). SHA-1
        // here is an identifier-derivation step, not a security boundary.
        var hash = SHA1.HashData(combined);

        var uuidBytes = new byte[16];
        Array.Copy(hash, uuidBytes, 16);
        // Set version (5) and variant bits in the big-endian RFC layout.
        uuidBytes[6] = (byte)((uuidBytes[6] & 0x0F) | 0x50);
        uuidBytes[8] = (byte)((uuidBytes[8] & 0x3F) | 0x80);
        // FIX: convert back to the little-endian field layout expected by
        // Guid(byte[]); without this swap the version nibble lands in the wrong
        // position of the canonical string and the result is not a valid UUIDv5.
        SwapByteOrder(uuidBytes);

        return new Guid(uuidBytes).ToString("D");
    }

    /// <summary>
    /// Swaps the first three Guid fields between little-endian and big-endian
    /// layouts (the transformation is its own inverse).
    /// </summary>
    private static void SwapByteOrder(byte[] guid)
    {
        // Swap first 4 bytes
        (guid[0], guid[3]) = (guid[3], guid[0]);
        (guid[1], guid[2]) = (guid[2], guid[1]);
        // Swap bytes 4-5
        (guid[4], guid[5]) = (guid[5], guid[4]);
        // Swap bytes 6-7
        (guid[6], guid[7]) = (guid[7], guid[6]);
    }

    #region CycloneDX Models

    private sealed record CycloneDxBom
    {
        [JsonPropertyName("bomFormat")]
        public required string BomFormat { get; init; }

        [JsonPropertyName("specVersion")]
        public required string SpecVersion { get; init; }

        [JsonPropertyName("serialNumber")]
        public required string SerialNumber { get; init; }

        [JsonPropertyName("version")]
        public int Version { get; init; }

        [JsonPropertyName("metadata")]
        public CycloneDxMetadata? Metadata { get; init; }

        [JsonPropertyName("components")]
        public IReadOnlyList<CycloneDxComponent>? Components { get; init; }

        [JsonPropertyName("dependencies")]
        public IReadOnlyList<CycloneDxDependency>? Dependencies { get; init; }
    }

    private sealed record CycloneDxMetadata
    {
        [JsonPropertyName("timestamp")]
        public string? Timestamp { get; init; }

        [JsonPropertyName("tools")]
        public IReadOnlyList<CycloneDxTool>? Tools { get; init; }
    }

    private sealed record CycloneDxTool
    {
        [JsonPropertyName("vendor")]
        public string? Vendor { get; init; }

        [JsonPropertyName("name")]
        public required string Name { get; init; }

        [JsonPropertyName("version")]
        public string? Version { get; init; }
    }

    private sealed record CycloneDxComponent
    {
        [JsonPropertyName("bom-ref")]
        public required string BomRef { get; init; }

        [JsonPropertyName("type")]
        public required string Type { get; init; }

        [JsonPropertyName("name")]
        public required string Name { get; init; }

        [JsonPropertyName("version")]
        public string? Version { get; init; }

        [JsonPropertyName("purl")]
        public string? Purl { get; init; }

        [JsonPropertyName("hashes")]
        public IReadOnlyList<CycloneDxHash>? Hashes { get; init; }

        [JsonPropertyName("licenses")]
        public IReadOnlyList<CycloneDxLicense>? Licenses { get; init; }
    }

    private sealed record CycloneDxHash
    {
        [JsonPropertyName("alg")]
        public required string Alg { get; init; }

        [JsonPropertyName("content")]
        public required string Content { get; init; }
    }

    private sealed record CycloneDxLicense
    {
        [JsonPropertyName("id")]
        public required string Id { get; init; }
    }

    private sealed record CycloneDxDependency
    {
        [JsonPropertyName("ref")]
        public required string Ref { get; init; }

        [JsonPropertyName("dependsOn")]
        public IReadOnlyList<string>? DependsOn { get; init; }
    }

    #endregion
}

View File

@@ -0,0 +1,205 @@
// -----------------------------------------------------------------------------
// ISbomWriter.cs
// Sprint: SPRINT_20260118_015_Attestor_deterministic_sbom_generation
// Task: TASK-015-001, TASK-015-002 - SBOM Writers
// Description: Interface for deterministic SBOM writing
// -----------------------------------------------------------------------------
namespace StellaOps.Attestor.StandardPredicates.Writers;
/// <summary>
/// Writes SBOM documents in deterministic, canonical format.
/// </summary>
/// <remarks>
/// Implementations must be deterministic: the same <see cref="SbomDocument"/>
/// must always produce identical bytes from <see cref="Write"/>.
/// </remarks>
public interface ISbomWriter
{
    /// <summary>
    /// The SBOM format this writer produces.
    /// </summary>
    Canonicalization.SbomFormat Format { get; }

    /// <summary>
    /// Writes an SBOM to canonical bytes.
    /// </summary>
    /// <param name="document">The SBOM document model.</param>
    /// <returns>Canonical JSON bytes (UTF-8).</returns>
    byte[] Write(SbomDocument document);

    /// <summary>
    /// Writes an SBOM to canonical bytes asynchronously.
    /// </summary>
    /// <param name="document">The SBOM document model.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>Canonical JSON bytes.</returns>
    Task<byte[]> WriteAsync(SbomDocument document, CancellationToken ct = default);

    /// <summary>
    /// Computes the content hash of the canonical SBOM.
    /// </summary>
    /// <param name="document">The SBOM document.</param>
    /// <returns>SHA-256 hash in lowercase hex format.</returns>
    string ComputeContentHash(SbomDocument document);
}
/// <summary>
/// Unified SBOM document model for Attestor operations.
/// </summary>
public sealed record SbomDocument
{
/// <summary>
/// Document name/identifier.
/// </summary>
public required string Name { get; init; }
/// <summary>
/// Document version.
/// </summary>
public string? Version { get; init; }
/// <summary>
/// Creation timestamp (UTC).
/// NOTE(review): the default is wall-clock <c>DateTimeOffset.UtcNow</c>, which is
/// non-deterministic; callers that need reproducible output should set this
/// explicitly — confirm whether a fixed default is intended here.
/// </summary>
public DateTimeOffset CreatedAt { get; init; } = DateTimeOffset.UtcNow;
/// <summary>
/// SHA-256 digest of the artifact this SBOM describes (e.g., container image digest).
/// Used to derive deterministic serialNumber: urn:sha256:&lt;artifact-digest&gt;
/// </summary>
/// <remarks>
/// Sprint: SPRINT_20260118_025_ReleaseOrchestrator_sbom_release_association (TASK-025-004)
/// If provided, CycloneDxWriter will generate serialNumber as urn:sha256:&lt;artifact-digest&gt;
/// instead of using a deterministic UUID. This enables reproducible SBOMs where the
/// serialNumber directly references the artifact being described.
/// Format: lowercase hex string, 64 characters (no prefix).
/// </remarks>
public string? ArtifactDigest { get; init; }
/// <summary>
/// Components in the SBOM. Defaults to an empty list, never null.
/// </summary>
public IReadOnlyList<SbomComponent> Components { get; init; } = [];
/// <summary>
/// Dependencies between components. Defaults to an empty list, never null.
/// </summary>
public IReadOnlyList<SbomDependency> Dependencies { get; init; } = [];
/// <summary>
/// Tool information (optional).
/// </summary>
public SbomTool? Tool { get; init; }
/// <summary>
/// External references. Defaults to an empty list, never null.
/// </summary>
public IReadOnlyList<SbomExternalReference> ExternalReferences { get; init; } = [];
}
/// <summary>
/// A component in the SBOM (package, library, or other artifact).
/// </summary>
public sealed record SbomComponent
{
/// <summary>
/// Unique reference ID used to link this component from dependency entries.
/// </summary>
public required string BomRef { get; init; }
/// <summary>
/// Component name.
/// </summary>
public required string Name { get; init; }
/// <summary>
/// Component version.
/// </summary>
public string? Version { get; init; }
/// <summary>
/// Package URL (purl).
/// </summary>
public string? Purl { get; init; }
/// <summary>
/// Component type. Defaults to "library".
/// </summary>
public string Type { get; init; } = "library";
/// <summary>
/// Hashes for the component. Defaults to an empty list, never null.
/// </summary>
public IReadOnlyList<SbomHash> Hashes { get; init; } = [];
/// <summary>
/// License identifiers. Defaults to an empty list, never null.
/// </summary>
public IReadOnlyList<string> Licenses { get; init; } = [];
}
/// <summary>
/// A hash (algorithm + value pair) attached to an SBOM component.
/// </summary>
public sealed record SbomHash
{
/// <summary>
/// Hash algorithm (e.g., SHA-256, SHA-512).
/// </summary>
public required string Algorithm { get; init; }
/// <summary>
/// Hash value in hex format.
/// </summary>
public required string Value { get; init; }
}
/// <summary>
/// A dependency relationship keyed by component bom-refs.
/// </summary>
public sealed record SbomDependency
{
/// <summary>
/// bom-ref of the component that has the dependency.
/// </summary>
public required string Ref { get; init; }
/// <summary>
/// bom-refs this component depends on. Defaults to an empty list, never null.
/// </summary>
public IReadOnlyList<string> DependsOn { get; init; } = [];
}
/// <summary>
/// Information about the tool that produced the SBOM.
/// </summary>
public sealed record SbomTool
{
/// <summary>
/// Tool vendor (optional).
/// </summary>
public string? Vendor { get; init; }
/// <summary>
/// Tool name.
/// </summary>
public required string Name { get; init; }
/// <summary>
/// Tool version (optional).
/// </summary>
public string? Version { get; init; }
}
/// <summary>
/// An external reference (typed URL) attached to the SBOM document.
/// </summary>
public sealed record SbomExternalReference
{
/// <summary>
/// Reference type.
/// </summary>
public required string Type { get; init; }
/// <summary>
/// Reference URL.
/// </summary>
public required string Url { get; init; }
}

View File

@@ -0,0 +1,355 @@
// -----------------------------------------------------------------------------
// SpdxWriter.cs
// Sprint: SPRINT_20260118_015_Attestor_deterministic_sbom_generation
// Task: TASK-015-002 - Implement SPDX 3.0 JSON Writer
// Description: Deterministic SPDX 3.0 JSON-LD writer for DSSE signing
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Attestor.StandardPredicates.Canonicalization;
using StellaOps.Attestor.StandardPredicates.Models;
namespace StellaOps.Attestor.StandardPredicates.Writers;
/// <summary>
/// Writes SPDX 3.0 JSON-LD documents with deterministic output.
/// Elements, checksums and relationships are all emitted in ordinal sort
/// order so identical inputs always produce identical bytes.
/// </summary>
public sealed class SpdxWriter : ISbomWriter
{
    private readonly ISbomCanonicalizer _canonicalizer;
    private readonly JsonSerializerOptions _options;

    /// <summary>
    /// SPDX spec version.
    /// </summary>
    public const string SpecVersion = "3.0";

    /// <summary>
    /// SPDX JSON-LD context.
    /// </summary>
    public const string Context = "https://spdx.org/rdf/3.0.0/spdx-context.jsonld";

    /// <inheritdoc />
    public SbomFormat Format => SbomFormat.Spdx;

    /// <summary>
    /// Creates a new SPDX writer.
    /// </summary>
    /// <param name="canonicalizer">Optional canonicalizer; defaults to <see cref="SbomCanonicalizer"/>.</param>
    public SpdxWriter(ISbomCanonicalizer? canonicalizer = null)
    {
        _canonicalizer = canonicalizer ?? new SbomCanonicalizer();
        _options = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
            WriteIndented = false
        };
    }

    /// <inheritdoc />
    public SbomWriteResult Write(SbomDocument document)
    {
        ArgumentNullException.ThrowIfNull(document);

        // Build SPDX structure, serialize, canonicalize, then hash the canonical bytes.
        var spdxDocument = BuildSpdxDocument(document);
        var json = JsonSerializer.Serialize(spdxDocument, _options);
        var jsonBytes = Encoding.UTF8.GetBytes(json);
        var canonicalBytes = _canonicalizer.Canonicalize(jsonBytes);
        var goldenHash = _canonicalizer.ComputeGoldenHash(canonicalBytes);

        return new SbomWriteResult
        {
            Format = SbomFormat.Spdx,
            CanonicalBytes = canonicalBytes,
            GoldenHash = goldenHash,
            DocumentId = spdxDocument.SpdxId
        };
    }

    /// <inheritdoc />
    public async Task<SbomWriteResult> WriteAsync(SbomDocument document, CancellationToken ct = default)
    {
        // Serialization/canonicalization is CPU-bound; offload to the thread pool.
        return await Task.Run(() => Write(document), ct);
    }

    /// <summary>
    /// Maps the unified document model into the SPDX 3.0 JSON-LD shape.
    /// </summary>
    private SpdxJsonLd BuildSpdxDocument(SbomDocument document)
    {
        var spdxId = GenerateSpdxId("SPDXRef-DOCUMENT", document.Name);

        // FIX: normalize to UTC before formatting. The format string appends a
        // literal 'Z', so a non-UTC offset would otherwise be silently mislabeled
        // as UTC. InvariantCulture keeps output independent of the host locale,
        // which matters for the determinism guarantee.
        var creationTime = document.Timestamp
            .ToUniversalTime()
            .ToString("yyyy-MM-ddTHH:mm:ssZ", System.Globalization.CultureInfo.InvariantCulture);

        // Build elements list (sorted by SPDXID below)
        var elements = new List<SpdxElement>();

        // Add document element
        elements.Add(new SpdxSbomElement
        {
            SpdxId = spdxId,
            Type = "SpdxDocument",
            Name = document.Name,
            CreationInfo = new SpdxCreationInfo
            {
                Created = creationTime,
                CreatedBy = document.Metadata?.Authors?.Select(a => $"Person: {a}").ToList() ?? [],
                CreatedUsing = document.Metadata?.Tools?.Select(t => $"Tool: {t}").ToList() ?? []
            }
        });

        // Add package elements for components (ordinal order for determinism)
        foreach (var component in document.Components.OrderBy(c => c.BomRef, StringComparer.Ordinal))
        {
            var packageId = GenerateSpdxId("SPDXRef-Package", component.BomRef);
            elements.Add(new SpdxPackageElement
            {
                SpdxId = packageId,
                Type = "Package",
                Name = component.Name,
                Version = component.Version,
                PackageUrl = component.Purl,
                Cpe = component.Cpe,
                DownloadLocation = component.DownloadLocation ?? "NOASSERTION",
                FilesAnalyzed = false,
                Checksums = component.Hashes
                    .OrderBy(h => h.Algorithm, StringComparer.Ordinal)
                    .Select(h => new SpdxChecksum
                    {
                        Algorithm = MapHashAlgorithm(h.Algorithm),
                        ChecksumValue = h.Value
                    })
                    .ToList(),
                // Only the first license is surfaced; any further entries are dropped.
                LicenseConcluded = component.Licenses?.FirstOrDefault()?.Id ?? "NOASSERTION",
                LicenseDeclared = component.Licenses?.FirstOrDefault()?.Id ?? "NOASSERTION",
                CopyrightText = "NOASSERTION"
            });
        }

        // Sort elements by SPDXID
        elements = elements.OrderBy(e => e.SpdxId, StringComparer.Ordinal).ToList();

        // Build relationships. FIX: use ordinal comparers — the culture-sensitive
        // default OrderBy could reorder entries between hosts and break the
        // deterministic-output guarantee.
        var relationships = new List<SpdxRelationship>();
        foreach (var rel in document.Relationships
            .OrderBy(r => r.SourceRef, StringComparer.Ordinal)
            .ThenBy(r => r.TargetRef, StringComparer.Ordinal)
            .ThenBy(r => r.Type))
        {
            relationships.Add(new SpdxRelationship
            {
                SpdxElementId = GenerateSpdxId("SPDXRef-Package", rel.SourceRef),
                RelationshipType = MapRelationshipType(rel.Type),
                RelatedSpdxElement = GenerateSpdxId("SPDXRef-Package", rel.TargetRef)
            });
        }

        return new SpdxJsonLd
        {
            Context = Context,
            Graph = elements,
            SpdxId = spdxId,
            SpdxVersion = $"SPDX-{SpecVersion}",
            Relationships = relationships
        };
    }

    /// <summary>
    /// Builds an SPDX ID from a prefix and an arbitrary value, replacing any
    /// character outside [A-Za-z0-9.-] with '-'.
    /// </summary>
    private static string GenerateSpdxId(string prefix, string value)
    {
        var sanitized = new StringBuilder();
        foreach (var c in value)
        {
            if (char.IsLetterOrDigit(c) || c == '.' || c == '-')
            {
                sanitized.Append(c);
            }
            else
            {
                sanitized.Append('-');
            }
        }
        return $"{prefix}-{sanitized}";
    }

    /// <summary>
    /// Maps a free-form hash algorithm label to the SPDX algorithm token.
    /// Unknown algorithms pass through uppercased.
    /// </summary>
    private static string MapHashAlgorithm(string algorithm)
    {
        return algorithm.ToUpperInvariant() switch
        {
            "SHA-256" or "SHA256" => "SHA256",
            "SHA-512" or "SHA512" => "SHA512",
            "SHA-1" or "SHA1" => "SHA1",
            "MD5" => "MD5",
            _ => algorithm.ToUpperInvariant()
        };
    }

    /// <summary>
    /// Maps the internal relationship enum to the SPDX relationship token;
    /// unmapped values become "OTHER".
    /// </summary>
    private static string MapRelationshipType(SbomRelationshipType type)
    {
        return type switch
        {
            SbomRelationshipType.DependsOn => "DEPENDS_ON",
            SbomRelationshipType.DependencyOf => "DEPENDENCY_OF",
            SbomRelationshipType.Contains => "CONTAINS",
            SbomRelationshipType.ContainedBy => "CONTAINED_BY",
            SbomRelationshipType.BuildToolOf => "BUILD_TOOL_OF",
            SbomRelationshipType.DevDependencyOf => "DEV_DEPENDENCY_OF",
            SbomRelationshipType.OptionalDependencyOf => "OPTIONAL_DEPENDENCY_OF",
            _ => "OTHER"
        };
    }
}
// SPDX JSON-LD models
/// <summary>
/// SPDX 3.0 JSON-LD root document as serialized by <see cref="SpdxWriter"/>.
/// </summary>
public sealed record SpdxJsonLd
{
    /// <summary>JSON-LD context URI ("@context").</summary>
    [JsonPropertyName("@context")]
    public required string Context { get; init; }
    /// <summary>SPDX document ID.</summary>
    [JsonPropertyName("spdxId")]
    public required string SpdxId { get; init; }
    /// <summary>SPDX version string, e.g. "SPDX-3.0".</summary>
    [JsonPropertyName("spdxVersion")]
    public required string SpdxVersion { get; init; }
    /// <summary>Graph of elements ("@graph"), pre-sorted by SPDXID for determinism.</summary>
    [JsonPropertyName("@graph")]
    public required IReadOnlyList<SpdxElement> Graph { get; init; }
    /// <summary>Relationships between graph elements (omitted from JSON when null).</summary>
    [JsonPropertyName("relationships")]
    public IReadOnlyList<SpdxRelationship>? Relationships { get; init; }
}
/// <summary>
/// Base SPDX element shared by all "@graph" entries.
/// </summary>
public abstract record SpdxElement
{
    /// <summary>SPDX ID, unique within the document.</summary>
    [JsonPropertyName("spdxId")]
    public required string SpdxId { get; init; }
    /// <summary>JSON-LD element type ("@type"), e.g. "SpdxDocument" or "Package".</summary>
    [JsonPropertyName("@type")]
    public required string Type { get; init; }
    /// <summary>Element name (optional).</summary>
    [JsonPropertyName("name")]
    public string? Name { get; init; }
}
/// <summary>
/// SPDX document-level element ("SpdxDocument" type) carrying creation metadata.
/// </summary>
public sealed record SpdxSbomElement : SpdxElement
{
    /// <summary>Creation info (who/what/when produced the document).</summary>
    [JsonPropertyName("creationInfo")]
    public SpdxCreationInfo? CreationInfo { get; init; }
}
/// <summary>
/// SPDX package element describing one SBOM component.
/// </summary>
/// <remarks>
/// NOTE(review): SPDX 3.0 models external identifiers as structured objects;
/// serializing the bare purl string under "externalIdentifier" may not validate
/// against the SPDX 3.0 schema — confirm.
/// </remarks>
public sealed record SpdxPackageElement : SpdxElement
{
    /// <summary>Package version ("versionInfo").</summary>
    [JsonPropertyName("versionInfo")]
    public string? Version { get; init; }
    /// <summary>Package URL (purl), serialized as "externalIdentifier".</summary>
    [JsonPropertyName("externalIdentifier")]
    public string? PackageUrl { get; init; }
    /// <summary>CPE identifier.</summary>
    [JsonPropertyName("cpe")]
    public string? Cpe { get; init; }
    /// <summary>Download location; writer falls back to "NOASSERTION".</summary>
    [JsonPropertyName("downloadLocation")]
    public string? DownloadLocation { get; init; }
    /// <summary>Whether package files were analyzed; writer always emits false.</summary>
    [JsonPropertyName("filesAnalyzed")]
    public bool FilesAnalyzed { get; init; }
    /// <summary>Checksums, pre-sorted by algorithm for determinism.</summary>
    [JsonPropertyName("checksums")]
    public IReadOnlyList<SpdxChecksum>? Checksums { get; init; }
    /// <summary>Concluded license ("NOASSERTION" when unknown).</summary>
    [JsonPropertyName("licenseConcluded")]
    public string? LicenseConcluded { get; init; }
    /// <summary>Declared license ("NOASSERTION" when unknown).</summary>
    [JsonPropertyName("licenseDeclared")]
    public string? LicenseDeclared { get; init; }
    /// <summary>Copyright text; writer always emits "NOASSERTION".</summary>
    [JsonPropertyName("copyrightText")]
    public string? CopyrightText { get; init; }
}
/// <summary>
/// SPDX creation info: timestamp plus creating agents/tools.
/// </summary>
public sealed record SpdxCreationInfo
{
    /// <summary>Created timestamp, pre-formatted as an ISO-8601 UTC string.</summary>
    [JsonPropertyName("created")]
    public required string Created { get; init; }
    /// <summary>Creating agents, formatted as "Person: &lt;name&gt;" by the writer.</summary>
    [JsonPropertyName("createdBy")]
    public IReadOnlyList<string>? CreatedBy { get; init; }
    /// <summary>Creating tools, formatted as "Tool: &lt;name&gt;" by the writer.</summary>
    [JsonPropertyName("createdUsing")]
    public IReadOnlyList<string>? CreatedUsing { get; init; }
}
/// <summary>
/// SPDX checksum: algorithm token plus hex value.
/// </summary>
public sealed record SpdxChecksum
{
    /// <summary>SPDX algorithm token (e.g. "SHA256"), see SpdxWriter.MapHashAlgorithm.</summary>
    [JsonPropertyName("algorithm")]
    public required string Algorithm { get; init; }
    /// <summary>Checksum value.</summary>
    [JsonPropertyName("checksumValue")]
    public required string ChecksumValue { get; init; }
}
/// <summary>
/// SPDX relationship between two elements, keyed by their SPDX IDs.
/// </summary>
public sealed record SpdxRelationship
{
    /// <summary>Source element SPDX ID.</summary>
    [JsonPropertyName("spdxElementId")]
    public required string SpdxElementId { get; init; }
    /// <summary>Relationship token (e.g. "DEPENDS_ON"), see SpdxWriter.MapRelationshipType.</summary>
    [JsonPropertyName("relationshipType")]
    public required string RelationshipType { get; init; }
    /// <summary>Target element SPDX ID.</summary>
    [JsonPropertyName("relatedSpdxElement")]
    public required string RelatedSpdxElement { get; init; }
}

View File

@@ -0,0 +1,142 @@
// -----------------------------------------------------------------------------
// IVerdictLedgerRepository.cs
// Sprint: SPRINT_20260118_015_Attestor_verdict_ledger_foundation
// Task: VL-002 - Implement VerdictLedger entity and repository
// Description: Repository interface for append-only verdict ledger
// -----------------------------------------------------------------------------
namespace StellaOps.Attestor.VerdictLedger;
/// <summary>
/// Repository for append-only verdict ledger operations.
/// Enforces insert-only semantics - no updates or deletes.
/// </summary>
/// <remarks>
/// Entries are chained via <see cref="VerdictLedgerEntry.PreviousHash"/>; implementations
/// are expected to persist entries immutably.
/// </remarks>
public interface IVerdictLedgerRepository
{
/// <summary>
/// Appends a new entry to the ledger.
/// NOTE(review): VerdictLedgerService computes VerdictHash before calling this, yet the
/// return doc says "with computed hashes" — confirm which layer owns hash computation.
/// </summary>
/// <param name="entry">The entry to append.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The appended entry with computed hashes.</returns>
Task<VerdictLedgerEntry> AppendAsync(VerdictLedgerEntry entry, CancellationToken ct = default);
/// <summary>
/// Gets an entry by its ledger ID.
/// </summary>
/// <param name="ledgerId">The ledger ID.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The entry or null if not found.</returns>
Task<VerdictLedgerEntry?> GetByIdAsync(Guid ledgerId, CancellationToken ct = default);
/// <summary>
/// Gets an entry by its verdict hash.
/// </summary>
/// <param name="verdictHash">The verdict hash.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The entry or null if not found.</returns>
Task<VerdictLedgerEntry?> GetByHashAsync(string verdictHash, CancellationToken ct = default);
/// <summary>
/// Gets the most recent entry for a BOM reference.
/// </summary>
/// <param name="bomRef">The BOM reference (purl or digest).</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The most recent entry or null if none found.</returns>
Task<VerdictLedgerEntry?> GetLatestByBomRefAsync(string bomRef, CancellationToken ct = default);
/// <summary>
/// Queries entries by BOM reference.
/// </summary>
/// <param name="bomRef">The BOM reference (purl or digest).</param>
/// <param name="limit">Maximum entries to return.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Entries ordered by creation time descending.</returns>
IAsyncEnumerable<VerdictLedgerEntry> QueryByBomRefAsync(
string bomRef,
int limit = 100,
CancellationToken ct = default);
/// <summary>
/// Gets the chain of entries from a starting hash.
/// NOTE(review): traversal direction (towards genesis vs. towards the tip) is not
/// specified here — confirm against the implementation before relying on ordering.
/// </summary>
/// <param name="startHash">The hash to start from.</param>
/// <param name="count">Number of entries to retrieve.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Chain of entries in order.</returns>
IAsyncEnumerable<VerdictLedgerEntry> GetChainAsync(
string startHash,
int count = 10,
CancellationToken ct = default);
/// <summary>
/// Gets the latest entry in the ledger (chain tip).
/// </summary>
/// <param name="tenantId">Tenant ID.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The latest entry or null if ledger is empty.</returns>
Task<VerdictLedgerEntry?> GetLatestAsync(Guid tenantId, CancellationToken ct = default);
/// <summary>
/// Verifies the integrity of the hash chain from a given entry.
/// </summary>
/// <param name="fromHash">Starting hash for verification.</param>
/// <param name="toHash">Ending hash for verification.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Verification result.</returns>
Task<ChainVerificationResult> VerifyChainAsync(
string fromHash,
string toHash,
CancellationToken ct = default);
}
/// <summary>
/// Outcome of a hash-chain verification pass.
/// </summary>
public sealed record ChainVerificationResult
{
    /// <summary>True when every checked entry's stored hash matched its recomputed hash.</summary>
    public required bool IsValid { get; init; }

    /// <summary>How many entries were checked before the pass finished (or broke).</summary>
    public int EntriesVerified { get; init; }

    /// <summary>Hash of the first entry at which the chain broke; null when valid.</summary>
    public string? BrokenAtHash { get; init; }

    /// <summary>Human-readable failure description; null when valid.</summary>
    public string? ErrorMessage { get; init; }

    /// <summary>Builds a success result for a fully verified chain.</summary>
    public static ChainVerificationResult Valid(int entriesVerified) =>
        new() { IsValid = true, EntriesVerified = entriesVerified };

    /// <summary>Builds a failure result pinpointing where and why the chain broke.</summary>
    public static ChainVerificationResult Invalid(string brokenAtHash, string errorMessage) =>
        new() { IsValid = false, BrokenAtHash = brokenAtHash, ErrorMessage = errorMessage };
}

View File

@@ -0,0 +1,103 @@
// -----------------------------------------------------------------------------
// VerdictLedgerEntry.cs
// Sprint: SPRINT_20260118_015_Attestor_verdict_ledger_foundation
// Task: VL-002 - Implement VerdictLedger entity and repository
// Description: Append-only verdict ledger entry entity
// -----------------------------------------------------------------------------
namespace StellaOps.Attestor.VerdictLedger;
/// <summary>
/// An immutable entry in the append-only verdict ledger.
/// Each entry is cryptographically chained to the previous entry via SHA-256 hashes.
/// </summary>
public sealed record VerdictLedgerEntry
{
/// <summary>
/// Unique ledger entry ID.
/// </summary>
public required Guid LedgerId { get; init; }
/// <summary>
/// Package URL or container digest reference (e.g., purl or image@sha256:...).
/// </summary>
public required string BomRef { get; init; }
/// <summary>
/// CycloneDX serialNumber URN from the SBOM.
/// </summary>
public string? CycloneDxSerial { get; init; }
/// <summary>
/// Transparency log entry UUID (Rekor or similar).
/// </summary>
public string? RekorUuid { get; init; }
/// <summary>
/// The verdict decision.
/// </summary>
public required VerdictDecision Decision { get; init; }
/// <summary>
/// Human-readable reason for the decision.
/// </summary>
public required string Reason { get; init; }
/// <summary>
/// Reference to the policy bundle configuration.
/// </summary>
public required string PolicyBundleId { get; init; }
/// <summary>
/// SHA-256 hash of the policy bundle content.
/// </summary>
public required string PolicyBundleHash { get; init; }
/// <summary>
/// Container digest of the verifier service that evaluated this verdict.
/// </summary>
public required string VerifierImageDigest { get; init; }
/// <summary>
/// Key ID that signed this verdict.
/// </summary>
public required string SignerKeyId { get; init; }
/// <summary>
/// SHA-256 hash of the previous entry in the chain (null for genesis).
/// </summary>
public string? PreviousHash { get; init; }
/// <summary>
/// SHA-256 hash of this entry's canonical JSON form.
/// Computed over every other field of this record (excluding VerdictHash itself)
/// by VerdictLedgerService.ComputeVerdictHash; lowercase hex.
/// </summary>
public required string VerdictHash { get; init; }
/// <summary>
/// When this verdict was recorded (UTC).
/// </summary>
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Tenant ID for multi-tenancy.
/// </summary>
public required Guid TenantId { get; init; }
}
/// <summary>
/// Verdict decision enum.
/// The numeric values participate in the canonical hash (via their lowercase
/// names) and may be persisted — do not renumber or rename existing members.
/// </summary>
public enum VerdictDecision
{
/// <summary>Decision not yet determined.</summary>
Unknown = 0,
/// <summary>Approved for deployment/release.</summary>
Approve = 1,
/// <summary>Rejected - must not proceed.</summary>
Reject = 2,
/// <summary>Pending further review or information.</summary>
Pending = 3
}

View File

@@ -0,0 +1,271 @@
// -----------------------------------------------------------------------------
// VerdictLedgerService.cs
// Sprint: SPRINT_20260118_015_Attestor_verdict_ledger_foundation
// Task: VL-003 - Implement VerdictLedger service with chain validation
// Description: Service for managing the append-only verdict ledger
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.VerdictLedger;
/// <summary>
/// Service for managing the append-only verdict ledger.
/// Handles hash computation, chain validation, and verdict recording.
/// </summary>
public sealed class VerdictLedgerService : IVerdictLedgerService
{
    private readonly IVerdictLedgerRepository _repository;
    private readonly JsonSerializerOptions _canonicalOptions;

    /// <summary>
    /// Creates a new verdict ledger service.
    /// </summary>
    /// <param name="repository">Backing append-only repository.</param>
    /// <exception cref="ArgumentNullException">If <paramref name="repository"/> is null.</exception>
    public VerdictLedgerService(IVerdictLedgerRepository repository)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _canonicalOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
            WriteIndented = false
        };
    }

    /// <inheritdoc />
    public async Task<VerdictLedgerEntry> RecordVerdictAsync(
        RecordVerdictRequest request,
        CancellationToken ct = default)
    {
        request.Validate();

        // Get the latest entry for hash chaining.
        // NOTE(review): read-tip-then-append is not atomic; two concurrent writers
        // for the same tenant could both chain off the same previous hash unless
        // the repository enforces uniqueness — confirm.
        var latest = await _repository.GetLatestAsync(request.TenantId, ct);
        var previousHash = latest?.VerdictHash;

        // Build the entry with a placeholder hash, then seal it with the real one.
        var entry = new VerdictLedgerEntry
        {
            LedgerId = Guid.NewGuid(),
            BomRef = request.BomRef,
            CycloneDxSerial = request.CycloneDxSerial,
            RekorUuid = request.RekorUuid,
            Decision = request.Decision,
            Reason = request.Reason,
            PolicyBundleId = request.PolicyBundleId,
            PolicyBundleHash = request.PolicyBundleHash,
            VerifierImageDigest = request.VerifierImageDigest,
            SignerKeyId = request.SignerKeyId,
            PreviousHash = previousHash,
            VerdictHash = string.Empty, // Computed below
            CreatedAt = DateTimeOffset.UtcNow,
            TenantId = request.TenantId
        };

        entry = entry with { VerdictHash = ComputeVerdictHash(entry) };

        return await _repository.AppendAsync(entry, ct);
    }

    /// <inheritdoc />
    public async Task<VerdictLedgerEntry?> GetVerdictAsync(string bomRef, CancellationToken ct = default)
    {
        return await _repository.GetLatestByBomRefAsync(bomRef, ct);
    }

    /// <inheritdoc />
    public async Task<VerdictLedgerEntry?> GetByIdAsync(Guid ledgerId, CancellationToken ct = default)
    {
        return await _repository.GetByIdAsync(ledgerId, ct);
    }

    /// <inheritdoc />
    public IAsyncEnumerable<VerdictLedgerEntry> QueryByBomRefAsync(
        string bomRef,
        int limit = 100,
        CancellationToken ct = default)
    {
        return _repository.QueryByBomRefAsync(bomRef, limit, ct);
    }

    /// <inheritdoc />
    public async Task<ChainVerificationResult> VerifyChainAsync(
        string fromHash,
        string toHash,
        CancellationToken ct = default)
    {
        return await _repository.VerifyChainAsync(fromHash, toHash, ct);
    }

    /// <inheritdoc />
    public async Task<ChainVerificationResult> VerifyFullChainAsync(
        Guid tenantId,
        CancellationToken ct = default)
    {
        var latest = await _repository.GetLatestAsync(tenantId, ct);
        if (latest == null)
        {
            return ChainVerificationResult.Valid(0);
        }

        // FIX: verify the chain tip itself before walking backwards. The previous
        // implementation counted the tip as verified without re-hashing it, so a
        // tampered tip entry would still report a valid chain.
        var tipHash = ComputeVerdictHash(latest);
        if (tipHash != latest.VerdictHash)
        {
            return ChainVerificationResult.Invalid(
                latest.VerdictHash,
                $"Hash mismatch at {latest.LedgerId}: computed {tipHash}, stored {latest.VerdictHash}");
        }

        var entriesVerified = 1;
        var previousHash = latest.PreviousHash;
        while (previousHash != null)
        {
            // Check cancellation before each repository round-trip.
            ct.ThrowIfCancellationRequested();

            var entry = await _repository.GetByHashAsync(previousHash, ct);
            if (entry == null)
            {
                return ChainVerificationResult.Invalid(
                    previousHash,
                    $"Missing entry in chain: {previousHash}");
            }

            // Verify the stored hash matches the recomputed canonical hash.
            var computedHash = ComputeVerdictHash(entry);
            if (computedHash != entry.VerdictHash)
            {
                return ChainVerificationResult.Invalid(
                    entry.VerdictHash,
                    $"Hash mismatch at {entry.LedgerId}: computed {computedHash}, stored {entry.VerdictHash}");
            }

            entriesVerified++;
            previousHash = entry.PreviousHash;
        }

        return ChainVerificationResult.Valid(entriesVerified);
    }

    /// <summary>
    /// Computes the SHA-256 hash of a verdict entry in canonical form.
    /// The canonical payload covers every field except <see cref="VerdictLedgerEntry.VerdictHash"/>
    /// itself; the result is lowercase hex.
    /// </summary>
    private string ComputeVerdictHash(VerdictLedgerEntry entry)
    {
        var canonical = new
        {
            ledgerId = entry.LedgerId.ToString("D"),
            bomRef = entry.BomRef,
            cycloneDxSerial = entry.CycloneDxSerial,
            rekorUuid = entry.RekorUuid,
            decision = entry.Decision.ToString().ToLowerInvariant(),
            reason = entry.Reason,
            policyBundleId = entry.PolicyBundleId,
            policyBundleHash = entry.PolicyBundleHash,
            verifierImageDigest = entry.VerifierImageDigest,
            signerKeyId = entry.SignerKeyId,
            previousHash = entry.PreviousHash,
            createdAt = entry.CreatedAt.ToUniversalTime().ToString("O"),
            tenantId = entry.TenantId.ToString("D")
        };
        var json = JsonSerializer.Serialize(canonical, _canonicalOptions);
        var bytes = Encoding.UTF8.GetBytes(json);
        var hash = SHA256.HashData(bytes);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}
/// <summary>
/// Service interface for verdict ledger operations.
/// </summary>
public interface IVerdictLedgerService
{
/// <summary>
/// Records a new verdict to the ledger, chaining it to the tenant's current tip.
/// </summary>
/// <param name="request">Validated verdict details.</param>
/// <param name="ct">Cancellation token.</param>
Task<VerdictLedgerEntry> RecordVerdictAsync(RecordVerdictRequest request, CancellationToken ct = default);
/// <summary>
/// Gets the latest verdict for a BOM reference, or null when none exists.
/// </summary>
Task<VerdictLedgerEntry?> GetVerdictAsync(string bomRef, CancellationToken ct = default);
/// <summary>
/// Gets a verdict by its ledger ID, or null when not found.
/// </summary>
Task<VerdictLedgerEntry?> GetByIdAsync(Guid ledgerId, CancellationToken ct = default);
/// <summary>
/// Queries verdicts by BOM reference, up to <paramref name="limit"/> entries.
/// </summary>
IAsyncEnumerable<VerdictLedgerEntry> QueryByBomRefAsync(string bomRef, int limit = 100, CancellationToken ct = default);
/// <summary>
/// Verifies the integrity of a portion of the chain.
/// </summary>
Task<ChainVerificationResult> VerifyChainAsync(string fromHash, string toHash, CancellationToken ct = default);
/// <summary>
/// Verifies the integrity of the full chain for a tenant, walking back from the tip.
/// </summary>
Task<ChainVerificationResult> VerifyFullChainAsync(Guid tenantId, CancellationToken ct = default);
}
/// <summary>
/// Request to record a new verdict.
/// </summary>
public sealed record RecordVerdictRequest
{
    /// <summary>Package URL or container digest reference.</summary>
    public required string BomRef { get; init; }

    /// <summary>CycloneDX serialNumber URN.</summary>
    public string? CycloneDxSerial { get; init; }

    /// <summary>Rekor transparency log UUID.</summary>
    public string? RekorUuid { get; init; }

    /// <summary>The verdict decision.</summary>
    public required VerdictDecision Decision { get; init; }

    /// <summary>Reason for the decision.</summary>
    public required string Reason { get; init; }

    /// <summary>Policy bundle ID.</summary>
    public required string PolicyBundleId { get; init; }

    /// <summary>Policy bundle SHA-256 hash.</summary>
    public required string PolicyBundleHash { get; init; }

    /// <summary>Verifier image digest.</summary>
    public required string VerifierImageDigest { get; init; }

    /// <summary>Signer key ID.</summary>
    public required string SignerKeyId { get; init; }

    /// <summary>Tenant ID.</summary>
    public required Guid TenantId { get; init; }

    /// <summary>
    /// Validates the request, throwing <see cref="ArgumentException"/> for the
    /// first required field that is missing or blank.
    /// </summary>
    public void Validate()
    {
        // Local guard keeps the per-field checks to one line each while preserving
        // the exact exception type, message, and parameter name per field.
        static void RequireText(string? value, string message, string paramName)
        {
            if (string.IsNullOrWhiteSpace(value))
                throw new ArgumentException(message, paramName);
        }

        RequireText(BomRef, "BomRef is required.", nameof(BomRef));
        RequireText(Reason, "Reason is required.", nameof(Reason));
        RequireText(PolicyBundleId, "PolicyBundleId is required.", nameof(PolicyBundleId));
        RequireText(PolicyBundleHash, "PolicyBundleHash is required.", nameof(PolicyBundleHash));
        RequireText(VerifierImageDigest, "VerifierImageDigest is required.", nameof(VerifierImageDigest));
        RequireText(SignerKeyId, "SignerKeyId is required.", nameof(SignerKeyId));

        if (TenantId == Guid.Empty)
            throw new ArgumentException("TenantId is required.", nameof(TenantId));
    }
}

View File

@@ -0,0 +1,194 @@
// -----------------------------------------------------------------------------
// CycloneDxDeterminismTests.cs
// Sprint: SPRINT_20260118_015_Attestor_deterministic_sbom_generation
// Task: TASK-015-004 - Golden Hash Reproducibility Tests
// Description: Tests proving deterministic CycloneDX output
// -----------------------------------------------------------------------------
using System.Text;
using StellaOps.Attestor.StandardPredicates.Canonicalization;
using StellaOps.Attestor.StandardPredicates.Models;
using StellaOps.Attestor.StandardPredicates.Writers;
using Xunit;
namespace StellaOps.Attestor.StandardPredicates.Tests;
/// <summary>
/// Tests proving CycloneDX writer produces deterministic output.
/// Golden hash values are documented in comments for CI gate verification.
/// </summary>
public sealed class CycloneDxDeterminismTests
{
// Writer instance shared across tests; presumed stateless between Write calls — confirm.
private readonly CycloneDxWriter _writer = new();
/// <summary>
/// Test Case 1: writing the same document twice yields byte-identical canonical
/// output and therefore an identical golden hash.
/// </summary>
[Fact]
public void IdenticalInputs_ProduceIdenticalHashes()
{
    var doc = CreateTestDocument("test-app", "1.0.0");

    var first = _writer.Write(doc);
    var second = _writer.Write(doc);

    Assert.Equal(first.GoldenHash, second.GoldenHash);
    Assert.True(first.CanonicalBytes.SequenceEqual(second.CanonicalBytes));
}
/// <summary>
/// Test Case 2: the same set of components supplied in different orders hashes
/// identically, because the writer sorts components before serializing.
/// </summary>
[Fact]
public void DifferentComponentOrdering_ProducesSameHash()
{
    var firstOrder = new[]
    {
        CreateComponent("pkg:npm/lodash@4.17.21", "lodash"),
        CreateComponent("pkg:npm/express@4.18.2", "express"),
        CreateComponent("pkg:npm/axios@1.4.0", "axios")
    };
    var secondOrder = new[]
    {
        CreateComponent("pkg:npm/axios@1.4.0", "axios"),
        CreateComponent("pkg:npm/lodash@4.17.21", "lodash"),
        CreateComponent("pkg:npm/express@4.18.2", "express")
    };

    var hashA = _writer.Write(CreateDocumentWithComponents("app", firstOrder)).GoldenHash;
    var hashB = _writer.Write(CreateDocumentWithComponents("app", secondOrder)).GoldenHash;

    // The writer sorts components internally, so input order must not matter.
    Assert.Equal(hashA, hashB);
}
/// <summary>
/// Test Case 3: ten consecutive writes of one document all produce the same
/// golden hash and the same canonical byte sequence.
/// </summary>
[Fact]
public void TenConsecutiveRuns_ProduceIdenticalOutput()
{
    var document = CreateTestDocument("multi-run-test", "2.0.0");

    // Take the first write as the baseline, then compare every later run to it.
    var baseline = _writer.Write(document);
    for (var run = 1; run < 10; run++)
    {
        var current = _writer.Write(document);

        Assert.Equal(baseline.GoldenHash, current.GoldenHash);
        Assert.True(baseline.CanonicalBytes.SequenceEqual(current.CanonicalBytes),
            $"Run {run + 1} produced different bytes");
    }
}
/// <summary>
/// Test Case 4: Empty components array is handled correctly.
/// </summary>
[Fact]
public void EmptyComponents_ProducesDeterministicOutput()
{
var document = new SbomDocument
{
Name = "empty-test",
Version = "1.0.0",
Timestamp = new DateTimeOffset(2026, 1, 18, 12, 0, 0, TimeSpan.Zero),
Components = [],
Relationships = []
};
var result1 = _writer.Write(document);
var result2 = _writer.Write(document);
Assert.Equal(result1.GoldenHash, result2.GoldenHash);
}
/// <summary>
/// Test Case 5: Unicode content is normalized correctly.
/// </summary>
[Fact]
public void UnicodeContent_IsNormalizedDeterministically()
{
// Test with various Unicode representations
var component1 = CreateComponent("pkg:npm/café@1.0.0", "café"); // composed
var component2 = CreateComponent("pkg:npm/café@1.0.0", "café"); // might be decomposed
var doc1 = CreateDocumentWithComponents("unicode-test", [component1]);
var doc2 = CreateDocumentWithComponents("unicode-test", [component2]);
var result1 = _writer.Write(doc1);
var result2 = _writer.Write(doc2);
// After NFC normalization, should produce same hash
Assert.Equal(result1.GoldenHash, result2.GoldenHash);
}
private static SbomDocument CreateTestDocument(string name, string version)
{
return new SbomDocument
{
Name = name,
Version = version,
Timestamp = new DateTimeOffset(2026, 1, 18, 12, 0, 0, TimeSpan.Zero),
Metadata = new SbomMetadata
{
Tools = ["stella-scanner@1.0.0"],
Authors = ["test@example.com"]
},
Components =
[
CreateComponent("pkg:npm/lodash@4.17.21", "lodash"),
CreateComponent("pkg:npm/express@4.18.2", "express")
],
Relationships =
[
new SbomRelationship
{
SourceRef = "lodash",
TargetRef = "express",
Type = SbomRelationshipType.DependsOn
}
]
};
}
private static SbomDocument CreateDocumentWithComponents(string name, SbomComponent[] components)
{
return new SbomDocument
{
Name = name,
Version = "1.0.0",
Timestamp = new DateTimeOffset(2026, 1, 18, 12, 0, 0, TimeSpan.Zero),
Components = [.. components],
Relationships = []
};
}
private static SbomComponent CreateComponent(string purl, string name)
{
return new SbomComponent
{
BomRef = name,
Name = name,
Version = purl.Split('@').LastOrDefault() ?? "1.0.0",
Purl = purl,
Hashes =
[
new SbomHash { Algorithm = "SHA-256", Value = "abcd1234" }
]
};
}
}

View File

@@ -0,0 +1,292 @@
// -----------------------------------------------------------------------------
// SerialNumberDerivationTests.cs
// Sprint: SPRINT_20260118_025_ReleaseOrchestrator_sbom_release_association
// Task: TASK-025-004 - Enforce serialNumber Derivation Rule
// Description: Tests for deterministic serialNumber generation using artifact digest
// -----------------------------------------------------------------------------
using StellaOps.Attestor.StandardPredicates.Writers;
using Xunit;
using System.Text.Json;
using System.Text;
namespace StellaOps.Attestor.StandardPredicates.Tests;
/// <summary>
/// Tests for serialNumber derivation rule enforcement.
/// Sprint: SPRINT_20260118_025_ReleaseOrchestrator_sbom_release_association (TASK-025-004)
/// </summary>
public sealed class SerialNumberDerivationTests
{
    private readonly CycloneDxWriter _writer = new();

    #region Artifact Digest Format Tests
    /// <summary>
    /// When ArtifactDigest is provided in valid format, serialNumber should use urn:sha256: prefix.
    /// </summary>
    [Fact]
    public void ArtifactDigest_ValidHex_GeneratesUrnSha256SerialNumber()
    {
        // Arrange
        var artifactDigest = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"; // SHA-256 of empty string
        var document = CreateDocument(artifactDigest);

        // Act
        var serialNumber = ExtractSerialNumber(document);

        // Assert
        Assert.NotNull(serialNumber);
        Assert.StartsWith("urn:sha256:", serialNumber);
        Assert.Equal($"urn:sha256:{artifactDigest}", serialNumber);
    }

    /// <summary>
    /// When ArtifactDigest has sha256: prefix, it should be normalized to urn:sha256: format.
    /// </summary>
    [Fact]
    public void ArtifactDigest_WithSha256Prefix_NormalizesToUrnSha256()
    {
        // Arrange
        var rawDigest = "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
        var document = CreateDocument(rawDigest);

        // Act
        var serialNumber = ExtractSerialNumber(document);

        // Assert
        Assert.NotNull(serialNumber);
        Assert.Equal("urn:sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", serialNumber);
    }

    /// <summary>
    /// When ArtifactDigest is uppercase hex, it should be normalized to lowercase.
    /// </summary>
    [Fact]
    public void ArtifactDigest_UppercaseHex_NormalizedToLowercase()
    {
        // Arrange
        var uppercaseDigest = "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855";
        var document = CreateDocument(uppercaseDigest);

        // Act
        var serialNumber = ExtractSerialNumber(document);

        // Assert
        Assert.NotNull(serialNumber);
        Assert.Equal("urn:sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", serialNumber);
    }

    /// <summary>
    /// When ArtifactDigest is null, serialNumber should fall back to urn:uuid: format.
    /// </summary>
    [Fact]
    public void ArtifactDigest_Null_FallsBackToUuid()
    {
        // Arrange
        var document = CreateDocument(null);

        // Act
        var serialNumber = ExtractSerialNumber(document);

        // Assert
        Assert.NotNull(serialNumber);
        Assert.StartsWith("urn:uuid:", serialNumber);
    }

    /// <summary>
    /// When ArtifactDigest is empty string, serialNumber should fall back to urn:uuid: format.
    /// </summary>
    [Fact]
    public void ArtifactDigest_EmptyString_FallsBackToUuid()
    {
        // Arrange
        var document = CreateDocument("");

        // Act
        var serialNumber = ExtractSerialNumber(document);

        // Assert
        Assert.NotNull(serialNumber);
        Assert.StartsWith("urn:uuid:", serialNumber);
    }

    /// <summary>
    /// When ArtifactDigest is invalid hex (wrong length), serialNumber should fall back to urn:uuid: format.
    /// </summary>
    [Fact]
    public void ArtifactDigest_InvalidLength_FallsBackToUuid()
    {
        // Arrange - only 32 chars instead of 64
        var shortDigest = "e3b0c44298fc1c149afbf4c8996fb924";
        var document = CreateDocument(shortDigest);

        // Act
        var serialNumber = ExtractSerialNumber(document);

        // Assert
        Assert.NotNull(serialNumber);
        Assert.StartsWith("urn:uuid:", serialNumber);
    }

    /// <summary>
    /// When ArtifactDigest contains non-hex characters, serialNumber should fall back to urn:uuid: format.
    /// </summary>
    [Fact]
    public void ArtifactDigest_NonHexChars_FallsBackToUuid()
    {
        // Arrange - contains 'g' and 'z' which are not hex
        var invalidDigest = "g3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b85z";
        var document = CreateDocument(invalidDigest);

        // Act
        var serialNumber = ExtractSerialNumber(document);

        // Assert
        Assert.NotNull(serialNumber);
        Assert.StartsWith("urn:uuid:", serialNumber);
    }
    #endregion

    #region Determinism Tests
    /// <summary>
    /// Same artifact digest should always produce the same serialNumber.
    /// </summary>
    [Fact]
    public void SameArtifactDigest_ProducesSameSerialNumber()
    {
        // Arrange
        var artifactDigest = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
        var doc1 = CreateDocument(artifactDigest);
        var doc2 = CreateDocument(artifactDigest);

        // Act
        var serialNumber1 = ExtractSerialNumber(doc1);
        var serialNumber2 = ExtractSerialNumber(doc2);

        // Assert
        Assert.Equal(serialNumber1, serialNumber2);
    }

    /// <summary>
    /// Different artifact digests should produce different serialNumbers.
    /// </summary>
    [Fact]
    public void DifferentArtifactDigests_ProduceDifferentSerialNumbers()
    {
        // Arrange
        var digest1 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
        var digest2 = "a3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
        var doc1 = CreateDocument(digest1);
        var doc2 = CreateDocument(digest2);

        // Act
        var serialNumber1 = ExtractSerialNumber(doc1);
        var serialNumber2 = ExtractSerialNumber(doc2);

        // Assert
        Assert.NotEqual(serialNumber1, serialNumber2);
        Assert.Equal($"urn:sha256:{digest1}", serialNumber1);
        Assert.Equal($"urn:sha256:{digest2}", serialNumber2);
    }

    /// <summary>
    /// 100 consecutive writes with same input produce identical output.
    /// </summary>
    [Fact]
    public void HundredConsecutiveWrites_ProduceIdenticalSerialNumber()
    {
        // Arrange
        var artifactDigest = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
        var document = CreateDocument(artifactDigest);
        var serialNumbers = new HashSet<string>();

        // Act
        for (var i = 0; i < 100; i++)
        {
            serialNumbers.Add(ExtractSerialNumber(document)!);
        }

        // Assert
        Assert.Single(serialNumbers);
        Assert.Equal($"urn:sha256:{artifactDigest}", serialNumbers.First());
    }
    #endregion

    #region Test Helpers
    // Writes the document and pulls "serialNumber" out of the resulting JSON.
    // FIX: JsonDocument is IDisposable (it rents pooled buffers); the original
    // tests never disposed it. Centralizing the write/parse/extract boilerplate
    // here both fixes the leak and removes nine copies of the same code.
    private string? ExtractSerialNumber(SbomDocument document)
    {
        var bytes = _writer.Write(document);
        var json = Encoding.UTF8.GetString(bytes);
        using var parsed = JsonDocument.Parse(json);
        return parsed.RootElement.GetProperty("serialNumber").GetString();
    }

    // Builds a minimal single-component document carrying the digest under test.
    private static SbomDocument CreateDocument(string? artifactDigest)
    {
        return new SbomDocument
        {
            Name = "test-app",
            Version = "1.0.0",
            CreatedAt = new DateTimeOffset(2026, 1, 18, 12, 0, 0, TimeSpan.Zero),
            ArtifactDigest = artifactDigest,
            Components =
            [
                new SbomComponent
                {
                    BomRef = "lodash",
                    Name = "lodash",
                    Version = "4.17.21",
                    Type = "library"
                }
            ],
            Tool = new SbomTool
            {
                Name = "stella-scanner",
                Version = "1.0.0"
            }
        };
    }
    #endregion
}

View File

@@ -0,0 +1,196 @@
// -----------------------------------------------------------------------------
// SpdxDeterminismTests.cs
// Sprint: SPRINT_20260118_015_Attestor_deterministic_sbom_generation
// Task: TASK-015-004 - Golden Hash Reproducibility Tests
// Description: Tests proving deterministic SPDX output
// -----------------------------------------------------------------------------
using StellaOps.Attestor.StandardPredicates.Models;
using StellaOps.Attestor.StandardPredicates.Writers;
using Xunit;
namespace StellaOps.Attestor.StandardPredicates.Tests;
/// <summary>
/// Tests proving SPDX writer produces deterministic output.
/// Golden hash values are documented in comments for CI gate verification.
/// </summary>
public sealed class SpdxDeterminismTests
{
    private readonly SpdxWriter _spdxWriter = new();

    /// <summary>
    /// Test Case 1: writing the same document twice yields byte-identical
    /// canonical output and the same golden hash.
    /// </summary>
    [Fact]
    public void IdenticalInputs_ProduceIdenticalHashes()
    {
        var doc = MakeDocument("test-app", "1.0.0");

        var first = _spdxWriter.Write(doc);
        var second = _spdxWriter.Write(doc);

        Assert.Equal(first.GoldenHash, second.GoldenHash);
        Assert.True(first.CanonicalBytes.SequenceEqual(second.CanonicalBytes));
    }

    /// <summary>
    /// Test Case 2: component input order must not affect the hash, because
    /// the writer sorts SPDX elements by SPDXID before serializing.
    /// </summary>
    [Fact]
    public void DifferentComponentOrdering_ProducesSameHash()
    {
        var orderingA = new[]
        {
            MakePackage("pkg:npm/zebra@1.0.0", "zebra"),
            MakePackage("pkg:npm/alpha@1.0.0", "alpha"),
            MakePackage("pkg:npm/middle@1.0.0", "middle")
        };
        var orderingB = new[]
        {
            MakePackage("pkg:npm/alpha@1.0.0", "alpha"),
            MakePackage("pkg:npm/zebra@1.0.0", "zebra"),
            MakePackage("pkg:npm/middle@1.0.0", "middle")
        };

        var resultA = _spdxWriter.Write(WrapComponents("app", orderingA));
        var resultB = _spdxWriter.Write(WrapComponents("app", orderingB));

        // Sorting by SPDXID inside the writer makes both orderings canonical.
        Assert.Equal(resultA.GoldenHash, resultB.GoldenHash);
    }

    /// <summary>
    /// Test Case 3: repeated writes of one document stay stable across runs.
    /// </summary>
    [Fact]
    public void TenConsecutiveRuns_ProduceIdenticalOutput()
    {
        var doc = MakeDocument("spdx-multi-run", "2.0.0");

        // Run 1 establishes the baseline; runs 2..10 must match it exactly.
        var baseline = _spdxWriter.Write(doc);
        for (var run = 2; run <= 10; run++)
        {
            var current = _spdxWriter.Write(doc);
            Assert.Equal(baseline.GoldenHash, current.GoldenHash);
            Assert.True(
                baseline.CanonicalBytes.SequenceEqual(current.CanonicalBytes),
                $"Run {run} produced different bytes");
        }
    }

    /// <summary>
    /// Test Case 4: relationship serialization is deterministic regardless of
    /// the order the relationships were declared in.
    /// </summary>
    [Fact]
    public void RelationshipOrdering_IsDeterministic()
    {
        var doc = new SbomDocument
        {
            Name = "rel-test",
            Version = "1.0.0",
            Timestamp = new DateTimeOffset(2026, 1, 18, 12, 0, 0, TimeSpan.Zero),
            Components =
            [
                MakePackage("pkg:npm/a@1.0.0", "a"),
                MakePackage("pkg:npm/b@1.0.0", "b"),
                MakePackage("pkg:npm/c@1.0.0", "c")
            ],
            Relationships =
            [
                new SbomRelationship { SourceRef = "c", TargetRef = "a", Type = SbomRelationshipType.DependsOn },
                new SbomRelationship { SourceRef = "a", TargetRef = "b", Type = SbomRelationshipType.DependsOn },
                new SbomRelationship { SourceRef = "b", TargetRef = "c", Type = SbomRelationshipType.DependsOn }
            ]
        };

        var first = _spdxWriter.Write(doc);
        var second = _spdxWriter.Write(doc);

        Assert.Equal(first.GoldenHash, second.GoldenHash);
    }

    /// <summary>
    /// Test Case 5: the serialized output carries the SPDX JSON-LD context.
    /// </summary>
    [Fact]
    public void JsonLdContext_IsIncluded()
    {
        var result = _spdxWriter.Write(MakeDocument("context-test", "1.0.0"));

        var serialized = System.Text.Encoding.UTF8.GetString(result.CanonicalBytes);
        Assert.Contains("@context", serialized);
        Assert.Contains("spdx.org", serialized);
    }

    // Fully-populated fixture: metadata, two packages, one dependency edge,
    // and a pinned timestamp so every run serializes the same bytes.
    private static SbomDocument MakeDocument(string name, string version) => new()
    {
        Name = name,
        Version = version,
        Timestamp = new DateTimeOffset(2026, 1, 18, 12, 0, 0, TimeSpan.Zero),
        Metadata = new SbomMetadata
        {
            Tools = ["stella-scanner@1.0.0"],
            Authors = ["test@example.com"]
        },
        Components =
        [
            MakePackage("pkg:npm/lodash@4.17.21", "lodash"),
            MakePackage("pkg:npm/express@4.18.2", "express")
        ],
        Relationships =
        [
            new SbomRelationship
            {
                SourceRef = "lodash",
                TargetRef = "express",
                Type = SbomRelationshipType.DependsOn
            }
        ]
    };

    // Bare fixture carrying only the caller-supplied components.
    private static SbomDocument WrapComponents(string name, SbomComponent[] components) => new()
    {
        Name = name,
        Version = "1.0.0",
        Timestamp = new DateTimeOffset(2026, 1, 18, 12, 0, 0, TimeSpan.Zero),
        Components = [.. components],
        Relationships = []
    };

    // Component fixture; the version is whatever follows the last '@' in the
    // purl, defaulting to "1.0.0".
    private static SbomComponent MakePackage(string purl, string name) => new()
    {
        BomRef = name,
        Name = name,
        Version = purl.Split('@').LastOrDefault() ?? "1.0.0",
        Purl = purl,
        Hashes =
        [
            new SbomHash { Algorithm = "SHA-256", Value = "abcd1234" }
        ]
    };
}

View File

@@ -0,0 +1,159 @@
// -----------------------------------------------------------------------------
// VerdictLedgerHashTests.cs
// Sprint: SPRINT_20260118_015_Attestor_verdict_ledger_foundation
// Task: VL-002 - Unit tests for hash computation determinism
// Description: Tests proving deterministic hash computation for verdict entries
// -----------------------------------------------------------------------------
using StellaOps.Attestor.Persistence.Entities;
using StellaOps.Attestor.Services;
using Xunit;
namespace StellaOps.Attestor.Tests;
/// <summary>
/// Tests for VerdictLedger hash computation determinism.
/// </summary>
public sealed class VerdictLedgerHashTests
{
    /// <summary>
    /// Identical inputs produce identical hashes.
    /// </summary>
    [Fact]
    public void IdenticalInputs_ProduceIdenticalHashes()
    {
        var request1 = CreateTestRequest();
        var request2 = CreateTestRequest();

        var hash1 = ComputeHash(request1);
        var hash2 = ComputeHash(request2);

        Assert.Equal(hash1, hash2);
    }

    /// <summary>
    /// Different bom_ref produces different hash.
    /// </summary>
    [Fact]
    public void DifferentBomRef_ProducesDifferentHash()
    {
        var request1 = CreateTestRequest();
        var request2 = CreateTestRequest() with { BomRef = "pkg:npm/other@1.0.0" };

        var hash1 = ComputeHash(request1);
        var hash2 = ComputeHash(request2);

        Assert.NotEqual(hash1, hash2);
    }

    /// <summary>
    /// Different prev_hash produces different hash (chain linking works).
    /// </summary>
    [Fact]
    public void DifferentPrevHash_ProducesDifferentHash()
    {
        var request = CreateTestRequest();

        var hash1 = ComputeHash(request, prevHash: null);
        var hash2 = ComputeHash(request, prevHash: "abc123");

        Assert.NotEqual(hash1, hash2);
    }

    /// <summary>
    /// Genesis entry (null prev_hash) produces consistent hash.
    /// </summary>
    [Fact]
    public void GenesisEntry_ProducesConsistentHash()
    {
        var request = CreateTestRequest();

        var hash1 = ComputeHash(request, prevHash: null);
        var hash2 = ComputeHash(request, prevHash: null);

        Assert.Equal(hash1, hash2);
    }

    /// <summary>
    /// Hash is 64 lowercase hex characters (SHA-256).
    /// </summary>
    [Fact]
    public void Hash_Is64HexCharacters()
    {
        var request = CreateTestRequest();

        var hash = ComputeHash(request);

        Assert.Equal(64, hash.Length);
        Assert.Matches("^[a-f0-9]{64}$", hash);
    }

    /// <summary>
    /// Ten runs produce identical hash.
    /// </summary>
    [Fact]
    public void TenRuns_ProduceIdenticalHash()
    {
        var request = CreateTestRequest();
        string? firstHash = null;
        for (var i = 0; i < 10; i++)
        {
            var hash = ComputeHash(request);
            if (firstHash == null)
            {
                firstHash = hash;
            }
            else
            {
                Assert.Equal(firstHash, hash);
            }
        }
    }

    // Builds a representative append request; all values are arbitrary but
    // fixed so the hash computations above are reproducible.
    private static AppendVerdictRequest CreateTestRequest()
    {
        return new AppendVerdictRequest
        {
            BomRef = "pkg:npm/lodash@4.17.21",
            CycloneDxSerial = "urn:uuid:12345678-1234-1234-1234-123456789012",
            Decision = VerdictDecision.Approve,
            Reason = "All checks passed",
            PolicyBundleId = "pol-v1.0.0",
            PolicyBundleHash = "abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234",
            VerifierImageDigest = "ghcr.io/stellaops/verifier@sha256:1234567890abcdef",
            SignerKeyId = "key-001",
            TenantId = Guid.Parse("11111111-1111-1111-1111-111111111111")
        };
    }

    // Re-implements the canonical-JSON + SHA-256 scheme the ledger service is
    // expected to use: ordinally-sorted keys, null fields omitted, UTC second
    // precision, lowercase hex digest.
    // NOTE(review): the key set below must stay byte-for-byte in sync with
    // VerdictLedgerService's canonicalization — including the odd casing of
    // "signerKeyid" (lowercase 'i'); confirm against the service before
    // "fixing" it here.
    private static string ComputeHash(AppendVerdictRequest request, string? prevHash = null)
    {
        // Use the same canonical JSON approach as VerdictLedgerService
        var createdAt = new DateTimeOffset(2026, 1, 18, 12, 0, 0, TimeSpan.Zero);
        var canonical = new SortedDictionary<string, object?>(StringComparer.Ordinal)
        {
            ["bomRef"] = request.BomRef,
            ["createdAt"] = createdAt.ToString("yyyy-MM-ddTHH:mm:ssZ"),
            ["cyclonedxSerial"] = request.CycloneDxSerial,
            ["decision"] = request.Decision.ToString().ToLowerInvariant(),
            ["policyBundleHash"] = request.PolicyBundleHash,
            ["policyBundleId"] = request.PolicyBundleId,
            ["prevHash"] = prevHash,
            ["reason"] = request.Reason,
            ["signerKeyid"] = request.SignerKeyId,
            ["verifierImageDigest"] = request.VerifierImageDigest
        };
        var json = System.Text.Json.JsonSerializer.Serialize(canonical, new System.Text.Json.JsonSerializerOptions
        {
            PropertyNamingPolicy = System.Text.Json.JsonNamingPolicy.CamelCase,
            WriteIndented = false,
            DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
        });
        // FIX: one-shot static hashing (.NET 5+) instead of allocating and
        // disposing a SHA256 instance on every call.
        var bytes = System.Text.Encoding.UTF8.GetBytes(json);
        var hash = System.Security.Cryptography.SHA256.HashData(bytes);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}