save checkpoint

This commit is contained in:
master
2026-02-07 12:44:24 +02:00
parent 9339a8952c
commit 04360dff63
789 changed files with 39719 additions and 31710 deletions

View File

@@ -1,10 +1,4 @@
using StellaOps.Attestor.ProofChain.Identifiers;
using StellaOps.Attestor.ProofChain.Signing;
using StellaOps.Attestor.ProofChain.Statements;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Attestor.ProofChain.Assembly;
@@ -33,167 +27,3 @@ public interface IProofSpineAssembler
ProofSpineStatement spine,
CancellationToken ct = default);
}
/// <summary>
/// Request to assemble a proof spine.
/// </summary>
public sealed record ProofSpineRequest
{
/// <summary>
/// The SBOM entry ID that this spine covers.
/// </summary>
public required SbomEntryId SbomEntryId { get; init; }
/// <summary>
/// The evidence IDs to include in the proof bundle.
/// Will be sorted lexicographically during assembly.
/// </summary>
public required IReadOnlyList<EvidenceId> EvidenceIds { get; init; }
/// <summary>
/// The reasoning ID explaining the decision.
/// </summary>
public required ReasoningId ReasoningId { get; init; }
/// <summary>
/// The VEX verdict ID for this entry.
/// </summary>
public required VexVerdictId VexVerdictId { get; init; }
/// <summary>
/// Version of the policy used.
/// </summary>
public required string PolicyVersion { get; init; }
/// <summary>
/// The subject (artifact) this spine is about.
/// </summary>
public required ProofSpineSubject Subject { get; init; }
/// <summary>
/// Key profile to use for signing the spine statement.
/// </summary>
public SigningKeyProfile SigningProfile { get; init; } = SigningKeyProfile.Authority;
/// <summary>
/// Optional: ID of the uncertainty state attestation to include in the spine.
/// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
/// </summary>
public string? UncertaintyStatementId { get; init; }
/// <summary>
/// Optional: ID of the uncertainty budget attestation to include in the spine.
/// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
/// </summary>
public string? UncertaintyBudgetStatementId { get; init; }
}
/// <summary>
/// Subject for the proof spine (the artifact being attested).
/// </summary>
public sealed record ProofSpineSubject
{
/// <summary>
/// Name of the subject (e.g., image reference).
/// </summary>
public required string Name { get; init; }
/// <summary>
/// Digest of the subject.
/// </summary>
public required IReadOnlyDictionary<string, string> Digest { get; init; }
}
/// <summary>
/// Result of proof spine assembly.
/// </summary>
public sealed record ProofSpineResult
{
/// <summary>
/// The computed proof bundle ID (merkle root).
/// </summary>
public required ProofBundleId ProofBundleId { get; init; }
/// <summary>
/// The proof spine statement.
/// </summary>
public required ProofSpineStatement Statement { get; init; }
/// <summary>
/// The signed DSSE envelope.
/// </summary>
public required DsseEnvelope SignedEnvelope { get; init; }
/// <summary>
/// The merkle tree used for the proof bundle.
/// </summary>
public required MerkleTree MerkleTree { get; init; }
}
/// <summary>
/// Represents a merkle tree with proof generation capability.
/// </summary>
public sealed record MerkleTree
{
/// <summary>
/// The root hash of the merkle tree.
/// </summary>
public required byte[] Root { get; init; }
/// <summary>
/// The leaf hashes in order.
/// </summary>
public required IReadOnlyList<byte[]> Leaves { get; init; }
/// <summary>
/// Number of levels in the tree.
/// </summary>
public required int Depth { get; init; }
}
/// <summary>
/// Result of proof spine verification.
/// </summary>
public sealed record SpineVerificationResult
{
/// <summary>
/// Whether the spine is valid.
/// </summary>
public required bool IsValid { get; init; }
/// <summary>
/// The expected proof bundle ID (from the statement).
/// </summary>
public required ProofBundleId ExpectedBundleId { get; init; }
/// <summary>
/// The actual proof bundle ID (recomputed).
/// </summary>
public required ProofBundleId ActualBundleId { get; init; }
/// <summary>
/// Individual verification checks performed.
/// </summary>
public IReadOnlyList<SpineVerificationCheck> Checks { get; init; } = [];
}
/// <summary>
/// A single verification check in spine verification.
/// </summary>
public sealed record SpineVerificationCheck
{
/// <summary>
/// Name of the check.
/// </summary>
public required string CheckName { get; init; }
/// <summary>
/// Whether the check passed.
/// </summary>
public required bool Passed { get; init; }
/// <summary>
/// Optional details about the check.
/// </summary>
public string? Details { get; init; }
}

View File

@@ -0,0 +1,22 @@
namespace StellaOps.Attestor.ProofChain.Assembly;
/// <summary>
/// Immutable snapshot of a computed merkle tree: the root hash, the ordered
/// leaf hashes it was built from, and the number of levels in the tree.
/// Note: record equality compares the byte arrays by reference, not content.
/// </summary>
public sealed record MerkleTree
{
    /// <summary>Root hash of the tree.</summary>
    public required byte[] Root { get; init; }

    /// <summary>Leaf hashes, in the order they were fed into the tree.</summary>
    public required IReadOnlyList<byte[]> Leaves { get; init; }

    /// <summary>Number of levels from the leaves up to the root.</summary>
    public required int Depth { get; init; }
}

View File

@@ -0,0 +1,58 @@
using StellaOps.Attestor.ProofChain.Identifiers;
using StellaOps.Attestor.ProofChain.Signing;
namespace StellaOps.Attestor.ProofChain.Assembly;
/// <summary>
/// Input for proof spine assembly: identifies the SBOM entry being covered,
/// the evidence and reasoning behind the verdict, the artifact subject, and
/// how the resulting spine statement is signed.
/// </summary>
public sealed record ProofSpineRequest
{
    /// <summary>SBOM entry ID that this spine covers.</summary>
    public required SbomEntryId SbomEntryId { get; init; }

    /// <summary>
    /// Evidence IDs to include in the proof bundle. Input order is irrelevant;
    /// assembly sorts them lexicographically.
    /// </summary>
    public required IReadOnlyList<EvidenceId> EvidenceIds { get; init; }

    /// <summary>Reasoning ID explaining the decision.</summary>
    public required ReasoningId ReasoningId { get; init; }

    /// <summary>VEX verdict ID for this entry.</summary>
    public required VexVerdictId VexVerdictId { get; init; }

    /// <summary>Version of the policy that was applied.</summary>
    public required string PolicyVersion { get; init; }

    /// <summary>The subject (artifact) this spine is about.</summary>
    public required ProofSpineSubject Subject { get; init; }

    /// <summary>
    /// Key profile used to sign the spine statement; defaults to Authority.
    /// </summary>
    public SigningKeyProfile SigningProfile { get; init; } = SigningKeyProfile.Authority;

    /// <summary>
    /// Optional ID of the uncertainty state attestation to include in the spine.
    /// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
    /// </summary>
    public string? UncertaintyStatementId { get; init; }

    /// <summary>
    /// Optional ID of the uncertainty budget attestation to include in the spine.
    /// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
    /// </summary>
    public string? UncertaintyBudgetStatementId { get; init; }
}

View File

@@ -0,0 +1,31 @@
using StellaOps.Attestor.ProofChain.Identifiers;
using StellaOps.Attestor.ProofChain.Signing;
using StellaOps.Attestor.ProofChain.Statements;
namespace StellaOps.Attestor.ProofChain.Assembly;
/// <summary>
/// Output of proof spine assembly: the computed bundle ID, the statement that
/// was produced, its signed DSSE envelope, and the underlying merkle tree.
/// </summary>
public sealed record ProofSpineResult
{
    /// <summary>Computed proof bundle ID (the merkle root).</summary>
    public required ProofBundleId ProofBundleId { get; init; }

    /// <summary>The assembled proof spine statement.</summary>
    public required ProofSpineStatement Statement { get; init; }

    /// <summary>DSSE envelope carrying the signed statement.</summary>
    public required DsseEnvelope SignedEnvelope { get; init; }

    /// <summary>Merkle tree that backs the proof bundle.</summary>
    public required MerkleTree MerkleTree { get; init; }
}

View File

@@ -0,0 +1,17 @@
namespace StellaOps.Attestor.ProofChain.Assembly;
/// <summary>
/// The artifact a proof spine attests to: a name plus its digests.
/// </summary>
public sealed record ProofSpineSubject
{
    /// <summary>Name of the subject (e.g., an image reference).</summary>
    public required string Name { get; init; }

    /// <summary>Digests of the subject, keyed by algorithm.</summary>
    public required IReadOnlyDictionary<string, string> Digest { get; init; }
}

View File

@@ -0,0 +1,22 @@
namespace StellaOps.Attestor.ProofChain.Assembly;
/// <summary>
/// One named pass/fail check performed while verifying a spine.
/// </summary>
public sealed record SpineVerificationCheck
{
    /// <summary>Name of the check.</summary>
    public required string CheckName { get; init; }

    /// <summary>Whether the check passed.</summary>
    public required bool Passed { get; init; }

    /// <summary>Optional free-form details about the check.</summary>
    public string? Details { get; init; }
}

View File

@@ -0,0 +1,29 @@
using StellaOps.Attestor.ProofChain.Identifiers;
namespace StellaOps.Attestor.ProofChain.Assembly;
/// <summary>
/// Outcome of proof spine verification: overall validity, the bundle IDs that
/// were compared, and the individual checks that were executed.
/// </summary>
public sealed record SpineVerificationResult
{
    /// <summary>Whether the spine is valid overall.</summary>
    public required bool IsValid { get; init; }

    /// <summary>Proof bundle ID as recorded in the statement.</summary>
    public required ProofBundleId ExpectedBundleId { get; init; }

    /// <summary>Proof bundle ID recomputed during verification.</summary>
    public required ProofBundleId ActualBundleId { get; init; }

    /// <summary>Individual verification checks performed; empty by default.</summary>
    public IReadOnlyList<SpineVerificationCheck> Checks { get; init; } = [];
}

View File

@@ -0,0 +1,17 @@
namespace StellaOps.Attestor.ProofChain.Audit;
/// <summary>
/// Well-known artifact type labels used when recording hash audits.
/// </summary>
public static class AuditArtifactTypes
{
    /// <summary>A proof artifact.</summary>
    public const string Proof = "proof";

    /// <summary>A verdict artifact.</summary>
    public const string Verdict = "verdict";

    /// <summary>An attestation artifact.</summary>
    public const string Attestation = "attestation";

    /// <summary>A proof spine artifact.</summary>
    public const string Spine = "spine";

    /// <summary>A manifest artifact.</summary>
    public const string Manifest = "manifest";

    /// <summary>A VEX document.</summary>
    public const string VexDocument = "vex_document";

    /// <summary>An SBOM fragment.</summary>
    public const string SbomFragment = "sbom_fragment";

    /// <summary>A policy snapshot.</summary>
    public const string PolicySnapshot = "policy_snapshot";

    /// <summary>A feed snapshot.</summary>
    public const string FeedSnapshot = "feed_snapshot";
}

View File

@@ -0,0 +1,95 @@
using Microsoft.Extensions.Logging;
namespace StellaOps.Attestor.ProofChain.Audit;
/// <summary>
/// Validation, audit record creation, and detailed diff methods for AuditHashLogger.
/// Relies on members declared in the other part of this partial class
/// (_logger, _enableDetailedLogging, _timeProvider, ComputeSha256, EscapeForLog).
/// </summary>
public sealed partial class AuditHashLogger
{
    /// <summary>
    /// Builds a structured <see cref="HashAuditRecord"/> comparing the raw and
    /// canonical byte representations of an artifact. Pure computation — this
    /// overload does not log.
    /// </summary>
    /// <param name="artifactId">Unique identifier of the artifact being audited.</param>
    /// <param name="artifactType">Artifact type label (see AuditArtifactTypes).</param>
    /// <param name="rawBytes">Bytes before canonicalization.</param>
    /// <param name="canonicalBytes">Bytes after canonicalization.</param>
    /// <param name="correlationId">Optional correlation ID for tracing.</param>
    /// <returns>An immutable record with both hashes, sizes, and a match flag.</returns>
    public HashAuditRecord CreateAuditRecord(
        string artifactId,
        string artifactType,
        ReadOnlySpan<byte> rawBytes,
        ReadOnlySpan<byte> canonicalBytes,
        string? correlationId = null)
    {
        var rawHash = ComputeSha256(rawBytes);
        var canonicalHash = ComputeSha256(canonicalBytes);
        return new HashAuditRecord
        {
            ArtifactId = artifactId,
            ArtifactType = artifactType,
            RawHash = rawHash,
            CanonicalHash = canonicalHash,
            RawSizeBytes = rawBytes.Length,
            CanonicalSizeBytes = canonicalBytes.Length,
            // Hash strings are compared exactly; ordinal avoids culture surprises.
            HashesMatch = rawHash.Equals(canonicalHash, StringComparison.Ordinal),
            Timestamp = _timeProvider.GetUtcNow(),
            CorrelationId = correlationId
        };
    }

    /// <summary>
    /// Validates that two canonical representations produce the same hash.
    /// On mismatch, logs a warning, plus size details at Debug level when
    /// detailed logging is enabled.
    /// </summary>
    /// <returns>True when both inputs hash to the same SHA-256 value.</returns>
    public bool ValidateDeterminism(
        string artifactId,
        ReadOnlySpan<byte> firstCanonical,
        ReadOnlySpan<byte> secondCanonical)
    {
        var firstHash = ComputeSha256(firstCanonical);
        var secondHash = ComputeSha256(secondCanonical);
        var isValid = firstHash.Equals(secondHash, StringComparison.Ordinal);
        if (!isValid)
        {
            _logger.LogWarning(
                "Determinism validation failed for {ArtifactId}: first={FirstHash}, second={SecondHash}",
                artifactId, firstHash, secondHash);
            if (_enableDetailedLogging && _logger.IsEnabled(LogLevel.Debug))
            {
                _logger.LogDebug(
                    "Determinism failure details for {ArtifactId}: size1={Size1}, size2={Size2}, diff={Diff}",
                    artifactId, firstCanonical.Length, secondCanonical.Length,
                    Math.Abs(firstCanonical.Length - secondCanonical.Length));
            }
        }
        return isValid;
    }

    // Trace-logs the first byte position at which raw and canonical differ,
    // with up to 40 bytes of surrounding context decoded as UTF-8.
    // NOTE(review): decoding an arbitrary byte slice as UTF-8 may cut a
    // multi-byte sequence at either edge, yielding replacement characters in
    // the logged context — acceptable for diagnostics, but worth confirming.
    private void LogDetailedDiff(string artifactId, ReadOnlySpan<byte> raw, ReadOnlySpan<byte> canonical)
    {
        var minLen = Math.Min(raw.Length, canonical.Length);
        var firstDiffPos = -1;
        for (var i = 0; i < minLen; i++)
        {
            if (raw[i] != canonical[i]) { firstDiffPos = i; break; }
        }
        // Identical common prefix but different lengths: the difference starts
        // where the shorter input ends.
        if (firstDiffPos == -1 && raw.Length != canonical.Length)
            firstDiffPos = minLen;
        if (firstDiffPos >= 0)
        {
            // Start the context window up to 20 bytes before the difference.
            var contextStart = Math.Max(0, firstDiffPos - 20);
            var rawContext = raw.Length > contextStart
                ? System.Text.Encoding.UTF8.GetString(raw.Slice(contextStart, Math.Min(40, raw.Length - contextStart)))
                : string.Empty;
            var canonicalContext = canonical.Length > contextStart
                ? System.Text.Encoding.UTF8.GetString(canonical.Slice(contextStart, Math.Min(40, canonical.Length - contextStart)))
                : string.Empty;
            _logger.LogTrace(
                "First difference at position {Position} for {ArtifactId}: raw=\"{RawContext}\", canonical=\"{CanonicalContext}\"",
                firstDiffPos, artifactId, EscapeForLog(rawContext), EscapeForLog(canonicalContext));
        }
    }
}

View File

@@ -5,10 +5,8 @@
// Description: Pre-canonical hash debug logging for audit trails
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Attestor.ProofChain.Audit;
@@ -16,7 +14,7 @@ namespace StellaOps.Attestor.ProofChain.Audit;
/// Logs both raw and canonical SHA-256 hashes for audit trails.
/// Enables debugging of canonicalization issues by comparing pre/post hashes.
/// </summary>
public sealed class AuditHashLogger
public sealed partial class AuditHashLogger
{
private readonly ILogger<AuditHashLogger> _logger;
private readonly bool _enableDetailedLogging;
@@ -49,25 +47,19 @@ public sealed class AuditHashLogger
{
var rawHash = ComputeSha256(rawBytes);
var canonicalHash = ComputeSha256(canonicalBytes);
var hashesMatch = rawHash.Equals(canonicalHash, StringComparison.Ordinal);
if (hashesMatch)
{
_logger.LogDebug(
"Hash audit for {ArtifactType} {ArtifactId}: raw and canonical hashes match ({Hash})",
artifactType,
artifactId,
canonicalHash);
artifactType, artifactId, canonicalHash);
}
else
{
_logger.LogInformation(
"Hash audit for {ArtifactType} {ArtifactId}: raw={RawHash}, canonical={CanonicalHash}, size_delta={SizeDelta}",
artifactType,
artifactId,
rawHash,
canonicalHash,
artifactType, artifactId, rawHash, canonicalHash,
canonicalBytes.Length - rawBytes.Length);
if (_enableDetailedLogging && _logger.IsEnabled(LogLevel.Trace))
@@ -77,132 +69,14 @@ public sealed class AuditHashLogger
}
}
/// <summary>
/// Logs hash information with structured data for telemetry.
/// </summary>
public HashAuditRecord CreateAuditRecord(
string artifactId,
string artifactType,
ReadOnlySpan<byte> rawBytes,
ReadOnlySpan<byte> canonicalBytes,
string? correlationId = null)
{
var rawHash = ComputeSha256(rawBytes);
var canonicalHash = ComputeSha256(canonicalBytes);
var record = new HashAuditRecord
{
ArtifactId = artifactId,
ArtifactType = artifactType,
RawHash = rawHash,
CanonicalHash = canonicalHash,
RawSizeBytes = rawBytes.Length,
CanonicalSizeBytes = canonicalBytes.Length,
HashesMatch = rawHash.Equals(canonicalHash, StringComparison.Ordinal),
Timestamp = _timeProvider.GetUtcNow(),
CorrelationId = correlationId
};
_logger.LogDebug(
"Created hash audit record for {ArtifactType} {ArtifactId}: match={Match}, raw_size={RawSize}, canonical_size={CanonicalSize}",
artifactType,
artifactId,
record.HashesMatch,
record.RawSizeBytes,
record.CanonicalSizeBytes);
return record;
}
/// <summary>
/// Validates that two canonical representations produce the same hash.
/// </summary>
public bool ValidateDeterminism(
string artifactId,
ReadOnlySpan<byte> firstCanonical,
ReadOnlySpan<byte> secondCanonical)
{
var firstHash = ComputeSha256(firstCanonical);
var secondHash = ComputeSha256(secondCanonical);
var isValid = firstHash.Equals(secondHash, StringComparison.Ordinal);
if (!isValid)
{
_logger.LogWarning(
"Determinism validation failed for {ArtifactId}: first={FirstHash}, second={SecondHash}",
artifactId,
firstHash,
secondHash);
if (_enableDetailedLogging && _logger.IsEnabled(LogLevel.Debug))
{
var firstSize = firstCanonical.Length;
var secondSize = secondCanonical.Length;
_logger.LogDebug(
"Determinism failure details for {ArtifactId}: size1={Size1}, size2={Size2}, diff={Diff}",
artifactId,
firstSize,
secondSize,
Math.Abs(firstSize - secondSize));
}
}
return isValid;
}
private void LogDetailedDiff(string artifactId, ReadOnlySpan<byte> raw, ReadOnlySpan<byte> canonical)
{
// Find first difference position
var minLen = Math.Min(raw.Length, canonical.Length);
var firstDiffPos = -1;
for (var i = 0; i < minLen; i++)
{
if (raw[i] != canonical[i])
{
firstDiffPos = i;
break;
}
}
if (firstDiffPos == -1 && raw.Length != canonical.Length)
{
firstDiffPos = minLen;
}
if (firstDiffPos >= 0)
{
// Get context around difference
var contextStart = Math.Max(0, firstDiffPos - 20);
var contextEnd = Math.Min(minLen, firstDiffPos + 20);
var rawContext = raw.Length > contextStart
? Encoding.UTF8.GetString(raw.Slice(contextStart, Math.Min(40, raw.Length - contextStart)))
: string.Empty;
var canonicalContext = canonical.Length > contextStart
? Encoding.UTF8.GetString(canonical.Slice(contextStart, Math.Min(40, canonical.Length - contextStart)))
: string.Empty;
_logger.LogTrace(
"First difference at position {Position} for {ArtifactId}: raw=\"{RawContext}\", canonical=\"{CanonicalContext}\"",
firstDiffPos,
artifactId,
EscapeForLog(rawContext),
EscapeForLog(canonicalContext));
}
}
private static string ComputeSha256(ReadOnlySpan<byte> data)
internal static string ComputeSha256(ReadOnlySpan<byte> data)
{
Span<byte> hash = stackalloc byte[32];
SHA256.HashData(data, hash);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
private static string EscapeForLog(string value)
internal static string EscapeForLog(string value)
{
return value
.Replace("\n", "\\n")
@@ -210,75 +84,3 @@ public sealed class AuditHashLogger
.Replace("\t", "\\t");
}
}
/// <summary>
/// Record of a hash audit for structured logging/telemetry.
/// </summary>
public sealed record HashAuditRecord
{
/// <summary>
/// Unique identifier for the artifact.
/// </summary>
public required string ArtifactId { get; init; }
/// <summary>
/// Type of artifact (proof, verdict, attestation, etc.).
/// </summary>
public required string ArtifactType { get; init; }
/// <summary>
/// SHA-256 hash of raw bytes before canonicalization.
/// </summary>
public required string RawHash { get; init; }
/// <summary>
/// SHA-256 hash of canonical bytes.
/// </summary>
public required string CanonicalHash { get; init; }
/// <summary>
/// Size of raw bytes.
/// </summary>
public required int RawSizeBytes { get; init; }
/// <summary>
/// Size of canonical bytes.
/// </summary>
public required int CanonicalSizeBytes { get; init; }
/// <summary>
/// Whether raw and canonical hashes match.
/// </summary>
public required bool HashesMatch { get; init; }
/// <summary>
/// UTC timestamp of the audit.
/// </summary>
public required DateTimeOffset Timestamp { get; init; }
/// <summary>
/// Optional correlation ID for tracing.
/// </summary>
public string? CorrelationId { get; init; }
/// <summary>
/// Size delta (positive = canonical is larger).
/// </summary>
public int SizeDelta => CanonicalSizeBytes - RawSizeBytes;
}
/// <summary>
/// Artifact types for hash auditing.
/// </summary>
public static class AuditArtifactTypes
{
public const string Proof = "proof";
public const string Verdict = "verdict";
public const string Attestation = "attestation";
public const string Spine = "spine";
public const string Manifest = "manifest";
public const string VexDocument = "vex_document";
public const string SbomFragment = "sbom_fragment";
public const string PolicySnapshot = "policy_snapshot";
public const string FeedSnapshot = "feed_snapshot";
}

View File

@@ -0,0 +1,57 @@
namespace StellaOps.Attestor.ProofChain.Audit;
/// <summary>
/// Immutable record of a single hash audit, suitable for structured
/// logging and telemetry.
/// </summary>
public sealed record HashAuditRecord
{
    /// <summary>Unique identifier for the artifact.</summary>
    public required string ArtifactId { get; init; }

    /// <summary>Type of artifact (proof, verdict, attestation, etc.).</summary>
    public required string ArtifactType { get; init; }

    /// <summary>SHA-256 hash of the raw bytes, before canonicalization.</summary>
    public required string RawHash { get; init; }

    /// <summary>SHA-256 hash of the canonical bytes.</summary>
    public required string CanonicalHash { get; init; }

    /// <summary>Size of the raw bytes.</summary>
    public required int RawSizeBytes { get; init; }

    /// <summary>Size of the canonical bytes.</summary>
    public required int CanonicalSizeBytes { get; init; }

    /// <summary>Whether the raw and canonical hashes match.</summary>
    public required bool HashesMatch { get; init; }

    /// <summary>UTC timestamp at which the audit was taken.</summary>
    public required DateTimeOffset Timestamp { get; init; }

    /// <summary>Optional correlation ID for tracing.</summary>
    public string? CorrelationId { get; init; }

    /// <summary>
    /// Difference in size introduced by canonicalization
    /// (positive when canonical is larger).
    /// </summary>
    public int SizeDelta => CanonicalSizeBytes - RawSizeBytes;
}

View File

@@ -1,34 +1,7 @@
using StellaOps.Attestor.ProofChain.Statements;
using System.Collections.Generic;
namespace StellaOps.Attestor.ProofChain.Builders;
/// <summary>
/// Represents a subject (artifact) for proof chain statements.
/// </summary>
public sealed record ProofSubject
{
/// <summary>
/// The name or identifier of the subject (e.g., image reference, PURL).
/// </summary>
public required string Name { get; init; }
/// <summary>
/// Digests of the subject in algorithm:hex format.
/// </summary>
public required IReadOnlyDictionary<string, string> Digest { get; init; }
/// <summary>
/// Converts this ProofSubject to an in-toto Subject.
/// </summary>
public Subject ToSubject() => new()
{
Name = Name,
Digest = Digest
};
}
/// <summary>
/// Factory for building in-toto statements for proof chain predicates.
/// </summary>

View File

@@ -0,0 +1,28 @@
using StellaOps.Attestor.ProofChain.Statements;
namespace StellaOps.Attestor.ProofChain.Builders;
/// <summary>
/// A subject (artifact) referenced by proof chain statements: a name plus
/// its digests.
/// </summary>
public sealed record ProofSubject
{
    /// <summary>Name or identifier of the subject (e.g., image reference, PURL).</summary>
    public required string Name { get; init; }

    /// <summary>Digests of the subject in algorithm:hex format.</summary>
    public required IReadOnlyDictionary<string, string> Digest { get; init; }

    /// <summary>
    /// Converts this ProofSubject into an in-toto Subject carrying the same
    /// name and digest map.
    /// </summary>
    public Subject ToSubject()
    {
        return new Subject
        {
            Name = Name,
            Digest = Digest
        };
    }
}

View File

@@ -0,0 +1,59 @@
using StellaOps.Attestor.ProofChain.Statements;
namespace StellaOps.Attestor.ProofChain.Builders;
/// <summary>
/// Extended statement building methods (linkage, uncertainty, budget).
/// </summary>
public sealed partial class StatementBuilder
{
    /// <inheritdoc />
    public SbomLinkageStatement BuildSbomLinkageStatement(
        IReadOnlyList<ProofSubject> subjects,
        SbomLinkagePayload predicate)
    {
        ArgumentNullException.ThrowIfNull(subjects);
        ArgumentNullException.ThrowIfNull(predicate);
        if (subjects.Count == 0)
        {
            throw new ArgumentException("At least one subject is required.", nameof(subjects));
        }

        // Convert each proof subject to its in-toto representation up front.
        var resolvedSubjects = new List<Subject>(subjects.Count);
        foreach (var proofSubject in subjects)
        {
            resolvedSubjects.Add(proofSubject.ToSubject());
        }

        return new SbomLinkageStatement
        {
            Subject = resolvedSubjects,
            Predicate = predicate
        };
    }

    /// <inheritdoc />
    public UncertaintyStatement BuildUncertaintyStatement(
        ProofSubject subject,
        UncertaintyPayload predicate)
    {
        ArgumentNullException.ThrowIfNull(subject);
        ArgumentNullException.ThrowIfNull(predicate);

        // Single-subject statement wrapping the uncertainty payload.
        return new UncertaintyStatement
        {
            Subject = [subject.ToSubject()],
            Predicate = predicate
        };
    }

    /// <inheritdoc />
    public UncertaintyBudgetStatement BuildUncertaintyBudgetStatement(
        ProofSubject subject,
        UncertaintyBudgetPayload predicate)
    {
        ArgumentNullException.ThrowIfNull(subject);
        ArgumentNullException.ThrowIfNull(predicate);

        // Single-subject statement wrapping the budget payload.
        return new UncertaintyBudgetStatement
        {
            Subject = [subject.ToSubject()],
            Predicate = predicate
        };
    }
}

View File

@@ -1,14 +1,11 @@
using StellaOps.Attestor.ProofChain.Statements;
using System.Collections.Generic;
using System.Linq;
namespace StellaOps.Attestor.ProofChain.Builders;
/// <summary>
/// Default implementation of IStatementBuilder.
/// </summary>
public sealed class StatementBuilder : IStatementBuilder
public sealed partial class StatementBuilder : IStatementBuilder
{
/// <inheritdoc />
public EvidenceStatement BuildEvidenceStatement(
@@ -84,54 +81,4 @@ public sealed class StatementBuilder : IStatementBuilder
Predicate = predicate
};
}
/// <inheritdoc />
public SbomLinkageStatement BuildSbomLinkageStatement(
IReadOnlyList<ProofSubject> subjects,
SbomLinkagePayload predicate)
{
ArgumentNullException.ThrowIfNull(subjects);
ArgumentNullException.ThrowIfNull(predicate);
if (subjects.Count == 0)
{
throw new ArgumentException("At least one subject is required.", nameof(subjects));
}
return new SbomLinkageStatement
{
Subject = subjects.Select(s => s.ToSubject()).ToList(),
Predicate = predicate
};
}
/// <inheritdoc />
public UncertaintyStatement BuildUncertaintyStatement(
ProofSubject subject,
UncertaintyPayload predicate)
{
ArgumentNullException.ThrowIfNull(subject);
ArgumentNullException.ThrowIfNull(predicate);
return new UncertaintyStatement
{
Subject = [subject.ToSubject()],
Predicate = predicate
};
}
/// <inheritdoc />
public UncertaintyBudgetStatement BuildUncertaintyBudgetStatement(
ProofSubject subject,
UncertaintyBudgetPayload predicate)
{
ArgumentNullException.ThrowIfNull(subject);
ArgumentNullException.ThrowIfNull(predicate);
return new UncertaintyBudgetStatement
{
Subject = [subject.ToSubject()],
Predicate = predicate
};
}
}

View File

@@ -0,0 +1,96 @@
using StellaOps.Attestor.ProofChain.Predicates;
using StellaOps.Attestor.ProofChain.Statements;
using StellaOps.Scanner.ChangeTrace.Models;
using System.Collections.Immutable;
using ChangeTraceModel = StellaOps.Scanner.ChangeTrace.Models.ChangeTrace;
namespace StellaOps.Attestor.ProofChain.ChangeTrace;
/// <summary>
/// Helper methods for statement creation and impact aggregation.
/// </summary>
public sealed partial class ChangeTraceAttestationService
{
    /// <summary>
    /// Create an in-toto statement wrapping the given predicate, with a single
    /// subject derived from the trace.
    /// </summary>
    private ChangeTraceStatement CreateStatement(
        ChangeTraceModel trace,
        ChangeTracePredicate predicate)
    {
        var subject = new Subject
        {
            // Most specific identifier wins: PURL, then name, then raw digest.
            Name = trace.Subject.Purl ?? trace.Subject.Name ?? trace.Subject.Digest,
            Digest = ParseDigest(trace.Subject.Digest)
        };

        return new ChangeTraceStatement
        {
            Subject = [subject],
            Predicate = predicate
        };
    }

    /// <summary>
    /// Parse a digest string ("algorithm:value") into a one-entry dictionary.
    /// A string with no algorithm prefix is treated as a bare sha256 value;
    /// a null or empty string yields an empty dictionary.
    /// </summary>
    private static IReadOnlyDictionary<string, string> ParseDigest(string digestString)
    {
        var result = new Dictionary<string, string>(StringComparer.Ordinal);
        if (string.IsNullOrEmpty(digestString))
        {
            return result;
        }

        var separator = digestString.IndexOf(':', StringComparison.Ordinal);
        if (separator > 0)
        {
            // "sha256:abc..." -> { "sha256": "abc..." }
            result[digestString[..separator]] = digestString[(separator + 1)..];
        }
        else
        {
            // No algorithm prefix (or a leading ':'): assume sha256.
            result["sha256"] = digestString;
        }

        return result;
    }

    /// <summary>
    /// Aggregate reachability impact across all deltas: the most significant
    /// impact present anywhere wins, in the order
    /// Introduced > Increased > Reduced > Eliminated > Unchanged.
    /// </summary>
    private static ReachabilityImpact AggregateReachabilityImpact(
        ImmutableArray<PackageDelta> deltas)
    {
        ReachabilityImpact[] searchOrder =
        [
            ReachabilityImpact.Introduced,
            ReachabilityImpact.Increased,
            ReachabilityImpact.Reduced,
            ReachabilityImpact.Eliminated,
        ];

        foreach (var candidate in searchOrder)
        {
            if (deltas.Any(d => d.TrustDelta?.ReachabilityImpact == candidate))
            {
                return candidate;
            }
        }

        return ReachabilityImpact.Unchanged;
    }

    /// <summary>
    /// Determine exploitability impact from the overall risk delta score.
    /// The extreme buckets (|delta| >= 0.5) are checked before the moderate
    /// ones (|delta| > 0.3); anything else — including NaN — is Unchanged.
    /// </summary>
    private static ExploitabilityImpact DetermineExploitabilityFromScore(double riskDelta)
    {
        if (riskDelta <= -0.5)
        {
            return ExploitabilityImpact.Eliminated;
        }
        if (riskDelta < -0.3)
        {
            return ExploitabilityImpact.Down;
        }
        if (riskDelta >= 0.5)
        {
            return ExploitabilityImpact.Introduced;
        }
        if (riskDelta > 0.3)
        {
            return ExploitabilityImpact.Up;
        }
        return ExploitabilityImpact.Unchanged;
    }
}

View File

@@ -0,0 +1,83 @@
// -----------------------------------------------------------------------------
// ChangeTraceAttestationService.Mapping.cs
// Predicate mapping logic for ChangeTraceAttestationService.
// -----------------------------------------------------------------------------
using StellaOps.Attestor.ProofChain.Predicates;
using StellaOps.Scanner.ChangeTrace.Models;
using System.Collections.Immutable;
using ChangeTraceModel = StellaOps.Scanner.ChangeTrace.Models.ChangeTrace;
namespace StellaOps.Attestor.ProofChain.ChangeTrace;
/// <summary>
/// Predicate mapping methods for ChangeTraceAttestationService.
/// </summary>
public sealed partial class ChangeTraceAttestationService
{
    /// <summary>
    /// Map a change trace model to its attestation predicate, truncating the
    /// delta list and the proof-step list to the limits configured in
    /// <paramref name="options"/>.
    /// </summary>
    private ChangeTracePredicate MapToPredicate(
        ChangeTraceModel trace,
        ChangeTraceAttestationOptions options)
    {
        // Flatten each package delta into a predicate entry, capped at
        // options.MaxDeltas entries.
        var deltas = trace.Deltas
            .Take(options.MaxDeltas)
            .Select(d => new ChangeTraceDeltaEntry
            {
                Purl = d.Purl,
                FromVersion = d.FromVersion,
                ToVersion = d.ToVersion,
                ChangeType = d.ChangeType.ToString(),
                Explain = d.Explain.ToString(),
                SymbolsChanged = d.Evidence.SymbolsChanged,
                BytesChanged = d.Evidence.BytesChanged,
                Confidence = d.Evidence.Confidence,
                // Deltas without a trust delta contribute a neutral score of 0.
                TrustDeltaScore = d.TrustDelta?.Score ?? 0,
                CveIds = d.Evidence.CveIds,
                Functions = d.Evidence.Functions
            })
            .ToImmutableArray();

        // Deduplicated proof steps pooled across every delta that carries a
        // trust delta, capped at options.MaxProofSteps.
        var proofSteps = trace.Deltas
            .Where(d => d.TrustDelta is not null)
            .SelectMany(d => d.TrustDelta!.ProofSteps)
            .Distinct()
            .Take(options.MaxProofSteps)
            .ToImmutableArray();

        var aggregateReachability = AggregateReachabilityImpact(trace.Deltas);
        var aggregateExploitability = DetermineExploitabilityFromScore(trace.Summary.RiskDelta);

        return new ChangeTracePredicate
        {
            // Scan IDs fall back to the subject digest when absent —
            // presumably for traces without distinct from/to scans; confirm
            // against callers.
            FromDigest = trace.Basis.FromScanId ?? trace.Subject.Digest,
            ToDigest = trace.Basis.ToScanId ?? trace.Subject.Digest,
            TenantId = options.TenantId,
            Deltas = deltas,
            Summary = new ChangeTracePredicateSummary
            {
                ChangedPackages = trace.Summary.ChangedPackages,
                ChangedSymbols = trace.Summary.ChangedSymbols,
                ChangedBytes = trace.Summary.ChangedBytes,
                RiskDelta = trace.Summary.RiskDelta,
                // Enum names are serialized lowercase in the predicate.
                Verdict = trace.Summary.Verdict.ToString().ToLowerInvariant()
            },
            TrustDelta = new TrustDeltaRecord
            {
                Score = trace.Summary.RiskDelta,
                BeforeScore = trace.Summary.BeforeRiskScore,
                AfterScore = trace.Summary.AfterRiskScore,
                ReachabilityImpact = aggregateReachability.ToString().ToLowerInvariant(),
                ExploitabilityImpact = aggregateExploitability.ToString().ToLowerInvariant()
            },
            ProofSteps = proofSteps,
            DiffMethods = trace.Basis.DiffMethod,
            Policies = trace.Basis.Policies,
            AnalyzedAt = trace.Basis.AnalyzedAt,
            AlgorithmVersion = trace.Basis.EngineVersion,
            CommitmentHash = trace.Commitment?.Sha256
        };
    }
}

View File

@@ -4,22 +4,16 @@
// Description: Service for generating change trace DSSE attestations.
// -----------------------------------------------------------------------------
using ChangeTraceModel = StellaOps.Scanner.ChangeTrace.Models.ChangeTrace;
using DsseEnvelope = StellaOps.Attestor.ProofChain.Signing.DsseEnvelope;
using StellaOps.Attestor.ProofChain.Predicates;
using StellaOps.Attestor.ProofChain.Signing;
using StellaOps.Attestor.ProofChain.Statements;
using StellaOps.Scanner.ChangeTrace.Models;
using System.Collections.Immutable;
namespace StellaOps.Attestor.ProofChain.ChangeTrace;
/// <summary>
/// Service for generating change trace DSSE attestations.
/// </summary>
public sealed class ChangeTraceAttestationService : IChangeTraceAttestationService
public sealed partial class ChangeTraceAttestationService : IChangeTraceAttestationService
{
private readonly IProofChainSigner _signer;
private readonly TimeProvider _timeProvider;
@@ -54,156 +48,4 @@ public sealed class ChangeTraceAttestationService : IChangeTraceAttestationServi
SigningKeyProfile.Evidence,
ct).ConfigureAwait(false);
}
/// <summary>
/// Map a change trace model to its attestation predicate.
/// Caps deltas and proof steps at the configured maxima and lower-cases
/// enum-derived labels so the predicate serializes canonically.
/// </summary>
/// <param name="trace">The computed change trace to project.</param>
/// <param name="options">Attestation options supplying tenant ID and size caps.</param>
/// <returns>The predicate payload to embed in the DSSE statement.</returns>
private ChangeTracePredicate MapToPredicate(
    ChangeTraceModel trace,
    ChangeTraceAttestationOptions options)
{
    // Bound the delta list so the predicate stays a manageable size.
    var deltas = trace.Deltas
        .Take(options.MaxDeltas)
        .Select(d => new ChangeTraceDeltaEntry
        {
            Purl = d.Purl,
            FromVersion = d.FromVersion,
            ToVersion = d.ToVersion,
            ChangeType = d.ChangeType.ToString(),
            Explain = d.Explain.ToString(),
            SymbolsChanged = d.Evidence.SymbolsChanged,
            BytesChanged = d.Evidence.BytesChanged,
            Confidence = d.Evidence.Confidence,
            // Deltas without a trust delta contribute a neutral score of 0.
            TrustDeltaScore = d.TrustDelta?.Score ?? 0,
            CveIds = d.Evidence.CveIds,
            Functions = d.Evidence.Functions
        })
        .ToImmutableArray();
    // Dedupe proof steps across all deltas, capped at the configured maximum.
    var proofSteps = trace.Deltas
        .Where(d => d.TrustDelta is not null)
        .SelectMany(d => d.TrustDelta!.ProofSteps)
        .Distinct()
        .Take(options.MaxProofSteps)
        .ToImmutableArray();
    var aggregateReachability = AggregateReachabilityImpact(trace.Deltas);
    var aggregateExploitability = DetermineExploitabilityFromScore(trace.Summary.RiskDelta);
    return new ChangeTracePredicate
    {
        // NOTE(review): when scan IDs are absent, both From/To fall back to the
        // same subject digest — confirm scan IDs are valid digest-field values.
        FromDigest = trace.Basis.FromScanId ?? trace.Subject.Digest,
        ToDigest = trace.Basis.ToScanId ?? trace.Subject.Digest,
        TenantId = options.TenantId,
        Deltas = deltas,
        Summary = new ChangeTracePredicateSummary
        {
            ChangedPackages = trace.Summary.ChangedPackages,
            ChangedSymbols = trace.Summary.ChangedSymbols,
            ChangedBytes = trace.Summary.ChangedBytes,
            RiskDelta = trace.Summary.RiskDelta,
            // Lower-cased enum name for canonical predicate output.
            Verdict = trace.Summary.Verdict.ToString().ToLowerInvariant()
        },
        TrustDelta = new TrustDeltaRecord
        {
            Score = trace.Summary.RiskDelta,
            BeforeScore = trace.Summary.BeforeRiskScore,
            AfterScore = trace.Summary.AfterRiskScore,
            ReachabilityImpact = aggregateReachability.ToString().ToLowerInvariant(),
            ExploitabilityImpact = aggregateExploitability.ToString().ToLowerInvariant()
        },
        ProofSteps = proofSteps,
        DiffMethods = trace.Basis.DiffMethod,
        Policies = trace.Basis.Policies,
        AnalyzedAt = trace.Basis.AnalyzedAt,
        AlgorithmVersion = trace.Basis.EngineVersion,
        CommitmentHash = trace.Commitment?.Sha256
    };
}
/// <summary>
/// Create an in-toto statement wrapping the change trace predicate.
/// The subject name prefers purl, then name, then digest.
/// </summary>
private ChangeTraceStatement CreateStatement(
    ChangeTraceModel trace,
    ChangeTracePredicate predicate)
{
    // Best available human-readable label for the subject.
    var label = trace.Subject.Purl ?? trace.Subject.Name ?? trace.Subject.Digest;

    var statementSubject = new Subject
    {
        Name = label,
        Digest = ParseDigest(trace.Subject.Digest)
    };

    return new ChangeTraceStatement
    {
        Subject = [statementSubject],
        Predicate = predicate
    };
}
/// <summary>
/// Parse a digest string into a dictionary of algorithm:value pairs.
/// "sha256:abc" yields {"sha256": "abc"}; a bare value is assumed to be SHA-256;
/// an empty or null input yields an empty dictionary.
/// </summary>
private static IReadOnlyDictionary<string, string> ParseDigest(string digestString)
{
    var parsed = new Dictionary<string, string>(StringComparer.Ordinal);
    if (string.IsNullOrEmpty(digestString))
    {
        return parsed;
    }

    var separator = digestString.IndexOf(':', StringComparison.Ordinal);
    if (separator <= 0)
    {
        // No "algorithm:" prefix (or a leading colon) — assume SHA-256.
        parsed["sha256"] = digestString;
    }
    else
    {
        parsed[digestString[..separator]] = digestString[(separator + 1)..];
    }

    return parsed;
}
/// <summary>
/// Aggregate reachability impact across deltas by taking the most severe value
/// present. Priority: Introduced > Increased > Reduced > Eliminated > Unchanged.
/// </summary>
private static ReachabilityImpact AggregateReachabilityImpact(
    ImmutableArray<PackageDelta> deltas)
{
    // Walk the severity ladder from worst to best; the first match wins.
    ReachabilityImpact[] severityOrder =
    [
        ReachabilityImpact.Introduced,
        ReachabilityImpact.Increased,
        ReachabilityImpact.Reduced,
        ReachabilityImpact.Eliminated
    ];

    foreach (var candidate in severityOrder)
    {
        if (deltas.Any(d => d.TrustDelta?.ReachabilityImpact == candidate))
        {
            return candidate;
        }
    }

    return ReachabilityImpact.Unchanged;
}
/// <summary>
/// Determine exploitability impact from the overall risk delta score.
/// Thresholds: &lt;= -0.5 eliminated, &lt; -0.3 down, &gt;= 0.5 introduced,
/// &gt; 0.3 up; anything in between is unchanged.
/// </summary>
private static ExploitabilityImpact DetermineExploitabilityFromScore(double riskDelta)
{
    // Check the extremes first, mirroring the original arm ordering.
    if (riskDelta <= -0.5)
    {
        return ExploitabilityImpact.Eliminated;
    }

    if (riskDelta < -0.3)
    {
        return ExploitabilityImpact.Down;
    }

    if (riskDelta >= 0.5)
    {
        return ExploitabilityImpact.Introduced;
    }

    return riskDelta > 0.3 ? ExploitabilityImpact.Up : ExploitabilityImpact.Unchanged;
}
}

View File

@@ -0,0 +1,52 @@
using StellaOps.Attestor.ProofChain.Models;
namespace StellaOps.Attestor.ProofChain.Generators;
public sealed partial class BackportProofGenerator
{
    /// <summary>
    /// Combine multiple evidence sources into a single proof with aggregated confidence.
    /// Uses the system clock; see the <see cref="TimeProvider"/> overload for tests.
    /// </summary>
    public static ProofBlob CombineEvidence(
        string cveId,
        string packagePurl,
        IReadOnlyList<ProofEvidence> evidences)
        => CombineEvidence(cveId, packagePurl, evidences, TimeProvider.System);

    /// <summary>
    /// Combine multiple evidence sources into a single proof, with timestamps
    /// drawn from the supplied <see cref="TimeProvider"/>.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="evidences"/> is empty.</exception>
    public static ProofBlob CombineEvidence(
        string cveId,
        string packagePurl,
        IReadOnlyList<ProofEvidence> evidences,
        TimeProvider timeProvider)
    {
        if (evidences.Count == 0)
        {
            throw new ArgumentException("At least one evidence required", nameof(evidences));
        }

        // Highest-tier evidence sets the base confidence; extra sources add a bounded boost.
        var aggregatedConfidence = ComputeAggregateConfidence(evidences);
        // Method label reflects which evidence tiers contributed.
        var proofMethod = DetermineMethod(evidences);

        var combined = new ProofBlob
        {
            ProofId = "", // Filled in by ProofHashing.WithHash below.
            SubjectId = $"{cveId}:{packagePurl}",
            Type = ProofBlobType.BackportFixed,
            CreatedAt = timeProvider.GetUtcNow(),
            Evidences = evidences,
            Method = proofMethod,
            Confidence = aggregatedConfidence,
            ToolVersion = ToolVersion,
            SnapshotId = GenerateSnapshotId(timeProvider)
        };

        return ProofHashing.WithHash(combined);
    }
}

View File

@@ -0,0 +1,68 @@
using StellaOps.Attestor.ProofChain.Models;
namespace StellaOps.Attestor.ProofChain.Generators;
public sealed partial class BackportProofGenerator
{
    // Per-evidence-type base confidence; unknown types fall back to 0.50.
    private static readonly Dictionary<EvidenceType, double> ConfidenceByEvidenceType = new()
    {
        [EvidenceType.DistroAdvisory] = 0.98,
        [EvidenceType.ChangelogMention] = 0.80,
        [EvidenceType.PatchHeader] = 0.85,
        [EvidenceType.BinaryFingerprint] = 0.70,
        [EvidenceType.VersionComparison] = 0.95,
        [EvidenceType.BuildCatalog] = 0.90
    };

    /// <summary>
    /// Aggregate confidence: best single-evidence confidence plus a diminishing
    /// bonus for corroborating sources, capped at 0.98 (never fully certain).
    /// </summary>
    private static double ComputeAggregateConfidence(IReadOnlyList<ProofEvidence> evidences)
    {
        if (evidences.Count == 0)
        {
            return 0.0;
        }

        var best = evidences.Max(e => DetermineEvidenceConfidence(e.Type));

        // Diminishing-returns bonus for multiple independent sources.
        double multiSourceBonus;
        if (evidences.Count <= 1)
        {
            multiSourceBonus = 0.0;
        }
        else if (evidences.Count == 2)
        {
            multiSourceBonus = 0.05;
        }
        else if (evidences.Count == 3)
        {
            multiSourceBonus = 0.08;
        }
        else
        {
            multiSourceBonus = 0.10;
        }

        return Math.Min(best + multiSourceBonus, 0.98);
    }

    /// <summary>
    /// Base confidence for a single evidence type.
    /// </summary>
    private static double DetermineEvidenceConfidence(EvidenceType type)
        => ConfidenceByEvidenceType.TryGetValue(type, out var confidence) ? confidence : 0.50;

    /// <summary>
    /// Derive the proof method label from the distinct evidence types present.
    /// A single type maps to its tier name; mixed types get a combined label.
    /// </summary>
    private static string DetermineMethod(IReadOnlyList<ProofEvidence> evidences)
    {
        var distinctTypes = evidences.Select(e => e.Type).Distinct().OrderBy(t => t).ToList();
        if (distinctTypes.Count != 1)
        {
            // Multiple evidence types - use combined method name
            return $"multi_tier_combined_{distinctTypes.Count}";
        }

        return distinctTypes[0] switch
        {
            EvidenceType.DistroAdvisory => "distro_advisory_tier1",
            EvidenceType.ChangelogMention => "changelog_mention_tier2",
            EvidenceType.PatchHeader => "patch_header_tier3",
            EvidenceType.BinaryFingerprint => "binary_fingerprint_tier4",
            EvidenceType.VersionComparison => "version_comparison",
            EvidenceType.BuildCatalog => "build_catalog",
            _ => "unknown"
        };
    }
}

View File

@@ -0,0 +1,58 @@
using StellaOps.Attestor.ProofChain.Models;
using System.Text.Json;
namespace StellaOps.Attestor.ProofChain.Generators;
public sealed partial class BackportProofGenerator
{
    /// <summary>
    /// Generate a "not affected" proof when the package version falls outside
    /// the vulnerable range (e.g. below the introduced version).
    /// </summary>
    public static ProofBlob NotAffected(
        string cveId,
        string packagePurl,
        string reason,
        JsonDocument versionData)
        => NotAffected(cveId, packagePurl, reason, versionData, TimeProvider.System);

    /// <summary>
    /// Generate a "not affected" proof with timestamps drawn from the supplied
    /// <see cref="TimeProvider"/>.
    /// </summary>
    public static ProofBlob NotAffected(
        string cveId,
        string packagePurl,
        string reason,
        JsonDocument versionData,
        TimeProvider timeProvider)
    {
        // Clone so the evidence element does not outlive the caller's JsonDocument.
        var versionElement = versionData.RootElement.Clone();
        var versionHash = ComputeDataHash(versionData.RootElement.GetRawText());

        var versionEvidence = new ProofEvidence
        {
            EvidenceId = $"evidence:version_comparison:{cveId}",
            Type = EvidenceType.VersionComparison,
            Source = "version_comparison",
            Timestamp = timeProvider.GetUtcNow(),
            Data = versionElement,
            DataHash = versionHash
        };

        var proof = new ProofBlob
        {
            ProofId = "", // Filled in by ProofHashing.WithHash below.
            SubjectId = $"{cveId}:{packagePurl}",
            Type = ProofBlobType.NotAffected,
            CreatedAt = timeProvider.GetUtcNow(),
            Evidences = new[] { versionEvidence },
            Method = reason,
            Confidence = 0.95,
            ToolVersion = ToolVersion,
            SnapshotId = GenerateSnapshotId(timeProvider)
        };

        return ProofHashing.WithHash(proof);
    }
}

View File

@@ -0,0 +1,72 @@
using StellaOps.Attestor.ProofChain.Models;
using System.Text.Json;
namespace StellaOps.Attestor.ProofChain.Generators;
public sealed partial class BackportProofGenerator
{
    /// <summary>
    /// Generate proof from distro advisory evidence (Tier 1) — the highest-confidence
    /// source, since the distribution itself asserts the backported fix.
    /// </summary>
    public static ProofBlob FromDistroAdvisory(
        string cveId,
        string packagePurl,
        string advisorySource,
        string advisoryId,
        string fixedVersion,
        DateTimeOffset advisoryDate,
        JsonDocument advisoryData)
        => FromDistroAdvisory(
            cveId, packagePurl, advisorySource, advisoryId,
            fixedVersion, advisoryDate, advisoryData, TimeProvider.System);

    /// <summary>
    /// Generate proof from distro advisory evidence with timestamps drawn from
    /// the supplied <see cref="TimeProvider"/>.
    /// </summary>
    public static ProofBlob FromDistroAdvisory(
        string cveId,
        string packagePurl,
        string advisorySource,
        string advisoryId,
        string fixedVersion,
        DateTimeOffset advisoryDate,
        JsonDocument advisoryData,
        TimeProvider timeProvider)
    {
        // NOTE(review): fixedVersion is accepted for signature compatibility but is
        // not embedded in the evidence payload here — confirm whether it should be.
        // Clone so the evidence element does not outlive the caller's JsonDocument.
        var advisoryElement = advisoryData.RootElement.Clone();
        var advisoryHash = ComputeDataHash(advisoryData.RootElement.GetRawText());

        var advisoryEvidence = new ProofEvidence
        {
            EvidenceId = $"evidence:distro:{advisorySource}:{advisoryId}",
            Type = EvidenceType.DistroAdvisory,
            Source = advisorySource,
            Timestamp = advisoryDate,
            Data = advisoryElement,
            DataHash = advisoryHash
        };

        var proof = new ProofBlob
        {
            ProofId = "", // Will be computed
            SubjectId = $"{cveId}:{packagePurl}",
            Type = ProofBlobType.BackportFixed,
            CreatedAt = timeProvider.GetUtcNow(),
            Evidences = new[] { advisoryEvidence },
            Method = "distro_advisory_tier1",
            Confidence = 0.98, // Highest confidence - authoritative source
            ToolVersion = ToolVersion,
            SnapshotId = GenerateSnapshotId(timeProvider)
        };

        return ProofHashing.WithHash(proof);
    }
}

View File

@@ -0,0 +1,58 @@
using StellaOps.Attestor.ProofChain.Models;
using StellaOps.Concelier.SourceIntel;
namespace StellaOps.Attestor.ProofChain.Generators;
public sealed partial class BackportProofGenerator
{
    /// <summary>
    /// Generate proof from changelog evidence (Tier 2).
    /// </summary>
    public static ProofBlob FromChangelog(
        string cveId,
        string packagePurl,
        ChangelogEntry changelogEntry,
        string changelogSource)
        => FromChangelog(cveId, packagePurl, changelogEntry, changelogSource, TimeProvider.System);

    /// <summary>
    /// Generate proof from changelog evidence with timestamps drawn from the
    /// supplied <see cref="TimeProvider"/>. Confidence is taken from the
    /// changelog entry's own estimate.
    /// </summary>
    public static ProofBlob FromChangelog(
        string cveId,
        string packagePurl,
        ChangelogEntry changelogEntry,
        string changelogSource,
        TimeProvider timeProvider)
    {
        var entryData = SerializeToElement(changelogEntry, out var entryBytes);
        var entryHash = ComputeDataHash(entryBytes);

        var changelogEvidence = new ProofEvidence
        {
            EvidenceId = $"evidence:changelog:{changelogSource}:{changelogEntry.Version}",
            Type = EvidenceType.ChangelogMention,
            Source = changelogSource,
            Timestamp = changelogEntry.Date,
            Data = entryData,
            DataHash = entryHash
        };

        var proof = new ProofBlob
        {
            ProofId = "", // Filled in by ProofHashing.WithHash below.
            SubjectId = $"{cveId}:{packagePurl}",
            Type = ProofBlobType.BackportFixed,
            CreatedAt = timeProvider.GetUtcNow(),
            Evidences = new[] { changelogEvidence },
            Method = "changelog_mention_tier2",
            Confidence = changelogEntry.Confidence,
            ToolVersion = ToolVersion,
            SnapshotId = GenerateSnapshotId(timeProvider)
        };

        return ProofHashing.WithHash(proof);
    }
}

View File

@@ -0,0 +1,56 @@
using StellaOps.Attestor.ProofChain.Models;
using StellaOps.Concelier.SourceIntel;
namespace StellaOps.Attestor.ProofChain.Generators;
public sealed partial class BackportProofGenerator
{
    /// <summary>
    /// Generate proof from patch header evidence (Tier 3).
    /// </summary>
    public static ProofBlob FromPatchHeader(
        string cveId,
        string packagePurl,
        PatchHeaderParseResult patchResult)
        => FromPatchHeader(cveId, packagePurl, patchResult, TimeProvider.System);

    /// <summary>
    /// Generate proof from patch header evidence with timestamps drawn from the
    /// supplied <see cref="TimeProvider"/>. Confidence is taken from the patch
    /// parser's own estimate.
    /// </summary>
    public static ProofBlob FromPatchHeader(
        string cveId,
        string packagePurl,
        PatchHeaderParseResult patchResult,
        TimeProvider timeProvider)
    {
        var headerData = SerializeToElement(patchResult, out var headerBytes);
        var headerHash = ComputeDataHash(headerBytes);

        var headerEvidence = new ProofEvidence
        {
            EvidenceId = $"evidence:patch_header:{patchResult.PatchFilePath}",
            Type = EvidenceType.PatchHeader,
            Source = patchResult.Origin,
            Timestamp = patchResult.ParsedAt,
            Data = headerData,
            DataHash = headerHash
        };

        var proof = new ProofBlob
        {
            ProofId = "", // Filled in by ProofHashing.WithHash below.
            SubjectId = $"{cveId}:{packagePurl}",
            Type = ProofBlobType.BackportFixed,
            CreatedAt = timeProvider.GetUtcNow(),
            Evidences = new[] { headerEvidence },
            Method = "patch_header_tier3",
            Confidence = patchResult.Confidence,
            ToolVersion = ToolVersion,
            SnapshotId = GenerateSnapshotId(timeProvider)
        };

        return ProofHashing.WithHash(proof);
    }
}

View File

@@ -0,0 +1,61 @@
using StellaOps.Attestor.ProofChain.Models;
using StellaOps.Feedser.Core.Models;
namespace StellaOps.Attestor.ProofChain.Generators;
public sealed partial class BackportProofGenerator
{
    /// <summary>
    /// Generate proof from patch signature (HunkSig) evidence (Tier 3+).
    /// </summary>
    public static ProofBlob FromPatchSignature(
        string cveId,
        string packagePurl,
        PatchSignature patchSig,
        bool exactMatch)
        => FromPatchSignature(cveId, packagePurl, patchSig, exactMatch, TimeProvider.System);

    /// <summary>
    /// Generate proof from patch signature evidence with timestamps drawn from
    /// the supplied <see cref="TimeProvider"/>. Exact hunk matches score higher
    /// than fuzzy ones.
    /// </summary>
    public static ProofBlob FromPatchSignature(
        string cveId,
        string packagePurl,
        PatchSignature patchSig,
        bool exactMatch,
        TimeProvider timeProvider)
    {
        var signatureData = SerializeToElement(patchSig, out var signatureBytes);
        var signatureHash = ComputeDataHash(signatureBytes);

        var signatureEvidence = new ProofEvidence
        {
            EvidenceId = $"evidence:hunksig:{patchSig.CommitSha}",
            Type = EvidenceType.PatchHeader, // Reuse PatchHeader type
            Source = patchSig.UpstreamRepo,
            Timestamp = patchSig.ExtractedAt,
            Data = signatureData,
            DataHash = signatureHash
        };

        // Confidence based on match quality
        double matchConfidence;
        string matchMethod;
        if (exactMatch)
        {
            matchConfidence = 0.90;
            matchMethod = "hunksig_exact_tier3";
        }
        else
        {
            matchConfidence = 0.75;
            matchMethod = "hunksig_fuzzy_tier3";
        }

        var proof = new ProofBlob
        {
            ProofId = "", // Filled in by ProofHashing.WithHash below.
            SubjectId = $"{cveId}:{packagePurl}",
            Type = ProofBlobType.BackportFixed,
            CreatedAt = timeProvider.GetUtcNow(),
            Evidences = new[] { signatureEvidence },
            Method = matchMethod,
            Confidence = matchConfidence,
            ToolVersion = ToolVersion,
            SnapshotId = GenerateSnapshotId(timeProvider)
        };

        return ProofHashing.WithHash(proof);
    }
}

View File

@@ -0,0 +1,69 @@
using StellaOps.Attestor.ProofChain.Models;
using System.Text.Json;
namespace StellaOps.Attestor.ProofChain.Generators;
public sealed partial class BackportProofGenerator
{
    /// <summary>
    /// Generate proof from binary fingerprint evidence (Tier 4).
    /// </summary>
    public static ProofBlob FromBinaryFingerprint(
        string cveId,
        string packagePurl,
        string fingerprintMethod,
        string fingerprintValue,
        JsonDocument fingerprintData,
        double confidence)
        => FromBinaryFingerprint(
            cveId, packagePurl, fingerprintMethod, fingerprintValue,
            fingerprintData, confidence, TimeProvider.System);

    /// <summary>
    /// Generate proof from binary fingerprint evidence with timestamps drawn from
    /// the supplied <see cref="TimeProvider"/>. Confidence is caller-supplied
    /// since it depends on the fingerprinting technique used.
    /// </summary>
    public static ProofBlob FromBinaryFingerprint(
        string cveId,
        string packagePurl,
        string fingerprintMethod,
        string fingerprintValue,
        JsonDocument fingerprintData,
        double confidence,
        TimeProvider timeProvider)
    {
        // Clone so the evidence element does not outlive the caller's JsonDocument.
        var fingerprintElement = fingerprintData.RootElement.Clone();
        var fingerprintHash = ComputeDataHash(fingerprintData.RootElement.GetRawText());

        var fingerprintEvidence = new ProofEvidence
        {
            EvidenceId = $"evidence:binary:{fingerprintMethod}:{fingerprintValue}",
            Type = EvidenceType.BinaryFingerprint,
            Source = fingerprintMethod,
            Timestamp = timeProvider.GetUtcNow(),
            Data = fingerprintElement,
            DataHash = fingerprintHash
        };

        var proof = new ProofBlob
        {
            ProofId = "", // Filled in by ProofHashing.WithHash below.
            SubjectId = $"{cveId}:{packagePurl}",
            Type = ProofBlobType.BackportFixed,
            CreatedAt = timeProvider.GetUtcNow(),
            Evidences = new[] { fingerprintEvidence },
            Method = $"binary_{fingerprintMethod}_tier4",
            Confidence = confidence,
            ToolVersion = ToolVersion,
            SnapshotId = GenerateSnapshotId(timeProvider)
        };

        return ProofHashing.WithHash(proof);
    }
}

View File

@@ -0,0 +1,79 @@
using StellaOps.Attestor.ProofChain.Models;
namespace StellaOps.Attestor.ProofChain.Generators;
public sealed partial class BackportProofGenerator
{
    /// <summary>
    /// Generate "vulnerable" proof when no fix evidence was found.
    /// </summary>
    public static ProofBlob Vulnerable(
        string cveId,
        string packagePurl,
        string reason)
        => Vulnerable(cveId, packagePurl, reason, TimeProvider.System);

    /// <summary>
    /// Generate "vulnerable" proof with timestamps drawn from the supplied
    /// <see cref="TimeProvider"/>. The evidence list is intentionally empty:
    /// the absence of a fix is itself the finding, hence the reduced confidence.
    /// </summary>
    public static ProofBlob Vulnerable(
        string cveId,
        string packagePurl,
        string reason,
        TimeProvider timeProvider)
    {
        var vulnerableProof = new ProofBlob
        {
            ProofId = "", // Filled in by ProofHashing.WithHash below.
            SubjectId = $"{cveId}:{packagePurl}",
            Type = ProofBlobType.Vulnerable,
            CreatedAt = timeProvider.GetUtcNow(),
            Evidences = Array.Empty<ProofEvidence>(),
            Method = reason,
            Confidence = 0.85, // Lower confidence - absence of evidence is not evidence of absence
            ToolVersion = ToolVersion,
            SnapshotId = GenerateSnapshotId(timeProvider)
        };

        return ProofHashing.WithHash(vulnerableProof);
    }

    /// <summary>
    /// Generate "unknown" proof when confidence is too low or data insufficient.
    /// </summary>
    public static ProofBlob Unknown(
        string cveId,
        string packagePurl,
        string reason,
        IReadOnlyList<ProofEvidence> partialEvidences)
        => Unknown(cveId, packagePurl, reason, partialEvidences, TimeProvider.System);

    /// <summary>
    /// Generate "unknown" proof with timestamps drawn from the supplied
    /// <see cref="TimeProvider"/>. Partial evidence is retained for
    /// auditability, but confidence is pinned to zero.
    /// </summary>
    public static ProofBlob Unknown(
        string cveId,
        string packagePurl,
        string reason,
        IReadOnlyList<ProofEvidence> partialEvidences,
        TimeProvider timeProvider)
    {
        var unknownProof = new ProofBlob
        {
            ProofId = "", // Filled in by ProofHashing.WithHash below.
            SubjectId = $"{cveId}:{packagePurl}",
            Type = ProofBlobType.Unknown,
            CreatedAt = timeProvider.GetUtcNow(),
            Evidences = partialEvidences,
            Method = reason,
            Confidence = 0.0,
            ToolVersion = ToolVersion,
            SnapshotId = GenerateSnapshotId(timeProvider)
        };

        return ProofHashing.WithHash(unknownProof);
    }
}

View File

@@ -1,535 +1,18 @@
using StellaOps.Attestor.ProofChain.Models;
using StellaOps.Canonical.Json;
using StellaOps.Concelier.SourceIntel;
using StellaOps.Feedser.Core;
using StellaOps.Feedser.Core.Models;
using System.Text;
using System.Text.Json;
namespace StellaOps.Attestor.ProofChain.Generators;
/// <summary>
/// Generates ProofBlobs from multi-tier backport detection evidence.
/// Combines distro advisories, changelog mentions, patch headers, and binary fingerprints.
/// </summary>
public sealed class BackportProofGenerator
public sealed partial class BackportProofGenerator
{
private const string ToolVersion = "1.0.0";
/// <summary>
/// Generate proof from distro advisory evidence (Tier 1).
/// </summary>
public static ProofBlob FromDistroAdvisory(
string cveId,
string packagePurl,
string advisorySource,
string advisoryId,
string fixedVersion,
DateTimeOffset advisoryDate,
JsonDocument advisoryData)
{
return FromDistroAdvisory(
cveId,
packagePurl,
advisorySource,
advisoryId,
fixedVersion,
advisoryDate,
advisoryData,
TimeProvider.System);
}
public static ProofBlob FromDistroAdvisory(
string cveId,
string packagePurl,
string advisorySource,
string advisoryId,
string fixedVersion,
DateTimeOffset advisoryDate,
JsonDocument advisoryData,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
var evidenceId = $"evidence:distro:{advisorySource}:{advisoryId}";
var dataElement = advisoryData.RootElement.Clone();
var dataHash = ComputeDataHash(advisoryData.RootElement.GetRawText());
var evidence = new ProofEvidence
{
EvidenceId = evidenceId,
Type = EvidenceType.DistroAdvisory,
Source = advisorySource,
Timestamp = advisoryDate,
Data = dataElement,
DataHash = dataHash
};
var proof = new ProofBlob
{
ProofId = "", // Will be computed
SubjectId = subjectId,
Type = ProofBlobType.BackportFixed,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = new[] { evidence },
Method = "distro_advisory_tier1",
Confidence = 0.98, // Highest confidence - authoritative source
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
}
/// <summary>
/// Generate proof from changelog evidence (Tier 2).
/// </summary>
public static ProofBlob FromChangelog(
string cveId,
string packagePurl,
ChangelogEntry changelogEntry,
string changelogSource)
{
return FromChangelog(cveId, packagePurl, changelogEntry, changelogSource, TimeProvider.System);
}
public static ProofBlob FromChangelog(
string cveId,
string packagePurl,
ChangelogEntry changelogEntry,
string changelogSource,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
var evidenceId = $"evidence:changelog:{changelogSource}:{changelogEntry.Version}";
var changelogData = SerializeToElement(changelogEntry, out var changelogBytes);
var dataHash = ComputeDataHash(changelogBytes);
var evidence = new ProofEvidence
{
EvidenceId = evidenceId,
Type = EvidenceType.ChangelogMention,
Source = changelogSource,
Timestamp = changelogEntry.Date,
Data = changelogData,
DataHash = dataHash
};
var proof = new ProofBlob
{
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.BackportFixed,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = new[] { evidence },
Method = "changelog_mention_tier2",
Confidence = changelogEntry.Confidence,
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
}
/// <summary>
/// Generate proof from patch header evidence (Tier 3).
/// </summary>
public static ProofBlob FromPatchHeader(
string cveId,
string packagePurl,
PatchHeaderParseResult patchResult)
{
return FromPatchHeader(cveId, packagePurl, patchResult, TimeProvider.System);
}
public static ProofBlob FromPatchHeader(
string cveId,
string packagePurl,
PatchHeaderParseResult patchResult,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
var evidenceId = $"evidence:patch_header:{patchResult.PatchFilePath}";
var patchData = SerializeToElement(patchResult, out var patchBytes);
var dataHash = ComputeDataHash(patchBytes);
var evidence = new ProofEvidence
{
EvidenceId = evidenceId,
Type = EvidenceType.PatchHeader,
Source = patchResult.Origin,
Timestamp = patchResult.ParsedAt,
Data = patchData,
DataHash = dataHash
};
var proof = new ProofBlob
{
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.BackportFixed,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = new[] { evidence },
Method = "patch_header_tier3",
Confidence = patchResult.Confidence,
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
}
/// <summary>
/// Generate proof from patch signature (HunkSig) evidence (Tier 3+).
/// </summary>
public static ProofBlob FromPatchSignature(
string cveId,
string packagePurl,
PatchSignature patchSig,
bool exactMatch)
{
return FromPatchSignature(cveId, packagePurl, patchSig, exactMatch, TimeProvider.System);
}
public static ProofBlob FromPatchSignature(
string cveId,
string packagePurl,
PatchSignature patchSig,
bool exactMatch,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
var evidenceId = $"evidence:hunksig:{patchSig.CommitSha}";
var patchData = SerializeToElement(patchSig, out var patchBytes);
var dataHash = ComputeDataHash(patchBytes);
var evidence = new ProofEvidence
{
EvidenceId = evidenceId,
Type = EvidenceType.PatchHeader, // Reuse PatchHeader type
Source = patchSig.UpstreamRepo,
Timestamp = patchSig.ExtractedAt,
Data = patchData,
DataHash = dataHash
};
// Confidence based on match quality
var confidence = exactMatch ? 0.90 : 0.75;
var proof = new ProofBlob
{
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.BackportFixed,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = new[] { evidence },
Method = exactMatch ? "hunksig_exact_tier3" : "hunksig_fuzzy_tier3",
Confidence = confidence,
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
}
/// <summary>
/// Generate proof from binary fingerprint evidence (Tier 4).
/// </summary>
public static ProofBlob FromBinaryFingerprint(
string cveId,
string packagePurl,
string fingerprintMethod,
string fingerprintValue,
JsonDocument fingerprintData,
double confidence)
{
return FromBinaryFingerprint(
cveId,
packagePurl,
fingerprintMethod,
fingerprintValue,
fingerprintData,
confidence,
TimeProvider.System);
}
public static ProofBlob FromBinaryFingerprint(
string cveId,
string packagePurl,
string fingerprintMethod,
string fingerprintValue,
JsonDocument fingerprintData,
double confidence,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
var evidenceId = $"evidence:binary:{fingerprintMethod}:{fingerprintValue}";
var dataElement = fingerprintData.RootElement.Clone();
var dataHash = ComputeDataHash(fingerprintData.RootElement.GetRawText());
var evidence = new ProofEvidence
{
EvidenceId = evidenceId,
Type = EvidenceType.BinaryFingerprint,
Source = fingerprintMethod,
Timestamp = timeProvider.GetUtcNow(),
Data = dataElement,
DataHash = dataHash
};
var proof = new ProofBlob
{
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.BackportFixed,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = new[] { evidence },
Method = $"binary_{fingerprintMethod}_tier4",
Confidence = confidence,
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
}
/// <summary>
/// Combine multiple evidence sources into a single proof with aggregated confidence.
/// </summary>
public static ProofBlob CombineEvidence(
string cveId,
string packagePurl,
IReadOnlyList<ProofEvidence> evidences)
{
return CombineEvidence(cveId, packagePurl, evidences, TimeProvider.System);
}
public static ProofBlob CombineEvidence(
string cveId,
string packagePurl,
IReadOnlyList<ProofEvidence> evidences,
TimeProvider timeProvider)
{
if (evidences.Count == 0)
{
throw new ArgumentException("At least one evidence required", nameof(evidences));
}
var subjectId = $"{cveId}:{packagePurl}";
// Aggregate confidence: use highest tier evidence as base, boost for multiple sources
var confidence = ComputeAggregateConfidence(evidences);
// Determine method based on evidence types
var method = DetermineMethod(evidences);
var proof = new ProofBlob
{
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.BackportFixed,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = evidences,
Method = method,
Confidence = confidence,
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
}
/// <summary>
/// Generate "not affected" proof when package version is below introduced range.
/// </summary>
public static ProofBlob NotAffected(
string cveId,
string packagePurl,
string reason,
JsonDocument versionData)
{
return NotAffected(cveId, packagePurl, reason, versionData, TimeProvider.System);
}
public static ProofBlob NotAffected(
string cveId,
string packagePurl,
string reason,
JsonDocument versionData,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
var evidenceId = $"evidence:version_comparison:{cveId}";
var dataElement = versionData.RootElement.Clone();
var dataHash = ComputeDataHash(versionData.RootElement.GetRawText());
var evidence = new ProofEvidence
{
EvidenceId = evidenceId,
Type = EvidenceType.VersionComparison,
Source = "version_comparison",
Timestamp = timeProvider.GetUtcNow(),
Data = dataElement,
DataHash = dataHash
};
var proof = new ProofBlob
{
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.NotAffected,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = new[] { evidence },
Method = reason,
Confidence = 0.95,
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
}
/// <summary>
/// Generate "vulnerable" proof when no fix evidence found.
/// </summary>
public static ProofBlob Vulnerable(
string cveId,
string packagePurl,
string reason)
{
return Vulnerable(cveId, packagePurl, reason, TimeProvider.System);
}
public static ProofBlob Vulnerable(
string cveId,
string packagePurl,
string reason,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
// Empty evidence list - absence of fix is the evidence
var proof = new ProofBlob
{
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.Vulnerable,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = Array.Empty<ProofEvidence>(),
Method = reason,
Confidence = 0.85, // Lower confidence - absence of evidence is not evidence of absence
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
}
/// <summary>
/// Generate "unknown" proof when confidence is too low or data insufficient.
/// </summary>
public static ProofBlob Unknown(
string cveId,
string packagePurl,
string reason,
IReadOnlyList<ProofEvidence> partialEvidences)
{
return Unknown(cveId, packagePurl, reason, partialEvidences, TimeProvider.System);
}
public static ProofBlob Unknown(
string cveId,
string packagePurl,
string reason,
IReadOnlyList<ProofEvidence> partialEvidences,
TimeProvider timeProvider)
{
var subjectId = $"{cveId}:{packagePurl}";
var proof = new ProofBlob
{
ProofId = "",
SubjectId = subjectId,
Type = ProofBlobType.Unknown,
CreatedAt = timeProvider.GetUtcNow(),
Evidences = partialEvidences,
Method = reason,
Confidence = 0.0,
ToolVersion = ToolVersion,
SnapshotId = GenerateSnapshotId(timeProvider)
};
return ProofHashing.WithHash(proof);
}
private static double ComputeAggregateConfidence(IReadOnlyList<ProofEvidence> evidences)
{
// Confidence aggregation strategy:
// 1. Start with highest individual confidence
// 2. Add bonus for multiple independent sources
// 3. Cap at 0.98 (never 100% certain)
var baseConfidence = evidences.Count switch
{
0 => 0.0,
1 => DetermineEvidenceConfidence(evidences[0].Type),
_ => evidences.Max(e => DetermineEvidenceConfidence(e.Type))
};
// Bonus for multiple sources (diminishing returns)
var multiSourceBonus = evidences.Count switch
{
<= 1 => 0.0,
2 => 0.05,
3 => 0.08,
_ => 0.10
};
return Math.Min(baseConfidence + multiSourceBonus, 0.98);
}
private static double DetermineEvidenceConfidence(EvidenceType type)
{
return type switch
{
EvidenceType.DistroAdvisory => 0.98,
EvidenceType.ChangelogMention => 0.80,
EvidenceType.PatchHeader => 0.85,
EvidenceType.BinaryFingerprint => 0.70,
EvidenceType.VersionComparison => 0.95,
EvidenceType.BuildCatalog => 0.90,
_ => 0.50
};
}
/// <summary>
/// Derives the proof "method" label from the distinct evidence tiers present.
/// Exactly one tier maps to its canonical tier name; any other count collapses
/// to a combined label carrying the number of distinct tiers.
/// </summary>
private static string DetermineMethod(IReadOnlyList<ProofEvidence> evidences)
{
    var distinctTiers = evidences
        .Select(e => e.Type)
        .Distinct()
        .OrderBy(t => t)
        .ToList();

    if (distinctTiers.Count != 1)
    {
        // Zero or multiple evidence tiers contributed.
        return $"multi_tier_combined_{distinctTiers.Count}";
    }

    return distinctTiers[0] switch
    {
        EvidenceType.DistroAdvisory => "distro_advisory_tier1",
        EvidenceType.ChangelogMention => "changelog_mention_tier2",
        EvidenceType.PatchHeader => "patch_header_tier3",
        EvidenceType.BinaryFingerprint => "binary_fingerprint_tier4",
        EvidenceType.VersionComparison => "version_comparison",
        EvidenceType.BuildCatalog => "build_catalog",
        _ => "unknown"
    };
}
private static string GenerateSnapshotId(TimeProvider timeProvider)
{
// Snapshot ID format: YYYYMMDD-HHMMSS-UTC

View File

@@ -0,0 +1,95 @@
// -----------------------------------------------------------------------------
// BinaryFingerprintEvidenceGenerator.Helpers.cs
// Helper and computation methods for BinaryFingerprintEvidenceGenerator.
// -----------------------------------------------------------------------------
using StellaOps.Attestor.ProofChain.Models;
using StellaOps.Attestor.ProofChain.Predicates;
using StellaOps.Canonical.Json;
using System.Collections.Immutable;
namespace StellaOps.Attestor.ProofChain.Generators;
/// <summary>
/// Helper and computation methods for <see cref="BinaryFingerprintEvidenceGenerator"/>.
/// </summary>
public sealed partial class BinaryFingerprintEvidenceGenerator
{
    /// <summary>
    /// Generates proof segments for multiple binary findings in batch.
    /// </summary>
    /// <param name="predicates">Predicates to convert; processed in input order.</param>
    /// <returns>One proof blob per predicate, in the same order.</returns>
    public ImmutableArray<ProofBlob> GenerateBatch(
        IEnumerable<BinaryFingerprintEvidencePredicate> predicates)
        => predicates.Select(Generate).ToImmutableArray();

    /// <summary>
    /// Creates a <see cref="BinaryFingerprintEvidencePredicate"/> from scan findings.
    /// </summary>
    /// <param name="identity">Identity of the binary the findings refer to.</param>
    /// <param name="layerDigest">Digest of the layer containing the binary.</param>
    /// <param name="matches">Vulnerability matches recorded for the binary.</param>
    /// <param name="scanContext">Optional scan context; null when unavailable.</param>
    public static BinaryFingerprintEvidencePredicate CreatePredicate(
        BinaryIdentityInfo identity, string layerDigest,
        IEnumerable<BinaryVulnMatchInfo> matches, ScanContextInfo? scanContext = null)
        => new BinaryFingerprintEvidencePredicate
        {
            BinaryIdentity = identity,
            LayerDigest = layerDigest,
            Matches = matches.ToImmutableArray(),
            ScanContext = scanContext
        };

    /// <summary>
    /// Builds one <see cref="ProofEvidence"/> entry per vulnerability match,
    /// each carrying the canonical-JSON hash of its serialized match payload.
    /// </summary>
    private List<ProofEvidence> BuildEvidenceList(BinaryFingerprintEvidencePredicate predicate)
    {
        var entries = new List<ProofEvidence>();
        foreach (var match in predicate.Matches)
        {
            // Serialize the match and hash its canonicalized JSON form.
            var payload = SerializeToElement(match, GetJsonOptions(), out var rawJson);
            var digest = CanonJson.Sha256Prefixed(CanonJson.CanonicalizeParsedJson(rawJson));
            entries.Add(new ProofEvidence
            {
                EvidenceId = $"evidence:binary:{predicate.BinaryIdentity.BinaryKey}:{match.CveId}",
                Type = EvidenceType.BinaryFingerprint,
                Source = match.Method,
                Timestamp = _timeProvider.GetUtcNow(),
                Data = payload,
                DataHash = digest
            });
        }
        return entries;
    }

    /// <summary>
    /// Classifies the overall proof type from per-match fix states:
    /// all "fixed" → BackportFixed; any "vulnerable" or missing fix status →
    /// Vulnerable; all "not_affected" → NotAffected; otherwise Unknown.
    /// </summary>
    private static ProofBlobType DetermineProofType(ImmutableArray<BinaryVulnMatchInfo> matches)
    {
        if (matches.IsDefaultOrEmpty)
        {
            return ProofBlobType.Unknown;
        }

        static bool HasState(BinaryVulnMatchInfo match, string state) =>
            match.FixStatus?.State?.Equals(state, StringComparison.OrdinalIgnoreCase) == true;

        if (matches.All(m => HasState(m, "fixed")))
        {
            return ProofBlobType.BackportFixed;
        }
        if (matches.Any(m => m.FixStatus is null || HasState(m, "vulnerable")))
        {
            return ProofBlobType.Vulnerable;
        }
        if (matches.All(m => HasState(m, "not_affected")))
        {
            return ProofBlobType.NotAffected;
        }
        return ProofBlobType.Unknown;
    }

    /// <summary>
    /// Computes a method-weighted average of match confidences, capped at 0.98.
    /// "buildid_catalog" carries full weight; weaker match methods count less.
    /// Returns 0.0 when there are no matches.
    /// </summary>
    private static double ComputeAggregateConfidence(ImmutableArray<BinaryVulnMatchInfo> matches)
    {
        if (matches.IsDefaultOrEmpty)
        {
            return 0.0;
        }

        var weights = matches.Select(m => m.Method switch
        {
            "buildid_catalog" => 1.0,
            "fingerprint_match" => 0.8,
            "range_match" => 0.6,
            _ => 0.5
        }).ToArray();

        var totalWeight = weights.Sum();
        var weightedSum = matches.Zip(weights, (m, w) => (double)m.Confidence * w).Sum();
        return totalWeight > 0 ? Math.Min(weightedSum / totalWeight, 0.98) : 0.0;
    }
}

View File

@@ -1,14 +1,12 @@
// -----------------------------------------------------------------------------
// BinaryFingerprintEvidenceGenerator.cs
// Sprint: SPRINT_20251226_014_BINIDX
// Task: SCANINT-11 Implement proof segment generation in Attestor
// Task: SCANINT-11 - Implement proof segment generation in Attestor
// -----------------------------------------------------------------------------
using StellaOps.Attestor.ProofChain.Models;
using StellaOps.Attestor.ProofChain.Predicates;
using StellaOps.Canonical.Json;
using System.Collections.Immutable;
using System.Text.Json;
namespace StellaOps.Attestor.ProofChain.Generators;
@@ -17,7 +15,7 @@ namespace StellaOps.Attestor.ProofChain.Generators;
/// Generates binary fingerprint evidence proof segments for scanner findings.
/// Creates attestable evidence of binary vulnerability matches.
/// </summary>
public sealed class BinaryFingerprintEvidenceGenerator
public sealed partial class BinaryFingerprintEvidenceGenerator
{
private const string ToolId = "stellaops.binaryindex";
private const string ToolVersion = "1.0.0";
@@ -43,38 +41,17 @@ public sealed class BinaryFingerprintEvidenceGenerator
var predicateJson = SerializeToElement(predicate, GetJsonOptions(), out var predicateBytes);
var dataHash = CanonJson.Sha256Prefixed(CanonJson.CanonicalizeParsedJson(predicateBytes));
// Create subject ID from binary key and scan context
var subjectId = $"binary:{predicate.BinaryIdentity.BinaryKey}";
if (predicate.ScanContext is not null)
{
subjectId = $"{predicate.ScanContext.ScanId}:{subjectId}";
}
// Create evidence entry for each match
var evidences = new List<ProofEvidence>();
foreach (var match in predicate.Matches)
{
var matchData = SerializeToElement(match, GetJsonOptions(), out var matchBytes);
var matchHash = CanonJson.Sha256Prefixed(CanonJson.CanonicalizeParsedJson(matchBytes));
evidences.Add(new ProofEvidence
{
EvidenceId = $"evidence:binary:{predicate.BinaryIdentity.BinaryKey}:{match.CveId}",
Type = EvidenceType.BinaryFingerprint,
Source = match.Method,
Timestamp = _timeProvider.GetUtcNow(),
Data = matchData,
DataHash = matchHash
});
}
// Determine proof type based on matches
var evidences = BuildEvidenceList(predicate);
var proofType = DetermineProofType(predicate.Matches);
var confidence = ComputeAggregateConfidence(predicate.Matches);
var proof = new ProofBlob
{
ProofId = "", // Will be computed by ProofHashing.WithHash
ProofId = "",
SubjectId = subjectId,
Type = proofType,
CreatedAt = _timeProvider.GetUtcNow(),
@@ -88,122 +65,20 @@ public sealed class BinaryFingerprintEvidenceGenerator
return ProofHashing.WithHash(proof);
}
/// <summary>
/// Generate proof segments for multiple binary findings in batch.
/// </summary>
public ImmutableArray<ProofBlob> GenerateBatch(
IEnumerable<BinaryFingerprintEvidencePredicate> predicates)
{
var results = new List<ProofBlob>();
foreach (var predicate in predicates)
{
results.Add(Generate(predicate));
}
return results.ToImmutableArray();
}
/// <summary>
/// Create a BinaryFingerprintEvidencePredicate from scan findings.
/// </summary>
public static BinaryFingerprintEvidencePredicate CreatePredicate(
BinaryIdentityInfo identity,
string layerDigest,
IEnumerable<BinaryVulnMatchInfo> matches,
ScanContextInfo? scanContext = null)
{
return new BinaryFingerprintEvidencePredicate
{
BinaryIdentity = identity,
LayerDigest = layerDigest,
Matches = matches.ToImmutableArray(),
ScanContext = scanContext
};
}
private static ProofBlobType DetermineProofType(ImmutableArray<BinaryVulnMatchInfo> matches)
{
if (matches.IsDefaultOrEmpty)
{
return ProofBlobType.Unknown;
}
// Check if all matches have fix status indicating fixed
var allFixed = matches.All(m =>
m.FixStatus?.State?.Equals("fixed", StringComparison.OrdinalIgnoreCase) == true);
if (allFixed)
{
return ProofBlobType.BackportFixed;
}
// Check if any match is vulnerable
var anyVulnerable = matches.Any(m =>
m.FixStatus?.State?.Equals("vulnerable", StringComparison.OrdinalIgnoreCase) == true ||
m.FixStatus is null);
if (anyVulnerable)
{
return ProofBlobType.Vulnerable;
}
// Check for not_affected
var allNotAffected = matches.All(m =>
m.FixStatus?.State?.Equals("not_affected", StringComparison.OrdinalIgnoreCase) == true);
if (allNotAffected)
{
return ProofBlobType.NotAffected;
}
return ProofBlobType.Unknown;
}
private static double ComputeAggregateConfidence(ImmutableArray<BinaryVulnMatchInfo> matches)
{
if (matches.IsDefaultOrEmpty)
{
return 0.0;
}
// Use average confidence, weighted by match method
var weightedSum = 0.0;
var totalWeight = 0.0;
foreach (var match in matches)
{
var methodWeight = match.Method switch
{
"buildid_catalog" => 1.0,
"fingerprint_match" => 0.8,
"range_match" => 0.6,
_ => 0.5
};
weightedSum += (double)match.Confidence * methodWeight;
totalWeight += methodWeight;
}
return totalWeight > 0 ? Math.Min(weightedSum / totalWeight, 0.98) : 0.0;
}
private string GenerateSnapshotId()
{
return _timeProvider.GetUtcNow().ToString("yyyyMMdd-HHmmss") + "-UTC";
}
private static JsonElement SerializeToElement<T>(
T value,
JsonSerializerOptions options,
out byte[] jsonBytes)
internal static JsonElement SerializeToElement<T>(
T value, JsonSerializerOptions options, out byte[] jsonBytes)
{
jsonBytes = JsonSerializer.SerializeToUtf8Bytes(value, options);
using var document = JsonDocument.Parse(jsonBytes);
return document.RootElement.Clone();
}
private static JsonSerializerOptions GetJsonOptions()
internal static JsonSerializerOptions GetJsonOptions()
{
return new JsonSerializerOptions
{

View File

@@ -0,0 +1,33 @@
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Generators;
/// <summary>
/// Summary of evidence tiers used in a proof.
/// </summary>
public sealed record EvidenceSummary
{
    /// <summary>Total number of evidence entries backing the proof.</summary>
    [JsonPropertyName("total_evidences")]
    public required int TotalEvidences { get; init; }

    /// <summary>Per-tier breakdown: one entry per distinct evidence type.</summary>
    [JsonPropertyName("tiers")]
    public required IReadOnlyList<TierSummary> Tiers { get; init; }

    /// <summary>Identifiers of every evidence entry included in the proof.</summary>
    [JsonPropertyName("evidence_ids")]
    public required IReadOnlyList<string> EvidenceIds { get; init; }
}
/// <summary>
/// Summary of a single evidence tier.
/// </summary>
public sealed record TierSummary
{
    /// <summary>Evidence type name for this tier (enum value as string).</summary>
    [JsonPropertyName("type")]
    public required string Type { get; init; }

    /// <summary>Number of evidence entries in this tier.</summary>
    [JsonPropertyName("count")]
    public required int Count { get; init; }

    /// <summary>Distinct evidence sources that contributed to this tier.</summary>
    [JsonPropertyName("sources")]
    public required IReadOnlyList<string> Sources { get; init; }
}

View File

@@ -0,0 +1,89 @@
using StellaOps.Attestor.ProofChain.Models;
using StellaOps.Attestor.ProofChain.Statements;
using StellaOps.Canonical.Json;
namespace StellaOps.Attestor.ProofChain.Generators;
/// <summary>
/// Helper methods for VexProofIntegrator.
/// </summary>
public sealed partial class VexProofIntegrator
{
    /// <summary>
    /// Maps a proof blob type to the corresponding VEX status string.
    /// </summary>
    private static string DetermineVexStatus(ProofBlobType type) => type switch
    {
        ProofBlobType.BackportFixed => "fixed",
        ProofBlobType.NotAffected => "not_affected",
        ProofBlobType.Vulnerable => "affected",
        // Unknown — and any future proof type — takes the conservative default.
        _ => "under_investigation"
    };

    /// <summary>
    /// Produces a human-readable justification from the proof's type, method,
    /// and confidence.
    /// </summary>
    private static string DetermineJustification(ProofBlob proof) => proof.Type switch
    {
        ProofBlobType.BackportFixed =>
            $"Backport fix detected via {proof.Method} with {proof.Confidence:P0} confidence",
        ProofBlobType.NotAffected =>
            $"Not affected: {proof.Method}",
        ProofBlobType.Vulnerable =>
            $"No fix evidence found via {proof.Method}",
        ProofBlobType.Unknown =>
            $"Insufficient evidence: {proof.Method}",
        _ => "Unknown status"
    };

    /// <summary>
    /// Summarizes evidences by tier: one <see cref="TierSummary"/> per distinct
    /// evidence type, plus the full list of evidence ids.
    /// </summary>
    private static EvidenceSummary GenerateEvidenceSummary(IReadOnlyList<ProofEvidence> evidences)
    {
        var byTier = new List<TierSummary>();
        foreach (var group in evidences.GroupBy(e => e.Type))
        {
            byTier.Add(new TierSummary
            {
                Type = group.Key.ToString(),
                Count = group.Count(),
                Sources = group.Select(e => e.Source).Distinct().ToList()
            });
        }

        return new EvidenceSummary
        {
            TotalEvidences = evidences.Count,
            Tiers = byTier,
            EvidenceIds = evidences.Select(e => e.EvidenceId).ToList()
        };
    }

    /// <summary>
    /// Extracts the vulnerability id: everything before the first ':' of a
    /// subject id shaped like "CVE-XXXX-YYYY:pkg:...".
    /// </summary>
    private static string ExtractCveId(string subjectId)
    {
        var separator = subjectId.IndexOf(':');
        return separator < 0 ? subjectId : subjectId[..separator];
    }

    /// <summary>
    /// Hashes the PURL portion (after the first ':') of a subject id; when no
    /// separator is present the whole id is hashed instead.
    /// </summary>
    private static string ExtractPurlHash(string subjectId)
    {
        var separator = subjectId.IndexOf(':');
        var material = separator < 0 ? subjectId : subjectId[(separator + 1)..];
        return CanonJson.Sha256Hex(System.Text.Encoding.UTF8.GetBytes(material));
    }

    /// <summary>
    /// Strips the proof-specific extensions from an extended payload, yielding
    /// the standard VEX verdict payload for in-toto compatibility.
    /// </summary>
    private static VexVerdictPayload ConvertToStandardPayload(VexVerdictProofPayload proofPayload) =>
        new VexVerdictPayload
        {
            SbomEntryId = proofPayload.SbomEntryId,
            VulnerabilityId = proofPayload.VulnerabilityId,
            Status = proofPayload.Status,
            Justification = proofPayload.Justification,
            PolicyVersion = proofPayload.PolicyVersion,
            ReasoningId = proofPayload.ReasoningId,
            VexVerdictId = proofPayload.VexVerdictId
        };
}

View File

@@ -0,0 +1,60 @@
using StellaOps.Attestor.ProofChain.Models;
using StellaOps.Attestor.ProofChain.Statements;
using StellaOps.Canonical.Json;
namespace StellaOps.Attestor.ProofChain.Generators;
/// <summary>
/// Extended metadata generation methods for VexProofIntegrator.
/// </summary>
public sealed partial class VexProofIntegrator
{
    /// <summary>
    /// Creates a proof-carrying VEX verdict with extended metadata.
    /// Returns both the standard VEX statement (in-toto compatible) and the
    /// extended proof payload intended for storage.
    /// </summary>
    /// <param name="proof">Proof blob backing the verdict.</param>
    /// <param name="sbomEntryId">SBOM entry the verdict applies to.</param>
    /// <param name="policyVersion">Policy version to record on the payload.</param>
    /// <param name="reasoningId">Reasoning record id to record on the payload.</param>
    public static (VexVerdictStatement Statement, VexVerdictProofPayload ProofPayload) GenerateWithProofMetadata(
        ProofBlob proof,
        string sbomEntryId,
        string policyVersion,
        string reasoningId)
    {
        // Build the payload with a placeholder id, then stamp the id computed
        // from the canonical form of the payload itself.
        var payload = new VexVerdictProofPayload
        {
            SbomEntryId = sbomEntryId,
            VulnerabilityId = ExtractCveId(proof.SubjectId),
            Status = DetermineVexStatus(proof.Type),
            Justification = DetermineJustification(proof),
            PolicyVersion = policyVersion,
            ReasoningId = reasoningId,
            VexVerdictId = "",
            ProofRef = proof.ProofId,
            ProofMethod = proof.Method,
            ProofConfidence = proof.Confidence,
            EvidenceSummary = GenerateEvidenceSummary(proof.Evidences)
        };
        payload = payload with { VexVerdictId = CanonJson.HashPrefixed(payload) };

        var statement = new VexVerdictStatement
        {
            Subject = new[]
            {
                new Subject
                {
                    Name = sbomEntryId,
                    Digest = new Dictionary<string, string>
                    {
                        ["sha256"] = ExtractPurlHash(proof.SubjectId)
                    }
                }
            },
            // Statements carry only the standard payload, not the proof extensions.
            Predicate = ConvertToStandardPayload(payload)
        };

        return (statement, payload);
    }
}

View File

@@ -1,17 +1,14 @@
using StellaOps.Attestor.ProofChain.Models;
using StellaOps.Attestor.ProofChain.Statements;
using StellaOps.Canonical.Json;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Generators;
/// <summary>
/// Integrates ProofBlob evidence into VEX verdicts with proof_ref fields.
/// Implements proof-carrying VEX statements for cryptographic auditability.
/// </summary>
public sealed class VexProofIntegrator
public sealed partial class VexProofIntegrator
{
/// <summary>
/// Generate VEX verdict statement from ProofBlob.
@@ -40,11 +37,9 @@ public sealed class VexProofIntegrator
EvidenceSummary = GenerateEvidenceSummary(proof.Evidences)
};
// Compute VexVerdictId from canonical payload
var vexId = CanonJson.HashPrefixed(payload);
payload = payload with { VexVerdictId = vexId };
// Create subject for the VEX statement
var subject = new Subject
{
Name = sbomEntryId,
@@ -83,216 +78,4 @@ public sealed class VexProofIntegrator
return statements;
}
/// <summary>
/// Create proof-carrying VEX verdict with extended metadata.
/// Returns both standard VEX statement and extended proof payload for storage.
/// </summary>
public static (VexVerdictStatement Statement, VexVerdictProofPayload ProofPayload) GenerateWithProofMetadata(
ProofBlob proof,
string sbomEntryId,
string policyVersion,
string reasoningId)
{
var status = DetermineVexStatus(proof.Type);
var justification = DetermineJustification(proof);
var proofPayload = new VexVerdictProofPayload
{
SbomEntryId = sbomEntryId,
VulnerabilityId = ExtractCveId(proof.SubjectId),
Status = status,
Justification = justification,
PolicyVersion = policyVersion,
ReasoningId = reasoningId,
VexVerdictId = "", // Will be computed
ProofRef = proof.ProofId,
ProofMethod = proof.Method,
ProofConfidence = proof.Confidence,
EvidenceSummary = GenerateEvidenceSummary(proof.Evidences)
};
var vexId = CanonJson.HashPrefixed(proofPayload);
proofPayload = proofPayload with { VexVerdictId = vexId };
var subject = new Subject
{
Name = sbomEntryId,
Digest = new Dictionary<string, string>
{
["sha256"] = ExtractPurlHash(proof.SubjectId)
}
};
var statement = new VexVerdictStatement
{
Subject = new[] { subject },
Predicate = ConvertToStandardPayload(proofPayload)
};
return (statement, proofPayload);
}
private static string DetermineVexStatus(ProofBlobType type)
{
return type switch
{
ProofBlobType.BackportFixed => "fixed",
ProofBlobType.NotAffected => "not_affected",
ProofBlobType.Vulnerable => "affected",
ProofBlobType.Unknown => "under_investigation",
_ => "under_investigation"
};
}
private static string DetermineJustification(ProofBlob proof)
{
return proof.Type switch
{
ProofBlobType.BackportFixed =>
$"Backport fix detected via {proof.Method} with {proof.Confidence:P0} confidence",
ProofBlobType.NotAffected =>
$"Not affected: {proof.Method}",
ProofBlobType.Vulnerable =>
$"No fix evidence found via {proof.Method}",
ProofBlobType.Unknown =>
$"Insufficient evidence: {proof.Method}",
_ => "Unknown status"
};
}
private static EvidenceSummary GenerateEvidenceSummary(IReadOnlyList<ProofEvidence> evidences)
{
var tiers = evidences
.GroupBy(e => e.Type)
.Select(g => new TierSummary
{
Type = g.Key.ToString(),
Count = g.Count(),
Sources = g.Select(e => e.Source).Distinct().ToList()
})
.ToList();
return new EvidenceSummary
{
TotalEvidences = evidences.Count,
Tiers = tiers,
EvidenceIds = evidences.Select(e => e.EvidenceId).ToList()
};
}
private static string ExtractCveId(string subjectId)
{
// SubjectId format: "CVE-XXXX-YYYY:pkg:..."
var parts = subjectId.Split(':', 2);
return parts[0];
}
private static string ExtractPurlHash(string subjectId)
{
// Generate hash from PURL portion
var parts = subjectId.Split(':', 2);
if (parts.Length > 1)
{
return CanonJson.Sha256Hex(System.Text.Encoding.UTF8.GetBytes(parts[1]));
}
return CanonJson.Sha256Hex(System.Text.Encoding.UTF8.GetBytes(subjectId));
}
private static VexVerdictPayload ConvertToStandardPayload(VexVerdictProofPayload proofPayload)
{
// Convert to standard payload (without proof extensions) for in-toto compatibility
return new VexVerdictPayload
{
SbomEntryId = proofPayload.SbomEntryId,
VulnerabilityId = proofPayload.VulnerabilityId,
Status = proofPayload.Status,
Justification = proofPayload.Justification,
PolicyVersion = proofPayload.PolicyVersion,
ReasoningId = proofPayload.ReasoningId,
VexVerdictId = proofPayload.VexVerdictId
};
}
}
/// <summary>
/// Extended VEX verdict payload with proof references.
/// </summary>
public sealed record VexVerdictProofPayload
{
[JsonPropertyName("sbomEntryId")]
public required string SbomEntryId { get; init; }
[JsonPropertyName("vulnerabilityId")]
public required string VulnerabilityId { get; init; }
[JsonPropertyName("status")]
public required string Status { get; init; }
[JsonPropertyName("justification")]
public required string Justification { get; init; }
[JsonPropertyName("policyVersion")]
public required string PolicyVersion { get; init; }
[JsonPropertyName("reasoningId")]
public required string ReasoningId { get; init; }
[JsonPropertyName("vexVerdictId")]
public required string VexVerdictId { get; init; }
/// <summary>
/// Reference to the ProofBlob ID (SHA-256 hash).
/// Format: "sha256:..."
/// </summary>
[JsonPropertyName("proof_ref")]
public required string ProofRef { get; init; }
/// <summary>
/// Method used to generate the proof.
/// </summary>
[JsonPropertyName("proof_method")]
public required string ProofMethod { get; init; }
/// <summary>
/// Confidence score of the proof (0.0-1.0).
/// </summary>
[JsonPropertyName("proof_confidence")]
public required double ProofConfidence { get; init; }
/// <summary>
/// Summary of evidence used in the proof.
/// </summary>
[JsonPropertyName("evidence_summary")]
public required EvidenceSummary EvidenceSummary { get; init; }
}
/// <summary>
/// Summary of evidence tiers used in a proof.
/// </summary>
public sealed record EvidenceSummary
{
[JsonPropertyName("total_evidences")]
public required int TotalEvidences { get; init; }
[JsonPropertyName("tiers")]
public required IReadOnlyList<TierSummary> Tiers { get; init; }
[JsonPropertyName("evidence_ids")]
public required IReadOnlyList<string> EvidenceIds { get; init; }
}
/// <summary>
/// Summary of a single evidence tier.
/// </summary>
public sealed record TierSummary
{
[JsonPropertyName("type")]
public required string Type { get; init; }
[JsonPropertyName("count")]
public required int Count { get; init; }
[JsonPropertyName("sources")]
public required IReadOnlyList<string> Sources { get; init; }
}

View File

@@ -0,0 +1,55 @@
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Generators;
/// <summary>
/// Extended VEX verdict payload with proof references.
/// </summary>
public sealed record VexVerdictProofPayload
{
    /// <summary>SBOM entry this verdict applies to.</summary>
    [JsonPropertyName("sbomEntryId")]
    public required string SbomEntryId { get; init; }

    /// <summary>Vulnerability identifier (the CVE portion of the proof subject id).</summary>
    [JsonPropertyName("vulnerabilityId")]
    public required string VulnerabilityId { get; init; }

    /// <summary>
    /// VEX status: "fixed", "not_affected", "affected", or "under_investigation".
    /// </summary>
    [JsonPropertyName("status")]
    public required string Status { get; init; }

    /// <summary>Human-readable justification derived from the proof type/method.</summary>
    [JsonPropertyName("justification")]
    public required string Justification { get; init; }

    /// <summary>Version of the policy recorded with this verdict.</summary>
    [JsonPropertyName("policyVersion")]
    public required string PolicyVersion { get; init; }

    /// <summary>Identifier of the reasoning record backing this verdict.</summary>
    [JsonPropertyName("reasoningId")]
    public required string ReasoningId { get; init; }

    /// <summary>
    /// Canonical hash id of this payload. Computed over the payload with this
    /// field empty, then stamped back in via a with-expression.
    /// </summary>
    [JsonPropertyName("vexVerdictId")]
    public required string VexVerdictId { get; init; }

    /// <summary>
    /// Reference to the ProofBlob ID (SHA-256 hash).
    /// Format: "sha256:..."
    /// </summary>
    [JsonPropertyName("proof_ref")]
    public required string ProofRef { get; init; }

    /// <summary>
    /// Method used to generate the proof.
    /// </summary>
    [JsonPropertyName("proof_method")]
    public required string ProofMethod { get; init; }

    /// <summary>
    /// Confidence score of the proof (0.0-1.0).
    /// </summary>
    [JsonPropertyName("proof_confidence")]
    public required double ProofConfidence { get; init; }

    /// <summary>
    /// Summary of evidence used in the proof.
    /// </summary>
    [JsonPropertyName("evidence_summary")]
    public required EvidenceSummary EvidenceSummary { get; init; }
}

View File

@@ -93,184 +93,3 @@ public interface IProofGraphService
string nodeId,
CancellationToken ct = default);
}
/// <summary>
/// Types of nodes in the proof graph.
/// </summary>
public enum ProofGraphNodeType
{
/// <summary>Container image, binary, Helm chart.</summary>
Artifact,
/// <summary>SBOM document by sbomId.</summary>
SbomDocument,
/// <summary>In-toto statement by statement hash.</summary>
InTotoStatement,
/// <summary>DSSE envelope by envelope hash.</summary>
DsseEnvelope,
/// <summary>Rekor transparency log entry.</summary>
RekorEntry,
/// <summary>VEX statement by VEX hash.</summary>
VexStatement,
/// <summary>Component/subject from SBOM.</summary>
Subject,
/// <summary>Signing key.</summary>
SigningKey,
/// <summary>Trust anchor (root of trust).</summary>
TrustAnchor
}
/// <summary>
/// Types of edges in the proof graph.
/// </summary>
public enum ProofGraphEdgeType
{
/// <summary>Artifact → SbomDocument: artifact is described by SBOM.</summary>
DescribedBy,
/// <summary>SbomDocument → InTotoStatement: SBOM is attested by statement.</summary>
AttestedBy,
/// <summary>InTotoStatement → DsseEnvelope: statement is wrapped in envelope.</summary>
WrappedBy,
/// <summary>DsseEnvelope → RekorEntry: envelope is logged in Rekor.</summary>
LoggedIn,
/// <summary>Artifact/Subject → VexStatement: has VEX statement.</summary>
HasVex,
/// <summary>InTotoStatement → Subject: statement contains subject.</summary>
ContainsSubject,
/// <summary>Build → SBOM: build produces SBOM.</summary>
Produces,
/// <summary>VEX → Component: VEX affects component.</summary>
Affects,
/// <summary>Envelope → Key: envelope is signed by key.</summary>
SignedBy,
/// <summary>Envelope → Rekor: envelope is recorded at log index.</summary>
RecordedAt,
/// <summary>Key → TrustAnchor: key chains to trust anchor.</summary>
ChainsTo
}
/// <summary>
/// A node in the proof graph.
/// </summary>
public sealed record ProofGraphNode
{
/// <summary>
/// Unique identifier for this node.
/// </summary>
public required string Id { get; init; }
/// <summary>
/// The type of this node.
/// </summary>
public required ProofGraphNodeType Type { get; init; }
/// <summary>
/// Content digest (content-addressed identifier).
/// </summary>
public required string ContentDigest { get; init; }
/// <summary>
/// When this node was created.
/// </summary>
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Optional metadata for the node.
/// </summary>
public IReadOnlyDictionary<string, object>? Metadata { get; init; }
}
/// <summary>
/// An edge in the proof graph.
/// </summary>
public sealed record ProofGraphEdge
{
/// <summary>
/// Unique identifier for this edge.
/// </summary>
public required string Id { get; init; }
/// <summary>
/// Source node ID.
/// </summary>
public required string SourceId { get; init; }
/// <summary>
/// Target node ID.
/// </summary>
public required string TargetId { get; init; }
/// <summary>
/// The type of this edge.
/// </summary>
public required ProofGraphEdgeType Type { get; init; }
/// <summary>
/// When this edge was created.
/// </summary>
public required DateTimeOffset CreatedAt { get; init; }
}
/// <summary>
/// A path through the proof graph.
/// </summary>
public sealed record ProofGraphPath
{
/// <summary>
/// Nodes in the path, in order.
/// </summary>
public required IReadOnlyList<ProofGraphNode> Nodes { get; init; }
/// <summary>
/// Edges connecting the nodes.
/// </summary>
public required IReadOnlyList<ProofGraphEdge> Edges { get; init; }
/// <summary>
/// Length of the path (number of edges).
/// </summary>
public int Length => Edges.Count;
}
/// <summary>
/// A subgraph of the proof graph.
/// </summary>
public sealed record ProofGraphSubgraph
{
/// <summary>
/// The root node ID that was queried.
/// </summary>
public required string RootNodeId { get; init; }
/// <summary>
/// All nodes in the subgraph.
/// </summary>
public required IReadOnlyList<ProofGraphNode> Nodes { get; init; }
/// <summary>
/// All edges in the subgraph.
/// </summary>
public required IReadOnlyList<ProofGraphEdge> Edges { get; init; }
/// <summary>
/// Maximum depth that was traversed.
/// </summary>
public required int MaxDepth { get; init; }
}

View File

@@ -0,0 +1,58 @@
namespace StellaOps.Attestor.ProofChain.Graph;
/// <summary>
/// Edge mutation methods for InMemoryProofGraphService.
/// </summary>
public sealed partial class InMemoryProofGraphService
{
    /// <inheritdoc />
    /// <remarks>
    /// Idempotent: the edge id is derived from (source, edge type, target), so
    /// re-adding the same logical edge returns the stored instance. Both
    /// endpoint nodes must already exist or an <see cref="ArgumentException"/>
    /// is thrown. The <paramref name="ct"/> token is accepted but not observed.
    /// NOTE(review): adjacency lists are mutated under lock(list) here, but the
    /// query partials enumerate those lists without locking — presumably fine
    /// for this in-memory/test implementation; confirm if concurrent add+query
    /// is a supported scenario.
    /// </remarks>
    public Task<ProofGraphEdge> AddEdgeAsync(
        string sourceId,
        string targetId,
        ProofGraphEdgeType edgeType,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(sourceId);
        ArgumentException.ThrowIfNullOrWhiteSpace(targetId);
        // Both endpoints must be registered nodes before an edge can join them.
        if (!_nodes.ContainsKey(sourceId))
        {
            throw new ArgumentException($"Source node '{sourceId}' does not exist.", nameof(sourceId));
        }
        if (!_nodes.ContainsKey(targetId))
        {
            throw new ArgumentException($"Target node '{targetId}' does not exist.", nameof(targetId));
        }
        // Deterministic, content-derived edge identity.
        var edgeId = $"{sourceId}->{edgeType}->{targetId}";
        var edge = new ProofGraphEdge
        {
            Id = edgeId,
            SourceId = sourceId,
            TargetId = targetId,
            Type = edgeType,
            CreatedAt = _timeProvider.GetUtcNow()
        };
        if (_edges.TryAdd(edgeId, edge))
        {
            // First writer wins: index the new edge in both adjacency lists.
            _outgoingEdges.AddOrUpdate(
                sourceId,
                _ => [edgeId],
                (_, list) => { lock (list) { list.Add(edgeId); } return list; });
            _incomingEdges.AddOrUpdate(
                targetId,
                _ => [edgeId],
                (_, list) => { lock (list) { list.Add(edgeId); } return list; });
        }
        else
        {
            // Edge already existed; return the stored instance, not the new one.
            edge = _edges[edgeId];
        }
        return Task.FromResult(edge);
    }
}

View File

@@ -0,0 +1,79 @@
namespace StellaOps.Attestor.ProofChain.Graph;
/// <summary>
/// Query and traversal methods for InMemoryProofGraphService.
/// </summary>
public sealed partial class InMemoryProofGraphService
{
    /// <inheritdoc />
    public Task<ProofGraphNode?> GetNodeAsync(string nodeId, CancellationToken ct = default)
        => Task.FromResult<ProofGraphNode?>(_nodes.GetValueOrDefault(nodeId));

    /// <inheritdoc />
    /// <remarks>
    /// Breadth-first search, so the first path that reaches the target has the
    /// fewest edges. Returns null when either endpoint is missing or the target
    /// is unreachable.
    /// </remarks>
    public Task<ProofGraphPath?> FindPathAsync(
        string sourceId,
        string targetId,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(sourceId);
        ArgumentException.ThrowIfNullOrWhiteSpace(targetId);

        if (!_nodes.ContainsKey(sourceId) || !_nodes.ContainsKey(targetId))
        {
            return Task.FromResult<ProofGraphPath?>(null);
        }

        var seen = new HashSet<string> { sourceId };
        var frontier = new Queue<List<string>>();
        frontier.Enqueue([sourceId]);

        while (frontier.Count > 0)
        {
            var path = frontier.Dequeue();
            var tail = path[^1];

            if (tail == targetId)
            {
                return Task.FromResult<ProofGraphPath?>(MaterializePath(path));
            }

            foreach (var edgeId in _outgoingEdges.GetValueOrDefault(tail, []))
            {
                var next = _edges[edgeId].TargetId;
                if (seen.Add(next))
                {
                    frontier.Enqueue([.. path, next]);
                }
            }
        }

        return Task.FromResult<ProofGraphPath?>(null);
    }

    /// <summary>
    /// Resolves a node-id path into a <see cref="ProofGraphPath"/>, picking for
    /// each hop the first outgoing edge that reaches the next node in the path.
    /// </summary>
    private ProofGraphPath MaterializePath(List<string> path)
    {
        var nodes = path.Select(id => _nodes[id]).ToList();
        var edges = new List<ProofGraphEdge>();
        for (var i = 0; i + 1 < path.Count; i++)
        {
            var hop = _outgoingEdges.GetValueOrDefault(path[i], [])
                .Select(eid => _edges[eid])
                .FirstOrDefault(e => e.TargetId == path[i + 1]);
            if (hop != null)
            {
                edges.Add(hop);
            }
        }
        return new ProofGraphPath
        {
            Nodes = nodes,
            Edges = edges
        };
    }
}

View File

@@ -0,0 +1,96 @@
namespace StellaOps.Attestor.ProofChain.Graph;
/// <summary>
/// Subgraph traversal methods for InMemoryProofGraphService.
/// </summary>
public sealed partial class InMemoryProofGraphService
{
    /// <inheritdoc />
    /// <remarks>
    /// Breadth-first expansion from <paramref name="artifactId"/> following
    /// edges in both directions, up to <paramref name="maxDepth"/> hops.
    /// Edges may be visited from both endpoints, so duplicates are removed
    /// before the subgraph is returned.
    /// </remarks>
    public Task<ProofGraphSubgraph> GetArtifactSubgraphAsync(
        string artifactId,
        int maxDepth = 5,
        CancellationToken ct = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(artifactId);

        var collectedNodes = new Dictionary<string, ProofGraphNode>();
        var collectedEdges = new List<ProofGraphEdge>();
        var seen = new HashSet<string>();
        var frontier = new Queue<(string Id, int Depth)>();

        if (_nodes.TryGetValue(artifactId, out var root))
        {
            collectedNodes[artifactId] = root;
            seen.Add(artifactId);
            frontier.Enqueue((artifactId, 0));
        }

        while (frontier.Count > 0)
        {
            var (currentId, depth) = frontier.Dequeue();
            if (depth >= maxDepth)
            {
                continue;
            }

            // Record the traversed edge, then discover the node on its far side.
            void Visit(string edgeId, bool outgoing)
            {
                var edge = _edges[edgeId];
                collectedEdges.Add(edge);
                var neighborId = outgoing ? edge.TargetId : edge.SourceId;
                if (!seen.Contains(neighborId) && _nodes.TryGetValue(neighborId, out var neighbor))
                {
                    seen.Add(neighborId);
                    collectedNodes[neighborId] = neighbor;
                    frontier.Enqueue((neighborId, depth + 1));
                }
            }

            foreach (var edgeId in _outgoingEdges.GetValueOrDefault(currentId, []))
            {
                Visit(edgeId, outgoing: true);
            }
            foreach (var edgeId in _incomingEdges.GetValueOrDefault(currentId, []))
            {
                Visit(edgeId, outgoing: false);
            }
        }

        return Task.FromResult(new ProofGraphSubgraph
        {
            RootNodeId = artifactId,
            Nodes = collectedNodes.Values.ToList(),
            Edges = collectedEdges.Distinct().ToList(),
            MaxDepth = maxDepth
        });
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<ProofGraphEdge>> GetOutgoingEdgesAsync(
        string nodeId,
        CancellationToken ct = default)
    {
        IReadOnlyList<ProofGraphEdge> snapshot = _outgoingEdges
            .GetValueOrDefault(nodeId, [])
            .Select(id => _edges[id])
            .ToList();
        return Task.FromResult(snapshot);
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<ProofGraphEdge>> GetIncomingEdgesAsync(
        string nodeId,
        CancellationToken ct = default)
    {
        IReadOnlyList<ProofGraphEdge> snapshot = _incomingEdges
            .GetValueOrDefault(nodeId, [])
            .Select(id => _edges[id])
            .ToList();
        return Task.FromResult(snapshot);
    }
}

View File

@@ -1,9 +1,4 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Attestor.ProofChain.Graph;
@@ -11,7 +6,7 @@ namespace StellaOps.Attestor.ProofChain.Graph;
/// In-memory implementation of IProofGraphService for testing and development.
/// Not suitable for production use with large graphs.
/// </summary>
public sealed class InMemoryProofGraphService : IProofGraphService
public sealed partial class InMemoryProofGraphService : IProofGraphService
{
private readonly ConcurrentDictionary<string, ProofGraphNode> _nodes = new();
private readonly ConcurrentDictionary<string, ProofGraphEdge> _edges = new();
@@ -46,228 +41,12 @@ public sealed class InMemoryProofGraphService : IProofGraphService
if (!_nodes.TryAdd(nodeId, node))
{
// Node already exists, return the existing one
node = _nodes[nodeId];
}
return Task.FromResult(node);
}
/// <inheritdoc />
/// <remarks>
/// Both endpoints must already exist in the graph. The edge id is the deterministic
/// string "source->edgeType->target", so adding the same logical edge twice is
/// idempotent and returns the previously stored instance (first CreatedAt wins).
/// NOTE(review): adjacency lists are appended under lock(list), but read paths
/// (GetOutgoingEdgesAsync and traversals) enumerate the same lists without that
/// lock — acceptable for the stated test/dev usage, confirm before production use.
/// </remarks>
public Task<ProofGraphEdge> AddEdgeAsync(
    string sourceId,
    string targetId,
    ProofGraphEdgeType edgeType,
    CancellationToken ct = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(sourceId);
    ArgumentException.ThrowIfNullOrWhiteSpace(targetId);
    // Edges may only connect nodes that are already registered.
    if (!_nodes.ContainsKey(sourceId))
    {
        throw new ArgumentException($"Source node '{sourceId}' does not exist.", nameof(sourceId));
    }
    if (!_nodes.ContainsKey(targetId))
    {
        throw new ArgumentException($"Target node '{targetId}' does not exist.", nameof(targetId));
    }
    // Deterministic id makes duplicate adds collapse onto the same edge entry.
    var edgeId = $"{sourceId}->{edgeType}->{targetId}";
    var edge = new ProofGraphEdge
    {
        Id = edgeId,
        SourceId = sourceId,
        TargetId = targetId,
        Type = edgeType,
        CreatedAt = _timeProvider.GetUtcNow()
    };
    if (_edges.TryAdd(edgeId, edge))
    {
        // Add to adjacency lists; the lock serializes concurrent writers on one list.
        _outgoingEdges.AddOrUpdate(
            sourceId,
            _ => [edgeId],
            (_, list) => { lock (list) { list.Add(edgeId); } return list; });
        _incomingEdges.AddOrUpdate(
            targetId,
            _ => [edgeId],
            (_, list) => { lock (list) { list.Add(edgeId); } return list; });
    }
    else
    {
        // Edge already exists — hand back the stored instance.
        edge = _edges[edgeId];
    }
    return Task.FromResult(edge);
}
/// <inheritdoc />
public Task<ProofGraphNode?> GetNodeAsync(string nodeId, CancellationToken ct = default)
    => Task.FromResult(_nodes.TryGetValue(nodeId, out var node) ? node : null);
/// <inheritdoc />
/// <remarks>
/// Breadth-first search over outgoing edges only (directed reachability), returning
/// a shortest path by hop count, or null when either endpoint is missing or no path
/// exists. When sourceId equals targetId the result is a single-node path with zero
/// edges.
/// </remarks>
public Task<ProofGraphPath?> FindPathAsync(
    string sourceId,
    string targetId,
    CancellationToken ct = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(sourceId);
    ArgumentException.ThrowIfNullOrWhiteSpace(targetId);
    if (!_nodes.ContainsKey(sourceId) || !_nodes.ContainsKey(targetId))
    {
        return Task.FromResult<ProofGraphPath?>(null);
    }
    // BFS to find shortest path; each queue entry carries the node ids walked so far.
    var visited = new HashSet<string>();
    var queue = new Queue<(string nodeId, List<string> path)>();
    queue.Enqueue((sourceId, [sourceId]));
    visited.Add(sourceId);
    while (queue.Count > 0)
    {
        var (currentId, path) = queue.Dequeue();
        if (currentId == targetId)
        {
            // Found path, reconstruct nodes and edges
            var nodes = path.Select(id => _nodes[id]).ToList();
            var edges = new List<ProofGraphEdge>();
            for (int i = 0; i < path.Count - 1; i++)
            {
                // If several edges connect consecutive nodes, the first in insertion
                // order is reported; a missing edge is silently skipped.
                var edgeIds = _outgoingEdges.GetValueOrDefault(path[i], []);
                var edge = edgeIds
                    .Select(eid => _edges[eid])
                    .FirstOrDefault(e => e.TargetId == path[i + 1]);
                if (edge != null)
                {
                    edges.Add(edge);
                }
            }
            return Task.FromResult<ProofGraphPath?>(new ProofGraphPath
            {
                Nodes = nodes,
                Edges = edges
            });
        }
        // Expand unvisited successors, extending the recorded path by one hop.
        var outgoing = _outgoingEdges.GetValueOrDefault(currentId, []);
        foreach (var edgeId in outgoing)
        {
            var edge = _edges[edgeId];
            if (!visited.Contains(edge.TargetId))
            {
                visited.Add(edge.TargetId);
                var newPath = new List<string>(path) { edge.TargetId };
                queue.Enqueue((edge.TargetId, newPath));
            }
        }
    }
    return Task.FromResult<ProofGraphPath?>(null);
}
/// <inheritdoc />
/// <remarks>
/// Returns the connected subgraph around <paramref name="artifactId"/> up to
/// <paramref name="maxDepth"/> hops, traversing edges in both directions
/// (the graph is treated as undirected for this query). An unknown root id yields
/// an empty subgraph carrying the requested root id and depth.
/// </remarks>
public Task<ProofGraphSubgraph> GetArtifactSubgraphAsync(
    string artifactId,
    int maxDepth = 5,
    CancellationToken ct = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(artifactId);
    var nodes = new Dictionary<string, ProofGraphNode>();
    var edges = new List<ProofGraphEdge>();
    // Dedupe edges while traversing instead of a trailing Distinct() pass;
    // edge ids uniquely identify entries in _edges, so this is equivalent.
    var seenEdgeIds = new HashSet<string>();
    var visited = new HashSet<string>();
    var queue = new Queue<(string nodeId, int depth)>();

    if (_nodes.TryGetValue(artifactId, out var rootNode))
    {
        nodes[artifactId] = rootNode;
        queue.Enqueue((artifactId, 0));
        visited.Add(artifactId);
    }

    // Records the edge once and enqueues its far endpoint if unseen and present.
    void Collect(ProofGraphEdge edge, string neighborId, int nextDepth)
    {
        if (seenEdgeIds.Add(edge.Id))
        {
            edges.Add(edge);
        }
        if (visited.Add(neighborId) && _nodes.TryGetValue(neighborId, out var neighbor))
        {
            nodes[neighborId] = neighbor;
            queue.Enqueue((neighborId, nextDepth));
        }
    }

    // Breadth-first expansion; nodes at maxDepth are included but not expanded.
    while (queue.Count > 0)
    {
        var (currentId, depth) = queue.Dequeue();
        if (depth >= maxDepth)
        {
            continue;
        }
        foreach (var edgeId in _outgoingEdges.GetValueOrDefault(currentId, []))
        {
            var edge = _edges[edgeId];
            Collect(edge, edge.TargetId, depth + 1);
        }
        foreach (var edgeId in _incomingEdges.GetValueOrDefault(currentId, []))
        {
            var edge = _edges[edgeId];
            Collect(edge, edge.SourceId, depth + 1);
        }
    }

    return Task.FromResult(new ProofGraphSubgraph
    {
        RootNodeId = artifactId,
        Nodes = nodes.Values.ToList(),
        Edges = edges,
        MaxDepth = maxDepth
    });
}
/// <inheritdoc />
public Task<IReadOnlyList<ProofGraphEdge>> GetOutgoingEdgesAsync(
    string nodeId,
    CancellationToken ct = default)
{
    var ids = _outgoingEdges.GetValueOrDefault(nodeId, []);
    // Translate edge ids into the stored edge records.
    var result = new List<ProofGraphEdge>(ids.Count);
    foreach (var id in ids)
    {
        result.Add(_edges[id]);
    }
    return Task.FromResult<IReadOnlyList<ProofGraphEdge>>(result);
}
/// <inheritdoc />
public Task<IReadOnlyList<ProofGraphEdge>> GetIncomingEdgesAsync(
    string nodeId,
    CancellationToken ct = default)
{
    // Look up each referenced edge; unknown nodes produce an empty list.
    var resolved = new List<ProofGraphEdge>();
    foreach (var edgeId in _incomingEdges.GetValueOrDefault(nodeId, []))
    {
        resolved.Add(_edges[edgeId]);
    }
    return Task.FromResult<IReadOnlyList<ProofGraphEdge>>(resolved);
}
/// <summary>
/// Clears all nodes and edges (for testing).
/// </summary>

View File

@@ -0,0 +1,34 @@
using System;
namespace StellaOps.Attestor.ProofChain.Graph;
/// <summary>
/// An edge in the proof graph.
/// </summary>
/// <remarks>
/// Immutable record with value-based equality over all properties. The in-memory
/// graph service builds ids as "source->Type->target", making edges idempotent.
/// </remarks>
public sealed record ProofGraphEdge
{
    /// <summary>
    /// Unique identifier for this edge.
    /// </summary>
    public required string Id { get; init; }
    /// <summary>
    /// Source node ID (the edge points from source to target).
    /// </summary>
    public required string SourceId { get; init; }
    /// <summary>
    /// Target node ID.
    /// </summary>
    public required string TargetId { get; init; }
    /// <summary>
    /// The type of this edge.
    /// </summary>
    public required ProofGraphEdgeType Type { get; init; }
    /// <summary>
    /// When this edge was created.
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }
}

View File

@@ -0,0 +1,40 @@
namespace StellaOps.Attestor.ProofChain.Graph;
/// <summary>
/// Types of edges in the proof graph.
/// </summary>
/// <remarks>
/// Underlying numeric values follow declaration order; do not reorder or insert
/// members mid-list if values are persisted anywhere.
/// </remarks>
public enum ProofGraphEdgeType
{
    /// <summary>Artifact -> SbomDocument: artifact is described by SBOM.</summary>
    DescribedBy,
    /// <summary>SbomDocument -> InTotoStatement: SBOM is attested by statement.</summary>
    AttestedBy,
    /// <summary>InTotoStatement -> DsseEnvelope: statement is wrapped in envelope.</summary>
    WrappedBy,
    /// <summary>DsseEnvelope -> RekorEntry: envelope is logged in Rekor.</summary>
    LoggedIn,
    /// <summary>Artifact/Subject -> VexStatement: has VEX statement.</summary>
    HasVex,
    /// <summary>InTotoStatement -> Subject: statement contains subject.</summary>
    ContainsSubject,
    /// <summary>Build -> SBOM: build produces SBOM.</summary>
    Produces,
    /// <summary>VEX -> Component: VEX affects component.</summary>
    Affects,
    /// <summary>Envelope -> Key: envelope is signed by key.</summary>
    SignedBy,
    /// <summary>Envelope -> Rekor: envelope is recorded at log index.</summary>
    RecordedAt,
    /// <summary>Key -> TrustAnchor: key chains to trust anchor.</summary>
    ChainsTo
}

View File

@@ -0,0 +1,35 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Attestor.ProofChain.Graph;
/// <summary>
/// A node in the proof graph.
/// </summary>
/// <remarks>
/// Immutable record with value-based equality over its scalar properties.
/// </remarks>
public sealed record ProofGraphNode
{
    /// <summary>
    /// Unique identifier for this node.
    /// </summary>
    public required string Id { get; init; }
    /// <summary>
    /// The type of this node.
    /// </summary>
    public required ProofGraphNodeType Type { get; init; }
    /// <summary>
    /// Content digest (content-addressed identifier).
    /// </summary>
    public required string ContentDigest { get; init; }
    /// <summary>
    /// When this node was created.
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }
    /// <summary>
    /// Optional free-form metadata for the node; null when absent.
    /// </summary>
    public IReadOnlyDictionary<string, object>? Metadata { get; init; }
}

View File

@@ -0,0 +1,34 @@
namespace StellaOps.Attestor.ProofChain.Graph;
/// <summary>
/// Types of nodes in the proof graph.
/// </summary>
/// <remarks>
/// Underlying numeric values follow declaration order; avoid reordering if the
/// values are persisted anywhere.
/// </remarks>
public enum ProofGraphNodeType
{
    /// <summary>Container image, binary, Helm chart.</summary>
    Artifact,
    /// <summary>SBOM document by sbomId.</summary>
    SbomDocument,
    /// <summary>In-toto statement by statement hash.</summary>
    InTotoStatement,
    /// <summary>DSSE envelope by envelope hash.</summary>
    DsseEnvelope,
    /// <summary>Rekor transparency log entry.</summary>
    RekorEntry,
    /// <summary>VEX statement by VEX hash.</summary>
    VexStatement,
    /// <summary>Component/subject from SBOM.</summary>
    Subject,
    /// <summary>Signing key.</summary>
    SigningKey,
    /// <summary>Trust anchor (root of trust).</summary>
    TrustAnchor
}

View File

@@ -0,0 +1,24 @@
using System.Collections.Generic;
namespace StellaOps.Attestor.ProofChain.Graph;
/// <summary>
/// A path through the proof graph.
/// </summary>
public sealed record ProofGraphPath
{
    /// <summary>
    /// Nodes in the path, in order.
    /// </summary>
    public required IReadOnlyList<ProofGraphNode> Nodes { get; init; }
    /// <summary>
    /// Edges connecting the nodes.
    /// </summary>
    public required IReadOnlyList<ProofGraphEdge> Edges { get; init; }
    /// <summary>
    /// Length of the path (number of edges). A single-node path has length 0.
    /// </summary>
    public int Length => Edges.Count;
}

View File

@@ -0,0 +1,29 @@
using System.Collections.Generic;
namespace StellaOps.Attestor.ProofChain.Graph;
/// <summary>
/// A subgraph of the proof graph.
/// </summary>
public sealed record ProofGraphSubgraph
{
    /// <summary>
    /// The root node ID that was queried.
    /// </summary>
    public required string RootNodeId { get; init; }
    /// <summary>
    /// All nodes in the subgraph.
    /// </summary>
    public required IReadOnlyList<ProofGraphNode> Nodes { get; init; }
    /// <summary>
    /// All edges in the subgraph.
    /// </summary>
    public required IReadOnlyList<ProofGraphEdge> Edges { get; init; }
    /// <summary>
    /// Maximum depth that was requested for the traversal (not necessarily reached).
    /// </summary>
    public required int MaxDepth { get; init; }
}

View File

@@ -0,0 +1,37 @@
namespace StellaOps.Attestor.ProofChain.Identifiers;
/// <summary>
/// sha256-based content-addressed identifier for an artifact.
/// </summary>
public sealed record ArtifactId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Delegates to the base rendering.</summary>
    public override string ToString() => base.ToString();

    /// <summary>Parses the value, throwing <see cref="FormatException"/> on failure.</summary>
    public new static ArtifactId Parse(string value) => new(ParseSha256(value));

    /// <summary>Non-throwing parse; only the sha256 algorithm is accepted.</summary>
    public static bool TryParse(string value, out ArtifactId? id) => TryParseSha256(value, out id);

    // Throwing variant built on top of the Try-based parser.
    private static string ParseSha256(string value)
        => TryParseSha256(value, out var id)
            ? id!.Digest
            : throw new FormatException($"Invalid ArtifactID: '{value}'.");

    // Accepts only values that split into algorithm "sha256" plus a digest.
    private static bool TryParseSha256(string value, out ArtifactId? id)
    {
        id = null;
        if (ContentAddressedId.TrySplit(value, out var algorithm, out var digest)
            && string.Equals(algorithm, "sha256", StringComparison.Ordinal))
        {
            id = new ArtifactId(digest);
            return true;
        }
        return false;
    }
}

View File

@@ -1,6 +1,4 @@
using StellaOps.Attestor.ProofChain.Internal;
using System;
namespace StellaOps.Attestor.ProofChain.Identifiers;
@@ -84,86 +82,3 @@ public abstract record ContentAddressedId
};
}
}
/// <summary>
/// Content-addressed id for an arbitrary algorithm/digest pair, used when no
/// specialized id type applies.
/// </summary>
public sealed record GenericContentAddressedId(string Algorithm, string Digest) : ContentAddressedId(Algorithm, Digest)
{
    /// <summary>Delegates to the base rendering.</summary>
    public override string ToString() => base.ToString();
}
/// <summary>
/// sha256-based content-addressed identifier for an artifact.
/// </summary>
public sealed record ArtifactId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Delegates to the base rendering.</summary>
    public override string ToString() => base.ToString();

    /// <summary>
    /// Parses the value; throws <see cref="FormatException"/> unless it splits into
    /// algorithm "sha256" plus a digest.
    /// </summary>
    public new static ArtifactId Parse(string value)
    {
        if (TryParse(value, out var id))
        {
            return id!;
        }
        throw new FormatException($"Invalid ArtifactID: '{value}'.");
    }

    /// <summary>Non-throwing parse; only the sha256 algorithm is accepted.</summary>
    public static bool TryParse(string value, out ArtifactId? id)
    {
        if (ContentAddressedId.TrySplit(value, out var algorithm, out var digest)
            && string.Equals(algorithm, "sha256", StringComparison.Ordinal))
        {
            id = new ArtifactId(digest);
            return true;
        }
        id = null;
        return false;
    }
}
/// <summary>
/// sha256-based content-addressed identifier for evidence.
/// </summary>
public sealed record EvidenceId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Delegates to the base rendering.</summary>
    public override string ToString() => base.ToString();
    /// <summary>Parses a sha256 value; throws <see cref="FormatException"/> otherwise.</summary>
    public new static EvidenceId Parse(string value) => new(Sha256IdParser.Parse(value, "EvidenceID"));
}
/// <summary>
/// sha256-based content-addressed identifier for a reasoning record.
/// </summary>
public sealed record ReasoningId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Delegates to the base rendering.</summary>
    public override string ToString() => base.ToString();
    /// <summary>Parses a sha256 value; throws <see cref="FormatException"/> otherwise.</summary>
    public new static ReasoningId Parse(string value) => new(Sha256IdParser.Parse(value, "ReasoningID"));
}
/// <summary>
/// sha256-based content-addressed identifier for a VEX verdict.
/// </summary>
public sealed record VexVerdictId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Delegates to the base rendering.</summary>
    public override string ToString() => base.ToString();
    /// <summary>Parses a sha256 value; throws <see cref="FormatException"/> otherwise.</summary>
    public new static VexVerdictId Parse(string value) => new(Sha256IdParser.Parse(value, "VEXVerdictID"));
}
/// <summary>
/// sha256-based content-addressed identifier for a proof bundle.
/// </summary>
public sealed record ProofBundleId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Delegates to the base rendering.</summary>
    public override string ToString() => base.ToString();
    /// <summary>Parses a sha256 value; throws <see cref="FormatException"/> otherwise.</summary>
    public new static ProofBundleId Parse(string value) => new(Sha256IdParser.Parse(value, "ProofBundleID"));
}
/// <summary>
/// Shared parser for the sha256-only content-addressed id record types.
/// </summary>
internal static class Sha256IdParser
{
    /// <summary>
    /// Extracts the digest from a value that splits into algorithm "sha256" plus a
    /// digest; otherwise throws <see cref="FormatException"/> naming
    /// <paramref name="kind"/> (the id type being parsed) in the message.
    /// </summary>
    public static string Parse(string value, string kind)
    {
        if (!ContentAddressedId.TrySplit(value, out var algorithm, out var digest) ||
            !string.Equals(algorithm, "sha256", StringComparison.Ordinal))
        {
            throw new FormatException($"Invalid {kind}: '{value}'.");
        }
        return digest;
    }
}

View File

@@ -0,0 +1,84 @@
using System.Text;
using StellaOps.Canonical.Json;
namespace StellaOps.Attestor.ProofChain.Identifiers;
/// <summary>
/// Graph revision and SBOM digest computation methods.
/// </summary>
public sealed partial class ContentAddressedIdGenerator
{
    /// <summary>
    /// Computes a deterministic revision id for a graph snapshot: node ids and edge
    /// ids are each sorted ordinally (so input order never affects the result), then
    /// hashed as UTF-8 Merkle leaves in a fixed layout — nodes, edges, then the
    /// policy/feeds/toolchain/params digests (trimmed).
    /// NOTE(review): the four digests are trimmed but node/edge ids are hashed
    /// as-is — confirm ids are pre-normalized upstream.
    /// </summary>
    public GraphRevisionId ComputeGraphRevisionId(
        IReadOnlyList<string> nodeIds,
        IReadOnlyList<string> edgeIds,
        string policyDigest,
        string feedsDigest,
        string toolchainDigest,
        string paramsDigest)
    {
        ArgumentNullException.ThrowIfNull(nodeIds);
        ArgumentNullException.ThrowIfNull(edgeIds);
        ArgumentException.ThrowIfNullOrWhiteSpace(policyDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(feedsDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(toolchainDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(paramsDigest);
        // Ordinal sort makes the digest independent of the caller's ordering.
        var nodes = new List<string>(nodeIds);
        nodes.Sort(StringComparer.Ordinal);
        var edges = new List<string>(edgeIds);
        edges.Sort(StringComparer.Ordinal);
        // Fixed leaf layout: all nodes, all edges, then the four context digests.
        var leaves = new List<ReadOnlyMemory<byte>>(nodes.Count + edges.Count + 4);
        foreach (var node in nodes)
        {
            leaves.Add(Encoding.UTF8.GetBytes(node));
        }
        foreach (var edge in edges)
        {
            leaves.Add(Encoding.UTF8.GetBytes(edge));
        }
        leaves.Add(Encoding.UTF8.GetBytes(policyDigest.Trim()));
        leaves.Add(Encoding.UTF8.GetBytes(feedsDigest.Trim()));
        leaves.Add(Encoding.UTF8.GetBytes(toolchainDigest.Trim()));
        leaves.Add(Encoding.UTF8.GetBytes(paramsDigest.Trim()));
        var root = _merkleTreeBuilder.ComputeMerkleRoot(leaves);
        return new GraphRevisionId(Convert.ToHexStringLower(root));
    }
    /// <summary>
    /// Canonicalizes the raw SBOM JSON (no version marker) and returns the
    /// algorithm-prefixed lowercase-hex digest.
    /// </summary>
    public string ComputeSbomDigest(ReadOnlySpan<byte> sbomJson)
    {
        var canonical = _canonicalizer.Canonicalize(sbomJson);
        return $"sha256:{HashSha256Hex(canonical)}";
    }
    /// <summary>
    /// Builds an entry id from the SBOM digest plus the package URL and optional version.
    /// </summary>
    public SbomEntryId ComputeSbomEntryId(ReadOnlySpan<byte> sbomJson, string purl, string? version = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(purl);
        var sbomDigest = ComputeSbomDigest(sbomJson);
        return new SbomEntryId(sbomDigest, purl, version);
    }
    /// <summary>
    /// Canonicalizes a value with version marker for content-addressed hashing.
    /// Uses the current canonicalization version (<see cref="CanonVersion.Current"/>).
    /// </summary>
    private byte[] CanonicalizeVersioned<T>(T value)
    {
        var json = System.Text.Json.JsonSerializer.SerializeToUtf8Bytes(value, SerializerOptions);
        return _canonicalizer.CanonicalizeWithVersion(json, CanonVersion.Current);
    }
    /// <summary>
    /// Canonicalizes a value without version marker.
    /// Used for SBOM digests which are content-addressed by their raw JSON.
    /// </summary>
    private byte[] Canonicalize<T>(T value)
    {
        var json = System.Text.Json.JsonSerializer.SerializeToUtf8Bytes(value, SerializerOptions);
        return _canonicalizer.Canonicalize(json);
    }
}

View File

@@ -1,10 +1,6 @@
using StellaOps.Attestor.ProofChain.Json;
using StellaOps.Attestor.ProofChain.Merkle;
using StellaOps.Attestor.ProofChain.Predicates;
using StellaOps.Canonical.Json;
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -12,7 +8,7 @@ using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Identifiers;
public sealed class ContentAddressedIdGenerator : IContentAddressedIdGenerator
public sealed partial class ContentAddressedIdGenerator : IContentAddressedIdGenerator
{
private static readonly JsonSerializerOptions SerializerOptions = new()
{
@@ -91,80 +87,6 @@ public sealed class ContentAddressedIdGenerator : IContentAddressedIdGenerator
return new ProofBundleId(Convert.ToHexStringLower(root));
}
/// <summary>
/// Computes a deterministic revision id for a graph snapshot: node ids and edge ids
/// are each sorted ordinally (input order never affects the result), then hashed as
/// UTF-8 Merkle leaves in a fixed layout — nodes, edges, then the
/// policy/feeds/toolchain/params digests (trimmed).
/// </summary>
public GraphRevisionId ComputeGraphRevisionId(
    IReadOnlyList<string> nodeIds,
    IReadOnlyList<string> edgeIds,
    string policyDigest,
    string feedsDigest,
    string toolchainDigest,
    string paramsDigest)
{
    ArgumentNullException.ThrowIfNull(nodeIds);
    ArgumentNullException.ThrowIfNull(edgeIds);
    ArgumentException.ThrowIfNullOrWhiteSpace(policyDigest);
    ArgumentException.ThrowIfNullOrWhiteSpace(feedsDigest);
    ArgumentException.ThrowIfNullOrWhiteSpace(toolchainDigest);
    ArgumentException.ThrowIfNullOrWhiteSpace(paramsDigest);
    // Ordinal sort makes the digest independent of caller ordering.
    var nodes = new List<string>(nodeIds);
    nodes.Sort(StringComparer.Ordinal);
    var edges = new List<string>(edgeIds);
    edges.Sort(StringComparer.Ordinal);
    var leaves = new List<ReadOnlyMemory<byte>>(nodes.Count + edges.Count + 4);
    foreach (var node in nodes)
    {
        leaves.Add(Encoding.UTF8.GetBytes(node));
    }
    foreach (var edge in edges)
    {
        leaves.Add(Encoding.UTF8.GetBytes(edge));
    }
    leaves.Add(Encoding.UTF8.GetBytes(policyDigest.Trim()));
    leaves.Add(Encoding.UTF8.GetBytes(feedsDigest.Trim()));
    leaves.Add(Encoding.UTF8.GetBytes(toolchainDigest.Trim()));
    leaves.Add(Encoding.UTF8.GetBytes(paramsDigest.Trim()));
    var root = _merkleTreeBuilder.ComputeMerkleRoot(leaves);
    return new GraphRevisionId(Convert.ToHexStringLower(root));
}
/// <summary>
/// Canonicalizes the raw SBOM JSON (no version marker) and returns the
/// algorithm-prefixed lowercase-hex digest.
/// </summary>
public string ComputeSbomDigest(ReadOnlySpan<byte> sbomJson)
{
    var canonical = _canonicalizer.Canonicalize(sbomJson);
    return $"sha256:{HashSha256Hex(canonical)}";
}
/// <summary>
/// Builds an entry id from the SBOM digest plus the package URL and optional version.
/// </summary>
public SbomEntryId ComputeSbomEntryId(ReadOnlySpan<byte> sbomJson, string purl, string? version = null)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(purl);
    var sbomDigest = ComputeSbomDigest(sbomJson);
    return new SbomEntryId(sbomDigest, purl, version);
}
/// <summary>
/// Canonicalizes a value with version marker for content-addressed hashing.
/// Uses the current canonicalization version (<see cref="CanonVersion.Current"/>).
/// </summary>
private byte[] CanonicalizeVersioned<T>(T value)
    => _canonicalizer.CanonicalizeWithVersion(
        JsonSerializer.SerializeToUtf8Bytes(value, SerializerOptions),
        CanonVersion.Current);
/// <summary>
/// Canonicalizes a value without version marker.
/// Used for SBOM digests which are content-addressed by their raw JSON.
/// </summary>
private byte[] Canonicalize<T>(T value)
    => _canonicalizer.Canonicalize(
        JsonSerializer.SerializeToUtf8Bytes(value, SerializerOptions));
// Lowercase hex of the SHA-256 digest of the given bytes.
private static string HashSha256Hex(ReadOnlySpan<byte> bytes)
{
    var digest = SHA256.HashData(bytes);
    return Convert.ToHexStringLower(digest);
}
}

View File

@@ -0,0 +1,8 @@
namespace StellaOps.Attestor.ProofChain.Identifiers;
/// <summary>
/// sha256-based content-addressed identifier for evidence.
/// </summary>
public sealed record EvidenceId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Delegates to the base rendering.</summary>
    public override string ToString() => base.ToString();
    /// <summary>Parses a sha256 value; throws <see cref="FormatException"/> otherwise.</summary>
    public new static EvidenceId Parse(string value) => new(Sha256IdParser.Parse(value, "EvidenceID"));
}

View File

@@ -0,0 +1,6 @@
namespace StellaOps.Attestor.ProofChain.Identifiers;
/// <summary>
/// Content-addressed id for an arbitrary algorithm/digest pair, used when no
/// specialized id type applies.
/// </summary>
public sealed record GenericContentAddressedId(string Algorithm, string Digest) : ContentAddressedId(Algorithm, Digest)
{
    /// <summary>Delegates to the base rendering.</summary>
    public override string ToString() => base.ToString();
}

View File

@@ -0,0 +1,8 @@
namespace StellaOps.Attestor.ProofChain.Identifiers;
/// <summary>
/// sha256-based content-addressed identifier for a proof bundle.
/// </summary>
public sealed record ProofBundleId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Delegates to the base rendering.</summary>
    public override string ToString() => base.ToString();
    /// <summary>Parses a sha256 value; throws <see cref="FormatException"/> otherwise.</summary>
    public new static ProofBundleId Parse(string value) => new(Sha256IdParser.Parse(value, "ProofBundleID"));
}

View File

@@ -0,0 +1,8 @@
namespace StellaOps.Attestor.ProofChain.Identifiers;
/// <summary>
/// sha256-based content-addressed identifier for a reasoning record.
/// </summary>
public sealed record ReasoningId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Delegates to the base rendering.</summary>
    public override string ToString() => base.ToString();
    /// <summary>Parses a sha256 value; throws <see cref="FormatException"/> otherwise.</summary>
    public new static ReasoningId Parse(string value) => new(Sha256IdParser.Parse(value, "ReasoningID"));
}

View File

@@ -0,0 +1,15 @@
namespace StellaOps.Attestor.ProofChain.Identifiers;
/// <summary>
/// Shared parser for the sha256-only content-addressed id record types.
/// </summary>
internal static class Sha256IdParser
{
    /// <summary>
    /// Extracts the digest from a value that splits into algorithm "sha256" plus a
    /// digest; otherwise throws <see cref="FormatException"/> naming
    /// <paramref name="kind"/> in the message.
    /// </summary>
    public static string Parse(string value, string kind)
    {
        if (ContentAddressedId.TrySplit(value, out var algorithm, out var digest)
            && string.Equals(algorithm, "sha256", StringComparison.Ordinal))
        {
            return digest;
        }
        throw new FormatException($"Invalid {kind}: '{value}'.");
    }
}

View File

@@ -0,0 +1,8 @@
namespace StellaOps.Attestor.ProofChain.Identifiers;
/// <summary>
/// sha256-based content-addressed identifier for a VEX verdict.
/// </summary>
public sealed record VexVerdictId(string Digest) : ContentAddressedId("sha256", Digest)
{
    /// <summary>Delegates to the base rendering.</summary>
    public override string ToString() => base.ToString();
    /// <summary>Parses a sha256 value; throws <see cref="FormatException"/> otherwise.</summary>
    public new static VexVerdictId Parse(string value) => new(Sha256IdParser.Parse(value, "VEXVerdictID"));
}

View File

@@ -1,64 +1,6 @@
using System.Text.Json;
using System.Text.Json.Nodes;
namespace StellaOps.Attestor.ProofChain.Json;
/// <summary>
/// JSON Schema validation result.
/// </summary>
public sealed record SchemaValidationResult
{
    /// <summary>
    /// Whether the JSON is valid against the schema.
    /// </summary>
    public required bool IsValid { get; init; }
    /// <summary>
    /// Validation errors if any (empty when produced by <see cref="Success"/>).
    /// </summary>
    public required IReadOnlyList<SchemaValidationError> Errors { get; init; }
    /// <summary>
    /// Create a successful validation result with no errors.
    /// </summary>
    public static SchemaValidationResult Success() => new()
    {
        IsValid = true,
        Errors = []
    };
    /// <summary>
    /// Create a failed validation result.
    /// </summary>
    /// <remarks>
    /// The supplied array is copied so later mutation by the caller cannot alter
    /// the otherwise immutable result.
    /// </remarks>
    public static SchemaValidationResult Failure(params SchemaValidationError[] errors) => new()
    {
        IsValid = false,
        Errors = [.. errors]
    };
}
/// <summary>
/// A single schema validation error.
/// </summary>
/// <remarks>
/// Immutable record with value-based equality.
/// </remarks>
public sealed record SchemaValidationError
{
    /// <summary>
    /// JSON pointer to the error location ("/" for document-level errors).
    /// </summary>
    public required string Path { get; init; }
    /// <summary>
    /// Error message.
    /// </summary>
    public required string Message { get; init; }
    /// <summary>
    /// Schema keyword that failed (e.g., "required", "type"); null when not applicable.
    /// </summary>
    public string? Keyword { get; init; }
}
/// <summary>
/// Service for validating JSON against schemas.
/// </summary>
@@ -94,268 +36,3 @@ public interface IJsonSchemaValidator
/// <returns>True if a schema is registered.</returns>
bool HasSchema(string predicateType);
}
/// <summary>
/// Default implementation of JSON Schema validation.
/// </summary>
public sealed class PredicateSchemaValidator : IJsonSchemaValidator
{
private static readonly Dictionary<string, JsonDocument> _schemas = new();
/// <summary>
/// Static initializer to load embedded schemas.
/// </summary>
static PredicateSchemaValidator()
{
// TODO: Load schemas from embedded resources
// These would be in src/Attestor/__Libraries/StellaOps.Attestor.ProofChain/Schemas/
}
/// <inheritdoc />
/// <remarks>
/// Cancellation is observed only on entry. Unknown predicate types fail fast with a
/// "predicateType" error; malformed JSON is reported as a "format" error instead of
/// throwing. NOTE(review): types that HasSchema reports as registered but that have
/// no case in the switch below (e.g. the sbom-linkage URI) validate vacuously —
/// confirm that is intended.
/// </remarks>
public Task<SchemaValidationResult> ValidatePredicateAsync(
    string json,
    string predicateType,
    CancellationToken ct = default)
{
    ct.ThrowIfCancellationRequested();
    if (!HasSchema(predicateType))
    {
        return Task.FromResult(SchemaValidationResult.Failure(new SchemaValidationError
        {
            Path = "/",
            Message = $"No schema registered for predicate type: {predicateType}",
            Keyword = "predicateType"
        }));
    }
    try
    {
        using var document = JsonDocument.Parse(json);
        // TODO: Implement actual JSON Schema validation
        // For now, do basic structural checks
        var root = document.RootElement;
        var errors = new List<SchemaValidationError>();
        // Validate required fields based on predicate type
        switch (predicateType)
        {
            case "evidence.stella/v1":
                errors.AddRange(ValidateEvidencePredicate(root));
                break;
            case "reasoning.stella/v1":
                errors.AddRange(ValidateReasoningPredicate(root));
                break;
            case "cdx-vex.stella/v1":
                errors.AddRange(ValidateVexPredicate(root));
                break;
            case "proofspine.stella/v1":
                errors.AddRange(ValidateProofSpinePredicate(root));
                break;
            case "verdict.stella/v1":
                errors.AddRange(ValidateVerdictPredicate(root));
                break;
            case "delta-verdict.stella/v1":
                errors.AddRange(ValidateDeltaVerdictPredicate(root));
                break;
            case "reachability-subgraph.stella/v1":
                errors.AddRange(ValidateReachabilitySubgraphPredicate(root));
                break;
            // Delta predicate types for lineage comparison (Sprint 20251228_007)
            case "stella.ops/vex-delta@v1":
                errors.AddRange(ValidateVexDeltaPredicate(root));
                break;
            case "stella.ops/sbom-delta@v1":
                errors.AddRange(ValidateSbomDeltaPredicate(root));
                break;
            case "stella.ops/verdict-delta@v1":
                errors.AddRange(ValidateVerdictDeltaPredicate(root));
                break;
        }
        return errors.Count > 0
            ? Task.FromResult(SchemaValidationResult.Failure(errors.ToArray()))
            : Task.FromResult(SchemaValidationResult.Success());
    }
    catch (JsonException ex)
    {
        // Malformed JSON is surfaced as a single validation error, not an exception.
        return Task.FromResult(SchemaValidationResult.Failure(new SchemaValidationError
        {
            Path = "/",
            Message = $"Invalid JSON: {ex.Message}",
            Keyword = "format"
        }));
    }
}
/// <inheritdoc />
/// <remarks>
/// Serializes the whole statement with default JsonSerializer options and validates
/// the resulting JSON root against the statement's predicate type.
/// NOTE(review): the per-type checks inspect root-level properties, but in-toto
/// statements normally nest predicate fields under "predicate" — as written those
/// fields will likely be reported missing; default serializer options may also not
/// match the wire casing. Confirm intent.
/// </remarks>
public Task<SchemaValidationResult> ValidateStatementAsync<T>(
    T statement,
    CancellationToken ct = default) where T : Statements.InTotoStatement
{
    ct.ThrowIfCancellationRequested();
    var json = System.Text.Json.JsonSerializer.Serialize(statement);
    return ValidatePredicateAsync(json, statement.PredicateType, ct);
}
/// <inheritdoc />
public bool HasSchema(string predicateType)
{
    switch (predicateType)
    {
        case "evidence.stella/v1":
        case "reasoning.stella/v1":
        case "cdx-vex.stella/v1":
        case "proofspine.stella/v1":
        case "verdict.stella/v1":
        case "https://stella-ops.org/predicates/sbom-linkage/v1":
        case "delta-verdict.stella/v1":
        case "reachability-subgraph.stella/v1":
        // Delta predicate types for lineage comparison (Sprint 20251228_007)
        case "stella.ops/vex-delta@v1":
        case "stella.ops/sbom-delta@v1":
        case "stella.ops/verdict-delta@v1":
            return true;
        default:
            return false;
    }
}
// Checks the evidence predicate's required root properties.
private static IEnumerable<SchemaValidationError> ValidateEvidencePredicate(JsonElement root)
{
    string[] required = ["scanToolName", "scanToolVersion", "timestamp"];
    foreach (var property in required)
    {
        if (!root.TryGetProperty(property, out _))
            yield return new() { Path = $"/{property}", Message = "Required property missing", Keyword = "required" };
    }
}
// Checks the reasoning predicate's required root properties.
private static IEnumerable<SchemaValidationError> ValidateReasoningPredicate(JsonElement root)
{
    string[] required = ["policyId", "policyVersion", "evaluatedAt"];
    foreach (var property in required)
    {
        if (!root.TryGetProperty(property, out _))
            yield return new() { Path = $"/{property}", Message = "Required property missing", Keyword = "required" };
    }
}
// Checks the VEX predicate's required root properties.
private static IEnumerable<SchemaValidationError> ValidateVexPredicate(JsonElement root)
{
    string[] required = ["vulnerability", "status"];
    foreach (var property in required)
    {
        if (!root.TryGetProperty(property, out _))
            yield return new() { Path = $"/{property}", Message = "Required property missing", Keyword = "required" };
    }
}
// Checks the proof-spine predicate's required root properties.
private static IEnumerable<SchemaValidationError> ValidateProofSpinePredicate(JsonElement root)
{
    string[] required = ["sbomEntryId", "evidenceIds", "proofBundleId"];
    foreach (var property in required)
    {
        if (!root.TryGetProperty(property, out _))
            yield return new() { Path = $"/{property}", Message = "Required property missing", Keyword = "required" };
    }
}
// Checks the verdict predicate's required root properties.
private static IEnumerable<SchemaValidationError> ValidateVerdictPredicate(JsonElement root)
{
    string[] required = ["proofBundleId", "result", "verifiedAt"];
    foreach (var property in required)
    {
        if (!root.TryGetProperty(property, out _))
            yield return new() { Path = $"/{property}", Message = "Required property missing", Keyword = "required" };
    }
}
// Checks the delta-verdict predicate's required root properties.
private static IEnumerable<SchemaValidationError> ValidateDeltaVerdictPredicate(JsonElement root)
{
    string[] required =
    [
        "beforeRevisionId", "afterRevisionId", "hasMaterialChange",
        "priorityScore", "changes", "comparedAt"
    ];
    foreach (var property in required)
    {
        if (!root.TryGetProperty(property, out _))
            yield return new() { Path = $"/{property}", Message = "Required property missing", Keyword = "required" };
    }
}
// Checks the reachability-subgraph predicate's required root properties.
private static IEnumerable<SchemaValidationError> ValidateReachabilitySubgraphPredicate(JsonElement root)
{
    string[] required = ["graphDigest", "analysis"];
    foreach (var property in required)
    {
        if (!root.TryGetProperty(property, out _))
            yield return new() { Path = $"/{property}", Message = "Required property missing", Keyword = "required" };
    }
}
// Checks the VEX-delta predicate's required root properties.
private static IEnumerable<SchemaValidationError> ValidateVexDeltaPredicate(JsonElement root)
{
    string[] required = ["fromDigest", "toDigest", "tenantId", "summary", "comparedAt"];
    foreach (var property in required)
    {
        if (!root.TryGetProperty(property, out _))
            yield return new() { Path = $"/{property}", Message = "Required property missing", Keyword = "required" };
    }
}
private static IEnumerable<SchemaValidationError> ValidateSbomDeltaPredicate(JsonElement root)
{
    // stella.ops/sbom-delta@v1 requires these top-level members.
    string[] requiredProperties =
    [
        "fromDigest", "toDigest", "fromSbomDigest", "toSbomDigest",
        "tenantId", "summary", "comparedAt"
    ];
    foreach (var propertyName in requiredProperties)
    {
        if (!root.TryGetProperty(propertyName, out _))
        {
            yield return new()
            {
                Path = $"/{propertyName}",
                Message = "Required property missing",
                Keyword = "required"
            };
        }
    }
}
private static IEnumerable<SchemaValidationError> ValidateVerdictDeltaPredicate(JsonElement root)
{
    // stella.ops/verdict-delta@v1 requires these top-level members.
    string[] requiredProperties =
    [
        "fromDigest", "toDigest", "tenantId",
        "fromPolicyVersion", "toPolicyVersion",
        "fromVerdict", "toVerdict",
        "summary", "comparedAt"
    ];
    foreach (var propertyName in requiredProperties)
    {
        if (!root.TryGetProperty(propertyName, out _))
        {
            yield return new()
            {
                Path = $"/{propertyName}",
                Message = "Required property missing",
                Keyword = "required"
            };
        }
    }
}
}

View File

@@ -0,0 +1,88 @@
using System.Text.Json;
namespace StellaOps.Attestor.ProofChain.Json;
/// <summary>
/// Delta predicate validation methods for PredicateSchemaValidator.
/// Each validator yields one error per missing required top-level property.
/// </summary>
public sealed partial class PredicateSchemaValidator
{
    // Shared required-property check for the delta predicate family.
    private static IEnumerable<SchemaValidationError> RequireDeltaProperties(
        JsonElement root, params string[] propertyNames)
    {
        foreach (var propertyName in propertyNames)
        {
            if (!root.TryGetProperty(propertyName, out _))
            {
                yield return new()
                {
                    Path = $"/{propertyName}",
                    Message = "Required property missing",
                    Keyword = "required"
                };
            }
        }
    }

    private static IEnumerable<SchemaValidationError> ValidateDeltaVerdictPredicate(JsonElement root)
        => RequireDeltaProperties(
            root,
            "beforeRevisionId", "afterRevisionId", "hasMaterialChange",
            "priorityScore", "changes", "comparedAt");

    private static IEnumerable<SchemaValidationError> ValidateReachabilitySubgraphPredicate(JsonElement root)
        => RequireDeltaProperties(root, "graphDigest", "analysis");

    private static IEnumerable<SchemaValidationError> ValidateVexDeltaPredicate(JsonElement root)
        => RequireDeltaProperties(
            root,
            "fromDigest", "toDigest", "tenantId", "summary", "comparedAt");

    private static IEnumerable<SchemaValidationError> ValidateSbomDeltaPredicate(JsonElement root)
        => RequireDeltaProperties(
            root,
            "fromDigest", "toDigest", "fromSbomDigest", "toSbomDigest",
            "tenantId", "summary", "comparedAt");

    private static IEnumerable<SchemaValidationError> ValidateVerdictDeltaPredicate(JsonElement root)
        => RequireDeltaProperties(
            root,
            "fromDigest", "toDigest", "tenantId",
            "fromPolicyVersion", "toPolicyVersion",
            "fromVerdict", "toVerdict",
            "summary", "comparedAt");
}

View File

@@ -0,0 +1,77 @@
using System.Text.Json;
namespace StellaOps.Attestor.ProofChain.Json;
/// <summary>
/// Predicate-specific validation methods for PredicateSchemaValidator.
/// Dispatches on predicate type and checks required top-level properties.
/// </summary>
public sealed partial class PredicateSchemaValidator
{
    // Dispatches to the validator for the given predicate type.
    // Unknown types yield no errors here (HasSchema gates them upstream).
    private static IEnumerable<SchemaValidationError> ValidateByPredicateType(
        JsonElement root, string predicateType)
    {
        return predicateType switch
        {
            "evidence.stella/v1" => ValidateEvidencePredicate(root),
            "reasoning.stella/v1" => ValidateReasoningPredicate(root),
            "cdx-vex.stella/v1" => ValidateVexPredicate(root),
            "proofspine.stella/v1" => ValidateProofSpinePredicate(root),
            "verdict.stella/v1" => ValidateVerdictPredicate(root),
            "delta-verdict.stella/v1" => ValidateDeltaVerdictPredicate(root),
            "reachability-subgraph.stella/v1" => ValidateReachabilitySubgraphPredicate(root),
            "stella.ops/vex-delta@v1" => ValidateVexDeltaPredicate(root),
            "stella.ops/sbom-delta@v1" => ValidateSbomDeltaPredicate(root),
            "stella.ops/verdict-delta@v1" => ValidateVerdictDeltaPredicate(root),
            _ => []
        };
    }

    // Shared required-property check for the core predicate family.
    private static IEnumerable<SchemaValidationError> RequireCoreProperties(
        JsonElement root, params string[] propertyNames)
    {
        foreach (var propertyName in propertyNames)
        {
            if (!root.TryGetProperty(propertyName, out _))
            {
                yield return new()
                {
                    Path = $"/{propertyName}",
                    Message = "Required property missing",
                    Keyword = "required"
                };
            }
        }
    }

    private static IEnumerable<SchemaValidationError> ValidateEvidencePredicate(JsonElement root)
        => RequireCoreProperties(root, "scanToolName", "scanToolVersion", "timestamp");

    private static IEnumerable<SchemaValidationError> ValidateReasoningPredicate(JsonElement root)
        => RequireCoreProperties(root, "policyId", "policyVersion", "evaluatedAt");

    private static IEnumerable<SchemaValidationError> ValidateVexPredicate(JsonElement root)
        => RequireCoreProperties(root, "vulnerability", "status");

    private static IEnumerable<SchemaValidationError> ValidateProofSpinePredicate(JsonElement root)
        => RequireCoreProperties(root, "sbomEntryId", "evidenceIds", "proofBundleId");

    private static IEnumerable<SchemaValidationError> ValidateVerdictPredicate(JsonElement root)
        => RequireCoreProperties(root, "proofBundleId", "result", "verifiedAt");
}

View File

@@ -0,0 +1,100 @@
using System.Text.Json;
namespace StellaOps.Attestor.ProofChain.Json;
/// <summary>
/// Default implementation of JSON Schema validation.
/// </summary>
/// <remarks>
/// Full JSON Schema evaluation is not implemented yet; validation currently
/// performs required-property checks per predicate type.
/// </remarks>
public sealed partial class PredicateSchemaValidator : IJsonSchemaValidator
{
    // Placeholder cache for embedded schema documents (not yet populated).
    // readonly: the reference must never be reassigned (CA2211 for mutable statics).
    private static readonly Dictionary<string, JsonDocument> _schemas = new();

    // Single source of truth for the predicate types this validator recognizes.
    // Keeps HasSchema and future schema loading in sync instead of duplicating
    // the list in a switch expression.
    private static readonly HashSet<string> _supportedPredicateTypes = new(StringComparer.Ordinal)
    {
        "evidence.stella/v1",
        "reasoning.stella/v1",
        "cdx-vex.stella/v1",
        "proofspine.stella/v1",
        "verdict.stella/v1",
        "https://stella-ops.org/predicates/sbom-linkage/v1",
        "delta-verdict.stella/v1",
        "reachability-subgraph.stella/v1",
        // Delta predicate types for lineage comparison (Sprint 20251228_007)
        "stella.ops/vex-delta@v1",
        "stella.ops/sbom-delta@v1",
        "stella.ops/verdict-delta@v1",
    };

    /// <summary>
    /// Static initializer to load embedded schemas.
    /// </summary>
    static PredicateSchemaValidator()
    {
        // TODO: Load schemas from embedded resources
        // These would be in src/Attestor/__Libraries/StellaOps.Attestor.ProofChain/Schemas/
    }

    /// <inheritdoc />
    public Task<SchemaValidationResult> ValidatePredicateAsync(
        string json,
        string predicateType,
        CancellationToken ct = default)
    {
        ct.ThrowIfCancellationRequested();

        if (!HasSchema(predicateType))
        {
            return Task.FromResult(SchemaValidationResult.Failure(new SchemaValidationError
            {
                Path = "/",
                Message = $"No schema registered for predicate type: {predicateType}",
                Keyword = "predicateType"
            }));
        }

        try
        {
            using var document = JsonDocument.Parse(json);

            // TODO: Implement actual JSON Schema validation
            // For now, do basic structural checks per predicate type.
            var errors = new List<SchemaValidationError>(
                ValidateByPredicateType(document.RootElement, predicateType));

            return errors.Count > 0
                ? Task.FromResult(SchemaValidationResult.Failure(errors.ToArray()))
                : Task.FromResult(SchemaValidationResult.Success());
        }
        catch (JsonException ex)
        {
            // Malformed JSON is reported as a validation failure, not an exception.
            return Task.FromResult(SchemaValidationResult.Failure(new SchemaValidationError
            {
                Path = "/",
                Message = $"Invalid JSON: {ex.Message}",
                Keyword = "format"
            }));
        }
    }

    /// <inheritdoc />
    public Task<SchemaValidationResult> ValidateStatementAsync<T>(
        T statement,
        CancellationToken ct = default) where T : Statements.InTotoStatement
    {
        ct.ThrowIfCancellationRequested();
        // Serialize the statement and validate it against its own predicate type.
        var json = JsonSerializer.Serialize(statement);
        return ValidatePredicateAsync(json, statement.PredicateType, ct);
    }

    /// <inheritdoc />
    public bool HasSchema(string predicateType)
        => _supportedPredicateTypes.Contains(predicateType);
}

View File

@@ -0,0 +1,29 @@
namespace StellaOps.Attestor.ProofChain.Json;
public sealed partial class Rfc8785JsonCanonicalizer
{
    /// <summary>
    /// Places a decimal point into a bare digit string for a negative decimal
    /// exponent, trimming redundant leading and trailing zeros.
    /// </summary>
    /// <param name="digits">Significant digits with no sign or separator.</param>
    /// <param name="decimalExponent">Negative power of ten to apply.</param>
    private static string InsertDecimalPoint(string digits, int decimalExponent)
    {
        var pointIndex = digits.Length + decimalExponent;
        if (pointIndex <= 0)
        {
            // Every digit falls after the decimal point: pad with leading zeros.
            var fraction = (new string('0', -pointIndex) + digits).TrimEnd('0');
            return $"0.{fraction}";
        }

        var whole = digits[..pointIndex].TrimStart('0');
        if (whole.Length == 0)
        {
            whole = "0";
        }

        var fractional = digits[pointIndex..].TrimEnd('0');
        return fractional.Length == 0 ? whole : $"{whole}.{fractional}";
    }
}

View File

@@ -0,0 +1,94 @@
using System.Globalization;
namespace StellaOps.Attestor.ProofChain.Json;
public sealed partial class Rfc8785JsonCanonicalizer
{
    /// <summary>
    /// Parses a raw JSON number literal and rewrites it in a minimal decimal
    /// form: exponent expanded, no leading/trailing zeros, zero as "0",
    /// leading '-' preserved (except for negative zero, which becomes "0").
    /// </summary>
    /// <param name="raw">The raw JSON number text (e.g. "1.20e2").</param>
    /// <returns>The normalized decimal string (e.g. "120").</returns>
    /// <exception cref="FormatException">The input is not a valid JSON number.</exception>
    /// <remarks>
    /// NOTE(review): RFC 8785 specifies ECMAScript number serialization, which
    /// emits exponent notation for very large/small magnitudes (e.g. "1e+21");
    /// this expansion-based form differs there — confirm this is intended.
    /// </remarks>
    private static string NormalizeNumberString(string raw)
    {
        if (string.IsNullOrWhiteSpace(raw))
        {
            throw new FormatException("Invalid JSON number.");
        }
        var index = 0;
        // Optional leading sign.
        var negative = raw[index] == '-';
        if (negative)
        {
            index++;
        }
        // Integer part: at least one digit is required.
        var intStart = index;
        while (index < raw.Length && char.IsDigit(raw[index]))
        {
            index++;
        }
        if (index == intStart)
        {
            throw new FormatException($"Invalid JSON number: '{raw}'.");
        }
        var intPart = raw[intStart..index];
        // Optional fractional part following '.'; must contain at least one digit.
        var fracPart = string.Empty;
        if (index < raw.Length && raw[index] == '.')
        {
            index++;
            var fracStart = index;
            while (index < raw.Length && char.IsDigit(raw[index]))
            {
                index++;
            }
            if (index == fracStart)
            {
                throw new FormatException($"Invalid JSON number: '{raw}'.");
            }
            fracPart = raw[fracStart..index];
        }
        // Optional exponent: 'e'/'E', optional sign, at least one digit.
        var exponent = 0;
        if (index < raw.Length && (raw[index] == 'e' || raw[index] == 'E'))
        {
            index++;
            var expNegative = false;
            if (index < raw.Length && (raw[index] == '+' || raw[index] == '-'))
            {
                expNegative = raw[index] == '-';
                index++;
            }
            var expStart = index;
            while (index < raw.Length && char.IsDigit(raw[index]))
            {
                index++;
            }
            if (index == expStart)
            {
                throw new FormatException($"Invalid JSON number: '{raw}'.");
            }
            var expValue = int.Parse(raw[expStart..index], CultureInfo.InvariantCulture);
            exponent = expNegative ? -expValue : expValue;
        }
        // Any trailing characters make the literal invalid.
        if (index != raw.Length)
        {
            throw new FormatException($"Invalid JSON number: '{raw}'.");
        }
        // Collapse integer+fraction into one digit string; all zeros → "0".
        var digits = (intPart + fracPart).TrimStart('0');
        if (digits.Length == 0)
        {
            return "0";
        }
        // Effective power of ten after absorbing the fraction into the digits.
        var decimalExponent = exponent - fracPart.Length;
        var normalized = decimalExponent >= 0
            ? digits + new string('0', decimalExponent)
            : InsertDecimalPoint(digits, decimalExponent);
        return negative ? "-" + normalized : normalized;
    }
}

View File

@@ -0,0 +1,32 @@
using System.Text;
using System.Text.Json;
namespace StellaOps.Attestor.ProofChain.Json;
public sealed partial class Rfc8785JsonCanonicalizer
{
    /// <summary>
    /// Emits a JSON number in its normalized canonical form.
    /// </summary>
    private static void WriteNumber(Utf8JsonWriter writer, JsonElement element)
    {
        var canonical = NormalizeNumberString(element.GetRawText());
        writer.WriteRawValue(canonical, skipInputValidation: true);
    }

    /// <summary>
    /// Returns the NFC-normalized form of <paramref name="value"/> when NFC
    /// normalization is enabled; otherwise the input is returned unchanged.
    /// Strings that are already in NFC form are returned as-is.
    /// </summary>
    private string? NormalizeString(string? value)
    {
        if (value is null
            || !_enableNfcNormalization
            || value.IsNormalized(NormalizationForm.FormC))
        {
            return value;
        }

        return value.Normalize(NormalizationForm.FormC);
    }
}

View File

@@ -0,0 +1,100 @@
using System.Text.Json;
namespace StellaOps.Attestor.ProofChain.Json;
public sealed partial class Rfc8785JsonCanonicalizer
{
    /// <summary>
    /// Recursively writes <paramref name="element"/> in canonical form:
    /// object members ordinal-sorted, strings and numbers normalized.
    /// </summary>
    private void WriteCanonical(Utf8JsonWriter writer, JsonElement element)
    {
        switch (element.ValueKind)
        {
            case JsonValueKind.Object:
                WriteObject(writer, element);
                break;
            case JsonValueKind.Array:
                WriteArray(writer, element);
                break;
            case JsonValueKind.String:
                writer.WriteStringValue(NormalizeString(element.GetString()));
                break;
            case JsonValueKind.Number:
                WriteNumber(writer, element);
                break;
            case JsonValueKind.True:
            case JsonValueKind.False:
                writer.WriteBooleanValue(element.ValueKind == JsonValueKind.True);
                break;
            case JsonValueKind.Null:
                writer.WriteNullValue();
                break;
            default:
                throw new FormatException($"Unsupported JSON token kind '{element.ValueKind}'.");
        }
    }

    /// <summary>
    /// Writes <paramref name="element"/> canonically with the version marker
    /// emitted first (the underscore-prefixed field name sorts before all
    /// other members). Non-object roots are wrapped under "_value".
    /// </summary>
    private void WriteCanonicalWithVersion(Utf8JsonWriter writer, JsonElement element, string version)
    {
        writer.WriteStartObject();
        writer.WriteString(VersionFieldName, NormalizeString(version));

        if (element.ValueKind == JsonValueKind.Object)
        {
            foreach (var (name, value) in CollectSortedProperties(element))
            {
                writer.WritePropertyName(NormalizeString(name)!);
                WriteCanonical(writer, value);
            }
        }
        else
        {
            // Non-object root: wrap in versioned object under "_value".
            writer.WritePropertyName("_value");
            WriteCanonical(writer, element);
        }

        writer.WriteEndObject();
    }

    private void WriteObject(Utf8JsonWriter writer, JsonElement element)
    {
        writer.WriteStartObject();
        foreach (var (name, value) in CollectSortedProperties(element))
        {
            writer.WritePropertyName(NormalizeString(name)!);
            WriteCanonical(writer, value);
        }
        writer.WriteEndObject();
    }

    private void WriteArray(Utf8JsonWriter writer, JsonElement element)
    {
        // Array element order is significant and preserved as-is.
        writer.WriteStartArray();
        foreach (var item in element.EnumerateArray())
        {
            WriteCanonical(writer, item);
        }
        writer.WriteEndArray();
    }

    /// <summary>
    /// Collects an object's members sorted by ordinal (byte-wise) name comparison.
    /// </summary>
    private static List<(string Name, JsonElement Value)> CollectSortedProperties(JsonElement element)
    {
        var properties = new List<(string Name, JsonElement Value)>();
        foreach (var property in element.EnumerateObject())
        {
            properties.Add((property.Name, property.Value));
        }
        properties.Sort(static (x, y) => string.CompareOrdinal(x.Name, y.Name));
        return properties;
    }
}

View File

@@ -1,9 +1,4 @@
using System;
using System.Buffers;
using System.Collections.Generic;
using System.Globalization;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
@@ -17,7 +12,7 @@ namespace StellaOps.Attestor.ProofChain.Json;
/// NFC normalization ensures that equivalent Unicode sequences (e.g., composed vs decomposed characters)
/// produce identical canonical output, which is critical for cross-platform determinism.
/// </remarks>
public sealed class Rfc8785JsonCanonicalizer : IJsonCanonicalizer
public sealed partial class Rfc8785JsonCanonicalizer : IJsonCanonicalizer
{
/// <summary>
/// Field name for version marker. Underscore prefix ensures lexicographic first position.
@@ -81,236 +76,4 @@ public sealed class Rfc8785JsonCanonicalizer : IJsonCanonicalizer
return buffer.WrittenSpan.ToArray();
}
/// <summary>
/// Writes <paramref name="element"/> canonically with a version marker emitted
/// as the first member; non-object roots are wrapped in an object under "_value".
/// </summary>
private void WriteCanonicalWithVersion(Utf8JsonWriter writer, JsonElement element, string version)
{
    if (element.ValueKind == JsonValueKind.Object)
    {
        writer.WriteStartObject();
        // Write version marker first (underscore prefix ensures it stays first after sorting)
        writer.WriteString(VersionFieldName, NormalizeString(version));
        // Write remaining properties sorted
        var properties = new List<(string Name, JsonElement Value)>();
        foreach (var property in element.EnumerateObject())
        {
            properties.Add((property.Name, property.Value));
        }
        // Ordinal comparison keeps member order byte-deterministic.
        properties.Sort(static (x, y) => string.CompareOrdinal(x.Name, y.Name));
        foreach (var (name, value) in properties)
        {
            writer.WritePropertyName(NormalizeString(name)!);
            WriteCanonical(writer, value);
        }
        writer.WriteEndObject();
    }
    else
    {
        // Non-object root: wrap in versioned object
        writer.WriteStartObject();
        writer.WriteString(VersionFieldName, NormalizeString(version));
        writer.WritePropertyName("_value");
        WriteCanonical(writer, element);
        writer.WriteEndObject();
    }
}
/// <summary>
/// Recursively writes <paramref name="element"/> in canonical form, dispatching
/// on value kind: sorted objects, in-order arrays, normalized strings/numbers.
/// </summary>
/// <exception cref="FormatException">Thrown for undefined/unsupported token kinds.</exception>
private void WriteCanonical(Utf8JsonWriter writer, JsonElement element)
{
    switch (element.ValueKind)
    {
        case JsonValueKind.Object:
            WriteObject(writer, element);
            return;
        case JsonValueKind.Array:
            WriteArray(writer, element);
            return;
        case JsonValueKind.String:
            // Strings pass through optional NFC normalization.
            writer.WriteStringValue(NormalizeString(element.GetString()));
            return;
        case JsonValueKind.Number:
            WriteNumber(writer, element);
            return;
        case JsonValueKind.True:
            writer.WriteBooleanValue(true);
            return;
        case JsonValueKind.False:
            writer.WriteBooleanValue(false);
            return;
        case JsonValueKind.Null:
            writer.WriteNullValue();
            return;
        default:
            throw new FormatException($"Unsupported JSON token kind '{element.ValueKind}'.");
    }
}
/// <summary>
/// Writes an object with its members sorted by ordinal name comparison,
/// recursing into each value via <see cref="WriteCanonical"/>.
/// </summary>
private void WriteObject(Utf8JsonWriter writer, JsonElement element)
{
    var properties = new List<(string Name, JsonElement Value)>();
    foreach (var property in element.EnumerateObject())
    {
        properties.Add((property.Name, property.Value));
    }
    // Ordinal comparison keeps member order byte-deterministic.
    properties.Sort(static (x, y) => string.CompareOrdinal(x.Name, y.Name));
    writer.WriteStartObject();
    foreach (var (name, value) in properties)
    {
        writer.WritePropertyName(NormalizeString(name)!);
        WriteCanonical(writer, value);
    }
    writer.WriteEndObject();
}
/// <summary>
/// Writes an array, preserving element order and recursing into each item.
/// </summary>
private void WriteArray(Utf8JsonWriter writer, JsonElement element)
{
    writer.WriteStartArray();
    foreach (var item in element.EnumerateArray())
    {
        WriteCanonical(writer, item);
    }
    writer.WriteEndArray();
}
/// <summary>
/// Applies NFC normalization to a string when enabled; returns the input
/// unchanged when it is null, normalization is disabled, or it is already NFC.
/// </summary>
private string? NormalizeString(string? value)
{
    if (value is null || !_enableNfcNormalization)
    {
        return value;
    }
    // Only normalize if the string is not already in NFC form
    if (value.IsNormalized(NormalizationForm.FormC))
    {
        return value;
    }
    return value.Normalize(NormalizationForm.FormC);
}
/// <summary>
/// Emits a JSON number in normalized canonical form; the normalized text is
/// written raw since it is already valid JSON.
/// </summary>
private static void WriteNumber(Utf8JsonWriter writer, JsonElement element)
{
    var raw = element.GetRawText();
    writer.WriteRawValue(NormalizeNumberString(raw), skipInputValidation: true);
}
/// <summary>
/// Parses a raw JSON number literal and rewrites it in minimal decimal form:
/// exponent expanded, no leading/trailing zeros, zero as "0", sign preserved
/// (negative zero normalizes to "0").
/// </summary>
/// <exception cref="FormatException">The input is not a valid JSON number.</exception>
/// <remarks>
/// NOTE(review): RFC 8785 specifies ECMAScript number serialization, which can
/// emit exponent notation for extreme magnitudes — confirm this expansion-based
/// form is intended.
/// </remarks>
private static string NormalizeNumberString(string raw)
{
    if (string.IsNullOrWhiteSpace(raw))
    {
        throw new FormatException("Invalid JSON number.");
    }
    var index = 0;
    // Optional leading sign.
    var negative = raw[index] == '-';
    if (negative)
    {
        index++;
    }
    // Integer part: at least one digit required.
    var intStart = index;
    while (index < raw.Length && char.IsDigit(raw[index]))
    {
        index++;
    }
    if (index == intStart)
    {
        throw new FormatException($"Invalid JSON number: '{raw}'.");
    }
    var intPart = raw[intStart..index];
    // Optional fractional part after '.'.
    var fracPart = string.Empty;
    if (index < raw.Length && raw[index] == '.')
    {
        index++;
        var fracStart = index;
        while (index < raw.Length && char.IsDigit(raw[index]))
        {
            index++;
        }
        if (index == fracStart)
        {
            throw new FormatException($"Invalid JSON number: '{raw}'.");
        }
        fracPart = raw[fracStart..index];
    }
    // Optional exponent: 'e'/'E' with optional sign.
    var exponent = 0;
    if (index < raw.Length && (raw[index] == 'e' || raw[index] == 'E'))
    {
        index++;
        var expNegative = false;
        if (index < raw.Length && (raw[index] == '+' || raw[index] == '-'))
        {
            expNegative = raw[index] == '-';
            index++;
        }
        var expStart = index;
        while (index < raw.Length && char.IsDigit(raw[index]))
        {
            index++;
        }
        if (index == expStart)
        {
            throw new FormatException($"Invalid JSON number: '{raw}'.");
        }
        var expValue = int.Parse(raw[expStart..index], CultureInfo.InvariantCulture);
        exponent = expNegative ? -expValue : expValue;
    }
    // Trailing characters make the literal invalid.
    if (index != raw.Length)
    {
        throw new FormatException($"Invalid JSON number: '{raw}'.");
    }
    // Collapse integer+fraction; all-zero digit strings normalize to "0".
    var digits = (intPart + fracPart).TrimStart('0');
    if (digits.Length == 0)
    {
        return "0";
    }
    // Effective power of ten after absorbing the fraction into the digits.
    var decimalExponent = exponent - fracPart.Length;
    var normalized = decimalExponent >= 0
        ? digits + new string('0', decimalExponent)
        : InsertDecimalPoint(digits, decimalExponent);
    return negative ? "-" + normalized : normalized;
}
/// <summary>
/// Places a decimal point into a bare digit string for a negative decimal
/// exponent, trimming redundant leading and trailing zeros.
/// </summary>
private static string InsertDecimalPoint(string digits, int decimalExponent)
{
    // Index where the decimal point falls within (or before) the digit string.
    var position = digits.Length + decimalExponent;
    if (position > 0)
    {
        var integerPart = digits[..position].TrimStart('0');
        if (integerPart.Length == 0)
        {
            integerPart = "0";
        }
        var fractionalPart = digits[position..].TrimEnd('0');
        if (fractionalPart.Length == 0)
        {
            return integerPart;
        }
        return $"{integerPart}.{fractionalPart}";
    }
    // All digits fall after the point: pad with leading zeros.
    var zeros = new string('0', -position);
    var fraction = (zeros + digits).TrimEnd('0');
    return $"0.{fraction}";
}
}

View File

@@ -0,0 +1,23 @@
namespace StellaOps.Attestor.ProofChain.Json;
/// <summary>
/// A single schema validation error produced while checking a predicate
/// document against its expected structure.
/// </summary>
public sealed record SchemaValidationError
{
    /// <summary>
    /// JSON Pointer to the location of the error (e.g. "/comparedAt"; "/" for
    /// document-level errors).
    /// </summary>
    public required string Path { get; init; }

    /// <summary>
    /// Human-readable error message.
    /// </summary>
    public required string Message { get; init; }

    /// <summary>
    /// Schema keyword that failed (e.g. "required", "type"), when applicable.
    /// </summary>
    public string? Keyword { get; init; }
}

View File

@@ -0,0 +1,39 @@
using System.Text.Json;
using System.Text.Json.Nodes;
namespace StellaOps.Attestor.ProofChain.Json;
/// <summary>
/// JSON Schema validation result: a validity flag plus any collected errors.
/// </summary>
public sealed record SchemaValidationResult
{
    /// <summary>
    /// Whether the JSON is valid against the schema.
    /// </summary>
    public required bool IsValid { get; init; }

    /// <summary>
    /// Validation errors if any; empty when <see cref="IsValid"/> is true.
    /// </summary>
    public required IReadOnlyList<SchemaValidationError> Errors { get; init; }

    /// <summary>
    /// Create a successful validation result with no errors.
    /// </summary>
    public static SchemaValidationResult Success() => new()
    {
        IsValid = true,
        Errors = []
    };

    /// <summary>
    /// Create a failed validation result carrying the given errors.
    /// </summary>
    public static SchemaValidationResult Failure(params SchemaValidationError[] errors) => new()
    {
        IsValid = false,
        Errors = errors
    };
}

View File

@@ -0,0 +1,75 @@
// -----------------------------------------------------------------------------
// ComponentRefExtractor.Resolution.cs
// PURL resolution and helper methods for ComponentRefExtractor.
// -----------------------------------------------------------------------------
using System.Text.Json;
namespace StellaOps.Attestor.ProofChain.Linking;
/// <summary>
/// PURL resolution and SPDX 3.0 helper methods.
/// </summary>
public sealed partial class ComponentRefExtractor
{
    /// <summary>
    /// Resolves a Package URL to the bom-ref of a matching extracted component.
    /// Tries a case-insensitive exact match first, then falls back to matching
    /// with the version segment stripped from both sides.
    /// </summary>
    /// <param name="purl">The Package URL to resolve.</param>
    /// <param name="extraction">The SBOM extraction result to search.</param>
    /// <returns>The matching bom-ref, or null when no component matches.</returns>
    public string? ResolvePurlToBomRef(string purl, SbomExtractionResult extraction)
    {
        ArgumentNullException.ThrowIfNull(extraction);
        if (string.IsNullOrWhiteSpace(purl))
        {
            return null;
        }

        // Pass 1: exact case-insensitive match.
        foreach (var component in extraction.ComponentRefs)
        {
            if (string.Equals(component.Purl, purl, StringComparison.OrdinalIgnoreCase))
            {
                return component.BomRef;
            }
        }

        // Pass 2: match ignoring the version qualifier.
        var versionless = RemoveVersionFromPurl(purl);
        foreach (var component in extraction.ComponentRefs)
        {
            if (component.Purl != null &&
                RemoveVersionFromPurl(component.Purl).Equals(versionless, StringComparison.OrdinalIgnoreCase))
            {
                return component.BomRef;
            }
        }

        return null;
    }

    // Strips everything from the last '@' onward; a leading '@' is not treated
    // as a version separator.
    private static string RemoveVersionFromPurl(string purl)
    {
        var separatorIndex = purl.LastIndexOf('@');
        return separatorIndex <= 0 ? purl : purl[..separatorIndex];
    }

    // Maps an SPDX 3.0 JSON-LD graph element to a ComponentRef; only elements
    // whose @type mentions "Package" and that carry an @id are extracted.
    private static ComponentRef? ExtractSpdx3Element(JsonElement element)
    {
        if (!element.TryGetProperty("@type", out var typeProp) ||
            typeProp.GetString()?.Contains("Package") != true)
        {
            return null;
        }

        if (!element.TryGetProperty("@id", out var idProp) ||
            idProp.GetString() is not { } spdxId)
        {
            return null;
        }

        var name = element.TryGetProperty("name", out var nameProp)
            ? nameProp.GetString()
            : null;

        return new ComponentRef
        {
            BomRef = spdxId,
            Name = name ?? string.Empty,
            Format = SbomFormat.Spdx3
        };
    }
}

View File

@@ -0,0 +1,93 @@
// -----------------------------------------------------------------------------
// ComponentRefExtractor.Spdx.cs
// SPDX extraction methods for ComponentRefExtractor.
// -----------------------------------------------------------------------------
using System.Text.Json;
namespace StellaOps.Attestor.ProofChain.Linking;
/// <summary>
/// SPDX extraction methods.
/// </summary>
public sealed partial class ComponentRefExtractor
{
    /// <summary>
    /// Extracts component references from an SPDX SBOM, handling both the
    /// SPDX 2.x "packages" array and the SPDX 3.0 JSON-LD "@graph" array.
    /// </summary>
    /// <param name="sbomJson">The SPDX JSON document.</param>
    /// <returns>Extracted component references.</returns>
    public SbomExtractionResult ExtractFromSpdx(JsonDocument sbomJson)
    {
        ArgumentNullException.ThrowIfNull(sbomJson);

        var root = sbomJson.RootElement;
        var componentRefs = new List<ComponentRef>();

        // SPDX 2.x layout.
        if (root.TryGetProperty("packages", out var packages))
        {
            foreach (var package in packages.EnumerateArray())
            {
                if (ExtractSpdx2Package(package) is { } componentRef)
                {
                    componentRefs.Add(componentRef);
                }
            }
        }

        // SPDX 3.0 JSON-LD layout.
        if (root.TryGetProperty("@graph", out var graph))
        {
            foreach (var element in graph.EnumerateArray())
            {
                if (ExtractSpdx3Element(element) is { } componentRef)
                {
                    componentRefs.Add(componentRef);
                }
            }
        }

        var documentId = root.TryGetProperty("SPDXID", out var docIdProp)
            ? docIdProp.GetString()
            : null;

        return new SbomExtractionResult
        {
            Format = SbomFormat.Spdx,
            SerialNumber = documentId,
            ComponentRefs = componentRefs
        };
    }

    // Maps an SPDX 2.x package entry to a ComponentRef; entries without an
    // SPDXID are skipped.
    private static ComponentRef? ExtractSpdx2Package(JsonElement package)
    {
        if (!package.TryGetProperty("SPDXID", out var idProp) ||
            idProp.GetString() is not { } spdxId)
        {
            return null;
        }

        var name = package.TryGetProperty("name", out var nameProp)
            ? nameProp.GetString()
            : null;
        var version = package.TryGetProperty("versionInfo", out var versionProp)
            ? versionProp.GetString()
            : null;

        return new ComponentRef
        {
            BomRef = spdxId,
            Name = name ?? string.Empty,
            Version = version,
            Purl = ExtractPurlFromExternalRefs(package),
            Format = SbomFormat.Spdx
        };
    }

    // Returns the first externalRefs entry of referenceType "purl", if any.
    private static string? ExtractPurlFromExternalRefs(JsonElement package)
    {
        if (!package.TryGetProperty("externalRefs", out var externalRefs))
        {
            return null;
        }

        foreach (var reference in externalRefs.EnumerateArray())
        {
            if (reference.TryGetProperty("referenceType", out var referenceType) &&
                referenceType.GetString() == "purl" &&
                reference.TryGetProperty("referenceLocator", out var locator))
            {
                return locator.GetString();
            }
        }

        return null;
    }
}

View File

@@ -12,7 +12,7 @@ namespace StellaOps.Attestor.ProofChain.Linking;
/// <summary>
/// Extracts component references from SBOM documents for VEX cross-linking.
/// </summary>
public sealed class ComponentRefExtractor
public sealed partial class ComponentRefExtractor
{
/// <summary>
/// Extracts component references from a CycloneDX SBOM.
@@ -60,7 +60,6 @@ public sealed class ComponentRefExtractor
}
}
// Extract serial number
string? serialNumber = null;
if (root.TryGetProperty("serialNumber", out var serialProp))
{
@@ -74,192 +73,4 @@ public sealed class ComponentRefExtractor
ComponentRefs = components
};
}
/// <summary>
/// Extracts component references from an SPDX SBOM.
/// </summary>
/// <param name="sbomJson">The SPDX JSON document.</param>
/// <returns>Extracted component references.</returns>
public SbomExtractionResult ExtractFromSpdx(JsonDocument sbomJson)
{
ArgumentNullException.ThrowIfNull(sbomJson);
var components = new List<ComponentRef>();
var root = sbomJson.RootElement;
// SPDX 2.x uses "packages"
if (root.TryGetProperty("packages", out var packagesArray))
{
foreach (var package in packagesArray.EnumerateArray())
{
var spdxId = package.TryGetProperty("SPDXID", out var spdxIdProp)
? spdxIdProp.GetString()
: null;
var name = package.TryGetProperty("name", out var nameProp)
? nameProp.GetString()
: null;
var version = package.TryGetProperty("versionInfo", out var versionProp)
? versionProp.GetString()
: null;
// Extract PURL from external refs
string? purl = null;
if (package.TryGetProperty("externalRefs", out var externalRefs))
{
foreach (var extRef in externalRefs.EnumerateArray())
{
if (extRef.TryGetProperty("referenceType", out var refType) &&
refType.GetString() == "purl" &&
extRef.TryGetProperty("referenceLocator", out var locator))
{
purl = locator.GetString();
break;
}
}
}
if (spdxId != null)
{
components.Add(new ComponentRef
{
BomRef = spdxId,
Name = name ?? string.Empty,
Version = version,
Purl = purl,
Format = SbomFormat.Spdx
});
}
}
}
// SPDX 3.0 uses "elements" with @graph
if (root.TryGetProperty("@graph", out var graphArray))
{
foreach (var element in graphArray.EnumerateArray())
{
if (element.TryGetProperty("@type", out var typeProp) &&
typeProp.GetString()?.Contains("Package") == true)
{
var spdxId = element.TryGetProperty("@id", out var idProp)
? idProp.GetString()
: null;
var name = element.TryGetProperty("name", out var nameProp)
? nameProp.GetString()
: null;
if (spdxId != null)
{
components.Add(new ComponentRef
{
BomRef = spdxId,
Name = name ?? string.Empty,
Format = SbomFormat.Spdx3
});
}
}
}
}
// Extract document ID
string? docId = null;
if (root.TryGetProperty("SPDXID", out var docIdProp))
{
docId = docIdProp.GetString();
}
return new SbomExtractionResult
{
Format = SbomFormat.Spdx,
SerialNumber = docId,
ComponentRefs = components
};
}
/// <summary>
/// Resolves a Package URL to its bom-ref within an extraction result.
/// Tries an exact (case-insensitive) PURL match first, then falls back to
/// matching with the version qualifier stripped from both sides.
/// </summary>
/// <param name="purl">The Package URL to resolve.</param>
/// <param name="extraction">The SBOM extraction result to search.</param>
/// <returns>The matching bom-ref, or null when no component matches.</returns>
public string? ResolvePurlToBomRef(string purl, SbomExtractionResult extraction)
{
    ArgumentNullException.ThrowIfNull(extraction);
    if (string.IsNullOrWhiteSpace(purl))
    {
        return null;
    }

    // First pass: exact case-insensitive match on the full PURL.
    foreach (var component in extraction.ComponentRefs)
    {
        if (string.Equals(component.Purl, purl, StringComparison.OrdinalIgnoreCase))
        {
            return component.BomRef;
        }
    }

    // Second pass: compare with the "@version" suffix removed.
    var unversioned = RemoveVersionFromPurl(purl);
    foreach (var component in extraction.ComponentRefs)
    {
        if (component.Purl != null &&
            RemoveVersionFromPurl(component.Purl).Equals(unversioned, StringComparison.OrdinalIgnoreCase))
        {
            return component.BomRef;
        }
    }

    return null;
}
/// <summary>
/// Strips the "@version" suffix from a Package URL, if present.
/// Only an '@' that appears after the final '/' separates the version; an
/// earlier '@' (e.g. an unencoded npm scope such as "pkg:npm/@scope/name")
/// belongs to the namespace and must be preserved.
/// </summary>
private static string RemoveVersionFromPurl(string purl)
{
    var atIndex = purl.LastIndexOf('@');
    // An '@' before the last path segment is not a version separator.
    return atIndex > 0 && atIndex > purl.LastIndexOf('/') ? purl[..atIndex] : purl;
}
}
/// <summary>
/// Result of SBOM component extraction.
/// </summary>
public sealed record SbomExtractionResult
{
/// <summary>SBOM format.</summary>
public required SbomFormat Format { get; init; }
/// <summary>Document serial number or ID.</summary>
public string? SerialNumber { get; init; }
/// <summary>Extracted component references.</summary>
public required IReadOnlyList<ComponentRef> ComponentRefs { get; init; }
}
/// <summary>
/// A component reference from an SBOM.
/// </summary>
public sealed record ComponentRef
{
/// <summary>CycloneDX bom-ref or SPDX SPDXID.</summary>
public string? BomRef { get; init; }
/// <summary>Component name.</summary>
public required string Name { get; init; }
/// <summary>Component version.</summary>
public string? Version { get; init; }
/// <summary>Package URL.</summary>
public string? Purl { get; init; }
/// <summary>Source SBOM format.</summary>
public required SbomFormat Format { get; init; }
}
/// <summary>
/// SBOM format enumeration.
/// </summary>
public enum SbomFormat
{
/// <summary>CycloneDX format.</summary>
CycloneDx,
/// <summary>SPDX 2.x format.</summary>
Spdx,
/// <summary>SPDX 3.0 format.</summary>
Spdx3
}

View File

@@ -0,0 +1,52 @@
namespace StellaOps.Attestor.ProofChain.Linking;
/// <summary>
/// Result of SBOM component extraction.
/// Produced by <see cref="ComponentRefExtractor"/> for both CycloneDX and SPDX documents.
/// </summary>
public sealed record SbomExtractionResult
{
    /// <summary>SBOM format the document was parsed as.</summary>
    public required SbomFormat Format { get; init; }

    /// <summary>Document serial number (CycloneDX "serialNumber") or document ID (SPDX "SPDXID"); null when absent.</summary>
    public string? SerialNumber { get; init; }

    /// <summary>Extracted component references, in document order.</summary>
    public required IReadOnlyList<ComponentRef> ComponentRefs { get; init; }
}
/// <summary>
/// A component reference from an SBOM.
/// Carries just enough identity (bom-ref/SPDXID, name, version, PURL) for VEX cross-linking.
/// </summary>
public sealed record ComponentRef
{
    /// <summary>CycloneDX bom-ref or SPDX SPDXID; null when the source entry had neither.</summary>
    public string? BomRef { get; init; }

    /// <summary>Component name (empty string when the source entry had no name).</summary>
    public required string Name { get; init; }

    /// <summary>Component version, when declared.</summary>
    public string? Version { get; init; }

    /// <summary>Package URL, when declared (e.g. via SPDX externalRefs).</summary>
    public string? Purl { get; init; }

    /// <summary>Source SBOM format this reference was extracted from.</summary>
    public required SbomFormat Format { get; init; }
}
/// <summary>
/// SBOM format enumeration.
/// NOTE: member order is part of the serialized contract; do not reorder.
/// </summary>
public enum SbomFormat
{
    /// <summary>CycloneDX format.</summary>
    CycloneDx,

    /// <summary>SPDX 2.x format.</summary>
    Spdx,

    /// <summary>SPDX 3.0 format.</summary>
    Spdx3
}

View File

@@ -0,0 +1,70 @@
using System.Security.Cryptography;
namespace StellaOps.Attestor.ProofChain.Merkle;
/// <summary>
/// Sorting, padding, and hashing helpers for DeterministicMerkleTreeBuilder.
/// </summary>
public sealed partial class DeterministicMerkleTreeBuilder
{
    /// <summary>
    /// Returns the leaves in deterministic lexicographic byte order.
    /// Ties are broken by original index so the sort is stable for equal leaves.
    /// </summary>
    private static IReadOnlyList<ReadOnlyMemory<byte>> SortLeaves(IReadOnlyList<ReadOnlyMemory<byte>> leaves)
    {
        if (leaves.Count <= 1)
        {
            return leaves;
        }

        // Pair each leaf with its original index for a stable sort.
        var indexed = new List<(ReadOnlyMemory<byte> Value, int Index)>(leaves.Count);
        for (var i = 0; i < leaves.Count; i++)
        {
            indexed.Add((leaves[i], i));
        }

        indexed.Sort(static (left, right) =>
        {
            var comparison = CompareBytes(left.Value.Span, right.Value.Span);
            return comparison != 0 ? comparison : left.Index.CompareTo(right.Index);
        });

        var ordered = new ReadOnlyMemory<byte>[indexed.Count];
        for (var i = 0; i < indexed.Count; i++)
        {
            ordered[i] = indexed[i].Value;
        }
        return ordered;
    }

    /// <summary>
    /// Lexicographic comparison of two byte sequences; a proper prefix sorts first.
    /// Delegates to the BCL's <see cref="MemoryExtensions.SequenceCompareTo{T}(ReadOnlySpan{T}, ReadOnlySpan{T})"/>,
    /// which has exactly these semantics (element-wise compare, then length).
    /// </summary>
    private static int CompareBytes(ReadOnlySpan<byte> left, ReadOnlySpan<byte> right)
        => left.SequenceCompareTo(right);

    /// <summary>
    /// Returns the smallest power of two that is at least <paramref name="count"/>
    /// (returns 1 when count is 0 or 1).
    /// </summary>
    private static int PadToPowerOfTwo(int count)
    {
        var power = 1;
        while (power < count)
        {
            power <<= 1;
        }
        return power;
    }

    /// <summary>
    /// Hashes an internal node as SHA-256(left || right).
    /// </summary>
    private static byte[] HashInternal(byte[] left, byte[] right)
    {
        var buffer = new byte[left.Length + right.Length];
        Buffer.BlockCopy(left, 0, buffer, 0, left.Length);
        Buffer.BlockCopy(right, 0, buffer, left.Length, right.Length);
        return SHA256.HashData(buffer);
    }
}

View File

@@ -0,0 +1,80 @@
using System.Security.Cryptography;
namespace StellaOps.Attestor.ProofChain.Merkle;
/// <summary>
/// Proof generation and verification methods for DeterministicMerkleTreeBuilder.
/// </summary>
public sealed partial class DeterministicMerkleTreeBuilder
{
    /// <inheritdoc />
    public MerkleProof GenerateProof(MerkleTreeWithProofs tree, int leafIndex)
    {
        ArgumentNullException.ThrowIfNull(tree);
        if (leafIndex < 0 || leafIndex >= tree.Leaves.Count)
        {
            throw new ArgumentOutOfRangeException(nameof(leafIndex),
                $"Leaf index must be between 0 and {tree.Leaves.Count - 1}.");
        }

        var steps = new List<MerkleProofStep>();
        var position = leafIndex;

        // Walk every level below the root, recording the sibling needed at each step.
        for (var level = 0; level < tree.Levels.Count - 1; level++)
        {
            var nodes = tree.Levels[level];

            // An even position is a left child (sibling to the right); odd is a right child.
            var siblingOnRight = position % 2 == 0;
            var siblingPosition = siblingOnRight ? position + 1 : position - 1;

            steps.Add(new MerkleProofStep
            {
                SiblingHash = nodes[siblingPosition],
                IsRight = siblingOnRight
            });

            // Advance to the parent's position on the next level.
            position /= 2;
        }

        return new MerkleProof
        {
            LeafIndex = leafIndex,
            LeafHash = tree.Leaves[leafIndex],
            Steps = steps
        };
    }

    /// <inheritdoc />
    public bool VerifyProof(MerkleProof proof, ReadOnlySpan<byte> leafValue, ReadOnlySpan<byte> expectedRoot)
    {
        ArgumentNullException.ThrowIfNull(proof);

        // Re-derive the root by hashing the leaf and folding in each sibling bottom-up.
        var current = SHA256.HashData(leafValue);
        foreach (var step in proof.Steps)
        {
            current = step.IsRight
                ? HashInternal(current, step.SiblingHash)   // sibling on the right: H(current || sibling)
                : HashInternal(step.SiblingHash, current);  // sibling on the left:  H(sibling || current)
        }

        return current.AsSpan().SequenceEqual(expectedRoot);
    }
}

View File

@@ -1,5 +1,3 @@
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
namespace StellaOps.Attestor.ProofChain.Merkle;
@@ -11,7 +9,7 @@ namespace StellaOps.Attestor.ProofChain.Merkle;
/// - Padding to power of 2 by duplicating last leaf
/// - Left || Right concatenation for internal nodes
/// </summary>
public sealed class DeterministicMerkleTreeBuilder : IMerkleTreeBuilder
public sealed partial class DeterministicMerkleTreeBuilder : IMerkleTreeBuilder
{
/// <inheritdoc />
public byte[] ComputeMerkleRoot(IReadOnlyList<ReadOnlyMemory<byte>> leafValues)
@@ -69,146 +67,4 @@ public sealed class DeterministicMerkleTreeBuilder : IMerkleTreeBuilder
Levels = levels
};
}
/// <inheritdoc />
public MerkleProof GenerateProof(MerkleTreeWithProofs tree, int leafIndex)
{
ArgumentNullException.ThrowIfNull(tree);
if (leafIndex < 0 || leafIndex >= tree.Leaves.Count)
{
throw new ArgumentOutOfRangeException(nameof(leafIndex),
$"Leaf index must be between 0 and {tree.Leaves.Count - 1}.");
}
var steps = new List<MerkleProofStep>();
var currentIndex = leafIndex;
for (var level = 0; level < tree.Levels.Count - 1; level++)
{
var currentLevel = tree.Levels[level];
// Find sibling
int siblingIndex;
bool isRight;
if (currentIndex % 2 == 0)
{
// Current is left child, sibling is right
siblingIndex = currentIndex + 1;
isRight = true;
}
else
{
// Current is right child, sibling is left
siblingIndex = currentIndex - 1;
isRight = false;
}
steps.Add(new MerkleProofStep
{
SiblingHash = currentLevel[siblingIndex],
IsRight = isRight
});
// Move to parent index
currentIndex /= 2;
}
return new MerkleProof
{
LeafIndex = leafIndex,
LeafHash = tree.Leaves[leafIndex],
Steps = steps
};
}
/// <inheritdoc />
public bool VerifyProof(MerkleProof proof, ReadOnlySpan<byte> leafValue, ReadOnlySpan<byte> expectedRoot)
{
ArgumentNullException.ThrowIfNull(proof);
// Hash the leaf value
var currentHash = SHA256.HashData(leafValue);
// Walk up the tree
foreach (var step in proof.Steps)
{
if (step.IsRight)
{
// Sibling is on the right: H(current || sibling)
currentHash = HashInternal(currentHash, step.SiblingHash);
}
else
{
// Sibling is on the left: H(sibling || current)
currentHash = HashInternal(step.SiblingHash, currentHash);
}
}
// Compare with expected root
return currentHash.AsSpan().SequenceEqual(expectedRoot);
}
private static IReadOnlyList<ReadOnlyMemory<byte>> SortLeaves(IReadOnlyList<ReadOnlyMemory<byte>> leaves)
{
if (leaves.Count <= 1)
{
return leaves;
}
var indexed = new List<(ReadOnlyMemory<byte> Value, int Index)>(leaves.Count);
for (var i = 0; i < leaves.Count; i++)
{
indexed.Add((leaves[i], i));
}
indexed.Sort(static (left, right) =>
{
var comparison = CompareBytes(left.Value.Span, right.Value.Span);
return comparison != 0 ? comparison : left.Index.CompareTo(right.Index);
});
var ordered = new ReadOnlyMemory<byte>[indexed.Count];
for (var i = 0; i < indexed.Count; i++)
{
ordered[i] = indexed[i].Value;
}
return ordered;
}
private static int CompareBytes(ReadOnlySpan<byte> left, ReadOnlySpan<byte> right)
{
var min = Math.Min(left.Length, right.Length);
for (var i = 0; i < min; i++)
{
var diff = left[i].CompareTo(right[i]);
if (diff != 0)
{
return diff;
}
}
return left.Length.CompareTo(right.Length);
}
private static int PadToPowerOfTwo(int count)
{
var power = 1;
while (power < count)
{
power <<= 1;
}
return power;
}
private static byte[] HashInternal(byte[] left, byte[] right)
{
var buffer = new byte[left.Length + right.Length];
Buffer.BlockCopy(left, 0, buffer, 0, left.Length);
Buffer.BlockCopy(right, 0, buffer, left.Length, right.Length);
return SHA256.HashData(buffer);
}
}

View File

@@ -1,6 +1,3 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Attestor.ProofChain.Merkle;
/// <summary>
@@ -39,67 +36,3 @@ public interface IMerkleTreeBuilder
/// <returns>True if the proof is valid.</returns>
bool VerifyProof(MerkleProof proof, ReadOnlySpan<byte> leafValue, ReadOnlySpan<byte> expectedRoot);
}
/// <summary>
/// A merkle tree with all internal nodes stored for proof generation.
/// </summary>
public sealed record MerkleTreeWithProofs
{
/// <summary>
/// The merkle root.
/// </summary>
public required byte[] Root { get; init; }
/// <summary>
/// The leaf hashes (level 0).
/// </summary>
public required IReadOnlyList<byte[]> Leaves { get; init; }
/// <summary>
/// All levels of the tree, from leaves (index 0) to root.
/// </summary>
public required IReadOnlyList<IReadOnlyList<byte[]>> Levels { get; init; }
/// <summary>
/// The depth of the tree (number of levels - 1).
/// </summary>
public int Depth => Levels.Count - 1;
}
/// <summary>
/// A merkle proof for a specific leaf.
/// </summary>
public sealed record MerkleProof
{
/// <summary>
/// The index of the leaf in the original list.
/// </summary>
public required int LeafIndex { get; init; }
/// <summary>
/// The hash of the leaf.
/// </summary>
public required byte[] LeafHash { get; init; }
/// <summary>
/// The sibling hashes needed to reconstruct the root, from bottom to top.
/// </summary>
public required IReadOnlyList<MerkleProofStep> Steps { get; init; }
}
/// <summary>
/// A single step in a merkle proof.
/// </summary>
public sealed record MerkleProofStep
{
/// <summary>
/// The sibling hash at this level.
/// </summary>
public required byte[] SiblingHash { get; init; }
/// <summary>
/// Whether the sibling is on the right (true) or left (false).
/// </summary>
public required bool IsRight { get; init; }
}

View File

@@ -0,0 +1,22 @@
namespace StellaOps.Attestor.ProofChain.Merkle;
/// <summary>
/// A merkle proof for a specific leaf.
/// Replaying the steps bottom-up against the leaf hash reconstructs the root.
/// </summary>
public sealed record MerkleProof
{
    /// <summary>
    /// The index of the leaf in the original list.
    /// </summary>
    public required int LeafIndex { get; init; }

    /// <summary>
    /// The hash of the leaf.
    /// </summary>
    public required byte[] LeafHash { get; init; }

    /// <summary>
    /// The sibling hashes needed to reconstruct the root, from bottom to top.
    /// </summary>
    public required IReadOnlyList<MerkleProofStep> Steps { get; init; }
}

View File

@@ -0,0 +1,17 @@
namespace StellaOps.Attestor.ProofChain.Merkle;
/// <summary>
/// A single step in a merkle proof.
/// </summary>
public sealed record MerkleProofStep
{
    /// <summary>
    /// The sibling hash at this level.
    /// </summary>
    public required byte[] SiblingHash { get; init; }

    /// <summary>
    /// Whether the sibling is on the right (true) or left (false).
    /// When true, verification computes H(current || sibling); otherwise H(sibling || current).
    /// </summary>
    public required bool IsRight { get; init; }
}

View File

@@ -0,0 +1,27 @@
namespace StellaOps.Attestor.ProofChain.Merkle;
/// <summary>
/// A merkle tree with all internal nodes stored for proof generation.
/// </summary>
public sealed record MerkleTreeWithProofs
{
    /// <summary>
    /// The merkle root.
    /// </summary>
    public required byte[] Root { get; init; }

    /// <summary>
    /// The leaf hashes (level 0 of <see cref="Levels"/>).
    /// </summary>
    public required IReadOnlyList<byte[]> Leaves { get; init; }

    /// <summary>
    /// All levels of the tree, from leaves (index 0) to root.
    /// </summary>
    public required IReadOnlyList<IReadOnlyList<byte[]>> Levels { get; init; }

    /// <summary>
    /// The depth of the tree (number of levels - 1).
    /// </summary>
    public int Depth => Levels.Count - 1;
}

View File

@@ -1,13 +1,3 @@
using StellaOps.Attestor.ProofChain.Identifiers;
using StellaOps.Attestor.ProofChain.Receipts;
using StellaOps.Attestor.ProofChain.Signing;
using StellaOps.Attestor.ProofChain.Statements;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Attestor.ProofChain.Pipeline;
/// <summary>
@@ -25,127 +15,3 @@ public interface IProofChainPipeline
ProofChainRequest request,
CancellationToken ct = default);
}
/// <summary>
/// Request to execute the proof chain pipeline.
/// </summary>
public sealed record ProofChainRequest
{
/// <summary>
/// The SBOM bytes to process.
/// </summary>
public required byte[] SbomBytes { get; init; }
/// <summary>
/// Media type of the SBOM (e.g., "application/vnd.cyclonedx+json").
/// </summary>
public required string SbomMediaType { get; init; }
/// <summary>
/// Evidence gathered from scanning.
/// </summary>
public required IReadOnlyList<EvidencePayload> Evidence { get; init; }
/// <summary>
/// Policy version used for evaluation.
/// </summary>
public required string PolicyVersion { get; init; }
/// <summary>
/// Trust anchor for verification.
/// </summary>
public required TrustAnchorId TrustAnchorId { get; init; }
/// <summary>
/// Whether to submit envelopes to Rekor.
/// </summary>
public bool SubmitToRekor { get; init; } = true;
/// <summary>
/// Subject information for the attestations.
/// </summary>
public required PipelineSubject Subject { get; init; }
}
/// <summary>
/// Subject information for the pipeline.
/// </summary>
public sealed record PipelineSubject
{
/// <summary>
/// Name of the subject (e.g., image reference).
/// </summary>
public required string Name { get; init; }
/// <summary>
/// Digests of the subject.
/// </summary>
public required IReadOnlyDictionary<string, string> Digest { get; init; }
}
/// <summary>
/// Result of the proof chain pipeline.
/// </summary>
public sealed record ProofChainResult
{
/// <summary>
/// The assembled proof bundle ID.
/// </summary>
public required ProofBundleId ProofBundleId { get; init; }
/// <summary>
/// All signed DSSE envelopes produced.
/// </summary>
public required IReadOnlyList<DsseEnvelope> Envelopes { get; init; }
/// <summary>
/// The proof spine statement.
/// </summary>
public required ProofSpineStatement ProofSpine { get; init; }
/// <summary>
/// Rekor entries if submitted.
/// </summary>
public IReadOnlyList<RekorEntry>? RekorEntries { get; init; }
/// <summary>
/// Verification receipt.
/// </summary>
public required VerificationReceipt Receipt { get; init; }
/// <summary>
/// Graph revision ID for this evaluation.
/// </summary>
public required GraphRevisionId GraphRevisionId { get; init; }
}
/// <summary>
/// A Rekor transparency log entry.
/// </summary>
public sealed record RekorEntry
{
/// <summary>
/// The log index in Rekor.
/// </summary>
public required long LogIndex { get; init; }
/// <summary>
/// The UUID of the entry.
/// </summary>
public required string Uuid { get; init; }
/// <summary>
/// The integrated time (when the entry was added).
/// </summary>
public required DateTimeOffset IntegratedTime { get; init; }
/// <summary>
/// The log ID (tree hash).
/// </summary>
public required string LogId { get; init; }
/// <summary>
/// The body of the entry (base64-encoded).
/// </summary>
public string? Body { get; init; }
}

View File

@@ -0,0 +1,17 @@
namespace StellaOps.Attestor.ProofChain.Pipeline;
/// <summary>
/// Subject information for the pipeline.
/// Identifies the artifact that the produced attestations are about.
/// </summary>
public sealed record PipelineSubject
{
    /// <summary>
    /// Name of the subject (e.g., image reference).
    /// </summary>
    public required string Name { get; init; }

    /// <summary>
    /// Digests of the subject, keyed by algorithm.
    /// NOTE(review): presumably keys like "sha256" mapping to hex digests — confirm against producers.
    /// </summary>
    public required IReadOnlyDictionary<string, string> Digest { get; init; }
}

View File

@@ -0,0 +1,45 @@
using StellaOps.Attestor.ProofChain.Identifiers;
using StellaOps.Attestor.ProofChain.Statements;
namespace StellaOps.Attestor.ProofChain.Pipeline;
/// <summary>
/// Request to execute the proof chain pipeline.
/// Bundles the SBOM, scan evidence, policy version, and subject that the
/// pipeline turns into signed attestations.
/// </summary>
public sealed record ProofChainRequest
{
    /// <summary>
    /// The SBOM bytes to process.
    /// </summary>
    public required byte[] SbomBytes { get; init; }

    /// <summary>
    /// Media type of the SBOM (e.g., "application/vnd.cyclonedx+json").
    /// </summary>
    public required string SbomMediaType { get; init; }

    /// <summary>
    /// Evidence gathered from scanning.
    /// </summary>
    public required IReadOnlyList<EvidencePayload> Evidence { get; init; }

    /// <summary>
    /// Policy version used for evaluation.
    /// </summary>
    public required string PolicyVersion { get; init; }

    /// <summary>
    /// Trust anchor for verification.
    /// </summary>
    public required TrustAnchorId TrustAnchorId { get; init; }

    /// <summary>
    /// Whether to submit envelopes to Rekor. Defaults to true.
    /// </summary>
    public bool SubmitToRekor { get; init; } = true;

    /// <summary>
    /// Subject information for the attestations.
    /// </summary>
    public required PipelineSubject Subject { get; init; }
}

View File

@@ -0,0 +1,42 @@
using StellaOps.Attestor.ProofChain.Identifiers;
using StellaOps.Attestor.ProofChain.Receipts;
using StellaOps.Attestor.ProofChain.Signing;
using StellaOps.Attestor.ProofChain.Statements;
namespace StellaOps.Attestor.ProofChain.Pipeline;
/// <summary>
/// Result of the proof chain pipeline.
/// </summary>
public sealed record ProofChainResult
{
    /// <summary>
    /// The assembled proof bundle ID.
    /// </summary>
    public required ProofBundleId ProofBundleId { get; init; }

    /// <summary>
    /// All signed DSSE envelopes produced.
    /// </summary>
    public required IReadOnlyList<DsseEnvelope> Envelopes { get; init; }

    /// <summary>
    /// The proof spine statement.
    /// </summary>
    public required ProofSpineStatement ProofSpine { get; init; }

    /// <summary>
    /// Rekor entries if submitted; null when Rekor submission was skipped.
    /// </summary>
    public IReadOnlyList<RekorEntry>? RekorEntries { get; init; }

    /// <summary>
    /// Verification receipt.
    /// </summary>
    public required VerificationReceipt Receipt { get; init; }

    /// <summary>
    /// Graph revision ID for this evaluation.
    /// </summary>
    public required GraphRevisionId GraphRevisionId { get; init; }
}

View File

@@ -0,0 +1,32 @@
namespace StellaOps.Attestor.ProofChain.Pipeline;
/// <summary>
/// A Rekor transparency log entry.
/// </summary>
public sealed record RekorEntry
{
    /// <summary>
    /// The log index in Rekor.
    /// </summary>
    public required long LogIndex { get; init; }

    /// <summary>
    /// The UUID of the entry.
    /// </summary>
    public required string Uuid { get; init; }

    /// <summary>
    /// The integrated time (when the entry was added to the log).
    /// </summary>
    public required DateTimeOffset IntegratedTime { get; init; }

    /// <summary>
    /// The log ID (tree hash).
    /// </summary>
    public required string LogId { get; init; }

    /// <summary>
    /// The body of the entry (base64-encoded); null when not captured.
    /// </summary>
    public string? Body { get; init; }
}

View File

@@ -0,0 +1,27 @@
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
/// <summary>
/// Authority level for AI-generated artifacts.
/// Determines how the artifact should be treated in decisioning.
/// Serialized as a string via <see cref="JsonStringEnumConverter{T}"/>.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<AIArtifactAuthority>))]
public enum AIArtifactAuthority
{
    /// <summary>
    /// Pure suggestion - not backed by evidence, requires human review.
    /// </summary>
    Suggestion,

    /// <summary>
    /// Evidence-backed - citations verified, evidence refs resolvable.
    /// Qualifies when: citation rate >= 80% AND all evidence refs valid.
    /// </summary>
    EvidenceBacked,

    /// <summary>
    /// Meets configurable authority threshold for automated processing.
    /// </summary>
    AuthorityThreshold
}

View File

@@ -2,104 +2,6 @@ using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
/// <summary>
/// Authority level for AI-generated artifacts.
/// Determines how the artifact should be treated in decisioning.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<AIArtifactAuthority>))]
public enum AIArtifactAuthority
{
/// <summary>
/// Pure suggestion - not backed by evidence, requires human review.
/// </summary>
Suggestion,
/// <summary>
/// Evidence-backed - citations verified, evidence refs resolvable.
/// Qualifies when: citation rate ≥ 80% AND all evidence refs valid.
/// </summary>
EvidenceBacked,
/// <summary>
/// Meets configurable authority threshold for automated processing.
/// </summary>
AuthorityThreshold
}
/// <summary>
/// Model identifier format for tracking AI model versions.
/// </summary>
public sealed record AIModelIdentifier
{
/// <summary>
/// Provider of the model (e.g., "anthropic", "openai", "local").
/// </summary>
[JsonPropertyName("provider")]
public required string Provider { get; init; }
/// <summary>
/// Model name/family (e.g., "claude-3-opus", "gpt-4").
/// </summary>
[JsonPropertyName("model")]
public required string Model { get; init; }
/// <summary>
/// Model version string (e.g., "20240229", "0613").
/// </summary>
[JsonPropertyName("version")]
public required string Version { get; init; }
/// <summary>
/// For local models: SHA-256 digest of weights.
/// Null for cloud-hosted models.
/// </summary>
[JsonPropertyName("weightsDigest")]
public string? WeightsDigest { get; init; }
/// <summary>
/// Canonical string representation: provider:model:version
/// </summary>
public override string ToString() =>
$"{Provider}:{Model}:{Version}";
}
/// <summary>
/// Decoding parameters used during AI generation.
/// Required for deterministic replay.
/// </summary>
public sealed record AIDecodingParameters
{
/// <summary>
/// Temperature setting (0.0 = deterministic, higher = more random).
/// </summary>
[JsonPropertyName("temperature")]
public double Temperature { get; init; }
/// <summary>
/// Top-p (nucleus sampling) value.
/// </summary>
[JsonPropertyName("topP")]
public double? TopP { get; init; }
/// <summary>
/// Top-k sampling value.
/// </summary>
[JsonPropertyName("topK")]
public int? TopK { get; init; }
/// <summary>
/// Maximum tokens to generate.
/// </summary>
[JsonPropertyName("maxTokens")]
public int? MaxTokens { get; init; }
/// <summary>
/// Random seed for reproducibility.
/// </summary>
[JsonPropertyName("seed")]
public long? Seed { get; init; }
}
/// <summary>
/// Base predicate for all AI-generated artifacts.
/// Captures metadata required for replay, inspection, and authority classification.

View File

@@ -0,0 +1,47 @@
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
/// <summary>
/// Result of authority classification.
/// Produced by the AIAuthorityClassifier partial methods; the optional fields
/// are populated per artifact kind (citation rates for explanations, evidence
/// counts for remediation plans).
/// </summary>
public sealed record AIAuthorityClassificationResult
{
    /// <summary>
    /// Determined authority level.
    /// </summary>
    public required AIArtifactAuthority Authority { get; init; }

    /// <summary>
    /// Overall quality score (0.0-1.0).
    /// </summary>
    public required double QualityScore { get; init; }

    /// <summary>
    /// Citation rate if applicable (set by explanation classification).
    /// </summary>
    public double? CitationRate { get; init; }

    /// <summary>
    /// Verified citation rate if applicable (set by explanation classification).
    /// </summary>
    public double? VerifiedCitationRate { get; init; }

    /// <summary>
    /// Number of resolvable evidence refs (set by remediation plan classification).
    /// </summary>
    public int? ResolvableEvidenceCount { get; init; }

    /// <summary>
    /// Number of unresolvable evidence refs (set by remediation plan classification).
    /// </summary>
    public int? UnresolvableEvidenceCount { get; init; }

    /// <summary>
    /// Reasons for the classification decision.
    /// </summary>
    public required IReadOnlyList<string> Reasons { get; init; }

    /// <summary>
    /// Whether the artifact can be auto-processed without human review.
    /// </summary>
    public required bool CanAutoProcess { get; init; }
}

View File

@@ -0,0 +1,34 @@
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
public sealed partial class AIAuthorityClassifier
{
    /// <summary>
    /// Classify an explanation predicate.
    /// Derives a quality score and the resulting authority level from the
    /// predicate's citations and confidence.
    /// </summary>
    public AIAuthorityClassificationResult ClassifyExplanation(AIExplanationPredicate predicate)
    {
        var reasons = new List<string>();
        var qualityScore = CalculateExplanationQualityScore(predicate, reasons);

        // Share of citations that were independently verified (0 when there are none).
        var citations = predicate.Citations;
        double verifiedRate = 0;
        if (citations.Count > 0)
        {
            verifiedRate = citations.Count(c => c.Verified) / (double)citations.Count;
        }

        var authority = DetermineAuthority(
            predicate.CitationRate,
            verifiedRate,
            predicate.ConfidenceScore,
            qualityScore,
            reasons);

        return new AIAuthorityClassificationResult
        {
            Authority = authority,
            QualityScore = qualityScore,
            CitationRate = predicate.CitationRate,
            VerifiedCitationRate = verifiedRate,
            Reasons = reasons,
            CanAutoProcess = authority == AIArtifactAuthority.AuthorityThreshold
        };
    }
}

View File

@@ -0,0 +1,26 @@
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
public sealed partial class AIAuthorityClassifier
{
    /// <summary>
    /// Computes a weighted quality score (0.0-1.0) for an explanation predicate
    /// from its citation rate, verified-citation rate, confidence, and content length.
    /// </summary>
    /// <param name="predicate">The explanation predicate to score.</param>
    /// <param name="reasons">Classification reason sink; currently unused here, kept
    /// for signature parity with the other quality calculators.</param>
    private static double CalculateExplanationQualityScore(
        AIExplanationPredicate predicate,
        List<string> reasons)
    {
        // Weights sum to 1.0 so the composite score stays in [0, 1].
        const double citationWeight = 0.35;
        const double verifiedWeight = 0.30;
        const double confidenceWeight = 0.20;
        const double contentWeight = 0.15;

        var verifiedRate = predicate.Citations.Count > 0
            ? (double)predicate.Citations.Count(c => c.Verified) / predicate.Citations.Count
            : 0;

        // Reward explanation length up to ~500 characters, then cap at 1.0.
        var contentScore = Math.Min(1.0, predicate.Content.Length / 500.0);

        return (predicate.CitationRate * citationWeight) +
               (verifiedRate * verifiedWeight) +
               (predicate.ConfidenceScore * confidenceWeight) +
               (contentScore * contentWeight);
    }
}

View File

@@ -0,0 +1,40 @@
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
public sealed partial class AIAuthorityClassifier
{
    /// <summary>
    /// Classify a policy draft predicate.
    /// Authority is driven by the test pass rate, the average rule confidence,
    /// and the overall draft quality score.
    /// </summary>
    public AIAuthorityClassificationResult ClassifyPolicyDraft(AIPolicyDraftPredicate predicate)
    {
        var reasons = new List<string>();

        // Mean confidence across rules; zero when the draft has no rules.
        double avgConfidence = 0;
        if (predicate.Rules.Count > 0)
        {
            avgConfidence = predicate.Rules.Average(r => r.Confidence);
        }

        // Fraction of test cases that passed; zero when there are no test cases.
        double passedTestRate = 0;
        if (predicate.TestCases.Count > 0)
        {
            passedTestRate = predicate.TestCases.Count(t => t.Passed == true) / (double)predicate.TestCases.Count;
        }

        var qualityScore = CalculatePolicyDraftQualityScore(predicate, avgConfidence, passedTestRate, reasons);

        var authority = DetermineAuthority(
            passedTestRate,
            passedTestRate,
            avgConfidence,
            qualityScore,
            reasons);

        // Auto-processing additionally requires a passed validation and deploy readiness.
        var autoProcess = authority == AIArtifactAuthority.AuthorityThreshold
            && predicate.ValidationResult.OverallPassed
            && predicate.DeployReady;

        return new AIAuthorityClassificationResult
        {
            Authority = authority,
            QualityScore = qualityScore,
            Reasons = reasons,
            CanAutoProcess = autoProcess
        };
    }
}

View File

@@ -0,0 +1,28 @@
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
public sealed partial class AIAuthorityClassifier
{
    /// <summary>
    /// Computes a weighted quality score (0.0-1.0) for a policy draft from rule
    /// confidence, test pass rate, validation outcome, and rule clarity.
    /// </summary>
    /// <param name="predicate">The policy draft predicate to score.</param>
    /// <param name="avgConfidence">Mean confidence across the draft's rules.</param>
    /// <param name="passedTestRate">Fraction of test cases that passed.</param>
    /// <param name="reasons">Classification reason sink; currently unused here, kept
    /// for signature parity with the other quality calculators.</param>
    private static double CalculatePolicyDraftQualityScore(
        AIPolicyDraftPredicate predicate,
        double avgConfidence,
        double passedTestRate,
        List<string> reasons)
    {
        // Weights sum to 1.0 so the composite score stays in [0, 1].
        const double confidenceWeight = 0.25;
        const double testWeight = 0.35;
        const double validationWeight = 0.25;
        const double clarityWeight = 0.15;

        // A failed validation still contributes a floor of 0.3 rather than zeroing the term.
        var validationScore = predicate.ValidationResult.OverallPassed ? 1.0 : 0.3;

        // Clarity degrades with ambiguities, saturating at two ambiguities per rule.
        var ambiguityCount = predicate.Rules.Sum(r => r.Ambiguities?.Count ?? 0);
        var clarityScore = predicate.Rules.Count > 0
            ? 1.0 - Math.Min(1.0, ambiguityCount / (predicate.Rules.Count * 2.0))
            : 0;

        return (avgConfidence * confidenceWeight) +
               (passedTestRate * testWeight) +
               (validationScore * validationWeight) +
               (clarityScore * clarityWeight);
    }
}

View File

@@ -0,0 +1,39 @@
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
public sealed partial class AIAuthorityClassifier
{
/// <summary>
/// Classify a remediation plan predicate.
/// </summary>
public AIAuthorityClassificationResult ClassifyRemediationPlan(AIRemediationPlanPredicate predicate)
{
var reasons = new List<string>();
var evidenceRefs = predicate.EvidenceRefs;
var resolvableCount = evidenceRefs.Count(r => _evidenceResolver?.Invoke(r) ?? true);
var unresolvableCount = evidenceRefs.Count - resolvableCount;
var qualityScore = CalculateRemediationQualityScore(predicate, resolvableCount, reasons);
var evidenceBackingRate = evidenceRefs.Count > 0
? (double)resolvableCount / evidenceRefs.Count
: 0;
var authority = DetermineAuthority(
evidenceBackingRate,
evidenceBackingRate,
predicate.RiskAssessment.RiskBefore - predicate.RiskAssessment.RiskAfter,
qualityScore,
reasons);
return new AIAuthorityClassificationResult
{
Authority = authority,
QualityScore = qualityScore,
ResolvableEvidenceCount = resolvableCount,
UnresolvableEvidenceCount = unresolvableCount,
Reasons = reasons,
CanAutoProcess = authority == AIArtifactAuthority.AuthorityThreshold && predicate.PrReady
};
}
}

View File

@@ -0,0 +1,40 @@
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
public sealed partial class AIAuthorityClassifier
{
private double CalculateRemediationQualityScore(
AIRemediationPlanPredicate predicate,
int resolvableCount,
List<string> reasons)
{
var evidenceWeight = 0.30;
var riskDeltaWeight = 0.25;
var automationWeight = 0.20;
var verificationWeight = 0.25;
var evidenceScore = predicate.EvidenceRefs.Count > 0
? (double)resolvableCount / predicate.EvidenceRefs.Count
: 0;
var riskDelta = predicate.ExpectedDelta;
var riskScore = Math.Min(1.0, Math.Max(0, riskDelta));
var autoSteps = predicate.Steps.Count(s => s.CanAutomate);
var automationScore = predicate.Steps.Count > 0
? (double)autoSteps / predicate.Steps.Count
: 0;
var verificationScore = predicate.VerificationStatus switch
{
RemediationVerificationStatus.Verified => 0.8,
RemediationVerificationStatus.Applied => 1.0,
RemediationVerificationStatus.Stale => 0.5,
_ => 0.2
};
return (evidenceScore * evidenceWeight) +
(riskScore * riskDeltaWeight) +
(automationScore * automationWeight) +
(verificationScore * verificationWeight);
}
}

View File

@@ -0,0 +1,43 @@
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
public sealed partial class AIAuthorityClassifier
{
/// <summary>
/// Classify a VEX draft predicate.
/// </summary>
public AIAuthorityClassificationResult ClassifyVexDraft(AIVexDraftPredicate predicate)
{
var reasons = new List<string>();
var evidenceRefs = predicate.EvidenceRefs;
var resolvableCount = evidenceRefs.Count(r => _evidenceResolver?.Invoke(r) ?? true);
var avgConfidence = predicate.VexStatements.Count > 0
? predicate.VexStatements.Average(s => s.Confidence)
: 0;
var qualityScore = CalculateVexDraftQualityScore(
predicate, resolvableCount, avgConfidence, reasons);
var evidenceBackingRate = evidenceRefs.Count > 0
? (double)resolvableCount / evidenceRefs.Count
: 0;
var authority = DetermineAuthority(
evidenceBackingRate,
evidenceBackingRate,
avgConfidence,
qualityScore,
reasons);
return new AIAuthorityClassificationResult
{
Authority = authority,
QualityScore = qualityScore,
ResolvableEvidenceCount = resolvableCount,
UnresolvableEvidenceCount = evidenceRefs.Count - resolvableCount,
Reasons = reasons,
CanAutoProcess = authority == AIArtifactAuthority.AuthorityThreshold && predicate.AutoApprovable
};
}
}

View File

@@ -0,0 +1,32 @@
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
public sealed partial class AIAuthorityClassifier
{
private double CalculateVexDraftQualityScore(
AIVexDraftPredicate predicate,
int resolvableCount,
double avgConfidence,
List<string> reasons)
{
var evidenceWeight = 0.35;
var confidenceWeight = 0.30;
var justificationWeight = 0.20;
var conflictWeight = 0.15;
var evidenceScore = predicate.EvidenceRefs.Count > 0
? (double)resolvableCount / predicate.EvidenceRefs.Count
: 0;
var nonConflicting = predicate.Justifications.Count(j => !j.ConflictsWithExisting);
var conflictScore = predicate.Justifications.Count > 0
? (double)nonConflicting / predicate.Justifications.Count
: 1.0;
var hasJustifications = predicate.Justifications.Count > 0 ? 1.0 : 0.0;
return (evidenceScore * evidenceWeight) +
(avgConfidence * confidenceWeight) +
(hasJustifications * justificationWeight) +
(conflictScore * conflictWeight);
}
}

View File

@@ -1,242 +1,23 @@
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
/// <summary>
/// Configuration for authority classification thresholds.
/// </summary>
public sealed record AIAuthorityThresholds
{
/// <summary>
/// Minimum citation rate for Evidence-Backed classification.
/// Default: 0.8 (80%)
/// </summary>
public double MinCitationRate { get; init; } = 0.8;
/// <summary>
/// Minimum confidence score for Evidence-Backed classification.
/// Default: 0.7 (70%)
/// </summary>
public double MinConfidenceScore { get; init; } = 0.7;
/// <summary>
/// Whether all evidence refs must be resolvable.
/// Default: true
/// </summary>
public bool RequireResolvableEvidence { get; init; } = true;
/// <summary>
/// Minimum verified citations ratio for Evidence-Backed.
/// Default: 0.9 (90%)
/// </summary>
public double MinVerifiedCitationRate { get; init; } = 0.9;
/// <summary>
/// Custom authority threshold score (0.0-1.0) for AuthorityThreshold classification.
/// If overall score meets this, artifact can be auto-processed.
/// Default: 0.95
/// </summary>
public double AuthorityThresholdScore { get; init; } = 0.95;
}
/// <summary>
/// Result of authority classification.
/// </summary>
public sealed record AIAuthorityClassificationResult
{
/// <summary>
/// Determined authority level.
/// </summary>
public required AIArtifactAuthority Authority { get; init; }
/// <summary>
/// Overall quality score (0.0-1.0).
/// </summary>
public required double QualityScore { get; init; }
/// <summary>
/// Citation rate if applicable.
/// </summary>
public double? CitationRate { get; init; }
/// <summary>
/// Verified citation rate if applicable.
/// </summary>
public double? VerifiedCitationRate { get; init; }
/// <summary>
/// Number of resolvable evidence refs.
/// </summary>
public int? ResolvableEvidenceCount { get; init; }
/// <summary>
/// Number of unresolvable evidence refs.
/// </summary>
public int? UnresolvableEvidenceCount { get; init; }
/// <summary>
/// Reasons for the classification decision.
/// </summary>
public required IReadOnlyList<string> Reasons { get; init; }
/// <summary>
/// Whether the artifact can be auto-processed without human review.
/// </summary>
public required bool CanAutoProcess { get; init; }
}
/// <summary>
/// Classifies AI artifacts into authority levels based on evidence backing.
/// Sprint: SPRINT_20251226_018_AI_attestations
/// Task: AIATTEST-07
/// </summary>
public sealed class AIAuthorityClassifier
public sealed partial class AIAuthorityClassifier
{
private readonly AIAuthorityThresholds _thresholds;
private readonly Func<string, bool>? _evidenceResolver;
public AIAuthorityClassifier(AIAuthorityThresholds? thresholds = null, Func<string, bool>? evidenceResolver = null)
public AIAuthorityClassifier(
AIAuthorityThresholds? thresholds = null,
Func<string, bool>? evidenceResolver = null)
{
_thresholds = thresholds ?? new AIAuthorityThresholds();
_evidenceResolver = evidenceResolver;
}
/// <summary>
/// Classify an explanation predicate.
/// </summary>
public AIAuthorityClassificationResult ClassifyExplanation(AIExplanationPredicate predicate)
{
var reasons = new List<string>();
var qualityScore = CalculateExplanationQualityScore(predicate, reasons);
var verifiedRate = predicate.Citations.Count > 0
? (double)predicate.Citations.Count(c => c.Verified) / predicate.Citations.Count
: 0;
var authority = DetermineAuthority(
predicate.CitationRate,
verifiedRate,
predicate.ConfidenceScore,
qualityScore,
reasons);
return new AIAuthorityClassificationResult
{
Authority = authority,
QualityScore = qualityScore,
CitationRate = predicate.CitationRate,
VerifiedCitationRate = verifiedRate,
Reasons = reasons,
CanAutoProcess = authority == AIArtifactAuthority.AuthorityThreshold
};
}
/// <summary>
/// Classify a remediation plan predicate.
/// </summary>
public AIAuthorityClassificationResult ClassifyRemediationPlan(AIRemediationPlanPredicate predicate)
{
var reasons = new List<string>();
var evidenceRefs = predicate.EvidenceRefs;
var resolvableCount = evidenceRefs.Count(r => _evidenceResolver?.Invoke(r) ?? true);
var unresolvableCount = evidenceRefs.Count - resolvableCount;
var qualityScore = CalculateRemediationQualityScore(predicate, resolvableCount, reasons);
var evidenceBackingRate = evidenceRefs.Count > 0
? (double)resolvableCount / evidenceRefs.Count
: 0;
var authority = DetermineAuthority(
evidenceBackingRate,
evidenceBackingRate,
predicate.RiskAssessment.RiskBefore - predicate.RiskAssessment.RiskAfter,
qualityScore,
reasons);
return new AIAuthorityClassificationResult
{
Authority = authority,
QualityScore = qualityScore,
ResolvableEvidenceCount = resolvableCount,
UnresolvableEvidenceCount = unresolvableCount,
Reasons = reasons,
CanAutoProcess = authority == AIArtifactAuthority.AuthorityThreshold && predicate.PrReady
};
}
/// <summary>
/// Classify a VEX draft predicate.
/// </summary>
public AIAuthorityClassificationResult ClassifyVexDraft(AIVexDraftPredicate predicate)
{
var reasons = new List<string>();
var evidenceRefs = predicate.EvidenceRefs;
var resolvableCount = evidenceRefs.Count(r => _evidenceResolver?.Invoke(r) ?? true);
var avgConfidence = predicate.VexStatements.Count > 0
? predicate.VexStatements.Average(s => s.Confidence)
: 0;
var qualityScore = CalculateVexDraftQualityScore(predicate, resolvableCount, avgConfidence, reasons);
var evidenceBackingRate = evidenceRefs.Count > 0
? (double)resolvableCount / evidenceRefs.Count
: 0;
var authority = DetermineAuthority(
evidenceBackingRate,
evidenceBackingRate,
avgConfidence,
qualityScore,
reasons);
return new AIAuthorityClassificationResult
{
Authority = authority,
QualityScore = qualityScore,
ResolvableEvidenceCount = resolvableCount,
UnresolvableEvidenceCount = evidenceRefs.Count - resolvableCount,
Reasons = reasons,
CanAutoProcess = authority == AIArtifactAuthority.AuthorityThreshold && predicate.AutoApprovable
};
}
/// <summary>
/// Classify a policy draft predicate.
/// </summary>
public AIAuthorityClassificationResult ClassifyPolicyDraft(AIPolicyDraftPredicate predicate)
{
var reasons = new List<string>();
var avgConfidence = predicate.Rules.Count > 0
? predicate.Rules.Average(r => r.Confidence)
: 0;
var passedTestRate = predicate.TestCases.Count > 0
? (double)predicate.TestCases.Count(t => t.Passed == true) / predicate.TestCases.Count
: 0;
var qualityScore = CalculatePolicyDraftQualityScore(predicate, avgConfidence, passedTestRate, reasons);
var authority = DetermineAuthority(
passedTestRate,
passedTestRate,
avgConfidence,
qualityScore,
reasons);
return new AIAuthorityClassificationResult
{
Authority = authority,
QualityScore = qualityScore,
Reasons = reasons,
CanAutoProcess = authority == AIArtifactAuthority.AuthorityThreshold
&& predicate.ValidationResult.OverallPassed
&& predicate.DeployReady
};
}
private AIArtifactAuthority DetermineAuthority(
double citationRate,
double verifiedRate,
@@ -269,98 +50,4 @@ public sealed class AIAuthorityClassifier
return AIArtifactAuthority.Suggestion;
}
private double CalculateExplanationQualityScore(AIExplanationPredicate predicate, List<string> reasons)
{
var citationWeight = 0.35;
var verifiedWeight = 0.30;
var confidenceWeight = 0.20;
var contentWeight = 0.15;
var verifiedRate = predicate.Citations.Count > 0
? (double)predicate.Citations.Count(c => c.Verified) / predicate.Citations.Count
: 0;
var contentScore = Math.Min(1.0, predicate.Content.Length / 500.0); // Reasonable explanation length
return (predicate.CitationRate * citationWeight) +
(verifiedRate * verifiedWeight) +
(predicate.ConfidenceScore * confidenceWeight) +
(contentScore * contentWeight);
}
private double CalculateRemediationQualityScore(AIRemediationPlanPredicate predicate, int resolvableCount, List<string> reasons)
{
var evidenceWeight = 0.30;
var riskDeltaWeight = 0.25;
var automationWeight = 0.20;
var verificationWeight = 0.25;
var evidenceScore = predicate.EvidenceRefs.Count > 0
? (double)resolvableCount / predicate.EvidenceRefs.Count
: 0;
var riskDelta = predicate.ExpectedDelta;
var riskScore = Math.Min(1.0, Math.Max(0, riskDelta));
var autoSteps = predicate.Steps.Count(s => s.CanAutomate);
var automationScore = predicate.Steps.Count > 0 ? (double)autoSteps / predicate.Steps.Count : 0;
var verificationScore = predicate.VerificationStatus switch
{
RemediationVerificationStatus.Verified => 0.8,
RemediationVerificationStatus.Applied => 1.0,
RemediationVerificationStatus.Stale => 0.5,
_ => 0.2
};
return (evidenceScore * evidenceWeight) +
(riskScore * riskDeltaWeight) +
(automationScore * automationWeight) +
(verificationScore * verificationWeight);
}
private double CalculateVexDraftQualityScore(AIVexDraftPredicate predicate, int resolvableCount, double avgConfidence, List<string> reasons)
{
var evidenceWeight = 0.35;
var confidenceWeight = 0.30;
var justificationWeight = 0.20;
var conflictWeight = 0.15;
var evidenceScore = predicate.EvidenceRefs.Count > 0
? (double)resolvableCount / predicate.EvidenceRefs.Count
: 0;
var nonConflicting = predicate.Justifications.Count(j => !j.ConflictsWithExisting);
var conflictScore = predicate.Justifications.Count > 0
? (double)nonConflicting / predicate.Justifications.Count
: 1.0;
var hasJustifications = predicate.Justifications.Count > 0 ? 1.0 : 0.0;
return (evidenceScore * evidenceWeight) +
(avgConfidence * confidenceWeight) +
(hasJustifications * justificationWeight) +
(conflictScore * conflictWeight);
}
private double CalculatePolicyDraftQualityScore(AIPolicyDraftPredicate predicate, double avgConfidence, double passedTestRate, List<string> reasons)
{
var confidenceWeight = 0.25;
var testWeight = 0.35;
var validationWeight = 0.25;
var clarityWeight = 0.15;
var validationScore = predicate.ValidationResult.OverallPassed ? 1.0 : 0.3;
var ambiguityCount = predicate.Rules.Sum(r => r.Ambiguities?.Count ?? 0);
var clarityScore = predicate.Rules.Count > 0
? 1.0 - Math.Min(1.0, ambiguityCount / (predicate.Rules.Count * 2.0))
: 0;
return (avgConfidence * confidenceWeight) +
(passedTestRate * testWeight) +
(validationScore * validationWeight) +
(clarityScore * clarityWeight);
}
}

View File

@@ -0,0 +1,38 @@
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
/// <summary>
/// Configuration for authority classification thresholds.
/// All rate/score values are ratios in the [0.0, 1.0] range.
/// </summary>
public sealed record AIAuthorityThresholds
{
/// <summary>
/// Minimum citation rate for Evidence-Backed classification.
/// Default: 0.8 (80%)
/// </summary>
public double MinCitationRate { get; init; } = 0.8;
/// <summary>
/// Minimum confidence score for Evidence-Backed classification.
/// Default: 0.7 (70%)
/// </summary>
public double MinConfidenceScore { get; init; } = 0.7;
/// <summary>
/// Whether all evidence refs must be resolvable.
/// Default: true
/// </summary>
public bool RequireResolvableEvidence { get; init; } = true;
/// <summary>
/// Minimum verified citations ratio for Evidence-Backed.
/// Default: 0.9 (90%)
/// </summary>
public double MinVerifiedCitationRate { get; init; } = 0.9;
/// <summary>
/// Custom authority threshold score (0.0-1.0) for AuthorityThreshold classification.
/// If overall score meets this, artifact can be auto-processed.
/// Default: 0.95
/// </summary>
public double AuthorityThresholdScore { get; init; } = 0.95;
}

View File

@@ -0,0 +1,40 @@
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
/// <summary>
/// Decoding parameters used during AI generation.
/// Required for deterministic replay.
/// Nullable members are omitted (null) when the parameter was not specified.
/// </summary>
public sealed record AIDecodingParameters
{
/// <summary>
/// Temperature setting (0.0 = deterministic, higher = more random).
/// </summary>
[JsonPropertyName("temperature")]
public double Temperature { get; init; }
/// <summary>
/// Top-p (nucleus sampling) value; null when not specified.
/// </summary>
[JsonPropertyName("topP")]
public double? TopP { get; init; }
/// <summary>
/// Top-k sampling value; null when not specified.
/// </summary>
[JsonPropertyName("topK")]
public int? TopK { get; init; }
/// <summary>
/// Maximum tokens to generate; null when not specified.
/// </summary>
[JsonPropertyName("maxTokens")]
public int? MaxTokens { get; init; }
/// <summary>
/// Random seed for reproducibility; null when no seed was used.
/// </summary>
[JsonPropertyName("seed")]
public long? Seed { get; init; }
}

View File

@@ -0,0 +1,40 @@
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
/// <summary>
/// Citation linking AI claims to evidence sources.
/// Each citation ties one claim in the generated explanation to a specific evidence node.
/// </summary>
public sealed record AIExplanationCitation
{
/// <summary>
/// Index of the claim in the explanation (0-based).
/// </summary>
[JsonPropertyName("claimIndex")]
public required int ClaimIndex { get; init; }
/// <summary>
/// Text of the cited claim.
/// </summary>
[JsonPropertyName("claimText")]
public required string ClaimText { get; init; }
/// <summary>
/// Evidence node ID this claim references.
/// Format: sha256:&lt;64-hex-chars&gt;
/// </summary>
[JsonPropertyName("evidenceId")]
public required string EvidenceId { get; init; }
/// <summary>
/// Type of evidence (e.g., "sbom", "vex", "reachability", "runtime").
/// </summary>
[JsonPropertyName("evidenceType")]
public required string EvidenceType { get; init; }
/// <summary>
/// Whether the citation was verified against the evidence.
/// </summary>
[JsonPropertyName("verified")]
public required bool Verified { get; init; }
}

View File

@@ -2,85 +2,6 @@ using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
/// <summary>
/// Type of explanation generated by AI.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<AIExplanationType>))]
public enum AIExplanationType
{
/// <summary>
/// Explanation of why a vulnerability is exploitable.
/// </summary>
Exploitability,
/// <summary>
/// Explanation of a code path or call graph.
/// </summary>
CodePath,
/// <summary>
/// Explanation of a policy decision.
/// </summary>
PolicyDecision,
/// <summary>
/// Explanation of risk factors.
/// </summary>
RiskFactors,
/// <summary>
/// Explanation of remediation options.
/// </summary>
RemediationOptions,
/// <summary>
/// Plain language summary for non-technical audiences.
/// </summary>
PlainLanguageSummary,
/// <summary>
/// Explanation of evidence chain.
/// </summary>
EvidenceChain
}
/// <summary>
/// Citation linking AI claims to evidence sources.
/// </summary>
public sealed record AIExplanationCitation
{
/// <summary>
/// Index of the claim in the explanation (0-based).
/// </summary>
[JsonPropertyName("claimIndex")]
public required int ClaimIndex { get; init; }
/// <summary>
/// Text of the cited claim.
/// </summary>
[JsonPropertyName("claimText")]
public required string ClaimText { get; init; }
/// <summary>
/// Evidence node ID this claim references.
/// Format: sha256:&lt;64-hex-chars&gt;
/// </summary>
[JsonPropertyName("evidenceId")]
public required string EvidenceId { get; init; }
/// <summary>
/// Type of evidence (e.g., "sbom", "vex", "reachability", "runtime").
/// </summary>
[JsonPropertyName("evidenceType")]
public required string EvidenceType { get; init; }
/// <summary>
/// Whether the citation was verified against the evidence.
/// </summary>
[JsonPropertyName("verified")]
public required bool Verified { get; init; }
}
/// <summary>
/// Predicate for AI-generated explanations.
/// Extends AIArtifactBase with explanation-specific fields.
@@ -115,7 +36,7 @@ public sealed record AIExplanationPredicate : AIArtifactBasePredicate
/// <summary>
/// Citation rate: ratio of cited claims to total claims.
/// Used for authority classification (0.8 for EvidenceBacked).
/// Used for authority classification (>=0.8 for EvidenceBacked).
/// </summary>
[JsonPropertyName("citationRate")]
public required double CitationRate { get; init; }

View File

@@ -0,0 +1,45 @@
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Predicates.AI;
/// <summary>
/// Type of explanation generated by AI.
/// Serialized as a string (enum member name) in JSON.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<AIExplanationType>))]
public enum AIExplanationType
{
/// <summary>
/// Explanation of why a vulnerability is exploitable.
/// </summary>
Exploitability,
/// <summary>
/// Explanation of a code path or call graph.
/// </summary>
CodePath,
/// <summary>
/// Explanation of a policy decision.
/// </summary>
PolicyDecision,
/// <summary>
/// Explanation of risk factors.
/// </summary>
RiskFactors,
/// <summary>
/// Explanation of remediation options.
/// </summary>
RemediationOptions,
/// <summary>
/// Plain language summary for non-technical audiences.
/// </summary>
PlainLanguageSummary,
/// <summary>
/// Explanation of evidence chain.
/// </summary>
EvidenceChain
}

Some files were not shown because too many files have changed in this diff Show More