sprints work

StellaOps Bot
2025-12-25 12:19:12 +02:00
parent 223843f1d1
commit 2a06f780cf
224 changed files with 41796 additions and 1515 deletions

View File

@@ -0,0 +1,696 @@
// -----------------------------------------------------------------------------
// ScoreProvenanceChain.cs
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-030
// Description: Score provenance chain linking Finding → Evidence → Score → Verdict
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Engine.Attestation;
/// <summary>
/// Complete provenance chain tracking a vulnerability finding through
/// evidence collection, score calculation, and policy verdict.
/// </summary>
/// <remarks>
/// This chain provides audit-grade traceability:
/// 1. **Finding**: The vulnerability that triggered evaluation (CVE, PURL, digest).
/// 2. **Evidence**: The attestations/documents considered (SBOM, VEX, reachability).
/// 3. **Score**: The EWS calculation with all inputs and weights.
/// 4. **Verdict**: The final policy decision with rule chain.
///
/// Each step includes content-addressed references for deterministic replay.
/// </remarks>
public sealed record ScoreProvenanceChain
{
/// <summary>
/// Creates a new ScoreProvenanceChain.
/// </summary>
public ScoreProvenanceChain(
ProvenanceFindingRef finding,
ProvenanceEvidenceSet evidenceSet,
ProvenanceScoreNode score,
ProvenanceVerdictRef verdict,
DateTimeOffset createdAt)
{
Finding = finding ?? throw new ArgumentNullException(nameof(finding));
EvidenceSet = evidenceSet ?? throw new ArgumentNullException(nameof(evidenceSet));
Score = score ?? throw new ArgumentNullException(nameof(score));
Verdict = verdict ?? throw new ArgumentNullException(nameof(verdict));
CreatedAt = createdAt;
ChainDigest = ComputeChainDigest();
}
/// <summary>
/// Reference to the vulnerability finding that triggered evaluation.
/// </summary>
public ProvenanceFindingRef Finding { get; }
/// <summary>
/// Set of evidence attestations that were considered.
/// </summary>
public ProvenanceEvidenceSet EvidenceSet { get; }
/// <summary>
/// Score computation node with inputs, weights, and result.
/// </summary>
public ProvenanceScoreNode Score { get; }
/// <summary>
/// Reference to the final policy verdict.
/// </summary>
public ProvenanceVerdictRef Verdict { get; }
/// <summary>
/// Chain creation timestamp (UTC).
/// </summary>
public DateTimeOffset CreatedAt { get; }
/// <summary>
/// Digest of the entire provenance chain for tamper detection.
/// </summary>
public string ChainDigest { get; }
/// <summary>
/// Validates the chain integrity by recomputing the digest.
/// </summary>
public bool ValidateIntegrity()
{
var recomputed = ComputeChainDigest();
return string.Equals(ChainDigest, recomputed, StringComparison.Ordinal);
}
/// <summary>
/// Gets a summary of the provenance chain for logging.
/// </summary>
public string GetSummary()
{
return $"[{Finding.VulnerabilityId}] " +
$"Evidence({EvidenceSet.TotalCount}) → " +
$"Score({Score.FinalScore}, {Score.Bucket}) → " +
$"Verdict({Verdict.Status})";
}
private string ComputeChainDigest()
{
// Canonical structure for hashing
var canonical = new
{
finding = new
{
vuln_id = Finding.VulnerabilityId,
component_purl = Finding.ComponentPurl,
finding_digest = Finding.FindingDigest
},
evidence_set = new
{
sbom_count = EvidenceSet.SbomRefs.Length,
vex_count = EvidenceSet.VexRefs.Length,
reachability_count = EvidenceSet.ReachabilityRefs.Length,
scan_count = EvidenceSet.ScanRefs.Length,
evidence_digest = EvidenceSet.SetDigest
},
score = new
{
final_score = Score.FinalScore,
bucket = Score.Bucket,
policy_digest = Score.PolicyDigest,
input_digest = Score.InputDigest
},
verdict = new
{
status = Verdict.Status,
severity = Verdict.Severity,
rule_name = Verdict.MatchedRuleName,
verdict_digest = Verdict.VerdictDigest
},
created_at = CreatedAt.ToUniversalTime().ToString("O")
};
var json = JsonSerializer.Serialize(canonical, ProvenanceJsonOptions.Default);
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json));
return Convert.ToHexStringLower(hash);
}
/// <summary>
/// Creates a ScoreProvenanceChain from a VerdictPredicate and supporting context.
/// </summary>
public static ScoreProvenanceChain FromVerdictPredicate(
VerdictPredicate predicate,
ProvenanceFindingRef finding,
ProvenanceEvidenceSet evidenceSet)
{
ArgumentNullException.ThrowIfNull(predicate);
ArgumentNullException.ThrowIfNull(finding);
ArgumentNullException.ThrowIfNull(evidenceSet);
var scoreNode = ProvenanceScoreNode.FromVerdictEws(predicate.EvidenceWeightedScore, predicate.FindingId);
var verdictRef = ProvenanceVerdictRef.FromVerdictPredicate(predicate);
return new ScoreProvenanceChain(
finding: finding,
evidenceSet: evidenceSet,
score: scoreNode,
verdict: verdictRef,
createdAt: DateTimeOffset.UtcNow
);
}
}
/// <summary>
/// Reference to the vulnerability finding that triggered evaluation.
/// </summary>
public sealed record ProvenanceFindingRef
{
/// <summary>
/// Creates a new ProvenanceFindingRef.
/// </summary>
public ProvenanceFindingRef(
string vulnerabilityId,
string? componentPurl = null,
string? findingDigest = null,
string? advisorySource = null,
DateTimeOffset? publishedAt = null)
{
VulnerabilityId = Validation.TrimToNull(vulnerabilityId)
?? throw new ArgumentNullException(nameof(vulnerabilityId));
ComponentPurl = Validation.TrimToNull(componentPurl);
FindingDigest = Validation.TrimToNull(findingDigest);
AdvisorySource = Validation.TrimToNull(advisorySource);
PublishedAt = publishedAt;
}
/// <summary>
/// Vulnerability identifier (CVE, GHSA, etc.).
/// </summary>
public string VulnerabilityId { get; }
/// <summary>
/// Package URL of the affected component (optional).
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ComponentPurl { get; }
/// <summary>
/// Content digest of the finding document (optional).
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? FindingDigest { get; }
/// <summary>
/// Advisory source (NVD, OSV, vendor, etc.).
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? AdvisorySource { get; }
/// <summary>
/// Advisory publication date (optional).
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? PublishedAt { get; }
}
/// <summary>
/// Set of evidence attestations considered during scoring.
/// </summary>
public sealed record ProvenanceEvidenceSet
{
/// <summary>
/// Creates a new ProvenanceEvidenceSet.
/// </summary>
public ProvenanceEvidenceSet(
IEnumerable<ProvenanceEvidenceRef>? sbomRefs = null,
IEnumerable<ProvenanceEvidenceRef>? vexRefs = null,
IEnumerable<ProvenanceEvidenceRef>? reachabilityRefs = null,
IEnumerable<ProvenanceEvidenceRef>? scanRefs = null,
IEnumerable<ProvenanceEvidenceRef>? otherRefs = null)
{
SbomRefs = NormalizeRefs(sbomRefs);
VexRefs = NormalizeRefs(vexRefs);
ReachabilityRefs = NormalizeRefs(reachabilityRefs);
ScanRefs = NormalizeRefs(scanRefs);
OtherRefs = NormalizeRefs(otherRefs);
SetDigest = ComputeSetDigest();
}
/// <summary>
/// SBOM attestation references.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<ProvenanceEvidenceRef> SbomRefs { get; }
/// <summary>
/// VEX document references.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<ProvenanceEvidenceRef> VexRefs { get; }
/// <summary>
/// Reachability analysis attestation references.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<ProvenanceEvidenceRef> ReachabilityRefs { get; }
/// <summary>
/// Scan result attestation references.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<ProvenanceEvidenceRef> ScanRefs { get; }
/// <summary>
/// Other evidence references.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<ProvenanceEvidenceRef> OtherRefs { get; }
/// <summary>
/// Digest of the entire evidence set.
/// </summary>
public string SetDigest { get; }
/// <summary>
/// Total count of all evidence references.
/// </summary>
public int TotalCount =>
SbomRefs.Length + VexRefs.Length + ReachabilityRefs.Length + ScanRefs.Length + OtherRefs.Length;
/// <summary>
/// Whether any evidence was considered.
/// </summary>
public bool HasEvidence => TotalCount > 0;
/// <summary>
/// Gets all references in deterministic order.
/// </summary>
public IEnumerable<ProvenanceEvidenceRef> GetAllRefs()
{
return SbomRefs
.Concat(VexRefs)
.Concat(ReachabilityRefs)
.Concat(ScanRefs)
.Concat(OtherRefs);
}
private static ImmutableArray<ProvenanceEvidenceRef> NormalizeRefs(IEnumerable<ProvenanceEvidenceRef>? refs)
{
if (refs is null)
{
return [];
}
return refs
.Where(static r => r is not null)
.OrderBy(static r => r.Type, StringComparer.Ordinal)
.ThenBy(static r => r.Digest, StringComparer.Ordinal)
.ToImmutableArray();
}
private string ComputeSetDigest()
{
var digests = GetAllRefs()
.Select(static r => r.Digest)
.Where(static d => !string.IsNullOrEmpty(d))
.OrderBy(static d => d, StringComparer.Ordinal)
.ToArray();
if (digests.Length == 0)
{
return "empty";
}
var combined = string.Join(":", digests);
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(combined));
return Convert.ToHexStringLower(hash);
}
/// <summary>
/// Empty evidence set.
/// </summary>
public static ProvenanceEvidenceSet Empty => new();
}
/// <summary>
/// Reference to a single evidence attestation.
/// </summary>
public sealed record ProvenanceEvidenceRef
{
/// <summary>
/// Creates a new ProvenanceEvidenceRef.
/// </summary>
public ProvenanceEvidenceRef(
string type,
string digest,
string? uri = null,
string? provider = null,
DateTimeOffset? createdAt = null,
string? status = null)
{
Type = Validation.TrimToNull(type) ?? throw new ArgumentNullException(nameof(type));
Digest = Validation.TrimToNull(digest) ?? throw new ArgumentNullException(nameof(digest));
Uri = Validation.TrimToNull(uri);
Provider = Validation.TrimToNull(provider);
CreatedAt = createdAt;
Status = Validation.TrimToNull(status);
}
/// <summary>
/// Evidence type (sbom, vex, reachability, scan, etc.).
/// </summary>
public string Type { get; }
/// <summary>
/// Content digest of the evidence attestation.
/// </summary>
public string Digest { get; }
/// <summary>
/// URI reference to the evidence (optional).
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Uri { get; }
/// <summary>
/// Evidence provider (vendor, tool, etc.).
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Provider { get; }
/// <summary>
/// Evidence creation timestamp.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? CreatedAt { get; }
/// <summary>
/// Evidence status (e.g., VEX status).
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Status { get; }
/// <summary>
/// Well-known evidence types.
/// </summary>
public static class Types
{
public const string Sbom = "sbom";
public const string Vex = "vex";
public const string Reachability = "reachability";
public const string Scan = "scan";
public const string Advisory = "advisory";
public const string RuntimeSignal = "runtime-signal";
public const string BackportAnalysis = "backport-analysis";
public const string ExploitIntel = "exploit-intel";
}
}
/// <summary>
/// Score computation node in the provenance chain.
/// </summary>
public sealed record ProvenanceScoreNode
{
/// <summary>
/// Creates a new ProvenanceScoreNode.
/// </summary>
public ProvenanceScoreNode(
int finalScore,
string bucket,
VerdictEvidenceInputs inputs,
VerdictEvidenceWeights weights,
string policyDigest,
string calculatorVersion,
DateTimeOffset calculatedAt,
IEnumerable<string>? appliedFlags = null,
VerdictAppliedGuardrails? guardrails = null)
{
FinalScore = finalScore;
Bucket = Validation.TrimToNull(bucket) ?? throw new ArgumentNullException(nameof(bucket));
Inputs = inputs ?? throw new ArgumentNullException(nameof(inputs));
Weights = weights ?? throw new ArgumentNullException(nameof(weights));
PolicyDigest = Validation.TrimToNull(policyDigest) ?? throw new ArgumentNullException(nameof(policyDigest));
CalculatorVersion = Validation.TrimToNull(calculatorVersion) ?? throw new ArgumentNullException(nameof(calculatorVersion));
CalculatedAt = calculatedAt;
AppliedFlags = NormalizeFlags(appliedFlags);
Guardrails = guardrails;
InputDigest = ComputeInputDigest();
}
/// <summary>
/// Final computed score [0, 100].
/// </summary>
public int FinalScore { get; }
/// <summary>
/// Score bucket (ActNow, ScheduleNext, Investigate, Watchlist).
/// </summary>
public string Bucket { get; }
/// <summary>
/// Normalized input values used for calculation.
/// </summary>
public VerdictEvidenceInputs Inputs { get; }
/// <summary>
/// Weights applied to each dimension.
/// </summary>
public VerdictEvidenceWeights Weights { get; }
/// <summary>
/// Policy digest used for calculation.
/// </summary>
public string PolicyDigest { get; }
/// <summary>
/// Calculator version for reproducibility.
/// </summary>
public string CalculatorVersion { get; }
/// <summary>
/// Calculation timestamp (UTC).
/// </summary>
public DateTimeOffset CalculatedAt { get; }
/// <summary>
/// Flags applied during scoring.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<string> AppliedFlags { get; }
/// <summary>
/// Guardrails applied during scoring.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public VerdictAppliedGuardrails? Guardrails { get; }
/// <summary>
/// Digest of inputs for verification.
/// </summary>
public string InputDigest { get; }
private static ImmutableArray<string> NormalizeFlags(IEnumerable<string>? flags)
{
if (flags is null)
{
return [];
}
return flags
.Select(static f => f?.Trim())
.Where(static f => !string.IsNullOrEmpty(f))
.Select(static f => f!)
.OrderBy(static f => f, StringComparer.Ordinal)
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
}
private string ComputeInputDigest()
{
var canonical = new
{
rch = Inputs.Reachability,
rts = Inputs.Runtime,
bkp = Inputs.Backport,
xpl = Inputs.Exploit,
src = Inputs.SourceTrust,
mit = Inputs.Mitigation,
w_rch = Weights.Reachability,
w_rts = Weights.Runtime,
w_bkp = Weights.Backport,
w_xpl = Weights.Exploit,
w_src = Weights.SourceTrust,
w_mit = Weights.Mitigation
};
var json = JsonSerializer.Serialize(canonical, ProvenanceJsonOptions.Default);
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json));
return Convert.ToHexStringLower(hash);
}
/// <summary>
/// Creates a ProvenanceScoreNode from a VerdictEvidenceWeightedScore.
/// </summary>
public static ProvenanceScoreNode FromVerdictEws(VerdictEvidenceWeightedScore? ews, string findingId)
{
if (ews is null)
{
// No EWS - create a placeholder node
return new ProvenanceScoreNode(
finalScore: 0,
bucket: "Unknown",
inputs: new VerdictEvidenceInputs(0, 0, 0, 0, 0, 0),
weights: new VerdictEvidenceWeights(0, 0, 0, 0, 0, 0),
policyDigest: "none",
calculatorVersion: "none",
calculatedAt: DateTimeOffset.UtcNow
);
}
var proof = ews.Proof;
if (proof is null)
{
// EWS without proof - use available data
return new ProvenanceScoreNode(
finalScore: ews.Score,
bucket: ews.Bucket,
inputs: new VerdictEvidenceInputs(0, 0, 0, 0, 0, 0),
weights: new VerdictEvidenceWeights(0, 0, 0, 0, 0, 0),
policyDigest: ews.PolicyDigest ?? "unknown",
calculatorVersion: "unknown",
calculatedAt: ews.CalculatedAt ?? DateTimeOffset.UtcNow,
appliedFlags: ews.Flags,
guardrails: ews.Guardrails
);
}
return new ProvenanceScoreNode(
finalScore: ews.Score,
bucket: ews.Bucket,
inputs: proof.Inputs,
weights: proof.Weights,
policyDigest: proof.PolicyDigest,
calculatorVersion: proof.CalculatorVersion,
calculatedAt: proof.CalculatedAt,
appliedFlags: ews.Flags,
guardrails: ews.Guardrails
);
}
}
/// <summary>
/// Reference to the final policy verdict.
/// </summary>
public sealed record ProvenanceVerdictRef
{
/// <summary>
/// Creates a new ProvenanceVerdictRef.
/// </summary>
public ProvenanceVerdictRef(
string status,
string severity,
string matchedRuleName,
int matchedRulePriority,
string verdictDigest,
DateTimeOffset evaluatedAt,
string? rationale = null)
{
Status = Validation.TrimToNull(status) ?? throw new ArgumentNullException(nameof(status));
Severity = Validation.TrimToNull(severity) ?? throw new ArgumentNullException(nameof(severity));
MatchedRuleName = Validation.TrimToNull(matchedRuleName) ?? throw new ArgumentNullException(nameof(matchedRuleName));
MatchedRulePriority = matchedRulePriority;
VerdictDigest = Validation.TrimToNull(verdictDigest) ?? throw new ArgumentNullException(nameof(verdictDigest));
EvaluatedAt = evaluatedAt;
Rationale = Validation.TrimToNull(rationale);
}
/// <summary>
/// Verdict status (affected, not_affected, fixed, etc.).
/// </summary>
public string Status { get; }
/// <summary>
/// Final severity determination.
/// </summary>
public string Severity { get; }
/// <summary>
/// Name of the policy rule that matched.
/// </summary>
public string MatchedRuleName { get; }
/// <summary>
/// Priority of the matched rule.
/// </summary>
public int MatchedRulePriority { get; }
/// <summary>
/// Content digest of the verdict for verification.
/// </summary>
public string VerdictDigest { get; }
/// <summary>
/// Evaluation timestamp (UTC).
/// </summary>
public DateTimeOffset EvaluatedAt { get; }
/// <summary>
/// Human-readable rationale (optional).
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Rationale { get; }
/// <summary>
/// Creates a ProvenanceVerdictRef from a VerdictPredicate.
/// </summary>
public static ProvenanceVerdictRef FromVerdictPredicate(VerdictPredicate predicate)
{
ArgumentNullException.ThrowIfNull(predicate);
// Compute verdict digest from key fields
var canonical = new
{
tenant_id = predicate.TenantId,
policy_id = predicate.PolicyId,
policy_version = predicate.PolicyVersion,
finding_id = predicate.FindingId,
status = predicate.Verdict.Status,
severity = predicate.Verdict.Severity,
score = predicate.Verdict.Score,
evaluated_at = predicate.EvaluatedAt.ToUniversalTime().ToString("O")
};
var json = JsonSerializer.Serialize(canonical, ProvenanceJsonOptions.Default);
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json));
var verdictDigest = Convert.ToHexStringLower(hash);
// Get matched rule name from rule chain
var matchedRule = predicate.RuleChain.FirstOrDefault();
var matchedRuleName = matchedRule?.RuleId ?? "default";
return new ProvenanceVerdictRef(
status: predicate.Verdict.Status,
severity: predicate.Verdict.Severity,
matchedRuleName: matchedRuleName,
matchedRulePriority: 0, // Priority not directly available from predicate
verdictDigest: verdictDigest,
evaluatedAt: predicate.EvaluatedAt,
rationale: predicate.Verdict.Rationale
);
}
}
/// <summary>
/// JSON serialization options for provenance chain.
/// </summary>
internal static class ProvenanceJsonOptions
{
public static JsonSerializerOptions Default { get; } = new()
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
WriteIndented = false,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
};
}
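
// Usage sketch (not part of the committed file): a minimal, hand-built chain
// using only the constructors defined above and the VerdictEvidenceInputs /
// VerdictEvidenceWeights records from VerdictEvidenceWeightedScore.cs. The
// CVE, PURL, digests, and score values are illustrative placeholders.
var finding = new ProvenanceFindingRef(
    vulnerabilityId: "CVE-2025-0001",
    componentPurl: "pkg:npm/example@1.2.3",
    advisorySource: "osv");

var evidence = new ProvenanceEvidenceSet(
    sbomRefs: new[] { new ProvenanceEvidenceRef(ProvenanceEvidenceRef.Types.Sbom, "sha256:abc...") },
    vexRefs: new[] { new ProvenanceEvidenceRef(ProvenanceEvidenceRef.Types.Vex, "sha256:def...", status: "not_affected") });

var score = new ProvenanceScoreNode(
    finalScore: 42,
    bucket: "Investigate",
    inputs: new VerdictEvidenceInputs(0.5, 0.0, 1.0, 0.2, 0.8, 0.1),
    weights: new VerdictEvidenceWeights(0.3, 0.2, 0.15, 0.2, 0.1, 0.05),
    policyDigest: "sha256:111...",
    calculatorVersion: "1.0.0",
    calculatedAt: DateTimeOffset.UtcNow);

var verdict = new ProvenanceVerdictRef(
    status: "affected",
    severity: "medium",
    matchedRuleName: "default",
    matchedRulePriority: 0,
    verdictDigest: "sha256:222...",
    evaluatedAt: DateTimeOffset.UtcNow);

var chain = new ScoreProvenanceChain(finding, evidence, score, verdict, DateTimeOffset.UtcNow);
// ChainDigest is computed in the constructor; ValidateIntegrity() recomputes it
// over the same canonical structure and compares with ordinal equality.
Console.WriteLine(chain.GetSummary());
Console.WriteLine(chain.ValidateIntegrity()); // true: the stored digest matches the recomputed one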

View File

@@ -0,0 +1,237 @@
// -----------------------------------------------------------------------------
// ScoringDeterminismVerifier.cs
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-029
// Description: Scoring determinism verification for attestation verification
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using StellaOps.Signals.EvidenceWeightedScore;
namespace StellaOps.Policy.Engine.Attestation;
/// <summary>
/// Result of scoring determinism verification.
/// </summary>
public sealed record ScoringVerificationResult
{
/// <summary>
/// Whether the score verification passed (recalculated matches attested).
/// </summary>
public required bool IsValid { get; init; }
/// <summary>
/// The attested score from the verdict.
/// </summary>
public required int AttestedScore { get; init; }
/// <summary>
/// The recalculated score using the proof inputs.
/// </summary>
public required int RecalculatedScore { get; init; }
/// <summary>
/// Difference between attested and recalculated (should be 0 for valid).
/// </summary>
public int Difference => Math.Abs(AttestedScore - RecalculatedScore);
/// <summary>
/// Error message if verification failed.
/// </summary>
public string? Error { get; init; }
/// <summary>
/// Creates a successful verification result.
/// </summary>
public static ScoringVerificationResult Success(int score) => new()
{
IsValid = true,
AttestedScore = score,
RecalculatedScore = score,
Error = null
};
/// <summary>
/// Creates a failed verification result due to score mismatch.
/// </summary>
public static ScoringVerificationResult ScoreMismatch(int attested, int recalculated) => new()
{
IsValid = false,
AttestedScore = attested,
RecalculatedScore = recalculated,
Error = $"Score mismatch: attested={attested}, recalculated={recalculated}, diff={Math.Abs(attested - recalculated)}"
};
/// <summary>
/// Creates a failed verification result due to missing proof.
/// </summary>
public static ScoringVerificationResult MissingProof(int attestedScore) => new()
{
IsValid = false,
AttestedScore = attestedScore,
RecalculatedScore = 0,
Error = "No scoring proof available for verification"
};
/// <summary>
/// Creates a skipped verification result (no EWS present).
/// </summary>
public static ScoringVerificationResult Skipped() => new()
{
IsValid = true,
AttestedScore = 0,
RecalculatedScore = 0,
Error = null
};
}
/// <summary>
/// Interface for scoring determinism verification.
/// </summary>
public interface IScoringDeterminismVerifier
{
/// <summary>
/// Verifies that the attested score can be reproduced from the proof.
/// </summary>
/// <param name="ews">The attested evidence-weighted score.</param>
/// <returns>Verification result.</returns>
ScoringVerificationResult Verify(VerdictEvidenceWeightedScore? ews);
/// <summary>
/// Verifies that a verdict predicate's score is deterministically reproducible.
/// </summary>
/// <param name="predicate">The verdict predicate to verify.</param>
/// <returns>Verification result.</returns>
ScoringVerificationResult VerifyPredicate(VerdictPredicate? predicate);
}
/// <summary>
/// Verifies scoring determinism by recalculating from proof inputs.
/// </summary>
public sealed class ScoringDeterminismVerifier : IScoringDeterminismVerifier
{
private readonly IEvidenceWeightedScoreCalculator _calculator;
private readonly ILogger<ScoringDeterminismVerifier> _logger;
/// <summary>
/// Creates a new ScoringDeterminismVerifier.
/// </summary>
public ScoringDeterminismVerifier(
IEvidenceWeightedScoreCalculator calculator,
ILogger<ScoringDeterminismVerifier> logger)
{
_calculator = calculator ?? throw new ArgumentNullException(nameof(calculator));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public ScoringVerificationResult Verify(VerdictEvidenceWeightedScore? ews)
{
if (ews is null)
{
_logger.LogDebug("No EWS present in verdict, skipping determinism verification");
return ScoringVerificationResult.Skipped();
}
if (ews.Proof is null)
{
_logger.LogWarning(
"EWS present but no proof available for determinism verification (score={Score})",
ews.Score);
return ScoringVerificationResult.MissingProof(ews.Score);
}
try
{
// Reconstruct inputs from proof
var input = new EvidenceWeightedScoreInput
{
FindingId = "verification", // Placeholder - not used in calculation
Rch = ews.Proof.Inputs.Reachability,
Rts = ews.Proof.Inputs.Runtime,
Bkp = ews.Proof.Inputs.Backport,
Xpl = ews.Proof.Inputs.Exploit,
Src = ews.Proof.Inputs.SourceTrust,
Mit = ews.Proof.Inputs.Mitigation,
};
// Reconstruct weights from proof
var weights = new EvidenceWeights
{
Rch = ews.Proof.Weights.Reachability,
Rts = ews.Proof.Weights.Runtime,
Bkp = ews.Proof.Weights.Backport,
Xpl = ews.Proof.Weights.Exploit,
Src = ews.Proof.Weights.SourceTrust,
Mit = ews.Proof.Weights.Mitigation,
};
// Create policy with the proof weights
var policy = new EvidenceWeightPolicy
{
Version = "ews.v1",
Profile = "verification",
Weights = weights,
};
// Recalculate
var result = _calculator.Calculate(input, policy);
// Compare
if (result.Score == ews.Score)
{
_logger.LogDebug(
"Scoring determinism verified: score={Score}",
ews.Score);
return ScoringVerificationResult.Success(ews.Score);
}
else
{
_logger.LogWarning(
"Scoring determinism failed: attested={Attested}, recalculated={Recalculated}",
ews.Score,
result.Score);
return ScoringVerificationResult.ScoreMismatch(ews.Score, result.Score);
}
}
catch (Exception ex)
{
_logger.LogError(ex, "Error during scoring determinism verification");
return new ScoringVerificationResult
{
IsValid = false,
AttestedScore = ews.Score,
RecalculatedScore = 0,
Error = $"Verification error: {ex.Message}"
};
}
}
/// <inheritdoc />
public ScoringVerificationResult VerifyPredicate(VerdictPredicate? predicate)
{
if (predicate is null)
{
_logger.LogDebug("No predicate provided, skipping determinism verification");
return ScoringVerificationResult.Skipped();
}
return Verify(predicate.EvidenceWeightedScore);
}
}
/// <summary>
/// Factory for creating scoring determinism verifiers.
/// </summary>
public static class ScoringDeterminismVerifierFactory
{
/// <summary>
/// Creates a new ScoringDeterminismVerifier with default calculator.
/// </summary>
public static IScoringDeterminismVerifier Create(ILogger<ScoringDeterminismVerifier> logger)
{
return new ScoringDeterminismVerifier(
new EvidenceWeightedScoreCalculator(),
logger);
}
}
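
// Usage sketch (not part of the committed file): wiring the verifier against a
// hand-built EWS carrying a proof. Assumes Microsoft.Extensions.Logging.Abstractions
// for NullLogger; the score, bucket, and input/weight values are illustrative, and
// whether verification passes depends on the calculator reproducing the attested
// score from the proof inputs and weights.
using Microsoft.Extensions.Logging.Abstractions;

var verifier = ScoringDeterminismVerifierFactory.Create(NullLogger<ScoringDeterminismVerifier>.Instance);

var proof = new VerdictScoringProof(
    inputs: new VerdictEvidenceInputs(0.9, 0.0, 1.0, 0.3, 0.8, 0.0),
    weights: new VerdictEvidenceWeights(0.3, 0.2, 0.15, 0.2, 0.1, 0.05),
    policyDigest: "sha256:333...",
    calculatorVersion: "1.0.0",
    calculatedAt: DateTimeOffset.UtcNow);

var attested = new VerdictEvidenceWeightedScore(score: 57, bucket: "ScheduleNext", proof: proof);

var result = verifier.Verify(attested);
// result.IsValid is true only if the calculator recomputes 57 from the proof;
// otherwise result.Error reports the attested/recalculated mismatch.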

View File

@@ -0,0 +1,266 @@
// -----------------------------------------------------------------------------
// VerdictBudgetCheck.cs
// Sprint: SPRINT_8200_0001_0006_budget_threshold_attestation
// Tasks: BUDGET-8200-006, BUDGET-8200-007
// Description: Budget check attestation data for verdict predicates
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Engine.Attestation;
/// <summary>
/// Budget check information for verdict attestation.
/// Captures the budget configuration and evaluation result at decision time.
/// </summary>
public sealed record VerdictBudgetCheck
{
/// <summary>
/// Creates a new VerdictBudgetCheck.
/// </summary>
public VerdictBudgetCheck(
string environment,
VerdictBudgetConfig config,
VerdictBudgetActualCounts actualCounts,
string result,
string configHash,
DateTimeOffset evaluatedAt,
IEnumerable<VerdictBudgetViolation>? violations = null)
{
Environment = Validation.TrimToNull(environment) ?? throw new ArgumentNullException(nameof(environment));
Config = config ?? throw new ArgumentNullException(nameof(config));
ActualCounts = actualCounts ?? throw new ArgumentNullException(nameof(actualCounts));
Result = Validation.TrimToNull(result) ?? throw new ArgumentNullException(nameof(result));
ConfigHash = Validation.TrimToNull(configHash) ?? throw new ArgumentNullException(nameof(configHash));
EvaluatedAt = evaluatedAt;
Violations = NormalizeViolations(violations);
}
/// <summary>
/// Environment for which the budget was evaluated.
/// </summary>
public string Environment { get; }
/// <summary>
/// Budget configuration that was applied.
/// </summary>
public VerdictBudgetConfig Config { get; }
/// <summary>
/// Actual counts observed at evaluation time.
/// </summary>
public VerdictBudgetActualCounts ActualCounts { get; }
/// <summary>
/// Budget check result: pass, warn, fail.
/// </summary>
public string Result { get; }
/// <summary>
/// SHA-256 hash of budget configuration for determinism proof.
/// Format: sha256:{64 hex characters}
/// </summary>
public string ConfigHash { get; }
/// <summary>
/// Timestamp when the budget was evaluated.
/// </summary>
public DateTimeOffset EvaluatedAt { get; }
/// <summary>
/// Violations if any limits were exceeded.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<VerdictBudgetViolation> Violations { get; }
/// <summary>
/// Computes a deterministic hash of a budget configuration.
/// </summary>
public static string ComputeConfigHash(VerdictBudgetConfig config)
{
ArgumentNullException.ThrowIfNull(config);
// Serialize with canonical options for deterministic output
var options = new JsonSerializerOptions
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
};
var json = JsonSerializer.Serialize(config, options);
var bytes = Encoding.UTF8.GetBytes(json);
var hash = SHA256.HashData(bytes);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
private static ImmutableArray<VerdictBudgetViolation> NormalizeViolations(
IEnumerable<VerdictBudgetViolation>? violations)
{
if (violations is null)
{
return [];
}
return violations
.Where(static v => v is not null)
.OrderBy(static v => v.Type, StringComparer.Ordinal)
.ThenBy(static v => v.Reason ?? string.Empty, StringComparer.Ordinal)
.ToImmutableArray();
}
}
/// <summary>
/// Budget configuration that was applied during evaluation.
/// </summary>
public sealed record VerdictBudgetConfig
{
/// <summary>
/// Creates a new VerdictBudgetConfig.
/// </summary>
public VerdictBudgetConfig(
int maxUnknownCount,
double maxCumulativeUncertainty,
string action,
IReadOnlyDictionary<string, int>? reasonLimits = null)
{
MaxUnknownCount = maxUnknownCount;
MaxCumulativeUncertainty = maxCumulativeUncertainty;
Action = Validation.TrimToNull(action) ?? "warn";
ReasonLimits = NormalizeReasonLimits(reasonLimits);
}
/// <summary>
/// Maximum number of unknowns allowed.
/// </summary>
public int MaxUnknownCount { get; }
/// <summary>
/// Maximum cumulative uncertainty score allowed.
/// </summary>
public double MaxCumulativeUncertainty { get; }
/// <summary>
/// Action to take when budget is exceeded: warn, block.
/// </summary>
public string Action { get; }
/// <summary>
/// Per-reason code limits (optional).
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableSortedDictionary<string, int> ReasonLimits { get; }
private static ImmutableSortedDictionary<string, int> NormalizeReasonLimits(
IReadOnlyDictionary<string, int>? limits)
{
if (limits is null || limits.Count == 0)
{
return ImmutableSortedDictionary<string, int>.Empty;
}
return limits
.Where(static kv => !string.IsNullOrWhiteSpace(kv.Key))
.ToImmutableSortedDictionary(
static kv => kv.Key.Trim(),
static kv => kv.Value,
StringComparer.Ordinal);
}
}
/// <summary>
/// Actual counts observed at evaluation time.
/// </summary>
public sealed record VerdictBudgetActualCounts
{
/// <summary>
/// Creates a new VerdictBudgetActualCounts.
/// </summary>
public VerdictBudgetActualCounts(
int total,
double cumulativeUncertainty,
IReadOnlyDictionary<string, int>? byReason = null)
{
Total = total;
CumulativeUncertainty = cumulativeUncertainty;
ByReason = NormalizeByReason(byReason);
}
/// <summary>
/// Total number of unknowns.
/// </summary>
public int Total { get; }
/// <summary>
/// Cumulative uncertainty score across all unknowns.
/// </summary>
public double CumulativeUncertainty { get; }
/// <summary>
/// Breakdown by reason code.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableSortedDictionary<string, int> ByReason { get; }
private static ImmutableSortedDictionary<string, int> NormalizeByReason(
IReadOnlyDictionary<string, int>? byReason)
{
if (byReason is null || byReason.Count == 0)
{
return ImmutableSortedDictionary<string, int>.Empty;
}
return byReason
.Where(static kv => !string.IsNullOrWhiteSpace(kv.Key))
.ToImmutableSortedDictionary(
static kv => kv.Key.Trim(),
static kv => kv.Value,
StringComparer.Ordinal);
}
}
/// <summary>
/// Represents a budget limit violation.
/// </summary>
public sealed record VerdictBudgetViolation
{
/// <summary>
/// Creates a new VerdictBudgetViolation.
/// </summary>
public VerdictBudgetViolation(
string type,
int limit,
int actual,
string? reason = null)
{
Type = Validation.TrimToNull(type) ?? throw new ArgumentNullException(nameof(type));
Limit = limit;
Actual = actual;
Reason = Validation.TrimToNull(reason);
}
/// <summary>
/// Type of violation: total, cumulative, reason.
/// </summary>
public string Type { get; }
/// <summary>
/// The limit that was exceeded.
/// </summary>
public int Limit { get; }
/// <summary>
/// The actual value that exceeded the limit.
/// </summary>
public int Actual { get; }
/// <summary>
/// Reason code, if this is a per-reason violation.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Reason { get; }
}
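
// Construction sketch (not part of the committed file): recording a failed budget
// check using only the constructors above. The "prod" environment, limits, counts,
// and reason codes are illustrative; the caller is assumed to have already
// evaluated the budget and determined the result and violations.
var config = new VerdictBudgetConfig(
    maxUnknownCount: 10,
    maxCumulativeUncertainty: 2.5,
    action: "block",
    reasonLimits: new Dictionary<string, int> { ["missing-vex"] = 3 });

var actual = new VerdictBudgetActualCounts(
    total: 12,
    cumulativeUncertainty: 1.8,
    byReason: new Dictionary<string, int> { ["missing-vex"] = 5, ["no-sbom"] = 7 });

var check = new VerdictBudgetCheck(
    environment: "prod",
    config: config,
    actualCounts: actual,
    result: "fail",
    configHash: VerdictBudgetCheck.ComputeConfigHash(config),
    evaluatedAt: DateTimeOffset.UtcNow,
    violations: new[]
    {
        new VerdictBudgetViolation(type: "total", limit: 10, actual: 12),
        new VerdictBudgetViolation(type: "reason", limit: 3, actual: 5, reason: "missing-vex"),
    });
// ConfigHash ties the attested check to the exact budget configuration
// ("sha256:{64 hex}"); Violations are sorted by type, then reason.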

View File

@@ -0,0 +1,521 @@
// -----------------------------------------------------------------------------
// VerdictEvidenceWeightedScore.cs
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-025, PINT-8200-028
// Description: Serializable EWS decomposition and ScoringProof for verdict attestation
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Text.Json.Serialization;
using StellaOps.Signals.EvidenceWeightedScore;
namespace StellaOps.Policy.Engine.Attestation;
/// <summary>
/// Evidence-Weighted Score (EWS) decomposition for verdict serialization.
/// Includes score, bucket, dimension breakdown, flags, and calculation metadata.
/// </summary>
public sealed record VerdictEvidenceWeightedScore
{
/// <summary>
/// Creates a new VerdictEvidenceWeightedScore from its components.
/// </summary>
public VerdictEvidenceWeightedScore(
int score,
string bucket,
IEnumerable<VerdictDimensionContribution>? breakdown = null,
IEnumerable<string>? flags = null,
IEnumerable<string>? explanations = null,
string? policyDigest = null,
DateTimeOffset? calculatedAt = null,
VerdictAppliedGuardrails? guardrails = null,
VerdictScoringProof? proof = null)
{
Score = score is < 0 or > 100
? throw new ArgumentOutOfRangeException(nameof(score), score, "Score must be between 0 and 100.")
: score;
Bucket = Validation.TrimToNull(bucket) ?? throw new ArgumentNullException(nameof(bucket));
Breakdown = NormalizeBreakdown(breakdown);
Flags = NormalizeFlags(flags);
Explanations = NormalizeExplanations(explanations);
PolicyDigest = Validation.TrimToNull(policyDigest);
CalculatedAt = calculatedAt;
Guardrails = guardrails;
Proof = proof;
}
/// <summary>
/// Final score [0, 100]. Higher = more evidence of real risk.
/// </summary>
public int Score { get; }
/// <summary>
/// Score bucket for quick triage (ActNow, ScheduleNext, Investigate, Watchlist).
/// </summary>
public string Bucket { get; }
/// <summary>
/// Per-dimension score contributions (breakdown).
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<VerdictDimensionContribution> Breakdown { get; }
/// <summary>
/// Active flags for badges (e.g., "live-signal", "proven-path", "vendor-na").
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<string> Flags { get; }
/// <summary>
/// Human-readable explanations of top contributing factors.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableArray<string> Explanations { get; }
/// <summary>
/// Policy digest for determinism verification.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? PolicyDigest { get; }
/// <summary>
/// Calculation timestamp (UTC ISO-8601).
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? CalculatedAt { get; }
/// <summary>
/// Applied guardrails (caps/floors) during calculation.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public VerdictAppliedGuardrails? Guardrails { get; }
/// <summary>
/// Scoring proof for reproducibility verification.
/// Contains raw inputs and weights to allow deterministic recalculation.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public VerdictScoringProof? Proof { get; }
/// <summary>
/// Creates a VerdictEvidenceWeightedScore from an EvidenceWeightedScoreResult.
/// </summary>
public static VerdictEvidenceWeightedScore? FromEwsResult(EvidenceWeightedScoreResult? ewsResult)
{
if (ewsResult is null)
{
return null;
}
return new VerdictEvidenceWeightedScore(
score: ewsResult.Score,
bucket: ewsResult.Bucket.ToString(),
breakdown: ewsResult.Breakdown.Select(d => VerdictDimensionContribution.FromDimensionContribution(d)),
flags: ewsResult.Flags,
explanations: ewsResult.Explanations,
policyDigest: ewsResult.PolicyDigest,
calculatedAt: ewsResult.CalculatedAt,
guardrails: VerdictAppliedGuardrails.FromAppliedGuardrails(ewsResult.Caps),
proof: VerdictScoringProof.FromEwsResult(ewsResult)
);
}
private static ImmutableArray<VerdictDimensionContribution> NormalizeBreakdown(
IEnumerable<VerdictDimensionContribution>? breakdown)
{
if (breakdown is null)
{
return [];
}
return breakdown
.Where(static b => b is not null)
.OrderByDescending(static b => Math.Abs(b.Contribution))
.ToImmutableArray();
}
private static ImmutableArray<string> NormalizeFlags(IEnumerable<string>? flags)
{
if (flags is null)
{
return [];
}
return flags
.Select(static f => f?.Trim())
.Where(static f => !string.IsNullOrEmpty(f))
.Select(static f => f!)
.OrderBy(static f => f, StringComparer.Ordinal)
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToImmutableArray();
}
private static ImmutableArray<string> NormalizeExplanations(IEnumerable<string>? explanations)
{
if (explanations is null)
{
return [];
}
return explanations
.Select(static e => e?.Trim())
.Where(static e => !string.IsNullOrEmpty(e))
.Select(static e => e!)
.ToImmutableArray();
}
}
/// <summary>
/// Per-dimension contribution to the evidence-weighted score.
/// </summary>
public sealed record VerdictDimensionContribution
{
/// <summary>
/// Creates a new VerdictDimensionContribution.
/// </summary>
public VerdictDimensionContribution(
string dimension,
string symbol,
double inputValue,
double weight,
double contribution,
bool isSubtractive = false)
{
Dimension = Validation.TrimToNull(dimension) ?? throw new ArgumentNullException(nameof(dimension));
Symbol = Validation.TrimToNull(symbol) ?? throw new ArgumentNullException(nameof(symbol));
InputValue = inputValue;
Weight = weight;
Contribution = contribution;
IsSubtractive = isSubtractive;
}
/// <summary>
/// Dimension name (e.g., "Reachability", "Runtime").
/// </summary>
public string Dimension { get; }
/// <summary>
/// Symbol (RCH, RTS, BKP, XPL, SRC, MIT).
/// </summary>
public string Symbol { get; }
/// <summary>
/// Normalized input value [0, 1].
/// </summary>
public double InputValue { get; }
/// <summary>
/// Weight applied to this dimension.
/// </summary>
public double Weight { get; }
/// <summary>
/// Contribution to raw score (weight * input, or negative for MIT).
/// </summary>
public double Contribution { get; }
/// <summary>
/// Whether this is a subtractive dimension (like MIT).
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public bool IsSubtractive { get; }
/// <summary>
/// Creates a VerdictDimensionContribution from a DimensionContribution.
/// </summary>
public static VerdictDimensionContribution FromDimensionContribution(DimensionContribution contribution)
{
ArgumentNullException.ThrowIfNull(contribution);
return new VerdictDimensionContribution(
dimension: contribution.Dimension,
symbol: contribution.Symbol,
inputValue: contribution.InputValue,
weight: contribution.Weight,
contribution: contribution.Contribution,
isSubtractive: contribution.IsSubtractive
);
}
}
/// <summary>
/// Record of applied guardrails during EWS calculation.
/// </summary>
public sealed record VerdictAppliedGuardrails
{
/// <summary>
/// Creates a new VerdictAppliedGuardrails.
/// </summary>
public VerdictAppliedGuardrails(
bool speculativeCap,
bool notAffectedCap,
bool runtimeFloor,
int originalScore,
int adjustedScore)
{
SpeculativeCap = speculativeCap;
NotAffectedCap = notAffectedCap;
RuntimeFloor = runtimeFloor;
OriginalScore = originalScore;
AdjustedScore = adjustedScore;
}
/// <summary>
/// Whether the speculative cap was applied.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public bool SpeculativeCap { get; }
/// <summary>
/// Whether the not-affected cap was applied.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public bool NotAffectedCap { get; }
/// <summary>
/// Whether the runtime floor was applied.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public bool RuntimeFloor { get; }
/// <summary>
/// Original score before guardrails.
/// </summary>
public int OriginalScore { get; }
/// <summary>
/// Score after guardrails.
/// </summary>
public int AdjustedScore { get; }
/// <summary>
/// Check if any guardrail was applied.
/// </summary>
[JsonIgnore]
public bool AnyApplied => SpeculativeCap || NotAffectedCap || RuntimeFloor;
/// <summary>
/// Creates a VerdictAppliedGuardrails from an AppliedGuardrails.
/// </summary>
public static VerdictAppliedGuardrails? FromAppliedGuardrails(AppliedGuardrails? guardrails)
{
if (guardrails is null)
{
return null;
}
// Only include if any guardrail was actually applied
if (!guardrails.AnyApplied)
{
return null;
}
return new VerdictAppliedGuardrails(
speculativeCap: guardrails.SpeculativeCap,
notAffectedCap: guardrails.NotAffectedCap,
runtimeFloor: guardrails.RuntimeFloor,
originalScore: guardrails.OriginalScore,
adjustedScore: guardrails.AdjustedScore
);
}
}
/// <summary>
/// Scoring proof for deterministic reproducibility verification.
/// Contains all inputs needed to recalculate and verify the score.
/// </summary>
public sealed record VerdictScoringProof
{
/// <summary>
/// Creates a new VerdictScoringProof.
/// </summary>
public VerdictScoringProof(
VerdictEvidenceInputs inputs,
VerdictEvidenceWeights weights,
string policyDigest,
string calculatorVersion,
DateTimeOffset calculatedAt)
{
Inputs = inputs ?? throw new ArgumentNullException(nameof(inputs));
Weights = weights ?? throw new ArgumentNullException(nameof(weights));
PolicyDigest = Validation.TrimToNull(policyDigest) ?? throw new ArgumentNullException(nameof(policyDigest));
CalculatorVersion = Validation.TrimToNull(calculatorVersion) ?? throw new ArgumentNullException(nameof(calculatorVersion));
CalculatedAt = calculatedAt;
}
/// <summary>
/// Normalized input values [0, 1] for each dimension.
/// </summary>
public VerdictEvidenceInputs Inputs { get; }
/// <summary>
/// Weight values used for scoring.
/// </summary>
public VerdictEvidenceWeights Weights { get; }
/// <summary>
/// Policy digest (SHA256) used for calculation.
/// </summary>
public string PolicyDigest { get; }
/// <summary>
/// Calculator version string for reproducibility.
/// </summary>
public string CalculatorVersion { get; }
/// <summary>
/// Calculation timestamp (UTC).
/// </summary>
public DateTimeOffset CalculatedAt { get; }
/// <summary>
/// Creates a VerdictScoringProof from an EvidenceWeightedScoreResult.
/// </summary>
public static VerdictScoringProof? FromEwsResult(EvidenceWeightedScoreResult? ewsResult)
{
if (ewsResult is null)
{
return null;
}
return new VerdictScoringProof(
inputs: VerdictEvidenceInputs.FromEvidenceInputValues(ewsResult.Inputs),
weights: VerdictEvidenceWeights.FromEvidenceWeights(ewsResult.Weights),
policyDigest: ewsResult.PolicyDigest,
calculatorVersion: "1.0.0", // TODO: Get from calculator metadata
calculatedAt: ewsResult.CalculatedAt
);
}
}
/// <summary>
/// Normalized input values for scoring.
/// </summary>
public sealed record VerdictEvidenceInputs
{
/// <summary>
/// Creates a new VerdictEvidenceInputs.
/// </summary>
public VerdictEvidenceInputs(
double reachability,
double runtime,
double backport,
double exploit,
double sourceTrust,
double mitigation)
{
Reachability = reachability;
Runtime = runtime;
Backport = backport;
Exploit = exploit;
SourceTrust = sourceTrust;
Mitigation = mitigation;
}
/// <summary>Reachability input [0, 1].</summary>
[JsonPropertyName("rch")]
public double Reachability { get; }
/// <summary>Runtime signal input [0, 1].</summary>
[JsonPropertyName("rts")]
public double Runtime { get; }
/// <summary>Backport analysis input [0, 1].</summary>
[JsonPropertyName("bkp")]
public double Backport { get; }
/// <summary>Exploit evidence input [0, 1].</summary>
[JsonPropertyName("xpl")]
public double Exploit { get; }
/// <summary>Source trust input [0, 1].</summary>
[JsonPropertyName("src")]
public double SourceTrust { get; }
/// <summary>Mitigation factor input [0, 1].</summary>
[JsonPropertyName("mit")]
public double Mitigation { get; }
/// <summary>
/// Creates from an EvidenceInputValues.
/// </summary>
public static VerdictEvidenceInputs FromEvidenceInputValues(EvidenceInputValues inputs)
{
ArgumentNullException.ThrowIfNull(inputs);
return new VerdictEvidenceInputs(
reachability: inputs.Rch,
runtime: inputs.Rts,
backport: inputs.Bkp,
exploit: inputs.Xpl,
sourceTrust: inputs.Src,
mitigation: inputs.Mit
);
}
}
/// <summary>
/// Weight values for scoring dimensions.
/// </summary>
public sealed record VerdictEvidenceWeights
{
/// <summary>
/// Creates a new VerdictEvidenceWeights.
/// </summary>
public VerdictEvidenceWeights(
double reachability,
double runtime,
double backport,
double exploit,
double sourceTrust,
double mitigation)
{
Reachability = reachability;
Runtime = runtime;
Backport = backport;
Exploit = exploit;
SourceTrust = sourceTrust;
Mitigation = mitigation;
}
/// <summary>Reachability weight [0, 1].</summary>
[JsonPropertyName("rch")]
public double Reachability { get; }
/// <summary>Runtime signal weight [0, 1].</summary>
[JsonPropertyName("rts")]
public double Runtime { get; }
/// <summary>Backport analysis weight [0, 1].</summary>
[JsonPropertyName("bkp")]
public double Backport { get; }
/// <summary>Exploit evidence weight [0, 1].</summary>
[JsonPropertyName("xpl")]
public double Exploit { get; }
/// <summary>Source trust weight [0, 1].</summary>
[JsonPropertyName("src")]
public double SourceTrust { get; }
/// <summary>Mitigation factor weight [0, 1].</summary>
[JsonPropertyName("mit")]
public double Mitigation { get; }
/// <summary>
/// Creates from an EvidenceWeights.
/// </summary>
public static VerdictEvidenceWeights FromEvidenceWeights(EvidenceWeights weights)
{
ArgumentNullException.ThrowIfNull(weights);
return new VerdictEvidenceWeights(
reachability: weights.Rch,
runtime: weights.Rts,
backport: weights.Bkp,
exploit: weights.Xpl,
sourceTrust: weights.Src,
mitigation: weights.Mit
);
}
}
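
// Shape sketch (not part of the committed file): a hand-built EWS decomposition
// as it would be attested. In production this record comes from
// VerdictEvidenceWeightedScore.FromEwsResult over a live calculator result; the
// dimension values, flag names, and explanation text here are illustrative.
var ews = new VerdictEvidenceWeightedScore(
    score: 72,
    bucket: "ActNow",
    breakdown: new[]
    {
        new VerdictDimensionContribution("Reachability", "RCH", inputValue: 1.0, weight: 0.3, contribution: 0.30),
        new VerdictDimensionContribution("Mitigation", "MIT", inputValue: 0.5, weight: 0.05, contribution: -0.025, isSubtractive: true),
    },
    flags: new[] { "proven-path", "live-signal" },
    explanations: new[] { "Reachable call path confirmed by static analysis." },
    policyDigest: "sha256:444...",
    calculatedAt: DateTimeOffset.UtcNow);
// Before serialization the constructor re-orders Breakdown by |contribution|
// descending and trims, de-duplicates, and sorts Flags.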

View File

@@ -23,6 +23,8 @@ public sealed record VerdictPredicate
IEnumerable<VerdictEvidence>? evidence = null,
IEnumerable<VerdictVexImpact>? vexImpacts = null,
VerdictReachability? reachability = null,
VerdictEvidenceWeightedScore? evidenceWeightedScore = null,
VerdictBudgetCheck? budgetCheck = null,
ImmutableSortedDictionary<string, string>? metadata = null)
{
Type = PredicateType;
@@ -47,6 +49,8 @@ public sealed record VerdictPredicate
Evidence = NormalizeEvidence(evidence);
VexImpacts = NormalizeVexImpacts(vexImpacts);
Reachability = reachability;
EvidenceWeightedScore = evidenceWeightedScore;
BudgetCheck = budgetCheck;
Metadata = NormalizeMetadata(metadata);
}
@@ -77,6 +81,19 @@ public sealed record VerdictPredicate
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public VerdictReachability? Reachability { get; }
/// <summary>
/// Evidence-weighted score decomposition for scoring transparency.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public VerdictEvidenceWeightedScore? EvidenceWeightedScore { get; }
/// <summary>
/// Budget check information for unknown budget enforcement.
/// Captures the budget configuration and result at decision time.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public VerdictBudgetCheck? BudgetCheck { get; }
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)]
public ImmutableSortedDictionary<string, string> Metadata { get; }

View File

@@ -76,6 +76,9 @@ public sealed class VerdictPredicateBuilder
// Extract reachability (if present in metadata)
var reachability = ExtractReachability(trace);
// Extract evidence-weighted score (if present)
var evidenceWeightedScore = VerdictEvidenceWeightedScore.FromEwsResult(trace.EvidenceWeightedScore);
// Build metadata with determinism hash
var metadata = BuildMetadata(trace, evidence);
@@ -91,6 +94,7 @@ public sealed class VerdictPredicateBuilder
evidence: evidence,
vexImpacts: vexImpacts,
reachability: reachability,
evidenceWeightedScore: evidenceWeightedScore,
metadata: metadata
);
}
@@ -249,6 +253,8 @@ public sealed class VerdictPredicateBuilder
evidence: evidence,
vexImpacts: null,
reachability: null,
evidenceWeightedScore: null,
budgetCheck: null,
metadata: null
);