feat(rate-limiting): Implement core rate limiting functionality with configuration, decision-making, metrics, middleware, and service registration

- Add RateLimitConfig for configuration management with YAML binding support.
- Introduce RateLimitDecision to encapsulate the result of rate limit checks.
- Implement RateLimitMetrics for OpenTelemetry metrics tracking.
- Create RateLimitMiddleware for enforcing rate limits on incoming requests.
- Develop RateLimitService to orchestrate instance and environment rate limit checks.
- Add RateLimitServiceCollectionExtensions for dependency injection registration.
This commit is contained in:
master
2025-12-17 18:02:37 +02:00
parent 394b57f6bf
commit 8bbfe4d2d2
211 changed files with 47179 additions and 1590 deletions

View File

@@ -0,0 +1,266 @@
// -----------------------------------------------------------------------------
// ProofAwareScoringEngine.cs
// Sprint: SPRINT_3401_0002_0001_score_replay_proof_bundle
// Task: SCORE-REPLAY-004 - Integrate ProofLedger into RiskScoring.Score()
// Description: Decorator that emits proof ledger nodes during scoring
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Logging;
using StellaOps.Policy.Scoring;
namespace StellaOps.Policy.Engine.Scoring.Engines;
/// <summary>
/// Decorator that wraps a scoring engine and emits proof ledger nodes.
/// Per advisory "Determinism and Reproducibility Technical Reference" §11.2.
/// </summary>
public sealed class ProofAwareScoringEngine : IScoringEngine
{
private readonly IScoringEngine _inner;
private readonly ILogger<ProofAwareScoringEngine> _logger;
private readonly ProofAwareScoringOptions _options;
public ProofAwareScoringEngine(
IScoringEngine inner,
ILogger<ProofAwareScoringEngine> logger,
ProofAwareScoringOptions? options = null)
{
_inner = inner ?? throw new ArgumentNullException(nameof(inner));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options ?? ProofAwareScoringOptions.Default;
}
public ScoringProfile Profile => _inner.Profile;
public async Task<ScoringEngineResult> ScoreAsync(
ScoringInput input,
ScorePolicy policy,
CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(input);
ArgumentNullException.ThrowIfNull(policy);
// Initialize proof ledger for this scoring run
var ledger = new ProofLedger();
var seed = GenerateSeed(input);
var nodeCounter = 0;
// Emit input nodes for each scoring factor
EmitInputNodes(ledger, input, seed, ref nodeCounter);
// Delegate to inner engine
var result = await _inner.ScoreAsync(input, policy, ct);
// Emit delta nodes for each signal contribution
EmitDeltaNodes(ledger, result, input.AsOf, seed, ref nodeCounter);
// Emit final score node
var finalNode = ProofNode.CreateScore(
id: $"node-{nodeCounter++:D4}",
ruleId: "FINAL_SCORE",
actor: $"scoring-engine:{Profile.ToString().ToLowerInvariant()}",
tsUtc: input.AsOf,
seed: seed,
finalScore: result.FinalScore / 100.0,
parentIds: Enumerable.Range(0, nodeCounter - 1).Select(i => $"node-{i:D4}").TakeLast(5).ToArray());
ledger.Append(finalNode);
// Compute root hash
var rootHash = ledger.RootHash();
_logger.LogDebug(
"Proof ledger for {FindingId}: {NodeCount} nodes, rootHash={RootHash}",
input.FindingId, ledger.Count, rootHash);
// Attach proof ledger to result via extension
var proofResult = result.WithProofLedger(ledger, rootHash);
return proofResult;
}
private void EmitInputNodes(
ProofLedger ledger,
ScoringInput input,
byte[] seed,
ref int nodeCounter)
{
var ts = input.AsOf;
// CVSS input
ledger.Append(ProofNode.CreateInput(
id: $"node-{nodeCounter++:D4}",
ruleId: "CVSS_BASE",
actor: "scoring-input",
tsUtc: ts,
seed: seed,
initialValue: (double)input.CvssBase,
evidenceRefs: input.InputDigests?.TryGetValue("cvss", out var cvssDigest) == true
? [cvssDigest]
: []));
// Reachability input
var reachValue = input.Reachability.AdvancedScore ?? (input.Reachability.HopCount.HasValue ? 1.0 : 0.0);
ledger.Append(ProofNode.CreateInput(
id: $"node-{nodeCounter++:D4}",
ruleId: "REACHABILITY",
actor: "scoring-input",
tsUtc: ts.AddTicks(1),
seed: seed,
initialValue: reachValue,
evidenceRefs: input.InputDigests?.TryGetValue("reachability", out var reachDigest) == true
? [reachDigest]
: []));
// Evidence input
var evidenceValue = input.Evidence.AdvancedScore ?? (input.Evidence.Types.Count > 0 ? 0.5 : 0.0);
ledger.Append(ProofNode.CreateInput(
id: $"node-{nodeCounter++:D4}",
ruleId: "EVIDENCE",
actor: "scoring-input",
tsUtc: ts.AddTicks(2),
seed: seed,
initialValue: evidenceValue,
evidenceRefs: input.InputDigests?.TryGetValue("evidence", out var evidenceDigest) == true
? [evidenceDigest]
: []));
// Provenance input
var provValue = (int)input.Provenance.Level / 4.0; // Normalize to 0-1
ledger.Append(ProofNode.CreateInput(
id: $"node-{nodeCounter++:D4}",
ruleId: "PROVENANCE",
actor: "scoring-input",
tsUtc: ts.AddTicks(3),
seed: seed,
initialValue: provValue,
evidenceRefs: input.InputDigests?.TryGetValue("provenance", out var provDigest) == true
? [provDigest]
: []));
// KEV input
if (input.IsKnownExploited)
{
ledger.Append(ProofNode.CreateInput(
id: $"node-{nodeCounter++:D4}",
ruleId: "KEV_FLAG",
actor: "scoring-input",
tsUtc: ts.AddTicks(4),
seed: seed,
initialValue: 1.0));
}
}
private void EmitDeltaNodes(
ProofLedger ledger,
ScoringEngineResult result,
DateTimeOffset ts,
byte[] seed,
ref int nodeCounter)
{
var runningTotal = 0.0;
var inputNodeIds = Enumerable.Range(0, nodeCounter).Select(i => $"node-{i:D4}").ToList();
foreach (var (signal, contribution) in result.SignalContributions.OrderBy(x => x.Key))
{
var delta = contribution / 100.0; // Normalize to 0-1 scale
runningTotal += delta;
ledger.Append(ProofNode.CreateDelta(
id: $"node-{nodeCounter++:D4}",
ruleId: $"WEIGHT_{signal.ToUpperInvariant()}",
actor: $"scoring-engine:{Profile.ToString().ToLowerInvariant()}",
tsUtc: ts.AddMilliseconds(nodeCounter),
seed: seed,
delta: delta,
newTotal: Math.Clamp(runningTotal, 0, 1),
parentIds: inputNodeIds.Take(4).ToArray()));
}
}
private static byte[] GenerateSeed(ScoringInput input)
{
// Generate deterministic seed from input digests
using var sha256 = System.Security.Cryptography.SHA256.Create();
var inputString = $"{input.FindingId}:{input.TenantId}:{input.ProfileId}:{input.AsOf:O}";
foreach (var kvp in input.InputDigests?.OrderBy(x => x.Key) ?? [])
{
inputString += $":{kvp.Key}={kvp.Value}";
}
return sha256.ComputeHash(System.Text.Encoding.UTF8.GetBytes(inputString));
}
}
/// <summary>
/// Options for proof-aware scoring.
/// </summary>
public sealed class ProofAwareScoringOptions
{
/// <summary>
/// Default options.
/// </summary>
public static readonly ProofAwareScoringOptions Default = new();
/// <summary>
/// Whether to emit detailed delta nodes for each signal.
/// </summary>
public bool EmitDetailedDeltas { get; init; } = true;
/// <summary>
/// Whether to include evidence references in nodes.
/// </summary>
public bool IncludeEvidenceRefs { get; init; } = true;
}
/// <summary>
/// Extension methods for scoring results with proof ledgers.
/// </summary>
public static class ScoringResultProofExtensions
{
private static readonly System.Runtime.CompilerServices.ConditionalWeakTable<ScoringEngineResult, ProofLedgerAttachment>
_proofAttachments = new();
/// <summary>
/// Attach a proof ledger to a scoring result.
/// </summary>
public static ScoringEngineResult WithProofLedger(
this ScoringEngineResult result,
ProofLedger ledger,
string rootHash)
{
_proofAttachments.Add(result, new ProofLedgerAttachment(ledger, rootHash));
return result;
}
/// <summary>
/// Get the attached proof ledger from a scoring result.
/// </summary>
public static ProofLedger? GetProofLedger(this ScoringEngineResult result)
{
return _proofAttachments.TryGetValue(result, out var attachment)
? attachment.Ledger
: null;
}
/// <summary>
/// Get the proof root hash from a scoring result.
/// </summary>
public static string? GetProofRootHash(this ScoringEngineResult result)
{
return _proofAttachments.TryGetValue(result, out var attachment)
? attachment.RootHash
: null;
}
/// <summary>
/// Check if a scoring result has a proof ledger attached.
/// </summary>
public static bool HasProofLedger(this ScoringEngineResult result)
{
return _proofAttachments.TryGetValue(result, out _);
}
private sealed record ProofLedgerAttachment(ProofLedger Ledger, string RootHash);
}

View File

@@ -2,6 +2,18 @@ using System.Collections.Immutable;
namespace StellaOps.Policy;
/// <summary>
/// Configuration for policy-based risk scoring.
/// </summary>
/// <param name="Version">Configuration version.</param>
/// <param name="SeverityWeights">Weight multipliers per severity level.</param>
/// <param name="QuietPenalty">Score penalty for quiet-mode findings.</param>
/// <param name="WarnPenalty">Score penalty for warn-mode findings.</param>
/// <param name="IgnorePenalty">Score penalty for ignored findings.</param>
/// <param name="TrustOverrides">Trust adjustments by source.</param>
/// <param name="ReachabilityBuckets">Weights per reachability tier.</param>
/// <param name="UnknownConfidence">Configuration for unknown handling.</param>
/// <param name="SmartDiff">Optional Smart-Diff scoring configuration.</param>
public sealed record PolicyScoringConfig(
string Version,
ImmutableDictionary<PolicySeverity, double> SeverityWeights,
@@ -10,9 +22,53 @@ public sealed record PolicyScoringConfig(
double IgnorePenalty,
ImmutableDictionary<string, double> TrustOverrides,
ImmutableDictionary<string, double> ReachabilityBuckets,
PolicyUnknownConfidenceConfig UnknownConfidence)
PolicyUnknownConfidenceConfig UnknownConfidence,
SmartDiffPolicyScoringConfig? SmartDiff = null)
{
public static string BaselineVersion => "1.0";
public static PolicyScoringConfig Default { get; } = PolicyScoringConfigBinder.LoadDefault();
}
/// <summary>
/// Smart-Diff scoring configuration integrated into policy scoring.
/// Sprint: SPRINT_3500_0004_0001
/// Task: SDIFF-BIN-020 - Add config to PolicyScoringConfig
/// </summary>
public sealed record SmartDiffPolicyScoringConfig(
/// <summary>Weight for reachability flip from unreachable to reachable.</summary>
double ReachabilityFlipUpWeight = 1.0,
/// <summary>Weight for reachability flip from reachable to unreachable.</summary>
double ReachabilityFlipDownWeight = 0.8,
/// <summary>Weight for VEX status flip to affected.</summary>
double VexFlipToAffectedWeight = 0.9,
/// <summary>Weight for VEX status flip to not_affected.</summary>
double VexFlipToNotAffectedWeight = 0.7,
/// <summary>Weight for entering affected version range.</summary>
double RangeEntryWeight = 0.8,
/// <summary>Weight for exiting affected version range.</summary>
double RangeExitWeight = 0.6,
/// <summary>Weight for KEV addition.</summary>
double KevAddedWeight = 1.0,
/// <summary>EPSS threshold for significance.</summary>
double EpssThreshold = 0.1,
/// <summary>Weight for EPSS threshold crossing.</summary>
double EpssThresholdCrossWeight = 0.5,
/// <summary>Weight for hardening regression.</summary>
double HardeningRegressionWeight = 0.7,
/// <summary>Weight for hardening improvement.</summary>
double HardeningImprovementWeight = 0.3,
/// <summary>Minimum hardening score drop to flag as regression.</summary>
double HardeningRegressionThreshold = 0.1)
{
/// <summary>Default Smart-Diff policy configuration.</summary>
public static SmartDiffPolicyScoringConfig Default { get; } = new();
/// <summary>Strict configuration with higher weights for regressions.</summary>
public static SmartDiffPolicyScoringConfig Strict { get; } = new(
ReachabilityFlipUpWeight: 1.2,
VexFlipToAffectedWeight: 1.1,
KevAddedWeight: 1.5,
HardeningRegressionWeight: 1.0,
HardeningRegressionThreshold: 0.05);
}

View File

@@ -0,0 +1,147 @@
// -----------------------------------------------------------------------------
// ProofHashing.cs
// Sprint: SPRINT_3401_0002_0001_score_replay_proof_bundle
// Task: SCORE-REPLAY-002 - Implement ProofHashing with per-node canonical hash
// Description: Deterministic hashing for proof nodes and root hash computation
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
namespace StellaOps.Policy.Scoring;
/// <summary>
/// Provides deterministic hashing functions for proof nodes.
/// Per advisory "Determinism and Reproducibility Technical Reference" §11.2.
/// </summary>
public static class ProofHashing
{
// JSON serializer options for canonical JSON output
private static readonly JsonSerializerOptions CanonicalJsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false,
DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.Never
};
/// <summary>
/// Compute and attach the node hash to a ProofNode.
/// The hash is computed over the canonical JSON representation excluding the NodeHash field.
/// </summary>
/// <param name="node">The proof node to hash.</param>
/// <returns>A new ProofNode with the NodeHash field populated.</returns>
public static ProofNode WithHash(ProofNode node)
{
ArgumentNullException.ThrowIfNull(node);
var canonical = CanonicalizeNode(node);
var hash = ComputeSha256Hex(canonical);
return node with { NodeHash = $"sha256:{hash}" };
}
/// <summary>
/// Compute the root hash over an ordered sequence of proof nodes.
/// The root hash is the SHA-256 of the canonical JSON array of node hashes.
/// </summary>
/// <param name="nodesInOrder">The proof nodes in deterministic order.</param>
/// <returns>The root hash as "sha256:&lt;hex&gt;".</returns>
public static string ComputeRootHash(IEnumerable<ProofNode> nodesInOrder)
{
ArgumentNullException.ThrowIfNull(nodesInOrder);
var hashes = nodesInOrder.Select(n => n.NodeHash).ToArray();
var canonical = CanonicalizeArray(hashes);
var hash = ComputeSha256Hex(canonical);
return $"sha256:{hash}";
}
/// <summary>
/// Verify that a node's hash is correct.
/// </summary>
/// <param name="node">The node to verify.</param>
/// <returns>True if the hash is valid, false otherwise.</returns>
public static bool VerifyNodeHash(ProofNode node)
{
ArgumentNullException.ThrowIfNull(node);
if (string.IsNullOrEmpty(node.NodeHash))
return false;
var computed = WithHash(node with { NodeHash = string.Empty });
return node.NodeHash.Equals(computed.NodeHash, StringComparison.Ordinal);
}
/// <summary>
/// Verify that the root hash matches the nodes.
/// </summary>
/// <param name="nodesInOrder">The proof nodes in order.</param>
/// <param name="expectedRootHash">The expected root hash.</param>
/// <returns>True if the root hash matches, false otherwise.</returns>
public static bool VerifyRootHash(IEnumerable<ProofNode> nodesInOrder, string expectedRootHash)
{
ArgumentNullException.ThrowIfNull(nodesInOrder);
var computed = ComputeRootHash(nodesInOrder);
return computed.Equals(expectedRootHash, StringComparison.Ordinal);
}
#region Canonical JSON Helpers
/// <summary>
/// Create canonical JSON representation of a proof node (excluding NodeHash).
/// Keys are sorted alphabetically for determinism.
/// </summary>
private static byte[] CanonicalizeNode(ProofNode node)
{
// Build a sorted object for canonical representation
// Note: We explicitly exclude NodeHash from the canonical form
var obj = new SortedDictionary<string, object?>(StringComparer.Ordinal)
{
["actor"] = node.Actor,
["delta"] = node.Delta,
["evidenceRefs"] = node.EvidenceRefs,
["id"] = node.Id,
["kind"] = node.Kind.ToString().ToLowerInvariant(),
["parentIds"] = node.ParentIds,
["ruleId"] = node.RuleId,
["seed"] = Convert.ToBase64String(node.Seed),
["total"] = node.Total,
["tsUtc"] = node.TsUtc.ToUniversalTime().ToString("O")
};
return SerializeCanonical(obj);
}
/// <summary>
/// Create canonical JSON representation of a string array.
/// </summary>
private static byte[] CanonicalizeArray(string[] values)
{
return SerializeCanonical(values);
}
/// <summary>
/// Serialize an object to canonical JSON bytes (no whitespace, sorted keys).
/// </summary>
private static byte[] SerializeCanonical(object obj)
{
// Use JsonNode for better control over serialization
var json = JsonSerializer.Serialize(obj, CanonicalJsonOptions);
return Encoding.UTF8.GetBytes(json);
}
/// <summary>
/// Compute SHA-256 hash and return as lowercase hex string.
/// </summary>
private static string ComputeSha256Hex(byte[] data)
{
var hash = SHA256.HashData(data);
return Convert.ToHexStringLower(hash);
}
#endregion
}

View File

@@ -0,0 +1,197 @@
// -----------------------------------------------------------------------------
// ProofLedger.cs
// Sprint: SPRINT_3401_0002_0001_score_replay_proof_bundle
// Task: SCORE-REPLAY-003 - Implement ProofLedger with deterministic append
// Description: Append-only ledger for score proof nodes with root hash computation
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Scoring;
/// <summary>
/// Append-only ledger for score proof nodes.
/// Provides deterministic root hash computation for audit and replay.
/// Per advisory "Determinism and Reproducibility Technical Reference" §11.2.
/// </summary>
public sealed class ProofLedger
{
private readonly List<ProofNode> _nodes = [];
private readonly object _lock = new();
private string? _cachedRootHash;
/// <summary>
/// The ordered list of proof nodes in the ledger.
/// </summary>
public IReadOnlyList<ProofNode> Nodes => _nodes.AsReadOnly();
/// <summary>
/// The number of nodes in the ledger.
/// </summary>
public int Count => _nodes.Count;
/// <summary>
/// Append a proof node to the ledger.
/// The node hash will be computed and attached automatically.
/// </summary>
/// <param name="node">The node to append.</param>
/// <exception cref="ArgumentNullException">If node is null.</exception>
public void Append(ProofNode node)
{
ArgumentNullException.ThrowIfNull(node);
lock (_lock)
{
// Compute hash if not already computed
var hashedNode = string.IsNullOrEmpty(node.NodeHash)
? ProofHashing.WithHash(node)
: node;
_nodes.Add(hashedNode);
_cachedRootHash = null; // Invalidate cache
}
}
/// <summary>
/// Append multiple proof nodes to the ledger in order.
/// </summary>
/// <param name="nodes">The nodes to append.</param>
public void AppendRange(IEnumerable<ProofNode> nodes)
{
ArgumentNullException.ThrowIfNull(nodes);
lock (_lock)
{
foreach (var node in nodes)
{
var hashedNode = string.IsNullOrEmpty(node.NodeHash)
? ProofHashing.WithHash(node)
: node;
_nodes.Add(hashedNode);
}
_cachedRootHash = null; // Invalidate cache
}
}
/// <summary>
/// Compute the root hash of the ledger.
/// The root hash is deterministic given the same nodes in the same order.
/// </summary>
/// <returns>The root hash as "sha256:&lt;hex&gt;".</returns>
public string RootHash()
{
lock (_lock)
{
_cachedRootHash ??= ProofHashing.ComputeRootHash(_nodes);
return _cachedRootHash;
}
}
/// <summary>
/// Verify that all node hashes in the ledger are valid.
/// </summary>
/// <returns>True if all hashes are valid, false otherwise.</returns>
public bool VerifyIntegrity()
{
lock (_lock)
{
return _nodes.All(ProofHashing.VerifyNodeHash);
}
}
/// <summary>
/// Get a snapshot of the ledger as an immutable list.
/// </summary>
/// <returns>An immutable copy of the nodes.</returns>
public ImmutableList<ProofNode> ToImmutableSnapshot()
{
lock (_lock)
{
return [.. _nodes];
}
}
/// <summary>
/// Serialize the ledger to JSON.
/// </summary>
/// <param name="options">Optional JSON serializer options.</param>
/// <returns>The JSON representation of the ledger.</returns>
public string ToJson(JsonSerializerOptions? options = null)
{
lock (_lock)
{
var payload = new ProofLedgerPayload(
Nodes: [.. _nodes],
RootHash: RootHash(),
CreatedAtUtc: DateTimeOffset.UtcNow);
return JsonSerializer.Serialize(payload, options ?? DefaultJsonOptions);
}
}
/// <summary>
/// Deserialize a ledger from JSON and verify integrity.
/// </summary>
/// <param name="json">The JSON string.</param>
/// <param name="options">Optional JSON serializer options.</param>
/// <returns>The deserialized ledger.</returns>
/// <exception cref="InvalidOperationException">If integrity verification fails.</exception>
public static ProofLedger FromJson(string json, JsonSerializerOptions? options = null)
{
var payload = JsonSerializer.Deserialize<ProofLedgerPayload>(json, options ?? DefaultJsonOptions)
?? throw new InvalidOperationException("Failed to deserialize proof ledger");
var ledger = new ProofLedger();
// Add nodes directly without recomputing hashes
foreach (var node in payload.Nodes)
{
ledger._nodes.Add(node);
}
// Verify integrity
if (!ledger.VerifyIntegrity())
{
throw new InvalidOperationException("Proof ledger integrity check failed: node hashes do not match");
}
// Verify root hash
if (!ProofHashing.VerifyRootHash(ledger._nodes, payload.RootHash))
{
throw new InvalidOperationException("Proof ledger integrity check failed: root hash does not match");
}
return ledger;
}
/// <summary>
/// Create a new ledger from an existing sequence of nodes.
/// Useful for replay scenarios.
/// </summary>
/// <param name="nodes">The nodes to populate the ledger with.</param>
/// <returns>A new ledger containing the nodes.</returns>
public static ProofLedger FromNodes(IEnumerable<ProofNode> nodes)
{
var ledger = new ProofLedger();
ledger.AppendRange(nodes);
return ledger;
}
private static readonly JsonSerializerOptions DefaultJsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
};
}
/// <summary>
/// JSON payload for proof ledger serialization.
/// </summary>
internal sealed record ProofLedgerPayload(
[property: JsonPropertyName("nodes")] ImmutableArray<ProofNode> Nodes,
[property: JsonPropertyName("rootHash")] string RootHash,
[property: JsonPropertyName("createdAtUtc")] DateTimeOffset CreatedAtUtc);

View File

@@ -0,0 +1,167 @@
// -----------------------------------------------------------------------------
// ProofNode.cs
// Sprint: SPRINT_3401_0002_0001_score_replay_proof_bundle
// Task: SCORE-REPLAY-001 - Implement ProofNode record and ProofNodeKind enum
// Description: Proof ledger node types for score replay and audit trails
// -----------------------------------------------------------------------------
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Scoring;
/// <summary>
/// The type of proof ledger node.
/// Per advisory "Building a Deeper Moat Beyond Reachability" §11.2.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<ProofNodeKind>))]
public enum ProofNodeKind
{
/// <summary>Input node - captures initial scoring inputs.</summary>
[JsonStringEnumMemberName("input")]
Input,
/// <summary>Transform node - records a transformation/calculation step.</summary>
[JsonStringEnumMemberName("transform")]
Transform,
/// <summary>Delta node - records a scoring delta applied.</summary>
[JsonStringEnumMemberName("delta")]
Delta,
/// <summary>Score node - final score output.</summary>
[JsonStringEnumMemberName("score")]
Score
}
/// <summary>
/// A single node in the score proof ledger.
/// Each node represents a discrete step in the scoring process with cryptographic linking.
/// Per advisory "Determinism and Reproducibility Technical Reference" §11.2.
/// </summary>
/// <param name="Id">Unique identifier for this node (e.g., UUID or sequential).</param>
/// <param name="Kind">The type of proof node.</param>
/// <param name="RuleId">The rule or policy ID that generated this node.</param>
/// <param name="ParentIds">IDs of parent nodes this node depends on (for graph structure).</param>
/// <param name="EvidenceRefs">Digests or references to evidence artifacts in the bundle.</param>
/// <param name="Delta">Scoring delta applied (0 for non-Delta nodes).</param>
/// <param name="Total">Running total score at this node.</param>
/// <param name="Actor">Module or component name that created this node.</param>
/// <param name="TsUtc">Timestamp in UTC when the node was created.</param>
/// <param name="Seed">32-byte seed for deterministic replay.</param>
/// <param name="NodeHash">SHA-256 hash over canonical node (excluding NodeHash itself).</param>
public sealed record ProofNode(
[property: JsonPropertyName("id")] string Id,
[property: JsonPropertyName("kind")] ProofNodeKind Kind,
[property: JsonPropertyName("ruleId")] string RuleId,
[property: JsonPropertyName("parentIds")] string[] ParentIds,
[property: JsonPropertyName("evidenceRefs")] string[] EvidenceRefs,
[property: JsonPropertyName("delta")] double Delta,
[property: JsonPropertyName("total")] double Total,
[property: JsonPropertyName("actor")] string Actor,
[property: JsonPropertyName("tsUtc")] DateTimeOffset TsUtc,
[property: JsonPropertyName("seed")] byte[] Seed,
[property: JsonPropertyName("nodeHash")] string NodeHash)
{
/// <summary>
/// Create a new ProofNode with default values for optional properties.
/// </summary>
public static ProofNode Create(
string id,
ProofNodeKind kind,
string ruleId,
string actor,
DateTimeOffset tsUtc,
byte[] seed,
double delta = 0.0,
double total = 0.0,
string[]? parentIds = null,
string[]? evidenceRefs = null)
{
return new ProofNode(
Id: id,
Kind: kind,
RuleId: ruleId,
ParentIds: parentIds ?? [],
EvidenceRefs: evidenceRefs ?? [],
Delta: delta,
Total: total,
Actor: actor,
TsUtc: tsUtc,
Seed: seed,
NodeHash: string.Empty // Will be computed by ProofHashing.WithHash
);
}
/// <summary>
/// Create an input node capturing initial scoring inputs.
/// </summary>
public static ProofNode CreateInput(
string id,
string ruleId,
string actor,
DateTimeOffset tsUtc,
byte[] seed,
double initialValue,
string[]? evidenceRefs = null)
{
return Create(
id: id,
kind: ProofNodeKind.Input,
ruleId: ruleId,
actor: actor,
tsUtc: tsUtc,
seed: seed,
total: initialValue,
evidenceRefs: evidenceRefs);
}
/// <summary>
/// Create a delta node recording a scoring adjustment.
/// </summary>
public static ProofNode CreateDelta(
string id,
string ruleId,
string actor,
DateTimeOffset tsUtc,
byte[] seed,
double delta,
double newTotal,
string[] parentIds,
string[]? evidenceRefs = null)
{
return Create(
id: id,
kind: ProofNodeKind.Delta,
ruleId: ruleId,
actor: actor,
tsUtc: tsUtc,
seed: seed,
delta: delta,
total: newTotal,
parentIds: parentIds,
evidenceRefs: evidenceRefs);
}
/// <summary>
/// Create a final score node.
/// </summary>
public static ProofNode CreateScore(
string id,
string ruleId,
string actor,
DateTimeOffset tsUtc,
byte[] seed,
double finalScore,
string[] parentIds)
{
return Create(
id: id,
kind: ProofNodeKind.Score,
ruleId: ruleId,
actor: actor,
tsUtc: tsUtc,
seed: seed,
total: finalScore,
parentIds: parentIds);
}
}

View File

@@ -0,0 +1,364 @@
// -----------------------------------------------------------------------------
// ProofLedgerDeterminismTests.cs
// Sprint: SPRINT_3401_0002_0001_score_replay_proof_bundle
// Task: SCORE-REPLAY-012 - Unit tests for ProofLedger determinism
// Description: Verifies that proof ledger produces identical hashes across runs
// -----------------------------------------------------------------------------
using StellaOps.Policy.Scoring;
using Xunit;
namespace StellaOps.Policy.Scoring.Tests;
/// <summary>
/// Tests for ProofLedger determinism and hash stability.
/// </summary>
public sealed class ProofLedgerDeterminismTests
{
private static readonly byte[] TestSeed = new byte[32];
private static readonly DateTimeOffset FixedTimestamp = new(2025, 12, 17, 12, 0, 0, TimeSpan.Zero);
[Fact]
public void RootHash_SameNodesInSameOrder_ProducesIdenticalHash()
{
// Arrange
var nodes = CreateTestNodes(count: 5);
var ledger1 = new ProofLedger();
var ledger2 = new ProofLedger();
// Act
foreach (var node in nodes)
{
ledger1.Append(node);
ledger2.Append(node);
}
// Assert
Assert.Equal(ledger1.RootHash(), ledger2.RootHash());
}
[Fact]
public void RootHash_MultipleCallsOnSameLedger_ReturnsSameHash()
{
// Arrange
var ledger = new ProofLedger();
foreach (var node in CreateTestNodes(count: 3))
{
ledger.Append(node);
}
// Act
var hash1 = ledger.RootHash();
var hash2 = ledger.RootHash();
var hash3 = ledger.RootHash();
// Assert
Assert.Equal(hash1, hash2);
Assert.Equal(hash2, hash3);
}
[Fact]
public void RootHash_DifferentNodeOrder_ProducesDifferentHash()
{
// Arrange
var node1 = ProofNode.Create("id-1", ProofNodeKind.Input, "rule-1", "actor", FixedTimestamp, TestSeed, delta: 0.1, total: 0.1);
var node2 = ProofNode.Create("id-2", ProofNodeKind.Transform, "rule-2", "actor", FixedTimestamp, TestSeed, delta: 0.2, total: 0.3);
var ledger1 = new ProofLedger();
ledger1.Append(node1);
ledger1.Append(node2);
var ledger2 = new ProofLedger();
ledger2.Append(node2);
ledger2.Append(node1);
// Act
var hash1 = ledger1.RootHash();
var hash2 = ledger2.RootHash();
// Assert
Assert.NotEqual(hash1, hash2);
}
[Fact]
public void RootHash_DifferentNodeContent_ProducesDifferentHash()
{
// Arrange
var node1a = ProofNode.Create("id-1", ProofNodeKind.Input, "rule-1", "actor", FixedTimestamp, TestSeed, delta: 0.1, total: 0.1);
var node1b = ProofNode.Create("id-1", ProofNodeKind.Input, "rule-1", "actor", FixedTimestamp, TestSeed, delta: 0.2, total: 0.2); // Different delta
var ledger1 = new ProofLedger();
ledger1.Append(node1a);
var ledger2 = new ProofLedger();
ledger2.Append(node1b);
// Act
var hash1 = ledger1.RootHash();
var hash2 = ledger2.RootHash();
// Assert
Assert.NotEqual(hash1, hash2);
}
[Fact]
public void AppendRange_ProducesSameHashAsIndividualAppends()
{
// Arrange
var nodes = CreateTestNodes(count: 4);
var ledger1 = new ProofLedger();
foreach (var node in nodes)
{
ledger1.Append(node);
}
var ledger2 = new ProofLedger();
ledger2.AppendRange(nodes);
// Act & Assert
Assert.Equal(ledger1.RootHash(), ledger2.RootHash());
}
[Fact]
public void VerifyIntegrity_ValidLedger_ReturnsTrue()
{
// Arrange
var ledger = new ProofLedger();
foreach (var node in CreateTestNodes(count: 3))
{
ledger.Append(node);
}
// Act & Assert
Assert.True(ledger.VerifyIntegrity());
}
[Fact]
public void ToImmutableSnapshot_ReturnsCorrectNodes()
{
// Arrange
var nodes = CreateTestNodes(count: 3);
var ledger = new ProofLedger();
ledger.AppendRange(nodes);
// Act
var snapshot = ledger.ToImmutableSnapshot();
// Assert
Assert.Equal(nodes.Length, snapshot.Count);
for (int i = 0; i < nodes.Length; i++)
{
Assert.Equal(nodes[i].Id, snapshot[i].Id);
Assert.Equal(nodes[i].Kind, snapshot[i].Kind);
Assert.Equal(nodes[i].Delta, snapshot[i].Delta);
}
}
[Fact]
public void ToJson_ProducesValidJson()
{
// Arrange
var ledger = new ProofLedger();
foreach (var node in CreateTestNodes(count: 2))
{
ledger.Append(node);
}
// Act
var json = ledger.ToJson();
// Assert
Assert.NotNull(json);
Assert.Contains("nodes", json);
Assert.Contains("rootHash", json);
Assert.Contains("sha256:", json);
}
[Fact]
public void FromJson_RoundTrip_PreservesIntegrity()
{
// Arrange
var ledger = new ProofLedger();
foreach (var node in CreateTestNodes(count: 3))
{
ledger.Append(node);
}
var originalHash = ledger.RootHash();
// Act
var json = ledger.ToJson();
var restored = ProofLedger.FromJson(json);
// Assert
Assert.True(restored.VerifyIntegrity());
Assert.Equal(originalHash, restored.RootHash());
}
[Fact]
public void RootHash_EmptyLedger_ProducesConsistentHash()
{
// Arrange
var ledger1 = new ProofLedger();
var ledger2 = new ProofLedger();
// Act
var hash1 = ledger1.RootHash();
var hash2 = ledger2.RootHash();
// Assert
Assert.Equal(hash1, hash2);
Assert.StartsWith("sha256:", hash1);
}
[Fact]
public void NodeHash_SameNodeRecreated_ProducesSameHash()
{
// Arrange
var node1 = ProofNode.Create(
id: "test-id",
kind: ProofNodeKind.Delta,
ruleId: "rule-x",
actor: "scorer",
tsUtc: FixedTimestamp,
seed: TestSeed,
delta: 0.15,
total: 0.45,
parentIds: ["parent-1", "parent-2"],
evidenceRefs: ["sha256:abc123"]);
var node2 = ProofNode.Create(
id: "test-id",
kind: ProofNodeKind.Delta,
ruleId: "rule-x",
actor: "scorer",
tsUtc: FixedTimestamp,
seed: TestSeed,
delta: 0.15,
total: 0.45,
parentIds: ["parent-1", "parent-2"],
evidenceRefs: ["sha256:abc123"]);
// Act
var hashedNode1 = ProofHashing.WithHash(node1);
var hashedNode2 = ProofHashing.WithHash(node2);
// Assert
Assert.Equal(hashedNode1.NodeHash, hashedNode2.NodeHash);
Assert.StartsWith("sha256:", hashedNode1.NodeHash);
}
[Fact]
public void NodeHash_DifferentTimestamp_ProducesDifferentHash()
{
// Arrange
var node1 = ProofNode.Create("id-1", ProofNodeKind.Input, "rule-1", "actor", FixedTimestamp, TestSeed);
var node2 = ProofNode.Create("id-1", ProofNodeKind.Input, "rule-1", "actor", FixedTimestamp.AddSeconds(1), TestSeed);
// Act
var hashedNode1 = ProofHashing.WithHash(node1);
var hashedNode2 = ProofHashing.WithHash(node2);
// Assert
Assert.NotEqual(hashedNode1.NodeHash, hashedNode2.NodeHash);
}
[Fact]
public void VerifyNodeHash_ValidHash_ReturnsTrue()
{
// Arrange
var node = ProofNode.Create("id-1", ProofNodeKind.Input, "rule-1", "actor", FixedTimestamp, TestSeed);
var hashedNode = ProofHashing.WithHash(node);
// Act & Assert
Assert.True(ProofHashing.VerifyNodeHash(hashedNode));
}
[Fact]
public void VerifyRootHash_ValidHash_ReturnsTrue()
{
// Arrange
var ledger = new ProofLedger();
foreach (var node in CreateTestNodes(count: 3))
{
ledger.Append(node);
}
var rootHash = ledger.RootHash();
// Act & Assert
Assert.True(ProofHashing.VerifyRootHash(ledger.Nodes, rootHash));
}
[Fact]
public void VerifyRootHash_TamperedHash_ReturnsFalse()
{
// Arrange
var ledger = new ProofLedger();
foreach (var node in CreateTestNodes(count: 3))
{
ledger.Append(node);
}
var tamperedHash = "sha256:0000000000000000000000000000000000000000000000000000000000000000";
// Act & Assert
Assert.False(ProofHashing.VerifyRootHash(ledger.Nodes, tamperedHash));
}
[Fact]
public void ConcurrentAppends_ProduceDeterministicOrder()
{
// Arrange - run same sequence multiple times
var results = new List<string>();
for (int run = 0; run < 10; run++)
{
var ledger = new ProofLedger();
var nodes = CreateTestNodes(count: 10);
foreach (var node in nodes)
{
ledger.Append(node);
}
results.Add(ledger.RootHash());
}
// Assert - all runs should produce identical hash
Assert.True(results.All(h => h == results[0]));
}
private static ProofNode[] CreateTestNodes(int count)
{
var nodes = new ProofNode[count];
double runningTotal = 0;
for (int i = 0; i < count; i++)
{
var delta = 0.1 * (i + 1);
runningTotal += delta;
var kind = i switch
{
0 => ProofNodeKind.Input,
_ when i == count - 1 => ProofNodeKind.Score,
_ when i % 2 == 0 => ProofNodeKind.Transform,
_ => ProofNodeKind.Delta
};
nodes[i] = ProofNode.Create(
id: $"node-{i:D3}",
kind: kind,
ruleId: $"rule-{i}",
actor: "test-scorer",
tsUtc: FixedTimestamp.AddMilliseconds(i * 100),
seed: TestSeed,
delta: delta,
total: runningTotal,
parentIds: i > 0 ? [$"node-{i - 1:D3}"] : null,
evidenceRefs: [$"sha256:evidence{i:D3}"]);
}
return nodes;
}
}

View File

@@ -0,0 +1,398 @@
// -----------------------------------------------------------------------------
// ProofLedgerTests.cs
// Sprint: SPRINT_3401_0002_0001_score_replay_proof_bundle
// Task: SCORE-REPLAY-012 - Unit tests for ProofLedger determinism
// Description: Tests for proof ledger hash consistency and determinism
// -----------------------------------------------------------------------------
using FluentAssertions;
using StellaOps.Policy.Scoring;
using Xunit;
namespace StellaOps.Policy.Tests.Scoring;
/// <summary>
/// Unit tests for ProofLedger determinism.
/// Validates that same inputs produce identical hashes across runs.
/// </summary>
public class ProofLedgerTests
{
private static readonly byte[] TestSeed = Enumerable.Repeat((byte)0x42, 32).ToArray();
private static readonly DateTimeOffset FixedTimestamp = new(2025, 12, 17, 12, 0, 0, TimeSpan.Zero);
#region ProofNode Hash Tests
[Fact]
public void ProofHashing_WithHash_ComputesConsistentHash()
{
// Arrange
var node = ProofNode.Create(
id: "node-001",
kind: ProofNodeKind.Input,
ruleId: "CVSS_BASE",
actor: "scorer",
tsUtc: FixedTimestamp,
seed: TestSeed,
total: 9.0);
// Act
var hashed1 = ProofHashing.WithHash(node);
var hashed2 = ProofHashing.WithHash(node);
var hashed3 = ProofHashing.WithHash(node);
// Assert - all hashes should be identical
hashed1.NodeHash.Should().StartWith("sha256:");
hashed1.NodeHash.Should().Be(hashed2.NodeHash);
hashed2.NodeHash.Should().Be(hashed3.NodeHash);
}
[Fact]
public void ProofHashing_WithHash_DifferentInputsProduceDifferentHashes()
{
// Arrange
var node1 = ProofNode.Create(
id: "node-001",
kind: ProofNodeKind.Input,
ruleId: "CVSS_BASE",
actor: "scorer",
tsUtc: FixedTimestamp,
seed: TestSeed,
total: 9.0);
var node2 = ProofNode.Create(
id: "node-002", // Different ID
kind: ProofNodeKind.Input,
ruleId: "CVSS_BASE",
actor: "scorer",
tsUtc: FixedTimestamp,
seed: TestSeed,
total: 9.0);
// Act
var hashed1 = ProofHashing.WithHash(node1);
var hashed2 = ProofHashing.WithHash(node2);
// Assert - different inputs = different hashes
hashed1.NodeHash.Should().NotBe(hashed2.NodeHash);
}
[Fact]
public void ProofHashing_VerifyNodeHash_ReturnsTrueForValidHash()
{
// Arrange
var node = ProofNode.Create(
id: "node-001",
kind: ProofNodeKind.Input,
ruleId: "CVSS_BASE",
actor: "scorer",
tsUtc: FixedTimestamp,
seed: TestSeed,
total: 9.0);
var hashed = ProofHashing.WithHash(node);
// Act
var isValid = ProofHashing.VerifyNodeHash(hashed);
// Assert
isValid.Should().BeTrue();
}
[Fact]
public void ProofHashing_VerifyNodeHash_ReturnsFalseForTamperedHash()
{
// Arrange
var node = ProofNode.Create(
id: "node-001",
kind: ProofNodeKind.Input,
ruleId: "CVSS_BASE",
actor: "scorer",
tsUtc: FixedTimestamp,
seed: TestSeed,
total: 9.0);
var hashed = ProofHashing.WithHash(node);
var tampered = hashed with { Total = 8.0 }; // Tamper with the total
// Act
var isValid = ProofHashing.VerifyNodeHash(tampered);
// Assert
isValid.Should().BeFalse();
}
#endregion
#region ProofLedger Determinism Tests
[Fact]
public void ProofLedger_RootHash_IsDeterministic()
{
// Arrange - create identical ledgers
var nodes = CreateTestNodes();
var ledger1 = new ProofLedger();
var ledger2 = new ProofLedger();
var ledger3 = new ProofLedger();
foreach (var node in nodes)
{
ledger1.Append(node);
ledger2.Append(node);
ledger3.Append(node);
}
// Act
var hash1 = ledger1.RootHash();
var hash2 = ledger2.RootHash();
var hash3 = ledger3.RootHash();
// Assert - all root hashes should be identical
hash1.Should().StartWith("sha256:");
hash1.Should().Be(hash2);
hash2.Should().Be(hash3);
}
[Fact]
public void ProofLedger_RootHash_DependsOnNodeOrder()
{
// Arrange - same nodes, different order
var nodes = CreateTestNodes();
var reversedNodes = nodes.Reverse().ToList();
var ledger1 = ProofLedger.FromNodes(nodes);
var ledger2 = ProofLedger.FromNodes(reversedNodes);
// Act
var hash1 = ledger1.RootHash();
var hash2 = ledger2.RootHash();
// Assert - different order = different hash
hash1.Should().NotBe(hash2);
}
[Fact]
public void ProofLedger_RootHash_ChangesWhenNodeAdded()
{
// Arrange
var nodes = CreateTestNodes();
var ledger = ProofLedger.FromNodes(nodes);
var hash1 = ledger.RootHash();
// Act - add another node
ledger.Append(ProofNode.Create(
id: "node-extra",
kind: ProofNodeKind.Score,
ruleId: "FINAL",
actor: "scorer",
tsUtc: FixedTimestamp,
seed: TestSeed,
total: 0.73));
var hash2 = ledger.RootHash();
// Assert
hash2.Should().NotBe(hash1);
}
[Fact]
public void ProofLedger_VerifyIntegrity_ReturnsTrueForValidLedger()
{
// Arrange
var nodes = CreateTestNodes();
var ledger = ProofLedger.FromNodes(nodes);
// Act
var isValid = ledger.VerifyIntegrity();
// Assert
isValid.Should().BeTrue();
}
#endregion
#region Serialization Tests
[Fact]
public void ProofLedger_ToJson_FromJson_RoundTrips()
{
// Arrange
var nodes = CreateTestNodes();
var ledger = ProofLedger.FromNodes(nodes);
var originalRootHash = ledger.RootHash();
// Act
var json = ledger.ToJson();
var restored = ProofLedger.FromJson(json);
// Assert
restored.Count.Should().Be(ledger.Count);
restored.RootHash().Should().Be(originalRootHash);
}
[Fact]
public void ProofLedger_FromJson_ThrowsOnTamperedData()
{
// Arrange
var nodes = CreateTestNodes();
var ledger = ProofLedger.FromNodes(nodes);
var json = ledger.ToJson();
// Tamper with the JSON
var tampered = json.Replace("\"total\":9.0", "\"total\":8.0");
// Act & Assert
var act = () => ProofLedger.FromJson(tampered);
act.Should().Throw<InvalidOperationException>()
.WithMessage("*integrity*");
}
#endregion
#region Score Replay Invariant Tests
[Fact]
public void ScoreReplay_SameInputs_ProducesIdenticalRootHash()
{
// Arrange - simulate score replay scenario
// Same manifest + same seed + same timestamp = identical rootHash
var seed = Enumerable.Repeat((byte)7, 32).ToArray();
var timestamp = new DateTimeOffset(2025, 12, 17, 0, 0, 0, TimeSpan.Zero);
// First scoring run
var ledger1 = SimulateScoring(seed, timestamp, cvssBase: 9.0, epss: 0.50);
// Second scoring run (replay)
var ledger2 = SimulateScoring(seed, timestamp, cvssBase: 9.0, epss: 0.50);
// Third scoring run (replay again)
var ledger3 = SimulateScoring(seed, timestamp, cvssBase: 9.0, epss: 0.50);
// Assert - all root hashes should be bit-identical
ledger1.RootHash().Should().Be(ledger2.RootHash());
ledger2.RootHash().Should().Be(ledger3.RootHash());
}
[Fact]
public void ScoreReplay_DifferentSeed_ProducesDifferentRootHash()
{
// Arrange
var seed1 = Enumerable.Repeat((byte)7, 32).ToArray();
var seed2 = Enumerable.Repeat((byte)8, 32).ToArray();
var timestamp = new DateTimeOffset(2025, 12, 17, 0, 0, 0, TimeSpan.Zero);
// Act
var ledger1 = SimulateScoring(seed1, timestamp, cvssBase: 9.0, epss: 0.50);
var ledger2 = SimulateScoring(seed2, timestamp, cvssBase: 9.0, epss: 0.50);
// Assert
ledger1.RootHash().Should().NotBe(ledger2.RootHash());
}
[Fact]
public void ScoreReplay_DifferentInputs_ProducesDifferentRootHash()
{
// Arrange
var seed = Enumerable.Repeat((byte)7, 32).ToArray();
var timestamp = new DateTimeOffset(2025, 12, 17, 0, 0, 0, TimeSpan.Zero);
// Act
var ledger1 = SimulateScoring(seed, timestamp, cvssBase: 9.0, epss: 0.50);
var ledger2 = SimulateScoring(seed, timestamp, cvssBase: 8.0, epss: 0.50);
// Assert
ledger1.RootHash().Should().NotBe(ledger2.RootHash());
}
#endregion
#region Helper Methods
private static List<ProofNode> CreateTestNodes()
{
return
[
ProofNode.CreateInput(
id: "node-001",
ruleId: "CVSS_BASE",
actor: "scorer",
tsUtc: FixedTimestamp,
seed: TestSeed,
initialValue: 9.0,
evidenceRefs: ["sha256:vuln001"]),
ProofNode.CreateDelta(
id: "node-002",
ruleId: "EPSS_ADJUST",
actor: "scorer",
tsUtc: FixedTimestamp.AddMilliseconds(1),
seed: TestSeed,
delta: -0.5,
newTotal: 8.5,
parentIds: ["node-001"],
evidenceRefs: ["sha256:epss001"]),
ProofNode.CreateScore(
id: "node-003",
ruleId: "FINAL_SCORE",
actor: "scorer",
tsUtc: FixedTimestamp.AddMilliseconds(2),
seed: TestSeed,
finalScore: 0.85,
parentIds: ["node-002"])
];
}
private static ProofLedger SimulateScoring(byte[] seed, DateTimeOffset timestamp, double cvssBase, double epss)
{
var ledger = new ProofLedger();
// Input node - CVSS base score
ledger.Append(ProofNode.CreateInput(
id: "input-cvss",
ruleId: "CVSS_BASE",
actor: "scorer",
tsUtc: timestamp,
seed: seed,
initialValue: cvssBase));
// Input node - EPSS score
ledger.Append(ProofNode.CreateInput(
id: "input-epss",
ruleId: "EPSS_SCORE",
actor: "scorer",
tsUtc: timestamp.AddMilliseconds(1),
seed: seed,
initialValue: epss));
// Delta node - apply EPSS modifier
var epssWeight = 0.3;
var delta = epss * epssWeight;
var total = (cvssBase / 10.0) * (1 - epssWeight) + delta;
ledger.Append(ProofNode.CreateDelta(
id: "delta-epss",
ruleId: "EPSS_WEIGHT",
actor: "scorer",
tsUtc: timestamp.AddMilliseconds(2),
seed: seed,
delta: delta,
newTotal: total,
parentIds: ["input-cvss", "input-epss"]));
// Final score node
ledger.Append(ProofNode.CreateScore(
id: "score-final",
ruleId: "FINAL",
actor: "scorer",
tsUtc: timestamp.AddMilliseconds(3),
seed: seed,
finalScore: Math.Round(total, 2),
parentIds: ["delta-epss"]));
return ledger;
}
#endregion
}