Partially implemented or unimplemented features — now implemented

This commit is contained in:
master
2026-02-09 08:53:51 +02:00
parent 1bf6bbf395
commit 4bdc298ec1
674 changed files with 90194 additions and 2271 deletions

View File

@@ -0,0 +1,114 @@
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using StellaOps.Policy.Determinization.Models;
namespace StellaOps.Policy.Determinization.Scoring;
/// <summary>
/// Combined score result that integrates impact and uncertainty scores.
/// Immutable output of <see cref="ICombinedImpactCalculator.Calculate"/>.
/// </summary>
public sealed record CombinedImpactScore
{
    /// <summary>Impact score from the multi-factor impact calculation.</summary>
    [JsonPropertyName("impact")]
    public required ImpactScore Impact { get; init; }

    /// <summary>Uncertainty score from the entropy calculation.</summary>
    [JsonPropertyName("uncertainty")]
    public required UncertaintyScore Uncertainty { get; init; }

    /// <summary>
    /// Effective priority score combining impact and uncertainty.
    /// Higher uncertainty reduces the effective priority.
    /// Formula: impact * (1 - uncertainty_entropy * uncertainty_penalty_factor),
    /// clamped to [0, 1] by the calculator.
    /// </summary>
    [JsonPropertyName("effective_priority")]
    public required double EffectivePriority { get; init; }

    /// <summary>
    /// Basis-points representation of <see cref="EffectivePriority"/> (0-10000),
    /// suitable for deterministic integer storage and comparison.
    /// </summary>
    [JsonPropertyName("effective_priority_basis_points")]
    public required int EffectivePriorityBasisPoints { get; init; }

    /// <summary>When this combined score was calculated (UTC).</summary>
    [JsonPropertyName("calculated_at")]
    public required DateTimeOffset CalculatedAt { get; init; }
}
/// <summary>
/// Interface for combined impact-uncertainty score calculation.
/// </summary>
public interface ICombinedImpactCalculator
{
    /// <summary>
    /// Calculates a combined impact-uncertainty score for prioritization.
    /// </summary>
    /// <param name="impactContext">Impact context with environment, data sensitivity, etc.</param>
    /// <param name="signalSnapshot">Signal snapshot for uncertainty calculation.</param>
    /// <param name="uncertaintyPenaltyFactor">
    /// How much uncertainty reduces priority (default 0.5); implementations clamp it to [0, 1].
    /// </param>
    /// <returns>Combined score with impact, uncertainty, and effective priority.</returns>
    CombinedImpactScore Calculate(
        ImpactContext impactContext,
        SignalSnapshot signalSnapshot,
        double uncertaintyPenaltyFactor = 0.5);
}
/// <summary>
/// Calculates combined impact-uncertainty scores for unknown triage.
/// Integrates ImpactScoreCalculator with UncertaintyScoreCalculator for
/// a unified prioritization signal.
/// </summary>
public sealed class CombinedImpactCalculator : ICombinedImpactCalculator
{
    private readonly IImpactScoreCalculator _impactCalculator;
    private readonly IUncertaintyScoreCalculator _uncertaintyCalculator;
    private readonly ILogger<CombinedImpactCalculator> _logger;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates a calculator backed by the given impact and uncertainty calculators.
    /// </summary>
    /// <param name="impactCalculator">Source of the multi-factor impact score.</param>
    /// <param name="uncertaintyCalculator">Source of the entropy-based uncertainty score.</param>
    /// <param name="logger">Diagnostic logger.</param>
    /// <param name="timeProvider">Clock abstraction; defaults to the system clock when null.</param>
    public CombinedImpactCalculator(
        IImpactScoreCalculator impactCalculator,
        IUncertaintyScoreCalculator uncertaintyCalculator,
        ILogger<CombinedImpactCalculator> logger,
        TimeProvider? timeProvider = null)
    {
        _impactCalculator = impactCalculator;
        _uncertaintyCalculator = uncertaintyCalculator;
        _logger = logger;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public CombinedImpactScore Calculate(
        ImpactContext impactContext,
        SignalSnapshot signalSnapshot,
        double uncertaintyPenaltyFactor = 0.5)
    {
        ArgumentNullException.ThrowIfNull(impactContext);
        ArgumentNullException.ThrowIfNull(signalSnapshot);

        var impactResult = _impactCalculator.Calculate(impactContext);
        var uncertaintyResult = _uncertaintyCalculator.Calculate(signalSnapshot);

        // Effective priority = impact * (1 - uncertainty * penalty); a high-entropy
        // (uncertain) finding is demoted proportionally to the penalty factor.
        var clampedPenalty = Math.Clamp(uncertaintyPenaltyFactor, 0.0, 1.0);
        var priority = Math.Clamp(
            impactResult.Score * (1.0 - uncertaintyResult.Entropy * clampedPenalty),
            0.0,
            1.0);
        var basisPoints = (int)Math.Round(priority * 10000);

        _logger.LogDebug(
            "Calculated combined score: impact={Impact:F4}, uncertainty={Uncertainty:F4}, effective={Effective:F4} (penalty_factor={PenaltyFactor:F2})",
            impactResult.Score,
            uncertaintyResult.Entropy,
            priority,
            clampedPenalty);

        return new CombinedImpactScore
        {
            Impact = impactResult,
            Uncertainty = uncertaintyResult,
            EffectivePriority = priority,
            EffectivePriorityBasisPoints = basisPoints,
            CalculatedAt = _timeProvider.GetUtcNow()
        };
    }
}

View File

@@ -0,0 +1,346 @@
// <copyright file="DeltaIfPresentCalculator.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the BUSL-1.1.
// </copyright>
using System.Diagnostics.Metrics;
using Microsoft.Extensions.Logging;
using StellaOps.Policy.Determinization.Evidence;
using StellaOps.Policy.Determinization.Models;
namespace StellaOps.Policy.Determinization.Scoring;
/// <summary>
/// Calculates hypothetical score changes if missing signals were present.
/// Implements TSF-004: Delta-If-Present calculations for policy decision support.
/// All simulated timestamps flow from the injected <see cref="TimeProvider"/> so
/// results are deterministic under a fixed clock.
/// </summary>
public sealed class DeltaIfPresentCalculator : IDeltaIfPresentCalculator
{
    private static readonly Meter Meter = new("StellaOps.Policy.Determinization");

    private static readonly Counter<long> DeltaCalculationsCounter = Meter.CreateCounter<long>(
        "stellaops_determinization_delta_if_present_calculations_total",
        description: "Total delta-if-present calculations performed");

    private readonly ILogger<DeltaIfPresentCalculator> _logger;
    private readonly IUncertaintyScoreCalculator _uncertaintyCalculator;
    private readonly TrustScoreAggregator _trustAggregator;
    private readonly TimeProvider _timeProvider;

    // Default prior values for signals when simulating (moderate/neutral assumptions).
    private static readonly IReadOnlyDictionary<string, double> DefaultPriors = new Dictionary<string, double>
    {
        ["VEX"] = 0.5,          // Neutral: under_investigation
        ["EPSS"] = 0.3,         // Below median EPSS score
        ["Reachability"] = 0.5, // Unknown reachability
        ["Runtime"] = 0.3,      // Likely not detected at runtime
        ["Backport"] = 0.5,     // Unknown backport status
        ["SBOMLineage"] = 0.5   // Neutral lineage contribution
    };

    /// <summary>
    /// Creates a calculator that simulates missing signals against the trust aggregator.
    /// </summary>
    /// <param name="logger">Diagnostic logger.</param>
    /// <param name="uncertaintyCalculator">Entropy/uncertainty calculator.</param>
    /// <param name="trustAggregator">Aggregates snapshot + uncertainty into a trust score.</param>
    /// <param name="timeProvider">Clock abstraction; defaults to the system clock when null.</param>
    public DeltaIfPresentCalculator(
        ILogger<DeltaIfPresentCalculator> logger,
        IUncertaintyScoreCalculator uncertaintyCalculator,
        TrustScoreAggregator trustAggregator,
        TimeProvider? timeProvider = null)
    {
        ArgumentNullException.ThrowIfNull(logger);
        ArgumentNullException.ThrowIfNull(uncertaintyCalculator);
        ArgumentNullException.ThrowIfNull(trustAggregator);
        _logger = logger;
        _uncertaintyCalculator = uncertaintyCalculator;
        _trustAggregator = trustAggregator;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Calculates the score delta if one missing signal were present at <paramref name="assumedValue"/>.
    /// </summary>
    public DeltaIfPresentResult CalculateSingleSignalDelta(
        SignalSnapshot snapshot,
        string signal,
        double assumedValue,
        SignalWeights? weights = null)
    {
        ArgumentNullException.ThrowIfNull(snapshot);
        ArgumentException.ThrowIfNullOrWhiteSpace(signal);
        var effectiveWeights = weights ?? SignalWeights.Default;
        var signalWeight = GetSignalWeight(signal, effectiveWeights);

        // Current state.
        var currentUncertainty = _uncertaintyCalculator.Calculate(snapshot, effectiveWeights);
        var currentScore = _trustAggregator.Aggregate(snapshot, currentUncertainty, effectiveWeights);

        // Hypothetical state with the signal present at the assumed value.
        var hypotheticalSnapshot = CreateHypotheticalSnapshot(snapshot, signal, assumedValue);
        var hypotheticalUncertainty = _uncertaintyCalculator.Calculate(hypotheticalSnapshot, effectiveWeights);
        var hypotheticalScore = _trustAggregator.Aggregate(hypotheticalSnapshot, hypotheticalUncertainty, effectiveWeights);

        DeltaCalculationsCounter.Add(1,
            new KeyValuePair<string, object?>("signal", signal),
            new KeyValuePair<string, object?>("cve", snapshot.Cve));
        _logger.LogDebug(
            "Delta-if-present for {Signal}={Value:F2}: score {Current:F4} -> {Hypothetical:F4} (delta={Delta:+0.0000;-0.0000})",
            signal, assumedValue, currentScore, hypotheticalScore, hypotheticalScore - currentScore);

        return new DeltaIfPresentResult
        {
            Signal = signal,
            CurrentScore = currentScore,
            HypotheticalScore = hypotheticalScore,
            AssumedValue = assumedValue,
            SignalWeight = signalWeight,
            CurrentEntropy = currentUncertainty.Entropy,
            HypotheticalEntropy = hypotheticalUncertainty.Entropy
        };
    }

    /// <summary>
    /// Runs best-case (0.0), worst-case (1.0), and prior-value scenarios for every
    /// signal gap, and prioritizes gaps by maximum potential score impact.
    /// </summary>
    public DeltaIfPresentAnalysis CalculateFullAnalysis(
        SignalSnapshot snapshot,
        SignalWeights? weights = null)
    {
        ArgumentNullException.ThrowIfNull(snapshot);
        var effectiveWeights = weights ?? SignalWeights.Default;

        // Current state.
        var currentUncertainty = _uncertaintyCalculator.Calculate(snapshot, effectiveWeights);
        var currentScore = _trustAggregator.Aggregate(snapshot, currentUncertainty, effectiveWeights);

        var gapAnalysis = new List<SignalDeltaScenarios>();

        // Analyze each gap across the three scenarios.
        foreach (var gap in currentUncertainty.Gaps)
        {
            var priorValue = DefaultPriors.GetValueOrDefault(gap.Signal, 0.5);
            var bestCase = CalculateSingleSignalDelta(snapshot, gap.Signal, 0.0, effectiveWeights);
            var worstCase = CalculateSingleSignalDelta(snapshot, gap.Signal, 1.0, effectiveWeights);
            var priorCase = CalculateSingleSignalDelta(snapshot, gap.Signal, priorValue, effectiveWeights);
            gapAnalysis.Add(new SignalDeltaScenarios
            {
                Signal = gap.Signal,
                Weight = gap.Weight,
                GapReason = gap.Reason,
                BestCase = bestCase,
                WorstCase = worstCase,
                PriorCase = priorCase
            });
        }

        // Prioritize gaps by maximum potential impact.
        var prioritized = gapAnalysis
            .OrderByDescending(g => g.MaxImpact)
            .Select(g => g.Signal)
            .ToList();

        _logger.LogInformation(
            "Delta-if-present analysis for {Cve}/{Purl}: {GapCount} gaps, prioritized: [{Priority}]",
            snapshot.Cve, snapshot.Purl, gapAnalysis.Count,
            string.Join(", ", prioritized.Take(3)));

        return new DeltaIfPresentAnalysis
        {
            CurrentScore = currentScore,
            CurrentEntropy = currentUncertainty.Entropy,
            GapAnalysis = gapAnalysis,
            PrioritizedGaps = prioritized,
            ComputedAt = _timeProvider.GetUtcNow()
        };
    }

    /// <summary>
    /// Computes the min/max achievable scores by filling every gap with its
    /// low-risk (0.0) or high-risk (1.0) value respectively.
    /// </summary>
    public ScoreBounds CalculateScoreBounds(
        SignalSnapshot snapshot,
        SignalWeights? weights = null)
    {
        ArgumentNullException.ThrowIfNull(snapshot);
        var effectiveWeights = weights ?? SignalWeights.Default;

        // Current state.
        var currentUncertainty = _uncertaintyCalculator.Calculate(snapshot, effectiveWeights);
        var currentScore = _trustAggregator.Aggregate(snapshot, currentUncertainty, effectiveWeights);

        if (currentUncertainty.Gaps.Count == 0)
        {
            // No gaps - current score is the only possibility.
            return new ScoreBounds
            {
                CurrentScore = currentScore,
                MinimumScore = currentScore,
                MaximumScore = currentScore,
                CurrentEntropy = currentUncertainty.Entropy,
                GapCount = 0,
                MissingWeightPercentage = 0.0
            };
        }

        // Best-case snapshot: all missing signals at low-risk values.
        var bestSnapshot = snapshot;
        foreach (var gap in currentUncertainty.Gaps)
        {
            bestSnapshot = CreateHypotheticalSnapshot(bestSnapshot, gap.Signal, 0.0);
        }

        // Worst-case snapshot: all missing signals at high-risk values.
        var worstSnapshot = snapshot;
        foreach (var gap in currentUncertainty.Gaps)
        {
            worstSnapshot = CreateHypotheticalSnapshot(worstSnapshot, gap.Signal, 1.0);
        }

        // Calculate bounds.
        var bestUncertainty = _uncertaintyCalculator.Calculate(bestSnapshot, effectiveWeights);
        var worstUncertainty = _uncertaintyCalculator.Calculate(worstSnapshot, effectiveWeights);
        var maxScore = _trustAggregator.Aggregate(bestSnapshot, bestUncertainty, effectiveWeights);
        var minScore = _trustAggregator.Aggregate(worstSnapshot, worstUncertainty, effectiveWeights);

        // Share of total signal weight that is currently missing.
        var missingWeight = currentUncertainty.Gaps.Sum(g => g.Weight);
        var totalWeight = effectiveWeights.TotalWeight;
        var missingPercentage = totalWeight > 0 ? (missingWeight / totalWeight) * 100.0 : 0.0;

        _logger.LogDebug(
            "Score bounds for {Cve}: current={Current:F4}, min={Min:F4}, max={Max:F4}, range={Range:F4}",
            snapshot.Cve, currentScore, minScore, maxScore, maxScore - minScore);

        return new ScoreBounds
        {
            CurrentScore = currentScore,
            MinimumScore = minScore,
            MaximumScore = maxScore,
            CurrentEntropy = currentUncertainty.Entropy,
            GapCount = currentUncertainty.Gaps.Count,
            MissingWeightPercentage = missingPercentage
        };
    }

    // Maps a (case-insensitive) signal name to its configured weight; unknown signals weigh 0.
    private static double GetSignalWeight(string signal, SignalWeights weights)
    {
        return signal.ToUpperInvariant() switch
        {
            "VEX" => weights.VexWeight,
            "EPSS" => weights.EpssWeight,
            "REACHABILITY" => weights.ReachabilityWeight,
            "RUNTIME" => weights.RuntimeWeight,
            "BACKPORT" => weights.BackportWeight,
            "SBOMLINEAGE" or "SBOM" => weights.SbomLineageWeight,
            _ => 0.0
        };
    }

    // Returns a copy of the snapshot with the named signal replaced by simulated
    // evidence derived from the normalized value. Unknown signals return the
    // original snapshot unchanged. All timestamps come from the injected clock.
    private SignalSnapshot CreateHypotheticalSnapshot(
        SignalSnapshot original,
        string signal,
        double normalizedValue)
    {
        var now = _timeProvider.GetUtcNow();
        return signal.ToUpperInvariant() switch
        {
            "VEX" => original with
            {
                Vex = SignalState<VexClaimSummary>.Queried(
                    CreateHypotheticalVex(normalizedValue, now), now)
            },
            "EPSS" => original with
            {
                Epss = SignalState<EpssEvidence>.Queried(
                    CreateHypotheticalEpss(normalizedValue, now), now)
            },
            "REACHABILITY" => original with
            {
                Reachability = SignalState<ReachabilityEvidence>.Queried(
                    CreateHypotheticalReachability(normalizedValue), now)
            },
            "RUNTIME" => original with
            {
                Runtime = SignalState<RuntimeEvidence>.Queried(
                    CreateHypotheticalRuntime(normalizedValue, now), now)
            },
            "BACKPORT" => original with
            {
                Backport = SignalState<BackportEvidence>.Queried(
                    CreateHypotheticalBackport(normalizedValue, now), now)
            },
            "SBOMLINEAGE" or "SBOM" => original with
            {
                Sbom = SignalState<SbomLineageEvidence>.Queried(
                    CreateHypotheticalSbom(normalizedValue), now)
            },
            _ => original
        };
    }

    // Maps 0.0-1.0 to a VEX status (the middle band is all "under_investigation").
    private static VexClaimSummary CreateHypotheticalVex(double normalizedValue, DateTimeOffset now)
    {
        var status = normalizedValue switch
        {
            < 0.25 => "not_affected",
            < 0.75 => "under_investigation",
            _ => "affected"
        };
        return new VexClaimSummary
        {
            Status = status,
            Source = "hypothetical",
            DocumentId = "delta-if-present-simulation",
            Timestamp = now
        };
    }

    private static EpssEvidence CreateHypotheticalEpss(double normalizedValue, DateTimeOffset now)
    {
        return new EpssEvidence
        {
            Epss = normalizedValue,
            Percentile = normalizedValue * 100.0,
            Date = DateOnly.FromDateTime(now.UtcDateTime)
        };
    }

    private static ReachabilityEvidence CreateHypotheticalReachability(double normalizedValue)
    {
        var status = normalizedValue >= 0.5
            ? ReachabilityStatus.Reachable
            : ReachabilityStatus.Unreachable;
        return new ReachabilityEvidence
        {
            Status = status,
            // Confidence peaks at the extremes (0.0/1.0) and bottoms out at 0.5.
            Confidence = 1.0 - Math.Abs(normalizedValue - 0.5) * 2,
            PathCount = normalizedValue >= 0.5 ? 1 : 0,
            Source = "hypothetical"
        };
    }

    private static RuntimeEvidence CreateHypotheticalRuntime(double normalizedValue, DateTimeOffset now)
    {
        return new RuntimeEvidence
        {
            Detected = normalizedValue >= 0.5,
            Source = "hypothetical",
            Timestamp = now
        };
    }

    private static BackportEvidence CreateHypotheticalBackport(double normalizedValue, DateTimeOffset now)
    {
        return new BackportEvidence
        {
            Detected = normalizedValue < 0.5, // Backport = lower risk
            Source = "hypothetical",
            Timestamp = now
        };
    }

    private static SbomLineageEvidence CreateHypotheticalSbom(double normalizedValue)
    {
        return new SbomLineageEvidence
        {
            Present = true,
            Depth = (int)(normalizedValue * 5),
            Source = "hypothetical"
        };
    }
}

View File

@@ -0,0 +1,192 @@
// -----------------------------------------------------------------------------
// EwsCalculator.cs
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
// Description: Unified Evidence-Weighted Score calculator implementation.
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Diagnostics.Metrics;
using Microsoft.Extensions.Logging;
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
/// <summary>
/// Unified calculator for Evidence-Weighted Scores (EWS).
/// Orchestrates 6-dimension normalization, weighting, and guardrails.
/// </summary>
public sealed class EwsCalculator : IEwsCalculator
{
    private static readonly Meter Meter = new("StellaOps.Policy.Determinization.EWS");

    private static readonly Histogram<int> EwsScoreHistogram = Meter.CreateHistogram<int>(
        "stellaops_ews_score",
        unit: "score",
        description: "Evidence-Weighted Score distribution (0-100)");

    private static readonly Counter<int> GuardrailsAppliedCounter = Meter.CreateCounter<int>(
        "stellaops_ews_guardrails_applied",
        description: "Count of guardrails applied to EWS scores");

    private readonly ImmutableDictionary<EwsDimension, IEwsDimensionNormalizer> _normalizers;
    private readonly IGuardrailsEngine _guardrailsEngine;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<EwsCalculator> _logger;

    /// <summary>
    /// Creates a calculator from the supplied per-dimension normalizers.
    /// </summary>
    /// <param name="normalizers">One normalizer per <see cref="EwsDimension"/>; all 6 are required.</param>
    /// <param name="guardrailsEngine">Engine that applies caps/floors to the raw score.</param>
    /// <param name="timeProvider">Clock abstraction; defaults to the system clock when null.</param>
    /// <param name="logger">Optional logger; a null logger is used when omitted.</param>
    /// <exception cref="InvalidOperationException">A dimension has no registered normalizer.</exception>
    public EwsCalculator(
        IEnumerable<IEwsDimensionNormalizer> normalizers,
        IGuardrailsEngine guardrailsEngine,
        TimeProvider? timeProvider = null,
        ILogger<EwsCalculator>? logger = null)
    {
        ArgumentNullException.ThrowIfNull(normalizers);
        ArgumentNullException.ThrowIfNull(guardrailsEngine);
        _normalizers = normalizers.ToImmutableDictionary(n => n.Dimension);
        _guardrailsEngine = guardrailsEngine;
        _timeProvider = timeProvider ?? TimeProvider.System;
        _logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<EwsCalculator>.Instance;
        ValidateNormalizers();
    }

    /// <summary>
    /// Creates a default EwsCalculator with all standard normalizers.
    /// </summary>
    public static EwsCalculator CreateDefault(
        TimeProvider? timeProvider = null,
        ILogger<EwsCalculator>? logger = null)
    {
        var normalizers = new IEwsDimensionNormalizer[]
        {
            new ReachabilityNormalizer(),
            new RuntimeSignalsNormalizer(),
            new BackportEvidenceNormalizer(),
            new ExploitabilityNormalizer(),
            new SourceConfidenceNormalizer(),
            new MitigationStatusNormalizer()
        };
        return new EwsCalculator(
            normalizers,
            new GuardrailsEngine(),
            timeProvider,
            logger);
    }

    /// <inheritdoc />
    public EwsCompositeScore Calculate(
        EwsSignalInput signal,
        EwsDimensionWeights? weights = null,
        EwsGuardrails? guardrails = null)
    {
        ArgumentNullException.ThrowIfNull(signal);
        var effectiveWeights = weights ?? EwsDimensionWeights.Default;
        var effectiveGuardrails = guardrails ?? EwsGuardrails.Default;

        // Warn (but proceed) when weights do not sum to ~1.0.
        if (!effectiveWeights.IsNormalized())
        {
            _logger.LogWarning(
                "EWS dimension weights are not normalized (total={Total:F4}); results may be unexpected",
                effectiveWeights.TotalWeight);
        }

        // Normalize every dimension.
        var dimensionScores = new List<EwsDimensionScore>();
        foreach (EwsDimension dimension in Enum.GetValues<EwsDimension>())
        {
            var dimScore = CalculateDimension(dimension, signal, effectiveWeights.GetWeight(dimension));
            dimensionScores.Add(dimScore);
        }
        var dimensions = dimensionScores.ToImmutableArray();

        // Raw composite score: weighted sum, rounded and clamped to [0, 100].
        var rawScore = (int)Math.Round(dimensions.Sum(d => d.WeightedContribution));
        rawScore = Math.Clamp(rawScore, 0, 100);

        // Apply caps/floors.
        var guardrailsResult = _guardrailsEngine.Apply(rawScore, signal, dimensions, effectiveGuardrails);

        // Overall confidence: weighted average of dimension confidences.
        var confidence = dimensions.Sum(d => d.Confidence * d.Weight);

        // Flag for manual review when overall confidence is too low to trust.
        var needsReview = confidence < effectiveGuardrails.MinConfidenceThreshold;

        var result = new EwsCompositeScore
        {
            Score = guardrailsResult.AdjustedScore,
            RawScore = rawScore,
            Confidence = confidence,
            Dimensions = dimensions,
            AppliedGuardrails = guardrailsResult.AppliedGuardrails,
            NeedsReview = needsReview,
            CalculatedAt = _timeProvider.GetUtcNow(),
            CveId = signal.CveId,
            Purl = signal.Purl
        };

        // Emit metrics.
        EwsScoreHistogram.Record(result.Score,
            new KeyValuePair<string, object?>("risk_tier", result.RiskTier),
            new KeyValuePair<string, object?>("guardrails_applied", guardrailsResult.WasModified));
        if (guardrailsResult.WasModified)
        {
            GuardrailsAppliedCounter.Add(guardrailsResult.AppliedGuardrails.Length);
        }

        _logger.LogDebug(
            "Calculated EWS: score={Score} (raw={RawScore}), confidence={Confidence:P0}, tier={Tier}, guardrails={Guardrails}",
            result.Score,
            result.RawScore,
            result.Confidence,
            result.RiskTier,
            string.Join(",", guardrailsResult.AppliedGuardrails));

        return result;
    }

    /// <inheritdoc />
    public EwsDimensionScore CalculateDimension(
        EwsDimension dimension,
        EwsSignalInput signal,
        double weight)
    {
        ArgumentNullException.ThrowIfNull(signal);
        var normalizer = GetNormalizer(dimension);
        var score = normalizer.Normalize(signal);
        var confidence = normalizer.GetConfidence(signal);
        var explanation = normalizer.GetExplanation(signal, score);
        return new EwsDimensionScore
        {
            Dimension = dimension,
            Score = score,
            Confidence = confidence,
            Weight = weight,
            Explanation = explanation
        };
    }

    /// <inheritdoc />
    public IEwsDimensionNormalizer GetNormalizer(EwsDimension dimension)
    {
        if (_normalizers.TryGetValue(dimension, out var normalizer))
        {
            return normalizer;
        }
        throw new InvalidOperationException($"No normalizer registered for dimension {dimension}");
    }

    // Fails fast at construction time when any of the 6 dimensions lacks a normalizer.
    private void ValidateNormalizers()
    {
        foreach (EwsDimension dimension in Enum.GetValues<EwsDimension>())
        {
            if (!_normalizers.ContainsKey(dimension))
            {
                throw new InvalidOperationException(
                    $"Missing normalizer for dimension {dimension}. All 6 dimensions must have normalizers.");
            }
        }
    }
}

View File

@@ -0,0 +1,101 @@
// -----------------------------------------------------------------------------
// EwsDimension.cs
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
// Description: Defines the 6 canonical dimensions for EWS scoring.
// -----------------------------------------------------------------------------
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
/// <summary>
/// The 6 canonical dimensions for the Evidence-Weighted Score (EWS) model.
/// Each dimension maps specific signal inputs to a normalized 0-100 score.
/// Serialized as enum names (via <see cref="JsonStringEnumConverter"/>); numeric
/// values are explicit and must remain stable for persisted data.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum EwsDimension
{
    /// <summary>
    /// RCH - Reachability dimension.
    /// Measures whether vulnerable code paths are reachable from entrypoints.
    /// Input: Reachability tier (R0-R4), call graph analysis, runtime traces.
    /// </summary>
    Reachability = 0,

    /// <summary>
    /// RTS - Runtime Signals dimension.
    /// Measures evidence from runtime detection and observability.
    /// Input: Runtime telemetry, instrumentation coverage, APM signals.
    /// </summary>
    RuntimeSignals = 1,

    /// <summary>
    /// BKP - Backport Evidence dimension.
    /// Measures evidence of patched code in affected packages.
    /// Input: Backport detection, binary diff analysis, vendor advisories.
    /// </summary>
    BackportEvidence = 2,

    /// <summary>
    /// XPL - Exploitability dimension.
    /// Measures likelihood and maturity of exploitation.
    /// Input: EPSS, KEV status, exploit kit availability, PoC age.
    /// </summary>
    Exploitability = 3,

    /// <summary>
    /// SRC - Source Confidence dimension.
    /// Measures confidence in SBOM and dependency lineage.
    /// Input: SBOM completeness, verified signatures, attestations.
    /// </summary>
    SourceConfidence = 4,

    /// <summary>
    /// MIT - Mitigation Status dimension.
    /// Measures VEX status and compensating controls.
    /// Input: VEX statements, workarounds applied, network controls.
    /// </summary>
    MitigationStatus = 5
}
/// <summary>
/// Short codes for dimension serialization and display.
/// </summary>
public static class EwsDimensionCodes
{
    public const string Reachability = "RCH";
    public const string RuntimeSignals = "RTS";
    public const string BackportEvidence = "BKP";
    public const string Exploitability = "XPL";
    public const string SourceConfidence = "SRC";
    public const string MitigationStatus = "MIT";

    /// <summary>
    /// Gets the short code for a dimension.
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">The dimension is not one of the 6 canonical values.</exception>
    public static string ToCode(this EwsDimension dimension) => dimension switch
    {
        EwsDimension.Reachability => Reachability,
        EwsDimension.RuntimeSignals => RuntimeSignals,
        EwsDimension.BackportEvidence => BackportEvidence,
        EwsDimension.Exploitability => Exploitability,
        EwsDimension.SourceConfidence => SourceConfidence,
        EwsDimension.MitigationStatus => MitigationStatus,
        _ => throw new ArgumentOutOfRangeException(nameof(dimension), dimension, "Unknown dimension")
    };

    /// <summary>
    /// Parses a short code (case-insensitive) to a dimension.
    /// Returns null for null or unrecognized input rather than throwing.
    /// </summary>
    public static EwsDimension? FromCode(string? code) => code?.ToUpperInvariant() switch
    {
        Reachability => EwsDimension.Reachability,
        RuntimeSignals => EwsDimension.RuntimeSignals,
        BackportEvidence => EwsDimension.BackportEvidence,
        Exploitability => EwsDimension.Exploitability,
        SourceConfidence => EwsDimension.SourceConfidence,
        MitigationStatus => EwsDimension.MitigationStatus,
        _ => null
    };
}

View File

@@ -0,0 +1,298 @@
// -----------------------------------------------------------------------------
// EwsModels.cs
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
// Description: Result models for Evidence-Weighted Score calculation.
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
/// <summary>
/// Individual dimension score from normalization.
/// </summary>
public sealed record EwsDimensionScore
{
    /// <summary>
    /// The dimension this score represents.
    /// </summary>
    [JsonPropertyName("dimension")]
    public required EwsDimension Dimension { get; init; }

    /// <summary>
    /// Short dimension code (RCH, RTS, BKP, XPL, SRC, MIT), derived from <see cref="Dimension"/>.
    /// </summary>
    [JsonPropertyName("code")]
    public string Code => Dimension.ToCode();

    /// <summary>
    /// Normalized score in range [0, 100].
    /// </summary>
    [JsonPropertyName("score")]
    public required int Score { get; init; }

    /// <summary>
    /// Confidence level for this score (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("confidence")]
    public required double Confidence { get; init; }

    /// <summary>
    /// Weight applied to this dimension in composite calculation.
    /// </summary>
    [JsonPropertyName("weight")]
    public required double Weight { get; init; }

    /// <summary>
    /// Weighted contribution to the composite score: Score * Weight.
    /// </summary>
    [JsonPropertyName("weighted_contribution")]
    public double WeightedContribution => Score * Weight;

    /// <summary>
    /// Human-readable explanation of how the score was derived.
    /// </summary>
    [JsonPropertyName("explanation")]
    public required string Explanation { get; init; }

    /// <summary>
    /// Whether this score is based on actual evidence or assumptions;
    /// defined here as <see cref="Confidence"/> of at least 0.5.
    /// </summary>
    [JsonPropertyName("is_evidence_based")]
    public bool IsEvidenceBased => Confidence >= 0.5;
}
/// <summary>
/// Weights for each dimension in the 6-dimension EWS model.
/// Both the <see cref="Default"/> and <see cref="Legacy"/> profiles sum to 1.0.
/// </summary>
public sealed record EwsDimensionWeights
{
    /// <summary>
    /// Weight for RCH (Reachability) dimension.
    /// </summary>
    [JsonPropertyName("rch")]
    public double Reachability { get; init; } = 0.25;

    /// <summary>
    /// Weight for RTS (Runtime Signals) dimension.
    /// </summary>
    [JsonPropertyName("rts")]
    public double RuntimeSignals { get; init; } = 0.15;

    /// <summary>
    /// Weight for BKP (Backport Evidence) dimension.
    /// </summary>
    [JsonPropertyName("bkp")]
    public double BackportEvidence { get; init; } = 0.10;

    /// <summary>
    /// Weight for XPL (Exploitability) dimension.
    /// </summary>
    [JsonPropertyName("xpl")]
    public double Exploitability { get; init; } = 0.20;

    /// <summary>
    /// Weight for SRC (Source Confidence) dimension.
    /// </summary>
    [JsonPropertyName("src")]
    public double SourceConfidence { get; init; } = 0.10;

    /// <summary>
    /// Weight for MIT (Mitigation Status) dimension.
    /// </summary>
    [JsonPropertyName("mit")]
    public double MitigationStatus { get; init; } = 0.20;

    /// <summary>
    /// Default weights as per advisory recommendations.
    /// </summary>
    public static EwsDimensionWeights Default => new();

    /// <summary>
    /// Legacy weighting profile, kept for backward compatibility with
    /// previously calculated scores.
    /// </summary>
    public static EwsDimensionWeights Legacy => new()
    {
        Reachability = 0.20,
        RuntimeSignals = 0.10,
        BackportEvidence = 0.15,
        Exploitability = 0.25,
        SourceConfidence = 0.10,
        MitigationStatus = 0.20
    };

    /// <summary>
    /// Gets the weight for a specific dimension; unknown values yield 0.0.
    /// </summary>
    public double GetWeight(EwsDimension dimension) => dimension switch
    {
        EwsDimension.Reachability => Reachability,
        EwsDimension.RuntimeSignals => RuntimeSignals,
        EwsDimension.BackportEvidence => BackportEvidence,
        EwsDimension.Exploitability => Exploitability,
        EwsDimension.SourceConfidence => SourceConfidence,
        EwsDimension.MitigationStatus => MitigationStatus,
        _ => 0.0
    };

    /// <summary>
    /// Sum of all weights (should equal 1.0 for normalized calculations).
    /// </summary>
    public double TotalWeight =>
        Reachability + RuntimeSignals + BackportEvidence +
        Exploitability + SourceConfidence + MitigationStatus;

    /// <summary>
    /// Validates that weights sum to approximately 1.0 (within <paramref name="tolerance"/>).
    /// </summary>
    public bool IsNormalized(double tolerance = 0.001) =>
        Math.Abs(TotalWeight - 1.0) < tolerance;
}
/// <summary>
/// Guardrails configuration for EWS scoring.
/// Defines caps and floors (on the 0-100 score scale) to prevent extreme scores.
/// </summary>
public sealed record EwsGuardrails
{
    /// <summary>
    /// Maximum score for "not_affected" VEX status (cap).
    /// Prevents fully mitigated items from being flagged as high risk.
    /// </summary>
    [JsonPropertyName("not_affected_cap")]
    public int NotAffectedCap { get; init; } = 25;

    /// <summary>
    /// Minimum score when runtime evidence shows active usage (floor).
    /// Ensures actively used vulnerable code is never fully suppressed.
    /// </summary>
    [JsonPropertyName("runtime_floor")]
    public int RuntimeFloor { get; init; } = 30;

    /// <summary>
    /// Maximum score for speculative findings (no evidence, all assumptions).
    /// Prevents assumption-based findings from dominating triage.
    /// </summary>
    [JsonPropertyName("speculative_cap")]
    public int SpeculativeCap { get; init; } = 60;

    /// <summary>
    /// Minimum score when the CVE is in KEV (floor).
    /// Known exploited vulnerabilities always require attention.
    /// </summary>
    [JsonPropertyName("kev_floor")]
    public int KevFloor { get; init; } = 70;

    /// <summary>
    /// Maximum score for backported findings (cap).
    /// Confirmed backports should not be high priority.
    /// </summary>
    [JsonPropertyName("backported_cap")]
    public int BackportedCap { get; init; } = 20;

    /// <summary>
    /// Minimum overall confidence (0.0-1.0) to trust the composite score.
    /// Below this, the score should be flagged for manual review.
    /// </summary>
    [JsonPropertyName("min_confidence_threshold")]
    public double MinConfidenceThreshold { get; init; } = 0.3;

    /// <summary>
    /// Default guardrails configuration.
    /// </summary>
    public static EwsGuardrails Default => new();
}
/// <summary>
/// Composite Evidence-Weighted Score result.
/// </summary>
public sealed record EwsCompositeScore
{
    /// <summary>
    /// Final weighted composite score [0, 100], after guardrails.
    /// </summary>
    [JsonPropertyName("score")]
    public required int Score { get; init; }

    /// <summary>
    /// Score before guardrails were applied.
    /// </summary>
    [JsonPropertyName("raw_score")]
    public required int RawScore { get; init; }

    /// <summary>
    /// Basis points representation (0-10000) for deterministic storage.
    /// </summary>
    [JsonPropertyName("basis_points")]
    public int BasisPoints => Score * 100;

    /// <summary>
    /// Overall confidence in the composite score (0.0 to 1.0).
    /// Weighted average of dimension confidences.
    /// </summary>
    [JsonPropertyName("confidence")]
    public required double Confidence { get; init; }

    /// <summary>
    /// Individual dimension scores.
    /// </summary>
    [JsonPropertyName("dimensions")]
    public required ImmutableArray<EwsDimensionScore> Dimensions { get; init; }

    /// <summary>
    /// Guardrails that were applied.
    /// </summary>
    [JsonPropertyName("applied_guardrails")]
    public required ImmutableArray<string> AppliedGuardrails { get; init; }

    /// <summary>
    /// Whether guardrails changed the numeric score. Note this is derived from
    /// Score != RawScore; guardrails may appear in <see cref="AppliedGuardrails"/>
    /// even when they did not alter the final value.
    /// </summary>
    [JsonPropertyName("guardrails_applied")]
    public bool GuardrailsApplied => Score != RawScore;

    /// <summary>
    /// Whether manual review is recommended due to low confidence.
    /// </summary>
    [JsonPropertyName("needs_review")]
    public required bool NeedsReview { get; init; }

    /// <summary>
    /// When this score was calculated (UTC).
    /// </summary>
    [JsonPropertyName("calculated_at")]
    public required DateTimeOffset CalculatedAt { get; init; }

    /// <summary>
    /// CVE identifier this score relates to.
    /// </summary>
    [JsonPropertyName("cve_id")]
    public string? CveId { get; init; }

    /// <summary>
    /// Package URL (purl) this score relates to.
    /// </summary>
    [JsonPropertyName("purl")]
    public string? Purl { get; init; }

    /// <summary>
    /// Gets a dimension score by dimension type, or null when absent.
    /// </summary>
    public EwsDimensionScore? GetDimension(EwsDimension dimension) =>
        Dimensions.FirstOrDefault(d => d.Dimension == dimension);

    /// <summary>
    /// Gets a risk tier label derived from <see cref="Score"/>
    /// (80+ Critical, 60+ High, 40+ Medium, 20+ Low, else Informational).
    /// </summary>
    [JsonPropertyName("risk_tier")]
    public string RiskTier => Score switch
    {
        >= 80 => "Critical",
        >= 60 => "High",
        >= 40 => "Medium",
        >= 20 => "Low",
        _ => "Informational"
    };
}

View File

@@ -0,0 +1,221 @@
// -----------------------------------------------------------------------------
// EwsSignalInput.cs
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
// Description: Signal input model for EWS dimension normalization.
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
/// <summary>
/// Raw signal inputs for Evidence-Weighted Score calculation.
/// Contains all signals that feed into the 6-dimension model.
/// All members are nullable: null means "signal not collected".
/// </summary>
public sealed record EwsSignalInput
{
    // -------------------------------------------------------------------------
    // RCH (Reachability) signals
    // -------------------------------------------------------------------------
    /// <summary>
    /// Reachability tier from static analysis (R0=unreachable to R4=reachable).
    /// </summary>
    [JsonPropertyName("reachability_tier")]
    public int? ReachabilityTier { get; init; }

    /// <summary>
    /// Call graph analysis confidence (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("call_graph_confidence")]
    public double? CallGraphConfidence { get; init; }

    /// <summary>
    /// Whether runtime trace confirmed the path.
    /// </summary>
    [JsonPropertyName("runtime_trace_confirmed")]
    public bool? RuntimeTraceConfirmed { get; init; }

    // -------------------------------------------------------------------------
    // RTS (Runtime Signals) signals
    // -------------------------------------------------------------------------
    /// <summary>
    /// Runtime instrumentation coverage percentage (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("instrumentation_coverage")]
    public double? InstrumentationCoverage { get; init; }

    /// <summary>
    /// Number of runtime invocations observed in the past period.
    /// </summary>
    [JsonPropertyName("runtime_invocation_count")]
    public int? RuntimeInvocationCount { get; init; }

    /// <summary>
    /// Whether APM signals indicate active usage.
    /// </summary>
    [JsonPropertyName("apm_active_usage")]
    public bool? ApmActiveUsage { get; init; }

    // -------------------------------------------------------------------------
    // BKP (Backport Evidence) signals
    // -------------------------------------------------------------------------
    /// <summary>
    /// Whether backport was detected via binary analysis.
    /// </summary>
    [JsonPropertyName("backport_detected")]
    public bool? BackportDetected { get; init; }

    /// <summary>
    /// Backport confidence score from binary diff (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("backport_confidence")]
    public double? BackportConfidence { get; init; }

    /// <summary>
    /// Whether vendor advisory confirms backport.
    /// </summary>
    [JsonPropertyName("vendor_backport_confirmed")]
    public bool? VendorBackportConfirmed { get; init; }

    // -------------------------------------------------------------------------
    // XPL (Exploitability) signals
    // -------------------------------------------------------------------------
    /// <summary>
    /// EPSS probability (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("epss_probability")]
    public double? EpssProbability { get; init; }

    /// <summary>
    /// Whether the CVE is in KEV (Known Exploited Vulnerabilities).
    /// </summary>
    [JsonPropertyName("is_in_kev")]
    public bool? IsInKev { get; init; }

    /// <summary>
    /// Whether an exploit kit is available.
    /// </summary>
    [JsonPropertyName("exploit_kit_available")]
    public bool? ExploitKitAvailable { get; init; }

    /// <summary>
    /// Age of the public PoC in days (null if no PoC).
    /// </summary>
    [JsonPropertyName("poc_age_days")]
    public int? PocAgeDays { get; init; }

    /// <summary>
    /// CVSS base score (0.0 to 10.0).
    /// </summary>
    [JsonPropertyName("cvss_base_score")]
    public double? CvssBaseScore { get; init; }

    // -------------------------------------------------------------------------
    // SRC (Source Confidence) signals
    // -------------------------------------------------------------------------
    /// <summary>
    /// SBOM completeness percentage (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("sbom_completeness")]
    public double? SbomCompleteness { get; init; }

    /// <summary>
    /// Whether SBOM has verified signatures.
    /// </summary>
    [JsonPropertyName("sbom_signed")]
    public bool? SbomSigned { get; init; }

    /// <summary>
    /// Number of valid attestations.
    /// </summary>
    [JsonPropertyName("attestation_count")]
    public int? AttestationCount { get; init; }

    /// <summary>
    /// Whether dependency lineage is verified.
    /// </summary>
    [JsonPropertyName("lineage_verified")]
    public bool? LineageVerified { get; init; }

    // -------------------------------------------------------------------------
    // MIT (Mitigation Status) signals
    // -------------------------------------------------------------------------
    /// <summary>
    /// VEX status string (not_affected, affected, fixed, under_investigation).
    /// </summary>
    [JsonPropertyName("vex_status")]
    public string? VexStatus { get; init; }

    /// <summary>
    /// VEX justification string.
    /// </summary>
    [JsonPropertyName("vex_justification")]
    public string? VexJustification { get; init; }

    /// <summary>
    /// Whether a workaround is applied.
    /// </summary>
    [JsonPropertyName("workaround_applied")]
    public bool? WorkaroundApplied { get; init; }

    /// <summary>
    /// Whether network controls mitigate the vulnerability.
    /// </summary>
    [JsonPropertyName("network_controls_applied")]
    public bool? NetworkControlsApplied { get; init; }

    // -------------------------------------------------------------------------
    // Metadata
    // -------------------------------------------------------------------------
    /// <summary>
    /// Timestamp when these signals were collected.
    /// </summary>
    [JsonPropertyName("collected_at")]
    public DateTimeOffset? CollectedAt { get; init; }

    /// <summary>
    /// CVE identifier this input relates to.
    /// </summary>
    [JsonPropertyName("cve_id")]
    public string? CveId { get; init; }

    /// <summary>
    /// Package URL (purl) this input relates to.
    /// </summary>
    [JsonPropertyName("purl")]
    public string? Purl { get; init; }

    /// <summary>
    /// Additional signals as key-value pairs for extensibility.
    /// </summary>
    [JsonPropertyName("additional_signals")]
    public ImmutableDictionary<string, object?>? AdditionalSignals { get; init; }

    /// <summary>
    /// Creates an empty signal input (all assumptions mode).
    /// </summary>
    public static EwsSignalInput Empty => new();

    /// <summary>
    /// Checks if at least one signal is present for the specified dimension.
    /// An empty VEX status string counts as absent, consistent with the
    /// string.IsNullOrEmpty handling in the mitigation normalizer.
    /// </summary>
    public bool HasSignalForDimension(EwsDimension dimension) => dimension switch
    {
        EwsDimension.Reachability => ReachabilityTier.HasValue || CallGraphConfidence.HasValue || RuntimeTraceConfirmed.HasValue,
        EwsDimension.RuntimeSignals => InstrumentationCoverage.HasValue || RuntimeInvocationCount.HasValue || ApmActiveUsage.HasValue,
        EwsDimension.BackportEvidence => BackportDetected.HasValue || BackportConfidence.HasValue || VendorBackportConfirmed.HasValue,
        EwsDimension.Exploitability => EpssProbability.HasValue || IsInKev.HasValue || ExploitKitAvailable.HasValue || PocAgeDays.HasValue || CvssBaseScore.HasValue,
        EwsDimension.SourceConfidence => SbomCompleteness.HasValue || SbomSigned.HasValue || AttestationCount.HasValue || LineageVerified.HasValue,
        EwsDimension.MitigationStatus => !string.IsNullOrEmpty(VexStatus) || WorkaroundApplied.HasValue || NetworkControlsApplied.HasValue,
        _ => false
    };
}

View File

@@ -0,0 +1,109 @@
// -----------------------------------------------------------------------------
// GuardrailsEngine.cs
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
// Description: Implementation of guardrails enforcement for EWS scoring.
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
/// <summary>
/// Applies guardrails (caps and floors) to EWS scores.
/// Guardrails prevent extreme scores and ensure business logic constraints.
/// </summary>
public sealed class GuardrailsEngine : IGuardrailsEngine
{
    /// <inheritdoc />
    public GuardrailsResult Apply(
        int rawScore,
        EwsSignalInput signal,
        ImmutableArray<EwsDimensionScore> dimensions,
        EwsGuardrails guardrails)
    {
        var current = rawScore;
        var labels = new List<string>();

        // Raises the running score to a floor, recording the rule that fired.
        void RaiseTo(int floor, string name)
        {
            if (current < floor)
            {
                current = floor;
                labels.Add($"{name}:{floor}");
            }
        }

        // Lowers the running score to a cap, recording the rule that fired.
        void LowerTo(int cap, string name)
        {
            if (current > cap)
            {
                current = cap;
                labels.Add($"{name}:{cap}");
            }
        }

        // NOTE(review): rules run in a fixed sequence, so a later cap can undo
        // an earlier floor (e.g. the not_affected cap after the KEV floor) —
        // confirm this precedence is intended.
        if (signal.IsInKev == true)
        {
            RaiseTo(guardrails.KevFloor, "kev_floor");
        }

        if (signal.BackportDetected == true || signal.VendorBackportConfirmed == true)
        {
            LowerTo(guardrails.BackportedCap, "backported_cap");
        }

        if (IsNotAffected(signal))
        {
            LowerTo(guardrails.NotAffectedCap, "not_affected_cap");
        }

        if (HasActiveRuntimeUsage(signal))
        {
            RaiseTo(guardrails.RuntimeFloor, "runtime_floor");
        }

        if (IsSpeculative(dimensions))
        {
            LowerTo(guardrails.SpeculativeCap, "speculative_cap");
        }

        return new GuardrailsResult
        {
            AdjustedScore = Math.Clamp(current, 0, 100),
            OriginalScore = rawScore,
            AppliedGuardrails = labels.ToImmutableArray()
        };
    }

    // A VEX statement of "not_affected" or "fixed" means no residual exposure.
    private static bool IsNotAffected(EwsSignalInput signal) =>
        string.Equals(signal.VexStatus, "not_affected", StringComparison.OrdinalIgnoreCase)
        || string.Equals(signal.VexStatus, "fixed", StringComparison.OrdinalIgnoreCase);

    // Runtime usage counts when APM reports activity or any invocation was seen.
    private static bool HasActiveRuntimeUsage(EwsSignalInput signal) =>
        signal.ApmActiveUsage == true
        || signal.RuntimeInvocationCount is > 0;

    // A score is speculative when the weight-averaged confidence across
    // dimensions falls below 30%, or when there is no usable evidence at all.
    private static bool IsSpeculative(ImmutableArray<EwsDimensionScore> dimensions)
    {
        if (dimensions.IsDefaultOrEmpty)
        {
            return true;
        }

        var weightSum = 0.0;
        var confidenceSum = 0.0;
        foreach (var dimension in dimensions)
        {
            weightSum += dimension.Weight;
            confidenceSum += dimension.Confidence * dimension.Weight;
        }

        if (weightSum > 0)
        {
            return confidenceSum / weightSum < 0.3;
        }

        return true;
    }
}

View File

@@ -0,0 +1,46 @@
// -----------------------------------------------------------------------------
// IEwsCalculator.cs
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
// Description: Interface for the unified Evidence-Weighted Score calculator.
// -----------------------------------------------------------------------------
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
/// <summary>
/// Unified calculator for Evidence-Weighted Scores (EWS).
/// Orchestrates 6-dimension normalization, weighting, and guardrails.
/// </summary>
public interface IEwsCalculator
{
    /// <summary>
    /// Calculates a composite EWS from raw signals.
    /// </summary>
    /// <param name="signal">The raw signal input.</param>
    /// <param name="weights">Optional custom weights (defaults to EwsDimensionWeights.Default when null).</param>
    /// <param name="guardrails">Optional guardrails configuration (defaults to EwsGuardrails.Default when null).</param>
    /// <returns>The composite EWS result, including per-dimension scores and any applied guardrails.</returns>
    EwsCompositeScore Calculate(
        EwsSignalInput signal,
        EwsDimensionWeights? weights = null,
        EwsGuardrails? guardrails = null);

    /// <summary>
    /// Calculates a single dimension score from raw signals,
    /// without applying composite-level guardrails.
    /// </summary>
    /// <param name="dimension">The dimension to calculate.</param>
    /// <param name="signal">The raw signal input.</param>
    /// <param name="weight">The weight to assign to this dimension.</param>
    /// <returns>The dimension score.</returns>
    EwsDimensionScore CalculateDimension(
        EwsDimension dimension,
        EwsSignalInput signal,
        double weight);

    /// <summary>
    /// Gets the normalizer for a specific dimension.
    /// </summary>
    /// <param name="dimension">The dimension.</param>
    /// <returns>The normalizer for that dimension.</returns>
    IEwsDimensionNormalizer GetNormalizer(EwsDimension dimension);
}

View File

@@ -0,0 +1,47 @@
// -----------------------------------------------------------------------------
// IEwsDimensionNormalizer.cs
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
// Description: Pluggable interface for normalizing signal inputs to dimension scores.
// -----------------------------------------------------------------------------
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
/// <summary>
/// Interface for normalizing raw signal inputs to a canonical 0-100 dimension score.
/// Each dimension has its own normalizer implementation that handles the specific
/// signal types and normalization logic for that dimension.
/// </summary>
public interface IEwsDimensionNormalizer
{
    /// <summary>
    /// The dimension this normalizer handles.
    /// </summary>
    EwsDimension Dimension { get; }

    /// <summary>
    /// Normalizes a raw signal value to a dimension score in range [0, 100].
    /// </summary>
    /// <param name="signal">The raw signal input for this dimension.</param>
    /// <returns>Normalized score in range [0, 100], where:
    /// - 0 = lowest risk/impact (e.g., unreachable, fully mitigated)
    /// - 100 = highest risk/impact (e.g., reachable, actively exploited)
    /// </returns>
    int Normalize(EwsSignalInput signal);

    /// <summary>
    /// Gets the confidence level for this normalization (0.0 to 1.0).
    /// Lower confidence when assumptions are made or data is missing.
    /// </summary>
    /// <param name="signal">The raw signal input for this dimension.</param>
    /// <returns>Confidence level from 0.0 (all assumptions) to 1.0 (verified evidence).</returns>
    double GetConfidence(EwsSignalInput signal);

    /// <summary>
    /// Gets a human-readable explanation of how the score was derived.
    /// </summary>
    /// <param name="signal">The raw signal input for this dimension.</param>
    /// <param name="normalizedScore">The normalized score that was calculated.</param>
    /// <returns>Explanation suitable for audit and operator review.</returns>
    string GetExplanation(EwsSignalInput signal, int normalizedScore);
}

View File

@@ -0,0 +1,57 @@
// -----------------------------------------------------------------------------
// IGuardrailsEngine.cs
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
// Description: Interface for guardrails enforcement in EWS scoring.
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
/// <summary>
/// Result of guardrails application: the adjusted score plus an audit trail
/// of which guardrail rules fired.
/// </summary>
public sealed record GuardrailsResult
{
    /// <summary>
    /// The adjusted score after applying guardrails.
    /// </summary>
    public required int AdjustedScore { get; init; }

    /// <summary>
    /// The original score before guardrails.
    /// </summary>
    public required int OriginalScore { get; init; }

    /// <summary>
    /// List of guardrails that were applied (rule labels).
    /// </summary>
    public required ImmutableArray<string> AppliedGuardrails { get; init; }

    /// <summary>
    /// Whether the score was modified. Derived from the two score values,
    /// so it is always consistent with them.
    /// </summary>
    public bool WasModified => AdjustedScore != OriginalScore;
}
/// <summary>
/// Engine for applying guardrails (caps and floors) to EWS scores.
/// Guardrails prevent extreme scores in edge cases.
/// </summary>
public interface IGuardrailsEngine
{
    /// <summary>
    /// Applies guardrails to a raw composite score.
    /// </summary>
    /// <param name="rawScore">The raw composite score before guardrails.</param>
    /// <param name="signal">The signal input that produced this score.</param>
    /// <param name="dimensions">The individual dimension scores.</param>
    /// <param name="guardrails">The guardrails configuration to apply.</param>
    /// <returns>The result with adjusted score and list of applied guardrails.</returns>
    GuardrailsResult Apply(
        int rawScore,
        EwsSignalInput signal,
        ImmutableArray<EwsDimensionScore> dimensions,
        EwsGuardrails guardrails);
}

View File

@@ -0,0 +1,94 @@
// -----------------------------------------------------------------------------
// BackportEvidenceNormalizer.cs
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
// Description: Normalizer for BKP (Backport Evidence) dimension.
// -----------------------------------------------------------------------------
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
/// <summary>
/// Normalizes backport evidence to the BKP dimension score.
/// Higher score = more evidence of vulnerability being present (not backported).
/// Lower score = strong evidence of backport (vulnerability patched).
/// </summary>
public sealed class BackportEvidenceNormalizer : IEwsDimensionNormalizer
{
    /// <inheritdoc />
    public EwsDimension Dimension => EwsDimension.BackportEvidence;

    /// <inheritdoc />
    public int Normalize(EwsSignalInput signal)
    {
        // Vendor confirmation is the strongest signal.
        if (signal.VendorBackportConfirmed == true)
        {
            return 5; // Almost certainly patched
        }

        // Binary analysis detected a backport.
        if (signal.BackportDetected == true)
        {
            if (signal.BackportConfidence.HasValue)
            {
                // Higher backport confidence => more likely patched => lower score.
                return (int)((1.0 - signal.BackportConfidence.Value) * 30);
            }
            return 15; // Backport detected with unknown confidence
        }

        // Binary analysis explicitly found no backport.
        if (signal.BackportDetected == false)
        {
            if (signal.BackportConfidence.HasValue)
            {
                // Higher confidence in "no backport" => higher risk score.
                return (int)(70 + signal.BackportConfidence.Value * 30);
            }
            return 80; // Likely vulnerable
        }

        // No backport analysis performed - assume vulnerable (conservative).
        return 75;
    }

    /// <inheritdoc />
    public double GetConfidence(EwsSignalInput signal)
    {
        // Fix: only a *positive* vendor confirmation justifies 0.95. The
        // previous HasValue check returned 0.95 for an explicit false too,
        // even though Normalize then falls back to the "no analysis"
        // assumption (score 75), overstating confidence in that assumption.
        if (signal.VendorBackportConfirmed == true)
        {
            return 0.95; // Vendor confirmation is highly reliable
        }
        if (signal.BackportDetected.HasValue)
        {
            // Use the binary-diff confidence when available.
            return signal.BackportConfidence ?? 0.6;
        }
        return 0.2; // No analysis, low confidence
    }

    /// <inheritdoc />
    public string GetExplanation(EwsSignalInput signal, int normalizedScore)
    {
        if (signal.VendorBackportConfirmed == true)
        {
            return "Vendor confirmed backport; vulnerability patched in this build";
        }
        if (signal.BackportDetected == true)
        {
            var conf = signal.BackportConfidence?.ToString("P0") ?? "unknown";
            return $"Binary analysis detected backport with {conf} confidence";
        }
        if (signal.BackportDetected == false)
        {
            var conf = signal.BackportConfidence?.ToString("P0") ?? "unknown";
            return $"Binary analysis found no backport evidence ({conf} confidence)";
        }
        return "No backport analysis available; assuming vulnerable";
    }
}

View File

@@ -0,0 +1,152 @@
// -----------------------------------------------------------------------------
// ExploitabilityNormalizer.cs
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
// Description: Normalizer for XPL (Exploitability) dimension.
// -----------------------------------------------------------------------------
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
/// <summary>
/// Normalizes exploitability signals to the XPL dimension score.
/// Maps EPSS, KEV, exploit availability, and CVSS to a 0-100 score.
/// </summary>
public sealed class ExploitabilityNormalizer : IEwsDimensionNormalizer
{
    /// <inheritdoc />
    public EwsDimension Dimension => EwsDimension.Exploitability;

    /// <inheritdoc />
    public int Normalize(EwsSignalInput signal)
    {
        // Known-exploited entries short-circuit everything else.
        if (signal.IsInKev == true)
        {
            return 100;
        }

        var weightedSum = 0.0;
        var weightTotal = 0.0;

        // Folds one signal's contribution into the weighted average.
        void Accumulate(double value, double weight)
        {
            weightTotal += weight;
            weightedSum += value * weight;
        }

        // EPSS probability (most predictive); the 0.7 exponent applies a mild
        // non-linear emphasis on high-EPSS items.
        if (signal.EpssProbability is double epss)
        {
            Accumulate(Math.Pow(epss, 0.7) * 100, 0.4);
        }

        // Explicit knowledge about exploit kits is informative either way.
        if (signal.ExploitKitAvailable is bool kit)
        {
            Accumulate(kit ? 90 : 20, 0.25);
        }

        // A public PoC grows more dangerous with age (weaponization time).
        if (signal.PocAgeDays is int pocAge)
        {
            var pocScore = pocAge switch
            {
                <= 7 => 60,    // Fresh PoC
                <= 30 => 75,   // 1 month old
                <= 90 => 85,   // 3 months old
                <= 365 => 90,  // 1 year old
                _ => 95        // Very old = likely weaponized
            };
            Accumulate(pocScore, 0.15);
        }

        // CVSS base score, mapped from 0-10 onto 0-100.
        if (signal.CvssBaseScore is double cvss)
        {
            Accumulate(cvss * 10, 0.2);
        }

        if (weightTotal > 0)
        {
            return (int)Math.Round(weightedSum / weightTotal);
        }

        // No exploitability signals at all: assume moderate risk.
        return 50;
    }

    /// <inheritdoc />
    public double GetConfidence(EwsSignalInput signal)
    {
        // KEV membership is ground truth.
        if (signal.IsInKev == true)
        {
            return 1.0;
        }

        // Take the strongest single evidence source that is present.
        var best = 0.0;
        if (signal.ExploitKitAvailable.HasValue)
        {
            best = Math.Max(best, 0.9);
        }
        if (signal.EpssProbability.HasValue)
        {
            best = Math.Max(best, 0.85);
        }
        if (signal.PocAgeDays.HasValue)
        {
            best = Math.Max(best, 0.7);
        }
        if (signal.CvssBaseScore.HasValue)
        {
            best = Math.Max(best, 0.5); // CVSS alone is less predictive
        }

        return best > 0 ? best : 0.3; // Low confidence if no data
    }

    /// <inheritdoc />
    public string GetExplanation(EwsSignalInput signal, int normalizedScore)
    {
        var fragments = new List<string>();
        if (signal.IsInKev == true)
        {
            fragments.Add("CVE is in CISA KEV (Known Exploited Vulnerabilities)");
        }
        if (signal.EpssProbability is double epss)
        {
            fragments.Add($"EPSS probability {epss:P2}");
        }
        if (signal.ExploitKitAvailable == true)
        {
            fragments.Add("exploit kit available");
        }
        if (signal.PocAgeDays is int pocAge)
        {
            fragments.Add($"PoC available for {pocAge} days");
        }
        if (signal.CvssBaseScore is double cvss)
        {
            fragments.Add($"CVSS base score {cvss:F1}");
        }
        return fragments.Count == 0
            ? "No exploitability signals; assuming moderate risk"
            : string.Join(", ", fragments);
    }
}

View File

@@ -0,0 +1,118 @@
// -----------------------------------------------------------------------------
// MitigationStatusNormalizer.cs
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
// Description: Normalizer for MIT (Mitigation Status) dimension.
// -----------------------------------------------------------------------------
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
/// <summary>
/// Normalizes mitigation status signals to the MIT dimension score.
/// Lower score = strong mitigation in place (low residual risk).
/// Higher score = no mitigation or vulnerable status.
/// </summary>
public sealed class MitigationStatusNormalizer : IEwsDimensionNormalizer
{
    /// <inheritdoc />
    public EwsDimension Dimension => EwsDimension.MitigationStatus;

    /// <inheritdoc />
    public int Normalize(EwsSignalInput signal)
    {
        // VEX status sets the base; compensating controls each earn a credit.
        var reduction = 0;
        if (signal.WorkaroundApplied == true)
        {
            reduction += 30;
        }
        if (signal.NetworkControlsApplied == true)
        {
            reduction += 20;
        }
        return Math.Clamp(ParseVexStatus(signal.VexStatus) - reduction, 0, 100);
    }

    // Maps a VEX status string to a residual-risk base score. Missing or
    // unrecognized statuses are conservatively treated as affected (75).
    private static int ParseVexStatus(string? vexStatus) =>
        vexStatus?.ToLowerInvariant() switch
        {
            "not_affected" => 5,          // Confirmed not affected
            "fixed" => 10,                // Fix applied
            "under_investigation" => 60,  // Unknown yet
            "affected" => 90,             // Confirmed vulnerable
            "exploitable" => 100,         // Actively exploitable
            _ => 75                       // null/unknown = assume affected
        };

    /// <inheritdoc />
    public double GetConfidence(EwsSignalInput signal)
    {
        if (string.IsNullOrEmpty(signal.VexStatus))
        {
            // No VEX statement; compensating controls alone give moderate
            // confidence, otherwise we have essentially no mitigation data.
            return signal.WorkaroundApplied.HasValue || signal.NetworkControlsApplied.HasValue
                ? 0.6
                : 0.2;
        }

        var confidence = signal.VexStatus.ToLowerInvariant() switch
        {
            "exploitable" => 0.95,
            "not_affected" => 0.9,
            "fixed" => 0.85,
            "affected" => 0.85,
            "under_investigation" => 0.4,
            _ => 0.5
        };

        // A stated justification strengthens the VEX statement.
        if (!string.IsNullOrEmpty(signal.VexJustification))
        {
            confidence = Math.Min(1.0, confidence + 0.1);
        }

        return confidence;
    }

    /// <inheritdoc />
    public string GetExplanation(EwsSignalInput signal, int normalizedScore)
    {
        var fragments = new List<string>();
        if (!string.IsNullOrEmpty(signal.VexStatus))
        {
            fragments.Add($"VEX status: {signal.VexStatus}");
            if (!string.IsNullOrEmpty(signal.VexJustification))
            {
                fragments.Add($"justification: {signal.VexJustification}");
            }
        }
        if (signal.WorkaroundApplied == true)
        {
            fragments.Add("workaround applied");
        }
        if (signal.NetworkControlsApplied == true)
        {
            fragments.Add("network controls in place");
        }
        return fragments.Count == 0
            ? "No mitigation status available; assuming affected"
            : string.Join(", ", fragments);
    }
}

View File

@@ -0,0 +1,122 @@
// -----------------------------------------------------------------------------
// ReachabilityNormalizer.cs
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
// Description: Normalizer for RCH (Reachability) dimension.
// -----------------------------------------------------------------------------
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
/// <summary>
/// Normalizes reachability signals to the RCH dimension score.
/// Maps R0-R4 tiers and call graph confidence to a 0-100 score.
/// </summary>
public sealed class ReachabilityNormalizer : IEwsDimensionNormalizer
{
    /// <inheritdoc />
    public EwsDimension Dimension => EwsDimension.Reachability;

    /// <inheritdoc />
    public int Normalize(EwsSignalInput signal)
    {
        // A static-analysis tier takes precedence over everything else.
        if (signal.ReachabilityTier is int tier)
        {
            return NormalizeFromTier(signal, tier);
        }

        // No tier: call-graph confidence alone maps to a moderate band.
        if (signal.CallGraphConfidence is double callGraph)
        {
            return (int)(50 * callGraph) + 25;
        }

        // Runtime trace alone is strong evidence of reachability.
        if (signal.RuntimeTraceConfirmed == true)
        {
            return 85;
        }

        // No signals at all: conservatively assume reachable.
        return 75;
    }

    // Maps an R0-R4 tier to a base score, blends toward the midpoint by
    // call-graph confidence, then boosts runtime-confirmed reachable paths.
    private static int NormalizeFromTier(EwsSignalInput signal, int tier)
    {
        var score = tier switch
        {
            0 => 0,    // R0: Unreachable
            1 => 20,   // R1: Present in dependency but not imported
            2 => 40,   // R2: Imported but not called
            3 => 70,   // R3: Called but not reachable from entrypoint
            4 => 100,  // R4: Reachable from entrypoint
            _ => 50    // Unknown tier - moderate assumption
        };

        if (signal.CallGraphConfidence is double confidence)
        {
            // Low confidence pulls the tier score toward the midpoint (50).
            score = (int)(score * confidence + 50 * (1 - confidence));
        }

        if (signal.RuntimeTraceConfirmed == true && score >= 70)
        {
            score = Math.Min(100, score + 15);
        }

        return Math.Clamp(score, 0, 100);
    }

    /// <inheritdoc />
    public double GetConfidence(EwsSignalInput signal)
    {
        if (signal.ReachabilityTier.HasValue)
        {
            // A tier plus call-graph confidence is the best evidence we get.
            return signal.CallGraphConfidence is double callGraph
                ? Math.Min(1.0, 0.7 + callGraph * 0.3)
                : 0.7;
        }

        if (signal.CallGraphConfidence is double callGraphOnly)
        {
            return callGraphOnly * 0.6;
        }

        return signal.RuntimeTraceConfirmed == true
            ? 0.9   // High confidence from runtime observation
            : 0.2;  // No evidence, pure assumption
    }

    /// <inheritdoc />
    public string GetExplanation(EwsSignalInput signal, int normalizedScore)
    {
        if (signal.ReachabilityTier is int tier)
        {
            var label = tier switch
            {
                0 => "unreachable",
                1 => "in-dependency-not-imported",
                2 => "imported-not-called",
                3 => "called-not-entrypoint-reachable",
                4 => "entrypoint-reachable",
                _ => "unknown-tier"
            };
            var confidence = signal.CallGraphConfidence?.ToString("P0") ?? "unknown";
            return $"Reachability tier R{tier} ({label}), call graph confidence {confidence}";
        }

        return signal.RuntimeTraceConfirmed == true
            ? "Runtime trace confirmed reachability"
            : "No reachability analysis; assumed reachable (conservative)";
    }
}

View File

@@ -0,0 +1,116 @@
// -----------------------------------------------------------------------------
// RuntimeSignalsNormalizer.cs
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
// Description: Normalizer for RTS (Runtime Signals) dimension.
// -----------------------------------------------------------------------------
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
/// <summary>
/// Normalizes runtime signals to the RTS dimension score.
/// Higher score = more evidence of runtime activity.
/// </summary>
public sealed class RuntimeSignalsNormalizer : IEwsDimensionNormalizer
{
    /// <inheritdoc />
    public EwsDimension Dimension => EwsDimension.RuntimeSignals;

    /// <inheritdoc />
    public int Normalize(EwsSignalInput signal)
    {
        var weightedSum = 0.0;
        var weightTotal = 0.0;

        // Folds one signal's contribution into the weighted average.
        void Accumulate(double value, double weight)
        {
            weightTotal += weight;
            weightedSum += value * weight;
        }

        // NOTE(review): coverage contributes positively to the activity score,
        // yet high coverage with zero invocations arguably indicates
        // *inactivity* — confirm this weighting is intended.
        if (signal.InstrumentationCoverage is double coverage)
        {
            Accumulate(coverage, 0.3);
        }

        // Bucketed (roughly logarithmic) scale for invocation counts:
        // 0 -> 0, 1-10 -> 0.25, 11-100 -> 0.5, 101-1000 -> 0.75, 1000+ -> 1.0.
        if (signal.RuntimeInvocationCount is int invocations)
        {
            var bucket = invocations switch
            {
                0 => 0.0,
                <= 10 => 0.25,
                <= 100 => 0.5,
                <= 1000 => 0.75,
                _ => 1.0
            };
            Accumulate(bucket, 0.4);
        }

        if (signal.ApmActiveUsage is bool apm)
        {
            Accumulate(apm ? 1.0 : 0.0, 0.3);
        }

        if (weightTotal > 0)
        {
            return (int)Math.Round(weightedSum / weightTotal * 100);
        }

        // No runtime signals at all: assume moderate activity (we don't know).
        return 50;
    }

    /// <inheritdoc />
    public double GetConfidence(EwsSignalInput signal)
    {
        // Take the strongest available evidence source.
        var best = 0.0;
        if (signal.InstrumentationCoverage is double coverage)
        {
            // Coverage itself expresses how much we trust runtime observations.
            best = Math.Max(best, coverage);
        }
        if (signal.RuntimeInvocationCount.HasValue)
        {
            best = Math.Max(best, 0.8); // Good data point
        }
        if (signal.ApmActiveUsage.HasValue)
        {
            best = Math.Max(best, 0.7);
        }
        return best > 0 ? best : 0.2; // Low if no data
    }

    /// <inheritdoc />
    public string GetExplanation(EwsSignalInput signal, int normalizedScore)
    {
        var fragments = new List<string>();
        if (signal.InstrumentationCoverage is double coverage)
        {
            fragments.Add($"instrumentation coverage {coverage:P0}");
        }
        if (signal.RuntimeInvocationCount is int invocations)
        {
            fragments.Add($"{invocations} runtime invocations observed");
        }
        if (signal.ApmActiveUsage is bool apm)
        {
            fragments.Add(apm ? "APM shows active usage" : "APM shows no active usage");
        }
        return fragments.Count == 0
            ? "No runtime signals available; assuming moderate activity"
            : string.Join(", ", fragments);
    }
}

View File

@@ -0,0 +1,138 @@
// -----------------------------------------------------------------------------
// SourceConfidenceNormalizer.cs
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
// Description: Normalizer for SRC (Source Confidence) dimension.
// -----------------------------------------------------------------------------
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;

/// <summary>
/// Maps source-trust signals onto the SRC dimension score.
/// The dimension is inverted: high confidence in the source data yields a LOW score
/// (low uncertainty risk), while weak or missing source data yields a HIGH score.
/// </summary>
public sealed class SourceConfidenceNormalizer : IEwsDimensionNormalizer
{
    // Relative contribution of each signal to the confidence estimate.
    private const double CompletenessWeight = 0.35;
    private const double SignedWeight = 0.25;
    private const double AttestationWeight = 0.2;
    private const double LineageWeight = 0.2;

    /// <inheritdoc />
    public EwsDimension Dimension => EwsDimension.SourceConfidence;

    /// <inheritdoc />
    public int Normalize(EwsSignalInput signal)
    {
        // Accumulate a confidence estimate first, then invert it into a risk score.
        var confidence = 0.0;
        var appliedWeight = 0.0;

        if (signal.SbomCompleteness is { } completeness)
        {
            appliedWeight += CompletenessWeight;
            confidence += completeness * CompletenessWeight;
        }

        if (signal.SbomSigned is { } signed)
        {
            appliedWeight += SignedWeight;
            confidence += (signed ? 1.0 : 0.0) * SignedWeight;
        }

        if (signal.AttestationCount is { } attestations)
        {
            appliedWeight += AttestationWeight;
            // Each additional attestation adds confidence with diminishing returns.
            var attestationConfidence = attestations switch
            {
                0 => 0.0,
                1 => 0.5,
                2 => 0.7,
                3 => 0.85,
                _ => 1.0
            };
            confidence += attestationConfidence * AttestationWeight;
        }

        if (signal.LineageVerified is { } lineage)
        {
            appliedWeight += LineageWeight;
            confidence += (lineage ? 1.0 : 0.0) * LineageWeight;
        }

        // No source signals at all: assume high uncertainty.
        if (appliedWeight <= 0)
        {
            return 80;
        }

        // Invert: high confidence = low score (low risk from source uncertainty).
        var normalizedConfidence = confidence / appliedWeight;
        return (int)Math.Round((1.0 - normalizedConfidence) * 100);
    }

    /// <inheritdoc />
    public double GetConfidence(EwsSignalInput signal)
    {
        // Each present signal improves confidence in our own assessment.
        var present = 0;
        if (signal.SbomCompleteness.HasValue) { present++; }
        if (signal.SbomSigned.HasValue) { present++; }
        if (signal.AttestationCount.HasValue) { present++; }
        if (signal.LineageVerified.HasValue) { present++; }

        // No data at all: low-confidence floor.
        if (present == 0)
        {
            return 0.2;
        }

        return 0.4 + (present * 0.15);
    }

    /// <inheritdoc />
    public string GetExplanation(EwsSignalInput signal, int normalizedScore)
    {
        var fragments = new List<string>();

        if (signal.SbomCompleteness.HasValue)
        {
            fragments.Add($"SBOM completeness {signal.SbomCompleteness.Value:P0}");
        }

        switch (signal.SbomSigned)
        {
            case true:
                fragments.Add("SBOM is signed");
                break;
            case false:
                fragments.Add("SBOM is not signed");
                break;
        }

        if (signal.AttestationCount.HasValue)
        {
            fragments.Add($"{signal.AttestationCount.Value} attestation(s) available");
        }

        switch (signal.LineageVerified)
        {
            case true:
                fragments.Add("dependency lineage verified");
                break;
            case false:
                fragments.Add("dependency lineage not verified");
                break;
        }

        if (fragments.Count == 0)
        {
            return "No source confidence signals; assuming high uncertainty";
        }

        return string.Join(", ", fragments);
    }
}

View File

@@ -0,0 +1,217 @@
// <copyright file="IDeltaIfPresentCalculator.cs" company="StellaOps">
// Copyright (c) StellaOps. Licensed under the BUSL-1.1.
// </copyright>
using StellaOps.Policy.Determinization.Models;

namespace StellaOps.Policy.Determinization.Scoring;

/// <summary>
/// Calculates hypothetical score changes if missing signals were present with various assumed values.
/// This enables "what-if" analysis to help operators prioritize signal collection efforts.
/// </summary>
public interface IDeltaIfPresentCalculator
{
    /// <summary>
    /// Calculate the hypothetical trust score delta if a specific missing signal were present.
    /// </summary>
    /// <param name="snapshot">Current signal snapshot.</param>
    /// <param name="signal">The missing signal to simulate.</param>
    /// <param name="assumedValue">The assumed value for the signal (0.0-1.0 normalized score).</param>
    /// <param name="weights">Optional signal weights; implementations fall back to defaults when null.</param>
    /// <returns>Delta calculation result showing impact on score and entropy.</returns>
    DeltaIfPresentResult CalculateSingleSignalDelta(
        SignalSnapshot snapshot,
        string signal,
        double assumedValue,
        SignalWeights? weights = null);

    /// <summary>
    /// Calculate hypothetical impacts for all missing signals at multiple assumed values
    /// (best-case, worst-case, and prior/expected scenarios per gap).
    /// </summary>
    /// <param name="snapshot">Current signal snapshot.</param>
    /// <param name="weights">Optional signal weights; implementations fall back to defaults when null.</param>
    /// <returns>Full delta-if-present analysis for all gaps, including a prioritized gap list.</returns>
    DeltaIfPresentAnalysis CalculateFullAnalysis(
        SignalSnapshot snapshot,
        SignalWeights? weights = null);

    /// <summary>
    /// Calculate the best-case and worst-case score bounds if all missing signals were present.
    /// </summary>
    /// <param name="snapshot">Current signal snapshot.</param>
    /// <param name="weights">Optional signal weights; implementations fall back to defaults when null.</param>
    /// <returns>Score bounds with completeness impact.</returns>
    ScoreBounds CalculateScoreBounds(
        SignalSnapshot snapshot,
        SignalWeights? weights = null);
}
/// <summary>
/// Result of a single signal delta-if-present calculation.
/// </summary>
/// <remarks>
/// <see cref="Delta"/> and <see cref="EntropyDelta"/> are derived properties computed
/// from the stored scores/entropies; they are not persisted independently.
/// </remarks>
public sealed record DeltaIfPresentResult
{
    /// <summary>
    /// The signal that was simulated as present.
    /// </summary>
    public required string Signal { get; init; }

    /// <summary>
    /// The current score without this signal.
    /// </summary>
    public required double CurrentScore { get; init; }

    /// <summary>
    /// The hypothetical score with this signal present at the assumed value.
    /// </summary>
    public required double HypotheticalScore { get; init; }

    /// <summary>
    /// The delta (hypothetical - current). Positive means score would increase.
    /// </summary>
    public double Delta => HypotheticalScore - CurrentScore;

    /// <summary>
    /// The assumed value used for the simulation (0.0-1.0 normalized score).
    /// </summary>
    public required double AssumedValue { get; init; }

    /// <summary>
    /// The weight of this signal in the scoring model.
    /// </summary>
    public required double SignalWeight { get; init; }

    /// <summary>
    /// Current entropy before adding signal.
    /// </summary>
    public required double CurrentEntropy { get; init; }

    /// <summary>
    /// Hypothetical entropy after adding signal.
    /// </summary>
    public required double HypotheticalEntropy { get; init; }

    /// <summary>
    /// Change in entropy (negative means entropy would decrease = less uncertainty).
    /// </summary>
    public double EntropyDelta => HypotheticalEntropy - CurrentEntropy;
}
/// <summary>
/// Complete analysis of all missing signals with delta-if-present calculations.
/// </summary>
public sealed record DeltaIfPresentAnalysis
{
    /// <summary>
    /// Current aggregate score.
    /// </summary>
    public required double CurrentScore { get; init; }

    /// <summary>
    /// Current entropy (uncertainty).
    /// </summary>
    public required double CurrentEntropy { get; init; }

    /// <summary>
    /// List of missing signals with their potential impact at different assumed values
    /// (best-case, worst-case, and prior scenarios per signal).
    /// </summary>
    public required IReadOnlyList<SignalDeltaScenarios> GapAnalysis { get; init; }

    /// <summary>
    /// Prioritized list of signals by maximum potential impact
    /// (the gaps most worth closing appear first).
    /// </summary>
    public required IReadOnlyList<string> PrioritizedGaps { get; init; }

    /// <summary>
    /// When this analysis was computed.
    /// </summary>
    public required DateTimeOffset ComputedAt { get; init; }
}
/// <summary>
/// Delta scenarios for a single missing signal at various assumed values.
/// </summary>
public sealed record SignalDeltaScenarios
{
    /// <summary>
    /// Signal name.
    /// </summary>
    public required string Signal { get; init; }

    /// <summary>
    /// Signal weight in scoring model.
    /// </summary>
    public required double Weight { get; init; }

    /// <summary>
    /// Why this signal is missing.
    /// </summary>
    public required SignalGapReason GapReason { get; init; }

    /// <summary>
    /// Delta if signal present with best-case value (lowest risk contribution).
    /// </summary>
    public required DeltaIfPresentResult BestCase { get; init; }

    /// <summary>
    /// Delta if signal present with worst-case value (highest risk contribution).
    /// </summary>
    public required DeltaIfPresentResult WorstCase { get; init; }

    /// <summary>
    /// Delta if signal present with prior/expected value.
    /// </summary>
    public required DeltaIfPresentResult PriorCase { get; init; }

    /// <summary>
    /// Maximum absolute delta magnitude across all scenarios.
    /// Considers only best and worst case; the prior case is presumably bounded
    /// by the two extremes — confirm if priors can fall outside the bounds.
    /// </summary>
    public double MaxImpact => Math.Max(Math.Abs(BestCase.Delta), Math.Abs(WorstCase.Delta));
}
/// <summary>
/// Best-case and worst-case score bounds if all missing signals were present.
/// </summary>
public sealed record ScoreBounds
{
    /// <summary>
    /// Current score with missing signals.
    /// </summary>
    public required double CurrentScore { get; init; }

    /// <summary>
    /// Minimum possible score (all missing signals at worst-case values).
    /// </summary>
    public required double MinimumScore { get; init; }

    /// <summary>
    /// Maximum possible score (all missing signals at best-case values).
    /// </summary>
    public required double MaximumScore { get; init; }

    /// <summary>
    /// Score range (max - min). A wide range means the missing signals matter a lot.
    /// </summary>
    public double Range => MaximumScore - MinimumScore;

    /// <summary>
    /// Current entropy.
    /// </summary>
    public required double CurrentEntropy { get; init; }

    /// <summary>
    /// Entropy if all signals were present (would be 0).
    /// Constant by definition: full signal coverage leaves no uncertainty.
    /// </summary>
    public double CompleteEntropy => 0.0;

    /// <summary>
    /// Number of missing signals.
    /// </summary>
    public required int GapCount { get; init; }

    /// <summary>
    /// Percentage of score weight that is missing.
    /// </summary>
    public required double MissingWeightPercentage { get; init; }
}

View File

@@ -0,0 +1,35 @@
namespace StellaOps.Policy.Determinization.Scoring;

/// <summary>
/// Interface for impact score calculation.
/// </summary>
public interface IImpactScoreCalculator
{
    /// <summary>
    /// Calculates the multi-factor impact score for unknowns.
    /// </summary>
    /// <param name="context">Impact context with environment, data sensitivity, fleet prevalence, SLA tier, and CVSS.</param>
    /// <param name="weights">Optional custom weights (uses defaults if null).</param>
    /// <returns>Calculated impact score with all component scores.</returns>
    ImpactScore Calculate(ImpactContext context, ImpactFactorWeights? weights = null);

    /// <summary>
    /// Normalizes an environment type to a score [0.0, 1.0].
    /// </summary>
    /// <param name="environment">Deployment environment classification.</param>
    /// <returns>Normalized exposure score; higher means more exposed.</returns>
    double NormalizeEnvironment(EnvironmentType environment);

    /// <summary>
    /// Normalizes a data sensitivity level to a score [0.0, 1.0].
    /// </summary>
    /// <param name="sensitivity">Data sensitivity classification.</param>
    /// <returns>Normalized sensitivity score; higher means more sensitive data.</returns>
    double NormalizeDataSensitivity(DataSensitivity sensitivity);

    /// <summary>
    /// Normalizes an SLA tier to a score [0.0, 1.0].
    /// </summary>
    /// <param name="tier">Business-criticality tier.</param>
    /// <returns>Normalized criticality score; higher means more critical.</returns>
    double NormalizeSlaTier(SlaTier tier);

    /// <summary>
    /// Normalizes a CVSS score [0.0, 10.0] to a score [0.0, 1.0].
    /// </summary>
    /// <param name="cvssScore">CVSS base score (0.0-10.0).</param>
    /// <returns>Normalized severity score.</returns>
    double NormalizeCvss(double cvssScore);
}

View File

@@ -0,0 +1,42 @@
namespace StellaOps.Policy.Determinization.Scoring;

/// <summary>
/// Configurable weights for impact scoring factors.
/// All weights are normalized to sum to 1.0.
/// </summary>
public sealed record ImpactFactorWeights
{
    /// <summary>Default weights following advisory recommendations.</summary>
    public static readonly ImpactFactorWeights Default = new()
    {
        EnvironmentExposureWeight = 0.20,
        DataSensitivityWeight = 0.20,
        FleetPrevalenceWeight = 0.15,
        SlaTierWeight = 0.15,
        CvssSeverityWeight = 0.30
    };

    /// <summary>Weight for environment exposure factor (prod/stage/dev).</summary>
    public required double EnvironmentExposureWeight { get; init; }

    /// <summary>Weight for data sensitivity factor (PII, financial, etc.).</summary>
    public required double DataSensitivityWeight { get; init; }

    /// <summary>Weight for fleet prevalence factor (how many assets affected).</summary>
    public required double FleetPrevalenceWeight { get; init; }

    /// <summary>Weight for SLA tier factor (business criticality).</summary>
    public required double SlaTierWeight { get; init; }

    /// <summary>Weight for CVSS severity factor.</summary>
    public required double CvssSeverityWeight { get; init; }

    /// <summary>Sum of all weights (should equal 1.0 for normalized calculations).</summary>
    public double TotalWeight
    {
        get
        {
            var total = EnvironmentExposureWeight;
            total += DataSensitivityWeight;
            total += FleetPrevalenceWeight;
            total += SlaTierWeight;
            total += CvssSeverityWeight;
            return total;
        }
    }

    /// <summary>Validates that weights sum to approximately 1.0.</summary>
    /// <param name="tolerance">Maximum allowed absolute deviation from 1.0.</param>
    public bool IsNormalized(double tolerance = 0.001)
    {
        var deviation = TotalWeight - 1.0;
        return Math.Abs(deviation) < tolerance;
    }
}

View File

@@ -0,0 +1,177 @@
using System.Text.Json.Serialization;

namespace StellaOps.Policy.Determinization.Scoring;

/// <summary>
/// Environment type classification for exposure scoring.
/// Ordered by increasing exposure (Development lowest, Production highest).
/// </summary>
public enum EnvironmentType
{
    /// <summary>Development environment - lowest exposure.</summary>
    Development = 0,

    /// <summary>Testing/QA environment.</summary>
    Testing = 1,

    /// <summary>Staging/Pre-production environment.</summary>
    Staging = 2,

    /// <summary>Production environment - highest exposure.</summary>
    Production = 3
}
/// <summary>
/// Data sensitivity classification for impact scoring.
/// Ordered by increasing sensitivity (Public lowest, Classified highest).
/// </summary>
public enum DataSensitivity
{
    /// <summary>Public or non-sensitive data.</summary>
    Public = 0,

    /// <summary>Internal/company-confidential data.</summary>
    Internal = 1,

    /// <summary>Contains PII (Personally Identifiable Information).</summary>
    Pii = 2,

    /// <summary>Contains financial data.</summary>
    Financial = 3,

    /// <summary>Contains healthcare/PHI data.</summary>
    Healthcare = 4,

    /// <summary>Contains classified/government data.</summary>
    Classified = 5
}
/// <summary>
/// SLA tier for business criticality scoring.
/// Ordered by increasing criticality (NonCritical lowest, MissionCritical highest).
/// </summary>
public enum SlaTier
{
    /// <summary>Non-critical - can tolerate extended downtime.</summary>
    NonCritical = 0,

    /// <summary>Standard - normal business operations.</summary>
    Standard = 1,

    /// <summary>Important - customer-facing or revenue-impacting.</summary>
    Important = 2,

    /// <summary>Critical - core business functionality.</summary>
    Critical = 3,

    /// <summary>Mission-critical - business cannot operate without.</summary>
    MissionCritical = 4
}
/// <summary>
/// Input context for impact scoring calculation.
/// </summary>
public sealed record ImpactContext
{
    /// <summary>Environment where the component is deployed.</summary>
    [JsonPropertyName("environment")]
    public required EnvironmentType Environment { get; init; }

    /// <summary>Highest data sensitivity level accessed by the component.</summary>
    [JsonPropertyName("data_sensitivity")]
    public required DataSensitivity DataSensitivity { get; init; }

    /// <summary>Proportion of fleet affected (0.0-1.0).</summary>
    [JsonPropertyName("fleet_prevalence")]
    public required double FleetPrevalence { get; init; }

    /// <summary>SLA tier of the affected service.</summary>
    [JsonPropertyName("sla_tier")]
    public required SlaTier SlaTier { get; init; }

    /// <summary>CVSS base score (0.0-10.0).</summary>
    [JsonPropertyName("cvss_score")]
    public required double CvssScore { get; init; }

    /// <summary>
    /// Creates a default context for unknowns (conservative scoring):
    /// production exposure, internal data, moderate fleet prevalence,
    /// standard SLA tier, and a medium (5.0) CVSS severity.
    /// </summary>
    public static ImpactContext DefaultForUnknowns() => new()
    {
        Environment = EnvironmentType.Production, // Assume worst-case
        DataSensitivity = DataSensitivity.Internal, // Conservative default
        FleetPrevalence = 0.5, // Assume moderate prevalence
        SlaTier = SlaTier.Standard, // Standard tier
        CvssScore = 5.0 // Medium severity default
    };
}
/// <summary>
/// Result of impact score calculation.
/// Component scores are kept alongside the weighted total for explainability.
/// </summary>
public sealed record ImpactScore
{
    /// <summary>Final weighted impact score [0.0, 1.0].</summary>
    [JsonPropertyName("score")]
    public required double Score { get; init; }

    /// <summary>Basis points representation (0-10000) for deterministic storage.</summary>
    [JsonPropertyName("basis_points")]
    public required int BasisPoints { get; init; }

    /// <summary>Environment exposure component score [0.0, 1.0].</summary>
    [JsonPropertyName("env_exposure")]
    public required double EnvironmentExposure { get; init; }

    /// <summary>Data sensitivity component score [0.0, 1.0].</summary>
    [JsonPropertyName("data_sensitivity")]
    public required double DataSensitivityScore { get; init; }

    /// <summary>Fleet prevalence component score [0.0, 1.0].</summary>
    [JsonPropertyName("fleet_prevalence")]
    public required double FleetPrevalenceScore { get; init; }

    /// <summary>SLA tier component score [0.0, 1.0].</summary>
    [JsonPropertyName("sla_tier")]
    public required double SlaTierScore { get; init; }

    /// <summary>CVSS severity component score [0.0, 1.0].</summary>
    [JsonPropertyName("cvss_severity")]
    public required double CvssSeverityScore { get; init; }

    /// <summary>When this score was calculated (UTC).</summary>
    [JsonPropertyName("calculated_at")]
    public required DateTimeOffset CalculatedAt { get; init; }

    /// <summary>
    /// Creates an impact score from component scores and weights.
    /// The total is the weighted sum of components, clamped to [0, 1];
    /// basis points are the clamped score scaled by 10000 and rounded.
    /// </summary>
    public static ImpactScore Create(
        double envExposure,
        double dataSensitivity,
        double fleetPrevalence,
        double slaTier,
        double cvssSeverity,
        ImpactFactorWeights weights,
        DateTimeOffset calculatedAt)
    {
        // Weighted sum of all component scores.
        var weighted =
            (envExposure * weights.EnvironmentExposureWeight) +
            (dataSensitivity * weights.DataSensitivityWeight) +
            (fleetPrevalence * weights.FleetPrevalenceWeight) +
            (slaTier * weights.SlaTierWeight) +
            (cvssSeverity * weights.CvssSeverityWeight);

        // Clamp to [0, 1] and derive the deterministic basis-point representation.
        var finalScore = Math.Clamp(weighted, 0.0, 1.0);

        return new ImpactScore
        {
            Score = finalScore,
            BasisPoints = (int)Math.Round(finalScore * 10000),
            EnvironmentExposure = envExposure,
            DataSensitivityScore = dataSensitivity,
            FleetPrevalenceScore = fleetPrevalence,
            SlaTierScore = slaTier,
            CvssSeverityScore = cvssSeverity,
            CalculatedAt = calculatedAt
        };
    }
}

View File

@@ -0,0 +1,127 @@
using System.Diagnostics.Metrics;

namespace StellaOps.Policy.Determinization.Scoring;

/// <summary>
/// Calculates multi-factor impact scores for unknowns using the formula:
/// impact = w_env * EnvExposure + w_data * DataSensitivity + w_fleet * FleetPrevalence + w_sla * SLATier + w_cvss * CVSSSeverity
/// </summary>
public sealed class ImpactScoreCalculator : IImpactScoreCalculator
{
    private static readonly Meter Meter = new("StellaOps.Policy.Determinization");

    private static readonly Histogram<double> ImpactHistogram = Meter.CreateHistogram<double>(
        "stellaops_determinization_impact_score",
        unit: "ratio",
        description: "Impact score for unknowns (0.0 = minimal impact, 1.0 = critical impact)");

    private readonly ILogger<ImpactScoreCalculator> _logger;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates a calculator.
    /// </summary>
    /// <param name="logger">Diagnostic logger.</param>
    /// <param name="timeProvider">Clock abstraction; defaults to the system clock when null.</param>
    public ImpactScoreCalculator(ILogger<ImpactScoreCalculator> logger, TimeProvider? timeProvider = null)
    {
        _logger = logger;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public ImpactScore Calculate(ImpactContext context, ImpactFactorWeights? weights = null)
    {
        ArgumentNullException.ThrowIfNull(context);

        var effectiveWeights = weights ?? ImpactFactorWeights.Default;

        // Non-normalized weights still produce a score, but flag them for the operator.
        if (!effectiveWeights.IsNormalized())
        {
            _logger.LogWarning(
                "Impact factor weights are not normalized (total={Total:F4}); results may be unexpected",
                effectiveWeights.TotalWeight);
        }

        // Map every input dimension into [0.0, 1.0].
        var environmentScore = NormalizeEnvironment(context.Environment);
        var sensitivityScore = NormalizeDataSensitivity(context.DataSensitivity);
        var prevalenceScore = Math.Clamp(context.FleetPrevalence, 0.0, 1.0);
        var tierScore = NormalizeSlaTier(context.SlaTier);
        var severityScore = NormalizeCvss(context.CvssScore);

        // Combine the components into the weighted result.
        var result = ImpactScore.Create(
            environmentScore,
            sensitivityScore,
            prevalenceScore,
            tierScore,
            severityScore,
            effectiveWeights,
            _timeProvider.GetUtcNow());

        _logger.LogDebug(
            "Calculated impact score {Score:F4} (basis points={BasisPoints}) from env={Env:F2}, data={Data:F2}, fleet={Fleet:F2}, sla={Sla:F2}, cvss={Cvss:F2}",
            result.Score,
            result.BasisPoints,
            environmentScore,
            sensitivityScore,
            prevalenceScore,
            tierScore,
            severityScore);

        // Emit the observability metric, tagged by classification.
        ImpactHistogram.Record(
            result.Score,
            new KeyValuePair<string, object?>("environment", context.Environment.ToString()),
            new KeyValuePair<string, object?>("data_sensitivity", context.DataSensitivity.ToString()));

        return result;
    }

    /// <inheritdoc />
    public double NormalizeEnvironment(EnvironmentType environment) => environment switch
    {
        // Development = 0.0 ... Production = 1.0.
        EnvironmentType.Development => 0.0,
        EnvironmentType.Testing => 0.33,
        EnvironmentType.Staging => 0.66,
        EnvironmentType.Production => 1.0,
        _ => 0.5 // Unknown values default to moderate exposure.
    };

    /// <inheritdoc />
    public double NormalizeDataSensitivity(DataSensitivity sensitivity) => sensitivity switch
    {
        // Public = 0.0 ... Classified = 1.0.
        DataSensitivity.Public => 0.0,
        DataSensitivity.Internal => 0.2,
        DataSensitivity.Pii => 0.5,
        DataSensitivity.Financial => 0.7,
        DataSensitivity.Healthcare => 0.8,
        DataSensitivity.Classified => 1.0,
        _ => 0.5 // Unknown values default to moderate sensitivity.
    };

    /// <inheritdoc />
    public double NormalizeSlaTier(SlaTier tier) => tier switch
    {
        // NonCritical = 0.0 ... MissionCritical = 1.0.
        SlaTier.NonCritical => 0.0,
        SlaTier.Standard => 0.25,
        SlaTier.Important => 0.5,
        SlaTier.Critical => 0.75,
        SlaTier.MissionCritical => 1.0,
        _ => 0.5 // Unknown values default to moderate criticality.
    };

    /// <inheritdoc />
    public double NormalizeCvss(double cvssScore) =>
        // Map CVSS 0.0-10.0 linearly onto 0.0-1.0, clamping out-of-range input.
        Math.Clamp(cvssScore / 10.0, 0.0, 1.0);
}

View File

@@ -0,0 +1,160 @@
using StellaOps.Policy.Determinization.Evidence;
using StellaOps.Policy.Determinization.Models;
using StellaOps.Policy.Scoring;
using StellaOps.Policy.TrustLattice;

namespace StellaOps.Policy.Determinization.Scoring;

/// <summary>
/// Score.v1 predicate format for DSSE-signable attestation.
/// Contains all scoring dimensions in a single, deterministic payload.
/// All numeric scores use basis points (0-10000) for bit-exact determinism.
/// </summary>
public sealed record ScoreV1Predicate
{
    /// <summary>
    /// Predicate type URI for DSSE/In-Toto attestations.
    /// </summary>
    public const string PredicateType = "https://stella-ops.org/predicates/score/v1";

    /// <summary>
    /// Artifact being scored (PURL or component identifier).
    /// </summary>
    public required string ArtifactId { get; init; }

    /// <summary>
    /// Vulnerability identifier if applicable (CVE, GHSA, etc.).
    /// </summary>
    public string? VulnerabilityId { get; init; }

    /// <summary>
    /// Final trust score in basis points (0-10000).
    /// </summary>
    public required int TrustScoreBps { get; init; }

    /// <summary>
    /// Risk tier derived from trust score.
    /// </summary>
    public required string Tier { get; init; }

    /// <summary>
    /// Lattice verdict from K4 logic evaluation.
    /// </summary>
    public required K4Value LatticeVerdict { get; init; }

    /// <summary>
    /// Uncertainty entropy in basis points (0-10000).
    /// </summary>
    public required int UncertaintyBps { get; init; }

    /// <summary>
    /// Individual dimension scores in basis points.
    /// </summary>
    public required ScoreDimensionsBps Dimensions { get; init; }

    /// <summary>
    /// Weights used for this scoring (in basis points).
    /// </summary>
    public required WeightsBps WeightsUsed { get; init; }

    /// <summary>
    /// Policy digest (SHA-256) for reproducibility: the same policy digest,
    /// signals, and weights should reproduce the same score.
    /// </summary>
    public required string PolicyDigest { get; init; }

    /// <summary>
    /// Timestamp when score was computed (UTC).
    /// </summary>
    public required DateTimeOffset ComputedAt { get; init; }

    /// <summary>
    /// Tenant/namespace scope.
    /// </summary>
    public string? TenantId { get; init; }
}
/// <summary>
/// Individual scoring dimension values in basis points.
/// Nullable dimensions are null when the underlying signal is unavailable.
/// </summary>
public sealed record ScoreDimensionsBps
{
    /// <summary>
    /// Base severity score (from CVSS or equivalent) in basis points.
    /// </summary>
    public required int BaseSeverityBps { get; init; }

    /// <summary>
    /// Reachability score in basis points.
    /// </summary>
    public required int ReachabilityBps { get; init; }

    /// <summary>
    /// Evidence quality score in basis points.
    /// </summary>
    public required int EvidenceBps { get; init; }

    /// <summary>
    /// Provenance/supply-chain score in basis points.
    /// </summary>
    public required int ProvenanceBps { get; init; }

    /// <summary>
    /// EPSS score in basis points (if available).
    /// </summary>
    public int? EpssBps { get; init; }

    /// <summary>
    /// VEX status score in basis points (if available).
    /// </summary>
    public int? VexBps { get; init; }
}
/// <summary>
/// Risk tier enumeration for categorizing trust scores.
/// Ordered by increasing risk.
/// </summary>
public enum RiskTier
{
    /// <summary>Informational — lowest risk tier.</summary>
    Info = 0,

    /// <summary>Low risk.</summary>
    Low = 1,

    /// <summary>Medium risk.</summary>
    Medium = 2,

    /// <summary>High risk.</summary>
    High = 3,

    /// <summary>Critical — highest risk tier.</summary>
    Critical = 4
}
/// <summary>
/// Request for computing a trust score.
/// </summary>
public sealed record TrustScoreRequest
{
    /// <summary>Artifact to score (PURL or component identifier).</summary>
    public required string ArtifactId { get; init; }

    /// <summary>Vulnerability identifier if applicable (CVE, GHSA, etc.).</summary>
    public string? VulnerabilityId { get; init; }

    /// <summary>Optional tenant/namespace scope.</summary>
    public string? TenantId { get; init; }

    /// <summary>Optional pre-collected signal snapshot. Presumably the scorer gathers signals itself when null — TODO confirm against the implementation.</summary>
    public SignalSnapshot? Signals { get; init; }

    /// <summary>Optional policy override. Presumably the scorer's default policy applies when null — TODO confirm against the implementation.</summary>
    public ScorePolicy? PolicyOverride { get; init; }
}
/// <summary>
/// Result from trust score computation with full explainability.
/// </summary>
public sealed record TrustScoreResult
{
    /// <summary>
    /// The Score.v1 predicate suitable for attestation signing.
    /// </summary>
    public required ScoreV1Predicate Predicate { get; init; }

    /// <summary>
    /// Signal snapshot used for computation.
    /// </summary>
    public required SignalSnapshot SignalsUsed { get; init; }

    /// <summary>
    /// Whether the score computation succeeded.
    /// </summary>
    public required bool Success { get; init; }

    /// <summary>
    /// Error message if computation failed; expected to be null when
    /// <see cref="Success"/> is true.
    /// </summary>
    public string? Error { get; init; }
}

View File

@@ -0,0 +1,22 @@
namespace StellaOps.Policy.Determinization.Scoring.Triage;

/// <summary>
/// Provides observations to the triage queue evaluator.
/// Implementations may read from a database, cache, or in-memory store.
/// </summary>
public interface ITriageObservationSource
{
    /// <summary>
    /// Retrieve observations that are candidates for triage evaluation.
    /// The source should return observations that have not been evaluated recently
    /// (based on <see cref="TriageQueueOptions.MinEvaluationIntervalMinutes"/>).
    /// </summary>
    /// <param name="tenantId">Optional tenant filter. Null returns all tenants.</param>
    /// <param name="maxItems">Maximum number of observations to return (default 500).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Candidate observations, at most <paramref name="maxItems"/> of them.</returns>
    Task<IReadOnlyList<TriageObservation>> GetCandidatesAsync(
        string? tenantId = null,
        int maxItems = 500,
        CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,27 @@
namespace StellaOps.Policy.Determinization.Scoring.Triage;

/// <summary>
/// Evaluates a batch of observations and produces a priority-sorted triage queue.
/// </summary>
public interface ITriageQueueEvaluator
{
    /// <summary>
    /// Evaluate observations and produce a triage queue snapshot sorted by decay urgency.
    /// </summary>
    /// <param name="observations">Observations to evaluate.</param>
    /// <param name="now">Reference time for decay calculation; passing it in keeps results deterministic.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Sorted triage queue snapshot.</returns>
    Task<TriageQueueSnapshot> EvaluateAsync(
        IReadOnlyList<TriageObservation> observations,
        DateTimeOffset now,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Evaluate a single observation and determine if it should be queued.
    /// </summary>
    /// <param name="observation">The observation to evaluate.</param>
    /// <param name="now">Reference time for decay calculation.</param>
    /// <returns>Triage item, or null if the observation does not need triage.</returns>
    TriageItem? EvaluateSingle(TriageObservation observation, DateTimeOffset now);
}

View File

@@ -0,0 +1,18 @@
namespace StellaOps.Policy.Determinization.Scoring.Triage;

/// <summary>
/// Sink for stale observations that need re-analysis.
/// Implementations may enqueue to an in-memory channel, message bus, or database table.
/// </summary>
public interface ITriageReanalysisSink
{
    /// <summary>
    /// Enqueue stale observations for re-analysis.
    /// </summary>
    /// <param name="items">Triage items to re-analyse (already filtered to stale/approaching).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of items successfully enqueued; may be fewer than supplied if enqueueing is interrupted.</returns>
    Task<int> EnqueueAsync(
        IReadOnlyList<TriageItem> items,
        CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,94 @@
using System.Collections.Concurrent;
using System.Diagnostics.Metrics;

namespace StellaOps.Policy.Determinization.Scoring.Triage;

/// <summary>
/// In-memory implementation of <see cref="ITriageReanalysisSink"/>.
/// Stores enqueued items in a thread-safe collection for consumption by re-analysis workers.
/// Suitable for single-node deployments, testing, and offline/air-gap scenarios.
/// </summary>
public sealed class InMemoryTriageReanalysisSink : ITriageReanalysisSink
{
    private static readonly Meter Meter = new("StellaOps.Policy.Determinization");

    private static readonly Counter<long> EnqueuedCounter = Meter.CreateCounter<long>(
        "stellaops_triage_inmemory_enqueued_total",
        unit: "{items}",
        description: "Items enqueued in the in-memory triage sink");

    private static readonly Counter<long> DequeuedCounter = Meter.CreateCounter<long>(
        "stellaops_triage_inmemory_dequeued_total",
        unit: "{items}",
        description: "Items dequeued from the in-memory triage sink");

    private readonly ConcurrentQueue<TriageItem> _queue = new();
    private readonly ILogger<InMemoryTriageReanalysisSink> _logger;

    /// <summary>
    /// Creates an empty in-memory sink.
    /// </summary>
    /// <param name="logger">Diagnostic logger.</param>
    public InMemoryTriageReanalysisSink(ILogger<InMemoryTriageReanalysisSink> logger)
    {
        _logger = logger;
    }

    /// <summary>
    /// Current queue depth.
    /// </summary>
    public int Count => _queue.Count;

    /// <inheritdoc />
    public Task<int> EnqueueAsync(
        IReadOnlyList<TriageItem> items,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(items);

        var accepted = 0;
        foreach (var item in items)
        {
            // Honor cancellation between items; anything already enqueued stays enqueued.
            cancellationToken.ThrowIfCancellationRequested();
            _queue.Enqueue(item);
            accepted++;
        }

        EnqueuedCounter.Add(accepted);
        _logger.LogDebug("Enqueued {Count} triage items (queue depth: {Depth})", accepted, _queue.Count);
        return Task.FromResult(accepted);
    }

    /// <summary>
    /// Try to dequeue the next item for re-analysis.
    /// </summary>
    /// <param name="item">The dequeued item, if available.</param>
    /// <returns>True if an item was dequeued.</returns>
    public bool TryDequeue(out TriageItem? item)
    {
        if (!_queue.TryDequeue(out item))
        {
            return false;
        }

        DequeuedCounter.Add(1);
        return true;
    }

    /// <summary>
    /// Drain all pending items.
    /// </summary>
    /// <returns>All pending triage items, in queue order.</returns>
    public IReadOnlyList<TriageItem> DrainAll()
    {
        var drained = new List<TriageItem>();
        while (_queue.TryDequeue(out var next))
        {
            drained.Add(next);
        }

        if (drained.Count > 0)
        {
            DequeuedCounter.Add(drained.Count);
        }

        return drained;
    }

    /// <summary>
    /// Peek at all pending items without removing them.
    /// </summary>
    public IReadOnlyList<TriageItem> PeekAll() => _queue.ToArray();
}

View File

@@ -0,0 +1,172 @@
using System.Text.Json.Serialization;
using StellaOps.Policy.Determinization.Models;

namespace StellaOps.Policy.Determinization.Scoring.Triage;

/// <summary>
/// Priority classification for triage items based on decay urgency.
/// Higher values indicate more urgent re-analysis; band boundaries are expressed
/// as ranges of the observation's current decay multiplier.
/// </summary>
public enum TriagePriority
{
    /// <summary>No action needed — observation is fresh.</summary>
    None = 0,

    /// <summary>Observation approaching staleness (decay multiplier 0.50-0.70).</summary>
    Low = 1,

    /// <summary>Observation is stale (decay multiplier 0.30-0.50).</summary>
    Medium = 2,

    /// <summary>Observation is heavily decayed (decay multiplier 0.10-0.30).</summary>
    High = 3,

    /// <summary>Observation at or near floor — effectively no confidence (decay multiplier ≤ 0.10).</summary>
    Critical = 4
}
/// <summary>
/// Represents a single unknown observation queued for triage.
/// </summary>
/// <remarks>
/// Serialization contract: properties are emitted with snake_case JSON names via
/// <see cref="JsonPropertyNameAttribute"/>. Immutable record — safe to share across threads.
/// </remarks>
public sealed record TriageItem
{
/// <summary>CVE identifier.</summary>
[JsonPropertyName("cve")]
public required string Cve { get; init; }
/// <summary>Component PURL.</summary>
[JsonPropertyName("purl")]
public required string Purl { get; init; }
/// <summary>Tenant identifier for multi-tenant isolation.</summary>
[JsonPropertyName("tenant_id")]
public required string TenantId { get; init; }
/// <summary>The observation decay state.</summary>
[JsonPropertyName("observation_decay")]
public required ObservationDecay Decay { get; init; }
/// <summary>Current decay multiplier at evaluation time.</summary>
[JsonPropertyName("current_multiplier")]
public required double CurrentMultiplier { get; init; }
/// <summary>Computed triage priority based on decay urgency.</summary>
[JsonPropertyName("priority")]
public required TriagePriority Priority { get; init; }
/// <summary>Age in days since last refresh at evaluation time. Clamped to be non-negative by the evaluator.</summary>
[JsonPropertyName("age_days")]
public required double AgeDays { get; init; }
/// <summary>Days until the observation crosses the staleness threshold (negative if already stale).</summary>
[JsonPropertyName("days_until_stale")]
public required double DaysUntilStale { get; init; }
/// <summary>When this triage item was evaluated (UTC).</summary>
[JsonPropertyName("evaluated_at")]
public required DateTimeOffset EvaluatedAt { get; init; }
/// <summary>Optional signal gaps contributing to uncertainty. Empty when none were recorded.</summary>
[JsonPropertyName("signal_gaps")]
public IReadOnlyList<SignalGap> SignalGaps { get; init; } = [];
/// <summary>Recommended next action for the operator. Null when priority is None.</summary>
[JsonPropertyName("recommended_action")]
public string? RecommendedAction { get; init; }
}
/// <summary>
/// Result of evaluating a batch of observations for triage.
/// </summary>
/// <remarks>
/// Note: <see cref="Items"/> is capped by TriageQueueOptions.MaxSnapshotItems, and
/// <see cref="StaleCount"/>, <see cref="ApproachingCount"/> and <see cref="PrioritySummary"/>
/// are computed over that capped list — only <see cref="TotalEvaluated"/> reflects the
/// full input batch.
/// </remarks>
public sealed record TriageQueueSnapshot
{
/// <summary>Items sorted by priority (Critical first) then by days-until-stale ascending.</summary>
[JsonPropertyName("items")]
public required IReadOnlyList<TriageItem> Items { get; init; }
/// <summary>Total observations evaluated.</summary>
[JsonPropertyName("total_evaluated")]
public required int TotalEvaluated { get; init; }
/// <summary>Count of items that are already stale.</summary>
[JsonPropertyName("stale_count")]
public required int StaleCount { get; init; }
/// <summary>Count of items approaching staleness (Low priority).</summary>
[JsonPropertyName("approaching_count")]
public required int ApproachingCount { get; init; }
/// <summary>When this snapshot was computed (UTC).</summary>
[JsonPropertyName("evaluated_at")]
public required DateTimeOffset EvaluatedAt { get; init; }
/// <summary>Summary statistics by priority tier. Tiers with zero items are omitted.</summary>
[JsonPropertyName("priority_summary")]
public required IReadOnlyDictionary<TriagePriority, int> PrioritySummary { get; init; }
}
/// <summary>
/// Configuration for triage queue evaluation thresholds.
/// </summary>
/// <remarks>
/// NOTE(review): the classifier compares thresholds in the order
/// Critical &lt;= High &lt;= staleness &lt;= Approaching; values violating that ordering are
/// not validated here and would produce surprising priorities — confirm whether
/// options validation exists elsewhere.
/// </remarks>
public sealed record TriageQueueOptions
{
/// <summary>Default section name in appsettings.json.</summary>
public const string SectionName = "Determinization:TriageQueue";
/// <summary>
/// Multiplier threshold for "approaching staleness" (Low priority).
/// Observations with decay multiplier below this but above staleness are flagged.
/// Default: 0.70
/// </summary>
public double ApproachingThreshold { get; init; } = 0.70;
/// <summary>
/// Multiplier threshold for High priority.
/// Default: 0.30
/// </summary>
public double HighPriorityThreshold { get; init; } = 0.30;
/// <summary>
/// Multiplier threshold for Critical priority.
/// Default: 0.10
/// </summary>
public double CriticalPriorityThreshold { get; init; } = 0.10;
/// <summary>
/// Maximum number of items to include in a snapshot.
/// Default: 500
/// </summary>
public int MaxSnapshotItems { get; init; } = 500;
/// <summary>
/// Whether to include non-stale observations that are approaching staleness.
/// Default: true
/// </summary>
public bool IncludeApproaching { get; init; } = true;
/// <summary>
/// Minimum interval between triage evaluations for the same observation in minutes.
/// Default: 60
/// </summary>
public int MinEvaluationIntervalMinutes { get; init; } = 60;
}
/// <summary>
/// Represents an observation to be evaluated for triage.
/// </summary>
/// <remarks>
/// Input-side counterpart of <see cref="TriageItem"/>: carries only identity and
/// decay state; the evaluator computes priority, age, and recommended action.
/// No JSON attributes — this type is not part of the serialized snapshot contract.
/// </remarks>
public sealed record TriageObservation
{
/// <summary>CVE identifier.</summary>
public required string Cve { get; init; }
/// <summary>Component PURL.</summary>
public required string Purl { get; init; }
/// <summary>Tenant identifier.</summary>
public required string TenantId { get; init; }
/// <summary>Decay state of the observation.</summary>
public required ObservationDecay Decay { get; init; }
/// <summary>Optional signal gaps from the most recent uncertainty evaluation.</summary>
public IReadOnlyList<SignalGap> SignalGaps { get; init; } = [];
}

View File

@@ -0,0 +1,227 @@
using System.Diagnostics.Metrics;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Determinization.Models;
namespace StellaOps.Policy.Determinization.Scoring.Triage;
/// <summary>
/// Evaluates observations for decay-based triage and produces priority-sorted snapshots.
/// All calculations are deterministic given the same inputs and reference time.
/// </summary>
public sealed class TriageQueueEvaluator : ITriageQueueEvaluator
{
    private static readonly Meter Meter = new("StellaOps.Policy.Determinization");
    private static readonly Counter<long> ItemsEvaluatedCounter = Meter.CreateCounter<long>(
        "stellaops_triage_items_evaluated_total",
        unit: "{items}",
        description: "Total observations evaluated for triage");
    private static readonly Counter<long> ItemsQueuedCounter = Meter.CreateCounter<long>(
        "stellaops_triage_items_queued_total",
        unit: "{items}",
        description: "Observations added to triage queue");
    private static readonly Histogram<double> DecayMultiplierHistogram = Meter.CreateHistogram<double>(
        "stellaops_triage_decay_multiplier",
        unit: "ratio",
        description: "Decay multiplier distribution of triage items");

    private readonly ILogger<TriageQueueEvaluator> _logger;
    private readonly TriageQueueOptions _options;

    /// <summary>Creates the evaluator with its logger and threshold options.</summary>
    /// <exception cref="ArgumentNullException">When <paramref name="logger"/> or <paramref name="options"/> is null.</exception>
    public TriageQueueEvaluator(
        ILogger<TriageQueueEvaluator> logger,
        IOptions<TriageQueueOptions> options)
    {
        // Fail fast on misconfigured DI, consistent with sibling services in this module.
        ArgumentNullException.ThrowIfNull(logger);
        ArgumentNullException.ThrowIfNull(options);
        _logger = logger;
        _options = options.Value;
    }

    /// <inheritdoc />
    /// <remarks>
    /// Snapshot counts (stale/approaching/summary) are computed over the capped,
    /// sorted item list; only <c>TotalEvaluated</c> reflects the full input batch.
    /// </remarks>
    public Task<TriageQueueSnapshot> EvaluateAsync(
        IReadOnlyList<TriageObservation> observations,
        DateTimeOffset now,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(observations);
        var triageItems = new List<TriageItem>();
        foreach (var obs in observations)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var item = EvaluateSingle(obs, now);
            if (item is not null)
            {
                triageItems.Add(item);
            }
        }

        // Sort: Critical first, then by days-until-stale ascending (most urgent first).
        // CVE/PURL tie-breakers keep the ordering deterministic across runs.
        var sorted = triageItems
            .OrderByDescending(i => i.Priority)
            .ThenBy(i => i.DaysUntilStale)
            .ThenBy(i => i.Cve, StringComparer.Ordinal)
            .ThenBy(i => i.Purl, StringComparer.Ordinal)
            .Take(_options.MaxSnapshotItems)
            .ToList();

        // Compute summary. Iterating enum values (not encounter order) keeps the
        // dictionary's insertion order — and thus its serialized order — stable.
        var prioritySummary = new Dictionary<TriagePriority, int>();
        foreach (var priority in Enum.GetValues<TriagePriority>())
        {
            var count = sorted.Count(i => i.Priority == priority);
            if (count > 0)
                prioritySummary[priority] = count;
        }
        var staleCount = sorted.Count(i => i.DaysUntilStale < 0);
        var approachingCount = sorted.Count(i => i.Priority == TriagePriority.Low);

        var snapshot = new TriageQueueSnapshot
        {
            Items = sorted,
            TotalEvaluated = observations.Count,
            StaleCount = staleCount,
            ApproachingCount = approachingCount,
            EvaluatedAt = now,
            PrioritySummary = prioritySummary
        };

        // Emit metrics
        ItemsEvaluatedCounter.Add(observations.Count);
        ItemsQueuedCounter.Add(sorted.Count);
        _logger.LogInformation(
            "Triage evaluation: {Total} observations, {Queued} queued ({Stale} stale, {Approaching} approaching)",
            observations.Count,
            sorted.Count,
            staleCount,
            approachingCount);
        return Task.FromResult(snapshot);
    }

    /// <inheritdoc />
    /// <returns>
    /// The triage item, or null when the observation needs no action (priority None,
    /// or Low while <c>IncludeApproaching</c> is disabled).
    /// </returns>
    public TriageItem? EvaluateSingle(TriageObservation observation, DateTimeOffset now)
    {
        ArgumentNullException.ThrowIfNull(observation);
        var decay = observation.Decay;
        var multiplier = decay.CalculateDecay(now);
        var ageDays = (now - decay.RefreshedAt).TotalDays;
        // Fix: the original also called decay.CheckIsStale(now) here but never used
        // the result — staleness is fully determined by ClassifyPriority below.
        var priority = ClassifyPriority(multiplier, decay.StalenessThreshold);

        // Skip fresh observations; skip "approaching" ones unless configured in.
        if (priority == TriagePriority.None)
            return null;
        if (priority == TriagePriority.Low && !_options.IncludeApproaching)
            return null;

        var daysUntilStale = CalculateDaysUntilStale(
            decay.RefreshedAt,
            decay.HalfLifeDays,
            decay.StalenessThreshold,
            decay.Floor,
            now);
        var recommendedAction = DetermineRecommendedAction(priority, observation.SignalGaps);

        // Emit per-item metric
        DecayMultiplierHistogram.Record(multiplier,
            new KeyValuePair<string, object?>("priority", priority.ToString()),
            new KeyValuePair<string, object?>("tenant_id", observation.TenantId));

        return new TriageItem
        {
            Cve = observation.Cve,
            Purl = observation.Purl,
            TenantId = observation.TenantId,
            Decay = decay,
            CurrentMultiplier = multiplier,
            Priority = priority,
            AgeDays = Math.Max(0.0, ageDays), // clamp: RefreshedAt may be in the future relative to 'now'
            DaysUntilStale = daysUntilStale,
            EvaluatedAt = now,
            SignalGaps = observation.SignalGaps,
            RecommendedAction = recommendedAction
        };
    }

    /// <summary>
    /// Classifies triage priority based on current decay multiplier.
    /// Thresholds are checked most-urgent-first; assumes
    /// Critical &lt;= High &lt;= stalenessThreshold &lt;= Approaching.
    /// </summary>
    internal TriagePriority ClassifyPriority(double multiplier, double stalenessThreshold)
    {
        if (multiplier <= _options.CriticalPriorityThreshold)
            return TriagePriority.Critical;
        if (multiplier <= _options.HighPriorityThreshold)
            return TriagePriority.High;
        if (multiplier <= stalenessThreshold)
            return TriagePriority.Medium;
        if (multiplier <= _options.ApproachingThreshold)
            return TriagePriority.Low;
        return TriagePriority.None;
    }

    /// <summary>
    /// Calculates days until the observation crosses the staleness threshold.
    /// Negative values indicate the observation is already stale.
    /// Formula: days = -halfLife * ln(threshold) / ln(2), solving exp(-ln(2) * days / halfLife) = threshold
    /// </summary>
    internal static double CalculateDaysUntilStale(
        DateTimeOffset refreshedAt,
        double halfLifeDays,
        double stalenessThreshold,
        double floor,
        DateTimeOffset now)
    {
        // If floor >= threshold, the observation can never become stale via decay alone
        if (floor >= stalenessThreshold)
            return double.MaxValue;
        // Days at which multiplier crosses threshold:
        //   threshold = exp(-ln(2) * d / halfLife)
        //   ln(threshold) = -ln(2) * d / halfLife
        //   d = -halfLife * ln(threshold) / ln(2)
        var daysToThreshold = -halfLifeDays * Math.Log(stalenessThreshold) / Math.Log(2.0);
        var currentAgeDays = (now - refreshedAt).TotalDays;
        return daysToThreshold - currentAgeDays;
    }

    /// <summary>
    /// Determines a recommended action based on priority and signal gaps.
    /// Returns null for priority None (and for any unclassified value).
    /// </summary>
    private static string? DetermineRecommendedAction(TriagePriority priority, IReadOnlyList<SignalGap> gaps)
    {
        if (gaps.Count > 0)
        {
            var missingSignals = string.Join(", ", gaps.Select(g => g.Signal));
            return priority switch
            {
                TriagePriority.Critical => $"URGENT: Re-analyse immediately. Missing signals: {missingSignals}",
                TriagePriority.High => $"Re-analyse soon. Missing signals: {missingSignals}",
                TriagePriority.Medium => $"Schedule re-analysis. Missing signals: {missingSignals}",
                TriagePriority.Low => $"Monitor — approaching staleness. Missing signals: {missingSignals}",
                _ => null
            };
        }
        return priority switch
        {
            TriagePriority.Critical => "URGENT: Re-analyse immediately — evidence has decayed to floor",
            TriagePriority.High => "Re-analyse soon — evidence is heavily decayed",
            TriagePriority.Medium => "Schedule re-analysis — observation is stale",
            TriagePriority.Low => "Monitor — observation is approaching staleness",
            _ => null
        };
    }
}

View File

@@ -0,0 +1,139 @@
using System.Diagnostics.Metrics;
namespace StellaOps.Policy.Determinization.Scoring.Triage;
/// <summary>
/// Background service that periodically evaluates observations for decay-based staleness
/// and enqueues stale unknowns for re-analysis.
///
/// This service is the event-driven mechanism that bridges ObservationDecay.CheckIsStale()
/// with the re-analysis pipeline, fulfilling the automated re-analysis triggering requirement.
/// </summary>
/// <remarks>
/// NOTE(review): despite the name, this class does not implement IHostedService /
/// BackgroundService itself — ExecuteCycleAsync is designed to be driven by an
/// external host, timer, or scheduler (see its doc comment). Confirm the hosting
/// wiring elsewhere.
/// NOTE(review): this file's visible usings only import System.Diagnostics.Metrics,
/// but the class references ILogger&lt;&gt; and IOptions&lt;&gt; — it appears to need
/// `using Microsoft.Extensions.Logging;` and `using Microsoft.Extensions.Options;`
/// (TimeProvider lives in System). Verify against the full file header.
/// </remarks>
public sealed class UnknownTriageQueueService
{
private static readonly Meter Meter = new("StellaOps.Policy.Determinization");
// Counts completed evaluation cycles (incremented only on success).
private static readonly Counter<long> CyclesCounter = Meter.CreateCounter<long>(
"stellaops_triage_cycles_total",
unit: "{cycles}",
description: "Total triage evaluation cycles executed");
private static readonly Counter<long> EnqueuedCounter = Meter.CreateCounter<long>(
"stellaops_triage_reanalysis_enqueued_total",
unit: "{items}",
description: "Total items enqueued for re-analysis");
private static readonly Histogram<double> CycleDurationHistogram = Meter.CreateHistogram<double>(
"stellaops_triage_cycle_duration_seconds",
unit: "s",
description: "Duration of triage evaluation cycles");
private readonly ITriageQueueEvaluator _evaluator;
private readonly ITriageObservationSource _source;
private readonly ITriageReanalysisSink _sink;
private readonly ILogger<UnknownTriageQueueService> _logger;
private readonly TriageQueueOptions _options;
private readonly TimeProvider _timeProvider;
// NOTE(review): constructor arguments are not null-checked here, unlike sibling
// services in this module — consider adding ArgumentNullException.ThrowIfNull guards.
public UnknownTriageQueueService(
ITriageQueueEvaluator evaluator,
ITriageObservationSource source,
ITriageReanalysisSink sink,
ILogger<UnknownTriageQueueService> logger,
IOptions<TriageQueueOptions> options,
TimeProvider? timeProvider = null)
{
_evaluator = evaluator;
_source = source;
_sink = sink;
_logger = logger;
_options = options.Value;
// Injectable clock so tests can control 'now' deterministically.
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Execute a single triage cycle: fetch candidates, evaluate, enqueue stale items.
/// This method is designed to be called by a background host, timer, or scheduler.
/// </summary>
/// <param name="tenantId">Optional tenant filter.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The triage snapshot from this cycle.</returns>
/// <exception cref="OperationCanceledException">Propagated when the token fires mid-cycle.</exception>
public async Task<TriageQueueSnapshot> ExecuteCycleAsync(
string? tenantId = null,
CancellationToken cancellationToken = default)
{
var now = _timeProvider.GetUtcNow();
var sw = System.Diagnostics.Stopwatch.StartNew();
_logger.LogInformation(
"Starting triage cycle at {Now:O} for tenant {TenantId}",
now,
tenantId ?? "(all)");
try
{
// 1. Fetch candidate observations (capped by MaxSnapshotItems).
var candidates = await _source.GetCandidatesAsync(
tenantId,
_options.MaxSnapshotItems,
cancellationToken);
_logger.LogDebug("Fetched {Count} candidate observations", candidates.Count);
// 2. Evaluate for triage
var snapshot = await _evaluator.EvaluateAsync(candidates, now, cancellationToken);
// 3. Enqueue stale items for re-analysis (Medium, High, Critical).
// Relies on TriagePriority's numeric ordering (Medium=2 and above).
var reanalysisItems = snapshot.Items
.Where(i => i.Priority >= TriagePriority.Medium)
.ToList();
var enqueued = 0;
if (reanalysisItems.Count > 0)
{
// Sink reports how many it accepted; that figure drives the metric, not our count.
enqueued = await _sink.EnqueueAsync(reanalysisItems, cancellationToken);
EnqueuedCounter.Add(enqueued);
}
sw.Stop();
// Cycle metrics are only recorded on success (see catch blocks below).
CyclesCounter.Add(1);
CycleDurationHistogram.Record(sw.Elapsed.TotalSeconds,
new KeyValuePair<string, object?>("tenant_id", tenantId ?? "all"));
_logger.LogInformation(
"Triage cycle complete: {Evaluated} evaluated, {Queued} queued, {Enqueued} enqueued for re-analysis ({Duration:F2}s)",
snapshot.TotalEvaluated,
snapshot.Items.Count,
enqueued,
sw.Elapsed.TotalSeconds);
return snapshot;
}
catch (OperationCanceledException)
{
// Cancellation is expected during shutdown — log at warning, not error.
_logger.LogWarning("Triage cycle cancelled");
throw;
}
catch (Exception ex)
{
// Log and rethrow: the hosting scheduler owns the retry/backoff policy.
_logger.LogError(ex, "Triage cycle failed");
throw;
}
}
/// <summary>
/// Evaluate a specific set of observations (for on-demand triage, e.g. CLI/API).
/// Does not enqueue — returns the snapshot for the caller to act on.
/// </summary>
/// <param name="observations">Observations to evaluate.</param>
/// <param name="now">Reference time.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Triage queue snapshot.</returns>
public Task<TriageQueueSnapshot> EvaluateOnDemandAsync(
IReadOnlyList<TriageObservation> observations,
DateTimeOffset now,
CancellationToken cancellationToken = default)
{
return _evaluator.EvaluateAsync(observations, now, cancellationToken);
}
}

View File

@@ -0,0 +1,340 @@
using Microsoft.Extensions.Logging;
using StellaOps.Policy.Determinization.Evidence;
using StellaOps.Policy.Determinization.Models;
using StellaOps.Policy.Scoring;
using StellaOps.Policy.TrustLattice;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
namespace StellaOps.Policy.Determinization.Scoring;
/// <summary>
/// Unified facade composing TrustScoreAggregator + K4Lattice + ScorePolicy into a single
/// deterministic scoring pipeline. Entry point for computing trust scores with full
/// explainability and attestation-ready output.
/// </summary>
public interface ITrustScoreAlgebraFacade
{
/// <summary>
/// Compute a complete trust score for an artifact.
/// </summary>
/// <param name="request">Scoring request with artifact, signals, and optional policy override.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Complete scoring result with Score.v1 predicate.</returns>
/// <remarks>
/// The default implementation runs synchronously (scoring is CPU-bound and
/// deterministic) and returns a completed task; the token is not observed.
/// </remarks>
Task<TrustScoreResult> ComputeTrustScoreAsync(
TrustScoreRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Compute trust score synchronously (for batch/offline use).
/// </summary>
/// <param name="request">Scoring request with artifact, signals, and optional policy override.</param>
/// <returns>Scoring result; on internal failure, Success is false and Error carries the message.</returns>
TrustScoreResult ComputeTrustScore(TrustScoreRequest request);
}
/// <summary>
/// Implementation of the trust score algebra facade.
/// Composes all scoring components into a deterministic pipeline.
/// </summary>
public sealed class TrustScoreAlgebraFacade : ITrustScoreAlgebraFacade
{
private readonly TrustScoreAggregator _aggregator;
private readonly UncertaintyScoreCalculator _uncertaintyCalculator;
private readonly ILogger<TrustScoreAlgebraFacade> _logger;
private readonly TimeProvider _timeProvider;
// Serializer settings used only for the policy digest (camelCase, compact).
private readonly JsonSerializerOptions _jsonOptions;
/// <summary>Creates the facade; logger and clock are optional (null logger / system clock by default).</summary>
/// <exception cref="ArgumentNullException">When aggregator or uncertaintyCalculator is null.</exception>
public TrustScoreAlgebraFacade(
TrustScoreAggregator aggregator,
UncertaintyScoreCalculator uncertaintyCalculator,
ILogger<TrustScoreAlgebraFacade>? logger = null,
TimeProvider? timeProvider = null)
{
_aggregator = aggregator ?? throw new ArgumentNullException(nameof(aggregator));
_uncertaintyCalculator = uncertaintyCalculator ?? throw new ArgumentNullException(nameof(uncertaintyCalculator));
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<TrustScoreAlgebraFacade>.Instance;
_timeProvider = timeProvider ?? TimeProvider.System;
_jsonOptions = new JsonSerializerOptions
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false
};
}
/// <inheritdoc />
public Task<TrustScoreResult> ComputeTrustScoreAsync(
TrustScoreRequest request,
CancellationToken cancellationToken = default)
{
// Scoring is CPU-bound and deterministic; run synchronously
// (cancellationToken is intentionally not observed).
var result = ComputeTrustScore(request);
return Task.FromResult(result);
}
/// <inheritdoc />
/// <remarks>
/// Never throws for scoring failures: any exception is caught, logged, and surfaced
/// as a result with Success=false and an all-zero predicate (UncertaintyBps=10000).
/// Argument validation exceptions for a null request / blank ArtifactId do propagate.
/// </remarks>
public TrustScoreResult ComputeTrustScore(TrustScoreRequest request)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentException.ThrowIfNullOrWhiteSpace(request.ArtifactId);
try
{
var now = _timeProvider.GetUtcNow();
var policy = request.PolicyOverride ?? ScorePolicy.Default;
var signals = request.Signals ?? SignalSnapshot.Empty(
request.VulnerabilityId ?? "UNKNOWN",
request.ArtifactId,
now);
// Step 1: Calculate uncertainty score
var uncertaintyScore = _uncertaintyCalculator.Calculate(signals);
// Step 2: Aggregate signals using weighted formula
var trustScore = _aggregator.Aggregate(signals, uncertaintyScore);
// Step 3: Compute K4 lattice verdict
var latticeVerdict = ComputeLatticeVerdict(signals);
// Step 4: Extract dimension scores
var dimensions = ExtractDimensions(signals, policy);
// Step 5: Compute weighted final score in basis points
var finalBps = ComputeWeightedScoreBps(dimensions, policy.WeightsBps);
// Step 6: Determine risk tier
var tier = DetermineRiskTier(finalBps);
// Step 7: Compute policy digest
var policyDigest = ComputePolicyDigest(policy);
// Step 8: Build Score.v1 predicate
var predicate = new ScoreV1Predicate
{
ArtifactId = request.ArtifactId,
VulnerabilityId = request.VulnerabilityId,
TrustScoreBps = finalBps,
Tier = tier.ToString(),
LatticeVerdict = latticeVerdict,
UncertaintyBps = ToBasisPoints(uncertaintyScore.Entropy),
Dimensions = dimensions,
WeightsUsed = policy.WeightsBps,
PolicyDigest = policyDigest,
ComputedAt = now,
TenantId = request.TenantId
};
_logger.LogDebug(
"Computed trust score for {ArtifactId}: {ScoreBps}bps ({Tier}), lattice={Verdict}",
request.ArtifactId, finalBps, tier, latticeVerdict);
return new TrustScoreResult
{
Predicate = predicate,
SignalsUsed = signals,
Success = true
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to compute trust score for {ArtifactId}", request.ArtifactId);
return new TrustScoreResult
{
Predicate = CreateEmptyPredicate(request),
SignalsUsed = request.Signals ?? SignalSnapshot.Empty(
request.VulnerabilityId ?? "UNKNOWN",
request.ArtifactId,
_timeProvider.GetUtcNow()),
Success = false,
Error = ex.Message
};
}
}
/// <summary>
/// Compute K4 lattice verdict from signal states.
/// Signals that were never queried contribute nothing; an empty set joins to
/// whatever K4Lattice.JoinAll returns for no inputs.
/// </summary>
private static K4Value ComputeLatticeVerdict(SignalSnapshot signals)
{
var values = new List<K4Value>();
// Map each signal to K4 value
if (!signals.Vex.IsNotQueried)
{
values.Add(signals.Vex.Value?.Status?.ToLowerInvariant() switch
{
"affected" => K4Value.True, // Vulnerability confirmed
"not_affected" => K4Value.False, // Vulnerability not present
"fixed" => K4Value.False, // Fixed = not vulnerable
"under_investigation" => K4Value.Unknown,
_ => K4Value.Unknown
});
}
if (!signals.Reachability.IsNotQueried)
{
values.Add(signals.Reachability.Value?.Status switch
{
ReachabilityStatus.Reachable => K4Value.True,
ReachabilityStatus.Unreachable => K4Value.False,
ReachabilityStatus.Unknown => K4Value.Unknown,
_ => K4Value.Unknown
});
}
if (!signals.Epss.IsNotQueried && signals.Epss.Value is not null)
{
// High EPSS = likely exploitable. 0.5 cutoff is a hard-coded heuristic here
// (not policy-driven); EPSS below 0.5 maps to False, never Unknown.
values.Add(signals.Epss.Value.Epss >= 0.5 ? K4Value.True : K4Value.False);
}
// Join all values using K4 lattice
return K4Lattice.JoinAll(values);
}
/// <summary>
/// Extract dimension scores from signals.
/// NOTE(review): the <paramref name="policy"/> parameter is currently unused —
/// presumably reserved for policy-driven base severity (CVSS mapping); confirm
/// or remove. Base severity is a fixed 5000 bps (medium) placeholder.
/// </summary>
private static ScoreDimensionsBps ExtractDimensions(SignalSnapshot signals, ScorePolicy policy)
{
// Base severity from CVSS or default
var baseSeverityBps = 5000; // Default to medium if no CVSS
// Reachability
var reachabilityBps = signals.Reachability.Value?.Status switch
{
ReachabilityStatus.Reachable => 10000,
ReachabilityStatus.Unreachable => 0,
_ => 5000 // Unknown = mid-range
};
// Evidence quality (based on how many signals are present)
var signalCount = CountPresentSignals(signals);
var evidenceBps = signalCount switch
{
>= 5 => 9000,
4 => 7500,
3 => 6000,
2 => 4000,
1 => 2000,
_ => 1000
};
// Provenance (SBOM lineage quality)
var provenanceBps = signals.Sbom.Value is not null ? 8000 : 3000;
// Optional dimensions
int? epssBps = signals.Epss.Value is not null
? ToBasisPoints(signals.Epss.Value.Epss)
: null;
int? vexBps = signals.Vex.Value?.Status?.ToLowerInvariant() switch
{
"affected" => 10000,
"under_investigation" => 7000,
"fixed" => 1000,
"not_affected" => 0,
_ => null
};
return new ScoreDimensionsBps
{
BaseSeverityBps = baseSeverityBps,
ReachabilityBps = reachabilityBps,
EvidenceBps = evidenceBps,
ProvenanceBps = provenanceBps,
EpssBps = epssBps,
VexBps = vexBps
};
}
/// <summary>
/// Compute final weighted score in basis points.
/// Only the four core dimensions participate; the optional EpssBps/VexBps
/// dimensions are reported in the predicate but do not enter this weighting.
/// </summary>
private static int ComputeWeightedScoreBps(ScoreDimensionsBps dimensions, WeightsBps weights)
{
// Weighted average: Σ(dimension * weight) / Σ(weights)
// Since weights sum to 10000, we can use: Σ(dimension * weight) / 10000
// (long arithmetic: max product sum 4 * 10000 * 10000 fits comfortably)
long weighted =
(long)dimensions.BaseSeverityBps * weights.BaseSeverity +
(long)dimensions.ReachabilityBps * weights.Reachability +
(long)dimensions.EvidenceBps * weights.Evidence +
(long)dimensions.ProvenanceBps * weights.Provenance;
var result = (int)(weighted / 10000);
return Math.Clamp(result, 0, 10000);
}
/// <summary>
/// Determine risk tier from basis point score.
/// Boundaries: Critical ≥ 9000, High ≥ 7000, Medium ≥ 4000, Low ≥ 1000, else Info.
/// </summary>
private static RiskTier DetermineRiskTier(int scoreBps)
{
return scoreBps switch
{
>= 9000 => RiskTier.Critical,
>= 7000 => RiskTier.High,
>= 4000 => RiskTier.Medium,
>= 1000 => RiskTier.Low,
_ => RiskTier.Info
};
}
/// <summary>
/// Compute SHA-256 digest of policy for reproducibility.
/// NOTE(review): the digest is over System.Text.Json output, so it depends on the
/// policy type's property declaration order and serializer options — confirm this
/// matches the canonicalization used wherever the digest is verified.
/// </summary>
private string ComputePolicyDigest(ScorePolicy policy)
{
var json = JsonSerializer.Serialize(policy, _jsonOptions);
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(json));
return Convert.ToHexStringLower(bytes);
}
/// <summary>
/// Count present (non-null, non-queried) signals.
/// Considers exactly six signals: Vex, Epss, Reachability, Runtime, Backport, Sbom.
/// </summary>
private static int CountPresentSignals(SignalSnapshot signals)
{
var count = 0;
if (!signals.Vex.IsNotQueried && signals.Vex.Value is not null) count++;
if (!signals.Epss.IsNotQueried && signals.Epss.Value is not null) count++;
if (!signals.Reachability.IsNotQueried && signals.Reachability.Value is not null) count++;
if (!signals.Runtime.IsNotQueried && signals.Runtime.Value is not null) count++;
if (!signals.Backport.IsNotQueried && signals.Backport.Value is not null) count++;
if (!signals.Sbom.IsNotQueried && signals.Sbom.Value is not null) count++;
return count;
}
/// <summary>
/// Convert a 0.0-1.0 double to basis points.
/// Truncates (does not round): 0.99999 → 9999. Out-of-range inputs are clamped to [0, 10000].
/// </summary>
private static int ToBasisPoints(double value) =>
Math.Clamp((int)(value * 10000), 0, 10000);
/// <summary>
/// Create empty predicate for error cases: zeroed dimensions, Info tier,
/// Unknown verdict, full uncertainty (10000 bps), and PolicyDigest = "error".
/// </summary>
private ScoreV1Predicate CreateEmptyPredicate(TrustScoreRequest request)
{
return new ScoreV1Predicate
{
ArtifactId = request.ArtifactId,
VulnerabilityId = request.VulnerabilityId,
TrustScoreBps = 0,
Tier = RiskTier.Info.ToString(),
LatticeVerdict = K4Value.Unknown,
UncertaintyBps = 10000,
Dimensions = new ScoreDimensionsBps
{
BaseSeverityBps = 0,
ReachabilityBps = 0,
EvidenceBps = 0,
ProvenanceBps = 0
},
WeightsUsed = WeightsBps.Default,
PolicyDigest = "error",
ComputedAt = _timeProvider.GetUtcNow(),
TenantId = request.TenantId
};
}
}

View File

@@ -0,0 +1,59 @@
// -----------------------------------------------------------------------------
// IWeightManifestLoader.cs
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
// Task: T1 - Weight manifest loader interface
// Description: Contract for discovering, loading, validating, and selecting
// versioned weight manifests from the file system.
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
/// <summary>
/// Discovers, loads, validates, and selects versioned weight manifests.
/// </summary>
/// <remarks>
/// Implementations should be deterministic: the same manifest directory and
/// reference date must yield the same listing order and effective selection.
/// </remarks>
public interface IWeightManifestLoader
{
/// <summary>
/// Lists all available weight manifests discovered in the configured directory,
/// sorted by <c>effectiveFrom</c> descending (most recent first).
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>All discovered manifest load results.</returns>
Task<ImmutableArray<WeightManifestLoadResult>> ListAsync(CancellationToken cancellationToken = default);
/// <summary>
/// Loads and validates a specific manifest file by path.
/// </summary>
/// <param name="filePath">Absolute or relative path to the manifest file.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Load result containing the manifest and hash verification status.</returns>
Task<WeightManifestLoadResult> LoadAsync(string filePath, CancellationToken cancellationToken = default);
/// <summary>
/// Selects the manifest effective for a given reference date.
/// Picks the most recent manifest where <c>effectiveFrom ≤ referenceDate</c>.
/// </summary>
/// <param name="referenceDate">The date to select for (typically <c>DateTimeOffset.UtcNow</c>).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The applicable manifest, or <c>null</c> if none is effective.</returns>
Task<WeightManifestLoadResult?> SelectEffectiveAsync(
DateTimeOffset referenceDate,
CancellationToken cancellationToken = default);
/// <summary>
/// Validates a manifest: schema version, weight normalization, content hash.
/// </summary>
/// <param name="result">The load result to validate.</param>
/// <returns>Validation issues found (empty if valid).</returns>
ImmutableArray<string> Validate(WeightManifestLoadResult result);
/// <summary>
/// Computes a diff between two manifests, comparing weight values and thresholds.
/// </summary>
/// <param name="from">Source (older) manifest.</param>
/// <param name="to">Target (newer) manifest.</param>
/// <returns>Diff summary.</returns>
WeightManifestDiff Diff(WeightManifestDocument from, WeightManifestDocument to);
}

View File

@@ -0,0 +1,277 @@
// -----------------------------------------------------------------------------
// WeightManifestCommands.cs
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
// Task: T1 - CLI weight management commands
// Description: Service-level commands that back the `stella weights` CLI:
// list, validate, diff, activate, hash. Each produces a
// deterministic, serializable result model.
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Text.Json;
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
/// <summary>
/// Provides the backing logic for CLI weight management commands:
/// <c>stella weights list</c>, <c>stella weights validate</c>,
/// <c>stella weights diff</c>, <c>stella weights activate</c>,
/// <c>stella weights hash</c>.
/// </summary>
public sealed class WeightManifestCommands
{
// Loader that performs all discovery/validation/diff work; commands are thin wrappers.
private readonly IWeightManifestLoader _loader;
/// <summary>Creates the command service over the given manifest loader.</summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="loader"/> is null.</exception>
public WeightManifestCommands(IWeightManifestLoader loader)
{
_loader = loader ?? throw new ArgumentNullException(nameof(loader));
}
// ── stella weights list ──────────────────────────────────────────────
/// <summary>
/// Lists all discovered weight manifests with their versions,
/// effective dates, profiles, and hash status.
/// </summary>
public async Task<WeightsListResult> ListAsync(CancellationToken ct = default)
{
    var discovered = await _loader.ListAsync(ct).ConfigureAwait(false);

    // Build entries in discovery order; builder is sized exactly so
    // MoveToImmutable transfers without a copy.
    var builder = ImmutableArray.CreateBuilder<WeightsListEntry>(discovered.Length);
    foreach (var result in discovered)
    {
        // Hash status mapping: "auto" when no hash was carried on the manifest,
        // otherwise "verified"/"mismatch" per the load-time check.
        string hashStatus;
        if (!result.Manifest.HasComputedHash)
        {
            hashStatus = "auto";
        }
        else
        {
            hashStatus = result.HashVerified ? "verified" : "mismatch";
        }

        builder.Add(new WeightsListEntry
        {
            Version = result.Manifest.Version,
            EffectiveFrom = result.Manifest.EffectiveFrom,
            Profile = result.Manifest.Profile,
            HashStatus = hashStatus,
            SourcePath = result.SourcePath,
            Description = result.Manifest.Description
        });
    }

    return new WeightsListResult { Entries = builder.MoveToImmutable() };
}
// ── stella weights validate ──────────────────────────────────────────
/// <summary>
/// Validates a specific manifest file or all discovered manifests.
/// </summary>
/// <param name="filePath">
/// If specified, validate only this file. Otherwise validate all discovered manifests.
/// </param>
/// <param name="ct">Cancellation token.</param>
/// <returns>
/// One entry per validated manifest; <c>AllValid</c> is true when no entry has issues
/// (vacuously true for an empty set).
/// </returns>
public async Task<WeightsValidateResult> ValidateAsync(
    string? filePath = null,
    CancellationToken ct = default)
{
    var results = new List<WeightsValidateEntry>();

    // Local helper: the original duplicated this entry construction in both
    // the single-file and all-files branches; keep it in one place.
    WeightsValidateEntry ToEntry(WeightManifestLoadResult loadResult)
    {
        var issues = _loader.Validate(loadResult);
        return new WeightsValidateEntry
        {
            Version = loadResult.Manifest.Version,
            SourcePath = loadResult.SourcePath,
            Issues = issues,
            IsValid = issues.IsEmpty
        };
    }

    if (!string.IsNullOrEmpty(filePath))
    {
        results.Add(ToEntry(await _loader.LoadAsync(filePath, ct).ConfigureAwait(false)));
    }
    else
    {
        foreach (var loadResult in await _loader.ListAsync(ct).ConfigureAwait(false))
        {
            results.Add(ToEntry(loadResult));
        }
    }

    return new WeightsValidateResult
    {
        Entries = [.. results],
        AllValid = results.TrueForAll(e => e.IsValid)
    };
}
// ── stella weights diff ──────────────────────────────────────────────
/// <summary>
/// Diffs two manifest files, or two versions by version identifier.
/// </summary>
public async Task<WeightManifestDiff> DiffAsync(
string fromPath,
string toPath,
CancellationToken ct = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(fromPath);
ArgumentException.ThrowIfNullOrWhiteSpace(toPath);
var from = await _loader.LoadAsync(fromPath, ct).ConfigureAwait(false);
var to = await _loader.LoadAsync(toPath, ct).ConfigureAwait(false);
return _loader.Diff(from.Manifest, to.Manifest);
}
/// <summary>
/// Diffs two manifests by version string (searches the discovered set).
/// </summary>
public async Task<WeightManifestDiff> DiffByVersionAsync(
string fromVersion,
string toVersion,
CancellationToken ct = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(fromVersion);
ArgumentException.ThrowIfNullOrWhiteSpace(toVersion);
var all = await _loader.ListAsync(ct).ConfigureAwait(false);
var from = all.FirstOrDefault(r =>
string.Equals(r.Manifest.Version, fromVersion, StringComparison.OrdinalIgnoreCase));
var to = all.FirstOrDefault(r =>
string.Equals(r.Manifest.Version, toVersion, StringComparison.OrdinalIgnoreCase));
if (from is null)
throw new WeightManifestLoadException($"Manifest version '{fromVersion}' not found.");
if (to is null)
throw new WeightManifestLoadException($"Manifest version '{toVersion}' not found.");
return _loader.Diff(from.Manifest, to.Manifest);
}
// ── stella weights activate ──────────────────────────────────────────
/// <summary>
/// Selects the currently active (effective) manifest for a given date.
/// </summary>
public async Task<WeightsActivateResult> ActivateAsync(
DateTimeOffset? referenceDate = null,
CancellationToken ct = default)
{
var date = referenceDate ?? DateTimeOffset.UtcNow;
var result = await _loader.SelectEffectiveAsync(date, ct).ConfigureAwait(false);
if (result is null)
{
return new WeightsActivateResult
{
Found = false,
ReferenceDate = date,
Version = null,
SourcePath = null,
ContentHash = null
};
}
return new WeightsActivateResult
{
Found = true,
ReferenceDate = date,
Version = result.Manifest.Version,
SourcePath = result.SourcePath,
ContentHash = result.ComputedHash,
EffectiveFrom = result.Manifest.EffectiveFrom,
Profile = result.Manifest.Profile
};
}
// ── stella weights hash ──────────────────────────────────────────────
/// <summary>
/// Computes the content hash for a manifest file and optionally replaces
/// the "sha256:auto" placeholder in-place.
/// </summary>
/// <param name="filePath">Path to the manifest file.</param>
/// <param name="writeBack">If true, writes the computed hash back to the file.</param>
public async Task<WeightsHashResult> HashAsync(
string filePath,
bool writeBack = false,
CancellationToken ct = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
var resolvedPath = Path.GetFullPath(filePath);
var json = await File.ReadAllTextAsync(resolvedPath, ct).ConfigureAwait(false);
var computedHash = WeightManifestHashComputer.ComputeFromJson(json);
var hasPlaceholder = json.Contains(
WeightManifestConstants.AutoHashPlaceholder, StringComparison.Ordinal);
string? updatedJson = null;
if (writeBack && hasPlaceholder)
{
var (updated, _) = WeightManifestHashComputer.ReplaceAutoHash(json);
updatedJson = updated;
await File.WriteAllTextAsync(resolvedPath, updatedJson, ct).ConfigureAwait(false);
}
return new WeightsHashResult
{
SourcePath = resolvedPath,
ComputedHash = computedHash,
HadPlaceholder = hasPlaceholder,
WrittenBack = writeBack && hasPlaceholder
};
}
}
// ── CLI result models ────────────────────────────────────────────────────────
/// <summary>Result of <c>stella weights list</c>.</summary>
public sealed record WeightsListResult
{
    /// <summary>Discovered manifests, one entry per file, in loader order.</summary>
    public required ImmutableArray<WeightsListEntry> Entries { get; init; }
}
/// <summary>Single row in the <c>stella weights list</c> output.</summary>
public sealed record WeightsListEntry
{
    /// <summary>Manifest version identifier.</summary>
    public required string Version { get; init; }
    /// <summary>Date from which the manifest is effective.</summary>
    public required DateTimeOffset EffectiveFrom { get; init; }
    /// <summary>Profile name (e.g. "production").</summary>
    public required string Profile { get; init; }
    /// <summary>Hash state: "verified", "mismatch", or "auto" (placeholder hash).</summary>
    public required string HashStatus { get; init; }
    /// <summary>File path the manifest was loaded from.</summary>
    public required string SourcePath { get; init; }
    /// <summary>Optional human-readable description from the manifest.</summary>
    public string? Description { get; init; }
}
/// <summary>Result of <c>stella weights validate</c>.</summary>
public sealed record WeightsValidateResult
{
    /// <summary>Per-manifest validation outcomes.</summary>
    public required ImmutableArray<WeightsValidateEntry> Entries { get; init; }
    /// <summary>True when every entry passed validation.</summary>
    public required bool AllValid { get; init; }
}
/// <summary>Validation outcome for a single manifest.</summary>
public sealed record WeightsValidateEntry
{
    /// <summary>Manifest version identifier.</summary>
    public required string Version { get; init; }
    /// <summary>File path the manifest was loaded from.</summary>
    public required string SourcePath { get; init; }
    /// <summary>Validation issue messages; empty when valid.</summary>
    public required ImmutableArray<string> Issues { get; init; }
    /// <summary>True when <see cref="Issues"/> is empty.</summary>
    public required bool IsValid { get; init; }
}
/// <summary>Result of <c>stella weights activate</c>.</summary>
public sealed record WeightsActivateResult
{
    /// <summary>Whether an effective manifest was found for the reference date.</summary>
    public required bool Found { get; init; }
    /// <summary>Date used for the effectiveFrom selection.</summary>
    public required DateTimeOffset ReferenceDate { get; init; }
    // The fields below are null when Found is false.
    /// <summary>Selected manifest's version identifier.</summary>
    public string? Version { get; init; }
    /// <summary>File path of the selected manifest.</summary>
    public string? SourcePath { get; init; }
    /// <summary>Computed content hash of the selected manifest.</summary>
    public string? ContentHash { get; init; }
    /// <summary>EffectiveFrom date of the selected manifest.</summary>
    public DateTimeOffset? EffectiveFrom { get; init; }
    /// <summary>Profile of the selected manifest.</summary>
    public string? Profile { get; init; }
}
/// <summary>Result of <c>stella weights hash</c>.</summary>
public sealed record WeightsHashResult
{
    /// <summary>Resolved (absolute) path of the hashed manifest file.</summary>
    public required string SourcePath { get; init; }
    /// <summary>Computed hash in "sha256:&lt;hex&gt;" format.</summary>
    public required string ComputedHash { get; init; }
    /// <summary>Whether the file contained the "sha256:auto" placeholder.</summary>
    public required bool HadPlaceholder { get; init; }
    /// <summary>Whether the computed hash was written back to the file.</summary>
    public required bool WrittenBack { get; init; }
}

View File

@@ -0,0 +1,185 @@
// -----------------------------------------------------------------------------
// WeightManifestHashComputer.cs
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
// Task: T1 - Content hash auto-compute
// Description: Deterministic SHA-256 content hash computation for weight
// manifests. Hashes the canonical content (excluding the
// contentHash field itself) to produce a stable digest.
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
/// <summary>
/// Computes deterministic SHA-256 content hashes for weight manifests.
/// The hash covers all content except the <c>contentHash</c> field itself.
/// </summary>
public static class WeightManifestHashComputer
{
    // Canonical serialization settings used when re-serializing a manifest for hashing.
    private static readonly JsonSerializerOptions CanonicalOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
    };

    /// <summary>
    /// Computes the SHA-256 content hash for a manifest's raw JSON content.
    /// The top-level <c>contentHash</c> field is excluded from the hash input to allow
    /// the hash to be embedded in the same document it covers.
    /// </summary>
    /// <param name="jsonContent">Raw JSON content of the manifest file.</param>
    /// <returns>Hash in "sha256:&lt;hex&gt;" format.</returns>
    /// <exception cref="JsonException">Thrown when <paramref name="jsonContent"/> is not valid JSON.</exception>
    public static string ComputeFromJson(string jsonContent)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(jsonContent);
        // Parse JSON, remove contentHash field, re-serialize canonically.
        // JsonDocument owns pooled buffers and must be disposed.
        using var doc = JsonDocument.Parse(jsonContent);
        var canonical = BuildCanonicalContent(doc.RootElement);
        var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
        return $"{WeightManifestConstants.HashPrefix}{Convert.ToHexStringLower(hashBytes)}";
    }

    /// <summary>
    /// Computes the SHA-256 content hash for a deserialized manifest.
    /// Re-serializes with the <c>contentHash</c> set to the placeholder,
    /// then hashes the canonical form.
    /// </summary>
    /// <param name="manifest">The manifest document to hash.</param>
    /// <returns>Hash in "sha256:&lt;hex&gt;" format.</returns>
    public static string ComputeFromManifest(WeightManifestDocument manifest)
    {
        ArgumentNullException.ThrowIfNull(manifest);
        // Serialize with placeholder so the stored contentHash can't affect the result
        // (the field is stripped again inside ComputeFromJson).
        var withPlaceholder = manifest with
        {
            ContentHash = WeightManifestConstants.AutoHashPlaceholder
        };
        var json = JsonSerializer.Serialize(withPlaceholder, CanonicalOptions);
        return ComputeFromJson(json);
    }

    /// <summary>
    /// Verifies that a manifest's stored content hash matches its computed hash.
    /// </summary>
    /// <param name="jsonContent">Raw JSON content of the manifest file.</param>
    /// <param name="storedHash">The hash stored in the manifest's contentHash field.</param>
    /// <returns>True if the hashes match; false for an empty or placeholder stored hash.</returns>
    public static bool Verify(string jsonContent, string storedHash)
    {
        if (string.IsNullOrEmpty(storedHash)
            || storedHash.Equals(WeightManifestConstants.AutoHashPlaceholder, StringComparison.Ordinal))
        {
            return false;
        }
        var computed = ComputeFromJson(jsonContent);
        return computed.Equals(storedHash, StringComparison.Ordinal);
    }

    /// <summary>
    /// Replaces the "sha256:auto" placeholder in raw JSON with the computed hash.
    /// Returns the updated JSON content and the computed hash.
    /// </summary>
    /// <param name="jsonContent">Raw JSON with contentHash placeholder.</param>
    /// <returns>Tuple of (updatedJson, computedHash). The JSON is unchanged when no placeholder is present.</returns>
    public static (string UpdatedJson, string ComputedHash) ReplaceAutoHash(string jsonContent)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(jsonContent);
        var computedHash = ComputeFromJson(jsonContent);
        // Textual replacement keeps the rest of the file (formatting, comments) untouched.
        var updatedJson = jsonContent.Replace(
            $"\"{WeightManifestConstants.AutoHashPlaceholder}\"",
            $"\"{computedHash}\"",
            StringComparison.Ordinal);
        return (updatedJson, computedHash);
    }

    /// <summary>
    /// Builds a canonical JSON string from a <see cref="JsonElement"/>,
    /// excluding the top-level <c>contentHash</c> field for hash stability.
    /// Properties are sorted alphabetically for determinism.
    /// </summary>
    private static string BuildCanonicalContent(JsonElement root)
    {
        using var stream = new MemoryStream();
        using var writer = new Utf8JsonWriter(stream, new JsonWriterOptions
        {
            Indented = false,
            SkipValidation = false
        });
        WriteCanonical(writer, root, excludeField: "contentHash");
        writer.Flush();
        return Encoding.UTF8.GetString(stream.ToArray());
    }

    /// <summary>
    /// Recursively writes JSON with sorted property keys and an optional excluded field.
    /// Note: <paramref name="excludeField"/> applies only to the object it is passed for;
    /// it is deliberately not propagated to nested objects, so only the root-level
    /// contentHash is stripped.
    /// </summary>
    private static void WriteCanonical(Utf8JsonWriter writer, JsonElement element, string? excludeField = null)
    {
        switch (element.ValueKind)
        {
            case JsonValueKind.Object:
                writer.WriteStartObject();
                // Sort properties alphabetically (ordinal) for deterministic output.
                var properties = element.EnumerateObject()
                    .Where(p => !string.Equals(p.Name, excludeField, StringComparison.Ordinal))
                    .OrderBy(p => p.Name, StringComparer.Ordinal)
                    .ToList();
                foreach (var property in properties)
                {
                    writer.WritePropertyName(property.Name);
                    WriteCanonical(writer, property.Value);
                }
                writer.WriteEndObject();
                break;
            case JsonValueKind.Array:
                writer.WriteStartArray();
                foreach (var item in element.EnumerateArray())
                {
                    WriteCanonical(writer, item);
                }
                writer.WriteEndArray();
                break;
            case JsonValueKind.String:
                writer.WriteStringValue(element.GetString());
                break;
            case JsonValueKind.Number:
                // Preserve integral values exactly; fall back to double for fractional numbers.
                if (element.TryGetInt64(out var longValue))
                    writer.WriteNumberValue(longValue);
                else
                    writer.WriteNumberValue(element.GetDouble());
                break;
            case JsonValueKind.True:
                writer.WriteBooleanValue(true);
                break;
            case JsonValueKind.False:
                writer.WriteBooleanValue(false);
                break;
            case JsonValueKind.Null:
                writer.WriteNullValue();
                break;
        }
    }
}

View File

@@ -0,0 +1,403 @@
// -----------------------------------------------------------------------------
// WeightManifestLoader.cs
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
// Task: T1 - Weight manifest loader implementation
// Description: File-system-based weight manifest discovery, loading,
// validation, selection by effectiveFrom date, and diffing.
// Deterministic and offline-friendly (no network calls).
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Diagnostics;
using System.Diagnostics.Metrics;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
/// <summary>
/// Configuration options for the weight manifest loader.
/// </summary>
public sealed record WeightManifestLoaderOptions
{
    /// <summary>Configuration section name.</summary>
    public const string SectionName = "Determinization:WeightManifest";
    /// <summary>
    /// Base directory to discover manifests in.
    /// Defaults to <c>etc/weights</c> relative to the application root.
    /// Relative paths are resolved against <c>AppContext.BaseDirectory</c> by the loader.
    /// </summary>
    public string ManifestDirectory { get; init; } = WeightManifestConstants.DefaultManifestDirectory;
    /// <summary>
    /// Glob pattern for manifest files. Defaults to <c>*.weights.json</c>.
    /// </summary>
    public string FilePattern { get; init; } = WeightManifestConstants.DefaultGlobPattern;
    /// <summary>
    /// Whether to require valid content hashes (reject "sha256:auto").
    /// In production this should be true; in development, false is acceptable.
    /// Defaults to false.
    /// </summary>
    public bool RequireComputedHash { get; init; }
    /// <summary>
    /// Whether to fail on hash mismatch (true) or log a warning (false).
    /// Defaults to false (warn only).
    /// </summary>
    public bool StrictHashVerification { get; init; }
}
/// <summary>
/// File-system-based weight manifest loader with deterministic behavior.
/// Discovers manifests from a configured directory, validates them,
/// computes/verifies content hashes, and selects by effectiveFrom date.
/// </summary>
public sealed class WeightManifestLoader : IWeightManifestLoader
{
    private static readonly Meter Meter = new("StellaOps.Policy.Determinization.WeightManifest", "1.0.0");
    private static readonly Counter<long> ManifestsLoaded = Meter.CreateCounter<long>(
        "stellaops.weight_manifest.loaded_total", "manifests", "Total manifests loaded");
    private static readonly Counter<long> ManifestsValidated = Meter.CreateCounter<long>(
        "stellaops.weight_manifest.validated_total", "manifests", "Total manifests validated");
    private static readonly Counter<long> HashMismatches = Meter.CreateCounter<long>(
        "stellaops.weight_manifest.hash_mismatch_total", "errors", "Content hash mismatches detected");
    private static readonly Counter<long> ValidationErrors = Meter.CreateCounter<long>(
        "stellaops.weight_manifest.validation_error_total", "errors", "Validation errors encountered");

    private static readonly JsonSerializerOptions DeserializeOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        ReadCommentHandling = JsonCommentHandling.Skip,
        AllowTrailingCommas = true
    };

    // All numeric formatting in diffs uses the invariant culture so output is
    // deterministic regardless of the host locale (e.g. "0.2500" never "0,2500").
    private static readonly System.Globalization.CultureInfo Invariant =
        System.Globalization.CultureInfo.InvariantCulture;

    private readonly WeightManifestLoaderOptions _options;
    private readonly ILogger<WeightManifestLoader> _logger;

    /// <summary>Creates the loader from configured options and a logger.</summary>
    public WeightManifestLoader(
        IOptions<WeightManifestLoaderOptions> options,
        ILogger<WeightManifestLoader> logger)
    {
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public async Task<ImmutableArray<WeightManifestLoadResult>> ListAsync(
        CancellationToken cancellationToken = default)
    {
        var directory = ResolveManifestDirectory();
        if (!Directory.Exists(directory))
        {
            _logger.LogWarning("Weight manifest directory not found: {Directory}", directory);
            return [];
        }
        // Ordinal sort of file paths gives a stable discovery order across platforms.
        var files = Directory.GetFiles(directory, _options.FilePattern)
            .OrderBy(f => f, StringComparer.Ordinal)
            .ToList();
        if (files.Count == 0)
        {
            _logger.LogWarning("No weight manifest files found in {Directory}", directory);
            return [];
        }
        var results = new List<WeightManifestLoadResult>(files.Count);
        foreach (var file in files)
        {
            cancellationToken.ThrowIfCancellationRequested();
            try
            {
                var result = await LoadCoreAsync(file, cancellationToken).ConfigureAwait(false);
                results.Add(result);
            }
            catch (WeightManifestLoadException ex)
            {
                // Invalid manifests are skipped (logged) so one bad file doesn't block the rest.
                _logger.LogWarning(ex, "Skipping invalid manifest: {File}", file);
            }
        }
        // Sort by effectiveFrom descending (most recent first)
        return [.. results.OrderByDescending(r => r.Manifest.EffectiveFrom)];
    }

    /// <inheritdoc />
    public Task<WeightManifestLoadResult> LoadAsync(
        string filePath,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
        return LoadCoreAsync(filePath, cancellationToken);
    }

    /// <inheritdoc />
    public async Task<WeightManifestLoadResult?> SelectEffectiveAsync(
        DateTimeOffset referenceDate,
        CancellationToken cancellationToken = default)
    {
        var all = await ListAsync(cancellationToken).ConfigureAwait(false);
        if (all.IsEmpty)
            return null;
        // Already sorted by effectiveFrom descending; pick first where effectiveFrom <= referenceDate
        return all.FirstOrDefault(r => r.Manifest.EffectiveFrom <= referenceDate);
    }

    /// <inheritdoc />
    public ImmutableArray<string> Validate(WeightManifestLoadResult result)
    {
        ArgumentNullException.ThrowIfNull(result);
        var issues = new List<string>();
        var manifest = result.Manifest;
        // Schema version check
        if (!string.Equals(manifest.SchemaVersion, WeightManifestConstants.SupportedSchemaVersion,
            StringComparison.Ordinal))
        {
            issues.Add(
                $"Unsupported schema version '{manifest.SchemaVersion}'. Expected '{WeightManifestConstants.SupportedSchemaVersion}'.");
        }
        // Version field
        if (string.IsNullOrWhiteSpace(manifest.Version))
        {
            issues.Add("Version field is required.");
        }
        // Content hash
        if (_options.RequireComputedHash && !manifest.HasComputedHash)
        {
            issues.Add("Content hash is required but manifest contains placeholder 'sha256:auto'.");
        }
        if (manifest.HasComputedHash && !result.HashVerified)
        {
            issues.Add(
                $"Content hash mismatch: stored={manifest.ContentHash}, computed={result.ComputedHash}.");
        }
        // Each weight profile (when present) must be normalized: sum to 1.0 within tolerance.
        if (manifest.Weights.Legacy.Count > 0)
        {
            var legacySum = manifest.Weights.Legacy.Values.Sum();
            if (Math.Abs(legacySum - 1.0) > 0.001)
            {
                issues.Add($"Legacy weights sum to {legacySum:F4}, expected 1.0.");
            }
        }
        if (manifest.Weights.Advisory.Count > 0)
        {
            var advisorySum = manifest.Weights.Advisory.Values.Sum();
            if (Math.Abs(advisorySum - 1.0) > 0.001)
            {
                issues.Add($"Advisory weights sum to {advisorySum:F4}, expected 1.0.");
            }
        }
        if (manifest.SignalWeightsForEntropy.Count > 0)
        {
            var signalSum = manifest.SignalWeightsForEntropy.Values.Sum();
            if (Math.Abs(signalSum - 1.0) > 0.001)
            {
                issues.Add($"Signal weights for entropy sum to {signalSum:F4}, expected 1.0.");
            }
        }
        ManifestsValidated.Add(1);
        if (issues.Count > 0)
        {
            ValidationErrors.Add(issues.Count);
        }
        return [.. issues];
    }

    /// <inheritdoc />
    public WeightManifestDiff Diff(WeightManifestDocument from, WeightManifestDocument to)
    {
        ArgumentNullException.ThrowIfNull(from);
        ArgumentNullException.ThrowIfNull(to);
        var diffs = new List<WeightManifestFieldDiff>();
        // Compare scalar fields ("O" round-trip format is culture-invariant by definition)
        CompareScalar(diffs, "version", from.Version, to.Version);
        CompareScalar(diffs, "profile", from.Profile, to.Profile);
        CompareScalar(diffs, "effectiveFrom", from.EffectiveFrom.ToString("O"), to.EffectiveFrom.ToString("O"));
        // Compare weight dictionaries
        CompareWeightDictionary(diffs, "weights.legacy", from.Weights.Legacy, to.Weights.Legacy);
        CompareWeightDictionary(diffs, "weights.advisory", from.Weights.Advisory, to.Weights.Advisory);
        CompareWeightDictionary(diffs, "signalWeightsForEntropy",
            from.SignalWeightsForEntropy, to.SignalWeightsForEntropy);
        // Compare bucket thresholds (only when both sides define them)
        if (from.Buckets is not null && to.Buckets is not null)
        {
            CompareScalar(diffs, "buckets.actNowMin",
                from.Buckets.ActNowMin.ToString(Invariant), to.Buckets.ActNowMin.ToString(Invariant));
            CompareScalar(diffs, "buckets.scheduleNextMin",
                from.Buckets.ScheduleNextMin.ToString(Invariant), to.Buckets.ScheduleNextMin.ToString(Invariant));
            CompareScalar(diffs, "buckets.investigateMin",
                from.Buckets.InvestigateMin.ToString(Invariant), to.Buckets.InvestigateMin.ToString(Invariant));
        }
        // Compare determinization thresholds (only when both sides define them)
        if (from.DeterminizationThresholds is not null && to.DeterminizationThresholds is not null)
        {
            CompareScalar(diffs, "determinizationThresholds.manualReviewEntropy",
                from.DeterminizationThresholds.ManualReviewEntropy.ToString("F4", Invariant),
                to.DeterminizationThresholds.ManualReviewEntropy.ToString("F4", Invariant));
            CompareScalar(diffs, "determinizationThresholds.refreshEntropy",
                from.DeterminizationThresholds.RefreshEntropy.ToString("F4", Invariant),
                to.DeterminizationThresholds.RefreshEntropy.ToString("F4", Invariant));
        }
        return new WeightManifestDiff
        {
            FromVersion = from.Version,
            ToVersion = to.Version,
            Differences = [.. diffs]
        };
    }

    // ── Private helpers ──────────────────────────────────────────────────

    /// <summary>Reads, deserializes, and hash-checks a single manifest file.</summary>
    /// <exception cref="WeightManifestLoadException">
    /// Thrown for a missing file, JSON parse error, or (in strict mode) a hash mismatch.
    /// </exception>
    private async Task<WeightManifestLoadResult> LoadCoreAsync(
        string filePath,
        CancellationToken cancellationToken)
    {
        var resolvedPath = Path.GetFullPath(filePath);
        if (!File.Exists(resolvedPath))
        {
            throw new WeightManifestLoadException($"Weight manifest file not found: {resolvedPath}");
        }
        var json = await File.ReadAllTextAsync(resolvedPath, cancellationToken).ConfigureAwait(false);
        WeightManifestDocument manifest;
        try
        {
            manifest = JsonSerializer.Deserialize<WeightManifestDocument>(json, DeserializeOptions)
                ?? throw new WeightManifestLoadException(
                    $"Failed to deserialize weight manifest from {resolvedPath}: empty document");
        }
        catch (JsonException ex)
        {
            throw new WeightManifestLoadException(
                $"JSON parse error in {resolvedPath}: {ex.Message}", ex);
        }
        // Compute content hash and verify against the stored value (when not a placeholder).
        var computedHash = WeightManifestHashComputer.ComputeFromJson(json);
        var hashVerified = manifest.HasComputedHash
            && computedHash.Equals(manifest.ContentHash, StringComparison.Ordinal);
        if (manifest.HasComputedHash && !hashVerified)
        {
            HashMismatches.Add(1);
            var message =
                $"Content hash mismatch for {resolvedPath}: stored={manifest.ContentHash}, computed={computedHash}";
            if (_options.StrictHashVerification)
            {
                throw new WeightManifestLoadException(message);
            }
            _logger.LogWarning("{Message}", message);
        }
        ManifestsLoaded.Add(1);
        _logger.LogDebug(
            "Loaded weight manifest {Version} from {Path} (hash verified: {HashVerified})",
            manifest.Version, resolvedPath, hashVerified);
        return new WeightManifestLoadResult
        {
            Manifest = manifest,
            SourcePath = resolvedPath,
            HashVerified = hashVerified,
            ComputedHash = computedHash
        };
    }

    /// <summary>Resolves the configured manifest directory to an absolute path.</summary>
    private string ResolveManifestDirectory()
    {
        var dir = _options.ManifestDirectory;
        if (Path.IsPathRooted(dir))
            return dir;
        // Resolve relative to the application base directory for deterministic behavior
        // independent of the process working directory.
        return Path.GetFullPath(dir, AppContext.BaseDirectory);
    }

    /// <summary>Records a field diff when the two string values differ (ordinal).</summary>
    private static void CompareScalar(
        List<WeightManifestFieldDiff> diffs, string path, string? from, string? to)
    {
        if (!string.Equals(from, to, StringComparison.Ordinal))
        {
            diffs.Add(new WeightManifestFieldDiff { Path = path, OldValue = from, NewValue = to });
        }
    }

    /// <summary>
    /// Records added, removed, and changed keys between two weight dictionaries.
    /// Values within 0.0001 of each other are treated as equal.
    /// </summary>
    private static void CompareWeightDictionary(
        List<WeightManifestFieldDiff> diffs,
        string prefix,
        ImmutableDictionary<string, double> from,
        ImmutableDictionary<string, double> to)
    {
        // Union of keys, ordered for deterministic diff output.
        var allKeys = from.Keys.Union(to.Keys).Order().ToList();
        foreach (var key in allKeys)
        {
            var hasFrom = from.TryGetValue(key, out var fromVal);
            var hasTo = to.TryGetValue(key, out var toVal);
            if (!hasFrom)
            {
                diffs.Add(new WeightManifestFieldDiff
                {
                    Path = $"{prefix}.{key}",
                    OldValue = null,
                    NewValue = toVal.ToString("F4", Invariant)
                });
            }
            else if (!hasTo)
            {
                diffs.Add(new WeightManifestFieldDiff
                {
                    Path = $"{prefix}.{key}",
                    OldValue = fromVal.ToString("F4", Invariant),
                    NewValue = null
                });
            }
            else if (Math.Abs(fromVal - toVal) > 0.0001)
            {
                diffs.Add(new WeightManifestFieldDiff
                {
                    Path = $"{prefix}.{key}",
                    OldValue = fromVal.ToString("F4", Invariant),
                    NewValue = toVal.ToString("F4", Invariant)
                });
            }
        }
    }
}
/// <summary>
/// Exception thrown when weight manifest loading or validation fails.
/// </summary>
public sealed class WeightManifestLoadException : Exception
{
    /// <summary>Creates the exception with a human-readable failure description.</summary>
    public WeightManifestLoadException(string message) : base(message) { }
    /// <summary>Creates the exception wrapping an underlying cause (e.g. a JsonException).</summary>
    public WeightManifestLoadException(string message, Exception inner) : base(message, inner) { }
}

View File

@@ -0,0 +1,278 @@
// -----------------------------------------------------------------------------
// WeightManifestModels.cs
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
// Task: T1 - Versioned weight manifest models
// Description: Immutable models for weight manifests with content-addressed
// hashing, versioning, and deterministic serialization.
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
/// <summary>
/// Immutable representation of a versioned weight manifest file.
/// </summary>
public sealed record WeightManifestDocument
{
    /// <summary>JSON Schema URI.</summary>
    [JsonPropertyName("$schema")]
    public string? Schema { get; init; }
    /// <summary>Schema version (e.g. "1.0.0"). Validated against the supported version.</summary>
    [JsonPropertyName("schemaVersion")]
    public required string SchemaVersion { get; init; }
    /// <summary>Manifest version identifier (e.g. "v2026-01-22").</summary>
    [JsonPropertyName("version")]
    public required string Version { get; init; }
    /// <summary>UTC date from which this manifest is effective.</summary>
    [JsonPropertyName("effectiveFrom")]
    public required DateTimeOffset EffectiveFrom { get; init; }
    /// <summary>Profile name (e.g. "production", "staging"). Defaults to "production".</summary>
    [JsonPropertyName("profile")]
    public string Profile { get; init; } = "production";
    /// <summary>Human-readable description of this manifest.</summary>
    [JsonPropertyName("description")]
    public string? Description { get; init; }
    /// <summary>
    /// Content hash in "sha256:&lt;hex&gt;" format.
    /// The placeholder "sha256:auto" means the hash has not been computed yet.
    /// </summary>
    [JsonPropertyName("contentHash")]
    public required string ContentHash { get; init; }
    /// <summary>Legacy 6-dimension EWS weights (plus advisory profile).</summary>
    [JsonPropertyName("weights")]
    public required WeightManifestWeights Weights { get; init; }
    /// <summary>Dimension human-readable names. Empty when not specified.</summary>
    [JsonPropertyName("dimensionNames")]
    public ImmutableDictionary<string, string> DimensionNames { get; init; } =
        ImmutableDictionary<string, string>.Empty;
    /// <summary>Dimensions that subtract from risk score. Empty when not specified.</summary>
    [JsonPropertyName("subtractiveDimensions")]
    public ImmutableArray<string> SubtractiveDimensions { get; init; } = [];
    /// <summary>Guardrail configurations; null when the manifest defines none.</summary>
    [JsonPropertyName("guardrails")]
    public WeightManifestGuardrails? Guardrails { get; init; }
    /// <summary>Bucket boundaries for action tiers; null when the manifest defines none.</summary>
    [JsonPropertyName("buckets")]
    public WeightManifestBuckets? Buckets { get; init; }
    /// <summary>Determinization thresholds for entropy-based triage; null when not specified.</summary>
    [JsonPropertyName("determinizationThresholds")]
    public WeightManifestDeterminizationThresholds? DeterminizationThresholds { get; init; }
    /// <summary>Signal weights for entropy calculation (maps to <see cref="SignalWeights"/>).</summary>
    [JsonPropertyName("signalWeightsForEntropy")]
    public ImmutableDictionary<string, double> SignalWeightsForEntropy { get; init; } =
        ImmutableDictionary<string, double>.Empty;
    /// <summary>Provenance metadata for audit; null when not specified.</summary>
    [JsonPropertyName("metadata")]
    public WeightManifestMetadata? Metadata { get; init; }
    /// <summary>
    /// Whether the content hash is a computed hash vs. the "sha256:auto" placeholder
    /// (or empty). Not serialized.
    /// </summary>
    [JsonIgnore]
    public bool HasComputedHash => !string.IsNullOrEmpty(ContentHash)
        && !ContentHash.Equals(WeightManifestConstants.AutoHashPlaceholder, StringComparison.Ordinal);
}
/// <summary>
/// Multi-profile weights block (legacy + advisory).
/// </summary>
public sealed record WeightManifestWeights
{
    /// <summary>
    /// Legacy 6-dimension weights. When non-empty, values are expected to sum
    /// to 1.0 (enforced by loader validation within a 0.001 tolerance).
    /// </summary>
    [JsonPropertyName("legacy")]
    public ImmutableDictionary<string, double> Legacy { get; init; } =
        ImmutableDictionary<string, double>.Empty;
    /// <summary>
    /// Advisory weights. Same sum-to-1.0 expectation as <see cref="Legacy"/>.
    /// </summary>
    [JsonPropertyName("advisory")]
    public ImmutableDictionary<string, double> Advisory { get; init; } =
        ImmutableDictionary<string, double>.Empty;
}
/// <summary>
/// Guardrail configuration from a weight manifest.
/// </summary>
public sealed record WeightManifestGuardrails
{
    /// <summary>Cap applied for not-affected findings; null when not configured.</summary>
    [JsonPropertyName("notAffectedCap")]
    public GuardrailRule? NotAffectedCap { get; init; }
    /// <summary>Floor applied for runtime-observed findings; null when not configured.</summary>
    [JsonPropertyName("runtimeFloor")]
    public GuardrailRule? RuntimeFloor { get; init; }
    /// <summary>Cap applied for speculative findings; null when not configured.</summary>
    [JsonPropertyName("speculativeCap")]
    public GuardrailRule? SpeculativeCap { get; init; }
}
/// <summary>
/// Individual guardrail rule.
/// </summary>
public sealed record GuardrailRule
{
    /// <summary>Whether this guardrail is active.</summary>
    [JsonPropertyName("enabled")]
    public bool Enabled { get; init; }
    /// <summary>Upper bound imposed on the score (cap); null when not applicable.</summary>
    [JsonPropertyName("maxScore")]
    public int? MaxScore { get; init; }
    /// <summary>Lower bound imposed on the score (floor); null when not applicable.</summary>
    [JsonPropertyName("minScore")]
    public int? MinScore { get; init; }
    // NOTE(review): the Bkp/Rts/Rch abbreviations below presumably refer to named
    // scoring dimensions consumed by the guardrail engine — not visible from this
    // file; confirm against the scoring documentation before relying on them.
    /// <summary>Minimum "Bkp" dimension value required for the rule to apply — TODO confirm semantics.</summary>
    [JsonPropertyName("requiresBkpMin")]
    public double? RequiresBkpMin { get; init; }
    /// <summary>Maximum "Rts" dimension value required for the rule to apply — TODO confirm semantics.</summary>
    [JsonPropertyName("requiresRtsMax")]
    public double? RequiresRtsMax { get; init; }
    /// <summary>Minimum "Rts" dimension value required for the rule to apply — TODO confirm semantics.</summary>
    [JsonPropertyName("requiresRtsMin")]
    public double? RequiresRtsMin { get; init; }
    /// <summary>Maximum "Rch" dimension value required for the rule to apply — TODO confirm semantics.</summary>
    [JsonPropertyName("requiresRchMax")]
    public double? RequiresRchMax { get; init; }
}
/// <summary>
/// Action bucket boundaries.
/// </summary>
public sealed record WeightManifestBuckets
{
    /// <summary>Minimum score for the "act now" bucket. Defaults to 90.</summary>
    [JsonPropertyName("actNowMin")]
    public int ActNowMin { get; init; } = 90;
    /// <summary>Minimum score for the "schedule next" bucket. Defaults to 70.</summary>
    [JsonPropertyName("scheduleNextMin")]
    public int ScheduleNextMin { get; init; } = 70;
    /// <summary>Minimum score for the "investigate" bucket. Defaults to 40.</summary>
    [JsonPropertyName("investigateMin")]
    public int InvestigateMin { get; init; } = 40;
}
/// <summary>
/// Entropy-based determinization thresholds.
/// </summary>
public sealed record WeightManifestDeterminizationThresholds
{
    /// <summary>Entropy at or above which manual review is triggered. Defaults to 0.60.</summary>
    [JsonPropertyName("manualReviewEntropy")]
    public double ManualReviewEntropy { get; init; } = 0.60;
    /// <summary>Entropy at or above which a signal refresh is triggered. Defaults to 0.40.</summary>
    [JsonPropertyName("refreshEntropy")]
    public double RefreshEntropy { get; init; } = 0.40;
}
/// <summary>
/// Provenance metadata for audit trail.
/// </summary>
public sealed record WeightManifestMetadata
{
    /// <summary>Author or tool that produced the manifest.</summary>
    [JsonPropertyName("createdBy")]
    public string? CreatedBy { get; init; }
    /// <summary>Creation timestamp; null when unspecified.</summary>
    [JsonPropertyName("createdAt")]
    public DateTimeOffset? CreatedAt { get; init; }
    /// <summary>Version-by-version change history. Empty when unspecified.</summary>
    [JsonPropertyName("changelog")]
    public ImmutableArray<ChangelogEntry> Changelog { get; init; } = [];
    /// <summary>Free-form notes. Empty when unspecified.</summary>
    [JsonPropertyName("notes")]
    public ImmutableArray<string> Notes { get; init; } = [];
}
/// <summary>
/// Changelog entry for manifest versioning audit.
/// </summary>
public sealed record ChangelogEntry
{
    /// <summary>Manifest version the entry describes.</summary>
    [JsonPropertyName("version")]
    public string? Version { get; init; }
    /// <summary>Date of the change, as written in the manifest (free-form string).</summary>
    [JsonPropertyName("date")]
    public string? Date { get; init; }
    /// <summary>Individual change descriptions. Empty when unspecified.</summary>
    [JsonPropertyName("changes")]
    public ImmutableArray<string> Changes { get; init; } = [];
}
/// <summary>
/// Constants for the weight manifest system.
/// </summary>
public static class WeightManifestConstants
{
    /// <summary>Placeholder that signals "compute hash at build/load time".</summary>
    public const string AutoHashPlaceholder = "sha256:auto";
    /// <summary>Prefix for content hashes ("sha256:&lt;hex&gt;" format).</summary>
    public const string HashPrefix = "sha256:";
    /// <summary>Supported schema version; the loader rejects any other value.</summary>
    public const string SupportedSchemaVersion = "1.0.0";
    /// <summary>Default glob pattern for discovering manifest files.</summary>
    public const string DefaultGlobPattern = "*.weights.json";
    /// <summary>
    /// Default manifest directory. Relative paths are resolved against
    /// <c>AppContext.BaseDirectory</c> by the loader.
    /// </summary>
    public const string DefaultManifestDirectory = "etc/weights";
}
/// <summary>
/// Result of loading and validating a weight manifest.
/// </summary>
public sealed record WeightManifestLoadResult
{
    /// <summary>The loaded and validated manifest.</summary>
    public required WeightManifestDocument Manifest { get; init; }
    /// <summary>Absolute file path the manifest was loaded from.</summary>
    public required string SourcePath { get; init; }
    /// <summary>
    /// True only when the manifest carried a real (non-placeholder) hash AND it
    /// matched the computed hash; false for placeholder hashes.
    /// </summary>
    public required bool HashVerified { get; init; }
    /// <summary>Freshly computed content hash (may differ from the stored one).</summary>
    public required string ComputedHash { get; init; }
}
/// <summary>
/// Result of comparing two weight manifests.
/// </summary>
public sealed record WeightManifestDiff
{
    /// <summary>Version of the source (older) manifest.</summary>
    public required string FromVersion { get; init; }

    /// <summary>Version of the target (newer) manifest.</summary>
    public required string ToVersion { get; init; }

    /// <summary>Per-field differences detected between the two manifests.</summary>
    public required ImmutableArray<WeightManifestFieldDiff> Differences { get; init; }

    /// <summary>True when at least one field differs between the manifests.</summary>
    public bool HasDifferences
    {
        get { return Differences.Length > 0; }
    }
}
/// <summary>
/// Individual field difference between two manifests.
/// </summary>
public sealed record WeightManifestFieldDiff
{
    /// <summary>Dot-delimited path to the changed field.</summary>
    public required string Path { get; init; }
    /// <summary>
    /// Previous value (serialized as string).
    /// NOTE(review): null presumably means the field was absent in the source
    /// manifest — confirm against the diff producer.
    /// </summary>
    public required string? OldValue { get; init; }
    /// <summary>
    /// New value (serialized as string).
    /// NOTE(review): null presumably means the field was removed — confirm
    /// against the diff producer.
    /// </summary>
    public required string? NewValue { get; init; }
}

View File

@@ -3,6 +3,9 @@ using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Determinization.Scoring;
using StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
using StellaOps.Policy.Determinization.Scoring.Triage;
using StellaOps.Policy.Determinization.Scoring.WeightManifest;
namespace StellaOps.Policy.Determinization;
@@ -43,8 +46,26 @@ public static class ServiceCollectionExtensions
services.TryAddSingleton<DecayedConfidenceCalculator>();
services.TryAddSingleton<IDecayedConfidenceCalculator>(sp => sp.GetRequiredService<DecayedConfidenceCalculator>());
services.TryAddSingleton<ImpactScoreCalculator>();
services.TryAddSingleton<IImpactScoreCalculator>(sp => sp.GetRequiredService<ImpactScoreCalculator>());
services.TryAddSingleton<CombinedImpactCalculator>();
services.TryAddSingleton<ICombinedImpactCalculator>(sp => sp.GetRequiredService<CombinedImpactCalculator>());
services.TryAddSingleton<TrustScoreAggregator>();
services.TryAddSingleton<TrustScoreAlgebraFacade>();
services.TryAddSingleton<ITrustScoreAlgebraFacade>(sp => sp.GetRequiredService<TrustScoreAlgebraFacade>());
// EWS: 6-dimension Evidence-Weighted Score model
RegisterEwsServices(services);
// Triage: decay-based staleness evaluation and re-analysis queue
RegisterTriageServices(services);
// Weight Manifests: versioned weight discovery, validation, selection
RegisterWeightManifestServices(services);
return services;
}
@@ -66,8 +87,76 @@ public static class ServiceCollectionExtensions
services.TryAddSingleton<DecayedConfidenceCalculator>();
services.TryAddSingleton<IDecayedConfidenceCalculator>(sp => sp.GetRequiredService<DecayedConfidenceCalculator>());
services.TryAddSingleton<ImpactScoreCalculator>();
services.TryAddSingleton<IImpactScoreCalculator>(sp => sp.GetRequiredService<ImpactScoreCalculator>());
services.TryAddSingleton<CombinedImpactCalculator>();
services.TryAddSingleton<ICombinedImpactCalculator>(sp => sp.GetRequiredService<CombinedImpactCalculator>());
services.TryAddSingleton<TrustScoreAggregator>();
services.TryAddSingleton<TrustScoreAlgebraFacade>();
services.TryAddSingleton<ITrustScoreAlgebraFacade>(sp => sp.GetRequiredService<TrustScoreAlgebraFacade>());
// TSF-004: Delta-if-present calculator for hypothetical score simulations
services.TryAddSingleton<DeltaIfPresentCalculator>();
services.TryAddSingleton<IDeltaIfPresentCalculator>(sp => sp.GetRequiredService<DeltaIfPresentCalculator>());
// EWS: 6-dimension Evidence-Weighted Score model
RegisterEwsServices(services);
// Triage: decay-based staleness evaluation and re-analysis queue
RegisterTriageServices(services);
// Weight Manifests: versioned weight discovery, validation, selection
RegisterWeightManifestServices(services);
return services;
}
/// <summary>
/// Registers the Evidence-Weighted Score (EWS) services: the six dimension
/// normalizers, the guardrails engine, and the unified calculator.
/// </summary>
/// <param name="services">The service collection to register into.</param>
private static void RegisterEwsServices(IServiceCollection services)
{
    // Register all 6 dimension normalizers (AddSingleton, not TryAdd,
    // so IEnumerable<IEwsDimensionNormalizer> resolves all of them).
    // Registration order here determines the enumeration order consumers see.
    services.AddSingleton<IEwsDimensionNormalizer, ReachabilityNormalizer>();
    services.AddSingleton<IEwsDimensionNormalizer, RuntimeSignalsNormalizer>();
    services.AddSingleton<IEwsDimensionNormalizer, BackportEvidenceNormalizer>();
    services.AddSingleton<IEwsDimensionNormalizer, ExploitabilityNormalizer>();
    services.AddSingleton<IEwsDimensionNormalizer, SourceConfidenceNormalizer>();
    services.AddSingleton<IEwsDimensionNormalizer, MitigationStatusNormalizer>();
    // Register guardrails engine (TryAdd: a host-level registration wins).
    services.TryAddSingleton<IGuardrailsEngine, GuardrailsEngine>();
    // Register unified EWS calculator.
    services.TryAddSingleton<IEwsCalculator, EwsCalculator>();
}
/// <summary>
/// Wires up the triage subsystem: staleness evaluation and the re-analysis queue.
/// </summary>
/// <param name="services">The service collection to register into.</param>
private static void RegisterTriageServices(IServiceCollection services)
{
    // Options carry their built-in defaults when no configuration section is bound.
    services.AddOptions<TriageQueueOptions>();

    // Decay-based staleness evaluator.
    services.TryAddSingleton<ITriageQueueEvaluator, TriageQueueEvaluator>();

    // The in-memory sink is only the default: TryAdd lets a host-level
    // ITriageReanalysisSink registration take precedence.
    services.TryAddSingleton<InMemoryTriageReanalysisSink>();
    services.TryAddSingleton<ITriageReanalysisSink>(
        static provider => provider.GetRequiredService<InMemoryTriageReanalysisSink>());

    // The triage queue service itself.
    services.TryAddSingleton<UnknownTriageQueueService>();
}
/// <summary>
/// Registers weight-manifest services: loader options, the manifest loader,
/// and the CLI command service.
/// </summary>
/// <param name="services">The service collection to register into.</param>
private static void RegisterWeightManifestServices(IServiceCollection services)
{
    // Register loader options (defaults if not bound to config).
    services.AddOptions<WeightManifestLoaderOptions>();
    // Register manifest loader (TryAdd: a host-level registration wins).
    services.TryAddSingleton<IWeightManifestLoader, WeightManifestLoader>();
    // Register CLI command service.
    services.TryAddSingleton<WeightManifestCommands>();
}
}