Partly implemented or unimplemented features — now implemented
This commit is contained in:
@@ -0,0 +1,114 @@
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
/// Combined score result that integrates impact and uncertainty scores.
/// </summary>
public sealed record CombinedImpactScore
{
    /// <summary>Impact score from multi-factor calculation.</summary>
    [JsonPropertyName("impact")]
    public required ImpactScore Impact { get; init; }

    /// <summary>Uncertainty score from entropy calculation.</summary>
    [JsonPropertyName("uncertainty")]
    public required UncertaintyScore Uncertainty { get; init; }

    /// <summary>
    /// Effective priority score combining impact and uncertainty.
    /// Higher uncertainty reduces the effective priority.
    /// Formula: impact * (1 - uncertainty_entropy * uncertainty_penalty_factor)
    /// Clamped to [0.0, 1.0] by the calculator.
    /// </summary>
    [JsonPropertyName("effective_priority")]
    public required double EffectivePriority { get; init; }

    /// <summary>
    /// Basis points representation of effective priority (0-10000).
    /// Derived from <see cref="EffectivePriority"/> via rounding.
    /// </summary>
    [JsonPropertyName("effective_priority_basis_points")]
    public required int EffectivePriorityBasisPoints { get; init; }

    /// <summary>When this combined score was calculated (UTC).</summary>
    [JsonPropertyName("calculated_at")]
    public required DateTimeOffset CalculatedAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// Interface for combined impact-uncertainty score calculation.
/// </summary>
public interface ICombinedImpactCalculator
{
    /// <summary>
    /// Calculates combined impact-uncertainty score for prioritization.
    /// </summary>
    /// <param name="impactContext">Impact context with environment, data sensitivity, etc.</param>
    /// <param name="signalSnapshot">Signal snapshot for uncertainty calculation.</param>
    /// <param name="uncertaintyPenaltyFactor">How much uncertainty reduces priority (default 0.5).</param>
    /// <returns>Combined score with impact, uncertainty, and effective priority.</returns>
    /// <exception cref="ArgumentNullException">
    /// Thrown when <paramref name="impactContext"/> or <paramref name="signalSnapshot"/> is null.
    /// </exception>
    CombinedImpactScore Calculate(
        ImpactContext impactContext,
        SignalSnapshot signalSnapshot,
        double uncertaintyPenaltyFactor = 0.5);
}
|
||||
|
||||
/// <summary>
/// Calculates combined impact-uncertainty scores for unknown triage.
/// Integrates ImpactScoreCalculator with UncertaintyScoreCalculator for
/// a unified prioritization signal.
/// </summary>
public sealed class CombinedImpactCalculator : ICombinedImpactCalculator
{
    private readonly IImpactScoreCalculator _impactCalculator;
    private readonly IUncertaintyScoreCalculator _uncertaintyCalculator;
    private readonly ILogger<CombinedImpactCalculator> _logger;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Initializes a new instance of the <see cref="CombinedImpactCalculator"/> class.
    /// </summary>
    /// <param name="impactCalculator">Calculator producing the impact component.</param>
    /// <param name="uncertaintyCalculator">Calculator producing the uncertainty component.</param>
    /// <param name="logger">Logger for diagnostic output.</param>
    /// <param name="timeProvider">Clock abstraction; defaults to <see cref="TimeProvider.System"/>.</param>
    /// <exception cref="ArgumentNullException">Thrown when a required dependency is null.</exception>
    public CombinedImpactCalculator(
        IImpactScoreCalculator impactCalculator,
        IUncertaintyScoreCalculator uncertaintyCalculator,
        ILogger<CombinedImpactCalculator> logger,
        TimeProvider? timeProvider = null)
    {
        // Guard injected dependencies up front so a misconfigured DI container
        // fails fast here rather than as a NullReferenceException at calculation time.
        ArgumentNullException.ThrowIfNull(impactCalculator);
        ArgumentNullException.ThrowIfNull(uncertaintyCalculator);
        ArgumentNullException.ThrowIfNull(logger);

        _impactCalculator = impactCalculator;
        _uncertaintyCalculator = uncertaintyCalculator;
        _logger = logger;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public CombinedImpactScore Calculate(
        ImpactContext impactContext,
        SignalSnapshot signalSnapshot,
        double uncertaintyPenaltyFactor = 0.5)
    {
        ArgumentNullException.ThrowIfNull(impactContext);
        ArgumentNullException.ThrowIfNull(signalSnapshot);

        // Calculate individual scores.
        var impact = _impactCalculator.Calculate(impactContext);
        var uncertainty = _uncertaintyCalculator.Calculate(signalSnapshot);

        // Effective priority = impact * (1 - uncertainty * penalty).
        // When entropy is high, priority is reduced. The penalty factor is
        // clamped to [0, 1] so out-of-range caller input cannot invert the score.
        var penaltyFactor = Math.Clamp(uncertaintyPenaltyFactor, 0.0, 1.0);
        var effectivePriority = impact.Score * (1.0 - uncertainty.Entropy * penaltyFactor);
        effectivePriority = Math.Clamp(effectivePriority, 0.0, 1.0);
        var effectivePriorityBasisPoints = (int)Math.Round(effectivePriority * 10000);

        _logger.LogDebug(
            "Calculated combined score: impact={Impact:F4}, uncertainty={Uncertainty:F4}, effective={Effective:F4} (penalty_factor={PenaltyFactor:F2})",
            impact.Score,
            uncertainty.Entropy,
            effectivePriority,
            penaltyFactor);

        return new CombinedImpactScore
        {
            Impact = impact,
            Uncertainty = uncertainty,
            EffectivePriority = effectivePriority,
            EffectivePriorityBasisPoints = effectivePriorityBasisPoints,
            CalculatedAt = _timeProvider.GetUtcNow()
        };
    }
}
|
||||
@@ -0,0 +1,346 @@
|
||||
// <copyright file="DeltaIfPresentCalculator.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the BUSL-1.1.
|
||||
// </copyright>
|
||||
|
||||
using StellaOps.Policy.Determinization.Evidence;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using System.Diagnostics.Metrics;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
/// Calculates hypothetical score changes if missing signals were present.
/// Implements TSF-004: Delta-If-Present calculations for policy decision support.
/// </summary>
public sealed class DeltaIfPresentCalculator : IDeltaIfPresentCalculator
{
    private static readonly Meter Meter = new("StellaOps.Policy.Determinization");
    private static readonly Counter<long> DeltaCalculationsCounter = Meter.CreateCounter<long>(
        "stellaops_determinization_delta_if_present_calculations_total",
        description: "Total delta-if-present calculations performed");

    private readonly ILogger<DeltaIfPresentCalculator> _logger;
    private readonly IUncertaintyScoreCalculator _uncertaintyCalculator;
    private readonly TrustScoreAggregator _trustAggregator;
    private readonly TimeProvider _timeProvider;

    // Default prior values for signals when simulating (moderate/neutral assumptions)
    private static readonly IReadOnlyDictionary<string, double> DefaultPriors = new Dictionary<string, double>
    {
        ["VEX"] = 0.5, // Neutral: under_investigation
        ["EPSS"] = 0.3, // Below median EPSS score
        ["Reachability"] = 0.5, // Unknown reachability
        ["Runtime"] = 0.3, // Likely not detected at runtime
        ["Backport"] = 0.5, // Unknown backport status
        ["SBOMLineage"] = 0.5 // Neutral lineage contribution
    };

    /// <summary>
    /// Initializes a new instance of the <see cref="DeltaIfPresentCalculator"/> class.
    /// </summary>
    /// <param name="logger">Logger for diagnostic output.</param>
    /// <param name="uncertaintyCalculator">Calculator for entropy/uncertainty scoring.</param>
    /// <param name="trustAggregator">Aggregator producing the trust score from a snapshot.</param>
    /// <param name="timeProvider">Clock abstraction; defaults to <see cref="TimeProvider.System"/>.</param>
    /// <exception cref="ArgumentNullException">Thrown when a required dependency is null.</exception>
    public DeltaIfPresentCalculator(
        ILogger<DeltaIfPresentCalculator> logger,
        IUncertaintyScoreCalculator uncertaintyCalculator,
        TrustScoreAggregator trustAggregator,
        TimeProvider? timeProvider = null)
    {
        // Fail fast on misconfigured DI instead of deferring to a later NRE.
        ArgumentNullException.ThrowIfNull(logger);
        ArgumentNullException.ThrowIfNull(uncertaintyCalculator);
        ArgumentNullException.ThrowIfNull(trustAggregator);

        _logger = logger;
        _uncertaintyCalculator = uncertaintyCalculator;
        _trustAggregator = trustAggregator;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Calculates the score delta if a single missing signal were present at an assumed value.
    /// </summary>
    /// <param name="snapshot">The current signal snapshot.</param>
    /// <param name="signal">Signal name (e.g. "VEX", "EPSS"); matched case-insensitively.</param>
    /// <param name="assumedValue">Normalized assumed value in [0, 1] for the simulated signal.</param>
    /// <param name="weights">Signal weights; defaults to <c>SignalWeights.Default</c>.</param>
    /// <returns>Current vs. hypothetical score and entropy for the simulated signal.</returns>
    public DeltaIfPresentResult CalculateSingleSignalDelta(
        SignalSnapshot snapshot,
        string signal,
        double assumedValue,
        SignalWeights? weights = null)
    {
        ArgumentNullException.ThrowIfNull(snapshot);
        ArgumentException.ThrowIfNullOrWhiteSpace(signal);

        var effectiveWeights = weights ?? SignalWeights.Default;
        var signalWeight = GetSignalWeight(signal, effectiveWeights);

        // Calculate current state.
        var currentUncertainty = _uncertaintyCalculator.Calculate(snapshot, effectiveWeights);
        var currentScore = _trustAggregator.Aggregate(snapshot, currentUncertainty, effectiveWeights);

        // Create hypothetical snapshot with the signal present.
        var hypotheticalSnapshot = CreateHypotheticalSnapshot(snapshot, signal, assumedValue);

        // Calculate hypothetical state.
        var hypotheticalUncertainty = _uncertaintyCalculator.Calculate(hypotheticalSnapshot, effectiveWeights);
        var hypotheticalScore = _trustAggregator.Aggregate(hypotheticalSnapshot, hypotheticalUncertainty, effectiveWeights);

        DeltaCalculationsCounter.Add(1,
            new KeyValuePair<string, object?>("signal", signal),
            new KeyValuePair<string, object?>("cve", snapshot.Cve));

        _logger.LogDebug(
            "Delta-if-present for {Signal}={Value:F2}: score {Current:F4} -> {Hypothetical:F4} (delta={Delta:+0.0000;-0.0000})",
            signal, assumedValue, currentScore, hypotheticalScore, hypotheticalScore - currentScore);

        return new DeltaIfPresentResult
        {
            Signal = signal,
            CurrentScore = currentScore,
            HypotheticalScore = hypotheticalScore,
            AssumedValue = assumedValue,
            SignalWeight = signalWeight,
            CurrentEntropy = currentUncertainty.Entropy,
            HypotheticalEntropy = hypotheticalUncertainty.Entropy
        };
    }

    /// <summary>
    /// Runs best/worst/prior-case delta scenarios for every detected signal gap and
    /// prioritizes gaps by maximum potential score impact.
    /// </summary>
    /// <param name="snapshot">The current signal snapshot.</param>
    /// <param name="weights">Signal weights; defaults to <c>SignalWeights.Default</c>.</param>
    /// <returns>Full gap analysis with prioritized gap list.</returns>
    public DeltaIfPresentAnalysis CalculateFullAnalysis(
        SignalSnapshot snapshot,
        SignalWeights? weights = null)
    {
        ArgumentNullException.ThrowIfNull(snapshot);

        var effectiveWeights = weights ?? SignalWeights.Default;

        // Calculate current state.
        var currentUncertainty = _uncertaintyCalculator.Calculate(snapshot, effectiveWeights);
        var currentScore = _trustAggregator.Aggregate(snapshot, currentUncertainty, effectiveWeights);

        var gapAnalysis = new List<SignalDeltaScenarios>();

        // Analyze each gap.
        foreach (var gap in currentUncertainty.Gaps)
        {
            var priorValue = DefaultPriors.GetValueOrDefault(gap.Signal, 0.5);

            // Best case assumes the lowest-risk value (0.0), worst case the
            // highest-risk value (1.0), prior case a neutral default.
            var bestCase = CalculateSingleSignalDelta(snapshot, gap.Signal, 0.0, effectiveWeights);
            var worstCase = CalculateSingleSignalDelta(snapshot, gap.Signal, 1.0, effectiveWeights);
            var priorCase = CalculateSingleSignalDelta(snapshot, gap.Signal, priorValue, effectiveWeights);

            gapAnalysis.Add(new SignalDeltaScenarios
            {
                Signal = gap.Signal,
                Weight = gap.Weight,
                GapReason = gap.Reason,
                BestCase = bestCase,
                WorstCase = worstCase,
                PriorCase = priorCase
            });
        }

        // Prioritize gaps by maximum potential impact.
        var prioritized = gapAnalysis
            .OrderByDescending(g => g.MaxImpact)
            .Select(g => g.Signal)
            .ToList();

        _logger.LogInformation(
            "Delta-if-present analysis for {Cve}/{Purl}: {GapCount} gaps, prioritized: [{Priority}]",
            snapshot.Cve, snapshot.Purl, gapAnalysis.Count,
            string.Join(", ", prioritized.Take(3)));

        return new DeltaIfPresentAnalysis
        {
            CurrentScore = currentScore,
            CurrentEntropy = currentUncertainty.Entropy,
            GapAnalysis = gapAnalysis,
            PrioritizedGaps = prioritized,
            ComputedAt = _timeProvider.GetUtcNow()
        };
    }

    /// <summary>
    /// Calculates the minimum and maximum achievable scores by filling all
    /// signal gaps with best-case and worst-case values.
    /// </summary>
    /// <param name="snapshot">The current signal snapshot.</param>
    /// <param name="weights">Signal weights; defaults to <c>SignalWeights.Default</c>.</param>
    /// <returns>Score bounds plus gap count and missing-weight percentage.</returns>
    public ScoreBounds CalculateScoreBounds(
        SignalSnapshot snapshot,
        SignalWeights? weights = null)
    {
        ArgumentNullException.ThrowIfNull(snapshot);

        var effectiveWeights = weights ?? SignalWeights.Default;

        // Calculate current state.
        var currentUncertainty = _uncertaintyCalculator.Calculate(snapshot, effectiveWeights);
        var currentScore = _trustAggregator.Aggregate(snapshot, currentUncertainty, effectiveWeights);

        if (currentUncertainty.Gaps.Count == 0)
        {
            // No gaps - current score is the only possibility.
            return new ScoreBounds
            {
                CurrentScore = currentScore,
                MinimumScore = currentScore,
                MaximumScore = currentScore,
                CurrentEntropy = currentUncertainty.Entropy,
                GapCount = 0,
                MissingWeightPercentage = 0.0
            };
        }

        // Create best-case snapshot (all missing signals at low-risk values).
        var bestSnapshot = snapshot;
        foreach (var gap in currentUncertainty.Gaps)
        {
            bestSnapshot = CreateHypotheticalSnapshot(bestSnapshot, gap.Signal, 0.0);
        }

        // Create worst-case snapshot (all missing signals at high-risk values).
        var worstSnapshot = snapshot;
        foreach (var gap in currentUncertainty.Gaps)
        {
            worstSnapshot = CreateHypotheticalSnapshot(worstSnapshot, gap.Signal, 1.0);
        }

        // Calculate bounds.
        var bestUncertainty = _uncertaintyCalculator.Calculate(bestSnapshot, effectiveWeights);
        var worstUncertainty = _uncertaintyCalculator.Calculate(worstSnapshot, effectiveWeights);

        var maxScore = _trustAggregator.Aggregate(bestSnapshot, bestUncertainty, effectiveWeights);
        var minScore = _trustAggregator.Aggregate(worstSnapshot, worstUncertainty, effectiveWeights);

        // Calculate missing weight percentage.
        var missingWeight = currentUncertainty.Gaps.Sum(g => g.Weight);
        var totalWeight = effectiveWeights.TotalWeight;
        var missingPercentage = totalWeight > 0 ? (missingWeight / totalWeight) * 100.0 : 0.0;

        _logger.LogDebug(
            "Score bounds for {Cve}: current={Current:F4}, min={Min:F4}, max={Max:F4}, range={Range:F4}",
            snapshot.Cve, currentScore, minScore, maxScore, maxScore - minScore);

        return new ScoreBounds
        {
            CurrentScore = currentScore,
            MinimumScore = minScore,
            MaximumScore = maxScore,
            CurrentEntropy = currentUncertainty.Entropy,
            GapCount = currentUncertainty.Gaps.Count,
            MissingWeightPercentage = missingPercentage
        };
    }

    /// <summary>Resolves the configured weight for a signal name (case-insensitive); 0.0 for unknown signals.</summary>
    private static double GetSignalWeight(string signal, SignalWeights weights)
    {
        return signal.ToUpperInvariant() switch
        {
            "VEX" => weights.VexWeight,
            "EPSS" => weights.EpssWeight,
            "REACHABILITY" => weights.ReachabilityWeight,
            "RUNTIME" => weights.RuntimeWeight,
            "BACKPORT" => weights.BackportWeight,
            "SBOMLINEAGE" or "SBOM" => weights.SbomLineageWeight,
            _ => 0.0
        };
    }

    /// <summary>
    /// Returns a copy of <paramref name="original"/> with the named signal replaced
    /// by simulated evidence derived from <paramref name="normalizedValue"/>.
    /// Unknown signal names return the original snapshot unchanged.
    /// </summary>
    private SignalSnapshot CreateHypotheticalSnapshot(
        SignalSnapshot original,
        string signal,
        double normalizedValue)
    {
        // Use the injected clock for ALL simulated timestamps so results are
        // deterministic under a fake TimeProvider (previously the static
        // helpers used DateTime(Offset).UtcNow, bypassing the injected clock).
        var now = _timeProvider.GetUtcNow();

        return signal.ToUpperInvariant() switch
        {
            "VEX" => original with
            {
                Vex = SignalState<VexClaimSummary>.Queried(
                    CreateHypotheticalVex(normalizedValue, now), now)
            },
            "EPSS" => original with
            {
                Epss = SignalState<EpssEvidence>.Queried(
                    CreateHypotheticalEpss(normalizedValue, now), now)
            },
            "REACHABILITY" => original with
            {
                Reachability = SignalState<ReachabilityEvidence>.Queried(
                    CreateHypotheticalReachability(normalizedValue), now)
            },
            "RUNTIME" => original with
            {
                Runtime = SignalState<RuntimeEvidence>.Queried(
                    CreateHypotheticalRuntime(normalizedValue, now), now)
            },
            "BACKPORT" => original with
            {
                Backport = SignalState<BackportEvidence>.Queried(
                    CreateHypotheticalBackport(normalizedValue, now), now)
            },
            "SBOMLINEAGE" or "SBOM" => original with
            {
                Sbom = SignalState<SbomLineageEvidence>.Queried(
                    CreateHypotheticalSbom(normalizedValue), now)
            },
            _ => original
        };
    }

    /// <summary>Builds a simulated VEX claim; the value maps onto a status band.</summary>
    private static VexClaimSummary CreateHypotheticalVex(double normalizedValue, DateTimeOffset now)
    {
        // Map 0.0-1.0 to VEX status (the two middle bands share one status).
        var status = normalizedValue switch
        {
            < 0.25 => "not_affected",
            < 0.75 => "under_investigation",
            _ => "affected"
        };

        return new VexClaimSummary
        {
            Status = status,
            Source = "hypothetical",
            DocumentId = "delta-if-present-simulation",
            Timestamp = now
        };
    }

    /// <summary>Builds simulated EPSS evidence; the normalized value is used directly as the EPSS score.</summary>
    private static EpssEvidence CreateHypotheticalEpss(double normalizedValue, DateTimeOffset now)
    {
        return new EpssEvidence
        {
            Epss = normalizedValue,
            Percentile = normalizedValue * 100.0,
            Date = DateOnly.FromDateTime(now.UtcDateTime)
        };
    }

    /// <summary>
    /// Builds simulated reachability evidence. Confidence peaks at the extremes
    /// (0.0 or 1.0) and falls to zero at the ambiguous midpoint 0.5.
    /// </summary>
    private static ReachabilityEvidence CreateHypotheticalReachability(double normalizedValue)
    {
        var status = normalizedValue >= 0.5
            ? ReachabilityStatus.Reachable
            : ReachabilityStatus.Unreachable;

        return new ReachabilityEvidence
        {
            Status = status,
            Confidence = 1.0 - Math.Abs(normalizedValue - 0.5) * 2,
            PathCount = normalizedValue >= 0.5 ? 1 : 0,
            Source = "hypothetical"
        };
    }

    /// <summary>Builds simulated runtime evidence; values at or above 0.5 count as detected.</summary>
    private static RuntimeEvidence CreateHypotheticalRuntime(double normalizedValue, DateTimeOffset now)
    {
        return new RuntimeEvidence
        {
            Detected = normalizedValue >= 0.5,
            Source = "hypothetical",
            Timestamp = now
        };
    }

    /// <summary>Builds simulated backport evidence; a backport present means LOWER risk, so detection inverts the value.</summary>
    private static BackportEvidence CreateHypotheticalBackport(double normalizedValue, DateTimeOffset now)
    {
        return new BackportEvidence
        {
            Detected = normalizedValue < 0.5, // Backport = lower risk
            Source = "hypothetical",
            Timestamp = now
        };
    }

    /// <summary>Builds simulated SBOM lineage evidence; the value scales lineage depth 0-5.</summary>
    private static SbomLineageEvidence CreateHypotheticalSbom(double normalizedValue)
    {
        return new SbomLineageEvidence
        {
            Present = true,
            Depth = (int)(normalizedValue * 5),
            Source = "hypothetical"
        };
    }
}
|
||||
@@ -0,0 +1,192 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EwsCalculator.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Unified Evidence-Weighted Score calculator implementation.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Diagnostics.Metrics;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
/// Unified calculator for Evidence-Weighted Scores (EWS).
/// Orchestrates 6-dimension normalization, weighting, and guardrails.
/// </summary>
public sealed class EwsCalculator : IEwsCalculator
{
    private static readonly Meter Meter = new("StellaOps.Policy.Determinization.EWS");
    private static readonly Histogram<int> EwsScoreHistogram = Meter.CreateHistogram<int>(
        "stellaops_ews_score",
        unit: "score",
        description: "Evidence-Weighted Score distribution (0-100)");
    private static readonly Counter<int> GuardrailsAppliedCounter = Meter.CreateCounter<int>(
        "stellaops_ews_guardrails_applied",
        description: "Count of guardrails applied to EWS scores");

    private readonly ImmutableDictionary<EwsDimension, IEwsDimensionNormalizer> _normalizers;
    private readonly IGuardrailsEngine _guardrailsEngine;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<EwsCalculator> _logger;

    /// <summary>
    /// Initializes a new instance of the <see cref="EwsCalculator"/> class.
    /// </summary>
    /// <param name="normalizers">One normalizer per dimension; all 6 dimensions must be covered.</param>
    /// <param name="guardrailsEngine">Engine that applies score guardrails.</param>
    /// <param name="timeProvider">Clock abstraction; defaults to <see cref="TimeProvider.System"/>.</param>
    /// <param name="logger">Optional logger; defaults to a null logger.</param>
    /// <exception cref="ArgumentNullException">Thrown when a required dependency is null.</exception>
    /// <exception cref="InvalidOperationException">Thrown when any dimension lacks a normalizer.</exception>
    public EwsCalculator(
        IEnumerable<IEwsDimensionNormalizer> normalizers,
        IGuardrailsEngine guardrailsEngine,
        TimeProvider? timeProvider = null,
        ILogger<EwsCalculator>? logger = null)
    {
        // Guard required dependencies so construction fails fast instead of
        // throwing NullReferenceException from ToImmutableDictionary or Apply.
        ArgumentNullException.ThrowIfNull(normalizers);
        ArgumentNullException.ThrowIfNull(guardrailsEngine);

        _normalizers = normalizers.ToImmutableDictionary(n => n.Dimension);
        _guardrailsEngine = guardrailsEngine;
        _timeProvider = timeProvider ?? TimeProvider.System;
        _logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<EwsCalculator>.Instance;

        ValidateNormalizers();
    }

    /// <summary>
    /// Creates a default EwsCalculator with all standard normalizers.
    /// </summary>
    public static EwsCalculator CreateDefault(
        TimeProvider? timeProvider = null,
        ILogger<EwsCalculator>? logger = null)
    {
        var normalizers = new IEwsDimensionNormalizer[]
        {
            new ReachabilityNormalizer(),
            new RuntimeSignalsNormalizer(),
            new BackportEvidenceNormalizer(),
            new ExploitabilityNormalizer(),
            new SourceConfidenceNormalizer(),
            new MitigationStatusNormalizer()
        };

        return new EwsCalculator(
            normalizers,
            new GuardrailsEngine(),
            timeProvider,
            logger);
    }

    /// <inheritdoc />
    public EwsCompositeScore Calculate(
        EwsSignalInput signal,
        EwsDimensionWeights? weights = null,
        EwsGuardrails? guardrails = null)
    {
        ArgumentNullException.ThrowIfNull(signal);

        var effectiveWeights = weights ?? EwsDimensionWeights.Default;
        var effectiveGuardrails = guardrails ?? EwsGuardrails.Default;

        // Validate weights: non-normalized weights skew both the composite
        // score and the confidence average, so warn but continue.
        if (!effectiveWeights.IsNormalized())
        {
            _logger.LogWarning(
                "EWS dimension weights are not normalized (total={Total:F4}); results may be unexpected",
                effectiveWeights.TotalWeight);
        }

        // Calculate all dimension scores.
        var dimensionScores = new List<EwsDimensionScore>();
        foreach (EwsDimension dimension in Enum.GetValues<EwsDimension>())
        {
            var dimScore = CalculateDimension(dimension, signal, effectiveWeights.GetWeight(dimension));
            dimensionScores.Add(dimScore);
        }

        var dimensions = dimensionScores.ToImmutableArray();

        // Calculate raw composite score (weighted sum), clamped to the 0-100 scale.
        var rawScore = (int)Math.Round(dimensions.Sum(d => d.WeightedContribution));
        rawScore = Math.Clamp(rawScore, 0, 100);

        // Apply guardrails.
        var guardrailsResult = _guardrailsEngine.Apply(rawScore, signal, dimensions, effectiveGuardrails);

        // Calculate overall confidence (weighted average; assumes weights sum to ~1).
        var confidence = dimensions.Sum(d => d.Confidence * d.Weight);

        // Determine if manual review is needed.
        var needsReview = confidence < effectiveGuardrails.MinConfidenceThreshold;

        var result = new EwsCompositeScore
        {
            Score = guardrailsResult.AdjustedScore,
            RawScore = rawScore,
            Confidence = confidence,
            Dimensions = dimensions,
            AppliedGuardrails = guardrailsResult.AppliedGuardrails,
            NeedsReview = needsReview,
            CalculatedAt = _timeProvider.GetUtcNow(),
            CveId = signal.CveId,
            Purl = signal.Purl
        };

        // Emit metrics.
        EwsScoreHistogram.Record(result.Score,
            new KeyValuePair<string, object?>("risk_tier", result.RiskTier),
            new KeyValuePair<string, object?>("guardrails_applied", guardrailsResult.WasModified));

        if (guardrailsResult.WasModified)
        {
            GuardrailsAppliedCounter.Add(guardrailsResult.AppliedGuardrails.Length);
        }

        _logger.LogDebug(
            "Calculated EWS: score={Score} (raw={RawScore}), confidence={Confidence:P0}, tier={Tier}, guardrails={Guardrails}",
            result.Score,
            result.RawScore,
            result.Confidence,
            result.RiskTier,
            string.Join(",", guardrailsResult.AppliedGuardrails));

        return result;
    }

    /// <inheritdoc />
    public EwsDimensionScore CalculateDimension(
        EwsDimension dimension,
        EwsSignalInput signal,
        double weight)
    {
        ArgumentNullException.ThrowIfNull(signal);

        var normalizer = GetNormalizer(dimension);

        var score = normalizer.Normalize(signal);
        var confidence = normalizer.GetConfidence(signal);
        var explanation = normalizer.GetExplanation(signal, score);

        return new EwsDimensionScore
        {
            Dimension = dimension,
            Score = score,
            Confidence = confidence,
            Weight = weight,
            Explanation = explanation
        };
    }

    /// <inheritdoc />
    public IEwsDimensionNormalizer GetNormalizer(EwsDimension dimension)
    {
        if (_normalizers.TryGetValue(dimension, out var normalizer))
        {
            return normalizer;
        }

        throw new InvalidOperationException($"No normalizer registered for dimension {dimension}");
    }

    /// <summary>Ensures every dimension has a registered normalizer; called once from the constructor.</summary>
    private void ValidateNormalizers()
    {
        foreach (EwsDimension dimension in Enum.GetValues<EwsDimension>())
        {
            if (!_normalizers.ContainsKey(dimension))
            {
                throw new InvalidOperationException(
                    $"Missing normalizer for dimension {dimension}. All 6 dimensions must have normalizers.");
            }
        }
    }
}
|
||||
@@ -0,0 +1,101 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EwsDimension.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Defines the 6 canonical dimensions for EWS scoring.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
/// The 6 canonical dimensions for Evidence-Weighted Score (EWS) model.
/// Each dimension maps specific signal inputs to a normalized 0-100 score.
/// Serialized as enum-member names via <see cref="JsonStringEnumConverter"/>.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum EwsDimension
{
    /// <summary>
    /// RCH - Reachability dimension.
    /// Measures whether vulnerable code paths are reachable from entrypoints.
    /// Input: Reachability tier (R0-R4), call graph analysis, runtime traces.
    /// </summary>
    Reachability = 0,

    /// <summary>
    /// RTS - Runtime Signals dimension.
    /// Measures evidence from runtime detection and observability.
    /// Input: Runtime telemetry, instrumentation coverage, APM signals.
    /// </summary>
    RuntimeSignals = 1,

    /// <summary>
    /// BKP - Backport Evidence dimension.
    /// Measures evidence of patched code in affected packages.
    /// Input: Backport detection, binary diff analysis, vendor advisories.
    /// </summary>
    BackportEvidence = 2,

    /// <summary>
    /// XPL - Exploitability dimension.
    /// Measures likelihood and maturity of exploitation.
    /// Input: EPSS, KEV status, exploit kit availability, PoC age.
    /// </summary>
    Exploitability = 3,

    /// <summary>
    /// SRC - Source Confidence dimension.
    /// Measures confidence in SBOM and dependency lineage.
    /// Input: SBOM completeness, verified signatures, attestations.
    /// </summary>
    SourceConfidence = 4,

    /// <summary>
    /// MIT - Mitigation Status dimension.
    /// Measures VEX status and compensating controls.
    /// Input: VEX statements, workarounds applied, network controls.
    /// </summary>
    MitigationStatus = 5
}
|
||||
|
||||
/// <summary>
/// Short codes for dimension serialization and display.
/// </summary>
public static class EwsDimensionCodes
{
    public const string Reachability = "RCH";
    public const string RuntimeSignals = "RTS";
    public const string BackportEvidence = "BKP";
    public const string Exploitability = "XPL";
    public const string SourceConfidence = "SRC";
    public const string MitigationStatus = "MIT";

    /// <summary>
    /// Gets the short code for a dimension.
    /// </summary>
    /// <param name="dimension">The dimension to convert.</param>
    /// <returns>The three-letter code (RCH, RTS, BKP, XPL, SRC, MIT).</returns>
    /// <exception cref="ArgumentOutOfRangeException">Thrown for undefined enum values.</exception>
    public static string ToCode(this EwsDimension dimension) => dimension switch
    {
        EwsDimension.Reachability => Reachability,
        EwsDimension.RuntimeSignals => RuntimeSignals,
        EwsDimension.BackportEvidence => BackportEvidence,
        EwsDimension.Exploitability => Exploitability,
        EwsDimension.SourceConfidence => SourceConfidence,
        EwsDimension.MitigationStatus => MitigationStatus,
        _ => throw new ArgumentOutOfRangeException(nameof(dimension), dimension, "Unknown dimension")
    };

    /// <summary>
    /// Parses a short code to a dimension. Matching is case-insensitive.
    /// </summary>
    /// <param name="code">The short code to parse; may be null.</param>
    /// <returns>The matching dimension, or <c>null</c> when the code is null or unrecognized.</returns>
    public static EwsDimension? FromCode(string? code) => code?.ToUpperInvariant() switch
    {
        // Parameter is declared nullable to match the null handling below;
        // the original non-nullable annotation contradicted the `?.` usage.
        Reachability => EwsDimension.Reachability,
        RuntimeSignals => EwsDimension.RuntimeSignals,
        BackportEvidence => EwsDimension.BackportEvidence,
        Exploitability => EwsDimension.Exploitability,
        SourceConfidence => EwsDimension.SourceConfidence,
        MitigationStatus => EwsDimension.MitigationStatus,
        _ => null
    };
}
|
||||
@@ -0,0 +1,298 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EwsModels.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Result models for Evidence-Weighted Score calculation.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
/// Individual dimension score from normalization.
/// </summary>
public sealed record EwsDimensionScore
{
    /// <summary>
    /// The dimension this score represents.
    /// </summary>
    [JsonPropertyName("dimension")]
    public required EwsDimension Dimension { get; init; }

    /// <summary>
    /// Short dimension code (RCH, RTS, BKP, XPL, SRC, MIT).
    /// Derived from <see cref="Dimension"/>; not settable.
    /// </summary>
    [JsonPropertyName("code")]
    public string Code => Dimension.ToCode();

    /// <summary>
    /// Normalized score in range [0, 100].
    /// </summary>
    [JsonPropertyName("score")]
    public required int Score { get; init; }

    /// <summary>
    /// Confidence level for this score (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("confidence")]
    public required double Confidence { get; init; }

    /// <summary>
    /// Weight applied to this dimension in composite calculation.
    /// </summary>
    [JsonPropertyName("weight")]
    public required double Weight { get; init; }

    /// <summary>
    /// Weighted contribution to composite score (Score * Weight).
    /// </summary>
    [JsonPropertyName("weighted_contribution")]
    public double WeightedContribution => Score * Weight;

    /// <summary>
    /// Human-readable explanation of how the score was derived.
    /// </summary>
    [JsonPropertyName("explanation")]
    public required string Explanation { get; init; }

    /// <summary>
    /// Whether this score is based on actual evidence or assumptions.
    /// Defined here as confidence of at least 0.5.
    /// </summary>
    [JsonPropertyName("is_evidence_based")]
    public bool IsEvidenceBased => Confidence >= 0.5;
}
|
||||
|
||||
/// <summary>
/// Per-dimension weights for the 6-dimension EWS model.
/// </summary>
public sealed record EwsDimensionWeights
{
    /// <summary>Weight for RCH (Reachability) dimension.</summary>
    [JsonPropertyName("rch")]
    public double Reachability { get; init; } = 0.25;

    /// <summary>Weight for RTS (Runtime Signals) dimension.</summary>
    [JsonPropertyName("rts")]
    public double RuntimeSignals { get; init; } = 0.15;

    /// <summary>Weight for BKP (Backport Evidence) dimension.</summary>
    [JsonPropertyName("bkp")]
    public double BackportEvidence { get; init; } = 0.10;

    /// <summary>Weight for XPL (Exploitability) dimension.</summary>
    [JsonPropertyName("xpl")]
    public double Exploitability { get; init; } = 0.20;

    /// <summary>Weight for SRC (Source Confidence) dimension.</summary>
    [JsonPropertyName("src")]
    public double SourceConfidence { get; init; } = 0.10;

    /// <summary>Weight for MIT (Mitigation Status) dimension.</summary>
    [JsonPropertyName("mit")]
    public double MitigationStatus { get; init; } = 0.20;

    /// <summary>Default weights as per advisory recommendations.</summary>
    public static EwsDimensionWeights Default => new EwsDimensionWeights();

    /// <summary>Legacy 6-dimension weights kept for backward compatibility.</summary>
    public static EwsDimensionWeights Legacy => new EwsDimensionWeights
    {
        Reachability = 0.20,
        RuntimeSignals = 0.10,
        BackportEvidence = 0.15,
        Exploitability = 0.25,
        SourceConfidence = 0.10,
        MitigationStatus = 0.20
    };

    /// <summary>
    /// Gets the weight for a specific dimension.
    /// Unknown dimension values yield 0.0 rather than throwing.
    /// </summary>
    public double GetWeight(EwsDimension dimension)
    {
        switch (dimension)
        {
            case EwsDimension.Reachability: return Reachability;
            case EwsDimension.RuntimeSignals: return RuntimeSignals;
            case EwsDimension.BackportEvidence: return BackportEvidence;
            case EwsDimension.Exploitability: return Exploitability;
            case EwsDimension.SourceConfidence: return SourceConfidence;
            case EwsDimension.MitigationStatus: return MitigationStatus;
            default: return 0.0;
        }
    }

    /// <summary>
    /// Sum of all weights (should equal 1.0 for normalized calculations).
    /// </summary>
    public double TotalWeight
    {
        get
        {
            // Summed in declaration order to keep floating-point results stable.
            return Reachability + RuntimeSignals + BackportEvidence +
                   Exploitability + SourceConfidence + MitigationStatus;
        }
    }

    /// <summary>
    /// Validates that the weights sum to approximately 1.0 within the given tolerance.
    /// </summary>
    public bool IsNormalized(double tolerance = 0.001)
    {
        var drift = TotalWeight - 1.0;
        return drift < tolerance && drift > -tolerance;
    }
}
|
||||
|
||||
/// <summary>
/// Guardrails configuration for EWS scoring: caps and floors that keep
/// composite scores from reaching extremes the evidence does not support.
/// </summary>
public sealed record EwsGuardrails
{
    /// <summary>
    /// Maximum score (cap) for "not_affected" VEX status, so fully mitigated
    /// items are never flagged as high risk.
    /// </summary>
    [JsonPropertyName("not_affected_cap")]
    public int NotAffectedCap { get; init; } = 25;

    /// <summary>
    /// Minimum score (floor) when runtime evidence shows active usage, so
    /// actively used vulnerable code is never fully suppressed.
    /// </summary>
    [JsonPropertyName("runtime_floor")]
    public int RuntimeFloor { get; init; } = 30;

    /// <summary>
    /// Maximum score (cap) for speculative findings built entirely on
    /// assumptions, so they cannot dominate triage.
    /// </summary>
    [JsonPropertyName("speculative_cap")]
    public int SpeculativeCap { get; init; } = 60;

    /// <summary>
    /// Minimum score (floor) when the CVE appears in KEV; known exploited
    /// vulnerabilities always require attention.
    /// </summary>
    [JsonPropertyName("kev_floor")]
    public int KevFloor { get; init; } = 70;

    /// <summary>
    /// Maximum score (cap) for findings with confirmed backports, which
    /// should not be high priority.
    /// </summary>
    [JsonPropertyName("backported_cap")]
    public int BackportedCap { get; init; } = 20;

    /// <summary>
    /// Minimum overall confidence required to trust the composite score;
    /// anything below should be flagged for manual review.
    /// </summary>
    [JsonPropertyName("min_confidence_threshold")]
    public double MinConfidenceThreshold { get; init; } = 0.3;

    /// <summary>Default guardrails configuration.</summary>
    public static EwsGuardrails Default => new EwsGuardrails();
}
|
||||
|
||||
/// <summary>
/// Composite Evidence-Weighted Score result.
/// NOTE(review): this type uses ImmutableArray — confirm the file imports
/// System.Collections.Immutable (not visible in this header).
/// </summary>
public sealed record EwsCompositeScore
{
    /// <summary>Final weighted composite score in [0, 100].</summary>
    [JsonPropertyName("score")]
    public required int Score { get; init; }

    /// <summary>Score before guardrails were applied.</summary>
    [JsonPropertyName("raw_score")]
    public required int RawScore { get; init; }

    /// <summary>Basis-points representation (0-10000) for deterministic storage.</summary>
    [JsonPropertyName("basis_points")]
    public int BasisPoints
    {
        get => 100 * Score;
    }

    /// <summary>
    /// Overall confidence in the composite score (0.0 to 1.0),
    /// a weighted average of the dimension confidences.
    /// </summary>
    [JsonPropertyName("confidence")]
    public required double Confidence { get; init; }

    /// <summary>Individual dimension scores.</summary>
    [JsonPropertyName("dimensions")]
    public required ImmutableArray<EwsDimensionScore> Dimensions { get; init; }

    /// <summary>Guardrails that were applied.</summary>
    [JsonPropertyName("applied_guardrails")]
    public required ImmutableArray<string> AppliedGuardrails { get; init; }

    /// <summary>Whether guardrails actually changed the score.</summary>
    [JsonPropertyName("guardrails_applied")]
    public bool GuardrailsApplied
    {
        get => RawScore != Score;
    }

    /// <summary>Whether manual review is recommended due to low confidence.</summary>
    [JsonPropertyName("needs_review")]
    public required bool NeedsReview { get; init; }

    /// <summary>When this score was calculated (UTC).</summary>
    [JsonPropertyName("calculated_at")]
    public required DateTimeOffset CalculatedAt { get; init; }

    /// <summary>CVE identifier this score relates to.</summary>
    [JsonPropertyName("cve_id")]
    public string? CveId { get; init; }

    /// <summary>Package URL (purl) this score relates to.</summary>
    [JsonPropertyName("purl")]
    public string? Purl { get; init; }

    /// <summary>
    /// Gets the first dimension score matching the given dimension,
    /// or null when none is present.
    /// </summary>
    public EwsDimensionScore? GetDimension(EwsDimension dimension)
    {
        foreach (var entry in Dimensions)
        {
            if (entry.Dimension == dimension)
            {
                return entry;
            }
        }

        return null;
    }

    /// <summary>Risk tier label derived from the score thresholds.</summary>
    [JsonPropertyName("risk_tier")]
    public string RiskTier
    {
        get
        {
            if (Score >= 80) return "Critical";
            if (Score >= 60) return "High";
            if (Score >= 40) return "Medium";
            if (Score >= 20) return "Low";
            return "Informational";
        }
    }
}
|
||||
@@ -0,0 +1,221 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EwsSignalInput.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Signal input model for EWS dimension normalization.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
/// Raw signal inputs for Evidence-Weighted Score calculation.
/// Contains all signals that feed into the 6-dimension model.
/// Every signal is nullable: null means "not collected", which normalizers
/// treat differently from an explicit false / zero value.
/// </summary>
public sealed record EwsSignalInput
{
    // -------------------------------------------------------------------------
    // RCH (Reachability) signals
    // -------------------------------------------------------------------------

    /// <summary>
    /// Reachability tier from static analysis (R0=unreachable to R4=reachable).
    /// </summary>
    [JsonPropertyName("reachability_tier")]
    public int? ReachabilityTier { get; init; }

    /// <summary>
    /// Call graph analysis confidence (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("call_graph_confidence")]
    public double? CallGraphConfidence { get; init; }

    /// <summary>
    /// Whether runtime trace confirmed the path.
    /// </summary>
    [JsonPropertyName("runtime_trace_confirmed")]
    public bool? RuntimeTraceConfirmed { get; init; }

    // -------------------------------------------------------------------------
    // RTS (Runtime Signals) signals
    // -------------------------------------------------------------------------

    /// <summary>
    /// Runtime instrumentation coverage percentage (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("instrumentation_coverage")]
    public double? InstrumentationCoverage { get; init; }

    /// <summary>
    /// Number of runtime invocations observed in the past period.
    /// </summary>
    [JsonPropertyName("runtime_invocation_count")]
    public int? RuntimeInvocationCount { get; init; }

    /// <summary>
    /// Whether APM signals indicate active usage.
    /// </summary>
    [JsonPropertyName("apm_active_usage")]
    public bool? ApmActiveUsage { get; init; }

    // -------------------------------------------------------------------------
    // BKP (Backport Evidence) signals
    // -------------------------------------------------------------------------

    /// <summary>
    /// Whether backport was detected via binary analysis.
    /// </summary>
    [JsonPropertyName("backport_detected")]
    public bool? BackportDetected { get; init; }

    /// <summary>
    /// Backport confidence score from binary diff (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("backport_confidence")]
    public double? BackportConfidence { get; init; }

    /// <summary>
    /// Whether vendor advisory confirms backport.
    /// </summary>
    [JsonPropertyName("vendor_backport_confirmed")]
    public bool? VendorBackportConfirmed { get; init; }

    // -------------------------------------------------------------------------
    // XPL (Exploitability) signals
    // -------------------------------------------------------------------------

    /// <summary>
    /// EPSS probability (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("epss_probability")]
    public double? EpssProbability { get; init; }

    /// <summary>
    /// Whether the CVE is in KEV (Known Exploited Vulnerabilities).
    /// </summary>
    [JsonPropertyName("is_in_kev")]
    public bool? IsInKev { get; init; }

    /// <summary>
    /// Whether an exploit kit is available.
    /// </summary>
    [JsonPropertyName("exploit_kit_available")]
    public bool? ExploitKitAvailable { get; init; }

    /// <summary>
    /// Age of the public PoC in days (null if no PoC).
    /// </summary>
    [JsonPropertyName("poc_age_days")]
    public int? PocAgeDays { get; init; }

    /// <summary>
    /// CVSS base score (0.0 to 10.0).
    /// </summary>
    [JsonPropertyName("cvss_base_score")]
    public double? CvssBaseScore { get; init; }

    // -------------------------------------------------------------------------
    // SRC (Source Confidence) signals
    // -------------------------------------------------------------------------

    /// <summary>
    /// SBOM completeness percentage (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("sbom_completeness")]
    public double? SbomCompleteness { get; init; }

    /// <summary>
    /// Whether SBOM has verified signatures.
    /// </summary>
    [JsonPropertyName("sbom_signed")]
    public bool? SbomSigned { get; init; }

    /// <summary>
    /// Number of valid attestations.
    /// </summary>
    [JsonPropertyName("attestation_count")]
    public int? AttestationCount { get; init; }

    /// <summary>
    /// Whether dependency lineage is verified.
    /// </summary>
    [JsonPropertyName("lineage_verified")]
    public bool? LineageVerified { get; init; }

    // -------------------------------------------------------------------------
    // MIT (Mitigation Status) signals
    // -------------------------------------------------------------------------

    /// <summary>
    /// VEX status string (not_affected, affected, fixed, under_investigation).
    /// </summary>
    [JsonPropertyName("vex_status")]
    public string? VexStatus { get; init; }

    /// <summary>
    /// VEX justification string.
    /// </summary>
    [JsonPropertyName("vex_justification")]
    public string? VexJustification { get; init; }

    /// <summary>
    /// Whether a workaround is applied.
    /// </summary>
    [JsonPropertyName("workaround_applied")]
    public bool? WorkaroundApplied { get; init; }

    /// <summary>
    /// Whether network controls mitigate the vulnerability.
    /// </summary>
    [JsonPropertyName("network_controls_applied")]
    public bool? NetworkControlsApplied { get; init; }

    // -------------------------------------------------------------------------
    // Metadata
    // -------------------------------------------------------------------------

    /// <summary>
    /// Timestamp when these signals were collected.
    /// </summary>
    [JsonPropertyName("collected_at")]
    public DateTimeOffset? CollectedAt { get; init; }

    /// <summary>
    /// CVE identifier this input relates to.
    /// </summary>
    [JsonPropertyName("cve_id")]
    public string? CveId { get; init; }

    /// <summary>
    /// Package URL (purl) this input relates to.
    /// </summary>
    [JsonPropertyName("purl")]
    public string? Purl { get; init; }

    /// <summary>
    /// Additional signals as key-value pairs for extensibility.
    /// </summary>
    [JsonPropertyName("additional_signals")]
    public ImmutableDictionary<string, object?>? AdditionalSignals { get; init; }

    /// <summary>
    /// Creates an empty signal input (all assumptions mode).
    /// </summary>
    public static EwsSignalInput Empty => new();

    /// <summary>
    /// Checks if at least one signal is present for the specified dimension.
    /// Unknown dimensions report no signal.
    /// </summary>
    public bool HasSignalForDimension(EwsDimension dimension) => dimension switch
    {
        EwsDimension.Reachability => ReachabilityTier.HasValue || CallGraphConfidence.HasValue || RuntimeTraceConfirmed.HasValue,
        EwsDimension.RuntimeSignals => InstrumentationCoverage.HasValue || RuntimeInvocationCount.HasValue || ApmActiveUsage.HasValue,
        EwsDimension.BackportEvidence => BackportDetected.HasValue || BackportConfidence.HasValue || VendorBackportConfirmed.HasValue,
        EwsDimension.Exploitability => EpssProbability.HasValue || IsInKev.HasValue || ExploitKitAvailable.HasValue || PocAgeDays.HasValue || CvssBaseScore.HasValue,
        EwsDimension.SourceConfidence => SbomCompleteness.HasValue || SbomSigned.HasValue || AttestationCount.HasValue || LineageVerified.HasValue,
        // Fix: VexJustification is declared as a MIT signal above but was not
        // counted here, so an input carrying only a justification previously
        // reported "no signal" for the mitigation dimension.
        EwsDimension.MitigationStatus => VexStatus != null || VexJustification != null || WorkaroundApplied.HasValue || NetworkControlsApplied.HasValue,
        _ => false
    };
}
|
||||
@@ -0,0 +1,109 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// GuardrailsEngine.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Implementation of guardrails enforcement for EWS scoring.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
/// Applies guardrails (caps and floors) to EWS scores.
/// Checks run in a fixed order, and a later guardrail can override an
/// earlier one, so the final value reflects the last guardrail that fired.
/// </summary>
public sealed class GuardrailsEngine : IGuardrailsEngine
{
    /// <inheritdoc />
    public GuardrailsResult Apply(
        int rawScore,
        EwsSignalInput signal,
        ImmutableArray<EwsDimensionScore> dimensions,
        EwsGuardrails guardrails)
    {
        var adjusted = rawScore;
        var triggered = new List<string>();

        // KEV floor: known-exploited CVEs are raised to at least KevFloor.
        // NOTE(review): later checks (e.g. backported_cap) can still lower the
        // score below this floor — confirm that ordering is intentional.
        if (signal.IsInKev == true && adjusted < guardrails.KevFloor)
        {
            adjusted = guardrails.KevFloor;
            triggered.Add($"kev_floor:{guardrails.KevFloor}");
        }

        // Backport cap: detected or vendor-confirmed backports are capped low.
        if ((signal.BackportDetected == true || signal.VendorBackportConfirmed == true)
            && adjusted > guardrails.BackportedCap)
        {
            adjusted = guardrails.BackportedCap;
            triggered.Add($"backported_cap:{guardrails.BackportedCap}");
        }

        // Not-affected cap: not_affected / fixed VEX statuses are capped.
        if (IsNotAffected(signal) && adjusted > guardrails.NotAffectedCap)
        {
            adjusted = guardrails.NotAffectedCap;
            triggered.Add($"not_affected_cap:{guardrails.NotAffectedCap}");
        }

        // Runtime floor: observed active usage keeps the score above the floor.
        if (HasActiveRuntimeUsage(signal) && adjusted < guardrails.RuntimeFloor)
        {
            adjusted = guardrails.RuntimeFloor;
            triggered.Add($"runtime_floor:{guardrails.RuntimeFloor}");
        }

        // Speculative cap: low weighted confidence means assumptions dominate.
        if (IsSpeculative(dimensions) && adjusted > guardrails.SpeculativeCap)
        {
            adjusted = guardrails.SpeculativeCap;
            triggered.Add($"speculative_cap:{guardrails.SpeculativeCap}");
        }

        return new GuardrailsResult
        {
            AdjustedScore = Math.Clamp(adjusted, 0, 100),
            OriginalScore = rawScore,
            AppliedGuardrails = triggered.ToImmutableArray()
        };
    }

    /// <summary>True when the VEX status marks the finding as not_affected or fixed.</summary>
    private static bool IsNotAffected(EwsSignalInput signal) =>
        string.Equals(signal.VexStatus, "not_affected", StringComparison.OrdinalIgnoreCase)
        || string.Equals(signal.VexStatus, "fixed", StringComparison.OrdinalIgnoreCase);

    /// <summary>True when APM or invocation counts show the code is actually executed.</summary>
    private static bool HasActiveRuntimeUsage(EwsSignalInput signal) =>
        signal.ApmActiveUsage == true || signal.RuntimeInvocationCount is > 0;

    /// <summary>
    /// True when the weighted-average dimension confidence falls below 30%,
    /// i.e. the composite rests mostly on assumptions.
    /// </summary>
    private static bool IsSpeculative(ImmutableArray<EwsDimensionScore> dimensions)
    {
        if (dimensions.IsDefaultOrEmpty)
        {
            return true;
        }

        var weightSum = 0.0;
        var confidenceSum = 0.0;

        for (var i = 0; i < dimensions.Length; i++)
        {
            weightSum += dimensions[i].Weight;
            confidenceSum += dimensions[i].Confidence * dimensions[i].Weight;
        }

        return weightSum <= 0 || confidenceSum / weightSum < 0.3;
    }
}
|
||||
@@ -0,0 +1,46 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IEwsCalculator.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Interface for the unified Evidence-Weighted Score calculator.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
/// Unified calculator for Evidence-Weighted Scores (EWS).
/// Orchestrates 6-dimension normalization, weighting, and guardrails.
/// </summary>
public interface IEwsCalculator
{
    /// <summary>
    /// Calculates a composite EWS from raw signals: normalizes each dimension,
    /// combines them under the given weights, then applies guardrails.
    /// </summary>
    /// <param name="signal">The raw signal input.</param>
    /// <param name="weights">Optional custom weights (defaults to EwsDimensionWeights.Default when null).</param>
    /// <param name="guardrails">Optional guardrails configuration (defaults to EwsGuardrails.Default when null).</param>
    /// <returns>The composite EWS result.</returns>
    EwsCompositeScore Calculate(
        EwsSignalInput signal,
        EwsDimensionWeights? weights = null,
        EwsGuardrails? guardrails = null);

    /// <summary>
    /// Calculates a single dimension score from raw signals, bypassing the
    /// composite weighting and guardrail steps.
    /// </summary>
    /// <param name="dimension">The dimension to calculate.</param>
    /// <param name="signal">The raw signal input.</param>
    /// <param name="weight">The weight to assign to this dimension.</param>
    /// <returns>The dimension score.</returns>
    EwsDimensionScore CalculateDimension(
        EwsDimension dimension,
        EwsSignalInput signal,
        double weight);

    /// <summary>
    /// Gets the normalizer for a specific dimension.
    /// NOTE(review): behavior for an unrecognized dimension value is
    /// implementation-defined — confirm with the concrete calculator.
    /// </summary>
    /// <param name="dimension">The dimension.</param>
    /// <returns>The normalizer for that dimension.</returns>
    IEwsDimensionNormalizer GetNormalizer(EwsDimension dimension);
}
|
||||
@@ -0,0 +1,47 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IEwsDimensionNormalizer.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Pluggable interface for normalizing signal inputs to dimension scores.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
/// Interface for normalizing raw signal inputs to a canonical 0-100 dimension score.
/// Each dimension has its own normalizer implementation that handles the specific
/// signal types and normalization logic for that dimension.
/// </summary>
public interface IEwsDimensionNormalizer
{
    /// <summary>
    /// The dimension this normalizer handles.
    /// </summary>
    EwsDimension Dimension { get; }

    /// <summary>
    /// Normalizes a raw signal value to a dimension score in range [0, 100].
    /// </summary>
    /// <param name="signal">The raw signal input for this dimension.</param>
    /// <returns>Normalized score in range [0, 100], where:
    /// - 0 = lowest risk/impact (e.g., unreachable, fully mitigated)
    /// - 100 = highest risk/impact (e.g., reachable, actively exploited)
    /// </returns>
    int Normalize(EwsSignalInput signal);

    /// <summary>
    /// Gets the confidence level for this normalization (0.0 to 1.0).
    /// Lower confidence when assumptions are made or data is missing.
    /// Values below 0.5 are treated as assumption-based downstream
    /// (see EwsDimensionScore.IsEvidenceBased).
    /// </summary>
    /// <param name="signal">The raw signal input for this dimension.</param>
    /// <returns>Confidence level from 0.0 (all assumptions) to 1.0 (verified evidence).</returns>
    double GetConfidence(EwsSignalInput signal);

    /// <summary>
    /// Gets a human-readable explanation of how the score was derived.
    /// </summary>
    /// <param name="signal">The raw signal input for this dimension.</param>
    /// <param name="normalizedScore">The normalized score that was calculated.</param>
    /// <returns>Explanation suitable for audit and operator review.</returns>
    string GetExplanation(EwsSignalInput signal, int normalizedScore);
}
|
||||
@@ -0,0 +1,57 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IGuardrailsEngine.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Interface for guardrails enforcement in EWS scoring.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
/// Outcome of a guardrails pass over a raw composite score.
/// </summary>
public sealed record GuardrailsResult
{
    /// <summary>Score after all applicable caps and floors were enforced.</summary>
    public required int AdjustedScore { get; init; }

    /// <summary>Score as it was before guardrails ran.</summary>
    public required int OriginalScore { get; init; }

    /// <summary>Identifiers of the guardrails that fired.</summary>
    public required ImmutableArray<string> AppliedGuardrails { get; init; }

    /// <summary>True when guardrails changed the score.</summary>
    public bool WasModified
    {
        get => OriginalScore != AdjustedScore;
    }
}
|
||||
|
||||
/// <summary>
/// Engine for applying guardrails (caps and floors) to EWS scores.
/// Guardrails prevent extreme scores in edge cases.
/// </summary>
public interface IGuardrailsEngine
{
    /// <summary>
    /// Applies guardrails to a raw composite score and reports which ones fired.
    /// </summary>
    /// <param name="rawScore">The raw composite score before guardrails.</param>
    /// <param name="signal">The signal input that produced this score.</param>
    /// <param name="dimensions">The individual dimension scores.</param>
    /// <param name="guardrails">The guardrails configuration to apply.</param>
    /// <returns>The result with adjusted score and list of applied guardrails.</returns>
    GuardrailsResult Apply(
        int rawScore,
        EwsSignalInput signal,
        ImmutableArray<EwsDimensionScore> dimensions,
        EwsGuardrails guardrails);
}
|
||||
@@ -0,0 +1,94 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// BackportEvidenceNormalizer.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Normalizer for BKP (Backport Evidence) dimension.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
/// Normalizes backport evidence to the BKP dimension score.
/// Higher score = more evidence the vulnerability is present (not backported).
/// Lower score = strong evidence of a backport (vulnerability patched).
/// </summary>
public sealed class BackportEvidenceNormalizer : IEwsDimensionNormalizer
{
    /// <inheritdoc />
    public EwsDimension Dimension => EwsDimension.BackportEvidence;

    /// <inheritdoc />
    public int Normalize(EwsSignalInput signal)
    {
        // Vendor confirmation outranks binary analysis entirely.
        if (signal.VendorBackportConfirmed == true)
        {
            return 5; // Almost certainly patched
        }

        return signal.BackportDetected switch
        {
            // Backport found: higher diff confidence pulls the score toward 0.
            true => signal.BackportConfidence is { } detectedConf
                ? (int)((1.0 - detectedConf) * 30)
                : 15, // detected, confidence unknown

            // Backport explicitly ruled out: higher confidence raises the risk score.
            false => signal.BackportConfidence is { } absentConf
                ? (int)(70 + absentConf * 30)
                : 80, // likely vulnerable

            // No analysis at all: assume vulnerable (conservative default).
            null => 75
        };
    }

    /// <inheritdoc />
    public double GetConfidence(EwsSignalInput signal) => signal switch
    {
        // Any vendor statement (either way) is treated as highly reliable.
        { VendorBackportConfirmed: not null } => 0.95,
        // Binary analysis ran: use its own confidence, else a moderate default.
        { BackportDetected: not null } => signal.BackportConfidence ?? 0.6,
        // Nothing ran: everything is assumption.
        _ => 0.2
    };

    /// <inheritdoc />
    public string GetExplanation(EwsSignalInput signal, int normalizedScore)
    {
        var conf = signal.BackportConfidence?.ToString("P0") ?? "unknown";

        return (signal.VendorBackportConfirmed, signal.BackportDetected) switch
        {
            (true, _) => "Vendor confirmed backport; vulnerability patched in this build",
            (_, true) => $"Binary analysis detected backport with {conf} confidence",
            (_, false) => $"Binary analysis found no backport evidence ({conf} confidence)",
            _ => "No backport analysis available; assuming vulnerable"
        };
    }
}
|
||||
@@ -0,0 +1,152 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ExploitabilityNormalizer.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Normalizer for XPL (Exploitability) dimension.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
/// Normalizes exploitability signals to the XPL dimension score.
/// Maps EPSS, KEV, exploit availability, and CVSS to a 0-100 score.
/// </summary>
public sealed class ExploitabilityNormalizer : IEwsDimensionNormalizer
{
    /// <inheritdoc />
    public EwsDimension Dimension => EwsDimension.Exploitability;

    /// <inheritdoc />
    /// <remarks>
    /// KEV membership short-circuits to 100. Otherwise the score is a weighted
    /// average of whichever of EPSS (0.4), exploit-kit availability (0.25),
    /// PoC age (0.15), and CVSS (0.2) are present, renormalized over the
    /// weights actually observed. With no signals at all, returns a neutral 50.
    /// </remarks>
    public int Normalize(EwsSignalInput signal)
    {
        // KEV is the strongest signal: known exploited = maximum exploitability.
        if (signal.IsInKev == true)
        {
            return 100;
        }

        var score = 0.0;
        var weights = 0.0;

        // EPSS probability (most predictive).
        if (signal.EpssProbability.HasValue)
        {
            weights += 0.4;
            // EPSS is already 0-1, scale to 0-100.
            // Apply slight non-linear scaling (x^0.7) to emphasize high-EPSS items.
            var epssScore = Math.Pow(signal.EpssProbability.Value, 0.7) * 100;
            score += epssScore * 0.4;
        }

        // Exploit kit availability (only counted when explicitly known either way).
        if (signal.ExploitKitAvailable == true)
        {
            weights += 0.25;
            score += 90 * 0.25; // Very high if exploit kit exists
        }
        else if (signal.ExploitKitAvailable == false)
        {
            weights += 0.25;
            score += 20 * 0.25; // Lower if explicitly no kit
        }

        // PoC age (older PoC = more likely weaponized).
        if (signal.PocAgeDays.HasValue)
        {
            weights += 0.15;
            var pocScore = signal.PocAgeDays.Value switch
            {
                <= 7 => 60,   // Fresh PoC
                <= 30 => 75,  // 1 month old
                <= 90 => 85,  // 3 months old
                <= 365 => 90, // 1 year old
                _ => 95       // Very old = likely weaponized
            };
            score += pocScore * 0.15;
        }

        // CVSS base score.
        if (signal.CvssBaseScore.HasValue)
        {
            weights += 0.2;
            // Map 0-10 to 0-100.
            score += signal.CvssBaseScore.Value * 10 * 0.2;
        }

        if (weights > 0)
        {
            // Renormalize over the weights actually present. Clamp defensively
            // to the dimension's 0-100 range, consistent with the other
            // normalizers (MitigationStatus, Reachability).
            return Math.Clamp((int)Math.Round(score / weights), 0, 100);
        }

        // No signals at all (no KEV hit, no EPSS, no kit info, no PoC, no CVSS)
        // - assume moderate exploitability.
        return 50;
    }

    /// <inheritdoc />
    /// <remarks>
    /// Returns the confidence of the strongest single evidence source rather
    /// than combining sources; KEV membership is treated as certain.
    /// </remarks>
    public double GetConfidence(EwsSignalInput signal)
    {
        if (signal.IsInKev == true)
        {
            return 1.0; // Absolute certainty
        }

        var confidence = 0.0;

        if (signal.EpssProbability.HasValue)
        {
            confidence = Math.Max(confidence, 0.85);
        }

        if (signal.ExploitKitAvailable.HasValue)
        {
            confidence = Math.Max(confidence, 0.9);
        }

        if (signal.PocAgeDays.HasValue)
        {
            confidence = Math.Max(confidence, 0.7);
        }

        if (signal.CvssBaseScore.HasValue)
        {
            confidence = Math.Max(confidence, 0.5); // CVSS alone is less predictive
        }

        return confidence > 0 ? confidence : 0.3; // Low confidence if no data
    }

    /// <inheritdoc />
    /// <remarks>
    /// Joins one fragment per available signal; the score itself is not
    /// restated because callers render <paramref name="normalizedScore"/> separately.
    /// </remarks>
    public string GetExplanation(EwsSignalInput signal, int normalizedScore)
    {
        var parts = new List<string>();

        if (signal.IsInKev == true)
        {
            parts.Add("CVE is in CISA KEV (Known Exploited Vulnerabilities)");
        }

        if (signal.EpssProbability.HasValue)
        {
            parts.Add($"EPSS probability {signal.EpssProbability.Value:P2}");
        }

        if (signal.ExploitKitAvailable == true)
        {
            parts.Add("exploit kit available");
        }

        if (signal.PocAgeDays.HasValue)
        {
            parts.Add($"PoC available for {signal.PocAgeDays.Value} days");
        }

        if (signal.CvssBaseScore.HasValue)
        {
            parts.Add($"CVSS base score {signal.CvssBaseScore.Value:F1}");
        }

        return parts.Count > 0
            ? string.Join(", ", parts)
            : "No exploitability signals; assuming moderate risk";
    }
}
|
||||
@@ -0,0 +1,118 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// MitigationStatusNormalizer.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Normalizer for MIT (Mitigation Status) dimension.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
/// Normalizes mitigation status signals to the MIT dimension score.
/// Lower score = strong mitigation in place (low residual risk).
/// Higher score = no mitigation or vulnerable status.
/// </summary>
public sealed class MitigationStatusNormalizer : IEwsDimensionNormalizer
{
    /// <inheritdoc />
    public EwsDimension Dimension => EwsDimension.MitigationStatus;

    /// <inheritdoc />
    /// <remarks>
    /// Starts from the VEX status (the primary signal), then subtracts a fixed
    /// credit for each compensating control that is in place, clamping to 0-100.
    /// </remarks>
    public int Normalize(EwsSignalInput signal)
    {
        var residual = ParseVexStatus(signal.VexStatus);

        // Each compensating control removes a fixed amount of residual risk.
        residual -= signal.WorkaroundApplied == true ? 30 : 0;
        residual -= signal.NetworkControlsApplied == true ? 20 : 0;

        return Math.Clamp(residual, 0, 100);
    }

    // Maps a VEX status string (case-insensitive) to a baseline 0-100 residual
    // risk; anything missing or unrecognized is treated as "assume affected".
    private static int ParseVexStatus(string? vexStatus)
    {
        return vexStatus?.ToLowerInvariant() switch
        {
            "not_affected" => 5,          // Confirmed not affected
            "fixed" => 10,                // Fix applied
            "under_investigation" => 60,  // Unknown yet
            "affected" => 90,             // Confirmed vulnerable
            "exploitable" => 100,         // Actively exploitable
            _ => 75                       // No VEX or unknown status = assume affected
        };
    }

    /// <inheritdoc />
    public double GetConfidence(EwsSignalInput signal)
    {
        if (string.IsNullOrEmpty(signal.VexStatus))
        {
            // Without VEX, compensating-control flags are the only evidence.
            return signal.WorkaroundApplied.HasValue || signal.NetworkControlsApplied.HasValue
                ? 0.6
                : 0.2; // No mitigation data at all
        }

        var baseline = signal.VexStatus.ToLowerInvariant() switch
        {
            "not_affected" => 0.9,
            "fixed" => 0.85,
            "affected" => 0.85,
            "exploitable" => 0.95,
            "under_investigation" => 0.4,
            _ => 0.5
        };

        // A stated justification slightly strengthens the VEX claim (capped at 1.0).
        return string.IsNullOrEmpty(signal.VexJustification)
            ? baseline
            : Math.Min(1.0, baseline + 0.1);
    }

    /// <inheritdoc />
    public string GetExplanation(EwsSignalInput signal, int normalizedScore)
    {
        var notes = new List<string>();

        if (!string.IsNullOrEmpty(signal.VexStatus))
        {
            notes.Add($"VEX status: {signal.VexStatus}");

            if (!string.IsNullOrEmpty(signal.VexJustification))
            {
                notes.Add($"justification: {signal.VexJustification}");
            }
        }

        if (signal.WorkaroundApplied == true)
        {
            notes.Add("workaround applied");
        }

        if (signal.NetworkControlsApplied == true)
        {
            notes.Add("network controls in place");
        }

        return notes.Count > 0
            ? string.Join(", ", notes)
            : "No mitigation status available; assuming affected";
    }
}
|
||||
@@ -0,0 +1,122 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ReachabilityNormalizer.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Normalizer for RCH (Reachability) dimension.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
/// Normalizes reachability signals to the RCH dimension score.
/// Maps R0-R4 tiers and call graph confidence to a 0-100 score.
/// </summary>
public sealed class ReachabilityNormalizer : IEwsDimensionNormalizer
{
    /// <inheritdoc />
    public EwsDimension Dimension => EwsDimension.Reachability;

    /// <inheritdoc />
    /// <remarks>
    /// The R0-R4 tier is the primary signal; call graph confidence blends the
    /// tier score toward a neutral midpoint, and a runtime trace adds a boost
    /// when the tier already indicates reachability. Fallbacks apply when the
    /// tier is absent; with no evidence at all the result is a conservative 75.
    /// </remarks>
    public int Normalize(EwsSignalInput signal)
    {
        if (!signal.ReachabilityTier.HasValue)
        {
            if (signal.CallGraphConfidence.HasValue)
            {
                // No tier: map call-graph confidence into a 25-75 band
                // (high confidence but no tier = assume moderate reachability).
                return (int)(50 * signal.CallGraphConfidence.Value) + 25;
            }

            // Runtime observation alone is strong evidence of reachability;
            // otherwise assume reachable (conservative default).
            return signal.RuntimeTraceConfirmed == true ? 85 : 75;
        }

        var score = signal.ReachabilityTier.Value switch
        {
            0 => 0,   // R0: Unreachable
            1 => 20,  // R1: Present in dependency but not imported
            2 => 40,  // R2: Imported but not called
            3 => 70,  // R3: Called but not reachable from entrypoint
            4 => 100, // R4: Reachable from entrypoint
            _ => 50   // Unknown tier - moderate assumption
        };

        if (signal.CallGraphConfidence.HasValue)
        {
            // Blend toward the neutral midpoint (50) as confidence drops:
            // high confidence trusts the tier, low confidence discounts it.
            var c = signal.CallGraphConfidence.Value;
            score = (int)(score * c + 50 * (1 - c));
        }

        // Runtime trace confirmation boosts the score when the tier already
        // points at reachability.
        if (signal.RuntimeTraceConfirmed == true && score >= 70)
        {
            score = Math.Min(100, score + 15);
        }

        return Math.Clamp(score, 0, 100);
    }

    /// <inheritdoc />
    public double GetConfidence(EwsSignalInput signal)
    {
        if (signal.ReachabilityTier.HasValue)
        {
            // Tier alone is worth 0.7; call graph confidence tops it up.
            return signal.CallGraphConfidence is { } cgc
                ? Math.Min(1.0, 0.7 + cgc * 0.3)
                : 0.7;
        }

        if (signal.CallGraphConfidence is { } confidence)
        {
            return confidence * 0.6;
        }

        // Runtime evidence is high confidence; otherwise pure assumption.
        return signal.RuntimeTraceConfirmed == true ? 0.9 : 0.2;
    }

    /// <inheritdoc />
    public string GetExplanation(EwsSignalInput signal, int normalizedScore)
    {
        if (signal.ReachabilityTier is { } tier)
        {
            var tierName = tier switch
            {
                0 => "unreachable",
                1 => "in-dependency-not-imported",
                2 => "imported-not-called",
                3 => "called-not-entrypoint-reachable",
                4 => "entrypoint-reachable",
                _ => "unknown-tier"
            };

            var confidenceText = signal.CallGraphConfidence?.ToString("P0") ?? "unknown";
            return $"Reachability tier R{tier} ({tierName}), call graph confidence {confidenceText}";
        }

        return signal.RuntimeTraceConfirmed == true
            ? "Runtime trace confirmed reachability"
            : "No reachability analysis; assumed reachable (conservative)";
    }
}
|
||||
@@ -0,0 +1,116 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// RuntimeSignalsNormalizer.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Normalizer for RTS (Runtime Signals) dimension.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
/// Normalizes runtime signals to the RTS dimension score.
/// Higher score = more evidence of runtime activity.
/// </summary>
public sealed class RuntimeSignalsNormalizer : IEwsDimensionNormalizer
{
    /// <inheritdoc />
    public EwsDimension Dimension => EwsDimension.RuntimeSignals;

    /// <inheritdoc />
    /// <remarks>
    /// Weighted average over the signals that are present: instrumentation
    /// coverage (0.3), invocation count on a log-like step scale (0.4), and
    /// APM active usage (0.3). NOTE(review): higher instrumentation coverage
    /// raises the score regardless of observed invocations - confirm that is
    /// the intended semantics. Returns a neutral 50 when no signals exist.
    /// </remarks>
    public int Normalize(EwsSignalInput signal)
    {
        var weighted = 0.0;
        var totalWeight = 0.0;

        // Fold one signal (value in 0-1) into the weighted average.
        void Accumulate(double value, double weight)
        {
            totalWeight += weight;
            weighted += value * weight;
        }

        if (signal.InstrumentationCoverage.HasValue)
        {
            Accumulate(signal.InstrumentationCoverage.Value, 0.3);
        }

        if (signal.RuntimeInvocationCount.HasValue)
        {
            // Step scale: 0 -> 0, 1-10 -> 0.25, 11-100 -> 0.5,
            // 101-1000 -> 0.75, 1000+ -> 1.0.
            var invocationLevel = signal.RuntimeInvocationCount.Value switch
            {
                0 => 0.0,
                <= 10 => 0.25,
                <= 100 => 0.5,
                <= 1000 => 0.75,
                _ => 1.0
            };
            Accumulate(invocationLevel, 0.4);
        }

        if (signal.ApmActiveUsage.HasValue)
        {
            Accumulate(signal.ApmActiveUsage.Value ? 1.0 : 0.0, 0.3);
        }

        if (totalWeight > 0)
        {
            // Renormalize to 0-100 over the weights actually present.
            return (int)Math.Round(weighted / totalWeight * 100);
        }

        // No runtime signals - assume moderate risk (we don't know).
        return 50;
    }

    /// <inheritdoc />
    public double GetConfidence(EwsSignalInput signal)
    {
        // Take the strongest single evidence source rather than combining them;
        // instrumentation coverage is its own confidence measure.
        var best = signal.InstrumentationCoverage ?? 0.0;

        if (signal.RuntimeInvocationCount.HasValue && best < 0.8)
        {
            best = 0.8; // Direct invocation counts are a good data point
        }

        if (signal.ApmActiveUsage.HasValue && best < 0.7)
        {
            best = 0.7;
        }

        return best > 0 ? best : 0.2; // Low if no data
    }

    /// <inheritdoc />
    public string GetExplanation(EwsSignalInput signal, int normalizedScore)
    {
        var notes = new List<string>();

        if (signal.InstrumentationCoverage.HasValue)
        {
            notes.Add($"instrumentation coverage {signal.InstrumentationCoverage.Value:P0}");
        }

        if (signal.RuntimeInvocationCount.HasValue)
        {
            notes.Add($"{signal.RuntimeInvocationCount.Value} runtime invocations observed");
        }

        if (signal.ApmActiveUsage.HasValue)
        {
            if (signal.ApmActiveUsage.Value)
            {
                notes.Add("APM shows active usage");
            }
            else
            {
                notes.Add("APM shows no active usage");
            }
        }

        return notes.Count > 0
            ? string.Join(", ", notes)
            : "No runtime signals available; assuming moderate activity";
    }
}
|
||||
@@ -0,0 +1,138 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// SourceConfidenceNormalizer.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Normalizer for SRC (Source Confidence) dimension.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
/// Normalizes source confidence signals to the SRC dimension score.
/// Higher score = less confidence in source data (higher uncertainty risk).
/// Lower score = high confidence in source data.
/// </summary>
public sealed class SourceConfidenceNormalizer : IEwsDimensionNormalizer
{
    /// <inheritdoc />
    public EwsDimension Dimension => EwsDimension.SourceConfidence;

    /// <inheritdoc />
    /// <remarks>
    /// This dimension is inverted: a weighted confidence average is computed
    /// over the signals present (completeness 0.35, signature 0.25,
    /// attestations 0.2, lineage 0.2) and then flipped, so high confidence in
    /// the source yields a low risk score. With no signals, assume high
    /// uncertainty (80).
    /// </remarks>
    public int Normalize(EwsSignalInput signal)
    {
        var trust = 0.0;
        var observedWeight = 0.0;

        // SBOM completeness (already a 0-1 fraction).
        if (signal.SbomCompleteness.HasValue)
        {
            observedWeight += 0.35;
            trust += signal.SbomCompleteness.Value * 0.35;
        }

        // SBOM signature: full credit when signed, none otherwise.
        if (signal.SbomSigned.HasValue)
        {
            observedWeight += 0.25;
            trust += signal.SbomSigned.Value ? 0.25 : 0.0;
        }

        // Attestation count with diminishing returns.
        if (signal.AttestationCount.HasValue)
        {
            observedWeight += 0.2;
            var attestationLevel = signal.AttestationCount.Value switch
            {
                0 => 0.0,
                1 => 0.5,
                2 => 0.7,
                3 => 0.85,
                _ => 1.0
            };
            trust += attestationLevel * 0.2;
        }

        // Verified dependency lineage.
        if (signal.LineageVerified.HasValue)
        {
            observedWeight += 0.2;
            trust += signal.LineageVerified.Value ? 0.2 : 0.0;
        }

        if (observedWeight > 0)
        {
            // Invert: high confidence = low score (low risk from source uncertainty).
            var normalizedConfidence = trust / observedWeight;
            return (int)Math.Round((1.0 - normalizedConfidence) * 100);
        }

        // No source signals - assume high uncertainty.
        return 80;
    }

    /// <inheritdoc />
    public double GetConfidence(EwsSignalInput signal)
    {
        // Confidence in this assessment grows with the number of distinct signals.
        var signalCount = 0;
        if (signal.SbomCompleteness.HasValue) signalCount++;
        if (signal.SbomSigned.HasValue) signalCount++;
        if (signal.AttestationCount.HasValue) signalCount++;
        if (signal.LineageVerified.HasValue) signalCount++;

        return signalCount == 0
            ? 0.2                          // No data at all
            : 0.4 + (signalCount * 0.15);  // 1-4 signals -> 0.55-1.0
    }

    /// <inheritdoc />
    public string GetExplanation(EwsSignalInput signal, int normalizedScore)
    {
        var notes = new List<string>();

        if (signal.SbomCompleteness.HasValue)
        {
            notes.Add($"SBOM completeness {signal.SbomCompleteness.Value:P0}");
        }

        switch (signal.SbomSigned)
        {
            case true:
                notes.Add("SBOM is signed");
                break;
            case false:
                notes.Add("SBOM is not signed");
                break;
        }

        if (signal.AttestationCount.HasValue)
        {
            notes.Add($"{signal.AttestationCount.Value} attestation(s) available");
        }

        switch (signal.LineageVerified)
        {
            case true:
                notes.Add("dependency lineage verified");
                break;
            case false:
                notes.Add("dependency lineage not verified");
                break;
        }

        return notes.Count > 0
            ? string.Join(", ", notes)
            : "No source confidence signals; assuming high uncertainty";
    }
}
|
||||
@@ -0,0 +1,217 @@
|
||||
// <copyright file="IDeltaIfPresentCalculator.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the BUSL-1.1.
|
||||
// </copyright>
|
||||
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
/// Calculates hypothetical score changes if missing signals were present with various assumed values.
/// This enables "what-if" analysis to help operators prioritize signal collection efforts.
/// </summary>
public interface IDeltaIfPresentCalculator
{
    /// <summary>
    /// Calculates the hypothetical trust score delta if a specific missing signal were present.
    /// </summary>
    /// <param name="snapshot">Current signal snapshot.</param>
    /// <param name="signal">Name of the missing signal to simulate.</param>
    /// <param name="assumedValue">The assumed value for the signal (0.0-1.0 normalized score).</param>
    /// <param name="weights">Optional signal weights; when null, implementation defaults apply (confirm with implementation).</param>
    /// <returns>Delta calculation result showing score and entropy impact.</returns>
    DeltaIfPresentResult CalculateSingleSignalDelta(
        SignalSnapshot snapshot,
        string signal,
        double assumedValue,
        SignalWeights? weights = null);

    /// <summary>
    /// Calculates hypothetical impacts for all missing signals at multiple assumed values
    /// (best-case, worst-case, and prior scenarios per gap).
    /// </summary>
    /// <param name="snapshot">Current signal snapshot.</param>
    /// <param name="weights">Optional signal weights; when null, implementation defaults apply (confirm with implementation).</param>
    /// <returns>Full delta-if-present analysis for all gaps.</returns>
    DeltaIfPresentAnalysis CalculateFullAnalysis(
        SignalSnapshot snapshot,
        SignalWeights? weights = null);

    /// <summary>
    /// Calculates the best-case and worst-case score bounds if all missing signals were present.
    /// </summary>
    /// <param name="snapshot">Current signal snapshot.</param>
    /// <param name="weights">Optional signal weights; when null, implementation defaults apply (confirm with implementation).</param>
    /// <returns>Score bounds with completeness impact.</returns>
    ScoreBounds CalculateScoreBounds(
        SignalSnapshot snapshot,
        SignalWeights? weights = null);
}
|
||||
|
||||
/// <summary>
/// Result of a single signal delta-if-present calculation.
/// </summary>
public sealed record DeltaIfPresentResult
{
    /// <summary>
    /// The signal that was simulated as present.
    /// </summary>
    public required string Signal { get; init; }

    /// <summary>
    /// The current score without this signal.
    /// </summary>
    public required double CurrentScore { get; init; }

    /// <summary>
    /// The hypothetical score with this signal present at the assumed value.
    /// </summary>
    public required double HypotheticalScore { get; init; }

    /// <summary>
    /// The delta (hypothetical - current). Positive means the score would increase.
    /// Derived from <see cref="HypotheticalScore"/> and <see cref="CurrentScore"/>; not independently settable.
    /// </summary>
    public double Delta => HypotheticalScore - CurrentScore;

    /// <summary>
    /// The assumed value used for the simulation.
    /// </summary>
    public required double AssumedValue { get; init; }

    /// <summary>
    /// The weight of this signal in the scoring model.
    /// </summary>
    public required double SignalWeight { get; init; }

    /// <summary>
    /// Current entropy before adding the signal.
    /// </summary>
    public required double CurrentEntropy { get; init; }

    /// <summary>
    /// Hypothetical entropy after adding the signal.
    /// </summary>
    public required double HypotheticalEntropy { get; init; }

    /// <summary>
    /// Change in entropy (negative means entropy would decrease = less uncertainty).
    /// Derived from <see cref="HypotheticalEntropy"/> and <see cref="CurrentEntropy"/>.
    /// </summary>
    public double EntropyDelta => HypotheticalEntropy - CurrentEntropy;
}
|
||||
|
||||
/// <summary>
/// Complete analysis of all missing signals with delta-if-present calculations.
/// </summary>
public sealed record DeltaIfPresentAnalysis
{
    /// <summary>
    /// Current aggregate score.
    /// </summary>
    public required double CurrentScore { get; init; }

    /// <summary>
    /// Current entropy (uncertainty).
    /// </summary>
    public required double CurrentEntropy { get; init; }

    /// <summary>
    /// List of missing signals with their potential impact at different assumed values
    /// (best-case, worst-case, prior scenarios).
    /// </summary>
    public required IReadOnlyList<SignalDeltaScenarios> GapAnalysis { get; init; }

    /// <summary>
    /// Prioritized list of signal names by maximum potential impact
    /// (presumably descending by <see cref="SignalDeltaScenarios.MaxImpact"/> - confirm with producing calculator).
    /// </summary>
    public required IReadOnlyList<string> PrioritizedGaps { get; init; }

    /// <summary>
    /// When this analysis was computed.
    /// </summary>
    public required DateTimeOffset ComputedAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// Delta scenarios for a single missing signal at various assumed values.
/// </summary>
public sealed record SignalDeltaScenarios
{
    /// <summary>
    /// Signal name.
    /// </summary>
    public required string Signal { get; init; }

    /// <summary>
    /// Signal weight in the scoring model.
    /// </summary>
    public required double Weight { get; init; }

    /// <summary>
    /// Why this signal is missing.
    /// </summary>
    public required SignalGapReason GapReason { get; init; }

    /// <summary>
    /// Delta if signal present with best-case value (lowest risk contribution).
    /// </summary>
    public required DeltaIfPresentResult BestCase { get; init; }

    /// <summary>
    /// Delta if signal present with worst-case value (highest risk contribution).
    /// </summary>
    public required DeltaIfPresentResult WorstCase { get; init; }

    /// <summary>
    /// Delta if signal present with prior/expected value.
    /// </summary>
    public required DeltaIfPresentResult PriorCase { get; init; }

    /// <summary>
    /// Maximum absolute delta magnitude across the best- and worst-case scenarios.
    /// <see cref="PriorCase"/> is intentionally excluded: its assumed value lies
    /// between the extremes, so it cannot exceed this bound.
    /// </summary>
    public double MaxImpact => Math.Max(Math.Abs(BestCase.Delta), Math.Abs(WorstCase.Delta));
}
|
||||
|
||||
/// <summary>
/// Best-case and worst-case score bounds if all missing signals were present.
/// </summary>
public sealed record ScoreBounds
{
    /// <summary>
    /// Current score with missing signals.
    /// </summary>
    public required double CurrentScore { get; init; }

    /// <summary>
    /// Minimum possible score (all missing signals at worst-case values).
    /// </summary>
    public required double MinimumScore { get; init; }

    /// <summary>
    /// Maximum possible score (all missing signals at best-case values).
    /// </summary>
    public required double MaximumScore { get; init; }

    /// <summary>
    /// Score range (max - min). Derived; wider range = more uncertainty from the gaps.
    /// </summary>
    public double Range => MaximumScore - MinimumScore;

    /// <summary>
    /// Current entropy.
    /// </summary>
    public required double CurrentEntropy { get; init; }

    /// <summary>
    /// Entropy if all signals were present (by definition 0 - no missing-signal uncertainty).
    /// </summary>
    public double CompleteEntropy => 0.0;

    /// <summary>
    /// Number of missing signals.
    /// </summary>
    public required int GapCount { get; init; }

    /// <summary>
    /// Percentage of score weight that is missing.
    /// </summary>
    public required double MissingWeightPercentage { get; init; }
}
|
||||
@@ -0,0 +1,35 @@
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
/// Interface for impact score calculation.
/// </summary>
public interface IImpactScoreCalculator
{
    /// <summary>
    /// Calculates the multi-factor impact score for unknowns.
    /// </summary>
    /// <param name="context">Impact context with environment, data sensitivity, fleet prevalence, SLA tier, and CVSS.</param>
    /// <param name="weights">Optional custom weights (uses defaults if null).</param>
    /// <returns>Calculated impact score with all component scores.</returns>
    ImpactScore Calculate(ImpactContext context, ImpactFactorWeights? weights = null);

    /// <summary>
    /// Normalizes an environment type to a score [0.0, 1.0].
    /// </summary>
    /// <returns>Normalized exposure score in [0.0, 1.0].</returns>
    double NormalizeEnvironment(EnvironmentType environment);

    /// <summary>
    /// Normalizes a data sensitivity level to a score [0.0, 1.0].
    /// </summary>
    /// <returns>Normalized sensitivity score in [0.0, 1.0].</returns>
    double NormalizeDataSensitivity(DataSensitivity sensitivity);

    /// <summary>
    /// Normalizes an SLA tier to a score [0.0, 1.0].
    /// </summary>
    /// <returns>Normalized criticality score in [0.0, 1.0].</returns>
    double NormalizeSlaTier(SlaTier tier);

    /// <summary>
    /// Normalizes a CVSS score [0.0, 10.0] to a score [0.0, 1.0].
    /// </summary>
    /// <returns>Normalized severity score in [0.0, 1.0].</returns>
    double NormalizeCvss(double cvssScore);
}
|
||||
@@ -0,0 +1,42 @@
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
/// Configurable weights for the impact scoring factors.
/// A well-formed instance has weights summing to 1.0; use
/// <see cref="IsNormalized"/> to verify.
/// </summary>
public sealed record ImpactFactorWeights
{
    /// <summary>Default weights following advisory recommendations.</summary>
    public static readonly ImpactFactorWeights Default = new ImpactFactorWeights
    {
        EnvironmentExposureWeight = 0.20,
        DataSensitivityWeight = 0.20,
        FleetPrevalenceWeight = 0.15,
        SlaTierWeight = 0.15,
        CvssSeverityWeight = 0.30
    };

    /// <summary>Weight for environment exposure factor (prod/stage/dev).</summary>
    public required double EnvironmentExposureWeight { get; init; }

    /// <summary>Weight for data sensitivity factor (PII, financial, etc.).</summary>
    public required double DataSensitivityWeight { get; init; }

    /// <summary>Weight for fleet prevalence factor (how many assets affected).</summary>
    public required double FleetPrevalenceWeight { get; init; }

    /// <summary>Weight for SLA tier factor (business criticality).</summary>
    public required double SlaTierWeight { get; init; }

    /// <summary>Weight for CVSS severity factor.</summary>
    public required double CvssSeverityWeight { get; init; }

    /// <summary>Sum of all weights (should equal 1.0 for normalized calculations).</summary>
    public double TotalWeight
    {
        get
        {
            var sum = EnvironmentExposureWeight;
            sum = sum + DataSensitivityWeight;
            sum = sum + FleetPrevalenceWeight;
            sum = sum + SlaTierWeight;
            sum = sum + CvssSeverityWeight;
            return sum;
        }
    }

    /// <summary>Validates that weights sum to approximately 1.0 (within <paramref name="tolerance"/>).</summary>
    public bool IsNormalized(double tolerance = 0.001)
    {
        return Math.Abs(TotalWeight - 1.0) < tolerance;
    }
}
|
||||
@@ -0,0 +1,177 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
/// Environment type classification for exposure scoring.
/// Numeric values increase with exposure (Development = lowest, Production = highest).
/// </summary>
public enum EnvironmentType
{
    /// <summary>Development environment - lowest exposure.</summary>
    Development = 0,

    /// <summary>Testing/QA environment.</summary>
    Testing = 1,

    /// <summary>Staging/Pre-production environment.</summary>
    Staging = 2,

    /// <summary>Production environment - highest exposure.</summary>
    Production = 3
}
|
||||
|
||||
/// <summary>
/// Data sensitivity classification for impact scoring.
/// Numeric values increase with sensitivity (Public = lowest, Classified = highest).
/// </summary>
public enum DataSensitivity
{
    /// <summary>Public or non-sensitive data.</summary>
    Public = 0,

    /// <summary>Internal/company-confidential data.</summary>
    Internal = 1,

    /// <summary>Contains PII (Personally Identifiable Information).</summary>
    Pii = 2,

    /// <summary>Contains financial data.</summary>
    Financial = 3,

    /// <summary>Contains healthcare/PHI data.</summary>
    Healthcare = 4,

    /// <summary>Contains classified/government data.</summary>
    Classified = 5
}
|
||||
|
||||
/// <summary>
/// Service-level tier describing business criticality for scoring purposes.
/// </summary>
public enum SlaTier
{
    /// <summary>Non-critical — extended downtime is tolerable.</summary>
    NonCritical = 0,

    /// <summary>Standard — everyday business operations.</summary>
    Standard = 1,

    /// <summary>Important — customer-facing or revenue-impacting.</summary>
    Important = 2,

    /// <summary>Critical — core business functionality.</summary>
    Critical = 3,

    /// <summary>Mission-critical — the business cannot operate without it.</summary>
    MissionCritical = 4
}
|
||||
|
||||
/// <summary>
/// Inputs consumed by the impact-score calculation.
/// </summary>
public sealed record ImpactContext
{
    /// <summary>Environment in which the component runs.</summary>
    [JsonPropertyName("environment")]
    public required EnvironmentType Environment { get; init; }

    /// <summary>Most sensitive class of data the component touches.</summary>
    [JsonPropertyName("data_sensitivity")]
    public required DataSensitivity DataSensitivity { get; init; }

    /// <summary>Fraction of the fleet affected, expected in [0.0, 1.0].</summary>
    [JsonPropertyName("fleet_prevalence")]
    public required double FleetPrevalence { get; init; }

    /// <summary>SLA tier of the impacted service.</summary>
    [JsonPropertyName("sla_tier")]
    public required SlaTier SlaTier { get; init; }

    /// <summary>CVSS base score, expected in [0.0, 10.0].</summary>
    [JsonPropertyName("cvss_score")]
    public required double CvssScore { get; init; }

    /// <summary>
    /// Conservative defaults used when the real context is unknown:
    /// worst-case environment, moderate values for everything else.
    /// </summary>
    public static ImpactContext DefaultForUnknowns() => new()
    {
        Environment = EnvironmentType.Production,   // assume worst case
        DataSensitivity = DataSensitivity.Internal, // conservative middle ground
        FleetPrevalence = 0.5,                      // moderate prevalence
        SlaTier = SlaTier.Standard,                 // standard tier
        CvssScore = 5.0                             // medium severity
    };
}
|
||||
|
||||
/// <summary>
/// Result of impact score calculation.
/// </summary>
public sealed record ImpactScore
{
    /// <summary>Final weighted impact score [0.0, 1.0].</summary>
    [JsonPropertyName("score")]
    public required double Score { get; init; }

    /// <summary>Basis points representation (0-10000) for deterministic storage.</summary>
    [JsonPropertyName("basis_points")]
    public required int BasisPoints { get; init; }

    /// <summary>Environment exposure component score [0.0, 1.0].</summary>
    [JsonPropertyName("env_exposure")]
    public required double EnvironmentExposure { get; init; }

    /// <summary>Data sensitivity component score [0.0, 1.0].</summary>
    [JsonPropertyName("data_sensitivity")]
    public required double DataSensitivityScore { get; init; }

    /// <summary>Fleet prevalence component score [0.0, 1.0].</summary>
    [JsonPropertyName("fleet_prevalence")]
    public required double FleetPrevalenceScore { get; init; }

    /// <summary>SLA tier component score [0.0, 1.0].</summary>
    [JsonPropertyName("sla_tier")]
    public required double SlaTierScore { get; init; }

    /// <summary>CVSS severity component score [0.0, 1.0].</summary>
    [JsonPropertyName("cvss_severity")]
    public required double CvssSeverityScore { get; init; }

    /// <summary>When this score was calculated (UTC).</summary>
    [JsonPropertyName("calculated_at")]
    public required DateTimeOffset CalculatedAt { get; init; }

    /// <summary>
    /// Creates an impact score from normalized component scores and factor weights.
    /// The weighted sum is clamped to [0.0, 1.0] before basis-point conversion.
    /// </summary>
    /// <param name="envExposure">Normalized environment exposure [0.0, 1.0].</param>
    /// <param name="dataSensitivity">Normalized data sensitivity [0.0, 1.0].</param>
    /// <param name="fleetPrevalence">Normalized fleet prevalence [0.0, 1.0].</param>
    /// <param name="slaTier">Normalized SLA tier [0.0, 1.0].</param>
    /// <param name="cvssSeverity">Normalized CVSS severity [0.0, 1.0].</param>
    /// <param name="weights">Factor weights applied to each component.</param>
    /// <param name="calculatedAt">Timestamp recorded on the result (UTC).</param>
    /// <exception cref="ArgumentNullException"><paramref name="weights"/> is null.</exception>
    public static ImpactScore Create(
        double envExposure,
        double dataSensitivity,
        double fleetPrevalence,
        double slaTier,
        double cvssSeverity,
        ImpactFactorWeights weights,
        DateTimeOffset calculatedAt)
    {
        // FIX: fail fast with ArgumentNullException instead of an NRE mid-calculation.
        ArgumentNullException.ThrowIfNull(weights);

        var score =
            envExposure * weights.EnvironmentExposureWeight +
            dataSensitivity * weights.DataSensitivityWeight +
            fleetPrevalence * weights.FleetPrevalenceWeight +
            slaTier * weights.SlaTierWeight +
            cvssSeverity * weights.CvssSeverityWeight;

        var clampedScore = Math.Clamp(score, 0.0, 1.0);
        var basisPoints = (int)Math.Round(clampedScore * 10000);

        return new ImpactScore
        {
            Score = clampedScore,
            BasisPoints = basisPoints,
            EnvironmentExposure = envExposure,
            DataSensitivityScore = dataSensitivity,
            FleetPrevalenceScore = fleetPrevalence,
            SlaTierScore = slaTier,
            CvssSeverityScore = cvssSeverity,
            CalculatedAt = calculatedAt
        };
    }
}
|
||||
@@ -0,0 +1,127 @@
|
||||
using System.Diagnostics.Metrics;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
/// Calculates multi-factor impact scores for unknowns using the formula:
/// impact = w_env * EnvExposure + w_data * DataSensitivity + w_fleet * FleetPrevalence + w_sla * SLATier + w_cvss * CVSSSeverity
/// </summary>
public sealed class ImpactScoreCalculator : IImpactScoreCalculator
{
    private static readonly Meter Meter = new("StellaOps.Policy.Determinization");
    private static readonly Histogram<double> ImpactHistogram = Meter.CreateHistogram<double>(
        "stellaops_determinization_impact_score",
        unit: "ratio",
        description: "Impact score for unknowns (0.0 = minimal impact, 1.0 = critical impact)");

    private readonly ILogger<ImpactScoreCalculator> _logger;
    private readonly TimeProvider _timeProvider;

    /// <param name="logger">Logger used for diagnostics; must not be null.</param>
    /// <param name="timeProvider">Clock source; defaults to <see cref="TimeProvider.System"/>.</param>
    /// <exception cref="ArgumentNullException"><paramref name="logger"/> is null.</exception>
    public ImpactScoreCalculator(ILogger<ImpactScoreCalculator> logger, TimeProvider? timeProvider = null)
    {
        // FIX: fail fast on a null logger instead of deferring to an NRE inside Calculate.
        ArgumentNullException.ThrowIfNull(logger);
        _logger = logger;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public ImpactScore Calculate(ImpactContext context, ImpactFactorWeights? weights = null)
    {
        ArgumentNullException.ThrowIfNull(context);

        var effectiveWeights = weights ?? ImpactFactorWeights.Default;

        // Warn (but proceed) when weights do not sum to ~1.0.
        if (!effectiveWeights.IsNormalized())
        {
            _logger.LogWarning(
                "Impact factor weights are not normalized (total={Total:F4}); results may be unexpected",
                effectiveWeights.TotalWeight);
        }

        // Normalize each dimension to [0.0, 1.0].
        var envScore = NormalizeEnvironment(context.Environment);
        var dataScore = NormalizeDataSensitivity(context.DataSensitivity);
        // FIX: Math.Clamp(NaN, ...) returns NaN, which would poison the weighted
        // sum; treat NaN prevalence as the moderate 0.5 default used elsewhere.
        var fleetScore = double.IsNaN(context.FleetPrevalence)
            ? 0.5
            : Math.Clamp(context.FleetPrevalence, 0.0, 1.0);
        var slaScore = NormalizeSlaTier(context.SlaTier);
        var cvssScore = NormalizeCvss(context.CvssScore);

        var result = ImpactScore.Create(
            envScore,
            dataScore,
            fleetScore,
            slaScore,
            cvssScore,
            effectiveWeights,
            _timeProvider.GetUtcNow());

        _logger.LogDebug(
            "Calculated impact score {Score:F4} (basis points={BasisPoints}) from env={Env:F2}, data={Data:F2}, fleet={Fleet:F2}, sla={Sla:F2}, cvss={Cvss:F2}",
            result.Score,
            result.BasisPoints,
            envScore,
            dataScore,
            fleetScore,
            slaScore,
            cvssScore);

        // Emit metric tagged by environment and data sensitivity.
        ImpactHistogram.Record(result.Score,
            new KeyValuePair<string, object?>("environment", context.Environment.ToString()),
            new KeyValuePair<string, object?>("data_sensitivity", context.DataSensitivity.ToString()));

        return result;
    }

    /// <inheritdoc />
    public double NormalizeEnvironment(EnvironmentType environment)
    {
        // Development = 0.0, Production = 1.0
        return environment switch
        {
            EnvironmentType.Development => 0.0,
            EnvironmentType.Testing => 0.33,
            EnvironmentType.Staging => 0.66,
            EnvironmentType.Production => 1.0,
            _ => 0.5 // Unknown defaults to moderate
        };
    }

    /// <inheritdoc />
    public double NormalizeDataSensitivity(DataSensitivity sensitivity)
    {
        // Public = 0.0, Classified = 1.0
        return sensitivity switch
        {
            DataSensitivity.Public => 0.0,
            DataSensitivity.Internal => 0.2,
            DataSensitivity.Pii => 0.5,
            DataSensitivity.Financial => 0.7,
            DataSensitivity.Healthcare => 0.8,
            DataSensitivity.Classified => 1.0,
            _ => 0.5 // Unknown defaults to moderate
        };
    }

    /// <inheritdoc />
    public double NormalizeSlaTier(SlaTier tier)
    {
        // NonCritical = 0.0, MissionCritical = 1.0
        return tier switch
        {
            SlaTier.NonCritical => 0.0,
            SlaTier.Standard => 0.25,
            SlaTier.Important => 0.5,
            SlaTier.Critical => 0.75,
            SlaTier.MissionCritical => 1.0,
            _ => 0.5 // Unknown defaults to moderate
        };
    }

    /// <inheritdoc />
    public double NormalizeCvss(double cvssScore)
    {
        // FIX: NaN would propagate through the weighted sum; fall back to the
        // moderate 0.5 default used for other unknown inputs.
        if (double.IsNaN(cvssScore))
            return 0.5;

        // CVSS 0.0-10.0 -> 0.0-1.0
        return Math.Clamp(cvssScore / 10.0, 0.0, 1.0);
    }
}
|
||||
@@ -0,0 +1,160 @@
|
||||
using StellaOps.Policy.Determinization.Evidence;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using StellaOps.Policy.Scoring;
|
||||
using StellaOps.Policy.TrustLattice;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
/// Score.v1 predicate format for DSSE-signable attestation.
/// Carries every scoring dimension in one deterministic payload; all numeric
/// scores use basis points (0-10000) so the payload is bit-exact reproducible.
/// </summary>
public sealed record ScoreV1Predicate
{
    /// <summary>Predicate type URI for DSSE/In-Toto attestations.</summary>
    public const string PredicateType = "https://stella-ops.org/predicates/score/v1";

    /// <summary>Artifact being scored (PURL or component identifier).</summary>
    public required string ArtifactId { get; init; }

    /// <summary>Vulnerability identifier if applicable (CVE, GHSA, etc.).</summary>
    public string? VulnerabilityId { get; init; }

    /// <summary>Final trust score in basis points (0-10000).</summary>
    public required int TrustScoreBps { get; init; }

    /// <summary>Risk tier derived from the trust score.</summary>
    public required string Tier { get; init; }

    /// <summary>Lattice verdict produced by K4 logic evaluation.</summary>
    public required K4Value LatticeVerdict { get; init; }

    /// <summary>Uncertainty entropy in basis points (0-10000).</summary>
    public required int UncertaintyBps { get; init; }

    /// <summary>Per-dimension scores in basis points.</summary>
    public required ScoreDimensionsBps Dimensions { get; init; }

    /// <summary>Weights applied during this scoring run (in basis points).</summary>
    public required WeightsBps WeightsUsed { get; init; }

    /// <summary>SHA-256 digest of the policy, for reproducibility.</summary>
    public required string PolicyDigest { get; init; }

    /// <summary>Timestamp of score computation (UTC).</summary>
    public required DateTimeOffset ComputedAt { get; init; }

    /// <summary>Tenant / namespace scope.</summary>
    public string? TenantId { get; init; }
}
|
||||
|
||||
/// <summary>
/// Per-dimension scoring values, all expressed in basis points (0-10000).
/// </summary>
public sealed record ScoreDimensionsBps
{
    /// <summary>Base severity (from CVSS or equivalent) in basis points.</summary>
    public required int BaseSeverityBps { get; init; }

    /// <summary>Reachability score in basis points.</summary>
    public required int ReachabilityBps { get; init; }

    /// <summary>Evidence quality score in basis points.</summary>
    public required int EvidenceBps { get; init; }

    /// <summary>Provenance / supply-chain score in basis points.</summary>
    public required int ProvenanceBps { get; init; }

    /// <summary>EPSS score in basis points, when available.</summary>
    public int? EpssBps { get; init; }

    /// <summary>VEX status score in basis points, when available.</summary>
    public int? VexBps { get; init; }
}
|
||||
|
||||
/// <summary>
/// Risk tier enumeration for categorizing trust scores.
/// </summary>
public enum RiskTier
{
    /// <summary>Informational — no meaningful risk.</summary>
    Info = 0,

    /// <summary>Low risk.</summary>
    Low = 1,

    /// <summary>Medium risk.</summary>
    Medium = 2,

    /// <summary>High risk.</summary>
    High = 3,

    /// <summary>Critical risk — highest tier.</summary>
    Critical = 4
}
|
||||
|
||||
/// <summary>
/// Request for computing a trust score.
/// </summary>
public sealed record TrustScoreRequest
{
    /// <summary>Artifact to score (PURL or component identifier).</summary>
    public required string ArtifactId { get; init; }

    /// <summary>Optional vulnerability identifier (CVE, GHSA, etc.).</summary>
    public string? VulnerabilityId { get; init; }

    /// <summary>Optional tenant / namespace scope.</summary>
    public string? TenantId { get; init; }

    /// <summary>Pre-collected signals; when null the computation gathers its own.
    /// NOTE(review): presumed from the type name — confirm against callers.</summary>
    public SignalSnapshot? Signals { get; init; }

    /// <summary>Policy used instead of the default, when supplied.</summary>
    public ScorePolicy? PolicyOverride { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of a trust-score computation, with full explainability.
/// </summary>
public sealed record TrustScoreResult
{
    /// <summary>Score.v1 predicate suitable for attestation signing.</summary>
    public required ScoreV1Predicate Predicate { get; init; }

    /// <summary>Signal snapshot the computation consumed.</summary>
    public required SignalSnapshot SignalsUsed { get; init; }

    /// <summary>True when the computation completed successfully.</summary>
    public required bool Success { get; init; }

    /// <summary>Error message when the computation failed; null otherwise.</summary>
    public string? Error { get; init; }
}
|
||||
@@ -0,0 +1,22 @@
|
||||
namespace StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
|
||||
/// <summary>
/// Supplies observations to the triage queue evaluator.
/// Implementations may back onto a database, a cache, or an in-memory store.
/// </summary>
public interface ITriageObservationSource
{
    /// <summary>
    /// Retrieves observations that are candidates for triage evaluation.
    /// Implementations should exclude observations evaluated more recently than
    /// <see cref="TriageQueueOptions.MinEvaluationIntervalMinutes"/> allows.
    /// </summary>
    /// <param name="tenantId">Restrict to one tenant; null means all tenants.</param>
    /// <param name="maxItems">Upper bound on the number of observations returned.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The candidate observations.</returns>
    Task<IReadOnlyList<TriageObservation>> GetCandidatesAsync(
        string? tenantId = null,
        int maxItems = 500,
        CancellationToken cancellationToken = default);
}
|
||||
@@ -0,0 +1,27 @@
|
||||
namespace StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
|
||||
/// <summary>
/// Turns a batch of observations into a priority-sorted triage queue.
/// </summary>
public interface ITriageQueueEvaluator
{
    /// <summary>
    /// Evaluates the batch and returns a snapshot sorted by decay urgency.
    /// </summary>
    /// <param name="observations">Observations to evaluate.</param>
    /// <param name="now">Reference time for decay calculation (keeps results deterministic).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The sorted triage queue snapshot.</returns>
    Task<TriageQueueSnapshot> EvaluateAsync(
        IReadOnlyList<TriageObservation> observations,
        DateTimeOffset now,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Evaluates one observation and decides whether it belongs in the queue.
    /// </summary>
    /// <param name="observation">The observation to evaluate.</param>
    /// <param name="now">Reference time for decay calculation.</param>
    /// <returns>The triage item, or null when no triage is needed.</returns>
    TriageItem? EvaluateSingle(TriageObservation observation, DateTimeOffset now);
}
|
||||
@@ -0,0 +1,18 @@
|
||||
namespace StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
|
||||
/// <summary>
/// Destination for stale observations that require re-analysis.
/// Implementations may write to an in-memory channel, a message bus, or a database table.
/// </summary>
public interface ITriageReanalysisSink
{
    /// <summary>
    /// Enqueues stale observations for re-analysis.
    /// </summary>
    /// <param name="items">Items to re-analyse; callers pre-filter to stale/approaching.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The number of items actually enqueued.</returns>
    Task<int> EnqueueAsync(
        IReadOnlyList<TriageItem> items,
        CancellationToken cancellationToken = default);
}
|
||||
@@ -0,0 +1,94 @@
|
||||
using System.Collections.Concurrent;
|
||||
using System.Diagnostics.Metrics;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
|
||||
/// <summary>
/// In-memory implementation of <see cref="ITriageReanalysisSink"/>.
/// Stores enqueued items in a thread-safe collection for consumption by re-analysis workers.
/// Suitable for single-node deployments, testing, and offline/air-gap scenarios.
/// </summary>
public sealed class InMemoryTriageReanalysisSink : ITriageReanalysisSink
{
    private static readonly Meter Meter = new("StellaOps.Policy.Determinization");

    private static readonly Counter<long> EnqueuedCounter = Meter.CreateCounter<long>(
        "stellaops_triage_inmemory_enqueued_total",
        unit: "{items}",
        description: "Items enqueued in the in-memory triage sink");

    private static readonly Counter<long> DequeuedCounter = Meter.CreateCounter<long>(
        "stellaops_triage_inmemory_dequeued_total",
        unit: "{items}",
        description: "Items dequeued from the in-memory triage sink");

    private readonly ConcurrentQueue<TriageItem> _queue = new();
    private readonly ILogger<InMemoryTriageReanalysisSink> _logger;

    /// <exception cref="ArgumentNullException"><paramref name="logger"/> is null.</exception>
    public InMemoryTriageReanalysisSink(ILogger<InMemoryTriageReanalysisSink> logger)
    {
        // FIX: fail fast rather than surfacing an NRE on first use.
        ArgumentNullException.ThrowIfNull(logger);
        _logger = logger;
    }

    /// <inheritdoc />
    public Task<int> EnqueueAsync(
        IReadOnlyList<TriageItem> items,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(items);

        var enqueued = 0;
        try
        {
            foreach (var item in items)
            {
                cancellationToken.ThrowIfCancellationRequested();
                _queue.Enqueue(item);
                enqueued++;
            }
        }
        finally
        {
            // FIX: record the counter even when cancellation interrupts the loop;
            // previously items enqueued before the cancellation were never counted.
            if (enqueued > 0)
                EnqueuedCounter.Add(enqueued);
        }

        _logger.LogDebug("Enqueued {Count} triage items (queue depth: {Depth})", enqueued, _queue.Count);

        return Task.FromResult(enqueued);
    }

    /// <summary>
    /// Try to dequeue the next item for re-analysis.
    /// </summary>
    /// <param name="item">The dequeued item, if available.</param>
    /// <returns>True if an item was dequeued.</returns>
    public bool TryDequeue([System.Diagnostics.CodeAnalysis.NotNullWhen(true)] out TriageItem? item)
    {
        var result = _queue.TryDequeue(out item);
        if (result)
            DequeuedCounter.Add(1);
        return result;
    }

    /// <summary>
    /// Drain all pending items.
    /// </summary>
    /// <returns>All pending triage items.</returns>
    public IReadOnlyList<TriageItem> DrainAll()
    {
        var items = new List<TriageItem>();
        while (_queue.TryDequeue(out var item))
        {
            items.Add(item);
        }

        if (items.Count > 0)
            DequeuedCounter.Add(items.Count);

        return items;
    }

    /// <summary>
    /// Current queue depth.
    /// </summary>
    public int Count => _queue.Count;

    /// <summary>
    /// Peek at all pending items without removing them.
    /// </summary>
    public IReadOnlyList<TriageItem> PeekAll() => _queue.ToArray();
}
|
||||
@@ -0,0 +1,172 @@
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
|
||||
/// <summary>
/// Priority classification for triage items, derived from decay urgency.
/// </summary>
public enum TriagePriority
{
    /// <summary>Observation is fresh — no action required.</summary>
    None = 0,

    /// <summary>Approaching staleness (decay multiplier 0.50–0.70).</summary>
    Low = 1,

    /// <summary>Stale (decay multiplier 0.30–0.50).</summary>
    Medium = 2,

    /// <summary>Heavily decayed (decay multiplier 0.10–0.30).</summary>
    High = 3,

    /// <summary>At or near the floor — effectively no confidence (decay multiplier ≤ 0.10).</summary>
    Critical = 4
}
|
||||
|
||||
/// <summary>
/// A single unknown observation queued for triage.
/// </summary>
public sealed record TriageItem
{
    /// <summary>CVE identifier.</summary>
    [JsonPropertyName("cve")]
    public required string Cve { get; init; }

    /// <summary>Component PURL.</summary>
    [JsonPropertyName("purl")]
    public required string Purl { get; init; }

    /// <summary>Tenant identifier, for multi-tenant isolation.</summary>
    [JsonPropertyName("tenant_id")]
    public required string TenantId { get; init; }

    /// <summary>Decay state of the underlying observation.</summary>
    [JsonPropertyName("observation_decay")]
    public required ObservationDecay Decay { get; init; }

    /// <summary>Decay multiplier at the moment of evaluation.</summary>
    [JsonPropertyName("current_multiplier")]
    public required double CurrentMultiplier { get; init; }

    /// <summary>Priority derived from decay urgency.</summary>
    [JsonPropertyName("priority")]
    public required TriagePriority Priority { get; init; }

    /// <summary>Days since the last refresh, measured at evaluation time.</summary>
    [JsonPropertyName("age_days")]
    public required double AgeDays { get; init; }

    /// <summary>Days left until the staleness threshold is crossed; negative once stale.</summary>
    [JsonPropertyName("days_until_stale")]
    public required double DaysUntilStale { get; init; }

    /// <summary>Evaluation timestamp (UTC).</summary>
    [JsonPropertyName("evaluated_at")]
    public required DateTimeOffset EvaluatedAt { get; init; }

    /// <summary>Signal gaps contributing to uncertainty, when known.</summary>
    [JsonPropertyName("signal_gaps")]
    public IReadOnlyList<SignalGap> SignalGaps { get; init; } = [];

    /// <summary>Suggested next step for the operator, when one exists.</summary>
    [JsonPropertyName("recommended_action")]
    public string? RecommendedAction { get; init; }
}
|
||||
|
||||
/// <summary>
/// Outcome of evaluating a batch of observations for triage.
/// </summary>
public sealed record TriageQueueSnapshot
{
    /// <summary>Items ordered Critical-first, then by days-until-stale ascending.</summary>
    [JsonPropertyName("items")]
    public required IReadOnlyList<TriageItem> Items { get; init; }

    /// <summary>Number of observations that were evaluated.</summary>
    [JsonPropertyName("total_evaluated")]
    public required int TotalEvaluated { get; init; }

    /// <summary>Number of items that are already stale.</summary>
    [JsonPropertyName("stale_count")]
    public required int StaleCount { get; init; }

    /// <summary>Number of items approaching staleness (Low priority).</summary>
    [JsonPropertyName("approaching_count")]
    public required int ApproachingCount { get; init; }

    /// <summary>When the snapshot was computed (UTC).</summary>
    [JsonPropertyName("evaluated_at")]
    public required DateTimeOffset EvaluatedAt { get; init; }

    /// <summary>Item counts broken down by priority tier.</summary>
    [JsonPropertyName("priority_summary")]
    public required IReadOnlyDictionary<TriagePriority, int> PrioritySummary { get; init; }
}
|
||||
|
||||
/// <summary>
/// Configuration for triage queue evaluation thresholds.
/// </summary>
public sealed record TriageQueueOptions
{
    /// <summary>Default section name in appsettings.json.</summary>
    public const string SectionName = "Determinization:TriageQueue";

    /// <summary>
    /// Multiplier threshold for "approaching staleness" (Low priority).
    /// Observations with decay multiplier below this but above staleness are flagged.
    /// Default: 0.70
    /// </summary>
    public double ApproachingThreshold { get; init; } = 0.70;

    /// <summary>
    /// Multiplier threshold for High priority.
    /// Default: 0.30
    /// </summary>
    public double HighPriorityThreshold { get; init; } = 0.30;

    /// <summary>
    /// Multiplier threshold for Critical priority.
    /// Default: 0.10
    /// </summary>
    public double CriticalPriorityThreshold { get; init; } = 0.10;

    /// <summary>
    /// Maximum number of items to include in a snapshot.
    /// Default: 500
    /// </summary>
    public int MaxSnapshotItems { get; init; } = 500;

    /// <summary>
    /// Whether to include non-stale observations that are approaching staleness.
    /// Default: true
    /// </summary>
    public bool IncludeApproaching { get; init; } = true;

    /// <summary>
    /// Minimum interval between triage evaluations for the same observation in minutes.
    /// Default: 60
    /// </summary>
    public int MinEvaluationIntervalMinutes { get; init; } = 60;

    /// <summary>
    /// True when the configured values are mutually consistent:
    /// 0 &lt;= Critical &lt; High &lt; Approaching &lt;= 1, and both limits are positive.
    /// Added so binders/consumers can detect misconfiguration up front —
    /// previously an inverted threshold ordering was silently accepted.
    /// </summary>
    public bool IsValid =>
        CriticalPriorityThreshold >= 0.0
        && CriticalPriorityThreshold < HighPriorityThreshold
        && HighPriorityThreshold < ApproachingThreshold
        && ApproachingThreshold <= 1.0
        && MaxSnapshotItems > 0
        && MinEvaluationIntervalMinutes > 0;
}
|
||||
|
||||
/// <summary>
/// An observation submitted for triage evaluation.
/// </summary>
public sealed record TriageObservation
{
    /// <summary>CVE identifier.</summary>
    public required string Cve { get; init; }

    /// <summary>Component PURL.</summary>
    public required string Purl { get; init; }

    /// <summary>Tenant identifier.</summary>
    public required string TenantId { get; init; }

    /// <summary>Decay state of the observation.</summary>
    public required ObservationDecay Decay { get; init; }

    /// <summary>Signal gaps from the most recent uncertainty evaluation, when any.</summary>
    public IReadOnlyList<SignalGap> SignalGaps { get; init; } = [];
}
|
||||
@@ -0,0 +1,227 @@
|
||||
using System.Diagnostics.Metrics;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
|
||||
/// <summary>
|
||||
/// Evaluates observations for decay-based triage and produces priority-sorted snapshots.
|
||||
/// All calculations are deterministic given the same inputs and reference time.
|
||||
/// </summary>
|
||||
public sealed class TriageQueueEvaluator : ITriageQueueEvaluator
|
||||
{
|
||||
private static readonly Meter Meter = new("StellaOps.Policy.Determinization");

private static readonly Counter<long> ItemsEvaluatedCounter = Meter.CreateCounter<long>(
    "stellaops_triage_items_evaluated_total",
    unit: "{items}",
    description: "Total observations evaluated for triage");

private static readonly Counter<long> ItemsQueuedCounter = Meter.CreateCounter<long>(
    "stellaops_triage_items_queued_total",
    unit: "{items}",
    description: "Observations added to triage queue");

private static readonly Histogram<double> DecayMultiplierHistogram = Meter.CreateHistogram<double>(
    "stellaops_triage_decay_multiplier",
    unit: "ratio",
    description: "Decay multiplier distribution of triage items");

private readonly ILogger<TriageQueueEvaluator> _logger;
private readonly TriageQueueOptions _options;

/// <param name="logger">Logger for evaluation diagnostics; must not be null.</param>
/// <param name="options">Triage thresholds and limits; must not be null.</param>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
public TriageQueueEvaluator(
    ILogger<TriageQueueEvaluator> logger,
    IOptions<TriageQueueOptions> options)
{
    // FIX: validate eagerly; previously a null argument surfaced later as an NRE
    // (immediately for options via .Value, or on first log call for logger).
    ArgumentNullException.ThrowIfNull(logger);
    ArgumentNullException.ThrowIfNull(options);
    _logger = logger;
    _options = options.Value;
}
|
||||
|
||||
/// <inheritdoc />
public Task<TriageQueueSnapshot> EvaluateAsync(
    IReadOnlyList<TriageObservation> observations,
    DateTimeOffset now,
    CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(observations);

    var triageItems = new List<TriageItem>();

    foreach (var obs in observations)
    {
        cancellationToken.ThrowIfCancellationRequested();

        var item = EvaluateSingle(obs, now);
        if (item is not null)
        {
            triageItems.Add(item);
        }
    }

    // Deterministic order: Critical first, then by days-until-stale ascending
    // (most urgent first), with CVE/PURL as stable ordinal tiebreakers.
    var sorted = triageItems
        .OrderByDescending(i => i.Priority)
        .ThenBy(i => i.DaysUntilStale)
        .ThenBy(i => i.Cve, StringComparer.Ordinal)
        .ThenBy(i => i.Purl, StringComparer.Ordinal)
        .ToList();

    // FIX: compute summary statistics over ALL queued items, not the truncated
    // snapshot; previously StaleCount/ApproachingCount/PrioritySummary silently
    // under-reported whenever more than MaxSnapshotItems observations qualified.
    var prioritySummary = new Dictionary<TriagePriority, int>();
    foreach (var priority in Enum.GetValues<TriagePriority>())
    {
        var count = sorted.Count(i => i.Priority == priority);
        if (count > 0)
            prioritySummary[priority] = count;
    }

    var staleCount = sorted.Count(i => i.DaysUntilStale < 0);
    var approachingCount = sorted.Count(i => i.Priority == TriagePriority.Low);

    // Truncate only the snapshot contents; the stats above stay complete.
    var items = sorted.Take(_options.MaxSnapshotItems).ToList();

    var snapshot = new TriageQueueSnapshot
    {
        Items = items,
        TotalEvaluated = observations.Count,
        StaleCount = staleCount,
        ApproachingCount = approachingCount,
        EvaluatedAt = now,
        PrioritySummary = prioritySummary
    };

    // Emit metrics; "queued" still counts items included in the snapshot.
    ItemsEvaluatedCounter.Add(observations.Count);
    ItemsQueuedCounter.Add(items.Count);

    _logger.LogInformation(
        "Triage evaluation: {Total} observations, {Queued} queued ({Stale} stale, {Approaching} approaching)",
        observations.Count,
        items.Count,
        staleCount,
        approachingCount);

    return Task.FromResult(snapshot);
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public TriageItem? EvaluateSingle(TriageObservation observation, DateTimeOffset now)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(observation);
|
||||
|
||||
var decay = observation.Decay;
|
||||
var multiplier = decay.CalculateDecay(now);
|
||||
var ageDays = (now - decay.RefreshedAt).TotalDays;
|
||||
var isStale = decay.CheckIsStale(now);
|
||||
var priority = ClassifyPriority(multiplier, decay.StalenessThreshold);
|
||||
|
||||
// Skip if not stale and not approaching (unless IncludeApproaching is true)
|
||||
if (priority == TriagePriority.None)
|
||||
return null;
|
||||
|
||||
if (priority == TriagePriority.Low && !_options.IncludeApproaching)
|
||||
return null;
|
||||
|
||||
var daysUntilStale = CalculateDaysUntilStale(
|
||||
decay.RefreshedAt,
|
||||
decay.HalfLifeDays,
|
||||
decay.StalenessThreshold,
|
||||
decay.Floor,
|
||||
now);
|
||||
|
||||
var recommendedAction = DetermineRecommendedAction(priority, observation.SignalGaps);
|
||||
|
||||
// Emit per-item metric
|
||||
DecayMultiplierHistogram.Record(multiplier,
|
||||
new KeyValuePair<string, object?>("priority", priority.ToString()),
|
||||
new KeyValuePair<string, object?>("tenant_id", observation.TenantId));
|
||||
|
||||
return new TriageItem
|
||||
{
|
||||
Cve = observation.Cve,
|
||||
Purl = observation.Purl,
|
||||
TenantId = observation.TenantId,
|
||||
Decay = decay,
|
||||
CurrentMultiplier = multiplier,
|
||||
Priority = priority,
|
||||
AgeDays = Math.Max(0.0, ageDays),
|
||||
DaysUntilStale = daysUntilStale,
|
||||
EvaluatedAt = now,
|
||||
SignalGaps = observation.SignalGaps,
|
||||
RecommendedAction = recommendedAction
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Classifies triage priority based on current decay multiplier.
|
||||
/// </summary>
|
||||
internal TriagePriority ClassifyPriority(double multiplier, double stalenessThreshold)
|
||||
{
|
||||
if (multiplier <= _options.CriticalPriorityThreshold)
|
||||
return TriagePriority.Critical;
|
||||
|
||||
if (multiplier <= _options.HighPriorityThreshold)
|
||||
return TriagePriority.High;
|
||||
|
||||
if (multiplier <= stalenessThreshold)
|
||||
return TriagePriority.Medium;
|
||||
|
||||
if (multiplier <= _options.ApproachingThreshold)
|
||||
return TriagePriority.Low;
|
||||
|
||||
return TriagePriority.None;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calculates days until the observation crosses the staleness threshold.
|
||||
/// Negative values indicate the observation is already stale.
|
||||
/// Formula: days = -halfLife * ln(threshold) / ln(2), solving exp(-ln(2) * days / halfLife) = threshold
|
||||
/// </summary>
|
||||
internal static double CalculateDaysUntilStale(
|
||||
DateTimeOffset refreshedAt,
|
||||
double halfLifeDays,
|
||||
double stalenessThreshold,
|
||||
double floor,
|
||||
DateTimeOffset now)
|
||||
{
|
||||
// If floor >= threshold, the observation can never become stale via decay alone
|
||||
if (floor >= stalenessThreshold)
|
||||
return double.MaxValue;
|
||||
|
||||
// Days at which multiplier crosses threshold:
|
||||
// threshold = exp(-ln(2) * d / halfLife)
|
||||
// ln(threshold) = -ln(2) * d / halfLife
|
||||
// d = -halfLife * ln(threshold) / ln(2)
|
||||
var daysToThreshold = -halfLifeDays * Math.Log(stalenessThreshold) / Math.Log(2.0);
|
||||
var currentAgeDays = (now - refreshedAt).TotalDays;
|
||||
|
||||
return daysToThreshold - currentAgeDays;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines a recommended action based on priority and signal gaps.
|
||||
/// </summary>
|
||||
private static string? DetermineRecommendedAction(TriagePriority priority, IReadOnlyList<SignalGap> gaps)
|
||||
{
|
||||
if (gaps.Count > 0)
|
||||
{
|
||||
var missingSignals = string.Join(", ", gaps.Select(g => g.Signal));
|
||||
return priority switch
|
||||
{
|
||||
TriagePriority.Critical => $"URGENT: Re-analyse immediately. Missing signals: {missingSignals}",
|
||||
TriagePriority.High => $"Re-analyse soon. Missing signals: {missingSignals}",
|
||||
TriagePriority.Medium => $"Schedule re-analysis. Missing signals: {missingSignals}",
|
||||
TriagePriority.Low => $"Monitor — approaching staleness. Missing signals: {missingSignals}",
|
||||
_ => null
|
||||
};
|
||||
}
|
||||
|
||||
return priority switch
|
||||
{
|
||||
TriagePriority.Critical => "URGENT: Re-analyse immediately — evidence has decayed to floor",
|
||||
TriagePriority.High => "Re-analyse soon — evidence is heavily decayed",
|
||||
TriagePriority.Medium => "Schedule re-analysis — observation is stale",
|
||||
TriagePriority.Low => "Monitor — observation is approaching staleness",
|
||||
_ => null
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,139 @@
|
||||
using System.Diagnostics.Metrics;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
|
||||
/// <summary>
/// Background service that periodically evaluates observations for decay-based staleness
/// and enqueues stale unknowns for re-analysis.
///
/// This service is the event-driven mechanism that bridges ObservationDecay.CheckIsStale()
/// with the re-analysis pipeline, fulfilling the automated re-analysis triggering requirement.
/// </summary>
public sealed class UnknownTriageQueueService
{
    // Meter owning this class's counters and histogram.
    private static readonly Meter Meter = new("StellaOps.Policy.Determinization");

    private static readonly Counter<long> CyclesCounter = Meter.CreateCounter<long>(
        "stellaops_triage_cycles_total",
        unit: "{cycles}",
        description: "Total triage evaluation cycles executed");

    private static readonly Counter<long> EnqueuedCounter = Meter.CreateCounter<long>(
        "stellaops_triage_reanalysis_enqueued_total",
        unit: "{items}",
        description: "Total items enqueued for re-analysis");

    private static readonly Histogram<double> CycleDurationHistogram = Meter.CreateHistogram<double>(
        "stellaops_triage_cycle_duration_seconds",
        unit: "s",
        description: "Duration of triage evaluation cycles");

    private readonly ITriageQueueEvaluator _evaluator;   // classifies observations into priorities
    private readonly ITriageObservationSource _source;   // supplies candidate observations
    private readonly ITriageReanalysisSink _sink;        // receives stale items for re-analysis
    private readonly ILogger<UnknownTriageQueueService> _logger;
    private readonly TriageQueueOptions _options;
    private readonly TimeProvider _timeProvider;         // injected clock for testability

    /// <summary>
    /// Creates the triage queue service.
    /// </summary>
    /// <param name="evaluator">Evaluator that classifies observations.</param>
    /// <param name="source">Source of candidate observations.</param>
    /// <param name="sink">Sink receiving items enqueued for re-analysis.</param>
    /// <param name="logger">Cycle logging.</param>
    /// <param name="options">Triage thresholds and limits.</param>
    /// <param name="timeProvider">Optional clock; defaults to the system clock.</param>
    // NOTE(review): dependencies are not null-checked here (unlike
    // TrustScoreAlgebraFacade's constructor); a null options throws a bare
    // NullReferenceException at options.Value — confirm whether guards are wanted.
    public UnknownTriageQueueService(
        ITriageQueueEvaluator evaluator,
        ITriageObservationSource source,
        ITriageReanalysisSink sink,
        ILogger<UnknownTriageQueueService> logger,
        IOptions<TriageQueueOptions> options,
        TimeProvider? timeProvider = null)
    {
        _evaluator = evaluator;
        _source = source;
        _sink = sink;
        _logger = logger;
        _options = options.Value;
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Execute a single triage cycle: fetch candidates, evaluate, enqueue stale items.
    /// This method is designed to be called by a background host, timer, or scheduler.
    /// </summary>
    /// <param name="tenantId">Optional tenant filter.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The triage snapshot from this cycle.</returns>
    // NOTE(review): the cycle counter and duration histogram are recorded only
    // on the success path; cancelled/failed cycles are rethrown without being
    // measured — confirm this is intended.
    public async Task<TriageQueueSnapshot> ExecuteCycleAsync(
        string? tenantId = null,
        CancellationToken cancellationToken = default)
    {
        var now = _timeProvider.GetUtcNow();
        var sw = System.Diagnostics.Stopwatch.StartNew();

        _logger.LogInformation(
            "Starting triage cycle at {Now:O} for tenant {TenantId}",
            now,
            tenantId ?? "(all)");

        try
        {
            // 1. Fetch candidate observations (bounded by the snapshot cap).
            var candidates = await _source.GetCandidatesAsync(
                tenantId,
                _options.MaxSnapshotItems,
                cancellationToken);

            _logger.LogDebug("Fetched {Count} candidate observations", candidates.Count);

            // 2. Evaluate for triage
            var snapshot = await _evaluator.EvaluateAsync(candidates, now, cancellationToken);

            // 3. Enqueue stale items for re-analysis (Medium, High, Critical)
            var reanalysisItems = snapshot.Items
                .Where(i => i.Priority >= TriagePriority.Medium)
                .ToList();

            var enqueued = 0;
            if (reanalysisItems.Count > 0)
            {
                // The sink reports how many it actually accepted.
                enqueued = await _sink.EnqueueAsync(reanalysisItems, cancellationToken);
                EnqueuedCounter.Add(enqueued);
            }

            sw.Stop();
            CyclesCounter.Add(1);
            CycleDurationHistogram.Record(sw.Elapsed.TotalSeconds,
                new KeyValuePair<string, object?>("tenant_id", tenantId ?? "all"));

            _logger.LogInformation(
                "Triage cycle complete: {Evaluated} evaluated, {Queued} queued, {Enqueued} enqueued for re-analysis ({Duration:F2}s)",
                snapshot.TotalEvaluated,
                snapshot.Items.Count,
                enqueued,
                sw.Elapsed.TotalSeconds);

            return snapshot;
        }
        catch (OperationCanceledException)
        {
            // Cancellation is expected during shutdown; log and propagate.
            _logger.LogWarning("Triage cycle cancelled");
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Triage cycle failed");
            throw;
        }
    }

    /// <summary>
    /// Evaluate a specific set of observations (for on-demand triage, e.g. CLI/API).
    /// Does not enqueue — returns the snapshot for the caller to act on.
    /// </summary>
    /// <param name="observations">Observations to evaluate.</param>
    /// <param name="now">Reference time.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Triage queue snapshot.</returns>
    public Task<TriageQueueSnapshot> EvaluateOnDemandAsync(
        IReadOnlyList<TriageObservation> observations,
        DateTimeOffset now,
        CancellationToken cancellationToken = default)
    {
        return _evaluator.EvaluateAsync(observations, now, cancellationToken);
    }
}
|
||||
@@ -0,0 +1,340 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Policy.Determinization.Evidence;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using StellaOps.Policy.Scoring;
|
||||
using StellaOps.Policy.TrustLattice;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
/// Unified facade composing TrustScoreAggregator + K4Lattice + ScorePolicy into a single
/// deterministic scoring pipeline. Entry point for computing trust scores with full
/// explainability and attestation-ready output.
/// </summary>
public interface ITrustScoreAlgebraFacade
{
    /// <summary>
    /// Compute a complete trust score for an artifact.
    /// </summary>
    /// <param name="request">Scoring request with artifact, signals, and optional policy override.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Complete scoring result with Score.v1 predicate.</returns>
    Task<TrustScoreResult> ComputeTrustScoreAsync(
        TrustScoreRequest request,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Compute trust score synchronously (for batch/offline use).
    /// </summary>
    /// <param name="request">Scoring request with artifact, signals, and optional policy override.</param>
    /// <returns>Complete scoring result with Score.v1 predicate.</returns>
    TrustScoreResult ComputeTrustScore(TrustScoreRequest request);
}
|
||||
|
||||
/// <summary>
/// Implementation of the trust score algebra facade.
/// Composes all scoring components into a deterministic pipeline.
/// </summary>
public sealed class TrustScoreAlgebraFacade : ITrustScoreAlgebraFacade
{
    // Weighted signal aggregation engine (required dependency).
    private readonly TrustScoreAggregator _aggregator;

    // Entropy-based uncertainty calculator (required dependency).
    private readonly UncertaintyScoreCalculator _uncertaintyCalculator;

    // Optional logger; a no-op NullLogger is substituted when omitted.
    private readonly ILogger<TrustScoreAlgebraFacade> _logger;

    // Clock abstraction so ComputedAt timestamps are testable; defaults to the system clock.
    private readonly TimeProvider _timeProvider;

    // Compact camelCase JSON options used when hashing the policy for the digest.
    private readonly JsonSerializerOptions _jsonOptions;

    /// <summary>
    /// Creates the facade.
    /// </summary>
    /// <param name="aggregator">Signal aggregation engine.</param>
    /// <param name="uncertaintyCalculator">Uncertainty (entropy) calculator.</param>
    /// <param name="logger">Optional logger; a null logger is used when omitted.</param>
    /// <param name="timeProvider">Optional clock; the system clock is used when omitted.</param>
    /// <exception cref="ArgumentNullException">
    /// Thrown when <paramref name="aggregator"/> or <paramref name="uncertaintyCalculator"/> is null.
    /// </exception>
    public TrustScoreAlgebraFacade(
        TrustScoreAggregator aggregator,
        UncertaintyScoreCalculator uncertaintyCalculator,
        ILogger<TrustScoreAlgebraFacade>? logger = null,
        TimeProvider? timeProvider = null)
    {
        _aggregator = aggregator ?? throw new ArgumentNullException(nameof(aggregator));
        _uncertaintyCalculator = uncertaintyCalculator ?? throw new ArgumentNullException(nameof(uncertaintyCalculator));
        _logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<TrustScoreAlgebraFacade>.Instance;
        _timeProvider = timeProvider ?? TimeProvider.System;
        _jsonOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            WriteIndented = false
        };
    }
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<TrustScoreResult> ComputeTrustScoreAsync(
|
||||
TrustScoreRequest request,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
// Scoring is CPU-bound and deterministic; run synchronously
|
||||
var result = ComputeTrustScore(request);
|
||||
return Task.FromResult(result);
|
||||
}
|
||||
|
||||
    /// <inheritdoc />
    /// <remarks>
    /// Never throws for scoring failures: any exception inside the pipeline is
    /// caught, logged, and returned as a result with Success = false and the
    /// exception message in Error. Argument validation failures still throw.
    /// NOTE(review): the aggregator output <c>trustScore</c> (Step 2) is
    /// computed but not used in the predicate — the final score comes from
    /// ComputeWeightedScoreBps. Confirm whether the aggregator result should
    /// feed the output or whether the call can be removed.
    /// </remarks>
    public TrustScoreResult ComputeTrustScore(TrustScoreRequest request)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.ArtifactId);

        try
        {
            var now = _timeProvider.GetUtcNow();
            var policy = request.PolicyOverride ?? ScorePolicy.Default;
            // Missing signals degrade to an empty snapshot so the pipeline
            // always produces a (maximally uncertain) result.
            var signals = request.Signals ?? SignalSnapshot.Empty(
                request.VulnerabilityId ?? "UNKNOWN",
                request.ArtifactId,
                now);

            // Step 1: Calculate uncertainty score
            var uncertaintyScore = _uncertaintyCalculator.Calculate(signals);

            // Step 2: Aggregate signals using weighted formula
            var trustScore = _aggregator.Aggregate(signals, uncertaintyScore);

            // Step 3: Compute K4 lattice verdict
            var latticeVerdict = ComputeLatticeVerdict(signals);

            // Step 4: Extract dimension scores
            var dimensions = ExtractDimensions(signals, policy);

            // Step 5: Compute weighted final score in basis points
            var finalBps = ComputeWeightedScoreBps(dimensions, policy.WeightsBps);

            // Step 6: Determine risk tier
            var tier = DetermineRiskTier(finalBps);

            // Step 7: Compute policy digest (ties the result to the exact policy used)
            var policyDigest = ComputePolicyDigest(policy);

            // Step 8: Build Score.v1 predicate
            var predicate = new ScoreV1Predicate
            {
                ArtifactId = request.ArtifactId,
                VulnerabilityId = request.VulnerabilityId,
                TrustScoreBps = finalBps,
                Tier = tier.ToString(),
                LatticeVerdict = latticeVerdict,
                UncertaintyBps = ToBasisPoints(uncertaintyScore.Entropy),
                Dimensions = dimensions,
                WeightsUsed = policy.WeightsBps,
                PolicyDigest = policyDigest,
                ComputedAt = now,
                TenantId = request.TenantId
            };

            _logger.LogDebug(
                "Computed trust score for {ArtifactId}: {ScoreBps}bps ({Tier}), lattice={Verdict}",
                request.ArtifactId, finalBps, tier, latticeVerdict);

            return new TrustScoreResult
            {
                Predicate = predicate,
                SignalsUsed = signals,
                Success = true
            };
        }
        catch (Exception ex)
        {
            // Failure path: deliver an empty, maximally-uncertain predicate
            // instead of propagating the exception to callers.
            _logger.LogError(ex, "Failed to compute trust score for {ArtifactId}", request.ArtifactId);
            return new TrustScoreResult
            {
                Predicate = CreateEmptyPredicate(request),
                SignalsUsed = request.Signals ?? SignalSnapshot.Empty(
                    request.VulnerabilityId ?? "UNKNOWN",
                    request.ArtifactId,
                    _timeProvider.GetUtcNow()),
                Success = false,
                Error = ex.Message
            };
        }
    }
|
||||
|
||||
/// <summary>
|
||||
/// Compute K4 lattice verdict from signal states.
|
||||
/// </summary>
|
||||
private static K4Value ComputeLatticeVerdict(SignalSnapshot signals)
|
||||
{
|
||||
var values = new List<K4Value>();
|
||||
|
||||
// Map each signal to K4 value
|
||||
if (!signals.Vex.IsNotQueried)
|
||||
{
|
||||
values.Add(signals.Vex.Value?.Status?.ToLowerInvariant() switch
|
||||
{
|
||||
"affected" => K4Value.True, // Vulnerability confirmed
|
||||
"not_affected" => K4Value.False, // Vulnerability not present
|
||||
"fixed" => K4Value.False, // Fixed = not vulnerable
|
||||
"under_investigation" => K4Value.Unknown,
|
||||
_ => K4Value.Unknown
|
||||
});
|
||||
}
|
||||
|
||||
if (!signals.Reachability.IsNotQueried)
|
||||
{
|
||||
values.Add(signals.Reachability.Value?.Status switch
|
||||
{
|
||||
ReachabilityStatus.Reachable => K4Value.True,
|
||||
ReachabilityStatus.Unreachable => K4Value.False,
|
||||
ReachabilityStatus.Unknown => K4Value.Unknown,
|
||||
_ => K4Value.Unknown
|
||||
});
|
||||
}
|
||||
|
||||
if (!signals.Epss.IsNotQueried && signals.Epss.Value is not null)
|
||||
{
|
||||
// High EPSS = likely exploitable
|
||||
values.Add(signals.Epss.Value.Epss >= 0.5 ? K4Value.True : K4Value.False);
|
||||
}
|
||||
|
||||
// Join all values using K4 lattice
|
||||
return K4Lattice.JoinAll(values);
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Extract dimension scores (basis points) from signals.
    /// </summary>
    /// <param name="signals">Signal snapshot to score.</param>
    /// <param name="policy">Scoring policy.</param>
    /// <returns>Core dimensions plus optional EPSS/VEX dimensions (null when absent).</returns>
    // NOTE(review): the policy parameter is currently unused here — base
    // severity is a fixed default and all band constants are hard-coded.
    // Confirm whether policy-driven severity/bands are intended.
    private static ScoreDimensionsBps ExtractDimensions(SignalSnapshot signals, ScorePolicy policy)
    {
        // Base severity from CVSS or default
        var baseSeverityBps = 5000; // Default to medium if no CVSS

        // Reachability: definitive statuses map to the extremes, everything else mid-range.
        var reachabilityBps = signals.Reachability.Value?.Status switch
        {
            ReachabilityStatus.Reachable => 10000,
            ReachabilityStatus.Unreachable => 0,
            _ => 5000 // Unknown = mid-range
        };

        // Evidence quality: stepped bands driven by how many signals are present.
        var signalCount = CountPresentSignals(signals);
        var evidenceBps = signalCount switch
        {
            >= 5 => 9000,
            4 => 7500,
            3 => 6000,
            2 => 4000,
            1 => 2000,
            _ => 1000
        };

        // Provenance (SBOM lineage quality): binary — SBOM present or not.
        var provenanceBps = signals.Sbom.Value is not null ? 8000 : 3000;

        // Optional dimensions: null when the underlying signal is absent.
        int? epssBps = signals.Epss.Value is not null
            ? ToBasisPoints(signals.Epss.Value.Epss)
            : null;

        int? vexBps = signals.Vex.Value?.Status?.ToLowerInvariant() switch
        {
            "affected" => 10000,
            "under_investigation" => 7000,
            "fixed" => 1000,
            "not_affected" => 0,
            _ => null
        };

        return new ScoreDimensionsBps
        {
            BaseSeverityBps = baseSeverityBps,
            ReachabilityBps = reachabilityBps,
            EvidenceBps = evidenceBps,
            ProvenanceBps = provenanceBps,
            EpssBps = epssBps,
            VexBps = vexBps
        };
    }
|
||||
|
||||
/// <summary>
|
||||
/// Compute final weighted score in basis points.
|
||||
/// </summary>
|
||||
private static int ComputeWeightedScoreBps(ScoreDimensionsBps dimensions, WeightsBps weights)
|
||||
{
|
||||
// Weighted average: Σ(dimension * weight) / Σ(weights)
|
||||
// Since weights sum to 10000, we can use: Σ(dimension * weight) / 10000
|
||||
|
||||
long weighted =
|
||||
(long)dimensions.BaseSeverityBps * weights.BaseSeverity +
|
||||
(long)dimensions.ReachabilityBps * weights.Reachability +
|
||||
(long)dimensions.EvidenceBps * weights.Evidence +
|
||||
(long)dimensions.ProvenanceBps * weights.Provenance;
|
||||
|
||||
var result = (int)(weighted / 10000);
|
||||
return Math.Clamp(result, 0, 10000);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determine risk tier from basis point score.
|
||||
/// </summary>
|
||||
private static RiskTier DetermineRiskTier(int scoreBps)
|
||||
{
|
||||
return scoreBps switch
|
||||
{
|
||||
>= 9000 => RiskTier.Critical,
|
||||
>= 7000 => RiskTier.High,
|
||||
>= 4000 => RiskTier.Medium,
|
||||
>= 1000 => RiskTier.Low,
|
||||
_ => RiskTier.Info
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compute SHA-256 digest of policy for reproducibility.
|
||||
/// </summary>
|
||||
private string ComputePolicyDigest(ScorePolicy policy)
|
||||
{
|
||||
var json = JsonSerializer.Serialize(policy, _jsonOptions);
|
||||
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(json));
|
||||
return Convert.ToHexStringLower(bytes);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Count present (non-null, non-queried) signals.
|
||||
/// </summary>
|
||||
private static int CountPresentSignals(SignalSnapshot signals)
|
||||
{
|
||||
var count = 0;
|
||||
if (!signals.Vex.IsNotQueried && signals.Vex.Value is not null) count++;
|
||||
if (!signals.Epss.IsNotQueried && signals.Epss.Value is not null) count++;
|
||||
if (!signals.Reachability.IsNotQueried && signals.Reachability.Value is not null) count++;
|
||||
if (!signals.Runtime.IsNotQueried && signals.Runtime.Value is not null) count++;
|
||||
if (!signals.Backport.IsNotQueried && signals.Backport.Value is not null) count++;
|
||||
if (!signals.Sbom.IsNotQueried && signals.Sbom.Value is not null) count++;
|
||||
return count;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Convert a 0.0-1.0 double to basis points.
|
||||
/// </summary>
|
||||
private static int ToBasisPoints(double value) =>
|
||||
Math.Clamp((int)(value * 10000), 0, 10000);
|
||||
|
||||
    /// <summary>
    /// Create empty predicate for error cases: zero score, Info tier, Unknown
    /// lattice verdict, maximum uncertainty (10000 bps), zeroed dimensions,
    /// default weights, and the sentinel digest "error".
    /// </summary>
    /// <param name="request">Original request; artifact/vulnerability/tenant IDs are echoed back.</param>
    private ScoreV1Predicate CreateEmptyPredicate(TrustScoreRequest request)
    {
        return new ScoreV1Predicate
        {
            ArtifactId = request.ArtifactId,
            VulnerabilityId = request.VulnerabilityId,
            TrustScoreBps = 0,
            Tier = RiskTier.Info.ToString(),
            LatticeVerdict = K4Value.Unknown,
            // Nothing could be computed, so uncertainty is maximal.
            UncertaintyBps = 10000,
            Dimensions = new ScoreDimensionsBps
            {
                BaseSeverityBps = 0,
                ReachabilityBps = 0,
                EvidenceBps = 0,
                ProvenanceBps = 0
            },
            WeightsUsed = WeightsBps.Default,
            // Sentinel: marks predicates produced by the failure path.
            PolicyDigest = "error",
            ComputedAt = _timeProvider.GetUtcNow(),
            TenantId = request.TenantId
        };
    }
||||
}
|
||||
@@ -0,0 +1,59 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IWeightManifestLoader.cs
|
||||
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
|
||||
// Task: T1 - Weight manifest loader interface
|
||||
// Description: Contract for discovering, loading, validating, and selecting
|
||||
// versioned weight manifests from the file system.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
/// <summary>
/// Discovers, loads, validates, and selects versioned weight manifests.
/// </summary>
public interface IWeightManifestLoader
{
    /// <summary>
    /// Lists all available weight manifests discovered in the configured directory,
    /// sorted by <c>effectiveFrom</c> descending (most recent first).
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>All discovered manifest load results.</returns>
    Task<ImmutableArray<WeightManifestLoadResult>> ListAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// Loads and validates a specific manifest file by path.
    /// </summary>
    /// <param name="filePath">Absolute or relative path to the manifest file.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Load result containing the manifest and hash verification status.</returns>
    Task<WeightManifestLoadResult> LoadAsync(string filePath, CancellationToken cancellationToken = default);

    /// <summary>
    /// Selects the manifest effective for a given reference date.
    /// Picks the most recent manifest where <c>effectiveFrom ≤ referenceDate</c>.
    /// </summary>
    /// <param name="referenceDate">The date to select for (typically <c>DateTimeOffset.UtcNow</c>).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The applicable manifest, or <c>null</c> if none is effective.</returns>
    Task<WeightManifestLoadResult?> SelectEffectiveAsync(
        DateTimeOffset referenceDate,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Validates a manifest: schema version, weight normalization, content hash.
    /// </summary>
    /// <param name="result">The load result to validate.</param>
    /// <returns>Validation issues found (empty if valid).</returns>
    ImmutableArray<string> Validate(WeightManifestLoadResult result);

    /// <summary>
    /// Computes a diff between two manifests, comparing weight values and thresholds.
    /// </summary>
    /// <param name="from">Source (older) manifest.</param>
    /// <param name="to">Target (newer) manifest.</param>
    /// <returns>Diff summary describing changes from <paramref name="from"/> to <paramref name="to"/>.</returns>
    WeightManifestDiff Diff(WeightManifestDocument from, WeightManifestDocument to);
}
|
||||
@@ -0,0 +1,277 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// WeightManifestCommands.cs
|
||||
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
|
||||
// Task: T1 - CLI weight management commands
|
||||
// Description: Service-level commands that back the `stella weights` CLI:
|
||||
// list, validate, diff, activate, hash. Each produces a
|
||||
// deterministic, serializable result model.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
/// <summary>
|
||||
/// Provides the backing logic for CLI weight management commands:
|
||||
/// <c>stella weights list</c>, <c>stella weights validate</c>,
|
||||
/// <c>stella weights diff</c>, <c>stella weights activate</c>,
|
||||
/// <c>stella weights hash</c>.
|
||||
/// </summary>
|
||||
public sealed class WeightManifestCommands
|
||||
{
|
||||
    // Loader used to discover, load, validate, select, and diff weight manifests.
    private readonly IWeightManifestLoader _loader;

    /// <summary>
    /// Creates the command backend for the <c>stella weights</c> CLI.
    /// </summary>
    /// <param name="loader">Manifest loader.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="loader"/> is null.</exception>
    public WeightManifestCommands(IWeightManifestLoader loader)
    {
        _loader = loader ?? throw new ArgumentNullException(nameof(loader));
    }
|
||||
|
||||
// ── stella weights list ──────────────────────────────────────────────
|
||||
|
||||
/// <summary>
|
||||
/// Lists all discovered weight manifests with their versions,
|
||||
/// effective dates, profiles, and hash status.
|
||||
/// </summary>
|
||||
public async Task<WeightsListResult> ListAsync(CancellationToken ct = default)
|
||||
{
|
||||
var manifests = await _loader.ListAsync(ct).ConfigureAwait(false);
|
||||
|
||||
var entries = manifests.Select(r => new WeightsListEntry
|
||||
{
|
||||
Version = r.Manifest.Version,
|
||||
EffectiveFrom = r.Manifest.EffectiveFrom,
|
||||
Profile = r.Manifest.Profile,
|
||||
HashStatus = r.Manifest.HasComputedHash
|
||||
? (r.HashVerified ? "verified" : "mismatch")
|
||||
: "auto",
|
||||
SourcePath = r.SourcePath,
|
||||
Description = r.Manifest.Description
|
||||
}).ToImmutableArray();
|
||||
|
||||
return new WeightsListResult { Entries = entries };
|
||||
}
|
||||
|
||||
// ── stella weights validate ──────────────────────────────────────────
|
||||
|
||||
/// <summary>
|
||||
/// Validates a specific manifest file or all discovered manifests.
|
||||
/// </summary>
|
||||
/// <param name="filePath">
|
||||
/// If specified, validate only this file. Otherwise validate all discovered manifests.
|
||||
/// </param>
|
||||
public async Task<WeightsValidateResult> ValidateAsync(
|
||||
string? filePath = null,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var results = new List<WeightsValidateEntry>();
|
||||
|
||||
if (!string.IsNullOrEmpty(filePath))
|
||||
{
|
||||
var loadResult = await _loader.LoadAsync(filePath, ct).ConfigureAwait(false);
|
||||
var issues = _loader.Validate(loadResult);
|
||||
results.Add(new WeightsValidateEntry
|
||||
{
|
||||
Version = loadResult.Manifest.Version,
|
||||
SourcePath = loadResult.SourcePath,
|
||||
Issues = issues,
|
||||
IsValid = issues.IsEmpty
|
||||
});
|
||||
}
|
||||
else
|
||||
{
|
||||
var all = await _loader.ListAsync(ct).ConfigureAwait(false);
|
||||
foreach (var loadResult in all)
|
||||
{
|
||||
var issues = _loader.Validate(loadResult);
|
||||
results.Add(new WeightsValidateEntry
|
||||
{
|
||||
Version = loadResult.Manifest.Version,
|
||||
SourcePath = loadResult.SourcePath,
|
||||
Issues = issues,
|
||||
IsValid = issues.IsEmpty
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return new WeightsValidateResult
|
||||
{
|
||||
Entries = [.. results],
|
||||
AllValid = results.TrueForAll(e => e.IsValid)
|
||||
};
|
||||
}
|
||||
|
||||
// ── stella weights diff ──────────────────────────────────────────────
|
||||
|
||||
/// <summary>
|
||||
/// Diffs two manifest files, or two versions by version identifier.
|
||||
/// </summary>
|
||||
public async Task<WeightManifestDiff> DiffAsync(
|
||||
string fromPath,
|
||||
string toPath,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(fromPath);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(toPath);
|
||||
|
||||
var from = await _loader.LoadAsync(fromPath, ct).ConfigureAwait(false);
|
||||
var to = await _loader.LoadAsync(toPath, ct).ConfigureAwait(false);
|
||||
|
||||
return _loader.Diff(from.Manifest, to.Manifest);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Diffs two manifests by version string (searches the discovered set).
|
||||
/// </summary>
|
||||
public async Task<WeightManifestDiff> DiffByVersionAsync(
|
||||
string fromVersion,
|
||||
string toVersion,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(fromVersion);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(toVersion);
|
||||
|
||||
var all = await _loader.ListAsync(ct).ConfigureAwait(false);
|
||||
|
||||
var from = all.FirstOrDefault(r =>
|
||||
string.Equals(r.Manifest.Version, fromVersion, StringComparison.OrdinalIgnoreCase));
|
||||
var to = all.FirstOrDefault(r =>
|
||||
string.Equals(r.Manifest.Version, toVersion, StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
if (from is null)
|
||||
throw new WeightManifestLoadException($"Manifest version '{fromVersion}' not found.");
|
||||
if (to is null)
|
||||
throw new WeightManifestLoadException($"Manifest version '{toVersion}' not found.");
|
||||
|
||||
return _loader.Diff(from.Manifest, to.Manifest);
|
||||
}
|
||||
|
||||
// ── stella weights activate ──────────────────────────────────────────

/// <summary>
/// Selects the currently active (effective) manifest for a given date.
/// </summary>
/// <param name="referenceDate">Date to evaluate against; defaults to the current UTC time.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>
/// A result with <c>Found = false</c> when no manifest is effective at the
/// reference date; otherwise the active manifest's identity and hash.
/// </returns>
public async Task<WeightsActivateResult> ActivateAsync(
    DateTimeOffset? referenceDate = null,
    CancellationToken ct = default)
{
    var effectiveDate = referenceDate ?? DateTimeOffset.UtcNow;
    var selected = await _loader.SelectEffectiveAsync(effectiveDate, ct).ConfigureAwait(false);

    return selected is null
        ? new WeightsActivateResult
        {
            Found = false,
            ReferenceDate = effectiveDate,
            Version = null,
            SourcePath = null,
            ContentHash = null
        }
        : new WeightsActivateResult
        {
            Found = true,
            ReferenceDate = effectiveDate,
            Version = selected.Manifest.Version,
            SourcePath = selected.SourcePath,
            ContentHash = selected.ComputedHash,
            EffectiveFrom = selected.Manifest.EffectiveFrom,
            Profile = selected.Manifest.Profile
        };
}
|
||||
|
||||
// ── stella weights hash ──────────────────────────────────────────────

/// <summary>
/// Computes the content hash for a manifest file and optionally replaces
/// the "sha256:auto" placeholder in-place.
/// </summary>
/// <param name="filePath">Path to the manifest file.</param>
/// <param name="writeBack">If true, writes the computed hash back to the file.</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>The computed hash plus whether the placeholder was present/replaced.</returns>
public async Task<WeightsHashResult> HashAsync(
    string filePath,
    bool writeBack = false,
    CancellationToken ct = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(filePath);

    var fullPath = Path.GetFullPath(filePath);
    var content = await File.ReadAllTextAsync(fullPath, ct).ConfigureAwait(false);

    var hash = WeightManifestHashComputer.ComputeFromJson(content);
    var containsPlaceholder = content.Contains(
        WeightManifestConstants.AutoHashPlaceholder, StringComparison.Ordinal);

    // Only rewrite the file when asked to AND there is a placeholder to replace;
    // a file with a concrete hash is left untouched.
    var shouldWrite = writeBack && containsPlaceholder;
    if (shouldWrite)
    {
        var (rewritten, _) = WeightManifestHashComputer.ReplaceAutoHash(content);
        await File.WriteAllTextAsync(fullPath, rewritten, ct).ConfigureAwait(false);
    }

    return new WeightsHashResult
    {
        SourcePath = fullPath,
        ComputedHash = hash,
        HadPlaceholder = containsPlaceholder,
        WrittenBack = shouldWrite
    };
}
|
||||
}
|
||||
|
||||
// ── CLI result models ────────────────────────────────────────────────────────
|
||||
|
||||
/// <summary>Result of <c>stella weights list</c>.</summary>
public sealed record WeightsListResult
{
    /// <summary>Discovered manifest entries (one per manifest file).</summary>
    public required ImmutableArray<WeightsListEntry> Entries { get; init; }
}
|
||||
|
||||
/// <summary>One discovered manifest, as shown by <c>stella weights list</c>.</summary>
public sealed record WeightsListEntry
{
    /// <summary>Manifest version identifier.</summary>
    public required string Version { get; init; }

    /// <summary>UTC date from which the manifest is effective.</summary>
    public required DateTimeOffset EffectiveFrom { get; init; }

    /// <summary>Profile name (e.g. "production").</summary>
    public required string Profile { get; init; }

    /// <summary>Display status of the content hash — presumably verified/mismatch/placeholder; confirm against the list command.</summary>
    public required string HashStatus { get; init; }

    /// <summary>Path of the manifest file the entry was loaded from.</summary>
    public required string SourcePath { get; init; }

    /// <summary>Optional human-readable description from the manifest.</summary>
    public string? Description { get; init; }
}
|
||||
|
||||
/// <summary>Result of <c>stella weights validate</c>.</summary>
public sealed record WeightsValidateResult
{
    /// <summary>Per-manifest validation outcomes.</summary>
    public required ImmutableArray<WeightsValidateEntry> Entries { get; init; }

    /// <summary>True when every entry passed validation.</summary>
    public required bool AllValid { get; init; }
}
|
||||
|
||||
/// <summary>Validation outcome for a single manifest.</summary>
public sealed record WeightsValidateEntry
{
    /// <summary>Manifest version identifier.</summary>
    public required string Version { get; init; }

    /// <summary>Path of the manifest file that was validated.</summary>
    public required string SourcePath { get; init; }

    /// <summary>Validation issue messages; empty when the manifest is valid.</summary>
    public required ImmutableArray<string> Issues { get; init; }

    /// <summary>True when no validation issues were found.</summary>
    public required bool IsValid { get; init; }
}
|
||||
|
||||
/// <summary>Result of <c>stella weights activate</c>.</summary>
public sealed record WeightsActivateResult
{
    /// <summary>True when a manifest was effective at <see cref="ReferenceDate"/>.</summary>
    public required bool Found { get; init; }

    /// <summary>Date against which the effective manifest was selected.</summary>
    public required DateTimeOffset ReferenceDate { get; init; }

    /// <summary>Active manifest version; null when not found.</summary>
    public string? Version { get; init; }

    /// <summary>Path of the active manifest file; null when not found.</summary>
    public string? SourcePath { get; init; }

    /// <summary>Computed content hash of the active manifest; null when not found.</summary>
    public string? ContentHash { get; init; }

    /// <summary>Effective-from date of the active manifest; null when not found.</summary>
    public DateTimeOffset? EffectiveFrom { get; init; }

    /// <summary>Profile of the active manifest; null when not found.</summary>
    public string? Profile { get; init; }
}
|
||||
|
||||
/// <summary>Result of <c>stella weights hash</c>.</summary>
public sealed record WeightsHashResult
{
    /// <summary>Resolved (absolute) path of the hashed manifest file.</summary>
    public required string SourcePath { get; init; }

    /// <summary>Computed content hash in "sha256:{hex}" format.</summary>
    public required string ComputedHash { get; init; }

    /// <summary>True when the file contained the "sha256:auto" placeholder.</summary>
    public required bool HadPlaceholder { get; init; }

    /// <summary>True when the computed hash was written back to the file.</summary>
    public required bool WrittenBack { get; init; }
}
|
||||
@@ -0,0 +1,185 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// WeightManifestHashComputer.cs
|
||||
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
|
||||
// Task: T1 - Content hash auto-compute
|
||||
// Description: Deterministic SHA-256 content hash computation for weight
|
||||
// manifests. Hashes the canonical content (excluding the
|
||||
// contentHash field itself) to produce a stable digest.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
/// <summary>
/// Computes deterministic SHA-256 content hashes for weight manifests.
/// The hash covers all content except the <c>contentHash</c> field itself.
/// </summary>
public static class WeightManifestHashComputer
{
    // Canonical serializer settings used when re-serializing a manifest record
    // prior to hashing: compact output, camelCase names, nulls omitted.
    private static readonly JsonSerializerOptions CanonicalOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
    };

    /// <summary>
    /// Computes the SHA-256 content hash for a manifest's raw JSON content.
    /// The <c>contentHash</c> field is excluded from the hash input to allow
    /// the hash to be embedded in the same document it covers.
    /// </summary>
    /// <param name="jsonContent">Raw JSON content of the manifest file.</param>
    /// <returns>Hash in "sha256:{hex}" format.</returns>
    /// <exception cref="JsonException">The content is not valid JSON.</exception>
    public static string ComputeFromJson(string jsonContent)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(jsonContent);

        // Parse JSON, remove contentHash field, re-serialize canonically.
        // JsonDocument rents pooled buffers and must be disposed — the
        // original leaked it on every call.
        using var doc = JsonDocument.Parse(jsonContent);
        var canonical = BuildCanonicalContent(doc.RootElement);

        var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
        return $"{WeightManifestConstants.HashPrefix}{Convert.ToHexStringLower(hashBytes)}";
    }

    /// <summary>
    /// Computes the SHA-256 content hash for a deserialized manifest.
    /// Re-serializes with the <c>contentHash</c> set to the placeholder,
    /// then hashes the canonical form.
    /// </summary>
    /// <param name="manifest">The manifest document to hash.</param>
    /// <returns>Hash in "sha256:{hex}" format.</returns>
    public static string ComputeFromManifest(WeightManifestDocument manifest)
    {
        ArgumentNullException.ThrowIfNull(manifest);

        // Serialize with placeholder to ensure contentHash doesn't affect the result
        // (the canonicalizer strips the field regardless of its value).
        var withPlaceholder = manifest with
        {
            ContentHash = WeightManifestConstants.AutoHashPlaceholder
        };

        var json = JsonSerializer.Serialize(withPlaceholder, CanonicalOptions);
        return ComputeFromJson(json);
    }

    /// <summary>
    /// Verifies that a manifest's stored content hash matches its computed hash.
    /// </summary>
    /// <param name="jsonContent">Raw JSON content of the manifest file.</param>
    /// <param name="storedHash">The hash stored in the manifest's contentHash field.</param>
    /// <returns>True if the hashes match. A missing or placeholder hash never verifies.</returns>
    public static bool Verify(string jsonContent, string storedHash)
    {
        if (string.IsNullOrEmpty(storedHash)
            || storedHash.Equals(WeightManifestConstants.AutoHashPlaceholder, StringComparison.Ordinal))
        {
            return false;
        }

        var computed = ComputeFromJson(jsonContent);
        return computed.Equals(storedHash, StringComparison.Ordinal);
    }

    /// <summary>
    /// Replaces the "sha256:auto" placeholder in raw JSON with the computed hash.
    /// Returns the updated JSON content and the computed hash. If the placeholder
    /// is absent the content is returned unchanged.
    /// </summary>
    /// <param name="jsonContent">Raw JSON with contentHash placeholder.</param>
    /// <returns>Tuple of (updatedJson, computedHash).</returns>
    public static (string UpdatedJson, string ComputedHash) ReplaceAutoHash(string jsonContent)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(jsonContent);

        var computedHash = ComputeFromJson(jsonContent);

        // Textual replacement preserves the file's original formatting,
        // which keeps the embedded hash consistent with the hash input.
        var updatedJson = jsonContent.Replace(
            $"\"{WeightManifestConstants.AutoHashPlaceholder}\"",
            $"\"{computedHash}\"",
            StringComparison.Ordinal);

        return (updatedJson, computedHash);
    }

    /// <summary>
    /// Builds a canonical JSON string from a <see cref="JsonElement"/>,
    /// excluding the <c>contentHash</c> field for hash stability.
    /// Properties are sorted alphabetically for determinism.
    /// </summary>
    private static string BuildCanonicalContent(JsonElement root)
    {
        using var stream = new MemoryStream();
        using var writer = new Utf8JsonWriter(stream, new JsonWriterOptions
        {
            Indented = false,
            SkipValidation = false
        });

        WriteCanonical(writer, root, excludeField: "contentHash");
        writer.Flush();

        return Encoding.UTF8.GetString(stream.ToArray());
    }

    /// <summary>
    /// Recursively writes JSON with sorted property keys and an optional excluded field.
    /// The exclusion applies only to the element it is passed for (the root object);
    /// nested objects are written in full.
    /// </summary>
    private static void WriteCanonical(Utf8JsonWriter writer, JsonElement element, string? excludeField = null)
    {
        switch (element.ValueKind)
        {
            case JsonValueKind.Object:
                writer.WriteStartObject();

                // Sort properties alphabetically for deterministic output
                var properties = element.EnumerateObject()
                    .Where(p => !string.Equals(p.Name, excludeField, StringComparison.Ordinal))
                    .OrderBy(p => p.Name, StringComparer.Ordinal)
                    .ToList();

                foreach (var property in properties)
                {
                    writer.WritePropertyName(property.Name);
                    WriteCanonical(writer, property.Value);
                }

                writer.WriteEndObject();
                break;

            case JsonValueKind.Array:
                writer.WriteStartArray();
                foreach (var item in element.EnumerateArray())
                {
                    WriteCanonical(writer, item);
                }
                writer.WriteEndArray();
                break;

            case JsonValueKind.String:
                writer.WriteStringValue(element.GetString());
                break;

            case JsonValueKind.Number:
                // Preserve integral values exactly; fall back to double otherwise.
                if (element.TryGetInt64(out var longValue))
                    writer.WriteNumberValue(longValue);
                else
                    writer.WriteNumberValue(element.GetDouble());
                break;

            case JsonValueKind.True:
                writer.WriteBooleanValue(true);
                break;

            case JsonValueKind.False:
                writer.WriteBooleanValue(false);
                break;

            case JsonValueKind.Null:
                writer.WriteNullValue();
                break;
        }
    }
}
|
||||
@@ -0,0 +1,403 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// WeightManifestLoader.cs
|
||||
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
|
||||
// Task: T1 - Weight manifest loader implementation
|
||||
// Description: File-system-based weight manifest discovery, loading,
|
||||
// validation, selection by effectiveFrom date, and diffing.
|
||||
// Deterministic and offline-friendly (no network calls).
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
using System.Diagnostics;
using System.Diagnostics.Metrics;
using System.Globalization;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
/// <summary>
/// Configuration options for the weight manifest loader.
/// </summary>
public sealed record WeightManifestLoaderOptions
{
    /// <summary>Configuration section name.</summary>
    public const string SectionName = "Determinization:WeightManifest";

    /// <summary>
    /// Base directory to discover manifests in.
    /// Defaults to <c>etc/weights</c> relative to the application root.
    /// Relative paths are resolved against the application base directory.
    /// </summary>
    public string ManifestDirectory { get; init; } = WeightManifestConstants.DefaultManifestDirectory;

    /// <summary>
    /// Glob pattern for manifest files.
    /// </summary>
    public string FilePattern { get; init; } = WeightManifestConstants.DefaultGlobPattern;

    /// <summary>
    /// Whether to require valid content hashes (reject "sha256:auto").
    /// In production this should be true; in development, false is acceptable.
    /// Defaults to false.
    /// </summary>
    public bool RequireComputedHash { get; init; }

    /// <summary>
    /// Whether to fail on hash mismatch (true) or log a warning (false).
    /// Defaults to false (warn only).
    /// </summary>
    public bool StrictHashVerification { get; init; }
}
|
||||
|
||||
/// <summary>
/// File-system-based weight manifest loader with deterministic behavior.
/// Discovers manifests from a configured directory, validates them,
/// computes/verifies content hashes, and selects by effectiveFrom date.
/// All numeric formatting uses the invariant culture so diff output is
/// identical regardless of the host locale.
/// </summary>
public sealed class WeightManifestLoader : IWeightManifestLoader
{
    private static readonly Meter Meter = new("StellaOps.Policy.Determinization.WeightManifest", "1.0.0");
    private static readonly Counter<long> ManifestsLoaded = Meter.CreateCounter<long>(
        "stellaops.weight_manifest.loaded_total", "manifests", "Total manifests loaded");
    private static readonly Counter<long> ManifestsValidated = Meter.CreateCounter<long>(
        "stellaops.weight_manifest.validated_total", "manifests", "Total manifests validated");
    private static readonly Counter<long> HashMismatches = Meter.CreateCounter<long>(
        "stellaops.weight_manifest.hash_mismatch_total", "errors", "Content hash mismatches detected");
    private static readonly Counter<long> ValidationErrors = Meter.CreateCounter<long>(
        "stellaops.weight_manifest.validation_error_total", "errors", "Validation errors encountered");

    // Lenient read settings: manifests may carry comments and trailing commas.
    private static readonly JsonSerializerOptions DeserializeOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        ReadCommentHandling = JsonCommentHandling.Skip,
        AllowTrailingCommas = true
    };

    private readonly WeightManifestLoaderOptions _options;
    private readonly ILogger<WeightManifestLoader> _logger;

    public WeightManifestLoader(
        IOptions<WeightManifestLoaderOptions> options,
        ILogger<WeightManifestLoader> logger)
    {
        _options = options?.Value ?? throw new ArgumentNullException(nameof(options));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public async Task<ImmutableArray<WeightManifestLoadResult>> ListAsync(
        CancellationToken cancellationToken = default)
    {
        var directory = ResolveManifestDirectory();
        if (!Directory.Exists(directory))
        {
            _logger.LogWarning("Weight manifest directory not found: {Directory}", directory);
            return [];
        }

        // Ordinal sort of file paths keeps discovery order deterministic.
        var files = Directory.GetFiles(directory, _options.FilePattern)
            .OrderBy(f => f, StringComparer.Ordinal)
            .ToList();

        if (files.Count == 0)
        {
            _logger.LogWarning("No weight manifest files found in {Directory}", directory);
            return [];
        }

        var results = new List<WeightManifestLoadResult>(files.Count);
        foreach (var file in files)
        {
            cancellationToken.ThrowIfCancellationRequested();
            try
            {
                var result = await LoadCoreAsync(file, cancellationToken).ConfigureAwait(false);
                results.Add(result);
            }
            catch (WeightManifestLoadException ex)
            {
                // Best-effort listing: one bad manifest must not hide the rest.
                _logger.LogWarning(ex, "Skipping invalid manifest: {File}", file);
            }
        }

        // Sort by effectiveFrom descending (most recent first)
        return [.. results.OrderByDescending(r => r.Manifest.EffectiveFrom)];
    }

    /// <inheritdoc />
    public Task<WeightManifestLoadResult> LoadAsync(
        string filePath,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
        return LoadCoreAsync(filePath, cancellationToken);
    }

    /// <inheritdoc />
    public async Task<WeightManifestLoadResult?> SelectEffectiveAsync(
        DateTimeOffset referenceDate,
        CancellationToken cancellationToken = default)
    {
        var all = await ListAsync(cancellationToken).ConfigureAwait(false);
        if (all.IsEmpty)
            return null;

        // Already sorted by effectiveFrom descending; pick first where effectiveFrom <= referenceDate
        return all.FirstOrDefault(r => r.Manifest.EffectiveFrom <= referenceDate);
    }

    /// <inheritdoc />
    public ImmutableArray<string> Validate(WeightManifestLoadResult result)
    {
        ArgumentNullException.ThrowIfNull(result);

        var issues = new List<string>();
        var manifest = result.Manifest;

        // Schema version check
        if (!string.Equals(manifest.SchemaVersion, WeightManifestConstants.SupportedSchemaVersion,
            StringComparison.Ordinal))
        {
            issues.Add(
                $"Unsupported schema version '{manifest.SchemaVersion}'. Expected '{WeightManifestConstants.SupportedSchemaVersion}'.");
        }

        // Version field
        if (string.IsNullOrWhiteSpace(manifest.Version))
        {
            issues.Add("Version field is required.");
        }

        // Content hash
        if (_options.RequireComputedHash && !manifest.HasComputedHash)
        {
            issues.Add("Content hash is required but manifest contains placeholder 'sha256:auto'.");
        }

        if (manifest.HasComputedHash && !result.HashVerified)
        {
            issues.Add(
                $"Content hash mismatch: stored={manifest.ContentHash}, computed={result.ComputedHash}.");
        }

        // Legacy weight normalization (weights must sum to 1.0 within tolerance)
        if (manifest.Weights.Legacy.Count > 0)
        {
            var legacySum = manifest.Weights.Legacy.Values.Sum();
            if (Math.Abs(legacySum - 1.0) > 0.001)
            {
                issues.Add($"Legacy weights sum to {legacySum.ToString("F4", CultureInfo.InvariantCulture)}, expected 1.0.");
            }
        }

        // Advisory weight normalization
        if (manifest.Weights.Advisory.Count > 0)
        {
            var advisorySum = manifest.Weights.Advisory.Values.Sum();
            if (Math.Abs(advisorySum - 1.0) > 0.001)
            {
                issues.Add($"Advisory weights sum to {advisorySum.ToString("F4", CultureInfo.InvariantCulture)}, expected 1.0.");
            }
        }

        // Signal weights for entropy normalization
        if (manifest.SignalWeightsForEntropy.Count > 0)
        {
            var signalSum = manifest.SignalWeightsForEntropy.Values.Sum();
            if (Math.Abs(signalSum - 1.0) > 0.001)
            {
                issues.Add($"Signal weights for entropy sum to {signalSum.ToString("F4", CultureInfo.InvariantCulture)}, expected 1.0.");
            }
        }

        ManifestsValidated.Add(1);
        if (issues.Count > 0)
        {
            ValidationErrors.Add(issues.Count);
        }

        return [.. issues];
    }

    /// <inheritdoc />
    public WeightManifestDiff Diff(WeightManifestDocument from, WeightManifestDocument to)
    {
        ArgumentNullException.ThrowIfNull(from);
        ArgumentNullException.ThrowIfNull(to);

        var diffs = new List<WeightManifestFieldDiff>();

        // Compare scalar fields
        CompareScalar(diffs, "version", from.Version, to.Version);
        CompareScalar(diffs, "profile", from.Profile, to.Profile);
        CompareScalar(diffs, "effectiveFrom", from.EffectiveFrom.ToString("O"), to.EffectiveFrom.ToString("O"));

        // Compare legacy weights
        CompareWeightDictionary(diffs, "weights.legacy", from.Weights.Legacy, to.Weights.Legacy);

        // Compare advisory weights
        CompareWeightDictionary(diffs, "weights.advisory", from.Weights.Advisory, to.Weights.Advisory);

        // Compare signal weights for entropy
        CompareWeightDictionary(diffs, "signalWeightsForEntropy",
            from.SignalWeightsForEntropy, to.SignalWeightsForEntropy);

        // Compare bucket thresholds. Invariant culture keeps diff output
        // deterministic across host locales (fix: plain ToString() is
        // culture-sensitive).
        if (from.Buckets is not null && to.Buckets is not null)
        {
            CompareScalar(diffs, "buckets.actNowMin",
                from.Buckets.ActNowMin.ToString(CultureInfo.InvariantCulture),
                to.Buckets.ActNowMin.ToString(CultureInfo.InvariantCulture));
            CompareScalar(diffs, "buckets.scheduleNextMin",
                from.Buckets.ScheduleNextMin.ToString(CultureInfo.InvariantCulture),
                to.Buckets.ScheduleNextMin.ToString(CultureInfo.InvariantCulture));
            CompareScalar(diffs, "buckets.investigateMin",
                from.Buckets.InvestigateMin.ToString(CultureInfo.InvariantCulture),
                to.Buckets.InvestigateMin.ToString(CultureInfo.InvariantCulture));
        }

        // Compare determinization thresholds
        if (from.DeterminizationThresholds is not null && to.DeterminizationThresholds is not null)
        {
            CompareScalar(diffs, "determinizationThresholds.manualReviewEntropy",
                from.DeterminizationThresholds.ManualReviewEntropy.ToString("F4", CultureInfo.InvariantCulture),
                to.DeterminizationThresholds.ManualReviewEntropy.ToString("F4", CultureInfo.InvariantCulture));
            CompareScalar(diffs, "determinizationThresholds.refreshEntropy",
                from.DeterminizationThresholds.RefreshEntropy.ToString("F4", CultureInfo.InvariantCulture),
                to.DeterminizationThresholds.RefreshEntropy.ToString("F4", CultureInfo.InvariantCulture));
        }

        return new WeightManifestDiff
        {
            FromVersion = from.Version,
            ToVersion = to.Version,
            Differences = [.. diffs]
        };
    }

    // ── Private helpers ──────────────────────────────────────────────────

    /// <summary>
    /// Reads, deserializes and hash-checks a single manifest file.
    /// Throws <see cref="WeightManifestLoadException"/> on missing file,
    /// JSON error, or (in strict mode) hash mismatch.
    /// </summary>
    private async Task<WeightManifestLoadResult> LoadCoreAsync(
        string filePath,
        CancellationToken cancellationToken)
    {
        var resolvedPath = Path.GetFullPath(filePath);
        if (!File.Exists(resolvedPath))
        {
            throw new WeightManifestLoadException($"Weight manifest file not found: {resolvedPath}");
        }

        var json = await File.ReadAllTextAsync(resolvedPath, cancellationToken).ConfigureAwait(false);

        WeightManifestDocument manifest;
        try
        {
            manifest = JsonSerializer.Deserialize<WeightManifestDocument>(json, DeserializeOptions)
                ?? throw new WeightManifestLoadException(
                    $"Failed to deserialize weight manifest from {resolvedPath}: empty document");
        }
        catch (JsonException ex)
        {
            throw new WeightManifestLoadException(
                $"JSON parse error in {resolvedPath}: {ex.Message}", ex);
        }

        // Compute content hash
        var computedHash = WeightManifestHashComputer.ComputeFromJson(json);
        var hashVerified = manifest.HasComputedHash
            && computedHash.Equals(manifest.ContentHash, StringComparison.Ordinal);

        if (manifest.HasComputedHash && !hashVerified)
        {
            HashMismatches.Add(1);
            var message =
                $"Content hash mismatch for {resolvedPath}: stored={manifest.ContentHash}, computed={computedHash}";

            if (_options.StrictHashVerification)
            {
                throw new WeightManifestLoadException(message);
            }

            _logger.LogWarning("{Message}", message);
        }

        ManifestsLoaded.Add(1);

        _logger.LogDebug(
            "Loaded weight manifest {Version} from {Path} (hash verified: {HashVerified})",
            manifest.Version, resolvedPath, hashVerified);

        return new WeightManifestLoadResult
        {
            Manifest = manifest,
            SourcePath = resolvedPath,
            HashVerified = hashVerified,
            ComputedHash = computedHash
        };
    }

    /// <summary>Resolves the configured manifest directory to an absolute path.</summary>
    private string ResolveManifestDirectory()
    {
        var dir = _options.ManifestDirectory;
        if (Path.IsPathRooted(dir))
            return dir;

        // Resolve relative to current directory (application root)
        return Path.GetFullPath(dir, AppContext.BaseDirectory);
    }

    /// <summary>Records a field diff when two scalar string values differ (ordinal).</summary>
    private static void CompareScalar(
        List<WeightManifestFieldDiff> diffs, string path, string? from, string? to)
    {
        if (!string.Equals(from, to, StringComparison.Ordinal))
        {
            diffs.Add(new WeightManifestFieldDiff { Path = path, OldValue = from, NewValue = to });
        }
    }

    /// <summary>
    /// Records added/removed/changed entries between two weight dictionaries.
    /// Keys are visited in ordinal order for deterministic diff output; value
    /// changes below 0.0001 are treated as equal.
    /// </summary>
    private static void CompareWeightDictionary(
        List<WeightManifestFieldDiff> diffs,
        string prefix,
        ImmutableDictionary<string, double> from,
        ImmutableDictionary<string, double> to)
    {
        var allKeys = from.Keys.Union(to.Keys).Order().ToList();
        foreach (var key in allKeys)
        {
            var hasFrom = from.TryGetValue(key, out var fromVal);
            var hasTo = to.TryGetValue(key, out var toVal);

            if (!hasFrom)
            {
                diffs.Add(new WeightManifestFieldDiff
                {
                    Path = $"{prefix}.{key}",
                    OldValue = null,
                    NewValue = toVal.ToString("F4", CultureInfo.InvariantCulture)
                });
            }
            else if (!hasTo)
            {
                diffs.Add(new WeightManifestFieldDiff
                {
                    Path = $"{prefix}.{key}",
                    OldValue = fromVal.ToString("F4", CultureInfo.InvariantCulture),
                    NewValue = null
                });
            }
            else if (Math.Abs(fromVal - toVal) > 0.0001)
            {
                diffs.Add(new WeightManifestFieldDiff
                {
                    Path = $"{prefix}.{key}",
                    OldValue = fromVal.ToString("F4", CultureInfo.InvariantCulture),
                    NewValue = toVal.ToString("F4", CultureInfo.InvariantCulture)
                });
            }
        }
    }
}
|
||||
|
||||
/// <summary>
/// Exception thrown when weight manifest loading or validation fails.
/// </summary>
public sealed class WeightManifestLoadException : Exception
{
    /// <summary>Creates the exception with a descriptive message.</summary>
    public WeightManifestLoadException(string message) : base(message) { }

    /// <summary>Creates the exception wrapping an underlying cause (e.g. a JSON parse error).</summary>
    public WeightManifestLoadException(string message, Exception inner) : base(message, inner) { }
}
|
||||
@@ -0,0 +1,278 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// WeightManifestModels.cs
|
||||
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
|
||||
// Task: T1 - Versioned weight manifest models
|
||||
// Description: Immutable models for weight manifests with content-addressed
|
||||
// hashing, versioning, and deterministic serialization.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
/// <summary>
/// Immutable representation of a versioned weight manifest file.
/// </summary>
public sealed record WeightManifestDocument
{
    /// <summary>JSON Schema URI.</summary>
    [JsonPropertyName("$schema")]
    public string? Schema { get; init; }

    /// <summary>Schema version (e.g. "1.0.0").</summary>
    [JsonPropertyName("schemaVersion")]
    public required string SchemaVersion { get; init; }

    /// <summary>Manifest version identifier (e.g. "v2026-01-22").</summary>
    [JsonPropertyName("version")]
    public required string Version { get; init; }

    /// <summary>UTC date from which this manifest is effective.</summary>
    [JsonPropertyName("effectiveFrom")]
    public required DateTimeOffset EffectiveFrom { get; init; }

    /// <summary>Profile name (e.g. "production", "staging"). Defaults to "production".</summary>
    [JsonPropertyName("profile")]
    public string Profile { get; init; } = "production";

    /// <summary>Human-readable description of this manifest.</summary>
    [JsonPropertyName("description")]
    public string? Description { get; init; }

    /// <summary>
    /// Content hash in "sha256:{hex}" format.
    /// The placeholder "sha256:auto" means the hash has not been computed yet.
    /// </summary>
    [JsonPropertyName("contentHash")]
    public required string ContentHash { get; init; }

    /// <summary>Weight profiles (legacy 6-dimension plus advisory).</summary>
    [JsonPropertyName("weights")]
    public required WeightManifestWeights Weights { get; init; }

    /// <summary>Dimension human-readable names, keyed by dimension identifier.</summary>
    [JsonPropertyName("dimensionNames")]
    public ImmutableDictionary<string, string> DimensionNames { get; init; } =
        ImmutableDictionary<string, string>.Empty;

    /// <summary>Dimensions that subtract from risk score.</summary>
    [JsonPropertyName("subtractiveDimensions")]
    public ImmutableArray<string> SubtractiveDimensions { get; init; } = [];

    /// <summary>Guardrail configurations.</summary>
    [JsonPropertyName("guardrails")]
    public WeightManifestGuardrails? Guardrails { get; init; }

    /// <summary>Bucket boundaries for action tiers.</summary>
    [JsonPropertyName("buckets")]
    public WeightManifestBuckets? Buckets { get; init; }

    /// <summary>Determinization thresholds for entropy-based triage.</summary>
    [JsonPropertyName("determinizationThresholds")]
    public WeightManifestDeterminizationThresholds? DeterminizationThresholds { get; init; }

    /// <summary>Signal weights for entropy calculation (maps to <see cref="SignalWeights"/>).</summary>
    [JsonPropertyName("signalWeightsForEntropy")]
    public ImmutableDictionary<string, double> SignalWeightsForEntropy { get; init; } =
        ImmutableDictionary<string, double>.Empty;

    /// <summary>Provenance metadata.</summary>
    [JsonPropertyName("metadata")]
    public WeightManifestMetadata? Metadata { get; init; }

    /// <summary>Whether the content hash is a computed hash vs. the placeholder.</summary>
    [JsonIgnore]
    public bool HasComputedHash => !string.IsNullOrEmpty(ContentHash)
        && !ContentHash.Equals(WeightManifestConstants.AutoHashPlaceholder, StringComparison.Ordinal);
}
|
||||
|
||||
/// <summary>
/// Multi-profile weights block (legacy + advisory).
/// Keys are dimension identifiers; values are weight factors
/// (validated elsewhere to sum to 1.0 when non-empty).
/// </summary>
public sealed record WeightManifestWeights
{
    /// <summary>Legacy 6-dimension weights.</summary>
    [JsonPropertyName("legacy")]
    public ImmutableDictionary<string, double> Legacy { get; init; } =
        ImmutableDictionary<string, double>.Empty;

    /// <summary>Advisory weights.</summary>
    [JsonPropertyName("advisory")]
    public ImmutableDictionary<string, double> Advisory { get; init; } =
        ImmutableDictionary<string, double>.Empty;
}
|
||||
|
||||
/// <summary>
/// Guardrail configuration from a weight manifest.
/// Each rule is optional; a null rule means it is not configured.
/// </summary>
public sealed record WeightManifestGuardrails
{
    /// <summary>Rule capping scores for findings marked not-affected — semantics defined by the guardrail evaluator; confirm there.</summary>
    [JsonPropertyName("notAffectedCap")]
    public GuardrailRule? NotAffectedCap { get; init; }

    /// <summary>Rule enforcing a score floor tied to runtime signals — confirm semantics against the evaluator.</summary>
    [JsonPropertyName("runtimeFloor")]
    public GuardrailRule? RuntimeFloor { get; init; }

    /// <summary>Rule capping scores for speculative findings — confirm semantics against the evaluator.</summary>
    [JsonPropertyName("speculativeCap")]
    public GuardrailRule? SpeculativeCap { get; init; }
}
|
||||
|
||||
/// <summary>
/// Individual guardrail rule. Threshold properties are null when the rule
/// places no constraint on that signal.
/// NOTE(review): "bkp", "rts", and "rch" presumably map to the
/// backport-evidence, runtime-signals, and reachability dimensions
/// (matching the EWS normalizer names) — confirm against the guardrails engine.
/// </summary>
public sealed record GuardrailRule
{
    /// <summary>Whether this rule is active.</summary>
    [JsonPropertyName("enabled")]
    public bool Enabled { get; init; }

    /// <summary>Upper bound on the score when the rule applies (cap rules).</summary>
    [JsonPropertyName("maxScore")]
    public int? MaxScore { get; init; }

    /// <summary>Lower bound on the score when the rule applies (floor rules).</summary>
    [JsonPropertyName("minScore")]
    public int? MinScore { get; init; }

    /// <summary>Minimum bkp value required for the rule to trigger.</summary>
    [JsonPropertyName("requiresBkpMin")]
    public double? RequiresBkpMin { get; init; }

    /// <summary>Maximum rts value allowed for the rule to trigger.</summary>
    [JsonPropertyName("requiresRtsMax")]
    public double? RequiresRtsMax { get; init; }

    /// <summary>Minimum rts value required for the rule to trigger.</summary>
    [JsonPropertyName("requiresRtsMin")]
    public double? RequiresRtsMin { get; init; }

    /// <summary>Maximum rch value allowed for the rule to trigger.</summary>
    [JsonPropertyName("requiresRchMax")]
    public double? RequiresRchMax { get; init; }
}
|
||||
|
||||
/// <summary>
/// Action bucket boundaries (minimum score per tier). Defaults give a
/// 90/70/40 tiering. NOTE(review): inclusive/exclusive handling of the
/// boundary value is decided by the consumer — confirm there.
/// </summary>
public sealed record WeightManifestBuckets
{
    /// <summary>Minimum score for the "act now" bucket.</summary>
    [JsonPropertyName("actNowMin")]
    public int ActNowMin { get; init; } = 90;

    /// <summary>Minimum score for the "schedule next" bucket.</summary>
    [JsonPropertyName("scheduleNextMin")]
    public int ScheduleNextMin { get; init; } = 70;

    /// <summary>Minimum score for the "investigate" bucket.</summary>
    [JsonPropertyName("investigateMin")]
    public int InvestigateMin { get; init; } = 40;
}
|
||||
|
||||
/// <summary>
/// Entropy-based determinization thresholds.
/// </summary>
public sealed record WeightManifestDeterminizationThresholds
{
    /// <summary>Entropy at or above which a finding is routed to manual review. Default 0.60.</summary>
    [JsonPropertyName("manualReviewEntropy")]
    public double ManualReviewEntropy { get; init; } = 0.60;

    /// <summary>Entropy at or above which evidence is refreshed. Default 0.40.</summary>
    [JsonPropertyName("refreshEntropy")]
    public double RefreshEntropy { get; init; } = 0.40;
}
|
||||
|
||||
/// <summary>
/// Provenance metadata for audit trail.
/// </summary>
public sealed record WeightManifestMetadata
{
    /// <summary>Author/system that created the manifest.</summary>
    [JsonPropertyName("createdBy")]
    public string? CreatedBy { get; init; }

    /// <summary>Creation timestamp, if recorded.</summary>
    [JsonPropertyName("createdAt")]
    public DateTimeOffset? CreatedAt { get; init; }

    /// <summary>Version-by-version change history; empty when absent.</summary>
    [JsonPropertyName("changelog")]
    public ImmutableArray<ChangelogEntry> Changelog { get; init; } = [];

    /// <summary>Free-form notes; empty when absent.</summary>
    [JsonPropertyName("notes")]
    public ImmutableArray<string> Notes { get; init; } = [];
}
|
||||
|
||||
/// <summary>
/// Changelog entry for manifest versioning audit.
/// </summary>
public sealed record ChangelogEntry
{
    /// <summary>Manifest version this entry describes.</summary>
    [JsonPropertyName("version")]
    public string? Version { get; init; }

    /// <summary>Date of the change (kept as a string as authored in the manifest).</summary>
    [JsonPropertyName("date")]
    public string? Date { get; init; }

    /// <summary>Human-readable change descriptions; empty when absent.</summary>
    [JsonPropertyName("changes")]
    public ImmutableArray<string> Changes { get; init; } = [];
}
|
||||
|
||||
/// <summary>
/// Constants for the weight manifest system.
/// Note: const values are inlined into consuming assemblies at compile time.
/// </summary>
public static class WeightManifestConstants
{
    /// <summary>Placeholder that signals "compute hash at build/load time".</summary>
    public const string AutoHashPlaceholder = "sha256:auto";

    /// <summary>Prefix for content hashes.</summary>
    public const string HashPrefix = "sha256:";

    /// <summary>Supported schema version; bump together with manifest schema changes.</summary>
    public const string SupportedSchemaVersion = "1.0.0";

    /// <summary>Default glob pattern for discovering manifest files.</summary>
    public const string DefaultGlobPattern = "*.weights.json";

    /// <summary>Default manifest directory (relative to application root).</summary>
    public const string DefaultManifestDirectory = "etc/weights";
}
|
||||
|
||||
/// <summary>
/// Result of loading and validating a weight manifest.
/// </summary>
public sealed record WeightManifestLoadResult
{
    /// <summary>The loaded and validated manifest.</summary>
    public required WeightManifestDocument Manifest { get; init; }

    /// <summary>File path the manifest was loaded from.</summary>
    public required string SourcePath { get; init; }

    /// <summary>
    /// Whether the content hash was verified against a declared hash
    /// (vs computed fresh from the auto placeholder).
    /// </summary>
    public required bool HashVerified { get; init; }

    /// <summary>Computed content hash (may differ from manifest if auto).</summary>
    public required string ComputedHash { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of comparing two weight manifests.
/// </summary>
public sealed record WeightManifestDiff
{
    /// <summary>Source (older) manifest version.</summary>
    public required string FromVersion { get; init; }

    /// <summary>Target (newer) manifest version.</summary>
    public required string ToVersion { get; init; }

    /// <summary>Individual field differences; empty when the manifests match.</summary>
    public required ImmutableArray<WeightManifestFieldDiff> Differences { get; init; }

    /// <summary>Whether any differences exist (derived from <see cref="Differences"/>).</summary>
    public bool HasDifferences => !Differences.IsEmpty;
}
|
||||
|
||||
/// <summary>
/// Individual field difference between two manifests.
/// </summary>
public sealed record WeightManifestFieldDiff
{
    /// <summary>Dot-delimited path to the changed field.</summary>
    public required string Path { get; init; }

    /// <summary>Previous value (serialized as string); null when the field was added.</summary>
    public required string? OldValue { get; init; }

    /// <summary>New value (serialized as string); null when the field was removed.</summary>
    public required string? NewValue { get; init; }
}
|
||||
@@ -3,6 +3,9 @@ using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Policy.Determinization.Scoring;
|
||||
using StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
using StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
using StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
namespace StellaOps.Policy.Determinization;
|
||||
|
||||
@@ -43,8 +46,26 @@ public static class ServiceCollectionExtensions
|
||||
services.TryAddSingleton<DecayedConfidenceCalculator>();
|
||||
services.TryAddSingleton<IDecayedConfidenceCalculator>(sp => sp.GetRequiredService<DecayedConfidenceCalculator>());
|
||||
|
||||
services.TryAddSingleton<ImpactScoreCalculator>();
|
||||
services.TryAddSingleton<IImpactScoreCalculator>(sp => sp.GetRequiredService<ImpactScoreCalculator>());
|
||||
|
||||
services.TryAddSingleton<CombinedImpactCalculator>();
|
||||
services.TryAddSingleton<ICombinedImpactCalculator>(sp => sp.GetRequiredService<CombinedImpactCalculator>());
|
||||
|
||||
services.TryAddSingleton<TrustScoreAggregator>();
|
||||
|
||||
services.TryAddSingleton<TrustScoreAlgebraFacade>();
|
||||
services.TryAddSingleton<ITrustScoreAlgebraFacade>(sp => sp.GetRequiredService<TrustScoreAlgebraFacade>());
|
||||
|
||||
// EWS: 6-dimension Evidence-Weighted Score model
|
||||
RegisterEwsServices(services);
|
||||
|
||||
// Triage: decay-based staleness evaluation and re-analysis queue
|
||||
RegisterTriageServices(services);
|
||||
|
||||
// Weight Manifests: versioned weight discovery, validation, selection
|
||||
RegisterWeightManifestServices(services);
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
@@ -66,8 +87,76 @@ public static class ServiceCollectionExtensions
|
||||
services.TryAddSingleton<DecayedConfidenceCalculator>();
|
||||
services.TryAddSingleton<IDecayedConfidenceCalculator>(sp => sp.GetRequiredService<DecayedConfidenceCalculator>());
|
||||
|
||||
services.TryAddSingleton<ImpactScoreCalculator>();
|
||||
services.TryAddSingleton<IImpactScoreCalculator>(sp => sp.GetRequiredService<ImpactScoreCalculator>());
|
||||
|
||||
services.TryAddSingleton<CombinedImpactCalculator>();
|
||||
services.TryAddSingleton<ICombinedImpactCalculator>(sp => sp.GetRequiredService<CombinedImpactCalculator>());
|
||||
|
||||
services.TryAddSingleton<TrustScoreAggregator>();
|
||||
|
||||
services.TryAddSingleton<TrustScoreAlgebraFacade>();
|
||||
services.TryAddSingleton<ITrustScoreAlgebraFacade>(sp => sp.GetRequiredService<TrustScoreAlgebraFacade>());
|
||||
|
||||
// TSF-004: Delta-if-present calculator for hypothetical score simulations
|
||||
services.TryAddSingleton<DeltaIfPresentCalculator>();
|
||||
services.TryAddSingleton<IDeltaIfPresentCalculator>(sp => sp.GetRequiredService<DeltaIfPresentCalculator>());
|
||||
|
||||
// EWS: 6-dimension Evidence-Weighted Score model
|
||||
RegisterEwsServices(services);
|
||||
|
||||
// Triage: decay-based staleness evaluation and re-analysis queue
|
||||
RegisterTriageServices(services);
|
||||
|
||||
// Weight Manifests: versioned weight discovery, validation, selection
|
||||
RegisterWeightManifestServices(services);
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
// Registers the 6-dimension Evidence-Weighted Score (EWS) pipeline:
// dimension normalizers, guardrails engine, and the unified calculator.
private static void RegisterEwsServices(IServiceCollection services)
{
    // Register all 6 dimension normalizers (AddSingleton, not TryAdd,
    // so IEnumerable<IEwsDimensionNormalizer> resolves all of them)
    services.AddSingleton<IEwsDimensionNormalizer, ReachabilityNormalizer>();
    services.AddSingleton<IEwsDimensionNormalizer, RuntimeSignalsNormalizer>();
    services.AddSingleton<IEwsDimensionNormalizer, BackportEvidenceNormalizer>();
    services.AddSingleton<IEwsDimensionNormalizer, ExploitabilityNormalizer>();
    services.AddSingleton<IEwsDimensionNormalizer, SourceConfidenceNormalizer>();
    services.AddSingleton<IEwsDimensionNormalizer, MitigationStatusNormalizer>();

    // Register guardrails engine (TryAdd: an existing host registration wins)
    services.TryAddSingleton<IGuardrailsEngine, GuardrailsEngine>();

    // Register unified EWS calculator
    services.TryAddSingleton<IEwsCalculator, EwsCalculator>();
}
|
||||
|
||||
// Registers the decay-based triage pipeline: options, staleness evaluator,
// re-analysis sink, and the unknown-triage queue service.
private static void RegisterTriageServices(IServiceCollection services)
{
    // Register triage options (defaults if not bound to config)
    services.AddOptions<TriageQueueOptions>();

    // Register evaluator
    services.TryAddSingleton<ITriageQueueEvaluator, TriageQueueEvaluator>();

    // Register in-memory sink as default (can be overridden by host-level registration)
    services.TryAddSingleton<InMemoryTriageReanalysisSink>();
    // Interface resolves to the same instance as the concrete registration.
    services.TryAddSingleton<ITriageReanalysisSink>(sp => sp.GetRequiredService<InMemoryTriageReanalysisSink>());

    // Register the triage queue service
    services.TryAddSingleton<UnknownTriageQueueService>();
}
|
||||
|
||||
// Registers weight-manifest services: loader options, the manifest loader,
// and the CLI command surface.
private static void RegisterWeightManifestServices(IServiceCollection services)
{
    // Register loader options (defaults if not bound to config)
    services.AddOptions<WeightManifestLoaderOptions>();

    // Register manifest loader (TryAdd: an existing host registration wins)
    services.TryAddSingleton<IWeightManifestLoader, WeightManifestLoader>();

    // Register CLI command service
    services.TryAddSingleton<WeightManifestCommands>();
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,452 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProofGraphBuilder.cs
|
||||
// Sprint: SPRINT_20260208_049_Policy_proof_studio_ux
|
||||
// Task: T1 - Proof graph builder
|
||||
// Description: Constructs proof graphs from verdict rationale data.
|
||||
// Deterministic: same inputs always produce same graph with
|
||||
// content-addressed ID. Supports counterfactual overlay nodes.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
|
||||
namespace StellaOps.Policy.Explainability;
|
||||
|
||||
/// <summary>
/// Builds proof graphs from verdict rationale components.
/// </summary>
public interface IProofGraphBuilder
{
    /// <summary>
    /// Builds a complete proof graph from a verdict rationale and
    /// optional score breakdown data.
    /// </summary>
    /// <param name="input">Rationale, optional score breakdown, and reference time.</param>
    /// <returns>A content-addressed proof graph; same input yields the same graph ID.</returns>
    ProofGraph Build(ProofGraphInput input);

    /// <summary>
    /// Adds a counterfactual overlay to an existing proof graph,
    /// showing how scores would change under hypothetical conditions.
    /// </summary>
    /// <param name="baseGraph">Graph to overlay; the input instance is not mutated.</param>
    /// <param name="scenario">Hypothetical factor overrides to visualize.</param>
    /// <returns>A new graph containing the base content plus overlay nodes/edges and a fresh ID.</returns>
    ProofGraph AddCounterfactualOverlay(
        ProofGraph baseGraph,
        CounterfactualScenario scenario);
}
|
||||
|
||||
/// <summary>
/// Input data for building a proof graph.
/// </summary>
public sealed record ProofGraphInput
{
    /// <summary>The verdict rationale to visualize.</summary>
    public required VerdictRationale Rationale { get; init; }

    /// <summary>Per-factor score breakdown, if available; null skips score/guardrail nodes.</summary>
    public ScoreBreakdownDashboard? ScoreBreakdown { get; init; }

    /// <summary>Reference time recorded on the resulting graph as ComputedAt.</summary>
    public required DateTimeOffset ComputedAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// A counterfactual scenario for what-if analysis.
/// </summary>
public sealed record CounterfactualScenario
{
    /// <summary>Scenario label; also used to derive the overlay node ID.</summary>
    [JsonPropertyName("label")]
    public required string Label { get; init; }

    /// <summary>Factor overrides (factorId → hypothetical score).</summary>
    [JsonPropertyName("factor_overrides")]
    public required ImmutableDictionary<string, int> FactorOverrides { get; init; }

    /// <summary>Resulting composite score under this scenario, if computed.</summary>
    [JsonPropertyName("resulting_score")]
    public int? ResultingScore { get; init; }
}
|
||||
|
||||
/// <summary>
/// Deterministic proof graph builder: the same <see cref="ProofGraphInput"/>
/// always produces the same graph with the same content-addressed
/// <see cref="ProofGraph.GraphId"/>. All string ordering uses ordinal
/// comparison so results do not depend on the current culture.
/// </summary>
public sealed class ProofGraphBuilder : IProofGraphBuilder
{
    private readonly ILogger<ProofGraphBuilder> _logger;

    public ProofGraphBuilder(ILogger<ProofGraphBuilder> logger)
    {
        _logger = logger;
    }

    /// <inheritdoc/>
    public ProofGraph Build(ProofGraphInput input)
    {
        ArgumentNullException.ThrowIfNull(input);

        var nodes = new List<ProofGraphNode>();
        var edges = new List<ProofGraphEdge>();

        // 1. Verdict root node (depth 0).
        var verdictNodeId = $"verdict:{input.Rationale.VerdictRef.AttestationId}";
        nodes.Add(new ProofGraphNode
        {
            Id = verdictNodeId,
            Label = $"Verdict: {input.Rationale.Decision.Verdict}",
            Type = ProofNodeType.Verdict,
            // Decision score is on a 0-100 scale; node confidence is 0.0-1.0.
            Confidence = input.Rationale.Decision.Score.HasValue
                ? input.Rationale.Decision.Score.Value / 100.0
                : null,
            ScoreContribution = input.Rationale.Decision.Score,
            Depth = 0
        });

        // 2. Policy rule node (depth 1) gating the verdict.
        var policyNodeId = $"policy:{input.Rationale.PolicyClause.ClauseId}";
        nodes.Add(new ProofGraphNode
        {
            Id = policyNodeId,
            Label = input.Rationale.PolicyClause.RuleDescription,
            Type = ProofNodeType.PolicyRule,
            Depth = 1
        });
        edges.Add(new ProofGraphEdge
        {
            Source = policyNodeId,
            Target = verdictNodeId,
            Relation = ProofEdgeRelation.Gates,
            Label = "Policy evaluation"
        });

        // 3. Score computation nodes from the breakdown (depth 2).
        if (input.ScoreBreakdown is not null)
        {
            foreach (var factor in input.ScoreBreakdown.Factors)
            {
                var factorNodeId = $"score:{factor.FactorId}";
                nodes.Add(new ProofGraphNode
                {
                    Id = factorNodeId,
                    Label = $"{factor.FactorName} ({factor.RawScore})",
                    Type = ProofNodeType.ScoreComputation,
                    Confidence = factor.Confidence,
                    ScoreContribution = factor.WeightedContribution,
                    Depth = 2,
                    Metadata = ImmutableDictionary<string, string>.Empty
                        .Add("weight", factor.Weight.ToString("F2"))
                        .Add("raw_score", factor.RawScore.ToString())
                });
                edges.Add(new ProofGraphEdge
                {
                    Source = factorNodeId,
                    Target = verdictNodeId,
                    Relation = ProofEdgeRelation.ContributesScore,
                    Weight = factor.Weight,
                    Label = $"{factor.Weight:P0} weight"
                });
            }

            // 3b. Guardrail nodes (depth 1) that override the verdict score.
            foreach (var guardrail in input.ScoreBreakdown.GuardrailsApplied)
            {
                var guardrailNodeId = $"guardrail:{guardrail.GuardrailName}";
                nodes.Add(new ProofGraphNode
                {
                    Id = guardrailNodeId,
                    Label = $"Guardrail: {guardrail.GuardrailName} ({guardrail.ScoreBefore}→{guardrail.ScoreAfter})",
                    Type = ProofNodeType.Guardrail,
                    Depth = 1,
                    Metadata = ImmutableDictionary<string, string>.Empty
                        .Add("reason", guardrail.Reason)
                });
                edges.Add(new ProofGraphEdge
                {
                    Source = guardrailNodeId,
                    Target = verdictNodeId,
                    Relation = ProofEdgeRelation.GuardrailApplied,
                    Label = guardrail.Reason
                });
            }
        }

        // 4. Evidence leaf nodes (depth 3).
        var leafNodeIds = new List<string>();

        // Reachability evidence feeds the "rch" score node when it exists,
        // otherwise falls back to the policy node.
        if (input.Rationale.Evidence.Reachability is not null)
        {
            var reachNodeId = $"evidence:reachability:{input.Rationale.Evidence.Cve}";
            nodes.Add(new ProofGraphNode
            {
                Id = reachNodeId,
                Label = $"Reachability: {input.Rationale.Evidence.Reachability.VulnerableFunction ?? "analyzed"}",
                Type = ProofNodeType.ReachabilityAnalysis,
                Depth = 3,
                Metadata = ImmutableDictionary<string, string>.Empty
                    .Add("entry_point", input.Rationale.Evidence.Reachability.EntryPoint ?? "unknown")
            });
            edges.Add(new ProofGraphEdge
            {
                Source = reachNodeId,
                Target = TryFindScoreNode(nodes, "rch") ?? policyNodeId,
                Relation = ProofEdgeRelation.ProvidesEvidence,
                Label = "Reachability signal"
            });
            leafNodeIds.Add(reachNodeId);
        }

        // VEX statement evidence.
        if (input.Rationale.Attestations.VexStatements?.Count > 0)
        {
            foreach (var vex in input.Rationale.Attestations.VexStatements)
            {
                var vexNodeId = $"evidence:vex:{vex.Id}";
                nodes.Add(new ProofGraphNode
                {
                    Id = vexNodeId,
                    Label = $"VEX: {vex.Summary ?? vex.Id}",
                    Type = ProofNodeType.VexStatement,
                    Digest = vex.Digest,
                    Depth = 3
                });
                edges.Add(new ProofGraphEdge
                {
                    Source = vexNodeId,
                    Target = policyNodeId,
                    Relation = ProofEdgeRelation.Attests,
                    Label = "VEX statement"
                });
                leafNodeIds.Add(vexNodeId);
            }
        }

        // Provenance attestation.
        if (input.Rationale.Attestations.Provenance is not null)
        {
            var provNodeId = $"evidence:provenance:{input.Rationale.Attestations.Provenance.Id}";
            nodes.Add(new ProofGraphNode
            {
                Id = provNodeId,
                Label = $"Provenance: {input.Rationale.Attestations.Provenance.Summary ?? "verified"}",
                Type = ProofNodeType.Provenance,
                Digest = input.Rationale.Attestations.Provenance.Digest,
                Depth = 3
            });
            edges.Add(new ProofGraphEdge
            {
                Source = provNodeId,
                Target = policyNodeId,
                Relation = ProofEdgeRelation.Attests,
                Label = "Provenance attestation"
            });
            leafNodeIds.Add(provNodeId);
        }

        // Path witness (treated as reachability-class evidence).
        if (input.Rationale.Attestations.PathWitness is not null)
        {
            var pathNodeId = $"evidence:pathwitness:{input.Rationale.Attestations.PathWitness.Id}";
            nodes.Add(new ProofGraphNode
            {
                Id = pathNodeId,
                Label = $"Path Witness: {input.Rationale.Attestations.PathWitness.Summary ?? "verified"}",
                Type = ProofNodeType.ReachabilityAnalysis,
                Digest = input.Rationale.Attestations.PathWitness.Digest,
                Depth = 3
            });
            edges.Add(new ProofGraphEdge
            {
                Source = pathNodeId,
                Target = TryFindScoreNode(nodes, "rch") ?? policyNodeId,
                Relation = ProofEdgeRelation.Attests,
                Label = "Path witness attestation"
            });
            leafNodeIds.Add(pathNodeId);
        }

        // 5. Build critical paths (leaf → root).
        var criticalPaths = BuildCriticalPaths(nodes, edges, verdictNodeId, leafNodeIds);

        // 6. Compute content-addressed graph ID.
        var graphId = ComputeGraphId(nodes, edges);

        var graph = new ProofGraph
        {
            GraphId = graphId,
            VerdictRef = input.Rationale.VerdictRef,
            Nodes = [.. nodes],
            Edges = [.. edges],
            CriticalPaths = [.. criticalPaths],
            RootNodeId = verdictNodeId,
            LeafNodeIds = [.. leafNodeIds],
            ComputedAt = input.ComputedAt
        };

        _logger.LogDebug(
            "Built proof graph {GraphId} with {NodeCount} nodes, {EdgeCount} edges, {PathCount} paths",
            graphId, nodes.Count, edges.Count, criticalPaths.Count);

        return graph;
    }

    /// <inheritdoc/>
    public ProofGraph AddCounterfactualOverlay(
        ProofGraph baseGraph,
        CounterfactualScenario scenario)
    {
        ArgumentNullException.ThrowIfNull(baseGraph);
        ArgumentNullException.ThrowIfNull(scenario);

        var nodes = baseGraph.Nodes.ToList();
        var edges = baseGraph.Edges.ToList();

        // Counterfactual hypothesis node; ID derived from the scenario label.
        var cfNodeId = $"counterfactual:{scenario.Label.Replace(" ", "_").ToLowerInvariant()}";
        nodes.Add(new ProofGraphNode
        {
            Id = cfNodeId,
            Label = $"What-If: {scenario.Label}",
            Type = ProofNodeType.Counterfactual,
            ScoreContribution = scenario.ResultingScore,
            Depth = 0,
            Metadata = scenario.FactorOverrides
                .ToImmutableDictionary(kv => $"override_{kv.Key}", kv => kv.Value.ToString())
        });

        // Connect each overridden factor that exists in the base graph.
        foreach (var (factorId, _) in scenario.FactorOverrides)
        {
            var existingNode = nodes.FirstOrDefault(n => n.Id == $"score:{factorId}");
            if (existingNode is not null)
            {
                edges.Add(new ProofGraphEdge
                {
                    Source = existingNode.Id,
                    Target = cfNodeId,
                    Relation = ProofEdgeRelation.Overrides,
                    Label = $"What-if override: {factorId}"
                });
            }
        }

        // Overlay changes the content, so the graph gets a fresh ID.
        var newGraphId = ComputeGraphId(nodes, edges);

        return baseGraph with
        {
            GraphId = newGraphId,
            Nodes = [.. nodes],
            Edges = [.. edges]
        };
    }

    // ── Private helpers ──────────────────────────────────────────────────

    /// <summary>Finds the score node for a factor code, or null if absent.</summary>
    private static string? TryFindScoreNode(List<ProofGraphNode> nodes, string factorCode)
    {
        return nodes.FirstOrDefault(n => n.Id == $"score:{factorCode}")?.Id;
    }

    /// <summary>
    /// Builds one path per leaf (leaf → root via BFS) and marks every path
    /// tied for the highest confidence as critical.
    /// </summary>
    private static List<ProofGraphPath> BuildCriticalPaths(
        List<ProofGraphNode> nodes,
        List<ProofGraphEdge> edges,
        string rootId,
        List<string> leafIds)
    {
        var paths = new List<ProofGraphPath>();

        // Forward adjacency (source → targets) for tracing leaf to root.
        var forwardAdj = edges
            .GroupBy(e => e.Source)
            .ToDictionary(g => g.Key, g => g.Select(e => (e.Target, e.Weight)).ToList());

        // (source, target) → weight lookup; first edge wins on duplicates.
        // Avoids a linear scan over all edges per path hop.
        var edgeWeight = edges
            .GroupBy(e => (e.Source, e.Target))
            .ToDictionary(g => g.Key, g => g.First().Weight);

        foreach (var leafId in leafIds)
        {
            var path = FindPathBfs(forwardAdj, leafId, rootId);
            if (path.Count == 0)
            {
                continue;
            }

            // Path confidence = product of edge weights along the path.
            var confidence = 1.0;
            for (int i = 0; i < path.Count - 1; i++)
            {
                if (edgeWeight.TryGetValue((path[i], path[i + 1]), out var weight))
                {
                    confidence *= weight;
                }
            }

            var leafNode = nodes.FirstOrDefault(n => n.Id == leafId);
            paths.Add(new ProofGraphPath
            {
                NodeIds = [.. path],
                PathConfidence = confidence,
                Description = $"{leafNode?.Label ?? leafId} → verdict"
            });
        }

        // Mark every path within tolerance of the best confidence as critical.
        if (paths.Count > 0)
        {
            var maxConfidence = paths.Max(p => p.PathConfidence);
            for (int i = 0; i < paths.Count; i++)
            {
                if (Math.Abs(paths[i].PathConfidence - maxConfidence) < 0.0001)
                {
                    paths[i] = paths[i] with { IsCritical = true };
                }
            }
        }

        return paths;
    }

    /// <summary>
    /// Deterministic BFS shortest path from <paramref name="from"/> to
    /// <paramref name="to"/>; neighbors are expanded in ordinal order so the
    /// chosen path is stable across runs. Returns empty if unreachable.
    /// </summary>
    private static List<string> FindPathBfs(
        Dictionary<string, List<(string Target, double Weight)>> adj,
        string from,
        string to)
    {
        var visited = new HashSet<string>();
        var queue = new Queue<List<string>>();
        queue.Enqueue([from]);

        while (queue.Count > 0)
        {
            var path = queue.Dequeue();
            var current = path[^1];

            if (current == to)
            {
                return path;
            }

            if (!visited.Add(current))
            {
                continue;
            }

            if (adj.TryGetValue(current, out var neighbors))
            {
                foreach (var (target, _) in neighbors.OrderBy(n => n.Target, StringComparer.Ordinal))
                {
                    if (!visited.Contains(target))
                    {
                        queue.Enqueue([.. path, target]);
                    }
                }
            }
        }

        return [];
    }

    /// <summary>
    /// Content-addressed graph ID: SHA-256 over node IDs and edge pairs in
    /// ordinal sort order, so equal graphs always hash to the same ID
    /// regardless of construction order or current culture.
    /// </summary>
    private static string ComputeGraphId(List<ProofGraphNode> nodes, List<ProofGraphEdge> edges)
    {
        var sortedNodes = string.Join("|", nodes
            .OrderBy(n => n.Id, StringComparer.Ordinal)
            .Select(n => n.Id));
        var sortedEdges = string.Join("|", edges
            .OrderBy(e => e.Source, StringComparer.Ordinal)
            .ThenBy(e => e.Target, StringComparer.Ordinal)
            .Select(e => $"{e.Source}->{e.Target}"));
        var content = $"{sortedNodes}:{sortedEdges}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content));
        return $"pg:sha256:{Convert.ToHexStringLower(hash)}";
    }
}
|
||||
@@ -0,0 +1,204 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProofGraphModels.cs
|
||||
// Sprint: SPRINT_20260208_049_Policy_proof_studio_ux
|
||||
// Task: T1 - Proof graph visualization models
|
||||
// Description: Directed acyclic graph representation of the full evidence
|
||||
// chain backing a verdict. Nodes represent evidence artifacts,
|
||||
// edges represent derivation/dependency relationships, and
|
||||
// paths show the full chain from source evidence to verdict.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Explainability;
|
||||
|
||||
/// <summary>
/// Complete directed acyclic graph representing the evidence chain
/// from source artifacts to a final verdict decision.
/// </summary>
public sealed record ProofGraph
{
    /// <summary>Content-addressed graph identifier ("pg:sha256:…" derived from sorted node/edge IDs).</summary>
    [JsonPropertyName("graph_id")]
    public required string GraphId { get; init; }

    /// <summary>Reference to the verdict this graph explains.</summary>
    [JsonPropertyName("verdict_ref")]
    public required VerdictReference VerdictRef { get; init; }

    /// <summary>All nodes in the proof graph.</summary>
    [JsonPropertyName("nodes")]
    public required ImmutableArray<ProofGraphNode> Nodes { get; init; }

    /// <summary>All edges in the proof graph.</summary>
    [JsonPropertyName("edges")]
    public required ImmutableArray<ProofGraphEdge> Edges { get; init; }

    /// <summary>Critical paths from source evidence to verdict.</summary>
    [JsonPropertyName("critical_paths")]
    public required ImmutableArray<ProofGraphPath> CriticalPaths { get; init; }

    /// <summary>Root node ID (the verdict node).</summary>
    [JsonPropertyName("root_node_id")]
    public required string RootNodeId { get; init; }

    /// <summary>Leaf node IDs (source evidence).</summary>
    [JsonPropertyName("leaf_node_ids")]
    public required ImmutableArray<string> LeafNodeIds { get; init; }

    /// <summary>When the graph was computed (reference time passed in by the caller).</summary>
    [JsonPropertyName("computed_at")]
    public required DateTimeOffset ComputedAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// A node in the proof graph representing an evidence artifact,
/// intermediate computation, or the final verdict.
/// </summary>
public sealed record ProofGraphNode
{
    /// <summary>Unique node identifier (e.g. "verdict:…", "score:…", "evidence:…").</summary>
    [JsonPropertyName("id")]
    public required string Id { get; init; }

    /// <summary>Human-readable label for display.</summary>
    [JsonPropertyName("label")]
    public required string Label { get; init; }

    /// <summary>Node type classification.</summary>
    [JsonPropertyName("type")]
    public required ProofNodeType Type { get; init; }

    /// <summary>Confidence score at this node (0.0 to 1.0); null when not applicable.</summary>
    [JsonPropertyName("confidence")]
    public double? Confidence { get; init; }

    /// <summary>Score contribution of this node to the verdict; null when not applicable.</summary>
    [JsonPropertyName("score_contribution")]
    public double? ScoreContribution { get; init; }

    /// <summary>Content digest of the underlying artifact, if any.</summary>
    [JsonPropertyName("digest")]
    public string? Digest { get; init; }

    /// <summary>Additional metadata for display; defaults to empty.</summary>
    [JsonPropertyName("metadata")]
    public ImmutableDictionary<string, string> Metadata { get; init; } =
        ImmutableDictionary<string, string>.Empty;

    /// <summary>Visual depth in the graph (0 = verdict root, larger = closer to source evidence).</summary>
    [JsonPropertyName("depth")]
    public int Depth { get; init; }
}
|
||||
|
||||
/// <summary>
/// Classification of proof graph nodes. Serialized as strings via
/// <see cref="JsonStringEnumConverter"/>.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum ProofNodeType
{
    /// <summary>Final verdict decision.</summary>
    Verdict,

    /// <summary>Policy rule evaluation.</summary>
    PolicyRule,

    /// <summary>Scoring computation (e.g., EWS dimension).</summary>
    ScoreComputation,

    /// <summary>VEX statement evidence.</summary>
    VexStatement,

    /// <summary>Reachability analysis result (also used for path witnesses).</summary>
    ReachabilityAnalysis,

    /// <summary>SBOM lineage evidence.</summary>
    SbomEvidence,

    /// <summary>Provenance attestation.</summary>
    Provenance,

    /// <summary>Runtime signal observation.</summary>
    RuntimeSignal,

    /// <summary>EPSS/CVSS advisory data.</summary>
    AdvisoryData,

    /// <summary>Guardrail rule application.</summary>
    Guardrail,

    /// <summary>Counterfactual hypothesis node (what-if overlay).</summary>
    Counterfactual
}
|
||||
|
||||
/// <summary>
/// A directed edge in the proof graph showing derivation
/// (source provides, target consumes).
/// </summary>
public sealed record ProofGraphEdge
{
    /// <summary>Source node ID (evidence provider).</summary>
    [JsonPropertyName("source")]
    public required string Source { get; init; }

    /// <summary>Target node ID (evidence consumer).</summary>
    [JsonPropertyName("target")]
    public required string Target { get; init; }

    /// <summary>Relationship type.</summary>
    [JsonPropertyName("relation")]
    public required ProofEdgeRelation Relation { get; init; }

    /// <summary>Weight/importance of this edge (0.0 to 1.0); defaults to 1.0.</summary>
    [JsonPropertyName("weight")]
    public double Weight { get; init; } = 1.0;

    /// <summary>Human-readable label for the edge, if any.</summary>
    [JsonPropertyName("label")]
    public string? Label { get; init; }
}
|
||||
|
||||
/// <summary>
/// Types of relationships between proof graph nodes, carried on
/// <see cref="ProofGraphEdge.Relation"/>.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum ProofEdgeRelation
{
    /// <summary>Source provides input evidence to target.</summary>
    ProvidesEvidence,

    /// <summary>Source score contributes to target aggregate.</summary>
    ContributesScore,

    /// <summary>Source evaluation gates target decision.</summary>
    Gates,

    /// <summary>Source attestation supports target claim.</summary>
    Attests,

    /// <summary>Source overrides target under certain conditions.</summary>
    Overrides,

    /// <summary>Source guardrail modifies target score.</summary>
    GuardrailApplied
}
|
||||
|
||||
/// <summary>
/// A path through the proof graph from a leaf evidence node
/// to the root verdict node.
/// </summary>
public sealed record ProofGraphPath
{
    /// <summary>Ordered node IDs from leaf to root.</summary>
    [JsonPropertyName("node_ids")]
    public required ImmutableArray<string> NodeIds { get; init; }

    /// <summary>
    /// Cumulative confidence along this path.
    /// NOTE(review): presumably in [0, 1] like node/edge weights — the
    /// producer is not visible here, so confirm before relying on range.
    /// </summary>
    [JsonPropertyName("path_confidence")]
    public required double PathConfidence { get; init; }

    /// <summary>Whether this path is the highest-confidence path.</summary>
    [JsonPropertyName("is_critical")]
    public bool IsCritical { get; init; }

    /// <summary>Human-readable description of this evidence chain.</summary>
    [JsonPropertyName("description")]
    public required string Description { get; init; }
}
|
||||
@@ -0,0 +1,272 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProofStudioService.cs
|
||||
// Sprint: SPRINT_20260208_049_Policy_proof_studio_ux
|
||||
// Task: T2 - Integration service wiring proof graph + score breakdown
|
||||
// Description: Orchestrates proof graph construction and score breakdown
|
||||
// composition from existing policy engine data models.
|
||||
// Bridges ScoreExplanation (Policy.Scoring) and VerdictRationale
|
||||
// (Explainability) into the proof studio visualization models.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Diagnostics.Metrics;
|
||||
|
||||
namespace StellaOps.Policy.Explainability;
|
||||
|
||||
/// <summary>
/// Integration surface for the Proof Studio UX.
/// Composes proof graphs and score breakdowns from existing
/// policy engine results.
/// </summary>
public interface IProofStudioService
{
    /// <summary>
    /// Builds a full proof studio view from a verdict rationale and
    /// optional per-factor score explanation data.
    /// </summary>
    /// <param name="request">Rationale plus optional scoring/guardrail inputs.</param>
    /// <returns>A composed view containing the proof graph and, when factor
    /// data was supplied, the score breakdown dashboard.</returns>
    ProofStudioView Compose(ProofStudioRequest request);

    /// <summary>
    /// Applies a counterfactual scenario to an existing proof studio view,
    /// returning a new view with the overlay applied. The input view is
    /// not mutated (record semantics).
    /// </summary>
    /// <param name="current">The view to overlay.</param>
    /// <param name="scenario">The what-if scenario to apply.</param>
    ProofStudioView ApplyCounterfactual(
        ProofStudioView current,
        CounterfactualScenario scenario);
}
|
||||
|
||||
/// <summary>
/// Request to compose a proof studio view. Only <see cref="Rationale"/> is
/// mandatory; scoring fields are optional and, when absent, no score
/// breakdown dashboard is produced.
/// </summary>
public sealed record ProofStudioRequest
{
    /// <summary>Verdict rationale from the explainability module.</summary>
    public required VerdictRationale Rationale { get; init; }

    /// <summary>Per-factor score explanations from the scoring engine.</summary>
    public IReadOnlyList<ScoreFactorInput>? ScoreFactors { get; init; }

    /// <summary>Composite score (0-100).</summary>
    public int? CompositeScore { get; init; }

    /// <summary>Action bucket label (e.g., "Act Now").</summary>
    public string? ActionBucket { get; init; }

    /// <summary>Guardrail applications, if any.</summary>
    public IReadOnlyList<GuardrailInput>? Guardrails { get; init; }

    /// <summary>Entropy value (0-1) for determinization decisions.</summary>
    public double? Entropy { get; init; }

    /// <summary>Whether manual review is required.</summary>
    public bool NeedsReview { get; init; }
}
|
||||
|
||||
/// <summary>
/// Score factor input from the scoring engine. Plain-BCL counterpart of
/// <see cref="FactorContribution"/>, used on the request side.
/// </summary>
public sealed record ScoreFactorInput
{
    /// <summary>Factor identifier (e.g., "reachability", "evidence").</summary>
    public required string Factor { get; init; }

    /// <summary>Raw factor value (0-100).</summary>
    public required int Value { get; init; }

    /// <summary>Weight applied to this factor (0-1). Defaults to 0.</summary>
    public double Weight { get; init; }

    /// <summary>Confidence in this factor's accuracy (0-1). Defaults to 1.0.</summary>
    public double Confidence { get; init; } = 1.0;

    /// <summary>Human-readable explanation of the factor's score.</summary>
    public required string Reason { get; init; }

    /// <summary>Whether this factor is subtractive (reduces risk).</summary>
    public bool IsSubtractive { get; init; }

    /// <summary>Contributing evidence digests; the first is surfaced as the
    /// factor's evidence source in the dashboard.</summary>
    public IReadOnlyList<string>? ContributingDigests { get; init; }
}
|
||||
|
||||
/// <summary>
/// Guardrail application input from the scoring engine. Plain-BCL
/// counterpart of <see cref="GuardrailApplication"/>, used on the
/// request side.
/// </summary>
public sealed record GuardrailInput
{
    /// <summary>Guardrail name.</summary>
    public required string Name { get; init; }

    /// <summary>Score before this guardrail was applied.</summary>
    public int ScoreBefore { get; init; }

    /// <summary>Score after this guardrail was applied.</summary>
    public int ScoreAfter { get; init; }

    /// <summary>Human-readable reason the guardrail triggered.</summary>
    public required string Reason { get; init; }

    /// <summary>Conditions that caused the guardrail to fire, if known.</summary>
    public IReadOnlyList<string>? Conditions { get; init; }
}
|
||||
|
||||
/// <summary>
/// Complete proof studio view combining graph and dashboard.
/// </summary>
public sealed record ProofStudioView
{
    /// <summary>The proof graph DAG.</summary>
    [JsonPropertyName("proof_graph")]
    public required ProofGraph ProofGraph { get; init; }

    /// <summary>The score breakdown dashboard; null when the composing
    /// request carried no score factor data.</summary>
    [JsonPropertyName("score_breakdown")]
    public ScoreBreakdownDashboard? ScoreBreakdown { get; init; }

    /// <summary>When this view was composed (UTC).</summary>
    [JsonPropertyName("composed_at")]
    public required DateTimeOffset ComposedAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// Default implementation of <see cref="IProofStudioService"/>.
/// Orchestrates proof graph construction and score breakdown composition,
/// and emits counters for composed views and applied counterfactuals.
/// </summary>
public sealed class ProofStudioService : IProofStudioService
{
    private readonly IProofGraphBuilder _graphBuilder;
    private readonly ILogger<ProofStudioService> _logger;
    private readonly Counter<long> _viewsComposed;
    private readonly Counter<long> _counterfactualsApplied;

    public ProofStudioService(
        IProofGraphBuilder graphBuilder,
        ILogger<ProofStudioService> logger,
        IMeterFactory meterFactory)
    {
        ArgumentNullException.ThrowIfNull(graphBuilder);
        ArgumentNullException.ThrowIfNull(logger);
        ArgumentNullException.ThrowIfNull(meterFactory);

        _graphBuilder = graphBuilder;
        _logger = logger;

        var meter = meterFactory.Create("StellaOps.Policy.Explainability.ProofStudio");
        _viewsComposed = meter.CreateCounter<long>(
            "stellaops.proofstudio.views_composed_total",
            description: "Total proof studio views composed");
        _counterfactualsApplied = meter.CreateCounter<long>(
            "stellaops.proofstudio.counterfactuals_applied_total",
            description: "Total counterfactual scenarios applied");
    }

    /// <inheritdoc/>
    public ProofStudioView Compose(ProofStudioRequest request)
    {
        ArgumentNullException.ThrowIfNull(request);

        var now = DateTimeOffset.UtcNow;

        // Build the score breakdown dashboard only when factor data is present.
        ScoreBreakdownDashboard? dashboard = null;
        if (request.ScoreFactors is { Count: > 0 })
        {
            dashboard = BuildDashboard(request, now);
        }

        // Build the proof graph from the rationale (and dashboard, if any).
        var graphInput = new ProofGraphInput
        {
            Rationale = request.Rationale,
            ScoreBreakdown = dashboard,
            ComputedAt = now
        };

        var proofGraph = _graphBuilder.Build(graphInput);

        _viewsComposed.Add(1);
        _logger.LogDebug(
            "Composed proof studio view {GraphId} with {HasDashboard} dashboard",
            proofGraph.GraphId, dashboard is not null);

        return new ProofStudioView
        {
            ProofGraph = proofGraph,
            ScoreBreakdown = dashboard,
            ComposedAt = now
        };
    }

    /// <inheritdoc/>
    public ProofStudioView ApplyCounterfactual(
        ProofStudioView current,
        CounterfactualScenario scenario)
    {
        ArgumentNullException.ThrowIfNull(current);
        ArgumentNullException.ThrowIfNull(scenario);

        var overlayGraph = _graphBuilder.AddCounterfactualOverlay(
            current.ProofGraph, scenario);

        _counterfactualsApplied.Add(1);
        _logger.LogDebug(
            "Applied counterfactual '{Label}' to graph {GraphId}",
            scenario.Label, current.ProofGraph.GraphId);

        // Record `with` keeps the dashboard; only the graph and timestamp change.
        return current with
        {
            ProofGraph = overlayGraph,
            ComposedAt = DateTimeOffset.UtcNow
        };
    }

    // ── Private helpers ──────────────────────────────────────────────────

    /// <summary>
    /// Builds the per-factor score breakdown dashboard. Callers must have
    /// verified that <paramref name="request"/>.ScoreFactors is non-empty.
    /// </summary>
    private static ScoreBreakdownDashboard BuildDashboard(
        ProofStudioRequest request,
        DateTimeOffset computedAt)
    {
        var factorInputs = request.ScoreFactors!;

        // Total weighted mass across all factors, used to derive each
        // factor's percentage share of the composite. Subtractive factors
        // still count toward the denominator.
        var totalWeighted = factorInputs.Sum(f => f.Value * f.Weight);

        var factors = factorInputs
            .Select(f => new FactorContribution
            {
                FactorId = f.Factor,
                FactorName = FormatFactorName(f.Factor),
                RawScore = f.Value,
                Weight = f.Weight,
                Confidence = f.Confidence,
                IsSubtractive = f.IsSubtractive,
                EvidenceSource = f.ContributingDigests?.FirstOrDefault(),
                Explanation = f.Reason,
                // Fix: this was previously never set, so every factor
                // reported 0% of total in the dashboard.
                PercentageOfTotal = totalWeighted > 0
                    ? f.Value * f.Weight / totalWeighted * 100.0
                    : 0.0
            })
            .ToImmutableArray();

        var guardrails = (request.Guardrails ?? [])
            .Select(g => new GuardrailApplication
            {
                GuardrailName = g.Name,
                ScoreBefore = g.ScoreBefore,
                ScoreAfter = g.ScoreAfter,
                Reason = g.Reason,
                Conditions = g.Conditions is not null
                    ? [.. g.Conditions]
                    : []
            })
            .ToImmutableArray();

        return new ScoreBreakdownDashboard
        {
            DashboardId = $"dash:{Guid.CreateVersion7():N}",
            VerdictRef = request.Rationale.VerdictRef,
            CompositeScore = request.CompositeScore ?? 0,
            ActionBucket = request.ActionBucket ?? "Unknown",
            Factors = factors,
            GuardrailsApplied = guardrails,
            // Fix: the composite is the post-guardrail score, so when
            // guardrails fired the pre-guardrail score is the score before
            // the first one was applied (inputs are assumed to be in
            // application order — confirm with the scoring engine).
            PreGuardrailScore = guardrails.Length > 0
                ? guardrails[0].ScoreBefore
                : request.CompositeScore ?? 0,
            Entropy = request.Entropy ?? 0.0,
            NeedsReview = request.NeedsReview,
            ComputedAt = computedAt
        };
    }

    /// <summary>
    /// Maps a factor identifier (long or short form) to a display name;
    /// unknown identifiers are title-cased as a fallback.
    /// </summary>
    private static string FormatFactorName(string factorId)
    {
        return factorId switch
        {
            "reachability" or "rch" => "Reachability",
            "evidence" or "evd" => "Evidence",
            "provenance" or "prv" => "Provenance",
            "baseSeverity" or "sev" => "Base Severity",
            "runtimeSignal" or "rts" => "Runtime Signal",
            "mitigation" or "mit" => "Mitigation",
            "exploit" or "exp" => "Exploit Maturity",
            "temporal" or "tmp" => "Temporal",
            _ => factorId.Length > 0
                ? char.ToUpperInvariant(factorId[0]) + factorId[1..]
                : factorId
        };
    }
}
|
||||
@@ -0,0 +1,131 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ScoreBreakdownDashboard.cs
|
||||
// Sprint: SPRINT_20260208_049_Policy_proof_studio_ux
|
||||
// Task: T1 - Score breakdown dashboard data models
|
||||
// Description: Per-factor score breakdown for dashboard visualization.
|
||||
// Produces chart-ready data showing how each scoring dimension
|
||||
// contributes to the final verdict score.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Explainability;
|
||||
|
||||
/// <summary>
/// Complete score breakdown for dashboard visualization, showing
/// per-factor contributions to the final verdict score.
/// </summary>
public sealed record ScoreBreakdownDashboard
{
    /// <summary>
    /// Dashboard identifier.
    /// NOTE(review): described as content-addressed, but the visible
    /// producer generates a random v7 GUID ("dash:{guid}") — confirm.
    /// </summary>
    [JsonPropertyName("dashboard_id")]
    public required string DashboardId { get; init; }

    /// <summary>Reference to the verdict being broken down.</summary>
    [JsonPropertyName("verdict_ref")]
    public required VerdictReference VerdictRef { get; init; }

    /// <summary>Overall composite score (0-100).</summary>
    [JsonPropertyName("composite_score")]
    public required int CompositeScore { get; init; }

    /// <summary>Action bucket label (e.g., "Act Now", "Schedule Next").</summary>
    [JsonPropertyName("action_bucket")]
    public required string ActionBucket { get; init; }

    /// <summary>Per-factor contribution breakdown for chart rendering.</summary>
    [JsonPropertyName("factors")]
    public required ImmutableArray<FactorContribution> Factors { get; init; }

    /// <summary>Guardrails that were applied; empty when none fired.</summary>
    [JsonPropertyName("guardrails_applied")]
    public ImmutableArray<GuardrailApplication> GuardrailsApplied { get; init; } = [];

    /// <summary>Score before guardrails were applied.</summary>
    [JsonPropertyName("pre_guardrail_score")]
    public int? PreGuardrailScore { get; init; }

    /// <summary>Entropy level (0-1) for determinization decisions.</summary>
    [JsonPropertyName("entropy")]
    public double? Entropy { get; init; }

    /// <summary>Whether this verdict needs manual review based on entropy.</summary>
    [JsonPropertyName("needs_review")]
    public bool NeedsReview { get; init; }

    /// <summary>When the breakdown was computed (UTC).</summary>
    [JsonPropertyName("computed_at")]
    public required DateTimeOffset ComputedAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// Individual factor contribution to the composite score.
/// </summary>
public sealed record FactorContribution
{
    /// <summary>Factor identifier (e.g., "rch", "rts", "bkp").</summary>
    [JsonPropertyName("factor_id")]
    public required string FactorId { get; init; }

    /// <summary>Human-readable factor name.</summary>
    [JsonPropertyName("factor_name")]
    public required string FactorName { get; init; }

    /// <summary>Raw normalized score for this factor (0-100).</summary>
    [JsonPropertyName("raw_score")]
    public required int RawScore { get; init; }

    /// <summary>Weight assigned to this factor (0.0-1.0).</summary>
    [JsonPropertyName("weight")]
    public required double Weight { get; init; }

    /// <summary>
    /// Weighted contribution to the composite score. Computed on the fly
    /// as RawScore * Weight; serialized but never stored.
    /// </summary>
    [JsonPropertyName("weighted_contribution")]
    public double WeightedContribution => RawScore * Weight;

    /// <summary>Confidence level for this factor (0.0-1.0).</summary>
    [JsonPropertyName("confidence")]
    public required double Confidence { get; init; }

    /// <summary>
    /// Whether this is a subtractive factor (reduces risk).
    /// </summary>
    [JsonPropertyName("is_subtractive")]
    public bool IsSubtractive { get; init; }

    /// <summary>Source of the evidence for this factor, if known.</summary>
    [JsonPropertyName("evidence_source")]
    public string? EvidenceSource { get; init; }

    /// <summary>Human-readable explanation of the score.</summary>
    [JsonPropertyName("explanation")]
    public required string Explanation { get; init; }

    /// <summary>
    /// Percentage of composite that this factor contributes.
    /// Defaults to 0 when the producer does not populate it.
    /// </summary>
    [JsonPropertyName("percentage_of_total")]
    public double PercentageOfTotal { get; init; }
}
|
||||
|
||||
/// <summary>
/// Record of a guardrail being applied to the score.
/// </summary>
public sealed record GuardrailApplication
{
    /// <summary>Guardrail name (e.g., "notAffectedCap", "runtimeFloor").</summary>
    [JsonPropertyName("guardrail_name")]
    public required string GuardrailName { get; init; }

    /// <summary>Score before this guardrail was applied.</summary>
    [JsonPropertyName("score_before")]
    public required int ScoreBefore { get; init; }

    /// <summary>Score after this guardrail was applied.</summary>
    [JsonPropertyName("score_after")]
    public required int ScoreAfter { get; init; }

    /// <summary>Human-readable reason the guardrail triggered.</summary>
    [JsonPropertyName("reason")]
    public required string Reason { get; init; }

    /// <summary>Conditions that caused the guardrail to fire; empty when unknown.</summary>
    [JsonPropertyName("conditions")]
    public ImmutableArray<string> Conditions { get; init; } = [];
}
|
||||
@@ -7,6 +7,8 @@ public static class ExplainabilityServiceCollectionExtensions
|
||||
/// <summary>
/// Registers verdict explainability services: rationale rendering,
/// proof graph construction, and proof studio composition.
/// All registrations are singletons.
/// </summary>
public static IServiceCollection AddVerdictExplainability(this IServiceCollection services)
{
    services.AddSingleton<IVerdictRationaleRenderer, VerdictRationaleRenderer>();
    services.AddSingleton<IProofGraphBuilder, ProofGraphBuilder>();
    services.AddSingleton<IProofStudioService, ProofStudioService>();
    return services;
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,153 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IPolicyDiffMerge.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - Policy diff/merge interface
|
||||
// Description: Interface for diffing and merging PolicyPackDocuments.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
|
||||
namespace StellaOps.Policy.Interop.Abstractions;
|
||||
|
||||
/// <summary>
/// Computes structural diffs between two PolicyPackDocuments and merges packs.
/// </summary>
public interface IPolicyDiffMerge
{
    /// <summary>
    /// Computes a structural diff between two policy pack documents.
    /// Returns a list of changes (additions, removals, modifications).
    /// </summary>
    /// <param name="baseline">The document treated as the "before" state.</param>
    /// <param name="updated">The document treated as the "after" state.</param>
    PolicyDiffResult Diff(PolicyPackDocument baseline, PolicyPackDocument updated);

    /// <summary>
    /// Merges two policy pack documents according to the specified strategy.
    /// </summary>
    /// <param name="baseDoc">Base document; supplies defaults on conflict under BaseWins.</param>
    /// <param name="overlay">Overlay document; wins on conflict under the default OverlayWins.</param>
    /// <param name="strategy">Conflict resolution strategy.</param>
    PolicyMergeResult Merge(
        PolicyPackDocument baseDoc,
        PolicyPackDocument overlay,
        PolicyMergeStrategy strategy = PolicyMergeStrategy.OverlayWins);
}
|
||||
|
||||
/// <summary>
/// Result of a policy diff operation.
/// </summary>
public sealed record PolicyDiffResult
{
    /// <summary>Whether the two documents are identical (no changes found).</summary>
    public required bool AreIdentical { get; init; }

    /// <summary>Ordered list of changes between baseline and updated; empty when identical.</summary>
    public IReadOnlyList<PolicyChange> Changes { get; init; } = [];

    /// <summary>Summary statistics about the diff.</summary>
    public required PolicyDiffSummary Summary { get; init; }
}
|
||||
|
||||
/// <summary>
/// A single change between two policy documents.
/// </summary>
public sealed record PolicyChange
{
    /// <summary>Type of change (added, removed, or modified).</summary>
    public required PolicyChangeType ChangeType { get; init; }

    /// <summary>Category: "gate", "rule", "setting", "metadata".</summary>
    public required string Category { get; init; }

    /// <summary>Path to the changed element (e.g., "gates[cvss-threshold].config.threshold").</summary>
    public required string Path { get; init; }

    /// <summary>Previous value (null for additions).</summary>
    public object? OldValue { get; init; }

    /// <summary>New value (null for removals).</summary>
    public object? NewValue { get; init; }

    /// <summary>Human-readable description of the change.</summary>
    public required string Description { get; init; }
}
|
||||
|
||||
/// <summary>
/// Type of policy change reported by <see cref="IPolicyDiffMerge.Diff"/>.
/// </summary>
public enum PolicyChangeType
{
    /// <summary>Element was added in the updated document.</summary>
    Added,

    /// <summary>Element was removed in the updated document.</summary>
    Removed,

    /// <summary>Element was modified between documents.</summary>
    Modified
}
|
||||
|
||||
/// <summary>
/// Summary statistics for a policy diff.
/// </summary>
public sealed record PolicyDiffSummary
{
    /// <summary>Number of additions.</summary>
    public int Additions { get; init; }

    /// <summary>Number of removals.</summary>
    public int Removals { get; init; }

    /// <summary>Number of modifications.</summary>
    public int Modifications { get; init; }

    /// <summary>Total number of changes; derived from the three counts.</summary>
    public int Total => Additions + Removals + Modifications;
}
|
||||
|
||||
/// <summary>
/// Strategy for merging two policy pack documents.
/// </summary>
public enum PolicyMergeStrategy
{
    /// <summary>Overlay values win on conflict (the default for merges).</summary>
    OverlayWins,

    /// <summary>Base values win on conflict.</summary>
    BaseWins,

    /// <summary>Fail the merge on any conflict.</summary>
    FailOnConflict
}
|
||||
|
||||
/// <summary>
/// Result of a policy merge operation.
/// </summary>
public sealed record PolicyMergeResult
{
    /// <summary>Whether the merge succeeded.</summary>
    public required bool Success { get; init; }

    /// <summary>Merged document (null if the merge failed).</summary>
    public PolicyPackDocument? Document { get; init; }

    /// <summary>
    /// Conflicts encountered during merge. May be non-empty even on success
    /// when conflicts were resolved by the OverlayWins/BaseWins strategy.
    /// </summary>
    public IReadOnlyList<PolicyMergeConflict> Conflicts { get; init; } = [];

    /// <summary>Diagnostics (errors/warnings) from the merge operation.</summary>
    public IReadOnlyList<PolicyDiagnostic> Diagnostics { get; init; } = [];
}
|
||||
|
||||
/// <summary>
/// A conflict encountered during policy merge.
/// </summary>
public sealed record PolicyMergeConflict
{
    /// <summary>Path to the conflicting element.</summary>
    public required string Path { get; init; }

    /// <summary>Value from the base document.</summary>
    public object? BaseValue { get; init; }

    /// <summary>Value from the overlay document.</summary>
    public object? OverlayValue { get; init; }

    /// <summary>Human-readable description of the conflict.</summary>
    public required string Description { get; init; }
}
|
||||
@@ -0,0 +1,43 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IPolicyYamlExporter.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - YAML export interface
|
||||
// Description: Interface for YAML export of PolicyPackDocuments.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
|
||||
namespace StellaOps.Policy.Interop.Abstractions;
|
||||
|
||||
/// <summary>
/// Exports native C# policy packs to YAML format.
/// </summary>
public interface IPolicyYamlExporter
{
    /// <summary>
    /// Exports the given policy pack document to canonical YAML format.
    /// The output is deterministic: same input produces byte-identical output.
    /// </summary>
    /// <param name="document">The policy pack to export.</param>
    /// <param name="request">Export options.</param>
    /// <param name="ct">Cancellation token.</param>
    Task<YamlExportResult> ExportToYamlAsync(
        PolicyPackDocument document,
        PolicyExportRequest request,
        CancellationToken ct = default);
}
|
||||
|
||||
/// <summary>
/// Result of a YAML export operation.
/// </summary>
public sealed record YamlExportResult
{
    /// <summary>Whether export succeeded.</summary>
    public required bool Success { get; init; }

    /// <summary>
    /// Generated YAML content. Required even on failure —
    /// NOTE(review): confirm producers supply an empty string when
    /// Success is false.
    /// </summary>
    public required string YamlContent { get; init; }

    /// <summary>SHA-256 digest of the generated YAML, if computed.</summary>
    public string? Digest { get; init; }

    /// <summary>Warnings generated during export.</summary>
    public IReadOnlyList<string> Warnings { get; init; } = [];
}
|
||||
@@ -320,12 +320,14 @@ public static class PolicyFormats
|
||||
{
|
||||
/// <summary>Supported policy export/import format identifiers.</summary>
public const string Json = "json";
public const string Rego = "rego";
public const string Yaml = "yaml";

/// <summary>All supported formats, in declaration order.</summary>
// Fix: the flattened diff left both the old ([Json, Rego]) and new
// declarations of this field, which is a duplicate-member compile error.
public static readonly IReadOnlyList<string> All = [Json, Rego, Yaml];

/// <summary>Case-insensitive check that <paramref name="format"/> is supported.</summary>
// Fix: the flattened diff left the superseded two-clause body terminated
// with ';' followed by the new clauses, producing invalid statements.
public static bool IsValid(string format) =>
    string.Equals(format, Json, StringComparison.OrdinalIgnoreCase) ||
    string.Equals(format, Rego, StringComparison.OrdinalIgnoreCase) ||
    string.Equals(format, Yaml, StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Policy.Interop.Abstractions;
|
||||
using StellaOps.Policy.Interop.DiffMerge;
|
||||
using StellaOps.Policy.Interop.Export;
|
||||
using StellaOps.Policy.Interop.Import;
|
||||
|
||||
namespace StellaOps.Policy.Interop.DependencyInjection;
|
||||
|
||||
@@ -10,13 +14,26 @@ public static class PolicyInteropServiceCollectionExtensions
|
||||
{
|
||||
/// <summary>
/// Adds Policy Interop services to the service collection.
/// Registers: IPolicyExporter, IPolicyImporter (JSON + YAML),
/// IPolicyYamlExporter, IPolicyDiffMerge, IPolicyValidator,
/// IPolicyEvaluator, IRegoCodeGenerator, IRemediationResolver.
/// </summary>
public static IServiceCollection AddPolicyInterop(this IServiceCollection services)
{
    // JSON export/import
    services.TryAddSingleton<IPolicyExporter, JsonPolicyExporter>();
    services.TryAddSingleton<JsonPolicyImporter>();

    // YAML export/import
    services.TryAddSingleton<IPolicyYamlExporter, YamlPolicyExporter>();
    services.TryAddSingleton<YamlPolicyImporter>();

    // Register both importers as IPolicyImporter (JSON is the primary/default).
    // NOTE(review): this creates a second JsonPolicyImporter singleton,
    // distinct from the concrete registration above — confirm intended.
    services.TryAddSingleton<IPolicyImporter, JsonPolicyImporter>();

    // Policy diff/merge engine
    services.TryAddSingleton<IPolicyDiffMerge, PolicyDiffMergeEngine>();

    return services;
}
|
||||
|
||||
|
||||
@@ -0,0 +1,639 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PolicyDiffMergeEngine.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - Policy diff/merge implementation
|
||||
// Description: Structural diff and merge engine for PolicyPackDocuments.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using StellaOps.Policy.Interop.Abstractions;
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
|
||||
namespace StellaOps.Policy.Interop.DiffMerge;
|
||||
|
||||
/// <summary>
|
||||
/// Computes structural diffs and merges for PolicyPackDocuments.
|
||||
/// All operations are deterministic and offline-safe.
|
||||
/// </summary>
|
||||
public sealed class PolicyDiffMergeEngine : IPolicyDiffMerge
|
||||
{
|
||||
/// <inheritdoc/>
public PolicyDiffResult Diff(PolicyPackDocument baseline, PolicyPackDocument updated)
{
    var delta = new List<PolicyChange>();

    // Each section-level diff appends its findings to the shared list.
    DiffMetadata(baseline.Metadata, updated.Metadata, delta);
    DiffSettings(baseline.Spec.Settings, updated.Spec.Settings, delta);
    DiffGates(baseline.Spec.Gates, updated.Spec.Gates, delta);
    DiffRules(baseline.Spec.Rules, updated.Spec.Rules, delta);

    // Tally change types in a single pass for the summary.
    int added = 0, removed = 0, modified = 0;
    foreach (var change in delta)
    {
        switch (change.ChangeType)
        {
            case PolicyChangeType.Added:
                added++;
                break;
            case PolicyChangeType.Removed:
                removed++;
                break;
            case PolicyChangeType.Modified:
                modified++;
                break;
        }
    }

    return new PolicyDiffResult
    {
        AreIdentical = delta.Count == 0,
        Changes = delta,
        Summary = new PolicyDiffSummary
        {
            Additions = added,
            Removals = removed,
            Modifications = modified
        }
    };
}
|
||||
|
||||
/// <inheritdoc/>
public PolicyMergeResult Merge(
    PolicyPackDocument baseDoc,
    PolicyPackDocument overlay,
    PolicyMergeStrategy strategy = PolicyMergeStrategy.OverlayWins)
{
    var conflicts = new List<PolicyMergeConflict>();
    var diagnostics = new List<PolicyDiagnostic>();

    // Merge each section independently; the helpers record any conflicts
    // they encounter into the shared list.
    var mergedMeta = MergeMetadata(baseDoc.Metadata, overlay.Metadata, strategy, conflicts);

    var mergedSettings = MergeSettings(baseDoc.Spec.Settings, overlay.Spec.Settings, strategy, conflicts);

    var mergedGates = MergeGates(baseDoc.Spec.Gates, overlay.Spec.Gates, strategy, conflicts);

    var mergedRules = MergeRules(baseDoc.Spec.Rules, overlay.Spec.Rules, strategy, conflicts);

    // Under FailOnConflict, any recorded conflict aborts the merge.
    if (strategy == PolicyMergeStrategy.FailOnConflict && conflicts.Count > 0)
    {
        diagnostics.Add(new PolicyDiagnostic
        {
            Severity = PolicyDiagnostic.Severities.Error,
            Code = "MERGE_CONFLICT",
            Message = $"Merge failed: {conflicts.Count} conflict(s) found."
        });

        return new PolicyMergeResult
        {
            Success = false,
            Document = null,
            Conflicts = conflicts,
            Diagnostics = diagnostics
        };
    }

    // ApiVersion and Kind are always taken from the base document.
    var merged = new PolicyPackDocument
    {
        ApiVersion = baseDoc.ApiVersion,
        Kind = baseDoc.Kind,
        Metadata = mergedMeta,
        Spec = new PolicyPackSpec
        {
            Settings = mergedSettings,
            Gates = mergedGates,
            Rules = mergedRules
        }
    };

    // Conflicts resolved by OverlayWins/BaseWins are surfaced as a warning
    // so callers can audit what the strategy decided.
    if (conflicts.Count > 0)
    {
        diagnostics.Add(new PolicyDiagnostic
        {
            Severity = PolicyDiagnostic.Severities.Warning,
            Code = "MERGE_CONFLICTS_RESOLVED",
            Message = $"{conflicts.Count} conflict(s) resolved using {strategy} strategy."
        });
    }

    return new PolicyMergeResult
    {
        Success = true,
        Document = merged,
        Conflicts = conflicts,
        Diagnostics = diagnostics
    };
}
|
||||
|
||||
#region Diff Methods
|
||||
|
||||
/// <summary>
/// Appends change entries for metadata-level differences (name, version,
/// description) between a baseline and an updated policy pack.
/// </summary>
private static void DiffMetadata(
    PolicyPackMetadata baseline, PolicyPackMetadata updated, List<PolicyChange> changes)
{
    // Local helper removes the triplicated PolicyChange construction.
    void AddModified(string field, object? oldValue, object? newValue, string description) =>
        changes.Add(new PolicyChange
        {
            ChangeType = PolicyChangeType.Modified,
            Category = "metadata",
            Path = $"metadata.{field}",
            OldValue = oldValue,
            NewValue = newValue,
            Description = description
        });

    if (baseline.Name != updated.Name)
    {
        AddModified("name", baseline.Name, updated.Name,
            $"Name changed from '{baseline.Name}' to '{updated.Name}'.");
    }

    if (baseline.Version != updated.Version)
    {
        AddModified("version", baseline.Version, updated.Version,
            $"Version changed from '{baseline.Version}' to '{updated.Version}'.");
    }

    if (baseline.Description != updated.Description)
    {
        // Descriptions can be long; the old/new values carry the detail.
        AddModified("description", baseline.Description, updated.Description,
            "Description changed.");
    }
}
|
||||
|
||||
/// <summary>
/// Appends change entries for pack-level setting differences (default action,
/// unknowns threshold, stop-on-first-failure, deterministic mode).
/// </summary>
private static void DiffSettings(
    PolicyPackSettings baseline, PolicyPackSettings updated, List<PolicyChange> changes)
{
    // Local helper removes the quadruplicated PolicyChange construction.
    void AddModified(string field, object? oldValue, object? newValue, string description) =>
        changes.Add(new PolicyChange
        {
            ChangeType = PolicyChangeType.Modified,
            Category = "setting",
            Path = $"spec.settings.{field}",
            OldValue = oldValue,
            NewValue = newValue,
            Description = description
        });

    if (baseline.DefaultAction != updated.DefaultAction)
    {
        AddModified("defaultAction", baseline.DefaultAction, updated.DefaultAction,
            $"Default action changed from '{baseline.DefaultAction}' to '{updated.DefaultAction}'.");
    }

    // Tolerance comparison avoids flagging spurious diffs from floating-point noise.
    if (Math.Abs(baseline.UnknownsThreshold - updated.UnknownsThreshold) > 1e-10)
    {
        AddModified("unknownsThreshold", baseline.UnknownsThreshold, updated.UnknownsThreshold,
            $"Unknowns threshold changed from {baseline.UnknownsThreshold} to {updated.UnknownsThreshold}.");
    }

    if (baseline.StopOnFirstFailure != updated.StopOnFirstFailure)
    {
        AddModified("stopOnFirstFailure", baseline.StopOnFirstFailure, updated.StopOnFirstFailure,
            $"StopOnFirstFailure changed from {baseline.StopOnFirstFailure} to {updated.StopOnFirstFailure}.");
    }

    if (baseline.DeterministicMode != updated.DeterministicMode)
    {
        AddModified("deterministicMode", baseline.DeterministicMode, updated.DeterministicMode,
            $"DeterministicMode changed from {baseline.DeterministicMode} to {updated.DeterministicMode}.");
    }
}
|
||||
|
||||
/// <summary>
/// Diffs gate definitions by id: reports removed, added, and modified gates,
/// including per-key config differences. Output order is deterministic
/// (ordinal sort on gate id).
/// </summary>
private static void DiffGates(
    IReadOnlyList<PolicyGateDefinition> baselineGates,
    IReadOnlyList<PolicyGateDefinition> updatedGates,
    List<PolicyChange> changes)
{
    var before = baselineGates.ToDictionary(g => g.Id);
    var after = updatedGates.ToDictionary(g => g.Id);

    // Shared shape for every gate-level change entry.
    void Record(PolicyChangeType kind, string path, object? oldVal, object? newVal, string text) =>
        changes.Add(new PolicyChange
        {
            ChangeType = kind,
            Category = "gate",
            Path = path,
            OldValue = oldVal,
            NewValue = newVal,
            Description = text
        });

    // Gates present only in the baseline -> removed.
    foreach (var id in before.Keys.Except(after.Keys).OrderBy(k => k, StringComparer.Ordinal))
    {
        Record(PolicyChangeType.Removed, $"spec.gates[{id}]",
            before[id].Type, null,
            $"Gate '{id}' ({before[id].Type}) removed.");
    }

    // Gates present only in the update -> added.
    foreach (var id in after.Keys.Except(before.Keys).OrderBy(k => k, StringComparer.Ordinal))
    {
        Record(PolicyChangeType.Added, $"spec.gates[{id}]",
            null, after[id].Type,
            $"Gate '{id}' ({after[id].Type}) added.");
    }

    // Gates present in both -> field-by-field comparison.
    foreach (var id in before.Keys.Intersect(after.Keys).OrderBy(k => k, StringComparer.Ordinal))
    {
        var old = before[id];
        var cur = after[id];

        if (old.Enabled != cur.Enabled)
        {
            Record(PolicyChangeType.Modified, $"spec.gates[{id}].enabled",
                old.Enabled, cur.Enabled,
                $"Gate '{id}' enabled changed from {old.Enabled} to {cur.Enabled}.");
        }

        if (old.Type != cur.Type)
        {
            Record(PolicyChangeType.Modified, $"spec.gates[{id}].type",
                old.Type, cur.Type,
                $"Gate '{id}' type changed from '{old.Type}' to '{cur.Type}'.");
        }

        // Config values get their own per-key diff entries.
        DiffDictionary(old.Config, cur.Config, $"spec.gates[{id}].config", "gate", changes);
    }
}
|
||||
|
||||
/// <summary>
/// Diffs rule definitions by name: reports removed, added, and modified rules,
/// covering action, priority, and per-key match-criteria differences.
/// Output order is deterministic (ordinal sort on rule name).
/// </summary>
private static void DiffRules(
    IReadOnlyList<PolicyRuleDefinition> baselineRules,
    IReadOnlyList<PolicyRuleDefinition> updatedRules,
    List<PolicyChange> changes)
{
    var before = baselineRules.ToDictionary(r => r.Name);
    var after = updatedRules.ToDictionary(r => r.Name);

    // Shared shape for every rule-level change entry.
    void Record(PolicyChangeType kind, string path, object? oldVal, object? newVal, string text) =>
        changes.Add(new PolicyChange
        {
            ChangeType = kind,
            Category = "rule",
            Path = path,
            OldValue = oldVal,
            NewValue = newVal,
            Description = text
        });

    // Rules present only in the baseline -> removed.
    foreach (var name in before.Keys.Except(after.Keys).OrderBy(k => k, StringComparer.Ordinal))
    {
        Record(PolicyChangeType.Removed, $"spec.rules[{name}]",
            before[name].Action, null,
            $"Rule '{name}' removed.");
    }

    // Rules present only in the update -> added.
    foreach (var name in after.Keys.Except(before.Keys).OrderBy(k => k, StringComparer.Ordinal))
    {
        Record(PolicyChangeType.Added, $"spec.rules[{name}]",
            null, after[name].Action,
            $"Rule '{name}' added.");
    }

    // Rules present in both -> field-by-field comparison.
    foreach (var name in before.Keys.Intersect(after.Keys).OrderBy(k => k, StringComparer.Ordinal))
    {
        var old = before[name];
        var cur = after[name];

        if (old.Action != cur.Action)
        {
            Record(PolicyChangeType.Modified, $"spec.rules[{name}].action",
                old.Action, cur.Action,
                $"Rule '{name}' action changed from '{old.Action}' to '{cur.Action}'.");
        }

        if (old.Priority != cur.Priority)
        {
            Record(PolicyChangeType.Modified, $"spec.rules[{name}].priority",
                old.Priority, cur.Priority,
                $"Rule '{name}' priority changed from {old.Priority} to {cur.Priority}.");
        }

        // Match criteria get their own per-key diff entries.
        DiffDictionary(old.Match, cur.Match, $"spec.rules[{name}].match", "rule", changes);
    }
}
|
||||
|
||||
/// <summary>
/// Diffs two config-style dictionaries key by key, appending one change entry
/// per removed, added, or modified key. Keys are visited in deterministic
/// ordinal order.
/// </summary>
private static void DiffDictionary(
    IReadOnlyDictionary<string, object?> baseDict,
    IReadOnlyDictionary<string, object?> updatedDict,
    string pathPrefix,
    string category,
    List<PolicyChange> changes)
{
    // Union of keys from both sides, ordinal-sorted for stable output.
    foreach (var key in baseDict.Keys.Union(updatedDict.Keys).OrderBy(k => k, StringComparer.Ordinal))
    {
        var inBase = baseDict.TryGetValue(key, out var before);
        var inUpdated = updatedDict.TryGetValue(key, out var after);

        // Classify the key's fate; null means "no change to record".
        var change = (inBase, inUpdated) switch
        {
            (true, false) => new PolicyChange
            {
                ChangeType = PolicyChangeType.Removed,
                Category = category,
                Path = $"{pathPrefix}.{key}",
                OldValue = before,
                NewValue = null,
                Description = $"Config key '{key}' removed."
            },
            (false, true) => new PolicyChange
            {
                ChangeType = PolicyChangeType.Added,
                Category = category,
                Path = $"{pathPrefix}.{key}",
                OldValue = null,
                NewValue = after,
                Description = $"Config key '{key}' added with value '{after}'."
            },
            (true, true) when !ValuesEqual(before, after) => new PolicyChange
            {
                ChangeType = PolicyChangeType.Modified,
                Category = category,
                Path = $"{pathPrefix}.{key}",
                OldValue = before,
                NewValue = after,
                Description = $"Config key '{key}' changed from '{before}' to '{after}'."
            },
            _ => null
        };

        if (change is not null)
        {
            changes.Add(change);
        }
    }
}
|
||||
|
||||
/// <summary>
/// Value equality for heterogeneous config values. JsonElement instances are
/// compared by raw JSON text (JsonElement has no value-based Equals);
/// everything else falls back to <see cref="object.Equals(object)"/>.
/// </summary>
private static bool ValuesEqual(object? a, object? b) =>
    (a, b) switch
    {
        (null, null) => true,
        (null, _) or (_, null) => false,
        // JsonElement comparison (from System.Text.Json deserialization).
        (JsonElement left, JsonElement right) => left.GetRawText() == right.GetRawText(),
        _ => a.Equals(b)
    };
|
||||
|
||||
#endregion
|
||||
|
||||
#region Merge Methods
|
||||
|
||||
/// <summary>
/// Merges pack metadata. Name and version go through conflict resolution;
/// description, parent, and environment prefer the overlay when present;
/// creation time and export provenance always come from the base.
/// </summary>
private static PolicyPackMetadata MergeMetadata(
    PolicyPackMetadata baseM,
    PolicyPackMetadata overlay,
    PolicyMergeStrategy strategy,
    List<PolicyMergeConflict> conflicts)
{
    var mergedName =
        ResolveConflict(baseM.Name, overlay.Name, "metadata.name", strategy, conflicts)
        ?? baseM.Name;
    var mergedVersion =
        ResolveConflict(baseM.Version, overlay.Version, "metadata.version", strategy, conflicts)
        ?? baseM.Version;

    return new PolicyPackMetadata
    {
        Name = mergedName,
        Version = mergedVersion,
        Description = overlay.Description ?? baseM.Description,
        Digest = null, // stale after a merge; recomputed on export
        CreatedAt = baseM.CreatedAt,
        ExportedFrom = baseM.ExportedFrom,
        Parent = overlay.Parent ?? baseM.Parent,
        Environment = overlay.Environment ?? baseM.Environment
    };
}
|
||||
|
||||
/// <summary>
/// Merges pack-level settings. DefaultAction goes through conflict
/// resolution; the remaining scalar settings are taken from the base under
/// BaseWins and from the overlay under every other strategy.
/// </summary>
/// <remarks>
/// NOTE(review): under FailOnConflict, differences in UnknownsThreshold,
/// StopOnFirstFailure, and DeterministicMode are NOT recorded as conflicts —
/// the overlay value is taken silently, unlike DefaultAction. Confirm this
/// asymmetry is intended.
/// </remarks>
private static PolicyPackSettings MergeSettings(
    PolicyPackSettings baseS,
    PolicyPackSettings overlay,
    PolicyMergeStrategy strategy,
    List<PolicyMergeConflict> conflicts)
{
    // ResolveConflict never yields null for non-null inputs; the fallback is defensive.
    var defaultAction = ResolveConflict(
        baseS.DefaultAction, overlay.DefaultAction,
        "spec.settings.defaultAction", strategy, conflicts) ?? baseS.DefaultAction;

    return new PolicyPackSettings
    {
        DefaultAction = defaultAction,
        UnknownsThreshold = strategy == PolicyMergeStrategy.BaseWins
            ? baseS.UnknownsThreshold
            : overlay.UnknownsThreshold,
        StopOnFirstFailure = strategy == PolicyMergeStrategy.BaseWins
            ? baseS.StopOnFirstFailure
            : overlay.StopOnFirstFailure,
        DeterministicMode = strategy == PolicyMergeStrategy.BaseWins
            ? baseS.DeterministicMode
            : overlay.DeterministicMode
    };
}
|
||||
|
||||
/// <summary>
/// Merges gate lists by gate id. Gates present in both documents get their
/// configs merged key by key, with the winning record for other fields chosen
/// by strategy (BaseWins keeps the base gate's fields). Result order: base
/// gates in base order, then overlay-only gates in overlay order.
/// </summary>
private static List<PolicyGateDefinition> MergeGates(
    IReadOnlyList<PolicyGateDefinition> baseGates,
    IReadOnlyList<PolicyGateDefinition> overlayGates,
    PolicyMergeStrategy strategy,
    List<PolicyMergeConflict> conflicts)
{
    var baseById = baseGates.ToDictionary(g => g.Id);
    var overlayById = overlayGates.ToDictionary(g => g.Id);
    var merged = new List<PolicyGateDefinition>();

    foreach (var baseGate in baseGates)
    {
        if (!overlayById.TryGetValue(baseGate.Id, out var overlayGate))
        {
            // Base-only gate: carried through unchanged.
            merged.Add(baseGate);
            continue;
        }

        // Present on both sides: merge the config dictionaries first.
        var combinedConfig = MergeDictionaries(
            baseGate.Config, overlayGate.Config,
            $"spec.gates[{baseGate.Id}].config", strategy, conflicts);

        merged.Add(strategy == PolicyMergeStrategy.BaseWins
            ? baseGate with { Config = combinedConfig }
            : overlayGate with { Config = combinedConfig });
    }

    // Overlay-only gates are appended after all base gates.
    merged.AddRange(overlayGates.Where(g => !baseById.ContainsKey(g.Id)));

    return merged;
}
|
||||
|
||||
/// <summary>
/// Merges rule lists by rule name. For rules present on both sides,
/// FailOnConflict records a conflict (and keeps the base rule) when action or
/// priority differ; otherwise the strategy picks the winning rule wholesale.
/// Result order: base rules in base order, then overlay-only rules.
/// </summary>
private static List<PolicyRuleDefinition> MergeRules(
    IReadOnlyList<PolicyRuleDefinition> baseRules,
    IReadOnlyList<PolicyRuleDefinition> overlayRules,
    PolicyMergeStrategy strategy,
    List<PolicyMergeConflict> conflicts)
{
    var baseByName = baseRules.ToDictionary(r => r.Name);
    var overlayByName = overlayRules.ToDictionary(r => r.Name);
    var merged = new List<PolicyRuleDefinition>();

    foreach (var baseRule in baseRules)
    {
        if (!overlayByName.TryGetValue(baseRule.Name, out var overlayRule))
        {
            // Base-only rule: carried through unchanged.
            merged.Add(baseRule);
            continue;
        }

        var differs = baseRule.Action != overlayRule.Action
            || baseRule.Priority != overlayRule.Priority;

        if (strategy == PolicyMergeStrategy.FailOnConflict && differs)
        {
            conflicts.Add(new PolicyMergeConflict
            {
                Path = $"spec.rules[{baseRule.Name}]",
                BaseValue = $"action={baseRule.Action}, priority={baseRule.Priority}",
                OverlayValue = $"action={overlayRule.Action}, priority={overlayRule.Priority}",
                Description = $"Rule '{baseRule.Name}' differs between base and overlay."
            });
            // Keep the base rule; the caller decides whether to abort the merge.
            merged.Add(baseRule);
        }
        else
        {
            merged.Add(strategy == PolicyMergeStrategy.BaseWins ? baseRule : overlayRule);
        }
    }

    // Overlay-only rules are appended after all base rules.
    merged.AddRange(overlayRules.Where(r => !baseByName.ContainsKey(r.Name)));

    return merged;
}
|
||||
|
||||
/// <summary>
/// Merges two config-style dictionaries. Overlay-only keys are always adopted.
/// For keys present on both sides with differing values, FailOnConflict records
/// a conflict and keeps the base value, OverlayWins takes the overlay value,
/// and BaseWins keeps the base value.
/// </summary>
private static Dictionary<string, object?> MergeDictionaries(
    IReadOnlyDictionary<string, object?> baseDict,
    IReadOnlyDictionary<string, object?> overlayDict,
    string pathPrefix,
    PolicyMergeStrategy strategy,
    List<PolicyMergeConflict> conflicts)
{
    // Start from a copy of the base; overlay entries are folded in below.
    var result = new Dictionary<string, object?>(baseDict);

    foreach (var (key, overlayVal) in overlayDict)
    {
        // Single lookup replaces the original TryGetValue + ContainsKey pair.
        if (!result.TryGetValue(key, out var baseVal))
        {
            // Overlay-only key: always adopt it.
            result[key] = overlayVal;
            continue;
        }

        if (ValuesEqual(baseVal, overlayVal))
        {
            continue; // identical values, nothing to resolve
        }

        switch (strategy)
        {
            case PolicyMergeStrategy.FailOnConflict:
                conflicts.Add(new PolicyMergeConflict
                {
                    Path = $"{pathPrefix}.{key}",
                    BaseValue = baseVal,
                    OverlayValue = overlayVal,
                    Description = $"Config key '{key}' differs: base='{baseVal}', overlay='{overlayVal}'."
                });
                break;

            case PolicyMergeStrategy.OverlayWins:
                result[key] = overlayVal;
                break;

            // BaseWins: keep the existing value.
        }
    }

    return result;
}
|
||||
|
||||
/// <summary>
/// Resolves a string-valued field difference between base and overlay.
/// Equal values short-circuit; FailOnConflict records the conflict and returns
/// the base value (the caller aborts when conflicts exist); OverlayWins returns
/// the overlay value; BaseWins returns the base value.
/// </summary>
private static string? ResolveConflict(
    string baseVal,
    string overlayVal,
    string path,
    PolicyMergeStrategy strategy,
    List<PolicyMergeConflict> conflicts)
{
    // No difference: nothing to resolve.
    if (baseVal == overlayVal)
    {
        return baseVal;
    }

    switch (strategy)
    {
        case PolicyMergeStrategy.FailOnConflict:
            conflicts.Add(new PolicyMergeConflict
            {
                Path = path,
                BaseValue = baseVal,
                OverlayValue = overlayVal,
                Description = $"Conflict at '{path}': base='{baseVal}', overlay='{overlayVal}'."
            });
            return baseVal;

        case PolicyMergeStrategy.OverlayWins:
            return overlayVal;

        default: // BaseWins
            return baseVal;
    }
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,265 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// YamlPolicyExporter.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - YAML export support for PolicyPackDocument
|
||||
// Description: Exports PolicyPackDocuments to canonical YAML format.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Policy.Interop.Abstractions;
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using YamlDotNet.Serialization;
|
||||
using YamlDotNet.Serialization.NamingConventions;
|
||||
|
||||
namespace StellaOps.Policy.Interop.Export;
|
||||
|
||||
/// <summary>
/// Exports PolicyPackDocuments to canonical YAML format.
/// Output is deterministic: same input produces byte-identical output
/// (camelCase keys, sorted properties, consistent formatting).
/// </summary>
public sealed class YamlPolicyExporter : IPolicyYamlExporter
{
    // Shared serializer: camelCase keys, no YAML anchors/aliases, null values omitted.
    private static readonly ISerializer YamlSerializer = new SerializerBuilder()
        .WithNamingConvention(CamelCaseNamingConvention.Instance)
        .DisableAliases()
        .ConfigureDefaultValuesHandling(DefaultValuesHandling.OmitNull)
        .Build();

    // NOTE(review): not referenced anywhere in this class — possibly left over
    // from an earlier round-trip design; confirm before removing.
    private static readonly IDeserializer YamlDeserializer = new DeserializerBuilder()
        .WithNamingConvention(CamelCaseNamingConvention.Instance)
        .Build();

    /// <inheritdoc/>
    /// <remarks>
    /// Synchronous under the hood; wrapped in Task.FromResult to satisfy the
    /// async interface. Transformations apply in order: environment filtering,
    /// remediation stripping, serialization, digest computation.
    /// </remarks>
    public Task<YamlExportResult> ExportToYamlAsync(
        PolicyPackDocument document,
        PolicyExportRequest request,
        CancellationToken ct = default)
    {
        var exported = document;

        // Apply environment filter if specified
        if (request.Environment is not null)
        {
            exported = FilterByEnvironment(exported, request.Environment);
        }

        // Strip remediation if not requested
        if (!request.IncludeRemediation)
        {
            exported = StripRemediation(exported);
        }

        // Serialize to YAML
        var yamlContent = SerializeToYaml(exported);

        // Compute digest
        var digest = ComputeDigest(yamlContent);

        return Task.FromResult(new YamlExportResult
        {
            Success = true,
            YamlContent = yamlContent,
            Digest = digest,
            Warnings = []
        });
    }

    /// <summary>
    /// Serializes a PolicyPackDocument to canonical YAML string.
    /// </summary>
    public static string SerializeToYaml(PolicyPackDocument document)
    {
        // Convert to an intermediate dictionary to ensure consistent output
        // (SortedDictionary with ordinal comparison fixes key ordering).
        var intermediate = ConvertToSerializable(document);
        return YamlSerializer.Serialize(intermediate);
    }

    /// <summary>
    /// Serializes a PolicyPackDocument to canonical YAML bytes (UTF-8).
    /// </summary>
    public static byte[] SerializeCanonical(PolicyPackDocument document)
    {
        return Encoding.UTF8.GetBytes(SerializeToYaml(document));
    }

    // Top-level document -> ordered map: apiVersion, kind, metadata, spec.
    private static object ConvertToSerializable(PolicyPackDocument doc)
    {
        var result = new SortedDictionary<string, object?>(StringComparer.Ordinal)
        {
            ["apiVersion"] = doc.ApiVersion,
            ["kind"] = doc.Kind,
            ["metadata"] = ConvertMetadata(doc.Metadata),
            ["spec"] = ConvertSpec(doc.Spec)
        };
        return result;
    }

    // Metadata -> ordered map; optional fields are included only when present,
    // so absent fields never appear in the canonical output.
    private static object ConvertMetadata(PolicyPackMetadata meta)
    {
        var result = new SortedDictionary<string, object?>(StringComparer.Ordinal)
        {
            ["name"] = meta.Name,
            ["version"] = meta.Version
        };

        if (meta.Description is not null) result["description"] = meta.Description;
        if (meta.Digest is not null) result["digest"] = meta.Digest;
        // "O" = ISO 8601 round-trip format, culture-invariant.
        if (meta.CreatedAt is not null) result["createdAt"] = meta.CreatedAt.Value.ToString("O");
        if (meta.Parent is not null) result["parent"] = meta.Parent;
        if (meta.Environment is not null) result["environment"] = meta.Environment;

        if (meta.ExportedFrom is not null)
        {
            var provenance = new SortedDictionary<string, object?>(StringComparer.Ordinal)
            {
                ["engine"] = meta.ExportedFrom.Engine,
                ["engineVersion"] = meta.ExportedFrom.EngineVersion
            };
            if (meta.ExportedFrom.ExportedAt is not null)
                provenance["exportedAt"] = meta.ExportedFrom.ExportedAt.Value.ToString("O");
            result["exportedFrom"] = provenance;
        }

        return result;
    }

    // Spec -> ordered map; gates and rules keep their list order from the document.
    private static object ConvertSpec(PolicyPackSpec spec)
    {
        var result = new SortedDictionary<string, object?>(StringComparer.Ordinal)
        {
            ["settings"] = new SortedDictionary<string, object?>(StringComparer.Ordinal)
            {
                ["defaultAction"] = spec.Settings.DefaultAction,
                ["deterministicMode"] = spec.Settings.DeterministicMode,
                ["stopOnFirstFailure"] = spec.Settings.StopOnFirstFailure,
                ["unknownsThreshold"] = spec.Settings.UnknownsThreshold
            },
            ["gates"] = spec.Gates.Select(ConvertGate).ToList(),
            ["rules"] = spec.Rules.Select(ConvertRule).ToList()
        };
        return result;
    }

    // Gate -> ordered map; config/environments/remediation only when non-empty.
    private static object ConvertGate(PolicyGateDefinition gate)
    {
        var result = new SortedDictionary<string, object?>(StringComparer.Ordinal)
        {
            ["id"] = gate.Id,
            ["type"] = gate.Type,
            ["enabled"] = gate.Enabled
        };

        if (gate.Config.Count > 0)
            result["config"] = new SortedDictionary<string, object?>(gate.Config.ToDictionary(kv => kv.Key, kv => kv.Value), StringComparer.Ordinal);

        if (gate.Environments is not null)
        {
            // Environment names and each per-environment config are both ordinal-sorted.
            var envs = new SortedDictionary<string, object?>(StringComparer.Ordinal);
            foreach (var (env, cfg) in gate.Environments.OrderBy(e => e.Key, StringComparer.Ordinal))
            {
                envs[env] = new SortedDictionary<string, object?>(cfg.ToDictionary(kv => kv.Key, kv => kv.Value), StringComparer.Ordinal);
            }
            result["environments"] = envs;
        }

        if (gate.Remediation is not null)
            result["remediation"] = ConvertRemediation(gate.Remediation);

        return result;
    }

    // Rule -> ordered map; match/remediation only when non-empty.
    private static object ConvertRule(PolicyRuleDefinition rule)
    {
        var result = new SortedDictionary<string, object?>(StringComparer.Ordinal)
        {
            ["name"] = rule.Name,
            ["action"] = rule.Action,
            ["priority"] = rule.Priority
        };

        if (rule.Match.Count > 0)
            result["match"] = new SortedDictionary<string, object?>(rule.Match.ToDictionary(kv => kv.Key, kv => kv.Value), StringComparer.Ordinal);

        if (rule.Remediation is not null)
            result["remediation"] = ConvertRemediation(rule.Remediation);

        return result;
    }

    // RemediationHint -> ordered map; actions/references keep their list order.
    private static object ConvertRemediation(RemediationHint hint)
    {
        var result = new SortedDictionary<string, object?>(StringComparer.Ordinal)
        {
            ["code"] = hint.Code,
            ["title"] = hint.Title
        };
        if (hint.Description is not null) result["description"] = hint.Description;
        if (hint.Actions.Count > 0)
        {
            result["actions"] = hint.Actions.Select(a =>
            {
                var actionDict = new SortedDictionary<string, object?>(StringComparer.Ordinal)
                {
                    ["type"] = a.Type
                };
                if (a.Description is not null) actionDict["description"] = a.Description;
                if (a.Command is not null) actionDict["command"] = a.Command;
                return (object)actionDict;
            }).ToList();
        }
        if (hint.References.Count > 0)
        {
            result["references"] = hint.References.Select(r =>
            {
                var refDict = new SortedDictionary<string, object?>(StringComparer.Ordinal)
                {
                    ["title"] = r.Title
                };
                if (r.Url is not null) refDict["url"] = r.Url;
                return (object)refDict;
            }).ToList();
        }
        return result;
    }

    // Flattens per-environment gate config into the main config for the requested
    // environment (environment-specific values override base config keys), then
    // drops the environments map so the export is environment-agnostic.
    private static PolicyPackDocument FilterByEnvironment(PolicyPackDocument doc, string environment)
    {
        var filteredGates = doc.Spec.Gates.Select(g =>
        {
            // Gates without an override for this environment pass through unchanged.
            if (g.Environments is null || !g.Environments.ContainsKey(environment))
                return g;

            var envConfig = g.Environments[environment];
            var mergedConfig = new Dictionary<string, object?>(g.Config);
            foreach (var (key, value) in envConfig)
            {
                mergedConfig[key] = value;
            }

            return g with { Config = mergedConfig, Environments = null };
        }).ToList();

        return doc with
        {
            Spec = doc.Spec with { Gates = filteredGates }
        };
    }

    // Removes remediation hints from every gate and rule.
    private static PolicyPackDocument StripRemediation(PolicyPackDocument doc)
    {
        var gates = doc.Spec.Gates.Select(g => g with { Remediation = null }).ToList();
        var rules = doc.Spec.Rules.Select(r => r with { Remediation = null }).ToList();
        return doc with
        {
            Spec = doc.Spec with { Gates = gates, Rules = rules }
        };
    }

    // SHA-256 of the UTF-8 YAML text, formatted as "sha256:<lowercase hex>".
    // NOTE(review): Convert.ToHexStringLower requires .NET 9+; on older targets
    // use Convert.ToHexString(hash).ToLowerInvariant().
    private static string ComputeDigest(string content)
    {
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content));
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }
}
|
||||
@@ -45,6 +45,15 @@ public static class FormatDetector
|
||||
return PolicyFormats.Rego;
|
||||
}
|
||||
|
||||
// YAML detection: starts with apiVersion: or --- or has YAML-like key: value structure
|
||||
if (trimmed.StartsWith("---", StringComparison.Ordinal) ||
|
||||
trimmed.StartsWith("apiVersion:", StringComparison.Ordinal) ||
|
||||
(trimmed.Contains("apiVersion:", StringComparison.Ordinal) &&
|
||||
trimmed.Contains("kind:", StringComparison.Ordinal)))
|
||||
{
|
||||
return PolicyFormats.Yaml;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -58,6 +67,7 @@ public static class FormatDetector
|
||||
{
|
||||
".json" => PolicyFormats.Json,
|
||||
".rego" => PolicyFormats.Rego,
|
||||
".yaml" or ".yml" => PolicyFormats.Yaml,
|
||||
_ => null
|
||||
};
|
||||
}
|
||||
|
||||
@@ -0,0 +1,137 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// YamlPolicyImporter.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - YAML import support for PolicyPackDocument
|
||||
// Description: Imports PolicyPackDocuments from YAML format.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Policy.Interop.Abstractions;
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using YamlDotNet.Core;
|
||||
using YamlDotNet.Serialization;
|
||||
using YamlDotNet.Serialization.NamingConventions;
|
||||
|
||||
namespace StellaOps.Policy.Interop.Import;
|
||||
|
||||
/// <summary>
/// Imports PolicyPack v2 YAML documents into the native model.
/// Converts YAML to a JSON intermediary, then delegates to the JSON importer
/// for validation and deserialization. This ensures consistent validation
/// behavior across all import formats.
/// </summary>
public sealed class YamlPolicyImporter : IPolicyImporter
{
    private static readonly IDeserializer YamlDeserializer = new DeserializerBuilder()
        .WithNamingConvention(CamelCaseNamingConvention.Instance)
        .Build();

    // Options for serializing the YAML object graph to JSON. The read-side
    // settings are inert for Serialize but kept for symmetry with other importers.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        AllowTrailingCommas = true,
        ReadCommentHandling = JsonCommentHandling.Skip
    };

    private readonly JsonPolicyImporter _jsonImporter = new();

    /// <inheritdoc/>
    public async Task<PolicyImportResult> ImportAsync(
        Stream policyStream,
        PolicyImportOptions options,
        CancellationToken ct = default)
    {
        using var reader = new StreamReader(policyStream, Encoding.UTF8);
        var content = await reader.ReadToEndAsync(ct);
        return await ImportFromStringAsync(content, options, ct);
    }

    /// <inheritdoc/>
    /// <remarks>
    /// Pipeline: parse YAML -> convert to JSON -> delegate to the JSON importer ->
    /// re-label the detected format as YAML. Rewritten as async/await; the
    /// previous ContinueWith(t => t.Result) continuation wrapped importer
    /// failures in AggregateException and obscured stack traces.
    /// </remarks>
    public async Task<PolicyImportResult> ImportFromStringAsync(
        string content,
        PolicyImportOptions options,
        CancellationToken ct = default)
    {
        // Step 1: parse YAML into a generic object graph.
        object? yamlObject;
        try
        {
            yamlObject = YamlDeserializer.Deserialize<object>(content);
        }
        catch (YamlException ex)
        {
            return Failure(
                "YAML_PARSE_ERROR",
                $"YAML parse error at line {ex.Start.Line}, column {ex.Start.Column}: {ex.Message}",
                $"line {ex.Start.Line}, column {ex.Start.Column}");
        }

        if (yamlObject is null)
        {
            return Failure("YAML_EMPTY", "YAML document is empty or null.", null);
        }

        // Step 2: convert the YAML object graph to a JSON string
        // (YamlDotNet -> System.Text.Json roundtrip).
        string jsonContent;
        try
        {
            jsonContent = JsonSerializer.Serialize(yamlObject, JsonOptions);
        }
        catch (Exception ex)
        {
            return Failure("YAML_CONVERSION_ERROR", $"Failed to convert YAML to JSON: {ex.Message}", null);
        }

        // Step 3: delegate to the JSON importer for validation and deserialization.
        var jsonOptions = options with { Format = PolicyFormats.Json };
        var importResult = await _jsonImporter.ImportFromStringAsync(jsonContent, jsonOptions, ct);

        // Report the format the caller actually supplied.
        return importResult with { DetectedFormat = PolicyFormats.Yaml };
    }

    // Builds a failed YAML import result carrying a single error diagnostic.
    private static PolicyImportResult Failure(string code, string message, string? location) =>
        new()
        {
            Success = false,
            DetectedFormat = PolicyFormats.Yaml,
            Diagnostics =
            [
                new PolicyDiagnostic
                {
                    Severity = PolicyDiagnostic.Severities.Error,
                    Code = code,
                    Message = message,
                    Location = location
                }
            ]
        };
}
|
||||
@@ -13,6 +13,7 @@
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
|
||||
<PackageReference Include="Microsoft.Extensions.Options" />
|
||||
<PackageReference Include="JsonSchema.Net" />
|
||||
<PackageReference Include="YamlDotNet" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -0,0 +1,421 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PolicyDiffMergeEngineTests.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - Tests for diff/merge engine
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Policy.Interop.Abstractions;
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
using StellaOps.Policy.Interop.DiffMerge;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Interop.Tests.DiffMerge;
|
||||
|
||||
/// <summary>
/// Tests for <see cref="PolicyDiffMergeEngine"/>: structural diff of policy-pack
/// documents (metadata, settings, gates, rules) and three-way merge strategies
/// (OverlayWins, BaseWins, FailOnConflict).
/// </summary>
public sealed class PolicyDiffMergeEngineTests
{
    // Engine under test. NOTE(review): assumed stateless, so one shared instance
    // across tests is safe — confirm if the engine ever grows mutable state.
    private readonly PolicyDiffMergeEngine _engine = new();

    // Loads the golden v2 policy-pack fixture copied into the test output directory.
    // Case-insensitive property matching lets the camelCase JSON bind to PascalCase members.
    private static PolicyPackDocument LoadGoldenFixture()
    {
        var fixturePath = Path.Combine(AppContext.BaseDirectory, "Fixtures", "golden-policy-pack-v2.json");
        var json = File.ReadAllText(fixturePath);
        return JsonSerializer.Deserialize<PolicyPackDocument>(json,
            new JsonSerializerOptions { PropertyNameCaseInsensitive = true })!;
    }

    // Builds the smallest valid v2 document (empty gates/rules) with
    // configurable name/version/default action for targeted diff scenarios.
    private static PolicyPackDocument CreateMinimalDoc(
        string name = "test", string version = "1.0.0", string defaultAction = "block")
    {
        return new PolicyPackDocument
        {
            ApiVersion = PolicyPackDocument.ApiVersionV2,
            Kind = PolicyPackDocument.KindPolicyPack,
            Metadata = new PolicyPackMetadata { Name = name, Version = version },
            Spec = new PolicyPackSpec
            {
                Settings = new PolicyPackSettings { DefaultAction = defaultAction },
                Gates = [],
                Rules = []
            }
        };
    }

    #region Diff Tests

    [Fact]
    public void Diff_IdenticalDocuments_ReturnsNoChanges()
    {
        var doc = LoadGoldenFixture();

        var result = _engine.Diff(doc, doc);

        result.AreIdentical.Should().BeTrue();
        result.Changes.Should().BeEmpty();
        result.Summary.Total.Should().Be(0);
    }

    [Fact]
    public void Diff_MetadataVersionChange_DetectsModification()
    {
        var baseline = CreateMinimalDoc(version: "1.0.0");
        var updated = baseline with
        {
            Metadata = baseline.Metadata with { Version = "2.0.0" }
        };

        var result = _engine.Diff(baseline, updated);

        result.AreIdentical.Should().BeFalse();
        result.Summary.Modifications.Should().Be(1);
        result.Changes.Should().ContainSingle(c =>
            c.Path == "metadata.version" && c.ChangeType == PolicyChangeType.Modified);
    }

    [Fact]
    public void Diff_SettingsChange_DetectsDefaultActionModification()
    {
        var baseline = CreateMinimalDoc(defaultAction: "block");
        var updated = baseline with
        {
            Spec = baseline.Spec with
            {
                Settings = baseline.Spec.Settings with { DefaultAction = "warn" }
            }
        };

        var result = _engine.Diff(baseline, updated);

        result.AreIdentical.Should().BeFalse();
        // Change carries both old and new values for auditability.
        result.Changes.Should().ContainSingle(c =>
            c.Path == "spec.settings.defaultAction" &&
            c.OldValue!.ToString() == "block" &&
            c.NewValue!.ToString() == "warn");
    }

    [Fact]
    public void Diff_GateAdded_DetectsAddition()
    {
        var baseline = CreateMinimalDoc();
        var updated = baseline with
        {
            Spec = baseline.Spec with
            {
                Gates =
                [
                    new PolicyGateDefinition
                    {
                        Id = "new-gate",
                        Type = "CvssThresholdGate"
                    }
                ]
            }
        };

        var result = _engine.Diff(baseline, updated);

        result.Summary.Additions.Should().Be(1);
        result.Changes.Should().ContainSingle(c =>
            c.ChangeType == PolicyChangeType.Added && c.Category == "gate");
    }

    [Fact]
    public void Diff_GateRemoved_DetectsRemoval()
    {
        var baseline = CreateMinimalDoc() with
        {
            Spec = new PolicyPackSpec
            {
                Settings = new PolicyPackSettings { DefaultAction = "block" },
                Gates =
                [
                    new PolicyGateDefinition
                    {
                        Id = "old-gate",
                        Type = "SbomPresenceGate"
                    }
                ],
                Rules = []
            }
        };
        var updated = baseline with
        {
            Spec = baseline.Spec with { Gates = [] }
        };

        var result = _engine.Diff(baseline, updated);

        result.Summary.Removals.Should().Be(1);
        result.Changes.Should().ContainSingle(c =>
            c.ChangeType == PolicyChangeType.Removed && c.Category == "gate");
    }

    [Fact]
    public void Diff_RuleActionChanged_DetectsModification()
    {
        var rule = new PolicyRuleDefinition
        {
            Name = "test-rule",
            Action = "block",
            Priority = 10
        };

        var baseline = CreateMinimalDoc() with
        {
            Spec = new PolicyPackSpec
            {
                Settings = new PolicyPackSettings { DefaultAction = "block" },
                Gates = [],
                Rules = [rule]
            }
        };
        var updated = baseline with
        {
            Spec = baseline.Spec with
            {
                Rules = [rule with { Action = "warn" }]
            }
        };

        var result = _engine.Diff(baseline, updated);

        // Rules are addressed by name in diff paths, not by index.
        result.Changes.Should().Contain(c =>
            c.Path == "spec.rules[test-rule].action" && c.ChangeType == PolicyChangeType.Modified);
    }

    [Fact]
    public void Diff_GateConfigChanged_DetectsConfigModification()
    {
        // Config values are JsonElement-backed; Clone() detaches the element
        // from the JsonDocument so it stays valid after the document is collected.
        var gate = new PolicyGateDefinition
        {
            Id = "cvss-gate",
            Type = "CvssThresholdGate",
            Config = new Dictionary<string, object?> { ["threshold"] = (JsonElement)JsonDocument.Parse("7.0").RootElement.Clone() }
        };

        var baseline = CreateMinimalDoc() with
        {
            Spec = new PolicyPackSpec
            {
                Settings = new PolicyPackSettings { DefaultAction = "block" },
                Gates = [gate],
                Rules = []
            }
        };

        var updatedGate = gate with
        {
            Config = new Dictionary<string, object?> { ["threshold"] = (JsonElement)JsonDocument.Parse("9.0").RootElement.Clone() }
        };
        var updated = baseline with
        {
            Spec = baseline.Spec with { Gates = [updatedGate] }
        };

        var result = _engine.Diff(baseline, updated);

        result.Changes.Should().Contain(c =>
            c.Path == "spec.gates[cvss-gate].config.threshold" && c.ChangeType == PolicyChangeType.Modified);
    }

    [Fact]
    public void Diff_GoldenFixture_AgainstItself_IsIdentical()
    {
        var doc = LoadGoldenFixture();

        var result = _engine.Diff(doc, doc);

        result.AreIdentical.Should().BeTrue();
    }

    [Fact]
    public void Diff_MultipleChanges_ReturnsCorrectSummary()
    {
        var baseline = CreateMinimalDoc(name: "base", version: "1.0.0", defaultAction: "block");
        var updated = CreateMinimalDoc(name: "updated", version: "2.0.0", defaultAction: "warn");

        var result = _engine.Diff(baseline, updated);

        result.Summary.Modifications.Should().Be(3); // name, version, defaultAction
        result.Summary.Total.Should().Be(3);
    }

    #endregion

    #region Merge Tests

    [Fact]
    public void Merge_IdenticalDocuments_ReturnsIdenticalResult()
    {
        var doc = CreateMinimalDoc();

        var result = _engine.Merge(doc, doc);

        result.Success.Should().BeTrue();
        result.Document.Should().NotBeNull();
        result.Conflicts.Should().BeEmpty();
    }

    [Fact]
    public void Merge_OverlayWins_OverlayValuesPreferred()
    {
        var baseDoc = CreateMinimalDoc(defaultAction: "block");
        var overlay = CreateMinimalDoc(defaultAction: "warn");

        var result = _engine.Merge(baseDoc, overlay, PolicyMergeStrategy.OverlayWins);

        result.Success.Should().BeTrue();
        result.Document!.Spec.Settings.DefaultAction.Should().Be("warn");
    }

    [Fact]
    public void Merge_BaseWins_BaseValuesPreferred()
    {
        var baseDoc = CreateMinimalDoc(defaultAction: "block");
        var overlay = CreateMinimalDoc(defaultAction: "warn");

        var result = _engine.Merge(baseDoc, overlay, PolicyMergeStrategy.BaseWins);

        result.Success.Should().BeTrue();
        result.Document!.Spec.Settings.DefaultAction.Should().Be("block");
    }

    [Fact]
    public void Merge_FailOnConflict_ReportsConflicts()
    {
        // Same path modified on both sides -> conflict under FailOnConflict.
        var baseDoc = CreateMinimalDoc(defaultAction: "block");
        var overlay = CreateMinimalDoc(defaultAction: "warn");

        var result = _engine.Merge(baseDoc, overlay, PolicyMergeStrategy.FailOnConflict);

        result.Success.Should().BeFalse();
        result.Conflicts.Should().NotBeEmpty();
    }

    [Fact]
    public void Merge_OverlayAddsNewGate_GateIncluded()
    {
        var baseDoc = CreateMinimalDoc();
        var overlay = baseDoc with
        {
            Spec = baseDoc.Spec with
            {
                Gates =
                [
                    new PolicyGateDefinition
                    {
                        Id = "overlay-gate",
                        Type = "CvssThresholdGate"
                    }
                ]
            }
        };

        var result = _engine.Merge(baseDoc, overlay);

        result.Success.Should().BeTrue();
        result.Document!.Spec.Gates.Should().ContainSingle(g => g.Id == "overlay-gate");
    }

    [Fact]
    public void Merge_OverlayAddsNewRule_RuleIncluded()
    {
        var baseDoc = CreateMinimalDoc();
        var overlay = baseDoc with
        {
            Spec = baseDoc.Spec with
            {
                Rules =
                [
                    new PolicyRuleDefinition
                    {
                        Name = "overlay-rule",
                        Action = "warn",
                        Priority = 50
                    }
                ]
            }
        };

        var result = _engine.Merge(baseDoc, overlay);

        result.Success.Should().BeTrue();
        result.Document!.Spec.Rules.Should().ContainSingle(r => r.Name == "overlay-rule");
    }

    [Fact]
    public void Merge_BothHaveGates_MergesAllGates()
    {
        // Disjoint gate ids: union is expected, no conflict.
        var baseDoc = CreateMinimalDoc() with
        {
            Spec = new PolicyPackSpec
            {
                Settings = new PolicyPackSettings { DefaultAction = "block" },
                Gates =
                [
                    new PolicyGateDefinition { Id = "base-gate", Type = "SbomPresenceGate" }
                ],
                Rules = []
            }
        };
        var overlay = CreateMinimalDoc() with
        {
            Spec = new PolicyPackSpec
            {
                Settings = new PolicyPackSettings { DefaultAction = "block" },
                Gates =
                [
                    new PolicyGateDefinition { Id = "overlay-gate", Type = "CvssThresholdGate" }
                ],
                Rules = []
            }
        };

        var result = _engine.Merge(baseDoc, overlay);

        result.Success.Should().BeTrue();
        result.Document!.Spec.Gates.Should().HaveCount(2);
        result.Document.Spec.Gates.Should().Contain(g => g.Id == "base-gate");
        result.Document.Spec.Gates.Should().Contain(g => g.Id == "overlay-gate");
    }

    [Fact]
    public void Merge_OverlayWins_OverridesMatchingGate()
    {
        // Same gate id on both sides: OverlayWins must replace, not duplicate.
        var gate = new PolicyGateDefinition
        {
            Id = "shared-gate",
            Type = "CvssThresholdGate",
            Enabled = true
        };

        var baseDoc = CreateMinimalDoc() with
        {
            Spec = new PolicyPackSpec
            {
                Settings = new PolicyPackSettings { DefaultAction = "block" },
                Gates = [gate],
                Rules = []
            }
        };
        var overlay = CreateMinimalDoc() with
        {
            Spec = new PolicyPackSpec
            {
                Settings = new PolicyPackSettings { DefaultAction = "block" },
                Gates = [gate with { Enabled = false }],
                Rules = []
            }
        };

        var result = _engine.Merge(baseDoc, overlay, PolicyMergeStrategy.OverlayWins);

        result.Success.Should().BeTrue();
        result.Document!.Spec.Gates.Should().ContainSingle(g =>
            g.Id == "shared-gate" && !g.Enabled);
    }

    #endregion
}
|
||||
@@ -0,0 +1,151 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// YamlPolicyExporterTests.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - Tests for YAML export
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
using StellaOps.Policy.Interop.Export;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Interop.Tests.Export;
|
||||
|
||||
/// <summary>
/// Tests for <see cref="YamlPolicyExporter"/>: YAML serialization of policy-pack
/// documents, determinism of the output/digest, and export-option handling
/// (environment merge, remediation stripping).
/// </summary>
public sealed class YamlPolicyExporterTests
{
    // Exporter under test. NOTE(review): assumed stateless — confirm before sharing.
    private readonly YamlPolicyExporter _exporter = new();

    // Loads the golden v2 policy-pack fixture copied into the test output directory.
    private static PolicyPackDocument LoadGoldenFixture()
    {
        var fixturePath = Path.Combine(AppContext.BaseDirectory, "Fixtures", "golden-policy-pack-v2.json");
        var json = File.ReadAllText(fixturePath);
        return JsonSerializer.Deserialize<PolicyPackDocument>(json,
            new JsonSerializerOptions { PropertyNameCaseInsensitive = true })!;
    }

    [Fact]
    public async Task ExportToYaml_ProducesValidOutput()
    {
        var doc = LoadGoldenFixture();
        var request = new PolicyExportRequest { Format = PolicyFormats.Yaml };

        var result = await _exporter.ExportToYamlAsync(doc, request);

        result.Success.Should().BeTrue();
        result.YamlContent.Should().NotBeNullOrEmpty();
        // Digest follows the "sha256:<hex>" convention.
        result.Digest.Should().StartWith("sha256:");
    }

    [Fact]
    public async Task ExportToYaml_ContainsApiVersionAndKind()
    {
        var doc = LoadGoldenFixture();
        var request = new PolicyExportRequest { Format = PolicyFormats.Yaml };

        var result = await _exporter.ExportToYamlAsync(doc, request);

        result.YamlContent.Should().Contain("apiVersion: policy.stellaops.io/v2");
        result.YamlContent.Should().Contain("kind: PolicyPack");
    }

    [Fact]
    public async Task ExportToYaml_IsDeterministic()
    {
        var doc = LoadGoldenFixture();
        var request = new PolicyExportRequest { Format = PolicyFormats.Yaml };

        // Two exports of the same document must be byte-identical (and hence digest-identical).
        var result1 = await _exporter.ExportToYamlAsync(doc, request);
        var result2 = await _exporter.ExportToYamlAsync(doc, request);

        result1.Digest.Should().Be(result2.Digest);
        result1.YamlContent.Should().Be(result2.YamlContent);
    }

    [Fact]
    public async Task ExportToYaml_WithEnvironment_MergesConfig()
    {
        var doc = LoadGoldenFixture();
        var request = new PolicyExportRequest { Format = PolicyFormats.Yaml, Environment = "staging" };

        var result = await _exporter.ExportToYamlAsync(doc, request);

        // Environment-specific config is merged; environments key should not appear
        result.YamlContent.Should().NotContain("environments:");
    }

    [Fact]
    public async Task ExportToYaml_WithoutRemediation_StripsHints()
    {
        var doc = LoadGoldenFixture();
        var request = new PolicyExportRequest { Format = PolicyFormats.Yaml, IncludeRemediation = false };

        var result = await _exporter.ExportToYamlAsync(doc, request);

        result.YamlContent.Should().NotContain("remediation:");
    }

    [Fact]
    public void SerializeCanonical_ProducesDeterministicBytes()
    {
        var doc = LoadGoldenFixture();

        var bytes1 = YamlPolicyExporter.SerializeCanonical(doc);
        var bytes2 = YamlPolicyExporter.SerializeCanonical(doc);

        bytes1.Should().BeEquivalentTo(bytes2);
    }

    [Fact]
    public void SerializeToYaml_PreservesGateIds()
    {
        var doc = LoadGoldenFixture();

        var yaml = YamlPolicyExporter.SerializeToYaml(doc);

        // Gate ids below mirror the golden fixture contents.
        yaml.Should().Contain("cvss-threshold");
        yaml.Should().Contain("signature-required");
        yaml.Should().Contain("evidence-freshness");
        yaml.Should().Contain("sbom-presence");
        yaml.Should().Contain("minimum-confidence");
    }

    [Fact]
    public void SerializeToYaml_PreservesRuleNames()
    {
        var doc = LoadGoldenFixture();

        var yaml = YamlPolicyExporter.SerializeToYaml(doc);

        // Rule names below mirror the golden fixture contents.
        yaml.Should().Contain("require-dsse-signature");
        yaml.Should().Contain("require-rekor-proof");
        yaml.Should().Contain("require-sbom-digest");
        yaml.Should().Contain("require-freshness-tst");
    }

    [Fact]
    public void SerializeToYaml_MinimalDocument_Succeeds()
    {
        // Smallest valid document: empty gates/rules, required metadata only.
        var doc = new PolicyPackDocument
        {
            ApiVersion = PolicyPackDocument.ApiVersionV2,
            Kind = PolicyPackDocument.KindPolicyPack,
            Metadata = new PolicyPackMetadata
            {
                Name = "minimal",
                Version = "1.0.0"
            },
            Spec = new PolicyPackSpec
            {
                Settings = new PolicyPackSettings { DefaultAction = "allow" },
                Gates = [],
                Rules = []
            }
        };

        var yaml = YamlPolicyExporter.SerializeToYaml(doc);

        yaml.Should().Contain("name: minimal");
        yaml.Should().Contain("defaultAction: allow");
    }
}
|
||||
@@ -79,8 +79,30 @@ public class FormatDetectorTests
|
||||
[Fact]
public void DetectFromExtension_UnknownExtension_ReturnsNull()
{
    // Fix: "policy.yaml" was removed from this test. With YAML interop,
    // .yaml/.yml are now recognized extensions (DetectFromExtension_YamlFile_ReturnsYaml
    // asserts they map to PolicyFormats.Yaml), so asserting null here
    // directly contradicted that test and would fail against the new detector.
    FormatDetector.DetectFromExtension("policy.txt").Should().BeNull();
    FormatDetector.DetectFromExtension("policy.xml").Should().BeNull();
}
|
||||
|
||||
[Fact]
public void DetectFromExtension_YamlFile_ReturnsYaml()
{
    // Both .yaml and .yml extensions resolve to YAML, including for full paths.
    FormatDetector.DetectFromExtension("policy.yaml").Should().Be(PolicyFormats.Yaml);
    FormatDetector.DetectFromExtension("policy.yml").Should().Be(PolicyFormats.Yaml);
    FormatDetector.DetectFromExtension("/path/to/my-policy.yaml").Should().Be(PolicyFormats.Yaml);
}
|
||||
|
||||
[Fact]
public void Detect_YamlContent_WithApiVersion_ReturnsYaml()
{
    // Content-based sniffing: a top-level "apiVersion:" key marks YAML.
    var content = "apiVersion: policy.stellaops.io/v2\nkind: PolicyPack\n";
    FormatDetector.Detect(content).Should().Be(PolicyFormats.Yaml);
}
|
||||
|
||||
[Fact]
public void Detect_YamlContent_WithDocumentSeparator_ReturnsYaml()
{
    // A leading "---" YAML document separator is also recognized as YAML.
    var content = "---\napiVersion: policy.stellaops.io/v2\n";
    FormatDetector.Detect(content).Should().Be(PolicyFormats.Yaml);
}
|
||||
|
||||
[Fact]
|
||||
|
||||
@@ -0,0 +1,150 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// YamlPolicyImporterTests.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - Tests for YAML import
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
using StellaOps.Policy.Interop.Export;
|
||||
using StellaOps.Policy.Interop.Import;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Interop.Tests.Import;
|
||||
|
||||
/// <summary>
/// Tests for <see cref="YamlPolicyImporter"/>: round-trips the golden fixture
/// through export-to-YAML then re-import, and verifies diagnostics for invalid
/// or empty YAML input.
/// </summary>
public sealed class YamlPolicyImporterTests
{
    // Importer under test. NOTE(review): assumed stateless — confirm before sharing.
    private readonly YamlPolicyImporter _importer = new();

    // Loads the golden v2 policy-pack fixture copied into the test output directory.
    private static PolicyPackDocument LoadGoldenFixture()
    {
        var fixturePath = Path.Combine(AppContext.BaseDirectory, "Fixtures", "golden-policy-pack-v2.json");
        var json = File.ReadAllText(fixturePath);
        return JsonSerializer.Deserialize<PolicyPackDocument>(json,
            new JsonSerializerOptions { PropertyNameCaseInsensitive = true })!;
    }

    [Fact]
    public async Task ImportFromYaml_ValidDocument_Succeeds()
    {
        // Export golden fixture to YAML, then re-import
        var original = LoadGoldenFixture();
        var yaml = YamlPolicyExporter.SerializeToYaml(original);

        var result = await _importer.ImportFromStringAsync(yaml,
            new PolicyImportOptions { Format = PolicyFormats.Yaml });

        result.Success.Should().BeTrue();
        result.DetectedFormat.Should().Be(PolicyFormats.Yaml);
        result.Document.Should().NotBeNull();
    }

    [Fact]
    public async Task ImportFromYaml_PreservesApiVersion()
    {
        var original = LoadGoldenFixture();
        var yaml = YamlPolicyExporter.SerializeToYaml(original);

        var result = await _importer.ImportFromStringAsync(yaml,
            new PolicyImportOptions { Format = PolicyFormats.Yaml });

        result.Document!.ApiVersion.Should().Be(PolicyPackDocument.ApiVersionV2);
    }

    [Fact]
    public async Task ImportFromYaml_PreservesGateCount()
    {
        var original = LoadGoldenFixture();
        var yaml = YamlPolicyExporter.SerializeToYaml(original);

        var result = await _importer.ImportFromStringAsync(yaml,
            new PolicyImportOptions { Format = PolicyFormats.Yaml });

        // Round-trip must not drop or duplicate gates.
        result.GateCount.Should().Be(original.Spec.Gates.Count);
    }

    [Fact]
    public async Task ImportFromYaml_PreservesRuleCount()
    {
        var original = LoadGoldenFixture();
        var yaml = YamlPolicyExporter.SerializeToYaml(original);

        var result = await _importer.ImportFromStringAsync(yaml,
            new PolicyImportOptions { Format = PolicyFormats.Yaml });

        // Round-trip must not drop or duplicate rules.
        result.RuleCount.Should().Be(original.Spec.Rules.Count);
    }

    [Fact]
    public async Task ImportFromYaml_InvalidYaml_ReturnsDiagnostic()
    {
        // Deliberately malformed: unterminated flow sequence.
        var invalidYaml = "invalid: yaml:\n  bad: [\nincomplete";

        var result = await _importer.ImportFromStringAsync(invalidYaml,
            new PolicyImportOptions { Format = PolicyFormats.Yaml });

        result.Success.Should().BeFalse();
        result.DetectedFormat.Should().Be(PolicyFormats.Yaml);
        result.Diagnostics.Should().Contain(d => d.Code == "YAML_PARSE_ERROR");
    }

    [Fact]
    public async Task ImportFromYaml_EmptyContent_ReturnsDiagnostic()
    {
        var result = await _importer.ImportFromStringAsync("",
            new PolicyImportOptions { Format = PolicyFormats.Yaml });

        result.Success.Should().BeFalse();
        result.DetectedFormat.Should().Be(PolicyFormats.Yaml);
    }

    [Fact]
    public async Task ImportFromYaml_PreservesMetadataName()
    {
        var original = LoadGoldenFixture();
        var yaml = YamlPolicyExporter.SerializeToYaml(original);

        var result = await _importer.ImportFromStringAsync(yaml,
            new PolicyImportOptions { Format = PolicyFormats.Yaml });

        result.Document!.Metadata.Name.Should().Be(original.Metadata.Name);
    }

    [Fact]
    public async Task ImportFromYaml_MinimalDocument_Succeeds()
    {
        // Hand-written minimal YAML (not a round-trip) to prove the importer
        // does not depend on exporter-specific formatting.
        var yaml = """
            apiVersion: policy.stellaops.io/v2
            kind: PolicyPack
            metadata:
              name: test-minimal
              version: "1.0.0"
            spec:
              settings:
                defaultAction: allow
              gates: []
              rules: []
            """;

        var result = await _importer.ImportFromStringAsync(yaml,
            new PolicyImportOptions { Format = PolicyFormats.Yaml });

        result.Success.Should().BeTrue();
        result.Document!.Metadata.Name.Should().Be("test-minimal");
        result.Document.Spec.Settings.DefaultAction.Should().Be("allow");
    }

    [Fact]
    public async Task ImportFromYaml_Stream_Succeeds()
    {
        var original = LoadGoldenFixture();
        var yaml = YamlPolicyExporter.SerializeToYaml(original);
        using var stream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(yaml));

        var result = await _importer.ImportAsync(stream,
            new PolicyImportOptions { Format = PolicyFormats.Yaml });

        result.Success.Should().BeTrue();
    }
}
|
||||
@@ -14,6 +14,7 @@
|
||||
<PackageReference Include="FluentAssertions" />
|
||||
<PackageReference Include="NSubstitute" />
|
||||
<PackageReference Include="JsonSchema.Net" />
|
||||
<PackageReference Include="YamlDotNet" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
Reference in New Issue
Block a user