Implement VEX document verification system with issuer management and signature verification

- Added IIssuerDirectory interface for managing VEX document issuers, including methods for registration, revocation, and trust validation.
- Created InMemoryIssuerDirectory class as an in-memory implementation of IIssuerDirectory for testing and single-instance deployments.
- Introduced ISignatureVerifier interface for verifying signatures on VEX documents, with support for multiple signature formats.
- Developed SignatureVerifier class as the default implementation of ISignatureVerifier, allowing extensibility for different signature formats.
- Implemented handlers for DSSE and JWS signature formats, including methods for verification and signature extraction.
- Defined various records and enums for issuer and signature metadata, enhancing the structure and clarity of the verification process.
This commit is contained in:
StellaOps Bot
2025-12-06 13:41:22 +02:00
parent 2141196496
commit 5e514532df
112 changed files with 24861 additions and 211 deletions

View File

@@ -0,0 +1,295 @@
using System.Collections.Immutable;
using System.Text.Json.Serialization;
using StellaOps.Policy.RiskProfile.Models;
namespace StellaOps.Policy.Engine.Simulation;
/// <summary>
/// Detailed breakdown of a risk simulation result.
/// Per POLICY-RISK-67-003.
/// </summary>
/// <param name="SimulationId">Identifier of the simulation run.</param>
/// <param name="ProfileRef">Risk profile the simulation was scored against.</param>
/// <param name="SignalAnalysis">Per-signal contribution and coverage statistics.</param>
/// <param name="OverrideAnalysis">Severity/decision override application statistics.</param>
/// <param name="ScoreDistribution">Raw and normalized score distribution.</param>
/// <param name="SeverityBreakdown">Counts and flows per severity level.</param>
/// <param name="ActionBreakdown">Counts and flows per recommended action.</param>
/// <param name="ComponentBreakdown">Per-component view; null when disabled in options.</param>
/// <param name="RiskTrends">Trend metrics; only set for comparison simulations.</param>
/// <param name="DeterminismHash">Hash of the result — presumably for reproducibility checks; computed by the breakdown service.</param>
public sealed record RiskSimulationBreakdown(
    [property: JsonPropertyName("simulation_id")] string SimulationId,
    [property: JsonPropertyName("profile_ref")] ProfileReference ProfileRef,
    [property: JsonPropertyName("signal_analysis")] SignalAnalysis SignalAnalysis,
    [property: JsonPropertyName("override_analysis")] OverrideAnalysis OverrideAnalysis,
    [property: JsonPropertyName("score_distribution")] ScoreDistributionAnalysis ScoreDistribution,
    [property: JsonPropertyName("severity_breakdown")] SeverityBreakdownAnalysis SeverityBreakdown,
    [property: JsonPropertyName("action_breakdown")] ActionBreakdownAnalysis ActionBreakdown,
    [property: JsonPropertyName("component_breakdown")] ComponentBreakdownAnalysis? ComponentBreakdown,
    [property: JsonPropertyName("risk_trends")] RiskTrendAnalysis? RiskTrends,
    [property: JsonPropertyName("determinism_hash")] string DeterminismHash);
/// <summary>
/// Reference to the risk profile used in simulation.
/// </summary>
/// <param name="Id">Profile identifier.</param>
/// <param name="Version">Profile version string.</param>
/// <param name="Hash">Profile hash recorded on the simulation result.</param>
/// <param name="Description">Optional human-readable description.</param>
/// <param name="Extends">Optional id of a parent profile this one extends.</param>
public sealed record ProfileReference(
    [property: JsonPropertyName("id")] string Id,
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("hash")] string Hash,
    [property: JsonPropertyName("description")] string? Description,
    [property: JsonPropertyName("extends")] string? Extends);
/// <summary>
/// Analysis of signal contributions to risk scores.
/// </summary>
/// <param name="TotalSignals">Number of signals defined by the profile.</param>
/// <param name="SignalsUsed">Signals present on at least one finding.</param>
/// <param name="SignalsMissing">Signals present on no finding (TotalSignals - SignalsUsed).</param>
/// <param name="SignalCoverage">SignalsUsed as a percentage of TotalSignals.</param>
/// <param name="SignalStats">Per-signal statistics.</param>
/// <param name="TopContributors">Signals with the largest total contribution.</param>
/// <param name="MissingSignalImpact">Estimated effect of absent signals.</param>
public sealed record SignalAnalysis(
    [property: JsonPropertyName("total_signals")] int TotalSignals,
    [property: JsonPropertyName("signals_used")] int SignalsUsed,
    [property: JsonPropertyName("signals_missing")] int SignalsMissing,
    [property: JsonPropertyName("signal_coverage")] double SignalCoverage,
    [property: JsonPropertyName("signal_stats")] ImmutableArray<SignalStatistics> SignalStats,
    [property: JsonPropertyName("top_contributors")] ImmutableArray<SignalContributor> TopContributors,
    [property: JsonPropertyName("missing_signal_impact")] MissingSignalImpact MissingSignalImpact);
/// <summary>
/// Statistics for a single signal across all findings.
/// </summary>
/// <param name="SignalName">Name of the signal in the profile.</param>
/// <param name="SignalType">Lower-cased signal type name.</param>
/// <param name="Weight">Weight assigned by the profile (0 when absent).</param>
/// <param name="FindingsWithSignal">Findings that carried this signal.</param>
/// <param name="FindingsMissingSignal">Findings lacking this signal.</param>
/// <param name="CoveragePercentage">FindingsWithSignal as a percentage of all findings.</param>
/// <param name="ValueDistribution">Distribution of numeric values; null when histograms are disabled or no numeric values exist.</param>
/// <param name="TotalContribution">Sum of the signal's contributions across findings.</param>
/// <param name="AvgContribution">TotalContribution divided by FindingsWithSignal.</param>
public sealed record SignalStatistics(
    [property: JsonPropertyName("signal_name")] string SignalName,
    [property: JsonPropertyName("signal_type")] string SignalType,
    [property: JsonPropertyName("weight")] double Weight,
    [property: JsonPropertyName("findings_with_signal")] int FindingsWithSignal,
    [property: JsonPropertyName("findings_missing_signal")] int FindingsMissingSignal,
    [property: JsonPropertyName("coverage_percentage")] double CoveragePercentage,
    [property: JsonPropertyName("value_distribution")] ValueDistribution? ValueDistribution,
    [property: JsonPropertyName("total_contribution")] double TotalContribution,
    [property: JsonPropertyName("avg_contribution")] double AvgContribution);
/// <summary>
/// Distribution of values for a signal.
/// All members are null when the distribution was computed from an empty set.
/// </summary>
/// <param name="Min">Smallest observed value.</param>
/// <param name="Max">Largest observed value.</param>
/// <param name="Mean">Arithmetic mean.</param>
/// <param name="Median">Median (average of middle pair for even counts).</param>
/// <param name="StdDev">Population standard deviation.</param>
/// <param name="Histogram">Fixed-width buckets; empty when Min equals Max.</param>
public sealed record ValueDistribution(
    [property: JsonPropertyName("min")] double? Min,
    [property: JsonPropertyName("max")] double? Max,
    [property: JsonPropertyName("mean")] double? Mean,
    [property: JsonPropertyName("median")] double? Median,
    [property: JsonPropertyName("std_dev")] double? StdDev,
    [property: JsonPropertyName("histogram")] ImmutableArray<HistogramBucket>? Histogram);
/// <summary>
/// Histogram bucket for value distribution.
/// </summary>
/// <param name="RangeMin">Inclusive lower bound of the bucket.</param>
/// <param name="RangeMax">Upper bound (inclusive only for the last bucket).</param>
/// <param name="Count">Number of values falling in the bucket.</param>
/// <param name="Percentage">Count as a percentage of all values.</param>
public sealed record HistogramBucket(
    [property: JsonPropertyName("range_min")] double RangeMin,
    [property: JsonPropertyName("range_max")] double RangeMax,
    [property: JsonPropertyName("count")] int Count,
    [property: JsonPropertyName("percentage")] double Percentage);
/// <summary>
/// A signal that significantly contributed to risk scores.
/// </summary>
/// <param name="SignalName">Name of the contributing signal.</param>
/// <param name="TotalContribution">Sum of the signal's contributions.</param>
/// <param name="ContributionPercentage">Share of all signals' total contribution.</param>
/// <param name="AvgValue">Mean observed value (0 when no numeric values).</param>
/// <param name="Weight">Profile weight of the signal.</param>
/// <param name="ImpactDirection">"increase" for non-negative weight, otherwise "decrease".</param>
public sealed record SignalContributor(
    [property: JsonPropertyName("signal_name")] string SignalName,
    [property: JsonPropertyName("total_contribution")] double TotalContribution,
    [property: JsonPropertyName("contribution_percentage")] double ContributionPercentage,
    [property: JsonPropertyName("avg_value")] double AvgValue,
    [property: JsonPropertyName("weight")] double Weight,
    [property: JsonPropertyName("impact_direction")] string ImpactDirection);
/// <summary>
/// Impact of missing signals on scoring.
/// </summary>
/// <param name="FindingsWithMissingSignals">Distinct findings lacking at least one signal.</param>
/// <param name="AvgMissingSignalsPerFinding">Mean number of missing signals per finding.</param>
/// <param name="EstimatedScoreImpact">Heuristic estimate of score impact from missing signals.</param>
/// <param name="MostImpactfulMissing">Missing signals ranked by occurrences x weight (top 5).</param>
public sealed record MissingSignalImpact(
    [property: JsonPropertyName("findings_with_missing_signals")] int FindingsWithMissingSignals,
    [property: JsonPropertyName("avg_missing_signals_per_finding")] double AvgMissingSignalsPerFinding,
    [property: JsonPropertyName("estimated_score_impact")] double EstimatedScoreImpact,
    [property: JsonPropertyName("most_impactful_missing")] ImmutableArray<string> MostImpactfulMissing);
/// <summary>
/// Analysis of override applications.
/// </summary>
/// <param name="TotalOverridesEvaluated">Configured overrides multiplied by finding count.</param>
/// <param name="SeverityOverridesApplied">Number of severity override applications.</param>
/// <param name="DecisionOverridesApplied">Number of decision override applications.</param>
/// <param name="OverrideApplicationRate">Applications as a percentage of findings.</param>
/// <param name="SeverityOverrideDetails">Per-predicate severity override details.</param>
/// <param name="DecisionOverrideDetails">Per-predicate decision override details.</param>
/// <param name="OverrideConflicts">Findings where multiple overrides matched.</param>
public sealed record OverrideAnalysis(
    [property: JsonPropertyName("total_overrides_evaluated")] int TotalOverridesEvaluated,
    [property: JsonPropertyName("severity_overrides_applied")] int SeverityOverridesApplied,
    [property: JsonPropertyName("decision_overrides_applied")] int DecisionOverridesApplied,
    [property: JsonPropertyName("override_application_rate")] double OverrideApplicationRate,
    [property: JsonPropertyName("severity_override_details")] ImmutableArray<SeverityOverrideDetail> SeverityOverrideDetails,
    [property: JsonPropertyName("decision_override_details")] ImmutableArray<DecisionOverrideDetail> DecisionOverrideDetails,
    [property: JsonPropertyName("override_conflicts")] ImmutableArray<OverrideConflict> OverrideConflicts);
/// <summary>
/// Details of severity override applications.
/// </summary>
/// <param name="PredicateHash">Hash identifying the override predicate.</param>
/// <param name="PredicateSummary">Human-readable predicate summary.</param>
/// <param name="TargetSeverity">Severity the override applies.</param>
/// <param name="ApplicationsCount">How many times the override was applied.</param>
/// <param name="OriginalSeverities">Histogram of the severities it replaced.</param>
public sealed record SeverityOverrideDetail(
    [property: JsonPropertyName("predicate_hash")] string PredicateHash,
    [property: JsonPropertyName("predicate_summary")] string PredicateSummary,
    [property: JsonPropertyName("target_severity")] string TargetSeverity,
    [property: JsonPropertyName("applications_count")] int ApplicationsCount,
    [property: JsonPropertyName("original_severities")] ImmutableDictionary<string, int> OriginalSeverities);
/// <summary>
/// Details of decision override applications.
/// </summary>
/// <param name="PredicateHash">Hash identifying the override predicate.</param>
/// <param name="PredicateSummary">Human-readable predicate summary.</param>
/// <param name="TargetAction">Action the override applies.</param>
/// <param name="Reason">Optional reason recorded with the override.</param>
/// <param name="ApplicationsCount">How many times the override was applied.</param>
/// <param name="OriginalActions">Histogram of the actions it replaced.</param>
public sealed record DecisionOverrideDetail(
    [property: JsonPropertyName("predicate_hash")] string PredicateHash,
    [property: JsonPropertyName("predicate_summary")] string PredicateSummary,
    [property: JsonPropertyName("target_action")] string TargetAction,
    [property: JsonPropertyName("reason")] string? Reason,
    [property: JsonPropertyName("applications_count")] int ApplicationsCount,
    [property: JsonPropertyName("original_actions")] ImmutableDictionary<string, int> OriginalActions);
/// <summary>
/// Override conflict detected during evaluation.
/// </summary>
/// <param name="FindingId">Finding on which the conflict occurred.</param>
/// <param name="ConflictType">Kind of conflict (e.g. "severity_conflict").</param>
/// <param name="Override1">Summary of the first conflicting override.</param>
/// <param name="Override2">Summary of the second conflicting override.</param>
/// <param name="Resolution">How the conflict was resolved (e.g. "first_match").</param>
public sealed record OverrideConflict(
    [property: JsonPropertyName("finding_id")] string FindingId,
    [property: JsonPropertyName("conflict_type")] string ConflictType,
    [property: JsonPropertyName("override_1")] string Override1,
    [property: JsonPropertyName("override_2")] string Override2,
    [property: JsonPropertyName("resolution")] string Resolution);
/// <summary>
/// Analysis of score distribution.
/// </summary>
/// <param name="RawScoreStats">Statistics over raw finding scores.</param>
/// <param name="NormalizedScoreStats">Statistics over normalized finding scores.</param>
/// <param name="ScoreBuckets">Bucketed distribution of normalized scores.</param>
/// <param name="Percentiles">Named percentiles of the normalized scores.</param>
/// <param name="Outliers">Outlier detection results.</param>
public sealed record ScoreDistributionAnalysis(
    [property: JsonPropertyName("raw_score_stats")] ScoreStatistics RawScoreStats,
    [property: JsonPropertyName("normalized_score_stats")] ScoreStatistics NormalizedScoreStats,
    [property: JsonPropertyName("score_buckets")] ImmutableArray<ScoreBucket> ScoreBuckets,
    [property: JsonPropertyName("percentiles")] ImmutableDictionary<string, double> Percentiles,
    [property: JsonPropertyName("outliers")] OutlierAnalysis Outliers);
/// <summary>
/// Statistical summary of scores. All fields are 0 for an empty score set.
/// </summary>
/// <param name="Count">Number of scores summarized.</param>
/// <param name="Min">Smallest score.</param>
/// <param name="Max">Largest score.</param>
/// <param name="Mean">Arithmetic mean.</param>
/// <param name="Median">Median score.</param>
/// <param name="StdDev">Standard deviation.</param>
/// <param name="Variance">Variance.</param>
/// <param name="Skewness">Distribution skewness.</param>
/// <param name="Kurtosis">Distribution kurtosis.</param>
public sealed record ScoreStatistics(
    [property: JsonPropertyName("count")] int Count,
    [property: JsonPropertyName("min")] double Min,
    [property: JsonPropertyName("max")] double Max,
    [property: JsonPropertyName("mean")] double Mean,
    [property: JsonPropertyName("median")] double Median,
    [property: JsonPropertyName("std_dev")] double StdDev,
    [property: JsonPropertyName("variance")] double Variance,
    [property: JsonPropertyName("skewness")] double Skewness,
    [property: JsonPropertyName("kurtosis")] double Kurtosis);
/// <summary>
/// Score bucket for distribution.
/// </summary>
/// <param name="RangeMin">Lower bound of the bucket.</param>
/// <param name="RangeMax">Upper bound of the bucket.</param>
/// <param name="Label">Display label for the bucket.</param>
/// <param name="Count">Scores falling in the bucket.</param>
/// <param name="Percentage">Count as a percentage of all scores.</param>
public sealed record ScoreBucket(
    [property: JsonPropertyName("range_min")] double RangeMin,
    [property: JsonPropertyName("range_max")] double RangeMax,
    [property: JsonPropertyName("label")] string Label,
    [property: JsonPropertyName("count")] int Count,
    [property: JsonPropertyName("percentage")] double Percentage);
/// <summary>
/// Outlier analysis for scores.
/// </summary>
/// <param name="OutlierCount">Number of findings classified as outliers.</param>
/// <param name="OutlierThreshold">Score threshold used for classification.</param>
/// <param name="OutlierFindingIds">Ids of the outlier findings.</param>
public sealed record OutlierAnalysis(
    [property: JsonPropertyName("outlier_count")] int OutlierCount,
    [property: JsonPropertyName("outlier_threshold")] double OutlierThreshold,
    [property: JsonPropertyName("outlier_finding_ids")] ImmutableArray<string> OutlierFindingIds);
/// <summary>
/// Breakdown by severity level.
/// </summary>
/// <param name="BySeverity">Bucket per final severity, keyed by severity name.</param>
/// <param name="SeverityFlow">Movements from score-implied to final severity.</param>
/// <param name="SeverityConcentration">Herfindahl-style index; higher means more concentrated.</param>
public sealed record SeverityBreakdownAnalysis(
    [property: JsonPropertyName("by_severity")] ImmutableDictionary<string, SeverityBucket> BySeverity,
    [property: JsonPropertyName("severity_flow")] ImmutableArray<SeverityFlow> SeverityFlow,
    [property: JsonPropertyName("severity_concentration")] double SeverityConcentration);
/// <summary>
/// Details for a severity bucket.
/// </summary>
/// <param name="Severity">Lower-cased severity name.</param>
/// <param name="Count">Findings with this severity.</param>
/// <param name="Percentage">Count as a percentage of all findings.</param>
/// <param name="AvgScore">Mean normalized score in the bucket.</param>
/// <param name="ScoreRange">Min/max normalized score in the bucket.</param>
/// <param name="TopContributors">Up to three most frequent top-contributing signals.</param>
public sealed record SeverityBucket(
    [property: JsonPropertyName("severity")] string Severity,
    [property: JsonPropertyName("count")] int Count,
    [property: JsonPropertyName("percentage")] double Percentage,
    [property: JsonPropertyName("avg_score")] double AvgScore,
    [property: JsonPropertyName("score_range")] ScoreRange ScoreRange,
    [property: JsonPropertyName("top_contributors")] ImmutableArray<string> TopContributors);
/// <summary>
/// Score range for a bucket.
/// </summary>
/// <param name="Min">Lowest score in the bucket.</param>
/// <param name="Max">Highest score in the bucket.</param>
public sealed record ScoreRange(
    [property: JsonPropertyName("min")] double Min,
    [property: JsonPropertyName("max")] double Max);
/// <summary>
/// Flow from original to final severity after overrides.
/// </summary>
/// <param name="FromSeverity">Severity implied by the score alone.</param>
/// <param name="ToSeverity">Final severity after overrides.</param>
/// <param name="Count">Findings that followed this flow.</param>
/// <param name="IsEscalation">True when the final severity ranks higher.</param>
public sealed record SeverityFlow(
    [property: JsonPropertyName("from_severity")] string FromSeverity,
    [property: JsonPropertyName("to_severity")] string ToSeverity,
    [property: JsonPropertyName("count")] int Count,
    [property: JsonPropertyName("is_escalation")] bool IsEscalation);
/// <summary>
/// Breakdown by recommended action.
/// </summary>
/// <param name="ByAction">Bucket per final action, keyed by action name.</param>
/// <param name="ActionFlow">Movements from severity-implied to final action.</param>
/// <param name="DecisionStability">1 minus the fraction of findings whose action changed.</param>
public sealed record ActionBreakdownAnalysis(
    [property: JsonPropertyName("by_action")] ImmutableDictionary<string, ActionBucket> ByAction,
    [property: JsonPropertyName("action_flow")] ImmutableArray<ActionFlow> ActionFlow,
    [property: JsonPropertyName("decision_stability")] double DecisionStability);
/// <summary>
/// Details for an action bucket.
/// </summary>
/// <param name="Action">Lower-cased action name.</param>
/// <param name="Count">Findings with this recommended action.</param>
/// <param name="Percentage">Count as a percentage of all findings.</param>
/// <param name="AvgScore">Mean normalized score in the bucket.</param>
/// <param name="SeverityBreakdown">Severity histogram within the bucket.</param>
public sealed record ActionBucket(
    [property: JsonPropertyName("action")] string Action,
    [property: JsonPropertyName("count")] int Count,
    [property: JsonPropertyName("percentage")] double Percentage,
    [property: JsonPropertyName("avg_score")] double AvgScore,
    [property: JsonPropertyName("severity_breakdown")] ImmutableDictionary<string, int> SeverityBreakdown);
/// <summary>
/// Flow from original to final action after overrides.
/// </summary>
/// <param name="FromAction">Action implied by severity alone.</param>
/// <param name="ToAction">Final recommended action.</param>
/// <param name="Count">Findings that followed this flow.</param>
public sealed record ActionFlow(
    [property: JsonPropertyName("from_action")] string FromAction,
    [property: JsonPropertyName("to_action")] string ToAction,
    [property: JsonPropertyName("count")] int Count);
/// <summary>
/// Breakdown by component/package.
/// </summary>
/// <param name="TotalComponents">Distinct component purls observed.</param>
/// <param name="ComponentsWithFindings">Components with at least one scored finding.</param>
/// <param name="TopRiskComponents">Highest-risk components, ranked by worst score.</param>
/// <param name="EcosystemBreakdown">Summaries keyed by package ecosystem.</param>
public sealed record ComponentBreakdownAnalysis(
    [property: JsonPropertyName("total_components")] int TotalComponents,
    [property: JsonPropertyName("components_with_findings")] int ComponentsWithFindings,
    [property: JsonPropertyName("top_risk_components")] ImmutableArray<ComponentRiskSummary> TopRiskComponents,
    [property: JsonPropertyName("ecosystem_breakdown")] ImmutableDictionary<string, EcosystemSummary> EcosystemBreakdown);
/// <summary>
/// Risk summary for a component.
/// </summary>
/// <param name="ComponentPurl">Package URL of the component ("unknown" when unresolved).</param>
/// <param name="FindingCount">Findings attributed to the component.</param>
/// <param name="MaxScore">Worst normalized score.</param>
/// <param name="AvgScore">Mean normalized score.</param>
/// <param name="HighestSeverity">Most severe severity observed.</param>
/// <param name="RecommendedAction">Most restrictive action observed.</param>
public sealed record ComponentRiskSummary(
    [property: JsonPropertyName("component_purl")] string ComponentPurl,
    [property: JsonPropertyName("finding_count")] int FindingCount,
    [property: JsonPropertyName("max_score")] double MaxScore,
    [property: JsonPropertyName("avg_score")] double AvgScore,
    [property: JsonPropertyName("highest_severity")] string HighestSeverity,
    [property: JsonPropertyName("recommended_action")] string RecommendedAction);
/// <summary>
/// Summary for a package ecosystem.
/// </summary>
/// <param name="Ecosystem">Ecosystem name extracted from component purls.</param>
/// <param name="ComponentCount">Components seen in the ecosystem.</param>
/// <param name="FindingCount">Findings attributed to the ecosystem.</param>
/// <param name="AvgScore">Mean normalized score.</param>
/// <param name="CriticalCount">Findings with critical severity.</param>
/// <param name="HighCount">Findings with high severity.</param>
public sealed record EcosystemSummary(
    [property: JsonPropertyName("ecosystem")] string Ecosystem,
    [property: JsonPropertyName("component_count")] int ComponentCount,
    [property: JsonPropertyName("finding_count")] int FindingCount,
    [property: JsonPropertyName("avg_score")] double AvgScore,
    [property: JsonPropertyName("critical_count")] int CriticalCount,
    [property: JsonPropertyName("high_count")] int HighCount);
/// <summary>
/// Risk trend analysis (for comparison simulations).
/// </summary>
/// <param name="ComparisonType">Kind of comparison (e.g. "profile_comparison").</param>
/// <param name="ScoreTrend">Trend in mean score between baseline and compare runs.</param>
/// <param name="SeverityTrend">Trend in severity escalations vs de-escalations.</param>
/// <param name="ActionTrend">Trend in recommended-action changes.</param>
/// <param name="FindingsImproved">Findings whose score dropped by 1+ points.</param>
/// <param name="FindingsWorsened">Findings whose score rose by 1+ points.</param>
/// <param name="FindingsUnchanged">Findings whose score moved less than 1 point.</param>
public sealed record RiskTrendAnalysis(
    [property: JsonPropertyName("comparison_type")] string ComparisonType,
    [property: JsonPropertyName("score_trend")] TrendMetric ScoreTrend,
    [property: JsonPropertyName("severity_trend")] TrendMetric SeverityTrend,
    [property: JsonPropertyName("action_trend")] TrendMetric ActionTrend,
    [property: JsonPropertyName("findings_improved")] int FindingsImproved,
    [property: JsonPropertyName("findings_worsened")] int FindingsWorsened,
    [property: JsonPropertyName("findings_unchanged")] int FindingsUnchanged);
/// <summary>
/// Trend metric for comparison.
/// </summary>
/// <param name="Direction">Direction label (e.g. "improving", "worsening", "stable").</param>
/// <param name="Magnitude">Absolute size of the change.</param>
/// <param name="PercentageChange">Change relative to the baseline, in percent.</param>
/// <param name="IsSignificant">Whether the change exceeds the significance threshold.</param>
public sealed record TrendMetric(
    [property: JsonPropertyName("direction")] string Direction,
    [property: JsonPropertyName("magnitude")] double Magnitude,
    [property: JsonPropertyName("percentage_change")] double PercentageChange,
    [property: JsonPropertyName("is_significant")] bool IsSignificant);

View File

@@ -0,0 +1,897 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Policy.RiskProfile.Models;
namespace StellaOps.Policy.Engine.Simulation;
/// <summary>
/// Service for generating detailed breakdowns of risk simulation results.
/// Per POLICY-RISK-67-003.
/// </summary>
public sealed class RiskSimulationBreakdownService
{
    // Logger for diagnostic tracing of breakdown generation.
    private readonly ILogger<RiskSimulationBreakdownService> _logger;

    // Canonical severity ordering, least to most severe; index position decides
    // whether a severity change counts as an escalation.
    private static readonly ImmutableArray<string> SeverityOrder = ImmutableArray.Create(
        "informational", "low", "medium", "high", "critical");

    // Canonical action ordering, least to most restrictive.
    private static readonly ImmutableArray<string> ActionOrder = ImmutableArray.Create(
        "allow", "review", "deny");
public RiskSimulationBreakdownService(ILogger<RiskSimulationBreakdownService> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <summary>
/// Generates a detailed breakdown of a risk simulation result.
/// </summary>
public RiskSimulationBreakdown GenerateBreakdown(
RiskSimulationResult result,
RiskProfileModel profile,
IReadOnlyList<SimulationFinding> findings,
RiskSimulationBreakdownOptions? options = null)
{
ArgumentNullException.ThrowIfNull(result);
ArgumentNullException.ThrowIfNull(profile);
ArgumentNullException.ThrowIfNull(findings);
options ??= RiskSimulationBreakdownOptions.Default;
_logger.LogDebug(
"Generating breakdown for simulation {SimulationId} with {FindingCount} findings",
result.SimulationId, findings.Count);
var profileRef = new ProfileReference(
profile.Id,
profile.Version,
result.ProfileHash,
profile.Description,
profile.Extends);
var signalAnalysis = ComputeSignalAnalysis(result, profile, findings, options);
var overrideAnalysis = ComputeOverrideAnalysis(result, profile);
var scoreDistribution = ComputeScoreDistributionAnalysis(result, options);
var severityBreakdown = ComputeSeverityBreakdownAnalysis(result);
var actionBreakdown = ComputeActionBreakdownAnalysis(result);
var componentBreakdown = options.IncludeComponentBreakdown
? ComputeComponentBreakdownAnalysis(result, findings, options)
: null;
var determinismHash = ComputeDeterminismHash(result, profile);
return new RiskSimulationBreakdown(
result.SimulationId,
profileRef,
signalAnalysis,
overrideAnalysis,
scoreDistribution,
severityBreakdown,
actionBreakdown,
componentBreakdown,
RiskTrends: null, // Set by comparison operations
determinismHash);
}
/// <summary>
/// Generates a breakdown with trend analysis comparing two simulations.
/// </summary>
public RiskSimulationBreakdown GenerateComparisonBreakdown(
RiskSimulationResult baselineResult,
RiskSimulationResult compareResult,
RiskProfileModel baselineProfile,
RiskProfileModel compareProfile,
IReadOnlyList<SimulationFinding> findings,
RiskSimulationBreakdownOptions? options = null)
{
var breakdown = GenerateBreakdown(compareResult, compareProfile, findings, options);
var trends = ComputeRiskTrends(baselineResult, compareResult);
return breakdown with { RiskTrends = trends };
}
private SignalAnalysis ComputeSignalAnalysis(
RiskSimulationResult result,
RiskProfileModel profile,
IReadOnlyList<SimulationFinding> findings,
RiskSimulationBreakdownOptions options)
{
var signalStats = new List<SignalStatistics>();
var totalContribution = 0.0;
var signalsUsed = 0;
var findingsWithMissingSignals = 0;
var missingSignalCounts = new Dictionary<string, int>();
foreach (var signal in profile.Signals)
{
var weight = profile.Weights.GetValueOrDefault(signal.Name, 0.0);
var contributions = new List<double>();
var values = new List<double>();
var findingsWithSignal = 0;
var findingsMissing = 0;
foreach (var findingScore in result.FindingScores)
{
var contribution = findingScore.Contributions?
.FirstOrDefault(c => c.SignalName == signal.Name);
if (contribution != null)
{
findingsWithSignal++;
contributions.Add(contribution.Contribution);
if (contribution.SignalValue is double dv)
values.Add(dv);
else if (contribution.SignalValue is JsonElement je && je.TryGetDouble(out var jd))
values.Add(jd);
}
else
{
findingsMissing++;
missingSignalCounts.TryGetValue(signal.Name, out var count);
missingSignalCounts[signal.Name] = count + 1;
}
}
if (findingsWithSignal > 0)
{
signalsUsed++;
}
var signalTotalContribution = contributions.Sum();
totalContribution += signalTotalContribution;
var valueDistribution = values.Count > 0 && options.IncludeHistograms
? ComputeValueDistribution(values, options.HistogramBuckets)
: null;
signalStats.Add(new SignalStatistics(
signal.Name,
signal.Type.ToString().ToLowerInvariant(),
weight,
findingsWithSignal,
findingsMissing,
result.FindingScores.Count > 0
? (double)findingsWithSignal / result.FindingScores.Count * 100
: 0,
valueDistribution,
signalTotalContribution,
findingsWithSignal > 0 ? signalTotalContribution / findingsWithSignal : 0));
}
// Compute top contributors
var topContributors = signalStats
.Where(s => s.TotalContribution > 0)
.OrderByDescending(s => s.TotalContribution)
.Take(options.TopContributorsCount)
.Select(s => new SignalContributor(
s.SignalName,
s.TotalContribution,
totalContribution > 0 ? s.TotalContribution / totalContribution * 100 : 0,
s.ValueDistribution?.Mean ?? 0,
s.Weight,
s.Weight >= 0 ? "increase" : "decrease"))
.ToImmutableArray();
// Missing signal impact analysis
var avgMissingPerFinding = result.FindingScores.Count > 0
? missingSignalCounts.Values.Sum() / (double)result.FindingScores.Count
: 0;
var mostImpactfulMissing = missingSignalCounts
.OrderByDescending(kvp => kvp.Value * profile.Weights.GetValueOrDefault(kvp.Key, 0))
.Take(5)
.Select(kvp => kvp.Key)
.ToImmutableArray();
var missingImpact = new MissingSignalImpact(
findingsWithMissingSignals,
avgMissingPerFinding,
EstimateMissingSignalImpact(missingSignalCounts, profile),
mostImpactfulMissing);
return new SignalAnalysis(
profile.Signals.Count,
signalsUsed,
profile.Signals.Count - signalsUsed,
profile.Signals.Count > 0 ? (double)signalsUsed / profile.Signals.Count * 100 : 0,
signalStats.ToImmutableArray(),
topContributors,
missingImpact);
}
    /// <summary>
    /// Aggregates severity and decision override applications across all finding
    /// scores, grouped by the predicate that triggered them, and records
    /// conflicts where more than one severity override matched the same finding.
    /// </summary>
    /// <param name="result">Simulation result containing per-finding override records.</param>
    /// <param name="profile">Profile whose configured overrides were evaluated.</param>
    private OverrideAnalysis ComputeOverrideAnalysis(
        RiskSimulationResult result,
        RiskProfileModel profile)
    {
        // Trackers are keyed by predicate hash so repeated applications of the
        // same override accumulate into a single detail row.
        var severityOverrideDetails = new Dictionary<string, SeverityOverrideTracker>();
        var decisionOverrideDetails = new Dictionary<string, DecisionOverrideTracker>();
        var severityOverrideCount = 0;
        var decisionOverrideCount = 0;
        var conflicts = new List<OverrideConflict>();
        foreach (var score in result.FindingScores)
        {
            if (score.OverridesApplied == null)
                continue;
            foreach (var applied in score.OverridesApplied)
            {
                var predicateHash = ComputePredicateHash(applied.Predicate);
                if (applied.OverrideType == "severity")
                {
                    severityOverrideCount++;
                    if (!severityOverrideDetails.TryGetValue(predicateHash, out var tracker))
                    {
                        tracker = new SeverityOverrideTracker(
                            predicateHash,
                            SummarizePredicate(applied.Predicate),
                            applied.AppliedValue?.ToString() ?? "unknown");
                        severityOverrideDetails[predicateHash] = tracker;
                    }
                    tracker.Count++;
                    // Histogram of which severities this override replaced.
                    var origSev = applied.OriginalValue?.ToString() ?? "unknown";
                    tracker.OriginalSeverities.TryGetValue(origSev, out var count);
                    tracker.OriginalSeverities[origSev] = count + 1;
                }
                else if (applied.OverrideType == "decision")
                {
                    decisionOverrideCount++;
                    if (!decisionOverrideDetails.TryGetValue(predicateHash, out var tracker))
                    {
                        tracker = new DecisionOverrideTracker(
                            predicateHash,
                            SummarizePredicate(applied.Predicate),
                            applied.AppliedValue?.ToString() ?? "unknown",
                            applied.Reason);
                        decisionOverrideDetails[predicateHash] = tracker;
                    }
                    tracker.Count++;
                    // Histogram of which actions this override replaced.
                    var origAction = applied.OriginalValue?.ToString() ?? "unknown";
                    tracker.OriginalActions.TryGetValue(origAction, out var count);
                    tracker.OriginalActions[origAction] = count + 1;
                }
            }
            // Check for conflicts (multiple overrides of same type).
            // NOTE(review): only severity conflicts are detected, and only the
            // first two matches are reported; decision overrides are never
            // conflict-checked — confirm whether that is intentional.
            var severityOverrides = score.OverridesApplied.Where(o => o.OverrideType == "severity").ToList();
            if (severityOverrides.Count > 1)
            {
                conflicts.Add(new OverrideConflict(
                    score.FindingId,
                    "severity_conflict",
                    SummarizePredicate(severityOverrides[0].Predicate),
                    SummarizePredicate(severityOverrides[1].Predicate),
                    "first_match"));
            }
        }
        // "Evaluated" counts every configured override against every finding.
        var totalOverridesEvaluated = profile.Overrides.Severity.Count + profile.Overrides.Decisions.Count;
        var overrideApplicationRate = result.FindingScores.Count > 0
            ? (double)(severityOverrideCount + decisionOverrideCount) / result.FindingScores.Count * 100
            : 0;
        return new OverrideAnalysis(
            totalOverridesEvaluated * result.FindingScores.Count,
            severityOverrideCount,
            decisionOverrideCount,
            overrideApplicationRate,
            severityOverrideDetails.Values
                .Select(t => new SeverityOverrideDetail(
                    t.Hash, t.Summary, t.TargetSeverity, t.Count,
                    t.OriginalSeverities.ToImmutableDictionary()))
                .ToImmutableArray(),
            decisionOverrideDetails.Values
                .Select(t => new DecisionOverrideDetail(
                    t.Hash, t.Summary, t.TargetAction, t.Reason, t.Count,
                    t.OriginalActions.ToImmutableDictionary()))
                .ToImmutableArray(),
            conflicts.ToImmutableArray());
    }
private ScoreDistributionAnalysis ComputeScoreDistributionAnalysis(
RiskSimulationResult result,
RiskSimulationBreakdownOptions options)
{
var rawScores = result.FindingScores.Select(s => s.RawScore).ToList();
var normalizedScores = result.FindingScores.Select(s => s.NormalizedScore).ToList();
var rawStats = ComputeScoreStatistics(rawScores);
var normalizedStats = ComputeScoreStatistics(normalizedScores);
var buckets = ComputeScoreBuckets(normalizedScores, options.ScoreBucketCount);
var percentiles = ComputePercentiles(normalizedScores);
var outliers = ComputeOutliers(result.FindingScores, normalizedStats);
return new ScoreDistributionAnalysis(
rawStats,
normalizedStats,
buckets,
percentiles.ToImmutableDictionary(),
outliers);
}
    /// <summary>
    /// Buckets finding scores by final severity, tracks severity flows caused by
    /// overrides, and computes a Herfindahl-style concentration index.
    /// </summary>
    /// <param name="result">Simulation result supplying per-finding scores.</param>
    private SeverityBreakdownAnalysis ComputeSeverityBreakdownAnalysis(RiskSimulationResult result)
    {
        var bySeverity = new Dictionary<string, SeverityBucketBuilder>();
        var severityFlows = new Dictionary<(string from, string to), int>();
        foreach (var score in result.FindingScores)
        {
            var severity = score.Severity.ToString().ToLowerInvariant();
            if (!bySeverity.TryGetValue(severity, out var bucket))
            {
                bucket = new SeverityBucketBuilder(severity);
                bySeverity[severity] = bucket;
            }
            bucket.Count++;
            bucket.Scores.Add(score.NormalizedScore);
            // Track top contributors: count, per bucket, how often each signal
            // was the single largest contributor to a finding's score.
            var topContributor = score.Contributions?
                .OrderByDescending(c => c.ContributionPercentage)
                .FirstOrDefault();
            if (topContributor != null)
            {
                bucket.TopContributors.TryGetValue(topContributor.SignalName, out var count);
                bucket.TopContributors[topContributor.SignalName] = count + 1;
            }
            // Track severity flows (from score-based to override-based): a flow
            // is recorded when the final severity differs from the severity the
            // normalized score alone would imply.
            var originalSeverity = DetermineSeverityFromScore(score.NormalizedScore).ToString().ToLowerInvariant();
            if (originalSeverity != severity)
            {
                var flowKey = (originalSeverity, severity);
                severityFlows.TryGetValue(flowKey, out var flowCount);
                severityFlows[flowKey] = flowCount + 1;
            }
        }
        var total = result.FindingScores.Count;
        var severityBuckets = bySeverity.Values
            .Select(b => new SeverityBucket(
                b.Severity,
                b.Count,
                total > 0 ? (double)b.Count / total * 100 : 0,
                b.Scores.Count > 0 ? b.Scores.Average() : 0,
                new ScoreRange(
                    b.Scores.Count > 0 ? b.Scores.Min() : 0,
                    b.Scores.Count > 0 ? b.Scores.Max() : 0),
                b.TopContributors
                    .OrderByDescending(kvp => kvp.Value)
                    .Take(3)
                    .Select(kvp => kvp.Key)
                    .ToImmutableArray()))
            .ToImmutableDictionary(b => b.Severity);
        var flows = severityFlows
            .Select(kvp => new SeverityFlow(
                kvp.Key.from,
                kvp.Key.to,
                kvp.Value,
                // Escalation when the destination ranks higher in SeverityOrder.
                SeverityOrder.IndexOf(kvp.Key.to) > SeverityOrder.IndexOf(kvp.Key.from)))
            .ToImmutableArray();
        // Severity concentration (HHI - higher = more concentrated)
        var concentration = bySeverity.Values.Sum(b =>
            Math.Pow((double)b.Count / (total > 0 ? total : 1), 2));
        return new SeverityBreakdownAnalysis(severityBuckets, flows, concentration);
    }
    /// <summary>
    /// Buckets finding scores by recommended action, tracks flows from the
    /// severity-implied action to the final action, and derives a decision
    /// stability score (1 = no actions changed).
    /// </summary>
    /// <param name="result">Simulation result supplying per-finding scores.</param>
    private ActionBreakdownAnalysis ComputeActionBreakdownAnalysis(RiskSimulationResult result)
    {
        var byAction = new Dictionary<string, ActionBucketBuilder>();
        var actionFlows = new Dictionary<(string from, string to), int>();
        foreach (var score in result.FindingScores)
        {
            var action = score.RecommendedAction.ToString().ToLowerInvariant();
            var severity = score.Severity.ToString().ToLowerInvariant();
            if (!byAction.TryGetValue(action, out var bucket))
            {
                bucket = new ActionBucketBuilder(action);
                byAction[action] = bucket;
            }
            bucket.Count++;
            bucket.Scores.Add(score.NormalizedScore);
            bucket.SeverityCounts.TryGetValue(severity, out var sevCount);
            bucket.SeverityCounts[severity] = sevCount + 1;
            // Track action flows: recorded when the final action differs from
            // the action the finding's severity alone would imply.
            var originalAction = DetermineActionFromSeverity(score.Severity).ToString().ToLowerInvariant();
            if (originalAction != action)
            {
                var flowKey = (originalAction, action);
                actionFlows.TryGetValue(flowKey, out var flowCount);
                actionFlows[flowKey] = flowCount + 1;
            }
        }
        var total = result.FindingScores.Count;
        var actionBuckets = byAction.Values
            .Select(b => new ActionBucket(
                b.Action,
                b.Count,
                total > 0 ? (double)b.Count / total * 100 : 0,
                b.Scores.Count > 0 ? b.Scores.Average() : 0,
                b.SeverityCounts.ToImmutableDictionary()))
            .ToImmutableDictionary(b => b.Action);
        var flows = actionFlows
            .Select(kvp => new ActionFlow(kvp.Key.from, kvp.Key.to, kvp.Value))
            .ToImmutableArray();
        // Decision stability (1 - flow rate)
        var totalFlows = flows.Sum(f => f.Count);
        var stability = total > 0 ? 1.0 - (double)totalFlows / total : 1.0;
        return new ActionBreakdownAnalysis(actionBuckets, flows, stability);
    }
private ComponentBreakdownAnalysis ComputeComponentBreakdownAnalysis(
RiskSimulationResult result,
IReadOnlyList<SimulationFinding> findings,
RiskSimulationBreakdownOptions options)
{
var componentScores = new Dictionary<string, ComponentScoreTracker>();
var ecosystemStats = new Dictionary<string, EcosystemTracker>();
foreach (var score in result.FindingScores)
{
var finding = findings.FirstOrDefault(f => f.FindingId == score.FindingId);
var purl = finding?.ComponentPurl ?? "unknown";
var ecosystem = ExtractEcosystem(purl);
// Component tracking
if (!componentScores.TryGetValue(purl, out var tracker))
{
tracker = new ComponentScoreTracker(purl);
componentScores[purl] = tracker;
}
tracker.Scores.Add(score.NormalizedScore);
tracker.Severities.Add(score.Severity);
tracker.Actions.Add(score.RecommendedAction);
// Ecosystem tracking
if (!ecosystemStats.TryGetValue(ecosystem, out var ecoTracker))
{
ecoTracker = new EcosystemTracker(ecosystem);
ecosystemStats[ecosystem] = ecoTracker;
}
ecoTracker.Components.Add(purl);
ecoTracker.FindingCount++;
ecoTracker.Scores.Add(score.NormalizedScore);
if (score.Severity == RiskSeverity.Critical) ecoTracker.CriticalCount++;
if (score.Severity == RiskSeverity.High) ecoTracker.HighCount++;
}
var topComponents = componentScores.Values
.OrderByDescending(c => c.Scores.Max())
.ThenByDescending(c => c.Scores.Count)
.Take(options.TopComponentsCount)
.Select(c => new ComponentRiskSummary(
c.Purl,
c.Scores.Count,
c.Scores.Max(),
c.Scores.Average(),
GetHighestSeverity(c.Severities),
GetMostRestrictiveAction(c.Actions)))
.ToImmutableArray();
var ecosystemBreakdown = ecosystemStats.Values
.Select(e => new EcosystemSummary(
e.Ecosystem,
e.Components.Count,
e.FindingCount,
e.Scores.Count > 0 ? e.Scores.Average() : 0,
e.CriticalCount,
e.HighCount))
.ToImmutableDictionary(e => e.Ecosystem);
return new ComponentBreakdownAnalysis(
componentScores.Count,
componentScores.Values.Count(c => c.Scores.Count > 0),
topComponents,
ecosystemBreakdown);
}
/// <summary>
/// Compares two simulation results finding-by-finding and summarises movement
/// in scores, severities, and recommended actions as trend metrics.
/// Findings present in only one of the two results are skipped.
/// </summary>
/// <param name="baseline">Result produced by the baseline profile.</param>
/// <param name="compare">Result produced by the comparison profile.</param>
/// <returns>Trend analysis labelled "profile_comparison".</returns>
private RiskTrendAnalysis ComputeRiskTrends(
    RiskSimulationResult baseline,
    RiskSimulationResult compare)
{
    var baselineScores = baseline.FindingScores.ToDictionary(s => s.FindingId);
    var compareScores = compare.FindingScores.ToDictionary(s => s.FindingId);
    var improved = 0;
    var worsened = 0;
    var unchanged = 0;
    var severityEscalations = 0;
    var severityDeescalations = 0;
    var actionChanges = 0;
    // NOTE: the previous revision also accumulated a scoreDeltaSum here that was
    // never read; the dead accumulator has been removed.
    foreach (var (findingId, baseScore) in baselineScores)
    {
        if (!compareScores.TryGetValue(findingId, out var compScore))
            continue;
        // Score deltas below one normalized point are treated as noise.
        var scoreDelta = compScore.NormalizedScore - baseScore.NormalizedScore;
        if (Math.Abs(scoreDelta) < 1.0)
            unchanged++;
        else if (scoreDelta < 0)
            improved++;
        else
            worsened++;
        // Rank severities via the shared ordering table; unknown names rank -1.
        var baseSevIdx = SeverityOrder.IndexOf(baseScore.Severity.ToString().ToLowerInvariant());
        var compSevIdx = SeverityOrder.IndexOf(compScore.Severity.ToString().ToLowerInvariant());
        if (compSevIdx > baseSevIdx) severityEscalations++;
        else if (compSevIdx < baseSevIdx) severityDeescalations++;
        if (baseScore.RecommendedAction != compScore.RecommendedAction)
            actionChanges++;
    }
    var baselineAvg = baseline.AggregateMetrics.MeanScore;
    var compareAvg = compare.AggregateMetrics.MeanScore;
    var scorePercentChange = baselineAvg > 0
        ? (compareAvg - baselineAvg) / baselineAvg * 100
        : 0;
    // Mean-score drift beyond ±1% sets the direction; beyond 5% it is significant.
    var scoreTrend = new TrendMetric(
        scorePercentChange < -1 ? "improving" : scorePercentChange > 1 ? "worsening" : "stable",
        Math.Abs(compareAvg - baselineAvg),
        scorePercentChange,
        Math.Abs(scorePercentChange) > 5);
    // Net escalation count drives severity direction; >5% of findings is significant.
    var severityTrend = new TrendMetric(
        severityDeescalations > severityEscalations ? "improving" :
        severityEscalations > severityDeescalations ? "worsening" : "stable",
        Math.Abs(severityEscalations - severityDeescalations),
        baselineScores.Count > 0
            ? (double)(severityEscalations - severityDeescalations) / baselineScores.Count * 100
            : 0,
        Math.Abs(severityEscalations - severityDeescalations) > baselineScores.Count * 0.05);
    // Action flips have no inherent direction, hence the fixed "changed" label;
    // flips in more than 10% of findings are significant.
    var actionTrend = new TrendMetric(
        "changed",
        actionChanges,
        baselineScores.Count > 0 ? (double)actionChanges / baselineScores.Count * 100 : 0,
        actionChanges > baselineScores.Count * 0.1);
    return new RiskTrendAnalysis(
        "profile_comparison",
        scoreTrend,
        severityTrend,
        actionTrend,
        improved,
        worsened,
        unchanged);
}
/// <summary>
/// Builds min/max/mean/median/std-dev plus an optional histogram for a list of
/// raw values. Returns an all-null distribution when there are no values; the
/// histogram is empty when every value is identical (max == min).
/// </summary>
private static ValueDistribution ComputeValueDistribution(List<double> values, int bucketCount)
{
    if (values.Count == 0)
    {
        return new ValueDistribution(null, null, null, null, null, null);
    }

    var ordered = values.OrderBy(v => v).ToList();
    var lo = ordered[0];
    var hi = ordered[^1];
    var mean = values.Average();
    var mid = ordered.Count / 2;
    var median = ordered.Count % 2 == 0
        ? (ordered[mid - 1] + ordered[mid]) / 2
        : ordered[mid];
    var stdDev = Math.Sqrt(values.Average(v => Math.Pow(v - mean, 2)));

    var histogram = new List<HistogramBucket>();
    if (hi > lo)
    {
        var width = (hi - lo) / bucketCount;
        for (var i = 0; i < bucketCount; i++)
        {
            var from = lo + i * width;
            var to = lo + (i + 1) * width;
            var last = i == bucketCount - 1;
            // Final bucket is closed on both ends so the maximum value is counted.
            var count = values.Count(v => v >= from && (last ? v <= to : v < to));
            histogram.Add(new HistogramBucket(from, to, count, (double)count / values.Count * 100));
        }
    }

    return new ValueDistribution(lo, hi, mean, median, stdDev, histogram.ToImmutableArray());
}
/// <summary>
/// Computes descriptive statistics (count, min, max, mean, median, std-dev,
/// variance, skewness, excess kurtosis) for a set of scores. Mean/median/std-dev/
/// variance are rounded to 2 decimals, the higher moments to 3; an empty input
/// yields an all-zero result.
/// </summary>
private static ScoreStatistics ComputeScoreStatistics(List<double> scores)
{
    if (scores.Count == 0)
    {
        return new ScoreStatistics(0, 0, 0, 0, 0, 0, 0, 0, 0);
    }

    var ordered = scores.OrderBy(s => s).ToList();
    var mean = scores.Average();
    var mid = ordered.Count / 2;
    var median = ordered.Count % 2 == 0
        ? (ordered[mid - 1] + ordered[mid]) / 2
        : ordered[mid];
    var variance = scores.Average(s => Math.Pow(s - mean, 2));
    var stdDev = Math.Sqrt(variance);

    // Higher moments are only defined for a non-degenerate distribution.
    var skewness = stdDev > 0
        ? scores.Average(s => Math.Pow((s - mean) / stdDev, 3))
        : 0;
    var kurtosis = stdDev > 0
        ? scores.Average(s => Math.Pow((s - mean) / stdDev, 4)) - 3 // excess kurtosis
        : 0;

    return new ScoreStatistics(
        scores.Count,
        ordered[0],
        ordered[^1],
        Math.Round(mean, 2),
        Math.Round(median, 2),
        Math.Round(stdDev, 2),
        Math.Round(variance, 2),
        Math.Round(skewness, 3),
        Math.Round(kurtosis, 3));
}
/// <summary>
/// Partitions normalized scores (0–100) into <paramref name="bucketCount"/> equal-width
/// buckets with human-readable labels (labels are tuned for 10 buckets; extra
/// buckets fall back to "Bucket N").
/// </summary>
/// <remarks>
/// Fix: the last bucket is now inclusive of its upper bound, matching
/// ComputeValueDistribution. Previously a score of exactly 100 satisfied no
/// bucket predicate, so it was silently dropped and percentages could not sum to 100.
/// </remarks>
private static ImmutableArray<ScoreBucket> ComputeScoreBuckets(List<double> scores, int bucketCount)
{
    var buckets = new List<ScoreBucket>(bucketCount);
    var bucketSize = 100.0 / bucketCount;
    for (var i = 0; i < bucketCount; i++)
    {
        var rangeMin = i * bucketSize;
        var rangeMax = (i + 1) * bucketSize;
        var isLast = i == bucketCount - 1;
        // Last bucket closed on both ends so the maximum score (100) is counted.
        var count = scores.Count(s => s >= rangeMin && (isLast ? s <= rangeMax : s < rangeMax));
        var label = i switch
        {
            0 => "Very Low",
            1 => "Low",
            2 => "Low-Medium",
            3 => "Medium",
            4 => "Medium",
            5 => "Medium-High",
            6 => "High",
            7 => "High",
            8 => "Very High",
            9 => "Critical",
            _ => $"Bucket {i + 1}"
        };
        buckets.Add(new ScoreBucket(
            rangeMin, rangeMax, label, count,
            scores.Count > 0 ? (double)count / scores.Count * 100 : 0));
    }
    return buckets.ToImmutableArray();
}
/// <summary>
/// Returns p25/p50/p75/p90/p95/p99 of the given scores using nearest-rank
/// (floor-index) selection — no interpolation between neighbours.
/// An empty input yields an empty dictionary.
/// </summary>
private static Dictionary<string, double> ComputePercentiles(List<double> scores)
{
    var result = new Dictionary<string, double>();
    if (scores.Count == 0)
    {
        return result;
    }

    var ordered = scores.OrderBy(s => s).ToList();
    foreach (var q in new[] { 0.25, 0.50, 0.75, 0.90, 0.95, 0.99 })
    {
        // Floor of the fractional rank; matches the original truncating cast.
        var idx = (int)(q * (ordered.Count - 1));
        result[$"p{(int)(q * 100)}"] = ordered[idx];
    }

    return result;
}
/// <summary>
/// Flags findings whose normalized score exceeds the upper Tukey fence
/// (Q3 + 1.5 * IQR). Quartiles are taken at simple index positions
/// (count/4 and 3*count/4), not interpolated quantiles.
/// NOTE(review): the <paramref name="stats"/> argument is currently unused —
/// confirm whether a z-score based method was intended instead.
/// </summary>
private static OutlierAnalysis ComputeOutliers(
    IReadOnlyList<FindingScore> scores,
    ScoreStatistics stats)
{
    if (scores.Count == 0)
    {
        return new OutlierAnalysis(0, 0, ImmutableArray<string>.Empty);
    }

    var ordered = scores.OrderBy(s => s.NormalizedScore).ToList();
    var q1 = ordered[ordered.Count / 4].NormalizedScore;
    var q3 = ordered[ordered.Count * 3 / 4].NormalizedScore;
    var upperFence = q3 + 1.5 * (q3 - q1);

    var flagged = scores
        .Where(s => s.NormalizedScore > upperFence)
        .Select(s => s.FindingId)
        .ToImmutableArray();

    return new OutlierAnalysis(flagged.Length, upperFence, flagged);
}
/// <summary>
/// Estimates how much total score could be attributed to absent signals:
/// each missing signal is approximated at the midpoint value (0.5), scaled by
/// the magnitude of its profile weight and the number of findings missing it.
/// Signals with no configured weight contribute nothing.
/// </summary>
private static double EstimateMissingSignalImpact(
    Dictionary<string, int> missingCounts,
    RiskProfileModel profile)
{
    return missingCounts.Sum(entry =>
        Math.Abs(profile.Weights.GetValueOrDefault(entry.Key, 0.0)) * 0.5 * entry.Value);
}
/// <summary>
/// Maps a normalized score to a severity band: 90+ Critical, 70+ High,
/// 40+ Medium, 10+ Low, anything else (including NaN) Informational.
/// </summary>
private static RiskSeverity DetermineSeverityFromScore(double score)
{
    if (score >= 90) return RiskSeverity.Critical;
    if (score >= 70) return RiskSeverity.High;
    if (score >= 40) return RiskSeverity.Medium;
    if (score >= 10) return RiskSeverity.Low;
    return RiskSeverity.Informational;
}
/// <summary>
/// Maps severity to the recommended gate action: Critical/High deny,
/// Medium goes to review, everything else is allowed.
/// </summary>
private static RiskAction DetermineActionFromSeverity(RiskSeverity severity)
{
    if (severity is RiskSeverity.Critical or RiskSeverity.High)
    {
        return RiskAction.Deny;
    }

    return severity == RiskSeverity.Medium ? RiskAction.Review : RiskAction.Allow;
}
/// <summary>
/// Extracts the package type ("ecosystem") from a package URL, e.g.
/// "pkg:npm/lodash@4.17.21" → "npm". Returns "unknown" for blank, non-purl,
/// or type-less input.
/// </summary>
/// <remarks>
/// Fix: per the purl spec the type segment is terminated by '/', never ':'.
/// The previous colon-first scan mis-parsed purls whose version carries a
/// digest (e.g. "pkg:oci/nginx@sha256:abcd"), returning the whole prefix up to
/// the digest colon as the ecosystem. Also uses an explicit ordinal comparison
/// for the scheme prefix.
/// </remarks>
private static string ExtractEcosystem(string purl)
{
    if (string.IsNullOrWhiteSpace(purl) || !purl.StartsWith("pkg:", StringComparison.Ordinal))
        return "unknown";
    var slashIdx = purl.IndexOf('/', 4);
    // No slash → no type segment; slash at index 4 → empty type. Both are invalid.
    if (slashIdx <= 4)
        return "unknown";
    return purl[4..slashIdx];
}
/// <summary>
/// Returns the lowercase name of the highest severity in the list (by enum
/// ordering), or "unknown" for an empty list.
/// </summary>
private static string GetHighestSeverity(List<RiskSeverity> severities)
    => severities.Count == 0
        ? "unknown"
        : severities.Max().ToString().ToLowerInvariant();
/// <summary>
/// Returns the lowercase name of the most restrictive action in the list (by
/// enum ordering), or "unknown" for an empty list.
/// </summary>
private static string GetMostRestrictiveAction(List<RiskAction> actions)
    => actions.Count == 0
        ? "unknown"
        : actions.Max().ToString().ToLowerInvariant();
/// <summary>
/// Produces a short fingerprint for an override predicate: compact JSON,
/// SHA-256, first 8 lowercase hex chars. Stable across calls for the same
/// dictionary contents in the same insertion order.
/// NOTE(review): CamelCase naming applies to CLR property names only; dictionary
/// keys pass through verbatim unless DictionaryKeyPolicy is set — confirm intent.
/// </summary>
private static string ComputePredicateHash(Dictionary<string, object> predicate)
{
    var options = new JsonSerializerOptions
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };
    var serialized = JsonSerializer.Serialize(predicate, options);
    var digest = SHA256.HashData(Encoding.UTF8.GetBytes(serialized));
    return Convert.ToHexString(digest)[..8].ToLowerInvariant();
}
/// <summary>
/// Renders a predicate as a comma-separated "key=value" string, in the
/// dictionary's enumeration order. Empty predicate yields an empty string.
/// </summary>
private static string SummarizePredicate(Dictionary<string, object> predicate)
    => string.Join(", ", predicate.Select(kvp => $"{kvp.Key}={kvp.Value}"));
/// <summary>
/// Builds a "sha256:&lt;16 hex chars&gt;" reproducibility fingerprint from the
/// simulation id, profile hash, and finding count.
/// NOTE(review): <paramref name="profile"/> is not folded into the hash —
/// confirm whether profile content was meant to participate.
/// </summary>
private static string ComputeDeterminismHash(RiskSimulationResult result, RiskProfileModel profile)
{
    var seed = $"{result.SimulationId}:{result.ProfileHash}:{result.FindingScores.Count}";
    var digest = SHA256.HashData(Encoding.UTF8.GetBytes(seed));
    return $"sha256:{Convert.ToHexString(digest)[..16].ToLowerInvariant()}";
}
// Helper classes for tracking state during computation
/// <summary>
/// Mutable accumulator used while tallying how often a severity-override
/// predicate matched and which original severities it replaced.
/// </summary>
private sealed class SeverityOverrideTracker
{
    public SeverityOverrideTracker(string hash, string summary, string targetSeverity)
    {
        Hash = hash;
        Summary = summary;
        TargetSeverity = targetSeverity;
    }

    public string Hash { get; }
    public string Summary { get; }
    public string TargetSeverity { get; }
    public int Count { get; set; }
    public Dictionary<string, int> OriginalSeverities { get; } = new();
}
/// <summary>
/// Mutable accumulator for decision-override matches: how often a predicate
/// fired, which action it forced, and which actions it replaced.
/// </summary>
private sealed class DecisionOverrideTracker
{
    public DecisionOverrideTracker(string hash, string summary, string targetAction, string? reason)
    {
        Hash = hash;
        Summary = summary;
        TargetAction = targetAction;
        Reason = reason;
    }

    public string Hash { get; }
    public string Summary { get; }
    public string TargetAction { get; }
    public string? Reason { get; }
    public int Count { get; set; }
    public Dictionary<string, int> OriginalActions { get; } = new();
}
/// <summary>
/// Mutable builder collecting per-severity counts, scores, and top signal
/// contributors while the severity breakdown is assembled.
/// </summary>
private sealed class SeverityBucketBuilder
{
    public SeverityBucketBuilder(string severity) => Severity = severity;

    public string Severity { get; }
    public int Count { get; set; }
    public List<double> Scores { get; } = new();
    public Dictionary<string, int> TopContributors { get; } = new();
}
/// <summary>
/// Mutable builder collecting per-action counts, scores, and severity tallies
/// while the action breakdown is assembled.
/// </summary>
private sealed class ActionBucketBuilder
{
    public ActionBucketBuilder(string action) => Action = action;

    public string Action { get; }
    public int Count { get; set; }
    public List<double> Scores { get; } = new();
    public Dictionary<string, int> SeverityCounts { get; } = new();
}
/// <summary>
/// Mutable per-component accumulator of scores, severities, and actions,
/// keyed by package URL.
/// </summary>
private sealed class ComponentScoreTracker
{
    public ComponentScoreTracker(string purl) => Purl = purl;

    public string Purl { get; }
    public List<double> Scores { get; } = new();
    public List<RiskSeverity> Severities { get; } = new();
    public List<RiskAction> Actions { get; } = new();
}
/// <summary>
/// Mutable per-ecosystem accumulator: distinct components, finding count,
/// scores, and critical/high tallies.
/// </summary>
private sealed class EcosystemTracker
{
    public EcosystemTracker(string ecosystem) => Ecosystem = ecosystem;

    public string Ecosystem { get; }
    public HashSet<string> Components { get; } = new();
    public int FindingCount { get; set; }
    public List<double> Scores { get; } = new();
    public int CriticalCount { get; set; }
    public int HighCount { get; set; }
}
}
/// <summary>
/// Options for risk simulation breakdown generation.
/// All knobs default to a full analysis; use <see cref="Quick"/> for a cheaper preset.
/// </summary>
public sealed record RiskSimulationBreakdownOptions
{
    /// <summary>Whether to include component breakdown analysis.</summary>
    public bool IncludeComponentBreakdown { get; init; } = true;

    /// <summary>Whether to include value histograms for signals.</summary>
    public bool IncludeHistograms { get; init; } = true;

    /// <summary>Number of histogram buckets.</summary>
    public int HistogramBuckets { get; init; } = 10;

    /// <summary>Number of score buckets for distribution.</summary>
    public int ScoreBucketCount { get; init; } = 10;

    /// <summary>Number of top signal contributors to include.</summary>
    public int TopContributorsCount { get; init; } = 10;

    /// <summary>Number of top components to include.</summary>
    public int TopComponentsCount { get; init; } = 20;

    /// <summary>Default options (full analysis).</summary>
    public static RiskSimulationBreakdownOptions Default { get; } = new();

    /// <summary>Minimal options for quick analysis: skips component breakdown and histograms.</summary>
    public static RiskSimulationBreakdownOptions Quick { get; } = new()
    {
        IncludeComponentBreakdown = false,
        IncludeHistograms = false,
        TopContributorsCount = 5,
        TopComponentsCount = 10
    };
}

View File

@@ -12,6 +12,7 @@ namespace StellaOps.Policy.Engine.Simulation;
/// <summary>
/// Service for running risk simulations with score distributions and contribution breakdowns.
/// Enhanced with detailed breakdown analytics per POLICY-RISK-67-003.
/// </summary>
public sealed class RiskSimulationService
{
@@ -20,6 +21,7 @@ public sealed class RiskSimulationService
private readonly RiskProfileConfigurationService _profileService;
private readonly RiskProfileHasher _hasher;
private readonly ICryptoHash _cryptoHash;
private readonly RiskSimulationBreakdownService? _breakdownService;
private static readonly double[] PercentileLevels = { 0.25, 0.50, 0.75, 0.90, 0.95, 0.99 };
private const int TopMoverCount = 10;
@@ -29,13 +31,15 @@ public sealed class RiskSimulationService
ILogger<RiskSimulationService> logger,
TimeProvider timeProvider,
RiskProfileConfigurationService profileService,
ICryptoHash cryptoHash)
ICryptoHash cryptoHash,
RiskSimulationBreakdownService? breakdownService = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_profileService = profileService ?? throw new ArgumentNullException(nameof(profileService));
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_hasher = new RiskProfileHasher(cryptoHash);
_breakdownService = breakdownService;
}
/// <summary>
@@ -461,4 +465,183 @@ public sealed class RiskSimulationService
var hash = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(seed), HashPurpose.Content);
return $"rsim-{hash[..16]}";
}
/// <summary>
/// Runs a risk simulation with detailed breakdown analytics.
/// Per POLICY-RISK-67-003.
/// </summary>
/// <param name="request">Simulation request; contribution output is force-enabled internally.</param>
/// <param name="breakdownOptions">Optional breakdown tuning; service defaults apply when null.</param>
/// <returns>The simulation result paired with its breakdown and total elapsed time.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when no breakdown service was registered or the requested profile does not exist.
/// </exception>
public RiskSimulationWithBreakdown SimulateWithBreakdown(
    RiskSimulationRequest request,
    RiskSimulationBreakdownOptions? breakdownOptions = null)
{
    ArgumentNullException.ThrowIfNull(request);

    // Breakdown support is an optional DI registration; fail fast if absent.
    if (_breakdownService == null)
    {
        throw new InvalidOperationException(
            "Breakdown service not available. Register RiskSimulationBreakdownService in DI.");
    }

    using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity("risk_simulation.run_with_breakdown");
    activity?.SetTag("profile.id", request.ProfileId);
    activity?.SetTag("finding.count", request.Findings.Count);

    var sw = Stopwatch.StartNew();

    // Run simulation with contributions enabled for breakdown; the breakdown
    // generator needs per-signal contribution data regardless of what the
    // caller originally requested.
    var simulationRequest = request with { IncludeContributions = true };
    var result = Simulate(simulationRequest);

    var profile = _profileService.GetProfile(request.ProfileId);
    if (profile == null)
    {
        throw new InvalidOperationException($"Risk profile '{request.ProfileId}' not found.");
    }

    // Generate breakdown from the contribution-rich result.
    var breakdown = _breakdownService.GenerateBreakdown(
        result,
        profile,
        request.Findings,
        breakdownOptions);

    sw.Stop();

    _logger.LogInformation(
        "Risk simulation with breakdown {SimulationId} completed in {ElapsedMs}ms",
        result.SimulationId, sw.Elapsed.TotalMilliseconds);

    PolicyEngineTelemetry.RiskSimulationsRun.Add(1);

    // Elapsed time covers both the simulation and the breakdown generation.
    return new RiskSimulationWithBreakdown(result, breakdown, sw.Elapsed.TotalMilliseconds);
}
/// <summary>
/// Runs a comparison simulation between two profiles with trend analysis.
/// Per POLICY-RISK-67-003.
/// </summary>
/// <param name="baseProfileId">Profile used for the baseline run.</param>
/// <param name="compareProfileId">Profile used for the comparison run.</param>
/// <param name="findings">Findings evaluated identically under both profiles.</param>
/// <param name="breakdownOptions">Optional breakdown tuning; defaults apply when null.</param>
/// <returns>Both results plus a breakdown that includes trend analysis.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when no breakdown service was registered or either profile does not exist.
/// </exception>
public RiskProfileComparisonResult CompareProfilesWithBreakdown(
    string baseProfileId,
    string compareProfileId,
    IReadOnlyList<SimulationFinding> findings,
    RiskSimulationBreakdownOptions? breakdownOptions = null)
{
    // Fix: ThrowIfNullOrWhiteSpace lives on ArgumentException (since .NET 8),
    // not ArgumentNullException — the previous code did not compile.
    ArgumentException.ThrowIfNullOrWhiteSpace(baseProfileId);
    ArgumentException.ThrowIfNullOrWhiteSpace(compareProfileId);
    ArgumentNullException.ThrowIfNull(findings);

    if (_breakdownService == null)
    {
        throw new InvalidOperationException(
            "Breakdown service not available. Register RiskSimulationBreakdownService in DI.");
    }

    using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity("risk_simulation.compare_profiles");
    activity?.SetTag("profile.base", baseProfileId);
    activity?.SetTag("profile.compare", compareProfileId);
    activity?.SetTag("finding.count", findings.Count);

    var sw = Stopwatch.StartNew();

    // Run baseline simulation. Contributions and distribution are required by
    // the comparison breakdown, so both are enabled for each run.
    var baselineRequest = new RiskSimulationRequest(
        ProfileId: baseProfileId,
        ProfileVersion: null,
        Findings: findings,
        IncludeContributions: true,
        IncludeDistribution: true,
        Mode: SimulationMode.Full);
    var baselineResult = Simulate(baselineRequest);

    // Run comparison simulation over the same findings.
    var compareRequest = new RiskSimulationRequest(
        ProfileId: compareProfileId,
        ProfileVersion: null,
        Findings: findings,
        IncludeContributions: true,
        IncludeDistribution: true,
        Mode: SimulationMode.Full);
    var compareResult = Simulate(compareRequest);

    // Resolve both profiles; either missing is a caller error.
    var baseProfile = _profileService.GetProfile(baseProfileId)
        ?? throw new InvalidOperationException($"Profile '{baseProfileId}' not found.");
    var compareProfile = _profileService.GetProfile(compareProfileId)
        ?? throw new InvalidOperationException($"Profile '{compareProfileId}' not found.");

    // Generate breakdown with trends between the two runs.
    var breakdown = _breakdownService.GenerateComparisonBreakdown(
        baselineResult,
        compareResult,
        baseProfile,
        compareProfile,
        findings,
        breakdownOptions);

    sw.Stop();

    _logger.LogInformation(
        "Profile comparison completed between {BaseProfile} and {CompareProfile} in {ElapsedMs}ms",
        baseProfileId, compareProfileId, sw.Elapsed.TotalMilliseconds);

    return new RiskProfileComparisonResult(
        BaselineResult: baselineResult,
        CompareResult: compareResult,
        Breakdown: breakdown,
        ExecutionTimeMs: sw.Elapsed.TotalMilliseconds);
}
/// <summary>
/// Generates a standalone breakdown for an existing simulation result, using
/// the profile referenced by that result.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when no breakdown service was registered or the result's profile no longer exists.
/// </exception>
public RiskSimulationBreakdown GenerateBreakdown(
    RiskSimulationResult result,
    IReadOnlyList<SimulationFinding> findings,
    RiskSimulationBreakdownOptions? options = null)
{
    ArgumentNullException.ThrowIfNull(result);
    ArgumentNullException.ThrowIfNull(findings);

    if (_breakdownService is null)
    {
        throw new InvalidOperationException(
            "Breakdown service not available. Register RiskSimulationBreakdownService in DI.");
    }

    var profile = _profileService.GetProfile(result.ProfileId);
    if (profile is null)
    {
        throw new InvalidOperationException($"Profile '{result.ProfileId}' not found.");
    }

    return _breakdownService.GenerateBreakdown(result, profile, findings, options);
}
}
/// <summary>
/// Risk simulation result with detailed breakdown.
/// Per POLICY-RISK-67-003.
/// </summary>
/// <param name="Result">The simulation result.</param>
/// <param name="Breakdown">Detailed breakdown analytics.</param>
/// <param name="TotalExecutionTimeMs">Total execution time including breakdown generation.</param>
/// <remarks>
/// Fix: the per-parameter docs were previously written as <c>///</c> comments inside
/// the parameter list, where they attach to nothing and emit no XML documentation;
/// positional record parameters are documented via <c>&lt;param&gt;</c> tags on the record.
/// </remarks>
public sealed record RiskSimulationWithBreakdown(
    RiskSimulationResult Result,
    RiskSimulationBreakdown Breakdown,
    double TotalExecutionTimeMs);
/// <summary>
/// Result of comparing two risk profiles.
/// Per POLICY-RISK-67-003.
/// </summary>
/// <param name="BaselineResult">Baseline simulation result.</param>
/// <param name="CompareResult">Comparison simulation result.</param>
/// <param name="Breakdown">Breakdown with trend analysis.</param>
/// <param name="ExecutionTimeMs">Total execution time.</param>
/// <remarks>
/// Fix: per-parameter docs moved from inline <c>///</c> comments inside the parameter
/// list (which attach to nothing) to <c>&lt;param&gt;</c> tags on the record declaration.
/// </remarks>
public sealed record RiskProfileComparisonResult(
    RiskSimulationResult BaselineResult,
    RiskSimulationResult CompareResult,
    RiskSimulationBreakdown Breakdown,
    double ExecutionTimeMs);