up
Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled

This commit is contained in:
StellaOps Bot
2025-11-27 23:44:42 +02:00
parent ef6e4b2067
commit 3b96b2e3ea
298 changed files with 47516 additions and 1168 deletions

View File

@@ -0,0 +1,352 @@
using System.Collections.Immutable;
using System.Diagnostics;
namespace StellaOps.Policy.Engine.DeterminismGuard;
/// <summary>
/// Service that enforces determinism constraints during policy evaluation.
/// Combines static analysis (via <see cref="ProhibitedPatternAnalyzer"/>) and
/// runtime monitoring (via guarded <see cref="EvaluationScope"/> instances).
/// </summary>
public sealed class DeterminismGuardService
{
    private readonly DeterminismGuardOptions _options;
    private readonly ProhibitedPatternAnalyzer _analyzer;
    private readonly RuntimeDeterminismMonitor _runtimeMonitor;

    public DeterminismGuardService(DeterminismGuardOptions? options = null)
    {
        _options = options ?? DeterminismGuardOptions.Default;
        _analyzer = new ProhibitedPatternAnalyzer();
        _runtimeMonitor = new RuntimeDeterminismMonitor(_options);
    }

    /// <summary>
    /// Analyzes source code for determinism violations using the configured options.
    /// </summary>
    public DeterminismAnalysisResult AnalyzeSource(string sourceCode, string? fileName = null)
        => _analyzer.AnalyzeSource(sourceCode, fileName, _options);

    /// <summary>
    /// Creates a guarded execution scope for policy evaluation.
    /// </summary>
    public EvaluationScope CreateScope(string scopeId, DateTimeOffset evaluationTimestamp)
        => new(scopeId, evaluationTimestamp, _options, _runtimeMonitor);

    /// <summary>
    /// Validates that a policy evaluation context is deterministic.
    /// Currently the only check performed is a null-context check.
    /// </summary>
    public DeterminismAnalysisResult ValidateContext<TContext>(TContext context, string contextName)
    {
        var timer = Stopwatch.StartNew();
        var found = new List<DeterminismViolation>();

        if (context is null)
        {
            found.Add(new DeterminismViolation
            {
                Category = DeterminismViolationCategory.Other,
                ViolationType = "NullContext",
                Message = $"Evaluation context '{contextName}' is null",
                Severity = DeterminismViolationSeverity.Error,
                Remediation = "Provide a valid evaluation context"
            });
        }

        timer.Stop();

        // A blocking violation only fails the result while enforcement is enabled.
        var blocking = found.Any(v => v.Severity >= _options.FailOnSeverity);
        return new DeterminismAnalysisResult
        {
            Passed = !(_options.EnforcementEnabled && blocking),
            Violations = found.ToImmutableArray(),
            CountBySeverity = found
                .GroupBy(v => v.Severity)
                .ToImmutableDictionary(g => g.Key, g => g.Count()),
            AnalysisDurationMs = timer.ElapsedMilliseconds,
            EnforcementEnabled = _options.EnforcementEnabled
        };
    }

    /// <summary>
    /// Gets a determinism-safe time provider pinned to the supplied timestamp.
    /// </summary>
    public DeterministicTimeProvider GetTimeProvider(DateTimeOffset fixedTimestamp)
        => new(fixedTimestamp);
}
/// <summary>
/// A guarded scope for policy evaluation that tracks determinism violations.
/// Registers with the runtime monitor on creation (when monitoring is enabled)
/// and unregisters on <see cref="Dispose"/>.
/// </summary>
public sealed class EvaluationScope : IDisposable
{
    private readonly DeterminismGuardOptions _options;
    private readonly RuntimeDeterminismMonitor _monitor;
    private readonly Stopwatch _timer = Stopwatch.StartNew();
    private readonly List<DeterminismViolation> _recorded = new();
    private readonly object _gate = new();
    private bool _disposed;

    internal EvaluationScope(
        string scopeId,
        DateTimeOffset evaluationTimestamp,
        DeterminismGuardOptions options,
        RuntimeDeterminismMonitor monitor)
    {
        ScopeId = scopeId ?? throw new ArgumentNullException(nameof(scopeId));
        EvaluationTimestamp = evaluationTimestamp;
        _options = options;
        _monitor = monitor;

        if (_options.EnableRuntimeMonitoring)
        {
            _monitor.EnterScope(scopeId);
        }
    }

    /// <summary>
    /// Scope identifier for tracing.
    /// </summary>
    public string ScopeId { get; }

    /// <summary>
    /// The fixed evaluation timestamp for this scope.
    /// </summary>
    public DateTimeOffset EvaluationTimestamp { get; }

    /// <summary>
    /// Reports a runtime violation detected during evaluation. Throws
    /// <see cref="DeterminismViolationException"/> when enforcement is enabled and the
    /// violation meets the configured fail severity; the violation is recorded either way.
    /// </summary>
    public void ReportViolation(DeterminismViolation violation)
    {
        ArgumentNullException.ThrowIfNull(violation);

        lock (_gate)
        {
            _recorded.Add(violation);
        }

        if (_options.EnforcementEnabled && violation.Severity >= _options.FailOnSeverity)
        {
            throw new DeterminismViolationException(violation);
        }
    }

    /// <summary>
    /// Gets the current timestamp (always returns the fixed evaluation timestamp).
    /// </summary>
    public DateTimeOffset GetTimestamp() => EvaluationTimestamp;

    /// <summary>
    /// Gets a snapshot of all violations recorded in this scope so far.
    /// </summary>
    public IReadOnlyList<DeterminismViolation> GetViolations() => Snapshot();

    /// <summary>
    /// Completes the scope: stops the timer and returns the analysis result built from
    /// every violation recorded up to this point.
    /// </summary>
    public DeterminismAnalysisResult Complete()
    {
        _timer.Stop();
        var snapshot = Snapshot();

        var blocking = snapshot.Any(v => v.Severity >= _options.FailOnSeverity);
        return new DeterminismAnalysisResult
        {
            Passed = !_options.EnforcementEnabled || !blocking,
            Violations = snapshot.ToImmutableArray(),
            CountBySeverity = snapshot
                .GroupBy(v => v.Severity)
                .ToImmutableDictionary(g => g.Key, g => g.Count()),
            AnalysisDurationMs = _timer.ElapsedMilliseconds,
            EnforcementEnabled = _options.EnforcementEnabled
        };
    }

    public void Dispose()
    {
        if (_disposed)
        {
            return;
        }

        _disposed = true;
        if (_options.EnableRuntimeMonitoring)
        {
            _monitor.ExitScope(ScopeId);
        }
    }

    // Copies the violation list under the lock so callers never observe a live list.
    private List<DeterminismViolation> Snapshot()
    {
        lock (_gate)
        {
            return _recorded.ToList();
        }
    }
}
/// <summary>
/// Exception thrown when a determinism violation is detected with enforcement enabled.
/// </summary>
public sealed class DeterminismViolationException : Exception
{
    /// <summary>
    /// Creates the exception for the given violation.
    /// </summary>
    /// <param name="violation">The violation that triggered enforcement.</param>
    /// <exception cref="ArgumentNullException">
    /// Thrown when <paramref name="violation"/> is null. The original code dereferenced
    /// <c>violation.Message</c> in the base-constructor call before any null check, so a
    /// null argument surfaced as a NullReferenceException with a misleading stack trace.
    /// </exception>
    public DeterminismViolationException(DeterminismViolation violation)
        : base($"Determinism violation: {(violation ?? throw new ArgumentNullException(nameof(violation))).Message}")
    {
        Violation = violation;
    }

    /// <summary>
    /// The violation that caused this exception.
    /// </summary>
    public DeterminismViolation Violation { get; }
}
/// <summary>
/// Time provider that always returns a fixed timestamp, making any code that consumes a
/// <see cref="TimeProvider"/> deterministic.
/// </summary>
public sealed class DeterministicTimeProvider : TimeProvider
{
    private readonly DateTimeOffset _fixedTimestamp;

    public DeterministicTimeProvider(DateTimeOffset fixedTimestamp)
    {
        _fixedTimestamp = fixedTimestamp;
    }

    /// <summary>Always returns the injected timestamp.</summary>
    public override DateTimeOffset GetUtcNow() => _fixedTimestamp;

    /// <summary>Always UTC, so local-time conversions cannot vary by host.</summary>
    public override TimeZoneInfo LocalTimeZone => TimeZoneInfo.Utc;

    /// <summary>
    /// Fixed high-resolution timestamp. The base implementation delegates to
    /// <see cref="Stopwatch.GetTimestamp"/>, which is wall-clock-driven and therefore
    /// non-deterministic — contrary to this class's purpose. Returning the tick count of
    /// the fixed instant keeps <see cref="TimeProvider.GetElapsedTime(long, long)"/> at zero.
    /// </summary>
    public override long GetTimestamp() => _fixedTimestamp.UtcTicks;

    /// <summary>Frequency matching the tick-based timestamps above (ticks per second).</summary>
    public override long TimestampFrequency => TimeSpan.TicksPerSecond;
}
/// <summary>
/// Runtime monitor for detecting non-deterministic operations.
/// Tracks which guarded scopes are currently active; all members are thread-safe.
/// </summary>
internal sealed class RuntimeDeterminismMonitor
{
    private readonly DeterminismGuardOptions _options;
    private readonly HashSet<string> _activeScopes = new(StringComparer.Ordinal);
    private readonly object _lock = new();

    public RuntimeDeterminismMonitor(DeterminismGuardOptions options)
    {
        _options = options;
    }

    /// <summary>Registers a scope as active.</summary>
    public void EnterScope(string scopeId)
    {
        lock (_lock)
        {
            _activeScopes.Add(scopeId);
        }
    }

    /// <summary>Removes a scope from the active set.</summary>
    public void ExitScope(string scopeId)
    {
        lock (_lock)
        {
            _activeScopes.Remove(scopeId);
        }
    }

    /// <summary>
    /// Whether any guarded scope is currently active.
    /// Fix: the previous expression-bodied getter read <c>_activeScopes.Count</c> without
    /// synchronization while EnterScope/ExitScope mutate the set under <c>_lock</c> — a
    /// data race on HashSet. The read now takes the same lock.
    /// </summary>
    public bool IsInScope
    {
        get
        {
            lock (_lock)
            {
                return _activeScopes.Count > 0;
            }
        }
    }

    /// <summary>
    /// Checks if we're in a guarded scope and should intercept operations.
    /// </summary>
    public bool ShouldIntercept()
    {
        return _options.EnableRuntimeMonitoring && IsInScope;
    }
}
/// <summary>
/// Extension methods for integrating determinism guard with evaluation.
/// </summary>
public static class DeterminismGuardExtensions
{
    /// <summary>
    /// Executes an evaluation function within a determinism-guarded scope.
    /// The scope is always completed (and failed results logged) even if the
    /// evaluation throws.
    /// </summary>
    public static TResult ExecuteGuarded<TResult>(
        this DeterminismGuardService guard,
        string scopeId,
        DateTimeOffset evaluationTimestamp,
        Func<EvaluationScope, TResult> evaluation)
    {
        ArgumentNullException.ThrowIfNull(guard);
        ArgumentNullException.ThrowIfNull(evaluation);

        using var scope = guard.CreateScope(scopeId, evaluationTimestamp);
        try
        {
            return evaluation(scope);
        }
        finally
        {
            LogFailedResult(scope.Complete());
        }
    }

    /// <summary>
    /// Executes an async evaluation function within a determinism-guarded scope.
    /// </summary>
    public static async Task<TResult> ExecuteGuardedAsync<TResult>(
        this DeterminismGuardService guard,
        string scopeId,
        DateTimeOffset evaluationTimestamp,
        Func<EvaluationScope, Task<TResult>> evaluation)
    {
        ArgumentNullException.ThrowIfNull(guard);
        ArgumentNullException.ThrowIfNull(evaluation);

        using var scope = guard.CreateScope(scopeId, evaluationTimestamp);
        try
        {
            return await evaluation(scope).ConfigureAwait(false);
        }
        finally
        {
            LogFailedResult(scope.Complete());
        }
    }

    /// <summary>
    /// Logs every violation of a failed analysis result. Extracted because the sync and
    /// async paths previously duplicated this loop verbatim.
    /// </summary>
    private static void LogFailedResult(DeterminismAnalysisResult result)
    {
        if (result.Passed)
        {
            return;
        }

        // Log violations even if not throwing.
        foreach (var violation in result.Violations)
        {
            // In production, this would log to structured logging
            System.Diagnostics.Debug.WriteLine(
                $"[DeterminismGuard] {violation.Severity}: {violation.Message}");
        }
    }
}

View File

@@ -0,0 +1,197 @@
using System.Collections.Immutable;
namespace StellaOps.Policy.Engine.DeterminismGuard;
/// <summary>
/// Represents a determinism violation detected during static analysis or runtime.
/// Immutable record: instances have value equality and are safe to share across threads.
/// </summary>
public sealed record DeterminismViolation
{
    /// <summary>
    /// Category of the violation (wall-clock, random number, network access, ...).
    /// </summary>
    public required DeterminismViolationCategory Category { get; init; }

    /// <summary>
    /// Specific violation type — typically the prohibited API name (e.g. "DateTime.Now")
    /// or a symbolic code such as "NullContext" / "EvaluationException".
    /// </summary>
    public required string ViolationType { get; init; }

    /// <summary>
    /// Human-readable description of the violation.
    /// </summary>
    public required string Message { get; init; }

    /// <summary>
    /// Source location (file path), if known; null when not derived from static analysis.
    /// </summary>
    public string? SourceFile { get; init; }

    /// <summary>
    /// 1-based line number (if known from static analysis).
    /// </summary>
    public int? LineNumber { get; init; }

    /// <summary>
    /// Member or method name where violation occurred (best-effort, may be null).
    /// </summary>
    public string? MemberName { get; init; }

    /// <summary>
    /// Severity of the violation. Compared numerically against the configured
    /// fail-on severity by the guard (see DeterminismGuardOptions.FailOnSeverity).
    /// </summary>
    public required DeterminismViolationSeverity Severity { get; init; }

    /// <summary>
    /// Suggested remediation, if one is available.
    /// </summary>
    public string? Remediation { get; init; }
}
/// <summary>
/// Category of determinism violation. Each prohibited pattern and runtime check is
/// tagged with exactly one category for reporting and filtering.
/// </summary>
public enum DeterminismViolationCategory
{
    /// <summary>Wall-clock time access (DateTime.Now, DateTimeOffset.UtcNow, etc.).</summary>
    WallClock,
    /// <summary>Random number generation (Random, RandomNumberGenerator).</summary>
    RandomNumber,
    /// <summary>Network access (HttpClient, WebClient, sockets, etc.).</summary>
    NetworkAccess,
    /// <summary>Filesystem access (file reads/writes).</summary>
    FileSystemAccess,
    /// <summary>Environment variable or host-information access.</summary>
    EnvironmentAccess,
    /// <summary>GUID generation (Guid.NewGuid).</summary>
    GuidGeneration,
    /// <summary>Thread/Task operations that may introduce non-determinism.</summary>
    ConcurrencyHazard,
    /// <summary>Floating-point operations that may have platform variance.</summary>
    FloatingPointHazard,
    /// <summary>Dictionary iteration without stable ordering.</summary>
    UnstableIteration,
    /// <summary>Other non-deterministic operation (also used for null contexts and
    /// unexpected evaluation exceptions).</summary>
    Other
}
/// <summary>
/// Severity level of a determinism violation.
/// NOTE: members are deliberately declared in ascending order — the guard compares
/// severities numerically (e.g. <c>violation.Severity &gt;= options.FailOnSeverity</c>),
/// so do not reorder or renumber them.
/// </summary>
public enum DeterminismViolationSeverity
{
    /// <summary>Informational - may not cause issues.</summary>
    Info,
    /// <summary>Warning - potential non-determinism.</summary>
    Warning,
    /// <summary>Error - definite non-determinism source.</summary>
    Error,
    /// <summary>Critical - must be fixed before deployment.</summary>
    Critical
}
/// <summary>
/// Result of determinism analysis. Immutable record produced by the analyzers and
/// by <c>EvaluationScope.Complete()</c>.
/// </summary>
public sealed record DeterminismAnalysisResult
{
    /// <summary>
    /// Whether the analysis passed. As computed by the producing code, this is true
    /// when enforcement is disabled OR when no violation reached the configured
    /// fail-on severity.
    /// </summary>
    public required bool Passed { get; init; }

    /// <summary>
    /// All violations found, regardless of severity.
    /// </summary>
    public required ImmutableArray<DeterminismViolation> Violations { get; init; }

    /// <summary>
    /// Count of violations by severity (only severities that occurred have entries).
    /// </summary>
    public required ImmutableDictionary<DeterminismViolationSeverity, int> CountBySeverity { get; init; }

    /// <summary>
    /// Analysis duration in milliseconds.
    /// </summary>
    public required long AnalysisDurationMs { get; init; }

    /// <summary>
    /// Whether the guard is currently enforcing (blocking on violations).
    /// </summary>
    public required bool EnforcementEnabled { get; init; }

    /// <summary>
    /// Creates a passing result with no violations (used e.g. for excluded files).
    /// </summary>
    public static DeterminismAnalysisResult Pass(long durationMs, bool enforcementEnabled) => new()
    {
        Passed = true,
        Violations = ImmutableArray<DeterminismViolation>.Empty,
        CountBySeverity = ImmutableDictionary<DeterminismViolationSeverity, int>.Empty,
        AnalysisDurationMs = durationMs,
        EnforcementEnabled = enforcementEnabled
    };
}
/// <summary>
/// Configuration for determinism guard behavior. Immutable record; use
/// <c>with</c>-expressions to derive variants.
/// </summary>
public sealed record DeterminismGuardOptions
{
    /// <summary>
    /// Whether enforcement is enabled (blocks on violations). When false, analyses
    /// always report Passed regardless of violations found.
    /// </summary>
    public bool EnforcementEnabled { get; init; } = true;

    /// <summary>
    /// Minimum severity level to fail enforcement (compared with >=).
    /// </summary>
    public DeterminismViolationSeverity FailOnSeverity { get; init; } = DeterminismViolationSeverity.Error;

    /// <summary>
    /// Whether to log all violations regardless of enforcement.
    /// NOTE(review): not consulted by the guard code in this module — confirm it is
    /// consumed elsewhere before relying on it.
    /// </summary>
    public bool LogAllViolations { get; init; } = true;

    /// <summary>
    /// Whether to analyze code statically before execution.
    /// NOTE(review): not consulted by the guard code in this module — TODO confirm.
    /// </summary>
    public bool EnableStaticAnalysis { get; init; } = true;

    /// <summary>
    /// Whether to monitor runtime behavior (scope enter/exit tracking).
    /// </summary>
    public bool EnableRuntimeMonitoring { get; init; } = true;

    /// <summary>
    /// File-name substrings to exclude from analysis (e.g. test code); matched
    /// case-insensitively against the file name.
    /// </summary>
    public ImmutableArray<string> ExcludePatterns { get; init; } = ImmutableArray<string>.Empty;

    /// <summary>
    /// Default options for production use.
    /// </summary>
    public static DeterminismGuardOptions Default { get; } = new();

    /// <summary>
    /// Options for development/testing: enforcement is disabled, so evaluations are
    /// never blocked (FailOnSeverity is effectively moot while enforcement is off).
    /// </summary>
    public static DeterminismGuardOptions Development { get; } = new()
    {
        EnforcementEnabled = false,
        FailOnSeverity = DeterminismViolationSeverity.Critical,
        LogAllViolations = true
    };
}

View File

@@ -0,0 +1,375 @@
using System.Collections.Immutable;
using System.Diagnostics;
using StellaOps.PolicyDsl;
namespace StellaOps.Policy.Engine.DeterminismGuard;
/// <summary>
/// Wraps policy evaluation with determinism guard protection.
/// Enforces static analysis and runtime monitoring during evaluation.
/// </summary>
public sealed class GuardedPolicyEvaluator
{
    private readonly DeterminismGuardService _guard;
    private readonly ProhibitedPatternAnalyzer _analyzer;
    private readonly DeterminismGuardOptions _options;

    public GuardedPolicyEvaluator(DeterminismGuardOptions? options = null)
    {
        // Retain the effective options so per-call overrides can fall back to them.
        _options = options ?? DeterminismGuardOptions.Default;
        _guard = new DeterminismGuardService(_options);
        _analyzer = new ProhibitedPatternAnalyzer();
    }

    /// <summary>
    /// Pre-validates policy source code for determinism violations.
    /// Should be called during policy compilation/registration.
    /// Fix: the <paramref name="options"/> parameter was previously accepted but
    /// silently ignored (the call delegated to the guard, which always used the
    /// constructor options). Per-call options now take effect.
    /// </summary>
    public DeterminismAnalysisResult ValidatePolicySource(
        string sourceCode,
        string? fileName = null,
        DeterminismGuardOptions? options = null)
    {
        return _analyzer.AnalyzeSource(sourceCode, fileName, options ?? _options);
    }

    /// <summary>
    /// Pre-validates multiple policy source files. When <paramref name="options"/> is
    /// null, falls back to the evaluator's configured options (previously fell back to
    /// the global defaults, inconsistent with the rest of this class).
    /// </summary>
    public DeterminismAnalysisResult ValidatePolicySources(
        IEnumerable<(string SourceCode, string FileName)> sources,
        DeterminismGuardOptions? options = null)
    {
        return _analyzer.AnalyzeMultiple(sources, options ?? _options);
    }

    /// <summary>
    /// Evaluates a policy within a determinism-guarded scope. Never throws for guard
    /// violations or evaluation errors; failures are reported via the result.
    /// </summary>
    public GuardedEvaluationResult<TResult> Evaluate<TResult>(
        string scopeId,
        DateTimeOffset evaluationTimestamp,
        Func<EvaluationScope, TResult> evaluation)
    {
        ArgumentNullException.ThrowIfNull(evaluation);
        var stopwatch = Stopwatch.StartNew();
        using var scope = _guard.CreateScope(scopeId, evaluationTimestamp);
        try
        {
            var result = evaluation(scope);
            var guardResult = scope.Complete();
            stopwatch.Stop();
            return new GuardedEvaluationResult<TResult>
            {
                Succeeded = guardResult.Passed,
                Result = result,
                Violations = guardResult.Violations,
                EvaluationDurationMs = stopwatch.ElapsedMilliseconds,
                ScopeId = scopeId,
                EvaluationTimestamp = evaluationTimestamp
            };
        }
        catch (DeterminismViolationException ex)
        {
            return BlockedResult<TResult>(scope, ex, stopwatch, scopeId, evaluationTimestamp);
        }
        catch (Exception ex)
        {
            return FailedResult<TResult>(scope, ex, stopwatch, scopeId, evaluationTimestamp);
        }
    }

    /// <summary>
    /// Evaluates a policy asynchronously within a determinism-guarded scope.
    /// Cancellation requested via <paramref name="cancellationToken"/> is rethrown.
    /// </summary>
    public async Task<GuardedEvaluationResult<TResult>> EvaluateAsync<TResult>(
        string scopeId,
        DateTimeOffset evaluationTimestamp,
        Func<EvaluationScope, Task<TResult>> evaluation,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(evaluation);
        var stopwatch = Stopwatch.StartNew();
        using var scope = _guard.CreateScope(scopeId, evaluationTimestamp);
        try
        {
            var result = await evaluation(scope).ConfigureAwait(false);
            var guardResult = scope.Complete();
            stopwatch.Stop();
            return new GuardedEvaluationResult<TResult>
            {
                Succeeded = guardResult.Passed,
                Result = result,
                Violations = guardResult.Violations,
                EvaluationDurationMs = stopwatch.ElapsedMilliseconds,
                ScopeId = scopeId,
                EvaluationTimestamp = evaluationTimestamp
            };
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Caller-initiated cancellation is not a guard failure — propagate it.
            throw;
        }
        catch (DeterminismViolationException ex)
        {
            return BlockedResult<TResult>(scope, ex, stopwatch, scopeId, evaluationTimestamp);
        }
        catch (Exception ex)
        {
            return FailedResult<TResult>(scope, ex, stopwatch, scopeId, evaluationTimestamp);
        }
    }

    /// <summary>
    /// Gets the determinism guard service for advanced usage.
    /// </summary>
    public DeterminismGuardService Guard => _guard;

    /// <summary>
    /// Builds the result for an evaluation blocked by an enforced determinism violation.
    /// Shared by the sync and async paths, which previously duplicated this code.
    /// </summary>
    private static GuardedEvaluationResult<TResult> BlockedResult<TResult>(
        EvaluationScope scope,
        DeterminismViolationException ex,
        Stopwatch stopwatch,
        string scopeId,
        DateTimeOffset evaluationTimestamp)
    {
        var guardResult = scope.Complete();
        stopwatch.Stop();
        return new GuardedEvaluationResult<TResult>
        {
            Succeeded = false,
            Result = default,
            Violations = guardResult.Violations,
            BlockingViolation = ex.Violation,
            EvaluationDurationMs = stopwatch.ElapsedMilliseconds,
            ScopeId = scopeId,
            EvaluationTimestamp = evaluationTimestamp
        };
    }

    /// <summary>
    /// Builds the result for an evaluation that failed with an unexpected exception;
    /// the exception is recorded as a critical violation. Shared by sync/async paths.
    /// </summary>
    private static GuardedEvaluationResult<TResult> FailedResult<TResult>(
        EvaluationScope scope,
        Exception ex,
        Stopwatch stopwatch,
        string scopeId,
        DateTimeOffset evaluationTimestamp)
    {
        var violations = scope.GetViolations();
        stopwatch.Stop();
        var exceptionViolation = new DeterminismViolation
        {
            Category = DeterminismViolationCategory.Other,
            ViolationType = "EvaluationException",
            Message = $"Unexpected exception during evaluation: {ex.Message}",
            Severity = DeterminismViolationSeverity.Critical,
            Remediation = "Review policy logic for errors"
        };
        return new GuardedEvaluationResult<TResult>
        {
            Succeeded = false,
            Result = default,
            Violations = violations.Append(exceptionViolation).ToImmutableArray(),
            BlockingViolation = exceptionViolation,
            EvaluationDurationMs = stopwatch.ElapsedMilliseconds,
            ScopeId = scopeId,
            EvaluationTimestamp = evaluationTimestamp,
            Exception = ex
        };
    }
}
/// <summary>
/// Result of a guarded policy evaluation. Immutable record; failures are conveyed
/// through <see cref="Succeeded"/>, <see cref="BlockingViolation"/> and
/// <see cref="Exception"/> rather than thrown.
/// </summary>
public sealed record GuardedEvaluationResult<TResult>
{
    /// <summary>
    /// Whether the evaluation succeeded without blocking violations.
    /// </summary>
    public required bool Succeeded { get; init; }

    /// <summary>
    /// The evaluation result (default(TResult) when the evaluation failed).
    /// </summary>
    public TResult? Result { get; init; }

    /// <summary>
    /// All violations detected during evaluation.
    /// </summary>
    public required ImmutableArray<DeterminismViolation> Violations { get; init; }

    /// <summary>
    /// The violation that caused evaluation to be blocked (if any).
    /// </summary>
    public DeterminismViolation? BlockingViolation { get; init; }

    /// <summary>
    /// Evaluation duration in milliseconds.
    /// </summary>
    public required long EvaluationDurationMs { get; init; }

    /// <summary>
    /// Scope identifier for tracing.
    /// </summary>
    public required string ScopeId { get; init; }

    /// <summary>
    /// The fixed evaluation timestamp used.
    /// </summary>
    public required DateTimeOffset EvaluationTimestamp { get; init; }

    /// <summary>
    /// Exception that occurred during evaluation (if any).
    /// </summary>
    public Exception? Exception { get; init; }

    /// <summary>
    /// Number of violations by severity. Recomputed on every access (expression-bodied
    /// property, not cached) — capture it in a local if queried repeatedly.
    /// </summary>
    public ImmutableDictionary<DeterminismViolationSeverity, int> ViolationCountBySeverity =>
        Violations
            .GroupBy(v => v.Severity)
            .ToImmutableDictionary(g => g.Key, g => g.Count());

    /// <summary>
    /// Whether there are any violations (blocking or not).
    /// </summary>
    public bool HasViolations => !Violations.IsDefaultOrEmpty;

    /// <summary>
    /// Whether the evaluation was blocked by a violation.
    /// </summary>
    public bool WasBlocked => BlockingViolation is not null;
}
/// <summary>
/// Fluent builder for a <see cref="GuardedPolicyEvaluator"/> with custom configuration.
/// Starts from <see cref="DeterminismGuardOptions.Default"/> and applies each setting
/// as a non-destructive record update.
/// </summary>
public sealed class GuardedPolicyEvaluatorBuilder
{
    // Accumulated configuration; the starting values are DeterminismGuardOptions.Default,
    // which match the documented builder defaults.
    private DeterminismGuardOptions _options = DeterminismGuardOptions.Default;

    /// <summary>
    /// Enables or disables enforcement (blocking on violations).
    /// </summary>
    public GuardedPolicyEvaluatorBuilder WithEnforcement(bool enabled)
    {
        _options = _options with { EnforcementEnabled = enabled };
        return this;
    }

    /// <summary>
    /// Sets the minimum severity level to block evaluation.
    /// </summary>
    public GuardedPolicyEvaluatorBuilder FailOnSeverity(DeterminismViolationSeverity severity)
    {
        _options = _options with { FailOnSeverity = severity };
        return this;
    }

    /// <summary>
    /// Enables or disables static code analysis.
    /// </summary>
    public GuardedPolicyEvaluatorBuilder WithStaticAnalysis(bool enabled)
    {
        _options = _options with { EnableStaticAnalysis = enabled };
        return this;
    }

    /// <summary>
    /// Enables or disables runtime monitoring.
    /// </summary>
    public GuardedPolicyEvaluatorBuilder WithRuntimeMonitoring(bool enabled)
    {
        _options = _options with { EnableRuntimeMonitoring = enabled };
        return this;
    }

    /// <summary>
    /// Enables or disables logging of all violations.
    /// </summary>
    public GuardedPolicyEvaluatorBuilder WithViolationLogging(bool enabled)
    {
        _options = _options with { LogAllViolations = enabled };
        return this;
    }

    /// <summary>
    /// Adds patterns to exclude from analysis (appends to any previously added).
    /// </summary>
    public GuardedPolicyEvaluatorBuilder ExcludePatterns(params string[] patterns)
    {
        _options = _options with { ExcludePatterns = _options.ExcludePatterns.AddRange(patterns) };
        return this;
    }

    /// <summary>
    /// Creates the configured GuardedPolicyEvaluator.
    /// </summary>
    public GuardedPolicyEvaluator Build() => new(_options);

    /// <summary>
    /// Creates a development-mode evaluator (warnings only, no blocking).
    /// </summary>
    public static GuardedPolicyEvaluator CreateDevelopment() => new(DeterminismGuardOptions.Development);

    /// <summary>
    /// Creates a production-mode evaluator (full enforcement).
    /// </summary>
    public static GuardedPolicyEvaluator CreateProduction() => new(DeterminismGuardOptions.Default);
}

View File

@@ -0,0 +1,412 @@
using System.Collections.Immutable;
using System.Diagnostics;
using System.Text.RegularExpressions;
namespace StellaOps.Policy.Engine.DeterminismGuard;
/// <summary>
/// Static analyzer that detects prohibited non-deterministic patterns in source code.
/// </summary>
public sealed partial class ProhibitedPatternAnalyzer
{
private static readonly ImmutableArray<ProhibitedPattern> Patterns = CreatePatterns();
/// <summary>
/// Analyzes source code line by line for prohibited non-deterministic patterns.
/// </summary>
/// <param name="sourceCode">The source text to scan.</param>
/// <param name="fileName">Optional file name, used for exclusion checks and reporting.</param>
/// <param name="options">Guard options; null falls back to <see cref="DeterminismGuardOptions.Default"/>.</param>
public DeterminismAnalysisResult AnalyzeSource(
    string sourceCode,
    string? fileName,
    DeterminismGuardOptions options)
{
    ArgumentNullException.ThrowIfNull(sourceCode);
    options ??= DeterminismGuardOptions.Default;
    var stopwatch = Stopwatch.StartNew();
    var violations = new List<DeterminismViolation>();

    // Excluded files (e.g. test code) short-circuit to an immediate pass.
    if (fileName is not null && IsExcluded(fileName, options.ExcludePatterns))
    {
        return DeterminismAnalysisResult.Pass(stopwatch.ElapsedMilliseconds, options.EnforcementEnabled);
    }

    // Split into lines for line-number tracking. Fix: CRLF input leaves a trailing
    // '\r' on every line after Split('\n'); trim it so matching sees clean lines.
    var lines = sourceCode.Split('\n');
    var inBlockComment = false;
    for (var lineIndex = 0; lineIndex < lines.Length; lineIndex++)
    {
        var line = lines[lineIndex].TrimEnd('\r');
        var lineNumber = lineIndex + 1;

        // Fix: track /* ... */ block comments across lines. The previous code only
        // skipped lines starting with "/*" or "*", so interior block-comment lines
        // that did not start with '*' were scanned and produced false positives.
        if (inBlockComment)
        {
            var close = line.IndexOf("*/", StringComparison.Ordinal);
            if (close < 0)
            {
                continue;
            }
            inBlockComment = false;
            // Scan only the code remaining after the comment closes.
            line = line[(close + 2)..];
        }

        // Skip single-line comments and doc-comment continuation lines.
        var trimmedLine = line.TrimStart();
        if (trimmedLine.StartsWith("//", StringComparison.Ordinal) ||
            trimmedLine.StartsWith("*", StringComparison.Ordinal))
        {
            continue;
        }
        if (trimmedLine.StartsWith("/*", StringComparison.Ordinal))
        {
            // Enter block-comment state unless the comment also closes on this line.
            inBlockComment = !trimmedLine.Contains("*/", StringComparison.Ordinal);
            continue;
        }

        foreach (var pattern in Patterns)
        {
            if (pattern.Regex.IsMatch(line))
            {
                violations.Add(new DeterminismViolation
                {
                    Category = pattern.Category,
                    ViolationType = pattern.ViolationType,
                    Message = pattern.Message,
                    SourceFile = fileName,
                    LineNumber = lineNumber,
                    MemberName = ExtractMemberContext(lines, lineIndex),
                    Severity = pattern.Severity,
                    Remediation = pattern.Remediation
                });
            }
        }
    }

    stopwatch.Stop();
    var countBySeverity = violations
        .GroupBy(v => v.Severity)
        .ToImmutableDictionary(g => g.Key, g => g.Count());
    var hasBlockingViolation = violations.Any(v => v.Severity >= options.FailOnSeverity);
    var passed = !options.EnforcementEnabled || !hasBlockingViolation;
    return new DeterminismAnalysisResult
    {
        Passed = passed,
        Violations = violations.ToImmutableArray(),
        CountBySeverity = countBySeverity,
        AnalysisDurationMs = stopwatch.ElapsedMilliseconds,
        EnforcementEnabled = options.EnforcementEnabled
    };
}
/// <summary>
/// Analyzes multiple source files and merges their violations into a single result.
/// Timing covers the whole batch; pass/fail is decided once on the merged set.
/// </summary>
public DeterminismAnalysisResult AnalyzeMultiple(
    IEnumerable<(string SourceCode, string FileName)> sources,
    DeterminismGuardOptions options)
{
    ArgumentNullException.ThrowIfNull(sources);
    options ??= DeterminismGuardOptions.Default;
    var timer = Stopwatch.StartNew();

    // Per-file analysis never enforces; enforcement applies only to the merged result.
    var perFileOptions = options with { EnforcementEnabled = false };
    var merged = sources
        .SelectMany(source => AnalyzeSource(source.SourceCode, source.FileName, perFileOptions).Violations)
        .ToList();

    timer.Stop();
    var blocking = merged.Any(v => v.Severity >= options.FailOnSeverity);
    return new DeterminismAnalysisResult
    {
        Passed = !options.EnforcementEnabled || !blocking,
        Violations = merged.ToImmutableArray(),
        CountBySeverity = merged
            .GroupBy(v => v.Severity)
            .ToImmutableDictionary(g => g.Key, g => g.Count()),
        AnalysisDurationMs = timer.ElapsedMilliseconds,
        EnforcementEnabled = options.EnforcementEnabled
    };
}
// Returns true when the file name contains any exclude pattern (case-insensitive
// substring match). A default/empty pattern list excludes nothing.
private static bool IsExcluded(string fileName, ImmutableArray<string> excludePatterns)
{
    if (excludePatterns.IsDefaultOrEmpty)
    {
        return false;
    }
    foreach (var pattern in excludePatterns)
    {
        if (fileName.Contains(pattern, StringComparison.OrdinalIgnoreCase))
        {
            return true;
        }
    }
    return false;
}
// Best-effort heuristic: walks up to 20 lines backwards from the violation line looking
// for an enclosing method, property, or type declaration and returns the captured name.
// Returns null when nothing in that window matches. The regexes are approximations
// (see the GeneratedRegex declarations below), so the result may occasionally name the
// wrong member — it is used for reporting context only.
private static string? ExtractMemberContext(string[] lines, int lineIndex)
{
    // Look backwards for method/property/class declaration
    for (var i = lineIndex; i >= 0 && i > lineIndex - 20; i--)
    {
        var line = lines[i].Trim();
        // Method pattern — checked first, so a matching method line wins over the
        // property and type patterns below.
        var methodMatch = MethodDeclarationRegex().Match(line);
        if (methodMatch.Success)
        {
            return methodMatch.Groups[1].Value;
        }
        // Property pattern
        var propertyMatch = PropertyDeclarationRegex().Match(line);
        if (propertyMatch.Success)
        {
            return propertyMatch.Groups[1].Value;
        }
        // Class pattern (also matches struct/record declarations)
        var classMatch = ClassDeclarationRegex().Match(line);
        if (classMatch.Success)
        {
            return classMatch.Groups[1].Value;
        }
    }
    // No declaration found within the 20-line lookback window.
    return null;
}
// Source-generated regexes used by ExtractMemberContext (compiled at build time).

// Matches "<access modifier> ... Name(" — captures the method name before the '('.
[GeneratedRegex(@"(?:public|private|protected|internal)\s+.*?\s+(\w+)\s*\(")]
private static partial Regex MethodDeclarationRegex();

// Matches "<access modifier> ... Name {" — captures the property name before the '{'.
[GeneratedRegex(@"(?:public|private|protected|internal)\s+.*?\s+(\w+)\s*\{")]
private static partial Regex PropertyDeclarationRegex();

// Matches "class/struct/record Name" — captures the declared type name.
[GeneratedRegex(@"(?:class|struct|record)\s+(\w+)")]
private static partial Regex ClassDeclarationRegex();
private static ImmutableArray<ProhibitedPattern> CreatePatterns()
{
return ImmutableArray.Create(
// Wall-clock violations
new ProhibitedPattern
{
Category = DeterminismViolationCategory.WallClock,
ViolationType = "DateTime.Now",
Regex = DateTimeNowRegex(),
Message = "DateTime.Now usage detected - non-deterministic wall-clock access",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Use injected timestamp from evaluation context (context.Now)"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.WallClock,
ViolationType = "DateTime.UtcNow",
Regex = DateTimeUtcNowRegex(),
Message = "DateTime.UtcNow usage detected - non-deterministic wall-clock access",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Use injected timestamp from evaluation context (context.Now)"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.WallClock,
ViolationType = "DateTimeOffset.Now",
Regex = DateTimeOffsetNowRegex(),
Message = "DateTimeOffset.Now usage detected - non-deterministic wall-clock access",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Use injected timestamp from evaluation context (context.Now)"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.WallClock,
ViolationType = "DateTimeOffset.UtcNow",
Regex = DateTimeOffsetUtcNowRegex(),
Message = "DateTimeOffset.UtcNow usage detected - non-deterministic wall-clock access",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Use injected timestamp from evaluation context (context.Now)"
},
// Random number violations
new ProhibitedPattern
{
Category = DeterminismViolationCategory.RandomNumber,
ViolationType = "Random",
Regex = RandomClassRegex(),
Message = "Random class usage detected - non-deterministic random number generation",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Use deterministic seeded random if needed, or remove randomness"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.RandomNumber,
ViolationType = "RandomNumberGenerator",
Regex = CryptoRandomRegex(),
Message = "Cryptographic random usage detected - non-deterministic",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Remove cryptographic random from evaluation path"
},
// GUID generation
new ProhibitedPattern
{
Category = DeterminismViolationCategory.GuidGeneration,
ViolationType = "Guid.NewGuid",
Regex = GuidNewGuidRegex(),
Message = "Guid.NewGuid() usage detected - non-deterministic identifier generation",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Use deterministic ID generation based on content hash"
},
// Network access
new ProhibitedPattern
{
Category = DeterminismViolationCategory.NetworkAccess,
ViolationType = "HttpClient",
Regex = HttpClientRegex(),
Message = "HttpClient usage detected - network access is non-deterministic",
Severity = DeterminismViolationSeverity.Critical,
Remediation = "Remove network access from evaluation path"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.NetworkAccess,
ViolationType = "WebClient",
Regex = WebClientRegex(),
Message = "WebClient usage detected - network access is non-deterministic",
Severity = DeterminismViolationSeverity.Critical,
Remediation = "Remove network access from evaluation path"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.NetworkAccess,
ViolationType = "Socket",
Regex = SocketRegex(),
Message = "Socket usage detected - network access is non-deterministic",
Severity = DeterminismViolationSeverity.Critical,
Remediation = "Remove socket access from evaluation path"
},
// Environment access
new ProhibitedPattern
{
Category = DeterminismViolationCategory.EnvironmentAccess,
ViolationType = "Environment.GetEnvironmentVariable",
Regex = EnvironmentGetEnvRegex(),
Message = "Environment variable access detected - host-dependent",
Severity = DeterminismViolationSeverity.Error,
Remediation = "Use evaluation context environment properties instead"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.EnvironmentAccess,
ViolationType = "Environment.MachineName",
Regex = EnvironmentMachineNameRegex(),
Message = "Environment.MachineName access detected - host-dependent",
Severity = DeterminismViolationSeverity.Warning,
Remediation = "Remove host-specific information from evaluation"
},
// Filesystem access
new ProhibitedPattern
{
Category = DeterminismViolationCategory.FileSystemAccess,
ViolationType = "File.Read",
Regex = FileReadRegex(),
Message = "File read operation detected - filesystem access is non-deterministic",
Severity = DeterminismViolationSeverity.Critical,
Remediation = "Remove file access from evaluation path"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.FileSystemAccess,
ViolationType = "File.Write",
Regex = FileWriteRegex(),
Message = "File write operation detected - filesystem access is non-deterministic",
Severity = DeterminismViolationSeverity.Critical,
Remediation = "Remove file access from evaluation path"
},
// Floating-point hazards
new ProhibitedPattern
{
Category = DeterminismViolationCategory.FloatingPointHazard,
ViolationType = "double comparison",
Regex = DoubleComparisonRegex(),
Message = "Direct double comparison detected - may have platform variance",
Severity = DeterminismViolationSeverity.Warning,
Remediation = "Use decimal type for precise comparisons"
},
// Unstable iteration
new ProhibitedPattern
{
Category = DeterminismViolationCategory.UnstableIteration,
ViolationType = "Dictionary iteration",
Regex = DictionaryIterationRegex(),
Message = "Dictionary iteration detected - may have unstable ordering",
Severity = DeterminismViolationSeverity.Warning,
Remediation = "Use SortedDictionary or OrderBy before iteration"
},
new ProhibitedPattern
{
Category = DeterminismViolationCategory.UnstableIteration,
ViolationType = "HashSet iteration",
Regex = HashSetIterationRegex(),
Message = "HashSet iteration detected - may have unstable ordering",
Severity = DeterminismViolationSeverity.Warning,
Remediation = "Use SortedSet or OrderBy before iteration"
}
);
}
// Generated regex patterns for prohibited patterns.
// These are heuristic, source-text matches (not syntax-aware): they can also hit
// comments, string literals, or lookalike identifiers (e.g. the HttpClient pattern
// matches IHttpClientFactory as well). NOTE(review): confirm the analyzer is meant
// to tolerate such false positives.
// Wall-clock access; the (?!\w) lookahead stops e.g. "DateTime.NowIsh" from matching.
[GeneratedRegex(@"DateTime\.Now(?!\w)")]
private static partial Regex DateTimeNowRegex();
[GeneratedRegex(@"DateTime\.UtcNow(?!\w)")]
private static partial Regex DateTimeUtcNowRegex();
[GeneratedRegex(@"DateTimeOffset\.Now(?!\w)")]
private static partial Regex DateTimeOffsetNowRegex();
[GeneratedRegex(@"DateTimeOffset\.UtcNow(?!\w)")]
private static partial Regex DateTimeOffsetUtcNowRegex();
// Randomness: matches constructor calls only — Random.Shared usage is not
// detected by this pattern. TODO confirm that is intended.
[GeneratedRegex(@"new\s+Random\s*\(")]
private static partial Regex RandomClassRegex();
[GeneratedRegex(@"RandomNumberGenerator")]
private static partial Regex CryptoRandomRegex();
[GeneratedRegex(@"Guid\.NewGuid\s*\(")]
private static partial Regex GuidNewGuidRegex();
// Network access: bare type-name matches (broad by design).
[GeneratedRegex(@"HttpClient")]
private static partial Regex HttpClientRegex();
[GeneratedRegex(@"WebClient")]
private static partial Regex WebClientRegex();
[GeneratedRegex(@"(?:TcpClient|UdpClient|Socket)\s*\(")]
private static partial Regex SocketRegex();
// Host environment access.
[GeneratedRegex(@"Environment\.GetEnvironmentVariable")]
private static partial Regex EnvironmentGetEnvRegex();
[GeneratedRegex(@"Environment\.MachineName")]
private static partial Regex EnvironmentMachineNameRegex();
// Filesystem: static File.* helpers only; stream construction (new FileStream)
// is not covered by these patterns.
[GeneratedRegex(@"File\.(?:Read|Open|ReadAll)")]
private static partial Regex FileReadRegex();
[GeneratedRegex(@"File\.(?:Write|Create|Append)")]
private static partial Regex FileWriteRegex();
// Floating point: a double/float declaration followed by an ==/<=/>=/!= comparison.
[GeneratedRegex(@"(?:double|float)\s+\w+\s*[=<>!]=")]
private static partial Regex DoubleComparisonRegex();
// Unordered-collection iteration, detected by variable-name convention on the
// foreach source — a name-based heuristic, not a type check.
[GeneratedRegex(@"foreach\s*\([^)]+\s+in\s+\w*[Dd]ictionary")]
private static partial Regex DictionaryIterationRegex();
[GeneratedRegex(@"foreach\s*\([^)]+\s+in\s+\w*[Hh]ashSet")]
private static partial Regex HashSetIterationRegex();
/// <summary>
/// Pairs a compiled detection regex with the violation metadata reported when it matches.
/// </summary>
private sealed record ProhibitedPattern
{
// Violation category the pattern belongs to (wall clock, randomness, network, ...).
public required DeterminismViolationCategory Category { get; init; }
// Short identifier of the offending construct (e.g. "DateTime.Now").
public required string ViolationType { get; init; }
// Compiled source-level regex used to detect the construct.
public required Regex Regex { get; init; }
// Human-readable explanation attached to the reported violation.
public required string Message { get; init; }
// Severity assigned to matches of this pattern.
public required DeterminismViolationSeverity Severity { get; init; }
// Optional suggested fix; null when no standard remediation applies.
public string? Remediation { get; init; }
}
}

View File

@@ -0,0 +1,81 @@
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Engine.Domain;
/// <summary>
/// Request for a policy decision with source evidence summaries (POLICY-ENGINE-40-003).
/// </summary>
/// <param name="SnapshotId">Identifier of the snapshot to evaluate; required.</param>
/// <param name="TenantId">Optional tenant filter; null selects all tenants.</param>
/// <param name="ComponentPurl">Optional component (purl) filter.</param>
/// <param name="AdvisoryId">Optional advisory filter.</param>
/// <param name="IncludeEvidence">When true, decisions carry an evidence summary.</param>
/// <param name="MaxSources">Maximum top severity sources returned per decision.</param>
public sealed record PolicyDecisionRequest(
[property: JsonPropertyName("snapshot_id")] string SnapshotId,
[property: JsonPropertyName("tenant_id")] string? TenantId = null,
[property: JsonPropertyName("component_purl")] string? ComponentPurl = null,
[property: JsonPropertyName("advisory_id")] string? AdvisoryId = null,
[property: JsonPropertyName("include_evidence")] bool IncludeEvidence = true,
[property: JsonPropertyName("max_sources")] int MaxSources = 5);
/// <summary>
/// Response containing policy decisions with source evidence summaries.
/// </summary>
/// <param name="SnapshotId">Snapshot the decisions were computed from.</param>
/// <param name="Decisions">One entry per (tenant, component, advisory) decision.</param>
/// <param name="Summary">Aggregate statistics over <paramref name="Decisions"/>.</param>
public sealed record PolicyDecisionResponse(
[property: JsonPropertyName("snapshot_id")] string SnapshotId,
[property: JsonPropertyName("decisions")] IReadOnlyList<PolicyDecisionItem> Decisions,
[property: JsonPropertyName("summary")] PolicyDecisionSummary Summary);
/// <summary>
/// A single policy decision with associated evidence.
/// </summary>
/// <param name="TenantId">Tenant the decision applies to.</param>
/// <param name="ComponentPurl">Component identifier (purl).</param>
/// <param name="AdvisoryId">Advisory the decision concerns.</param>
/// <param name="SeverityFused">Severity label fused across contributing sources.</param>
/// <param name="Score">Numeric risk score for the decision.</param>
/// <param name="Status">Decision status string; values defined by the policy engine.</param>
/// <param name="TopSources">Highest-ranked severity sources backing the decision.</param>
/// <param name="Evidence">Evidence summary; null when evidence was not requested.</param>
/// <param name="ConflictCount">Number of conflicting source assessments observed.</param>
/// <param name="ReasonCodes">Machine-readable reason codes explaining the outcome.</param>
public sealed record PolicyDecisionItem(
[property: JsonPropertyName("tenant_id")] string TenantId,
[property: JsonPropertyName("component_purl")] string ComponentPurl,
[property: JsonPropertyName("advisory_id")] string AdvisoryId,
[property: JsonPropertyName("severity_fused")] string SeverityFused,
[property: JsonPropertyName("score")] decimal Score,
[property: JsonPropertyName("status")] string Status,
[property: JsonPropertyName("top_sources")] IReadOnlyList<PolicyDecisionSource> TopSources,
[property: JsonPropertyName("evidence")] PolicyDecisionEvidence? Evidence,
[property: JsonPropertyName("conflict_count")] int ConflictCount,
[property: JsonPropertyName("reason_codes")] IReadOnlyList<string> ReasonCodes);
/// <summary>
/// Top severity source information for a decision.
/// </summary>
/// <param name="Source">Source identifier.</param>
/// <param name="Weight">Weight of the source in the fused result.</param>
/// <param name="Severity">Severity label reported by this source.</param>
/// <param name="Score">Score contributed by this source.</param>
/// <param name="Rank">1-based rank among the decision's sources.</param>
public sealed record PolicyDecisionSource(
[property: JsonPropertyName("source")] string Source,
[property: JsonPropertyName("weight")] decimal Weight,
[property: JsonPropertyName("severity")] string Severity,
[property: JsonPropertyName("score")] decimal Score,
[property: JsonPropertyName("rank")] int Rank);
/// <summary>
/// Evidence summary for a policy decision.
/// </summary>
/// <param name="Headline">Short human-readable description of the evidence.</param>
/// <param name="Severity">Severity label associated with the evidence.</param>
/// <param name="Locator">Where the evidence was found (file path / digest).</param>
/// <param name="Signals">Supporting signal identifiers.</param>
public sealed record PolicyDecisionEvidence(
[property: JsonPropertyName("headline")] string Headline,
[property: JsonPropertyName("severity")] string Severity,
[property: JsonPropertyName("locator")] PolicyDecisionLocator Locator,
[property: JsonPropertyName("signals")] IReadOnlyList<string> Signals);
/// <summary>
/// Evidence locator information.
/// </summary>
/// <param name="FilePath">Path of the file containing the evidence.</param>
/// <param name="Digest">Optional content digest of the located artifact.</param>
public sealed record PolicyDecisionLocator(
[property: JsonPropertyName("file_path")] string FilePath,
[property: JsonPropertyName("digest")] string? Digest);
/// <summary>
/// Summary statistics for the decision response.
/// </summary>
/// <param name="TotalDecisions">Total number of decisions in the response.</param>
/// <param name="TotalConflicts">Sum of conflict counts across all decisions.</param>
/// <param name="SeverityCounts">Decision count keyed by fused severity label.</param>
/// <param name="TopSeveritySources">Source rankings aggregated across decisions.</param>
public sealed record PolicyDecisionSummary(
[property: JsonPropertyName("total_decisions")] int TotalDecisions,
[property: JsonPropertyName("total_conflicts")] int TotalConflicts,
[property: JsonPropertyName("severity_counts")] IReadOnlyDictionary<string, int> SeverityCounts,
[property: JsonPropertyName("top_severity_sources")] IReadOnlyList<PolicyDecisionSourceRank> TopSeveritySources);
/// <summary>
/// Aggregated source rank across all decisions.
/// </summary>
/// <param name="Source">Source identifier.</param>
/// <param name="TotalWeight">Summed weight of the source over all decisions.</param>
/// <param name="DecisionCount">Number of decisions this source contributed to.</param>
/// <param name="AverageScore">Mean score contributed by the source.</param>
public sealed record PolicyDecisionSourceRank(
[property: JsonPropertyName("source")] string Source,
[property: JsonPropertyName("total_weight")] decimal TotalWeight,
[property: JsonPropertyName("decision_count")] int DecisionCount,
[property: JsonPropertyName("average_score")] decimal AverageScore);

View File

@@ -0,0 +1,360 @@
using System.Security.Claims;
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.RiskProfile.Overrides;
namespace StellaOps.Policy.Engine.Endpoints;
/// <summary>
/// Minimal-API endpoints for risk score overrides: create, read, delete, list,
/// validate, approve, disable, and application history. All routes are grouped
/// under /api/risk/overrides, require authorization, and each handler enforces
/// an additional policy scope (read vs edit vs activate).
/// </summary>
internal static class OverrideEndpoints
{
// Registers every override route on the supplied route builder and returns it
// for fluent chaining.
public static IEndpointRouteBuilder MapOverrides(this IEndpointRouteBuilder endpoints)
{
var group = endpoints.MapGroup("/api/risk/overrides")
.RequireAuthorization()
.WithTags("Risk Overrides");
group.MapPost("/", CreateOverride)
.WithName("CreateOverride")
.WithSummary("Create a new override with audit metadata.")
.Produces<OverrideResponse>(StatusCodes.Status201Created)
.Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest);
group.MapGet("/{overrideId}", GetOverride)
.WithName("GetOverride")
.WithSummary("Get an override by ID.")
.Produces<OverrideResponse>(StatusCodes.Status200OK)
.Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
group.MapDelete("/{overrideId}", DeleteOverride)
.WithName("DeleteOverride")
.WithSummary("Delete an override.")
.Produces(StatusCodes.Status204NoContent)
.Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
group.MapGet("/profile/{profileId}", ListProfileOverrides)
.WithName("ListProfileOverrides")
.WithSummary("List all overrides for a risk profile.")
.Produces<OverrideListResponse>(StatusCodes.Status200OK);
group.MapPost("/validate", ValidateOverride)
.WithName("ValidateOverride")
.WithSummary("Validate an override for conflicts before creating.")
.Produces<OverrideValidationResponse>(StatusCodes.Status200OK);
// ":approve" / ":disable" are custom-verb style route suffixes on the resource.
group.MapPost("/{overrideId}:approve", ApproveOverride)
.WithName("ApproveOverride")
.WithSummary("Approve an override that requires review.")
.Produces<OverrideResponse>(StatusCodes.Status200OK)
.Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest)
.Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
group.MapPost("/{overrideId}:disable", DisableOverride)
.WithName("DisableOverride")
.WithSummary("Disable an active override.")
.Produces<OverrideResponse>(StatusCodes.Status200OK)
.Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
group.MapGet("/{overrideId}/history", GetOverrideHistory)
.WithName("GetOverrideHistory")
.WithSummary("Get application history for an override.")
.Produces<OverrideHistoryResponse>(StatusCodes.Status200OK);
return endpoints;
}
// Creates an override. Requires PolicyEdit scope; validates the body (profile id
// and audit reason), confirms the target profile exists, rejects conflicting
// overrides with conflict details in the ProblemDetails extensions, and returns
// 201 with the stored override plus any non-blocking validation warnings.
private static IResult CreateOverride(
HttpContext context,
[FromBody] CreateOverrideRequest request,
OverrideService overrideService,
RiskProfileConfigurationService profileService)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
if (scopeResult is not null)
{
return scopeResult;
}
if (request == null || string.IsNullOrWhiteSpace(request.ProfileId))
{
return Results.BadRequest(new ProblemDetails
{
Title = "Invalid request",
Detail = "ProfileId is required.",
Status = StatusCodes.Status400BadRequest
});
}
if (string.IsNullOrWhiteSpace(request.Reason))
{
return Results.BadRequest(new ProblemDetails
{
Title = "Invalid request",
Detail = "Reason is required for audit purposes.",
Status = StatusCodes.Status400BadRequest
});
}
// Verify profile exists before accepting the override.
var profile = profileService.GetProfile(request.ProfileId);
if (profile == null)
{
return Results.BadRequest(new ProblemDetails
{
Title = "Profile not found",
Detail = $"Risk profile '{request.ProfileId}' was not found.",
Status = StatusCodes.Status400BadRequest
});
}
// Validate for conflicts; hard conflicts block creation, warnings do not.
var validation = overrideService.ValidateConflicts(request);
if (validation.HasConflicts)
{
var conflictDetails = string.Join("; ", validation.Conflicts.Select(c => c.Description));
return Results.BadRequest(new ProblemDetails
{
Title = "Override conflicts detected",
Detail = conflictDetails,
Status = StatusCodes.Status400BadRequest,
Extensions = { ["conflicts"] = validation.Conflicts }
});
}
var actorId = ResolveActorId(context);
try
{
var auditedOverride = overrideService.Create(request, actorId);
return Results.Created(
$"/api/risk/overrides/{auditedOverride.OverrideId}",
new OverrideResponse(auditedOverride, validation.Warnings));
}
catch (ArgumentException ex)
{
// Service-level argument validation failures surface as 400.
return Results.BadRequest(new ProblemDetails
{
Title = "Invalid request",
Detail = ex.Message,
Status = StatusCodes.Status400BadRequest
});
}
}
// Fetches a single override by id. Requires PolicyRead scope; 404 when unknown.
private static IResult GetOverride(
HttpContext context,
[FromRoute] string overrideId,
OverrideService overrideService)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
if (scopeResult is not null)
{
return scopeResult;
}
var auditedOverride = overrideService.Get(overrideId);
if (auditedOverride == null)
{
return Results.NotFound(new ProblemDetails
{
Title = "Override not found",
Detail = $"Override '{overrideId}' was not found.",
Status = StatusCodes.Status404NotFound
});
}
return Results.Ok(new OverrideResponse(auditedOverride, null));
}
// Deletes an override. Requires PolicyEdit scope; 204 on success, 404 when unknown.
private static IResult DeleteOverride(
HttpContext context,
[FromRoute] string overrideId,
OverrideService overrideService)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
if (scopeResult is not null)
{
return scopeResult;
}
if (!overrideService.Delete(overrideId))
{
return Results.NotFound(new ProblemDetails
{
Title = "Override not found",
Detail = $"Override '{overrideId}' was not found.",
Status = StatusCodes.Status404NotFound
});
}
return Results.NoContent();
}
// Lists overrides for a profile; includeInactive widens the result set.
// NOTE(review): an unknown profileId returns an empty list, not 404 — confirm intended.
private static IResult ListProfileOverrides(
HttpContext context,
[FromRoute] string profileId,
[FromQuery] bool includeInactive,
OverrideService overrideService)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
if (scopeResult is not null)
{
return scopeResult;
}
var overrides = overrideService.ListByProfile(profileId, includeInactive);
return Results.Ok(new OverrideListResponse(profileId, overrides));
}
// Dry-run conflict validation; never persists anything. Requires PolicyRead scope.
private static IResult ValidateOverride(
HttpContext context,
[FromBody] CreateOverrideRequest request,
OverrideService overrideService)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
if (scopeResult is not null)
{
return scopeResult;
}
if (request == null)
{
return Results.BadRequest(new ProblemDetails
{
Title = "Invalid request",
Detail = "Request body is required.",
Status = StatusCodes.Status400BadRequest
});
}
var validation = overrideService.ValidateConflicts(request);
return Results.Ok(new OverrideValidationResponse(validation));
}
// Approves an override awaiting review. Requires the stronger PolicyActivate
// scope; "system" is used as actor when no identity could be resolved.
private static IResult ApproveOverride(
HttpContext context,
[FromRoute] string overrideId,
OverrideService overrideService)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyActivate);
if (scopeResult is not null)
{
return scopeResult;
}
var actorId = ResolveActorId(context);
try
{
var auditedOverride = overrideService.Approve(overrideId, actorId ?? "system");
if (auditedOverride == null)
{
return Results.NotFound(new ProblemDetails
{
Title = "Override not found",
Detail = $"Override '{overrideId}' was not found.",
Status = StatusCodes.Status404NotFound
});
}
return Results.Ok(new OverrideResponse(auditedOverride, null));
}
catch (InvalidOperationException ex)
{
// e.g. the override is not in an approvable state.
return Results.BadRequest(new ProblemDetails
{
Title = "Approval failed",
Detail = ex.Message,
Status = StatusCodes.Status400BadRequest
});
}
}
// Disables an active override with an optional reason. Requires PolicyEdit scope.
private static IResult DisableOverride(
HttpContext context,
[FromRoute] string overrideId,
[FromQuery] string? reason,
OverrideService overrideService)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
if (scopeResult is not null)
{
return scopeResult;
}
var actorId = ResolveActorId(context);
var auditedOverride = overrideService.Disable(overrideId, actorId ?? "system", reason);
if (auditedOverride == null)
{
return Results.NotFound(new ProblemDetails
{
Title = "Override not found",
Detail = $"Override '{overrideId}' was not found.",
Status = StatusCodes.Status404NotFound
});
}
return Results.Ok(new OverrideResponse(auditedOverride, null));
}
// Returns up to `limit` history entries (default 100 when limit <= 0).
// NOTE(review): an unknown overrideId yields 200 with an empty history, not 404.
private static IResult GetOverrideHistory(
HttpContext context,
[FromRoute] string overrideId,
[FromQuery] int limit,
OverrideService overrideService)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
if (scopeResult is not null)
{
return scopeResult;
}
var effectiveLimit = limit > 0 ? limit : 100;
var history = overrideService.GetApplicationHistory(overrideId, effectiveLimit);
return Results.Ok(new OverrideHistoryResponse(overrideId, history));
}
// Resolves the acting principal for audit records: claims first
// (NameIdentifier, then UPN, then OAuth "sub"), falling back to the
// X-StellaOps-Actor header; null when nothing usable is present.
private static string? ResolveActorId(HttpContext context)
{
var user = context.User;
var actor = user?.FindFirst(ClaimTypes.NameIdentifier)?.Value
?? user?.FindFirst(ClaimTypes.Upn)?.Value
?? user?.FindFirst("sub")?.Value;
if (!string.IsNullOrWhiteSpace(actor))
{
return actor;
}
if (context.Request.Headers.TryGetValue("X-StellaOps-Actor", out var header) && !string.IsNullOrWhiteSpace(header))
{
return header.ToString();
}
return null;
}
}
#region Response DTOs
/// <summary>A stored override plus any non-blocking validation warnings.</summary>
internal sealed record OverrideResponse(
AuditedOverride Override,
IReadOnlyList<string>? Warnings);
/// <summary>Overrides belonging to a single risk profile.</summary>
internal sealed record OverrideListResponse(
string ProfileId,
IReadOnlyList<AuditedOverride> Overrides);
/// <summary>Result of a dry-run conflict validation.</summary>
internal sealed record OverrideValidationResponse(OverrideConflictValidation Validation);
/// <summary>Application history entries for one override.</summary>
internal sealed record OverrideHistoryResponse(
string OverrideId,
IReadOnlyList<OverrideApplicationRecord> History);
#endregion

View File

@@ -0,0 +1,77 @@
using Microsoft.AspNetCore.Mvc;
using StellaOps.Policy.Engine.Domain;
using StellaOps.Policy.Engine.Services;
namespace StellaOps.Policy.Engine.Endpoints;
/// <summary>
/// API endpoint for policy decisions with source evidence summaries (POLICY-ENGINE-40-003).
/// </summary>
/// <summary>
/// API endpoint for policy decisions with source evidence summaries (POLICY-ENGINE-40-003).
/// Exposes a POST route taking a full <see cref="PolicyDecisionRequest"/> body and a
/// GET route building the request from route/query parameters.
/// </summary>
public static class PolicyDecisionEndpoint
{
/// <summary>Registers the decision routes and returns the builder for chaining.</summary>
public static IEndpointRouteBuilder MapPolicyDecisions(this IEndpointRouteBuilder routes)
{
routes.MapPost("/policy/decisions", GetDecisionsAsync)
.WithName("PolicyEngine.Decisions")
.WithDescription("Request policy decisions with source evidence summaries, top severity sources, and conflict counts.");
routes.MapGet("/policy/decisions/{snapshotId}", GetDecisionsBySnapshotAsync)
.WithName("PolicyEngine.Decisions.BySnapshot")
.WithDescription("Get policy decisions for a specific snapshot.");
return routes;
}
// POST handler. Guards against a missing body explicitly rather than relying on
// a downstream ArgumentNullException to produce the 400 response.
// ArgumentException -> 400, KeyNotFoundException (unknown snapshot) -> 404.
private static async Task<IResult> GetDecisionsAsync(
[FromBody] PolicyDecisionRequest request,
PolicyDecisionService service,
CancellationToken cancellationToken)
{
if (request is null)
{
return Results.BadRequest(new { message = "Request body is required." });
}
try
{
var response = await service.GetDecisionsAsync(request, cancellationToken).ConfigureAwait(false);
return Results.Ok(response);
}
catch (ArgumentException ex)
{
return Results.BadRequest(new { message = ex.Message });
}
catch (KeyNotFoundException ex)
{
return Results.NotFound(new { message = ex.Message });
}
}
// GET handler. The DI-resolved service and the cancellation token are declared
// before the optional query parameters, so no "= default!" placeholder is needed
// (C# requires optional parameters last; minimal-API binding is by type and
// attribute, not by position).
private static async Task<IResult> GetDecisionsBySnapshotAsync(
[FromRoute] string snapshotId,
[FromQuery] string? tenantId,
[FromQuery] string? componentPurl,
[FromQuery] string? advisoryId,
PolicyDecisionService service,
CancellationToken cancellationToken,
[FromQuery] bool includeEvidence = true,
[FromQuery] int maxSources = 5)
{
try
{
var request = new PolicyDecisionRequest(
SnapshotId: snapshotId,
TenantId: tenantId,
ComponentPurl: componentPurl,
AdvisoryId: advisoryId,
IncludeEvidence: includeEvidence,
MaxSources: maxSources);
var response = await service.GetDecisionsAsync(request, cancellationToken).ConfigureAwait(false);
return Results.Ok(response);
}
catch (ArgumentException ex)
{
return Results.BadRequest(new { message = ex.Message });
}
catch (KeyNotFoundException ex)
{
return Results.NotFound(new { message = ex.Message });
}
}
}

View File

@@ -0,0 +1,195 @@
using System.Security.Claims;
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy.Engine.Events;
using StellaOps.Policy.Engine.Services;
namespace StellaOps.Policy.Engine.Endpoints;
/// <summary>
/// Minimal-API endpoints for profile lifecycle events: recent/filtered queries and
/// poll-based subscriptions. Routes live under /api/risk/events, require
/// authorization, and each handler enforces the PolicyRead scope.
/// </summary>
internal static class ProfileEventEndpoints
{
// Registers all event routes on the supplied builder and returns it for chaining.
public static IEndpointRouteBuilder MapProfileEvents(this IEndpointRouteBuilder endpoints)
{
var group = endpoints.MapGroup("/api/risk/events")
.RequireAuthorization()
.WithTags("Profile Events");
group.MapGet("/", GetRecentEvents)
.WithName("GetRecentProfileEvents")
.WithSummary("Get recent profile lifecycle events.")
.Produces<EventListResponse>(StatusCodes.Status200OK);
group.MapGet("/filter", GetFilteredEvents)
.WithName("GetFilteredProfileEvents")
.WithSummary("Get profile events with optional filtering.")
.Produces<EventListResponse>(StatusCodes.Status200OK);
// NOTE(review): CreateSubscription can return 400 but only 201 is declared here.
group.MapPost("/subscribe", CreateSubscription)
.WithName("CreateEventSubscription")
.WithSummary("Subscribe to profile lifecycle events.")
.Produces<SubscriptionResponse>(StatusCodes.Status201Created);
group.MapDelete("/subscribe/{subscriptionId}", DeleteSubscription)
.WithName("DeleteEventSubscription")
.WithSummary("Unsubscribe from profile lifecycle events.")
.Produces(StatusCodes.Status204NoContent)
.Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
group.MapGet("/subscribe/{subscriptionId}/poll", PollSubscription)
.WithName("PollEventSubscription")
.WithSummary("Poll for events from a subscription.")
.Produces<EventListResponse>(StatusCodes.Status200OK)
.Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);
return endpoints;
}
// Returns the most recent events, capped at `limit` (default 100 when limit <= 0).
private static IResult GetRecentEvents(
HttpContext context,
[FromQuery] int limit,
ProfileEventPublisher eventPublisher)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
if (scopeResult is not null)
{
return scopeResult;
}
var effectiveLimit = limit > 0 ? limit : 100;
var events = eventPublisher.GetRecentEvents(effectiveLimit);
return Results.Ok(new EventListResponse(events));
}
// Returns events matching the optional type/profile/time filters; null filters
// are ignored by the publisher.
private static IResult GetFilteredEvents(
HttpContext context,
[FromQuery] ProfileEventType? eventType,
[FromQuery] string? profileId,
[FromQuery] DateTimeOffset? since,
[FromQuery] int limit,
ProfileEventPublisher eventPublisher)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
if (scopeResult is not null)
{
return scopeResult;
}
var effectiveLimit = limit > 0 ? limit : 100;
var events = eventPublisher.GetEventsFiltered(eventType, profileId, since, effectiveLimit);
return Results.Ok(new EventListResponse(events));
}
// Creates a subscription for the requested event types; at least one type is
// required. The resolved actor is recorded as the subscription owner.
private static IResult CreateSubscription(
HttpContext context,
[FromBody] CreateSubscriptionRequest request,
ProfileEventPublisher eventPublisher)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
if (scopeResult is not null)
{
return scopeResult;
}
if (request == null || request.EventTypes == null || request.EventTypes.Count == 0)
{
return Results.BadRequest(new ProblemDetails
{
Title = "Invalid request",
Detail = "At least one event type is required.",
Status = StatusCodes.Status400BadRequest
});
}
var actorId = ResolveActorId(context);
var subscription = eventPublisher.Subscribe(
request.EventTypes,
request.ProfileFilter,
request.WebhookUrl,
actorId);
return Results.Created(
$"/api/risk/events/subscribe/{subscription.SubscriptionId}",
new SubscriptionResponse(subscription));
}
// Removes a subscription; 204 on success, 404 when the id is unknown.
private static IResult DeleteSubscription(
HttpContext context,
[FromRoute] string subscriptionId,
ProfileEventPublisher eventPublisher)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
if (scopeResult is not null)
{
return scopeResult;
}
if (!eventPublisher.Unsubscribe(subscriptionId))
{
return Results.NotFound(new ProblemDetails
{
Title = "Subscription not found",
Detail = $"Subscription '{subscriptionId}' was not found.",
Status = StatusCodes.Status404NotFound
});
}
return Results.NoContent();
}
// Drains up to `limit` pending events for a subscription.
private static IResult PollSubscription(
HttpContext context,
[FromRoute] string subscriptionId,
[FromQuery] int limit,
ProfileEventPublisher eventPublisher)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
if (scopeResult is not null)
{
return scopeResult;
}
var effectiveLimit = limit > 0 ? limit : 100;
var events = eventPublisher.GetEvents(subscriptionId, effectiveLimit);
// If no events, the subscription might not exist
// We return empty list either way since the subscription might just have no events
// NOTE(review): the declared 404 is therefore never produced here — an unknown
// subscription is indistinguishable from an idle one. Confirm this is acceptable.
return Results.Ok(new EventListResponse(events));
}
// Resolves the acting principal: claims first (NameIdentifier, UPN, "sub"),
// then the X-StellaOps-Actor header; null when nothing usable is present.
private static string? ResolveActorId(HttpContext context)
{
var user = context.User;
var actor = user?.FindFirst(ClaimTypes.NameIdentifier)?.Value
?? user?.FindFirst(ClaimTypes.Upn)?.Value
?? user?.FindFirst("sub")?.Value;
if (!string.IsNullOrWhiteSpace(actor))
{
return actor;
}
if (context.Request.Headers.TryGetValue("X-StellaOps-Actor", out var header) && !string.IsNullOrWhiteSpace(header))
{
return header.ToString();
}
return null;
}
}
#region Request/Response DTOs
/// <summary>A page of profile lifecycle events.</summary>
internal sealed record EventListResponse(IReadOnlyList<ProfileEvent> Events);
/// <summary>Subscription request; EventTypes must contain at least one entry.</summary>
internal sealed record CreateSubscriptionRequest(
IReadOnlyList<ProfileEventType> EventTypes,
string? ProfileFilter,
string? WebhookUrl);
/// <summary>The newly created subscription.</summary>
internal sealed record SubscriptionResponse(EventSubscription Subscription);
#endregion

View File

@@ -0,0 +1,238 @@
using System.Security.Claims;
using System.Text.Json;
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.RiskProfile.Export;
namespace StellaOps.Policy.Engine.Endpoints;
internal static class ProfileExportEndpoints
{
public static IEndpointRouteBuilder MapProfileExport(this IEndpointRouteBuilder endpoints)
{
var group = endpoints.MapGroup("/api/risk/profiles/export")
.RequireAuthorization()
.WithTags("Profile Export/Import");
group.MapPost("/", ExportProfiles)
.WithName("ExportProfiles")
.WithSummary("Export risk profiles as a signed bundle.")
.Produces<ExportResponse>(StatusCodes.Status200OK)
.Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest);
group.MapPost("/download", DownloadBundle)
.WithName("DownloadProfileBundle")
.WithSummary("Export and download risk profiles as a JSON file.")
.Produces<FileContentHttpResult>(StatusCodes.Status200OK, contentType: "application/json");
endpoints.MapPost("/api/risk/profiles/import", ImportProfiles)
.RequireAuthorization()
.WithName("ImportProfiles")
.WithSummary("Import risk profiles from a signed bundle.")
.WithTags("Profile Export/Import")
.Produces<ImportResponse>(StatusCodes.Status200OK)
.Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest);
endpoints.MapPost("/api/risk/profiles/verify", VerifyBundle)
.RequireAuthorization()
.WithName("VerifyProfileBundle")
.WithSummary("Verify the signature of a profile bundle without importing.")
.WithTags("Profile Export/Import")
.Produces<VerifyResponse>(StatusCodes.Status200OK);
return endpoints;
}
private static IResult ExportProfiles(
HttpContext context,
[FromBody] ExportProfilesRequest request,
RiskProfileConfigurationService profileService,
ProfileExportService exportService)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
if (scopeResult is not null)
{
return scopeResult;
}
if (request == null || request.ProfileIds == null || request.ProfileIds.Count == 0)
{
return Results.BadRequest(new ProblemDetails
{
Title = "Invalid request",
Detail = "At least one profile ID is required.",
Status = StatusCodes.Status400BadRequest
});
}
var profiles = new List<StellaOps.Policy.RiskProfile.Models.RiskProfileModel>();
var notFound = new List<string>();
foreach (var profileId in request.ProfileIds)
{
var profile = profileService.GetProfile(profileId);
if (profile != null)
{
profiles.Add(profile);
}
else
{
notFound.Add(profileId);
}
}
if (notFound.Count > 0)
{
return Results.BadRequest(new ProblemDetails
{
Title = "Profiles not found",
Detail = $"The following profiles were not found: {string.Join(", ", notFound)}",
Status = StatusCodes.Status400BadRequest
});
}
var actorId = ResolveActorId(context);
var bundle = exportService.Export(profiles, request, actorId);
return Results.Ok(new ExportResponse(bundle));
}
private static IResult DownloadBundle(
HttpContext context,
[FromBody] ExportProfilesRequest request,
RiskProfileConfigurationService profileService,
ProfileExportService exportService)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
if (scopeResult is not null)
{
return scopeResult;
}
if (request == null || request.ProfileIds == null || request.ProfileIds.Count == 0)
{
return Results.BadRequest(new ProblemDetails
{
Title = "Invalid request",
Detail = "At least one profile ID is required.",
Status = StatusCodes.Status400BadRequest
});
}
var profiles = new List<StellaOps.Policy.RiskProfile.Models.RiskProfileModel>();
foreach (var profileId in request.ProfileIds)
{
var profile = profileService.GetProfile(profileId);
if (profile != null)
{
profiles.Add(profile);
}
}
var actorId = ResolveActorId(context);
var bundle = exportService.Export(profiles, request, actorId);
var json = exportService.SerializeBundle(bundle);
var bytes = System.Text.Encoding.UTF8.GetBytes(json);
var fileName = $"risk-profiles-{bundle.BundleId}.json";
return Results.File(bytes, "application/json", fileName);
}
private static IResult ImportProfiles(
HttpContext context,
[FromBody] ImportProfilesRequest request,
RiskProfileConfigurationService profileService,
ProfileExportService exportService)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
if (scopeResult is not null)
{
return scopeResult;
}
if (request == null || request.Bundle == null)
{
return Results.BadRequest(new ProblemDetails
{
Title = "Invalid request",
Detail = "Bundle is required.",
Status = StatusCodes.Status400BadRequest
});
}
var actorId = ResolveActorId(context);
// Create an export service with save capability
var importExportService = new ProfileExportService(
timeProvider: TimeProvider.System,
profileLookup: id => profileService.GetProfile(id),
lifecycleLookup: null,
profileSave: profile => profileService.RegisterProfile(profile),
keyLookup: null);
var result = importExportService.Import(request, actorId);
return Results.Ok(new ImportResponse(result));
}
private static IResult VerifyBundle(
HttpContext context,
[FromBody] RiskProfileBundle bundle,
ProfileExportService exportService)
{
var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
if (scopeResult is not null)
{
return scopeResult;
}
if (bundle == null)
{
return Results.BadRequest(new ProblemDetails
{
Title = "Invalid request",
Detail = "Bundle is required.",
Status = StatusCodes.Status400BadRequest
});
}
var verification = exportService.VerifySignature(bundle);
return Results.Ok(new VerifyResponse(verification, bundle.Metadata));
}
/// <summary>
/// Resolves the acting user's identifier.  The first non-null identity claim
/// wins (NameIdentifier, then UPN, then "sub"); if that value is missing or
/// whitespace, the X-StellaOps-Actor header is used; otherwise null.
/// </summary>
private static string? ResolveActorId(HttpContext context)
{
    var principal = context.User;

    // First non-NULL claim in precedence order; an all-whitespace claim value
    // deliberately falls through to the header fallback below.
    var claimValue =
        principal?.FindFirst(ClaimTypes.NameIdentifier)?.Value
        ?? principal?.FindFirst(ClaimTypes.Upn)?.Value
        ?? principal?.FindFirst("sub")?.Value;

    if (!string.IsNullOrWhiteSpace(claimValue))
    {
        return claimValue;
    }

    // Fall back to the explicit actor header.
    return context.Request.Headers.TryGetValue("X-StellaOps-Actor", out var header)
        && !string.IsNullOrWhiteSpace(header)
        ? header.ToString()
        : null;
}
}
#region Response DTOs
/// <summary>Response envelope carrying an exported risk-profile bundle.</summary>
internal sealed record ExportResponse(RiskProfileBundle Bundle);
/// <summary>Response envelope carrying the outcome of a bundle import.</summary>
internal sealed record ImportResponse(ImportResult Result);
/// <summary>
/// Response for bundle signature verification: the verification outcome plus
/// the verified bundle's metadata.
/// </summary>
internal sealed record VerifyResponse(
    SignatureVerificationResult Verification,
    BundleMetadata Metadata);
#endregion

View File

@@ -0,0 +1,433 @@
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.Engine.Simulation;
namespace StellaOps.Policy.Engine.Endpoints;
/// <summary>
/// Minimal-API endpoints for risk simulations: full runs, quick runs, profile
/// comparisons, and what-if analyses over hypothetical signal changes.
/// </summary>
internal static class RiskSimulationEndpoints
{
    /// <summary>
    /// Absolute score delta below which a finding counts as "unchanged" when
    /// summarising what-if impact.
    /// </summary>
    private const double UnchangedScoreTolerance = 0.1;

    /// <summary>Registers the <c>/api/risk/simulation</c> endpoint group.</summary>
    public static IEndpointRouteBuilder MapRiskSimulation(this IEndpointRouteBuilder endpoints)
    {
        var group = endpoints.MapGroup("/api/risk/simulation")
            .RequireAuthorization()
            .WithTags("Risk Simulation");

        group.MapPost("/", RunSimulation)
            .WithName("RunRiskSimulation")
            .WithSummary("Run a risk simulation with score distributions and contribution breakdowns.")
            .Produces<RiskSimulationResponse>(StatusCodes.Status200OK)
            .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest)
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);

        group.MapPost("/quick", RunQuickSimulation)
            .WithName("RunQuickRiskSimulation")
            .WithSummary("Run a quick risk simulation without detailed breakdowns.")
            .Produces<QuickSimulationResponse>(StatusCodes.Status200OK)
            .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest)
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);

        group.MapPost("/compare", CompareProfiles)
            .WithName("CompareProfileSimulations")
            .WithSummary("Compare risk scoring between two profile configurations.")
            .Produces<ProfileComparisonResponse>(StatusCodes.Status200OK)
            .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest)
            // Fix: the handler reports unknown profiles, so declare 404.
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);

        group.MapPost("/whatif", RunWhatIfSimulation)
            .WithName("RunWhatIfSimulation")
            .WithSummary("Run a what-if simulation with hypothetical signal changes.")
            .Produces<WhatIfSimulationResponse>(StatusCodes.Status200OK)
            .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest)
            // Fix: the handler already returned 404 for unknown profiles but
            // the endpoint metadata did not declare it.
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);

        return endpoints;
    }

    /// <summary>Runs a full simulation with contributions and distribution.</summary>
    private static IResult RunSimulation(
        HttpContext context,
        [FromBody] RiskSimulationRequest request,
        RiskSimulationService simulationService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }

        if (request == null || string.IsNullOrWhiteSpace(request.ProfileId))
        {
            return InvalidRequest("ProfileId is required.");
        }

        if (request.Findings == null || request.Findings.Count == 0)
        {
            return InvalidRequest("At least one finding is required.");
        }

        try
        {
            var result = simulationService.Simulate(request);
            return Results.Ok(new RiskSimulationResponse(result));
        }
        catch (InvalidOperationException ex) when (IsProfileNotFound(ex))
        {
            return ProfileNotFound(ex);
        }
    }

    /// <summary>
    /// Runs a quick simulation (no contribution breakdowns) and trims the
    /// result to <see cref="QuickSimulationResponse"/>.
    /// </summary>
    private static IResult RunQuickSimulation(
        HttpContext context,
        [FromBody] QuickSimulationRequest request,
        RiskSimulationService simulationService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }

        if (request == null || string.IsNullOrWhiteSpace(request.ProfileId))
        {
            return InvalidRequest("ProfileId is required.");
        }

        // Fix: validate findings here as well; previously a null/empty list
        // flowed into the simulation service unchecked.
        if (request.Findings == null || request.Findings.Count == 0)
        {
            return InvalidRequest("At least one finding is required.");
        }

        var fullRequest = new RiskSimulationRequest(
            ProfileId: request.ProfileId,
            ProfileVersion: request.ProfileVersion,
            Findings: request.Findings,
            IncludeContributions: false,
            IncludeDistribution: true,
            Mode: SimulationMode.Quick);

        try
        {
            var result = simulationService.Simulate(fullRequest);
            var quickResponse = new QuickSimulationResponse(
                SimulationId: result.SimulationId,
                ProfileId: result.ProfileId,
                ProfileVersion: result.ProfileVersion,
                Timestamp: result.Timestamp,
                AggregateMetrics: result.AggregateMetrics,
                Distribution: result.Distribution,
                ExecutionTimeMs: result.ExecutionTimeMs);
            return Results.Ok(quickResponse);
        }
        catch (InvalidOperationException ex) when (IsProfileNotFound(ex))
        {
            return ProfileNotFound(ex);
        }
    }

    /// <summary>
    /// Simulates the same findings against two profiles and reports summary
    /// metrics for each plus their deltas (compare minus base).
    /// </summary>
    private static IResult CompareProfiles(
        HttpContext context,
        [FromBody] ProfileComparisonRequest request,
        RiskSimulationService simulationService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }

        if (request == null ||
            string.IsNullOrWhiteSpace(request.BaseProfileId) ||
            string.IsNullOrWhiteSpace(request.CompareProfileId))
        {
            return InvalidRequest("Both BaseProfileId and CompareProfileId are required.");
        }

        // Fix: findings were previously unvalidated on this endpoint.
        if (request.Findings == null || request.Findings.Count == 0)
        {
            return InvalidRequest("At least one finding is required.");
        }

        try
        {
            var baseResult = simulationService.Simulate(BuildFullRequest(
                request.BaseProfileId, request.BaseProfileVersion, request.Findings, SimulationMode.Full));
            var compareResult = simulationService.Simulate(BuildFullRequest(
                request.CompareProfileId, request.CompareProfileVersion, request.Findings, SimulationMode.Full));

            var deltas = ComputeDeltas(baseResult, compareResult);
            return Results.Ok(new ProfileComparisonResponse(
                BaseProfile: new ProfileSimulationSummary(
                    baseResult.ProfileId,
                    baseResult.ProfileVersion,
                    baseResult.AggregateMetrics),
                CompareProfile: new ProfileSimulationSummary(
                    compareResult.ProfileId,
                    compareResult.ProfileVersion,
                    compareResult.AggregateMetrics),
                Deltas: deltas));
        }
        catch (InvalidOperationException ex) when (IsProfileNotFound(ex))
        {
            // Fix: previously this returned 400 with the title "Profile not
            // found"; use 404 for consistency with the sibling endpoints.
            return ProfileNotFound(ex);
        }
    }

    /// <summary>
    /// Runs a baseline simulation, applies hypothetical signal changes, runs a
    /// second simulation, and summarises the per-finding impact.
    /// </summary>
    private static IResult RunWhatIfSimulation(
        HttpContext context,
        [FromBody] WhatIfSimulationRequest request,
        RiskSimulationService simulationService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }

        if (request == null || string.IsNullOrWhiteSpace(request.ProfileId))
        {
            return InvalidRequest("ProfileId is required.");
        }

        // Fix: findings were previously unvalidated on this endpoint.
        if (request.Findings == null || request.Findings.Count == 0)
        {
            return InvalidRequest("At least one finding is required.");
        }

        try
        {
            // Baseline run on the unmodified findings.
            var baselineResult = simulationService.Simulate(BuildFullRequest(
                request.ProfileId, request.ProfileVersion, request.Findings, SimulationMode.Full));

            // Apply hypothetical changes and re-simulate.
            var modifiedFindings = ApplyHypotheticalChanges(request.Findings, request.HypotheticalChanges);
            var modifiedResult = simulationService.Simulate(BuildFullRequest(
                request.ProfileId, request.ProfileVersion, modifiedFindings, SimulationMode.WhatIf));

            return Results.Ok(new WhatIfSimulationResponse(
                BaselineResult: baselineResult,
                ModifiedResult: modifiedResult,
                ImpactSummary: ComputeImpactSummary(baselineResult, modifiedResult)));
        }
        catch (InvalidOperationException ex) when (IsProfileNotFound(ex))
        {
            return ProfileNotFound(ex);
        }
    }

    /// <summary>Builds a full-detail simulation request for the given profile.</summary>
    private static RiskSimulationRequest BuildFullRequest(
        string profileId,
        string? profileVersion,
        IReadOnlyList<SimulationFinding> findings,
        SimulationMode mode) => new(
            ProfileId: profileId,
            ProfileVersion: profileVersion,
            Findings: findings,
            IncludeContributions: true,
            IncludeDistribution: true,
            Mode: mode);

    /// <summary>Per-metric deltas, computed as compare minus base.</summary>
    private static ComparisonDeltas ComputeDeltas(
        RiskSimulationResult baseResult,
        RiskSimulationResult compareResult)
    {
        return new ComparisonDeltas(
            MeanScoreDelta: compareResult.AggregateMetrics.MeanScore - baseResult.AggregateMetrics.MeanScore,
            MedianScoreDelta: compareResult.AggregateMetrics.MedianScore - baseResult.AggregateMetrics.MedianScore,
            CriticalCountDelta: compareResult.AggregateMetrics.CriticalCount - baseResult.AggregateMetrics.CriticalCount,
            HighCountDelta: compareResult.AggregateMetrics.HighCount - baseResult.AggregateMetrics.HighCount,
            MediumCountDelta: compareResult.AggregateMetrics.MediumCount - baseResult.AggregateMetrics.MediumCount,
            LowCountDelta: compareResult.AggregateMetrics.LowCount - baseResult.AggregateMetrics.LowCount);
    }

    /// <summary>
    /// Returns a copy of <paramref name="findings"/> with each change applied
    /// to its targeted findings (all findings when ApplyToAll, otherwise those
    /// whose ids appear in the change's FindingIds list).
    /// </summary>
    private static IReadOnlyList<SimulationFinding> ApplyHypotheticalChanges(
        IReadOnlyList<SimulationFinding> findings,
        IReadOnlyList<HypotheticalChange>? changes)
    {
        // Fix: a missing change list previously caused a NullReferenceException.
        if (changes == null || changes.Count == 0)
        {
            return findings;
        }

        var result = new List<SimulationFinding>(findings.Count);
        foreach (var finding in findings)
        {
            var modifiedSignals = new Dictionary<string, object?>(finding.Signals);
            foreach (var change in changes)
            {
                if (change.ApplyToAll || change.FindingIds.Contains(finding.FindingId))
                {
                    modifiedSignals[change.SignalName] = change.NewValue;
                }
            }

            result.Add(finding with { Signals = modifiedSignals });
        }

        return result.AsReadOnly();
    }

    /// <summary>
    /// Counts findings whose normalized score moved down (improved), up
    /// (worsened), or stayed within <see cref="UnchangedScoreTolerance"/>.
    /// Findings absent from the modified run are ignored.
    /// </summary>
    private static WhatIfImpactSummary ComputeImpactSummary(
        RiskSimulationResult baseline,
        RiskSimulationResult modified)
    {
        var baseScores = baseline.FindingScores.ToDictionary(f => f.FindingId, f => f.NormalizedScore);
        var modScores = modified.FindingScores.ToDictionary(f => f.FindingId, f => f.NormalizedScore);

        var improved = 0;
        var worsened = 0;
        var unchanged = 0;
        var totalDelta = 0.0;

        foreach (var (findingId, baseScore) in baseScores)
        {
            if (modScores.TryGetValue(findingId, out var modScore))
            {
                var delta = modScore - baseScore;
                totalDelta += delta;
                if (Math.Abs(delta) < UnchangedScoreTolerance)
                    unchanged++;
                else if (delta < 0)
                    improved++;
                else
                    worsened++;
            }
        }

        return new WhatIfImpactSummary(
            FindingsImproved: improved,
            FindingsWorsened: worsened,
            FindingsUnchanged: unchanged,
            AverageScoreDelta: baseline.FindingScores.Count > 0
                ? totalDelta / baseline.FindingScores.Count
                : 0,
            SeverityShifts: new SeverityShifts(
                ToLower: improved,
                ToHigher: worsened,
                Unchanged: unchanged));
    }

    /// <summary>Canonical 400 ProblemDetails for request validation failures.</summary>
    private static IResult InvalidRequest(string detail) => Results.BadRequest(new ProblemDetails
    {
        Title = "Invalid request",
        Detail = detail,
        Status = StatusCodes.Status400BadRequest
    });

    /// <summary>Canonical 404 ProblemDetails for an unknown profile.</summary>
    private static IResult ProfileNotFound(InvalidOperationException ex) => Results.NotFound(new ProblemDetails
    {
        Title = "Profile not found",
        Detail = ex.Message,
        Status = StatusCodes.Status404NotFound
    });

    /// <summary>
    /// Matches the service's "not found" signal.  NOTE(review): message
    /// sniffing is fragile — a dedicated exception type would be safer.
    /// </summary>
    private static bool IsProfileNotFound(InvalidOperationException ex) =>
        ex.Message.Contains("not found");
}
#region Request/Response DTOs
/// <summary>Envelope for a full risk-simulation run.</summary>
internal sealed record RiskSimulationResponse(RiskSimulationResult Result);
/// <summary>
/// Request for the quick endpoint: profile and findings only.  Detail flags
/// (no contributions, with distribution) are fixed server-side.
/// </summary>
internal sealed record QuickSimulationRequest(
    string ProfileId,
    string? ProfileVersion,
    IReadOnlyList<SimulationFinding> Findings);
/// <summary>
/// Trimmed simulation result: aggregate metrics and the score distribution,
/// without per-finding contribution breakdowns.
/// </summary>
internal sealed record QuickSimulationResponse(
    string SimulationId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    AggregateRiskMetrics AggregateMetrics,
    RiskDistribution? Distribution,
    double ExecutionTimeMs);
/// <summary>
/// Request to simulate the same findings against two profile configurations.
/// </summary>
internal sealed record ProfileComparisonRequest(
    string BaseProfileId,
    string? BaseProfileVersion,
    string CompareProfileId,
    string? CompareProfileVersion,
    IReadOnlyList<SimulationFinding> Findings);
/// <summary>Per-profile summaries plus metric deltas (compare minus base).</summary>
internal sealed record ProfileComparisonResponse(
    ProfileSimulationSummary BaseProfile,
    ProfileSimulationSummary CompareProfile,
    ComparisonDeltas Deltas);
/// <summary>Identifies one simulated profile and its aggregate metrics.</summary>
internal sealed record ProfileSimulationSummary(
    string ProfileId,
    string ProfileVersion,
    AggregateRiskMetrics Metrics);
/// <summary>
/// Metric differences between two simulations, computed as compare minus base
/// (positive = the compared profile scores higher / counts more).
/// </summary>
internal sealed record ComparisonDeltas(
    double MeanScoreDelta,
    double MedianScoreDelta,
    int CriticalCountDelta,
    int HighCountDelta,
    int MediumCountDelta,
    int LowCountDelta);
/// <summary>
/// Request for a what-if run: baseline findings plus the hypothetical signal
/// changes to apply before the second simulation.
/// </summary>
internal sealed record WhatIfSimulationRequest(
    string ProfileId,
    string? ProfileVersion,
    IReadOnlyList<SimulationFinding> Findings,
    IReadOnlyList<HypotheticalChange> HypotheticalChanges);
/// <summary>
/// A hypothetical signal override.  Applied to every finding when
/// <c>ApplyToAll</c> is true; otherwise only to findings whose ids appear in
/// <c>FindingIds</c>.
/// </summary>
internal sealed record HypotheticalChange(
    string SignalName,
    object? NewValue,
    bool ApplyToAll = true,
    IReadOnlyList<string>? FindingIds = null)
{
    // Shadows the positional parameter to normalize null to an empty list,
    // so consumers can call Contains without a null check.
    public IReadOnlyList<string> FindingIds { get; init; } = FindingIds ?? Array.Empty<string>();
}
/// <summary>Baseline and modified simulation results plus an impact summary.</summary>
internal sealed record WhatIfSimulationResponse(
    RiskSimulationResult BaselineResult,
    RiskSimulationResult ModifiedResult,
    WhatIfImpactSummary ImpactSummary);
/// <summary>
/// Per-finding impact counts: improved = normalized score dropped by more than
/// the tolerance (0.1), worsened = rose by more, unchanged = within tolerance.
/// AverageScoreDelta is the total delta divided by the baseline finding count.
/// </summary>
internal sealed record WhatIfImpactSummary(
    int FindingsImproved,
    int FindingsWorsened,
    int FindingsUnchanged,
    double AverageScoreDelta,
    SeverityShifts SeverityShifts);
/// <summary>
/// Directional shift counts.  NOTE(review): these are populated from the
/// improved/worsened score counts, not from actual severity-band transitions —
/// the name may overpromise; confirm the intended semantics.
/// </summary>
internal sealed record SeverityShifts(
    int ToLower,
    int ToHigher,
    int Unchanged);
#endregion

View File

@@ -0,0 +1,290 @@
using System.Security.Claims;
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.RiskProfile.Scope;
namespace StellaOps.Policy.Engine.Endpoints;
/// <summary>
/// Minimal-API endpoints for managing risk-profile scope attachments
/// (create / read / delete / expire / list) and for resolving the effective
/// profile for a scope selector.
/// </summary>
internal static class ScopeAttachmentEndpoints
{
    /// <summary>Registers the <c>/api/risk/scopes</c> endpoint group.</summary>
    public static IEndpointRouteBuilder MapScopeAttachments(this IEndpointRouteBuilder endpoints)
    {
        var group = endpoints.MapGroup("/api/risk/scopes")
            .RequireAuthorization()
            .WithTags("Risk Profile Scopes");

        group.MapPost("/attachments", CreateAttachment)
            .WithName("CreateScopeAttachment")
            .WithSummary("Attach a risk profile to a scope (organization, project, environment, or component).")
            .Produces<ScopeAttachmentResponse>(StatusCodes.Status201Created)
            .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest);

        group.MapGet("/attachments/{attachmentId}", GetAttachment)
            .WithName("GetScopeAttachment")
            .WithSummary("Get a scope attachment by ID.")
            .Produces<ScopeAttachmentResponse>(StatusCodes.Status200OK)
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);

        group.MapDelete("/attachments/{attachmentId}", DeleteAttachment)
            .WithName("DeleteScopeAttachment")
            .WithSummary("Delete a scope attachment.")
            .Produces(StatusCodes.Status204NoContent)
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);

        group.MapPost("/attachments/{attachmentId}:expire", ExpireAttachment)
            .WithName("ExpireScopeAttachment")
            .WithSummary("Expire a scope attachment immediately.")
            .Produces<ScopeAttachmentResponse>(StatusCodes.Status200OK)
            .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound);

        group.MapGet("/attachments", ListAttachments)
            .WithName("ListScopeAttachments")
            .WithSummary("List scope attachments with optional filtering.")
            .Produces<ScopeAttachmentListResponse>(StatusCodes.Status200OK);

        group.MapPost("/resolve", ResolveScope)
            .WithName("ResolveScope")
            .WithSummary("Resolve the effective risk profile for a given scope selector.")
            .Produces<ScopeResolutionResponse>(StatusCodes.Status200OK);

        group.MapGet("/{scopeType}/{scopeId}/attachments", GetScopeAttachments)
            .WithName("GetScopeAttachments")
            .WithSummary("Get all attachments for a specific scope.")
            .Produces<ScopeAttachmentListResponse>(StatusCodes.Status200OK);

        return endpoints;
    }

    /// <summary>
    /// Creates a scope attachment after verifying the caller's edit scope and
    /// the existence of the referenced profile.  Returns 201 with a Location
    /// header pointing at the new attachment.
    /// </summary>
    private static IResult CreateAttachment(
        HttpContext context,
        [FromBody] CreateScopeAttachmentRequest request,
        ScopeAttachmentService attachmentService,
        RiskProfileConfigurationService profileService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
        if (scopeResult is not null)
        {
            return scopeResult;
        }

        if (request == null || string.IsNullOrWhiteSpace(request.ProfileId))
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = "ProfileId is required.",
                Status = StatusCodes.Status400BadRequest
            });
        }

        // Reject attachments referencing a profile we do not know about.
        var profile = profileService.GetProfile(request.ProfileId);
        if (profile == null)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Profile not found",
                Detail = $"Risk profile '{request.ProfileId}' was not found.",
                Status = StatusCodes.Status400BadRequest
            });
        }

        var actorId = ResolveActorId(context);
        try
        {
            var attachment = attachmentService.Create(request, actorId);
            return Results.Created(
                $"/api/risk/scopes/attachments/{attachment.Id}",
                new ScopeAttachmentResponse(attachment));
        }
        catch (ArgumentException ex)
        {
            // The service signals invalid attachment data via ArgumentException.
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = ex.Message,
                Status = StatusCodes.Status400BadRequest
            });
        }
    }

    /// <summary>Fetches a single attachment; 404 when the id is unknown.</summary>
    private static IResult GetAttachment(
        HttpContext context,
        [FromRoute] string attachmentId,
        ScopeAttachmentService attachmentService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }

        var attachment = attachmentService.Get(attachmentId);
        return attachment == null
            ? AttachmentNotFound(attachmentId)
            : Results.Ok(new ScopeAttachmentResponse(attachment));
    }

    /// <summary>Deletes an attachment; 204 on success, 404 when unknown.</summary>
    private static IResult DeleteAttachment(
        HttpContext context,
        [FromRoute] string attachmentId,
        ScopeAttachmentService attachmentService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
        if (scopeResult is not null)
        {
            return scopeResult;
        }

        return attachmentService.Delete(attachmentId)
            ? Results.NoContent()
            : AttachmentNotFound(attachmentId);
    }

    /// <summary>Expires an attachment immediately, recording the acting user.</summary>
    private static IResult ExpireAttachment(
        HttpContext context,
        [FromRoute] string attachmentId,
        ScopeAttachmentService attachmentService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit);
        if (scopeResult is not null)
        {
            return scopeResult;
        }

        var actorId = ResolveActorId(context);
        var attachment = attachmentService.Expire(attachmentId, actorId);
        return attachment == null
            ? AttachmentNotFound(attachmentId)
            : Results.Ok(new ScopeAttachmentResponse(attachment));
    }

    /// <summary>
    /// Lists attachments with optional filters.  A non-positive limit falls
    /// back to 100.
    /// Fix: <paramref name="includeExpired"/> and <paramref name="limit"/> now
    /// have defaults; previously they were non-nullable parameters without
    /// defaults, which minimal APIs bind as REQUIRED query values, so a plain
    /// GET /attachments failed with 400.  (The service parameter moved ahead of
    /// the optional parameters as C# requires; HTTP binding is unaffected.)
    /// </summary>
    private static IResult ListAttachments(
        HttpContext context,
        ScopeAttachmentService attachmentService,
        [FromQuery] ScopeType? scopeType = null,
        [FromQuery] string? scopeId = null,
        [FromQuery] string? profileId = null,
        [FromQuery] bool includeExpired = false,
        [FromQuery] int limit = 0)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }

        var query = new ScopeAttachmentQuery(
            ScopeType: scopeType,
            ScopeId: scopeId,
            ProfileId: profileId,
            IncludeExpired: includeExpired,
            Limit: limit > 0 ? limit : 100);

        var attachments = attachmentService.Query(query);
        return Results.Ok(new ScopeAttachmentListResponse(attachments));
    }

    /// <summary>Resolves the effective profile for a scope selector.</summary>
    private static IResult ResolveScope(
        HttpContext context,
        [FromBody] ScopeSelector selector,
        ScopeAttachmentService attachmentService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }

        if (selector == null)
        {
            return Results.BadRequest(new ProblemDetails
            {
                Title = "Invalid request",
                Detail = "Scope selector is required.",
                Status = StatusCodes.Status400BadRequest
            });
        }

        var result = attachmentService.Resolve(selector);
        return Results.Ok(new ScopeResolutionResponse(result));
    }

    /// <summary>Lists every attachment bound to one concrete scope.</summary>
    private static IResult GetScopeAttachments(
        HttpContext context,
        [FromRoute] ScopeType scopeType,
        [FromRoute] string scopeId,
        ScopeAttachmentService attachmentService)
    {
        var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead);
        if (scopeResult is not null)
        {
            return scopeResult;
        }

        var attachments = attachmentService.GetAttachmentsForScope(scopeType, scopeId);
        return Results.Ok(new ScopeAttachmentListResponse(attachments));
    }

    /// <summary>Canonical 404 ProblemDetails for an unknown attachment id.</summary>
    private static IResult AttachmentNotFound(string attachmentId) => Results.NotFound(new ProblemDetails
    {
        Title = "Attachment not found",
        Detail = $"Scope attachment '{attachmentId}' was not found.",
        Status = StatusCodes.Status404NotFound
    });

    /// <summary>
    /// Resolves the acting user: first non-null identity claim (NameIdentifier,
    /// then UPN, then "sub"); if that value is missing or whitespace, falls
    /// back to the X-StellaOps-Actor header; otherwise null.
    /// </summary>
    private static string? ResolveActorId(HttpContext context)
    {
        var user = context.User;
        var actor = user?.FindFirst(ClaimTypes.NameIdentifier)?.Value
            ?? user?.FindFirst(ClaimTypes.Upn)?.Value
            ?? user?.FindFirst("sub")?.Value;
        if (!string.IsNullOrWhiteSpace(actor))
        {
            return actor;
        }

        if (context.Request.Headers.TryGetValue("X-StellaOps-Actor", out var header) && !string.IsNullOrWhiteSpace(header))
        {
            return header.ToString();
        }

        return null;
    }
}
#region Response DTOs
/// <summary>Response envelope carrying a single scope attachment.</summary>
internal sealed record ScopeAttachmentResponse(ScopeAttachment Attachment);
/// <summary>Response envelope carrying a list of scope attachments.</summary>
internal sealed record ScopeAttachmentListResponse(IReadOnlyList<ScopeAttachment> Attachments);
/// <summary>Response envelope carrying a scope resolution result.</summary>
internal sealed record ScopeResolutionResponse(ScopeResolutionResult Result);
#endregion

View File

@@ -17,7 +17,32 @@ internal sealed record PolicyEvaluationContext(
PolicyEvaluationAdvisory Advisory,
PolicyEvaluationVexEvidence Vex,
PolicyEvaluationSbom Sbom,
PolicyEvaluationExceptions Exceptions);
PolicyEvaluationExceptions Exceptions,
PolicyEvaluationReachability Reachability,
DateTimeOffset? EvaluationTimestamp = null)
{
/// <summary>
/// Gets the evaluation timestamp for deterministic time-based operations.
/// This value is injected at evaluation time rather than using DateTime.UtcNow
/// to ensure deterministic, reproducible results.
/// </summary>
public DateTimeOffset Now => EvaluationTimestamp ?? DateTimeOffset.MinValue;
/// <summary>
/// Creates a context without reachability data (for backwards compatibility).
/// </summary>
public PolicyEvaluationContext(
PolicyEvaluationSeverity severity,
PolicyEvaluationEnvironment environment,
PolicyEvaluationAdvisory advisory,
PolicyEvaluationVexEvidence vex,
PolicyEvaluationSbom sbom,
PolicyEvaluationExceptions exceptions,
DateTimeOffset? evaluationTimestamp = null)
: this(severity, environment, advisory, vex, sbom, exceptions, PolicyEvaluationReachability.Unknown, evaluationTimestamp)
{
}
}
internal sealed record PolicyEvaluationSeverity(string Normalized, decimal? Score = null);
@@ -158,3 +183,96 @@ internal sealed record PolicyExceptionApplication(
string AppliedStatus,
string? AppliedSeverity,
ImmutableDictionary<string, string> Metadata);
/// <summary>
/// Reachability evidence for policy evaluation: a coarse state label plus
/// confidence and score values, with optional provenance (source, method,
/// evidence reference).  State comparisons are case-insensitive.
/// </summary>
internal sealed record PolicyEvaluationReachability(
    string State,
    decimal Confidence,
    decimal Score,
    bool HasRuntimeEvidence,
    string? Source,
    string? Method,
    string? EvidenceRef)
{
    /// <summary>
    /// Default instance representing "no reachability data available".
    /// </summary>
    public static readonly PolicyEvaluationReachability Unknown =
        new("unknown", 0m, 0m, false, null, null, null);

    /// <summary>
    /// Creates evidence in the "reachable" state.
    /// </summary>
    public static PolicyEvaluationReachability Reachable(
        decimal confidence = 1m,
        decimal score = 1m,
        bool hasRuntimeEvidence = false,
        string? source = null,
        string? method = null)
    {
        return new PolicyEvaluationReachability(
            "reachable", confidence, score, hasRuntimeEvidence, source, method, null);
    }

    /// <summary>
    /// Creates evidence in the "unreachable" state; the score is pinned to zero.
    /// </summary>
    public static PolicyEvaluationReachability Unreachable(
        decimal confidence = 1m,
        bool hasRuntimeEvidence = false,
        string? source = null,
        string? method = null)
    {
        return new PolicyEvaluationReachability(
            "unreachable", confidence, 0m, hasRuntimeEvidence, source, method, null);
    }

    /// <summary>True when the state is "reachable" (case-insensitive).</summary>
    public bool IsReachable => HasState("reachable");

    /// <summary>True when the state is "unreachable" (case-insensitive).</summary>
    public bool IsUnreachable => HasState("unreachable");

    /// <summary>True when the state is "unknown" (case-insensitive).</summary>
    public bool IsUnknown => HasState("unknown");

    /// <summary>True when the state is "under_investigation" (case-insensitive).</summary>
    public bool IsUnderInvestigation => HasState("under_investigation");

    /// <summary>Confidence band: high means &gt;= 0.8.</summary>
    public bool IsHighConfidence => Confidence >= 0.8m;

    /// <summary>Confidence band: medium means in [0.5, 0.8).</summary>
    public bool IsMediumConfidence => !IsHighConfidence && Confidence >= 0.5m;

    /// <summary>Confidence band: low means &lt; 0.5.</summary>
    public bool IsLowConfidence => Confidence < 0.5m;

    // Centralises the case-insensitive state comparison used by the predicates.
    private bool HasState(string candidate) =>
        string.Equals(State, candidate, StringComparison.OrdinalIgnoreCase);
}

View File

@@ -63,6 +63,8 @@ internal sealed class PolicyExpressionEvaluator
"vex" => new EvaluationValue(new VexScope(this, context.Vex)),
"advisory" => new EvaluationValue(new AdvisoryScope(context.Advisory)),
"sbom" => new EvaluationValue(new SbomScope(context.Sbom)),
"reachability" => new EvaluationValue(new ReachabilityScope(context.Reachability)),
"now" => new EvaluationValue(context.Now),
"true" => EvaluationValue.True,
"false" => EvaluationValue.False,
_ => EvaluationValue.Null,
@@ -98,6 +100,11 @@ internal sealed class PolicyExpressionEvaluator
return sbom.Get(member.Member);
}
if (raw is ReachabilityScope reachability)
{
return reachability.Get(member.Member);
}
if (raw is ComponentScope componentScope)
{
return componentScope.Get(member.Member);
@@ -811,4 +818,51 @@ internal sealed class PolicyExpressionEvaluator
return vex.Statements[^1];
}
}
/// <summary>
/// SPL scope exposing reachability fields to policy expressions.  Member
/// lookup is case-insensitive and accepts both snake_case and collapsed
/// spellings; unknown members resolve to null.
/// </summary>
/// <example>
/// SPL predicates supported:
/// - reachability.state == "reachable" / "unreachable" / "unknown"
/// - reachability.confidence >= 0.8
/// - reachability.score > 0.5
/// - reachability.has_runtime_evidence == true
/// - reachability.is_reachable / is_unreachable / is_high_confidence == true
/// - reachability.source == "grype"
/// - reachability.method == "static"
/// </example>
private sealed class ReachabilityScope
{
    private readonly PolicyEvaluationReachability _reachability;

    public ReachabilityScope(PolicyEvaluationReachability reachability)
    {
        _reachability = reachability;
    }

    /// <summary>Resolves one member name to its evaluation value.</summary>
    public EvaluationValue Get(string member)
    {
        switch (member.ToLowerInvariant())
        {
            case "state":
                return new EvaluationValue(_reachability.State);
            case "confidence":
                return new EvaluationValue(_reachability.Confidence);
            case "score":
                return new EvaluationValue(_reachability.Score);
            case "has_runtime_evidence":
            case "hasruntimeevidence":
                return new EvaluationValue(_reachability.HasRuntimeEvidence);
            case "source":
                return new EvaluationValue(_reachability.Source);
            case "method":
                return new EvaluationValue(_reachability.Method);
            case "evidence_ref":
            case "evidenceref":
                return new EvaluationValue(_reachability.EvidenceRef);
            case "is_reachable":
            case "isreachable":
                return new EvaluationValue(_reachability.IsReachable);
            case "is_unreachable":
            case "isunreachable":
                return new EvaluationValue(_reachability.IsUnreachable);
            case "is_unknown":
            case "isunknown":
                return new EvaluationValue(_reachability.IsUnknown);
            case "is_under_investigation":
            case "isunderinvestigation":
                return new EvaluationValue(_reachability.IsUnderInvestigation);
            case "is_high_confidence":
            case "ishighconfidence":
                return new EvaluationValue(_reachability.IsHighConfidence);
            case "is_medium_confidence":
            case "ismediumconfidence":
                return new EvaluationValue(_reachability.IsMediumConfidence);
            case "is_low_confidence":
            case "islowconfidence":
                return new EvaluationValue(_reachability.IsLowConfidence);
            default:
                return EvaluationValue.Null;
        }
    }
}
}

View File

@@ -0,0 +1,172 @@
using System.Text.Json.Serialization;
using StellaOps.Policy.RiskProfile.Lifecycle;
namespace StellaOps.Policy.Engine.Events;
/// <summary>
/// Base class for profile lifecycle events.  These fields form the common
/// envelope, serialized with snake_case property names; derived events add
/// their own payload properties.
/// </summary>
/// <param name="EventId">Unique identifier of this event instance.</param>
/// <param name="EventType">Discriminator identifying the concrete event.</param>
/// <param name="ProfileId">Identifier of the affected risk profile.</param>
/// <param name="ProfileVersion">Profile version the event refers to.</param>
/// <param name="Timestamp">When the event occurred.</param>
/// <param name="Actor">Who triggered the event, when known.</param>
/// <param name="CorrelationId">Optional id linking related events or requests.</param>
public abstract record ProfileEvent(
    [property: JsonPropertyName("event_id")] string EventId,
    [property: JsonPropertyName("event_type")] ProfileEventType EventType,
    [property: JsonPropertyName("profile_id")] string ProfileId,
    [property: JsonPropertyName("profile_version")] string ProfileVersion,
    [property: JsonPropertyName("timestamp")] DateTimeOffset Timestamp,
    [property: JsonPropertyName("actor")] string? Actor,
    [property: JsonPropertyName("correlation_id")] string? CorrelationId);
/// <summary>
/// Type of profile event.
/// </summary>
/// <remarks>
/// NOTE(review): <c>[JsonPropertyName]</c> has no effect on enum members when
/// serialized through <c>JsonStringEnumConverter</c> — values will serialize
/// with their C# member names (e.g. "ProfileCreated"), not the snake_case
/// names attached below.  Confirm the intended wire format; on .NET 9+,
/// <c>[JsonStringEnumMemberName]</c> is the supported way to rename enum values.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<ProfileEventType>))]
public enum ProfileEventType
{
    [JsonPropertyName("profile_created")]
    ProfileCreated,
    [JsonPropertyName("profile_published")]
    ProfilePublished,
    [JsonPropertyName("profile_activated")]
    ProfileActivated,
    [JsonPropertyName("profile_deprecated")]
    ProfileDeprecated,
    [JsonPropertyName("profile_archived")]
    ProfileArchived,
    [JsonPropertyName("severity_threshold_changed")]
    SeverityThresholdChanged,
    [JsonPropertyName("weight_changed")]
    WeightChanged,
    [JsonPropertyName("override_added")]
    OverrideAdded,
    [JsonPropertyName("override_removed")]
    OverrideRemoved,
    [JsonPropertyName("scope_attached")]
    ScopeAttached,
    [JsonPropertyName("scope_detached")]
    ScopeDetached
}
/// <summary>
/// Event emitted when a profile is created.  Adds the profile's content hash
/// and an optional description to the common envelope.
/// NOTE(review): the hashing scheme behind ContentHash is not visible here —
/// confirm with the producer before relying on a specific algorithm.
/// </summary>
public sealed record ProfileCreatedEvent(
    string EventId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    string? Actor,
    string? CorrelationId,
    [property: JsonPropertyName("content_hash")] string ContentHash,
    [property: JsonPropertyName("description")] string? Description)
    : ProfileEvent(EventId, ProfileEventType.ProfileCreated, ProfileId, ProfileVersion, Timestamp, Actor, CorrelationId);
/// <summary>
/// Event emitted when a profile is published/activated.  Carries the content
/// hash of the published version and, when another version was active before,
/// that version's identifier.
/// </summary>
public sealed record ProfilePublishedEvent(
    string EventId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    string? Actor,
    string? CorrelationId,
    [property: JsonPropertyName("content_hash")] string ContentHash,
    [property: JsonPropertyName("previous_active_version")] string? PreviousActiveVersion)
    : ProfileEvent(EventId, ProfileEventType.ProfilePublished, ProfileId, ProfileVersion, Timestamp, Actor, CorrelationId);
/// <summary>
/// Event emitted when a profile is deprecated.  Optionally records why and
/// which version supersedes it.
/// </summary>
public sealed record ProfileDeprecatedEvent(
    string EventId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    string? Actor,
    string? CorrelationId,
    [property: JsonPropertyName("reason")] string? Reason,
    [property: JsonPropertyName("successor_version")] string? SuccessorVersion)
    : ProfileEvent(EventId, ProfileEventType.ProfileDeprecated, ProfileId, ProfileVersion, Timestamp, Actor, CorrelationId);
/// <summary>
/// Event emitted when a profile is archived.  Carries no payload beyond the
/// common envelope.
/// </summary>
public sealed record ProfileArchivedEvent(
    string EventId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    string? Actor,
    string? CorrelationId)
    : ProfileEvent(EventId, ProfileEventType.ProfileArchived, ProfileId, ProfileVersion, Timestamp, Actor, CorrelationId);
/// <summary>
/// Event emitted when severity thresholds change.  Carries one
/// <see cref="ThresholdChange"/> entry per modified threshold.
/// </summary>
public sealed record SeverityThresholdChangedEvent(
    string EventId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    string? Actor,
    string? CorrelationId,
    [property: JsonPropertyName("changes")] IReadOnlyList<ThresholdChange> Changes)
    : ProfileEvent(EventId, ProfileEventType.SeverityThresholdChanged, ProfileId, ProfileVersion, Timestamp, Actor, CorrelationId);
/// <summary>
/// Details of a threshold change.  Both values are nullable — presumably null
/// means the threshold was absent on that side (added/removed); confirm with
/// the event producer.
/// </summary>
public sealed record ThresholdChange(
    [property: JsonPropertyName("threshold_name")] string ThresholdName,
    [property: JsonPropertyName("old_value")] double? OldValue,
    [property: JsonPropertyName("new_value")] double? NewValue);
/// <summary>
/// Event emitted when a signal's weight changes, recording the signal name and
/// the old and new weight values.
/// </summary>
public sealed record WeightChangedEvent(
    string EventId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    string? Actor,
    string? CorrelationId,
    [property: JsonPropertyName("signal_name")] string SignalName,
    [property: JsonPropertyName("old_weight")] double OldWeight,
    [property: JsonPropertyName("new_weight")] double NewWeight)
    : ProfileEvent(EventId, ProfileEventType.WeightChanged, ProfileId, ProfileVersion, Timestamp, Actor, CorrelationId);
/// <summary>
/// Event emitted when a scope is attached.
/// </summary>
/// <param name="EventId">Unique identifier of this event.</param>
/// <param name="ProfileId">Identifier of the affected profile.</param>
/// <param name="ProfileVersion">Profile version the attachment applies to.</param>
/// <param name="Timestamp">When the attachment occurred (UTC).</param>
/// <param name="Actor">Optional principal that performed the attachment.</param>
/// <param name="CorrelationId">Optional correlation id linking this event to the triggering request.</param>
/// <param name="ScopeType">Type of scope attached (e.g. tenant, project — exact set defined by the scope service).</param>
/// <param name="ScopeId">Identifier of the attached scope.</param>
/// <param name="AttachmentId">Identifier of the attachment record itself.</param>
public sealed record ScopeAttachedEvent(
    string EventId,
    string ProfileId,
    string ProfileVersion,
    DateTimeOffset Timestamp,
    string? Actor,
    string? CorrelationId,
    [property: JsonPropertyName("scope_type")] string ScopeType,
    [property: JsonPropertyName("scope_id")] string ScopeId,
    [property: JsonPropertyName("attachment_id")] string AttachmentId)
    : ProfileEvent(EventId, ProfileEventType.ScopeAttached, ProfileId, ProfileVersion, Timestamp, Actor, CorrelationId);
/// <summary>
/// Event subscription request.
/// </summary>
/// <param name="SubscriptionId">Server-generated identifier of the subscription.</param>
/// <param name="EventTypes">Event types the subscriber wants to receive.</param>
/// <param name="ProfileFilter">Optional profile-id filter; a trailing '*' acts as a case-insensitive prefix wildcard.</param>
/// <param name="WebhookUrl">Optional webhook endpoint; delivery mechanism is handled elsewhere.</param>
/// <param name="CreatedAt">When the subscription was created (UTC).</param>
/// <param name="CreatedBy">Optional principal that created the subscription.</param>
public sealed record EventSubscription(
    [property: JsonPropertyName("subscription_id")] string SubscriptionId,
    [property: JsonPropertyName("event_types")] IReadOnlyList<ProfileEventType> EventTypes,
    [property: JsonPropertyName("profile_filter")] string? ProfileFilter,
    [property: JsonPropertyName("webhook_url")] string? WebhookUrl,
    [property: JsonPropertyName("created_at")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("created_by")] string? CreatedBy);

View File

@@ -0,0 +1,412 @@
using System.Collections.Concurrent;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Policy.Engine.Telemetry;
namespace StellaOps.Policy.Engine.Events;
/// <summary>
/// Service for publishing and managing profile lifecycle events.
/// Maintains a bounded in-memory global event stream, per-subscription queues
/// (pull model via <see cref="GetEvents"/>), and a set of async handlers that
/// are invoked on every published event.
/// </summary>
public sealed class ProfileEventPublisher
{
    private readonly ILogger<ProfileEventPublisher> _logger;
    private readonly TimeProvider _timeProvider;
    private readonly ConcurrentDictionary<string, EventSubscription> _subscriptions;
    private readonly ConcurrentDictionary<string, ConcurrentQueue<ProfileEvent>> _eventQueues;
    private readonly ConcurrentQueue<ProfileEvent> _globalEventStream;
    private readonly List<Func<ProfileEvent, Task>> _eventHandlers;
    private readonly object _handlersLock = new();

    // Upper bounds that keep the in-memory queues from growing without limit;
    // the oldest events are dropped once a bound is exceeded.
    private const int MaxEventsPerQueue = 10000;
    private const int MaxGlobalEvents = 50000;

    public ProfileEventPublisher(
        ILogger<ProfileEventPublisher> logger,
        TimeProvider timeProvider)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _subscriptions = new ConcurrentDictionary<string, EventSubscription>(StringComparer.OrdinalIgnoreCase);
        _eventQueues = new ConcurrentDictionary<string, ConcurrentQueue<ProfileEvent>>(StringComparer.OrdinalIgnoreCase);
        _globalEventStream = new ConcurrentQueue<ProfileEvent>();
        _eventHandlers = new List<Func<ProfileEvent, Task>>();
    }

    /// <summary>
    /// Publishes a profile created event.
    /// </summary>
    public async Task PublishProfileCreatedAsync(
        string profileId,
        string version,
        string contentHash,
        string? description,
        string? actor,
        string? correlationId = null)
    {
        var evt = new ProfileCreatedEvent(
            EventId: GenerateEventId(),
            ProfileId: profileId,
            ProfileVersion: version,
            Timestamp: _timeProvider.GetUtcNow(),
            Actor: actor,
            CorrelationId: correlationId,
            ContentHash: contentHash,
            Description: description);
        await PublishAsync(evt);
    }

    /// <summary>
    /// Publishes a profile published/activated event.
    /// </summary>
    public async Task PublishProfilePublishedAsync(
        string profileId,
        string version,
        string contentHash,
        string? previousActiveVersion,
        string? actor,
        string? correlationId = null)
    {
        var evt = new ProfilePublishedEvent(
            EventId: GenerateEventId(),
            ProfileId: profileId,
            ProfileVersion: version,
            Timestamp: _timeProvider.GetUtcNow(),
            Actor: actor,
            CorrelationId: correlationId,
            ContentHash: contentHash,
            PreviousActiveVersion: previousActiveVersion);
        await PublishAsync(evt);
    }

    /// <summary>
    /// Publishes a profile deprecated event.
    /// </summary>
    public async Task PublishProfileDeprecatedAsync(
        string profileId,
        string version,
        string? reason,
        string? successorVersion,
        string? actor,
        string? correlationId = null)
    {
        var evt = new ProfileDeprecatedEvent(
            EventId: GenerateEventId(),
            ProfileId: profileId,
            ProfileVersion: version,
            Timestamp: _timeProvider.GetUtcNow(),
            Actor: actor,
            CorrelationId: correlationId,
            Reason: reason,
            SuccessorVersion: successorVersion);
        await PublishAsync(evt);
    }

    /// <summary>
    /// Publishes a profile archived event.
    /// </summary>
    public async Task PublishProfileArchivedAsync(
        string profileId,
        string version,
        string? actor,
        string? correlationId = null)
    {
        var evt = new ProfileArchivedEvent(
            EventId: GenerateEventId(),
            ProfileId: profileId,
            ProfileVersion: version,
            Timestamp: _timeProvider.GetUtcNow(),
            Actor: actor,
            CorrelationId: correlationId);
        await PublishAsync(evt);
    }

    /// <summary>
    /// Publishes a severity threshold changed event.
    /// </summary>
    public async Task PublishSeverityThresholdChangedAsync(
        string profileId,
        string version,
        IReadOnlyList<ThresholdChange> changes,
        string? actor,
        string? correlationId = null)
    {
        var evt = new SeverityThresholdChangedEvent(
            EventId: GenerateEventId(),
            ProfileId: profileId,
            ProfileVersion: version,
            Timestamp: _timeProvider.GetUtcNow(),
            Actor: actor,
            CorrelationId: correlationId,
            Changes: changes);
        await PublishAsync(evt);
    }

    /// <summary>
    /// Publishes a weight changed event.
    /// </summary>
    public async Task PublishWeightChangedAsync(
        string profileId,
        string version,
        string signalName,
        double oldWeight,
        double newWeight,
        string? actor,
        string? correlationId = null)
    {
        var evt = new WeightChangedEvent(
            EventId: GenerateEventId(),
            ProfileId: profileId,
            ProfileVersion: version,
            Timestamp: _timeProvider.GetUtcNow(),
            Actor: actor,
            CorrelationId: correlationId,
            SignalName: signalName,
            OldWeight: oldWeight,
            NewWeight: newWeight);
        await PublishAsync(evt);
    }

    /// <summary>
    /// Publishes a scope attached event.
    /// </summary>
    public async Task PublishScopeAttachedAsync(
        string profileId,
        string version,
        string scopeType,
        string scopeId,
        string attachmentId,
        string? actor,
        string? correlationId = null)
    {
        var evt = new ScopeAttachedEvent(
            EventId: GenerateEventId(),
            ProfileId: profileId,
            ProfileVersion: version,
            Timestamp: _timeProvider.GetUtcNow(),
            Actor: actor,
            CorrelationId: correlationId,
            ScopeType: scopeType,
            ScopeId: scopeId,
            AttachmentId: attachmentId);
        await PublishAsync(evt);
    }

    /// <summary>
    /// Registers an event handler invoked for every published event.
    /// Handler exceptions are logged and do not abort publishing.
    /// </summary>
    public void RegisterHandler(Func<ProfileEvent, Task> handler)
    {
        ArgumentNullException.ThrowIfNull(handler);
        lock (_handlersLock)
        {
            _eventHandlers.Add(handler);
        }
    }

    /// <summary>
    /// Creates a subscription for events.
    /// </summary>
    /// <param name="eventTypes">Event types to receive; must not be null.</param>
    /// <param name="profileFilter">Optional profile-id filter; trailing '*' is a prefix wildcard.</param>
    /// <param name="webhookUrl">Optional webhook endpoint (delivery handled elsewhere).</param>
    /// <param name="createdBy">Optional principal creating the subscription.</param>
    public EventSubscription Subscribe(
        IReadOnlyList<ProfileEventType> eventTypes,
        string? profileFilter,
        string? webhookUrl,
        string? createdBy)
    {
        ArgumentNullException.ThrowIfNull(eventTypes);
        var subscription = new EventSubscription(
            SubscriptionId: GenerateSubscriptionId(),
            // Defensive copy: later mutation of the caller's list must not change matching.
            EventTypes: eventTypes.ToArray(),
            ProfileFilter: profileFilter,
            WebhookUrl: webhookUrl,
            CreatedAt: _timeProvider.GetUtcNow(),
            CreatedBy: createdBy);
        _subscriptions[subscription.SubscriptionId] = subscription;
        _eventQueues[subscription.SubscriptionId] = new ConcurrentQueue<ProfileEvent>();
        return subscription;
    }

    /// <summary>
    /// Unsubscribes from events. Returns false when the subscription id is unknown.
    /// </summary>
    public bool Unsubscribe(string subscriptionId)
    {
        var removed = _subscriptions.TryRemove(subscriptionId, out _);
        _eventQueues.TryRemove(subscriptionId, out _);
        return removed;
    }

    /// <summary>
    /// Gets (and removes) up to <paramref name="limit"/> pending events for a subscription.
    /// Returns an empty list for unknown subscription ids.
    /// </summary>
    public IReadOnlyList<ProfileEvent> GetEvents(string subscriptionId, int limit = 100)
    {
        if (!_eventQueues.TryGetValue(subscriptionId, out var queue))
        {
            return Array.Empty<ProfileEvent>();
        }
        var events = new List<ProfileEvent>();
        while (events.Count < limit && queue.TryDequeue(out var evt))
        {
            events.Add(evt);
        }
        return events.AsReadOnly();
    }

    /// <summary>
    /// Gets recent events from the global stream, newest first.
    /// </summary>
    public IReadOnlyList<ProfileEvent> GetRecentEvents(int limit = 100)
    {
        return _globalEventStream
            .ToArray()
            .OrderByDescending(e => e.Timestamp)
            .Take(limit)
            .ToList()
            .AsReadOnly();
    }

    /// <summary>
    /// Gets events from the global stream filtered by type, profile id and timestamp, newest first.
    /// </summary>
    public IReadOnlyList<ProfileEvent> GetEventsFiltered(
        ProfileEventType? eventType,
        string? profileId,
        DateTimeOffset? since,
        int limit = 100)
    {
        IEnumerable<ProfileEvent> events = _globalEventStream.ToArray();
        if (eventType.HasValue)
        {
            events = events.Where(e => e.EventType == eventType.Value);
        }
        if (!string.IsNullOrWhiteSpace(profileId))
        {
            events = events.Where(e => e.ProfileId.Equals(profileId, StringComparison.OrdinalIgnoreCase));
        }
        if (since.HasValue)
        {
            events = events.Where(e => e.Timestamp >= since.Value);
        }
        return events
            .OrderByDescending(e => e.Timestamp)
            .Take(limit)
            .ToList()
            .AsReadOnly();
    }

    /// <summary>
    /// Core publish path: records the event, fans it out to matching subscription
    /// queues, then invokes registered handlers sequentially.
    /// </summary>
    private async Task PublishAsync(ProfileEvent evt)
    {
        using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity("profile_event.publish");
        activity?.SetTag("event.type", evt.EventType.ToString());
        activity?.SetTag("profile.id", evt.ProfileId);

        // Add to global stream, dropping oldest entries once over the bound.
        _globalEventStream.Enqueue(evt);
        while (_globalEventStream.Count > MaxGlobalEvents)
        {
            _globalEventStream.TryDequeue(out _);
        }

        // Distribute to matching subscriptions.
        foreach (var (subscriptionId, subscription) in _subscriptions)
        {
            if (MatchesSubscription(evt, subscription))
            {
                if (_eventQueues.TryGetValue(subscriptionId, out var queue))
                {
                    queue.Enqueue(evt);
                    while (queue.Count > MaxEventsPerQueue)
                    {
                        queue.TryDequeue(out _);
                    }
                }
            }
        }

        // Invoke registered handlers outside the lock; a copy is taken so handlers
        // may register further handlers without deadlocking or invalidating iteration.
        List<Func<ProfileEvent, Task>> handlers;
        lock (_handlersLock)
        {
            handlers = _eventHandlers.ToList();
        }
        foreach (var handler in handlers)
        {
            try
            {
                await handler(evt).ConfigureAwait(false);
            }
            catch (Exception ex)
            {
                _logger.LogError(ex, "Error invoking event handler for {EventType}", evt.EventType);
            }
        }

        PolicyEngineTelemetry.ProfileEventsPublished.Add(1);
        _logger.LogInformation(
            "Published {EventType} event for profile {ProfileId} v{Version}",
            evt.EventType, evt.ProfileId, evt.ProfileVersion);
    }

    private static bool MatchesSubscription(ProfileEvent evt, EventSubscription subscription)
    {
        // Check event type filter.
        if (!subscription.EventTypes.Contains(evt.EventType))
        {
            return false;
        }
        // Check profile filter; a trailing '*' makes it a case-insensitive prefix match.
        if (!string.IsNullOrWhiteSpace(subscription.ProfileFilter))
        {
            if (subscription.ProfileFilter.EndsWith("*", StringComparison.Ordinal))
            {
                var prefix = subscription.ProfileFilter[..^1];
                if (!evt.ProfileId.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
                {
                    return false;
                }
            }
            else if (!evt.ProfileId.Equals(subscription.ProfileFilter, StringComparison.OrdinalIgnoreCase))
            {
                return false;
            }
        }
        return true;
    }

    // Event ids are "pev-" + first 16 hex chars of a random GUID.
    private static string GenerateEventId()
    {
        var guid = Guid.NewGuid().ToByteArray();
        return $"pev-{Convert.ToHexStringLower(guid)[..16]}";
    }

    // Subscription ids are "psub-" + first 16 hex chars of a random GUID.
    private static string GenerateSubscriptionId()
    {
        var guid = Guid.NewGuid().ToByteArray();
        return $"psub-{Convert.ToHexStringLower(guid)[..16]}";
    }
}

View File

@@ -0,0 +1,376 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Engine.Materialization;
/// <summary>
/// Represents an effective finding after policy evaluation.
/// Stored in tenant-scoped collections: effective_finding_{policyId}.
/// Instances are immutable; updates are modeled as new records with a bumped
/// <see cref="HistoryVersion"/>.
/// </summary>
public sealed record EffectiveFinding
{
    /// <summary>
    /// Unique identifier for this effective finding.
    /// Format: sha256:{hash of tenantId|policyId|componentPurl|advisoryId}
    /// </summary>
    [JsonPropertyName("_id")]
    public required string Id { get; init; }

    /// <summary>
    /// Tenant identifier (normalized to lowercase).
    /// </summary>
    [JsonPropertyName("tenantId")]
    public required string TenantId { get; init; }

    /// <summary>
    /// Policy identifier that produced this finding.
    /// </summary>
    [JsonPropertyName("policyId")]
    public required string PolicyId { get; init; }

    /// <summary>
    /// Policy version at time of evaluation.
    /// </summary>
    [JsonPropertyName("policyVersion")]
    public required int PolicyVersion { get; init; }

    /// <summary>
    /// Component PURL from the SBOM.
    /// </summary>
    [JsonPropertyName("componentPurl")]
    public required string ComponentPurl { get; init; }

    /// <summary>
    /// Component name.
    /// </summary>
    [JsonPropertyName("componentName")]
    public required string ComponentName { get; init; }

    /// <summary>
    /// Component version.
    /// </summary>
    [JsonPropertyName("componentVersion")]
    public required string ComponentVersion { get; init; }

    /// <summary>
    /// Advisory identifier (CVE, GHSA, etc.).
    /// </summary>
    [JsonPropertyName("advisoryId")]
    public required string AdvisoryId { get; init; }

    /// <summary>
    /// Advisory source.
    /// </summary>
    [JsonPropertyName("advisorySource")]
    public required string AdvisorySource { get; init; }

    /// <summary>
    /// Policy evaluation status (affected, blocked, suppressed, etc.).
    /// </summary>
    [JsonPropertyName("status")]
    public required string Status { get; init; }

    /// <summary>
    /// Normalized severity (Critical, High, Medium, Low, etc.).
    /// </summary>
    [JsonPropertyName("severity")]
    public string? Severity { get; init; }

    /// <summary>
    /// Rule name that matched (if any).
    /// </summary>
    [JsonPropertyName("ruleName")]
    public string? RuleName { get; init; }

    /// <summary>
    /// VEX status overlay (if VEX was applied).
    /// </summary>
    [JsonPropertyName("vexStatus")]
    public string? VexStatus { get; init; }

    /// <summary>
    /// VEX justification (if VEX was applied).
    /// </summary>
    [JsonPropertyName("vexJustification")]
    public string? VexJustification { get; init; }

    /// <summary>
    /// Policy evaluation annotations.
    /// </summary>
    [JsonPropertyName("annotations")]
    public ImmutableDictionary<string, string> Annotations { get; init; } =
        ImmutableDictionary<string, string>.Empty;

    /// <summary>
    /// Current history version (incremented on each update).
    /// </summary>
    [JsonPropertyName("historyVersion")]
    public required long HistoryVersion { get; init; }

    /// <summary>
    /// Reference to the policy run that produced this finding.
    /// </summary>
    [JsonPropertyName("policyRunId")]
    public string? PolicyRunId { get; init; }

    /// <summary>
    /// Trace ID for distributed tracing.
    /// </summary>
    [JsonPropertyName("traceId")]
    public string? TraceId { get; init; }

    /// <summary>
    /// Span ID for distributed tracing.
    /// </summary>
    [JsonPropertyName("spanId")]
    public string? SpanId { get; init; }

    /// <summary>
    /// When this finding was first created.
    /// </summary>
    [JsonPropertyName("createdAt")]
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>
    /// When this finding was last updated.
    /// </summary>
    [JsonPropertyName("updatedAt")]
    public required DateTimeOffset UpdatedAt { get; init; }

    /// <summary>
    /// Content hash for deduplication.
    /// </summary>
    [JsonPropertyName("contentHash")]
    public required string ContentHash { get; init; }

    /// <summary>
    /// Creates a deterministic finding ID.
    /// Tenant and PURL are trimmed and lowercased; policy and advisory ids are
    /// trimmed but keep their original casing, so they are case-sensitive keys.
    /// NOTE: the exact byte layout must stay stable — stored ids depend on it.
    /// </summary>
    public static string CreateId(string tenantId, string policyId, string componentPurl, string advisoryId)
    {
        var normalizedTenant = (tenantId ?? string.Empty).Trim().ToLowerInvariant();
        var normalizedPolicy = (policyId ?? string.Empty).Trim();
        var normalizedPurl = (componentPurl ?? string.Empty).Trim().ToLowerInvariant();
        var normalizedAdvisory = (advisoryId ?? string.Empty).Trim();
        var input = $"{normalizedTenant}|{normalizedPolicy}|{normalizedPurl}|{normalizedAdvisory}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>
    /// Computes a content hash for change detection.
    /// Annotations are folded in sorted by key (ordinal, case-insensitive) so the
    /// hash does not depend on dictionary enumeration order.
    /// </summary>
    /// <remarks>
    /// NOTE(review): fields are joined with '|' and '=' without escaping, so
    /// values containing those characters could in principle collide — fine for
    /// change detection, but confirm before reusing this hash as an identity.
    /// The byte layout must stay stable: stored content hashes depend on it.
    /// </remarks>
    public static string ComputeContentHash(
        string status,
        string? severity,
        string? ruleName,
        string? vexStatus,
        IReadOnlyDictionary<string, string>? annotations)
    {
        var sb = new StringBuilder();
        sb.Append(status ?? string.Empty);
        sb.Append('|');
        sb.Append(severity ?? string.Empty);
        sb.Append('|');
        sb.Append(ruleName ?? string.Empty);
        sb.Append('|');
        sb.Append(vexStatus ?? string.Empty);
        if (annotations is not null)
        {
            foreach (var kvp in annotations.OrderBy(x => x.Key, StringComparer.OrdinalIgnoreCase))
            {
                sb.Append('|');
                sb.Append(kvp.Key);
                sb.Append('=');
                sb.Append(kvp.Value);
            }
        }
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(sb.ToString()));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}
/// <summary>
/// Append-only history entry for effective finding changes.
/// Stored in: effective_finding_history_{policyId}.
/// Entries are never updated after insertion; each change appends a new entry
/// with a monotonically increasing <see cref="Version"/>.
/// </summary>
public sealed record EffectiveFindingHistoryEntry
{
    /// <summary>
    /// Unique identifier for this history entry ("{findingId}:v{version}").
    /// </summary>
    [JsonPropertyName("_id")]
    public required string Id { get; init; }

    /// <summary>
    /// Tenant identifier.
    /// </summary>
    [JsonPropertyName("tenantId")]
    public required string TenantId { get; init; }

    /// <summary>
    /// Reference to the effective finding.
    /// </summary>
    [JsonPropertyName("findingId")]
    public required string FindingId { get; init; }

    /// <summary>
    /// Policy identifier.
    /// </summary>
    [JsonPropertyName("policyId")]
    public required string PolicyId { get; init; }

    /// <summary>
    /// History version number (monotonically increasing).
    /// </summary>
    [JsonPropertyName("version")]
    public required long Version { get; init; }

    /// <summary>
    /// Type of change.
    /// </summary>
    [JsonPropertyName("changeType")]
    public required EffectiveFindingChangeType ChangeType { get; init; }

    /// <summary>
    /// Previous status (for status changes).
    /// </summary>
    [JsonPropertyName("previousStatus")]
    public string? PreviousStatus { get; init; }

    /// <summary>
    /// New status.
    /// </summary>
    [JsonPropertyName("newStatus")]
    public required string NewStatus { get; init; }

    /// <summary>
    /// Previous severity (for severity changes).
    /// </summary>
    [JsonPropertyName("previousSeverity")]
    public string? PreviousSeverity { get; init; }

    /// <summary>
    /// New severity.
    /// </summary>
    [JsonPropertyName("newSeverity")]
    public string? NewSeverity { get; init; }

    /// <summary>
    /// Previous content hash.
    /// </summary>
    /// <remarks>
    /// NOTE(review): the property name has a typo ("PreviContentHash"); the JSON
    /// name is correct. Renaming must be coordinated with all callers that set
    /// it (e.g. the materialization writer) in a single change.
    /// </remarks>
    [JsonPropertyName("previousContentHash")]
    public string? PreviContentHash { get; init; }

    /// <summary>
    /// New content hash.
    /// </summary>
    [JsonPropertyName("newContentHash")]
    public required string NewContentHash { get; init; }

    /// <summary>
    /// Policy run that triggered this change.
    /// </summary>
    [JsonPropertyName("policyRunId")]
    public string? PolicyRunId { get; init; }

    /// <summary>
    /// Trace ID for distributed tracing.
    /// </summary>
    [JsonPropertyName("traceId")]
    public string? TraceId { get; init; }

    /// <summary>
    /// When this change occurred.
    /// </summary>
    [JsonPropertyName("occurredAt")]
    public required DateTimeOffset OccurredAt { get; init; }

    /// <summary>
    /// Creates a deterministic history entry ID from the finding id and version.
    /// </summary>
    public static string CreateId(string findingId, long version)
    {
        return $"{findingId}:v{version}";
    }
}
/// <summary>
/// Type of change to an effective finding, recorded on each history entry.
/// Change classification is hierarchical: status wins over severity, severity
/// over VEX, VEX over policy version, and anything else is treated as an
/// annotations change (see the writer's DetermineChangeType).
/// </summary>
public enum EffectiveFindingChangeType
{
    /// <summary>Finding was created.</summary>
    Created,

    /// <summary>Status changed.</summary>
    StatusChanged,

    /// <summary>Severity changed.</summary>
    SeverityChanged,

    /// <summary>VEX overlay applied.</summary>
    VexApplied,

    /// <summary>Annotations changed.</summary>
    AnnotationsChanged,

    /// <summary>Policy version changed.</summary>
    PolicyVersionChanged
}
/// <summary>
/// Input for materializing effective findings.
/// Carries one (component, advisory) evaluation result to be upserted.
/// </summary>
public sealed record MaterializeFindingInput
{
    /// <summary>Tenant identifier; lowercased by the writer before storage.</summary>
    public required string TenantId { get; init; }

    /// <summary>Policy that produced the result.</summary>
    public required string PolicyId { get; init; }

    /// <summary>Policy version at evaluation time.</summary>
    public required int PolicyVersion { get; init; }

    /// <summary>Component PURL from the SBOM.</summary>
    public required string ComponentPurl { get; init; }

    /// <summary>Component name.</summary>
    public required string ComponentName { get; init; }

    /// <summary>Component version.</summary>
    public required string ComponentVersion { get; init; }

    /// <summary>Advisory identifier (CVE, GHSA, etc.).</summary>
    public required string AdvisoryId { get; init; }

    /// <summary>Advisory source.</summary>
    public required string AdvisorySource { get; init; }

    /// <summary>Policy evaluation status.</summary>
    public required string Status { get; init; }

    /// <summary>Normalized severity, if any.</summary>
    public string? Severity { get; init; }

    /// <summary>Rule name that matched, if any.</summary>
    public string? RuleName { get; init; }

    /// <summary>VEX status overlay, if VEX was applied.</summary>
    public string? VexStatus { get; init; }

    /// <summary>VEX justification, if VEX was applied.</summary>
    public string? VexJustification { get; init; }

    /// <summary>Optional evaluation annotations; null is stored as empty.</summary>
    public ImmutableDictionary<string, string>? Annotations { get; init; }

    /// <summary>Policy run that produced this result, if known.</summary>
    public string? PolicyRunId { get; init; }

    /// <summary>Trace ID for distributed tracing.</summary>
    public string? TraceId { get; init; }

    /// <summary>Span ID for distributed tracing.</summary>
    public string? SpanId { get; init; }
}
/// <summary>
/// Result of a materialization operation for a single finding.
/// At most one of <see cref="WasCreated"/> / <see cref="WasUpdated"/> is true;
/// both false means the content hash was unchanged and nothing was written.
/// </summary>
public sealed record MaterializeFindingResult
{
    /// <summary>Deterministic finding id the input mapped to.</summary>
    public required string FindingId { get; init; }

    /// <summary>True when a new finding was inserted.</summary>
    public required bool WasCreated { get; init; }

    /// <summary>True when an existing finding was updated.</summary>
    public required bool WasUpdated { get; init; }

    /// <summary>History version after the operation.</summary>
    public required long HistoryVersion { get; init; }

    /// <summary>Change classification; null when nothing changed.</summary>
    public EffectiveFindingChangeType? ChangeType { get; init; }
}
/// <summary>
/// Result of a batch materialization operation.
/// Created + Updated + Unchanged + Errors equals <see cref="TotalInputs"/>.
/// </summary>
public sealed record MaterializeBatchResult
{
    /// <summary>Number of inputs processed.</summary>
    public required int TotalInputs { get; init; }

    /// <summary>Findings newly inserted.</summary>
    public required int Created { get; init; }

    /// <summary>Findings updated in place.</summary>
    public required int Updated { get; init; }

    /// <summary>Inputs whose content hash matched the stored finding (no write).</summary>
    public required int Unchanged { get; init; }

    /// <summary>Inputs that failed; their errors are counted but not included in Results.</summary>
    public required int Errors { get; init; }

    /// <summary>Wall-clock processing time in milliseconds.</summary>
    public required long ProcessingTimeMs { get; init; }

    /// <summary>Per-input results for the successful inputs only.</summary>
    public ImmutableArray<MaterializeFindingResult> Results { get; init; } =
        ImmutableArray<MaterializeFindingResult>.Empty;
}

View File

@@ -0,0 +1,412 @@
using System.Collections.Immutable;
using System.Diagnostics;
namespace StellaOps.Policy.Engine.Materialization;
/// <summary>
/// Interface for the effective finding materialization store.
/// Implementations back tenant-scoped, per-policy collections.
/// </summary>
public interface IEffectiveFindingStore
{
    /// <summary>
    /// Gets an effective finding by ID, or null when it does not exist.
    /// </summary>
    Task<EffectiveFinding?> GetByIdAsync(
        string tenantId,
        string policyId,
        string findingId,
        CancellationToken cancellationToken);

    /// <summary>
    /// Upserts an effective finding (insert or update, keyed by its id).
    /// </summary>
    Task UpsertFindingAsync(
        EffectiveFinding finding,
        CancellationToken cancellationToken);

    /// <summary>
    /// Appends a history entry (insert only, never updates).
    /// </summary>
    Task AppendHistoryAsync(
        EffectiveFindingHistoryEntry entry,
        CancellationToken cancellationToken);

    /// <summary>
    /// Gets the collection name for findings for the given policy.
    /// </summary>
    string GetFindingsCollectionName(string policyId);

    /// <summary>
    /// Gets the collection name for history for the given policy.
    /// </summary>
    string GetHistoryCollectionName(string policyId);
}
/// <summary>
/// Materializes effective findings from policy evaluation results.
/// Implements upsert semantics with append-only history tracking: every
/// create/update writes the finding first and then appends a history entry
/// with the incremented history version.
/// </summary>
public sealed class EffectiveFindingWriter
{
    private readonly IEffectiveFindingStore _store;
    private readonly TimeProvider _timeProvider;

    public EffectiveFindingWriter(IEffectiveFindingStore store, TimeProvider? timeProvider = null)
    {
        _store = store ?? throw new ArgumentNullException(nameof(store));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Materializes a single effective finding: creates it if absent, updates it
    /// when the content hash changed, and skips the write when unchanged.
    /// </summary>
    /// <param name="input">Evaluation result to persist.</param>
    /// <param name="cancellationToken">Cancellation token flowed to the store.</param>
    /// <returns>Outcome including the deterministic finding id and change type.</returns>
    /// <remarks>
    /// NOTE(review): the finding upsert and the history append are two separate
    /// store calls, not a transaction — a crash between them could leave a
    /// finding without its matching history entry. Confirm whether the backing
    /// store provides atomicity before relying on history completeness.
    /// </remarks>
    public async Task<MaterializeFindingResult> MaterializeAsync(
        MaterializeFindingInput input,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(input);

        // Identity and content hash are both deterministic functions of the input.
        var findingId = EffectiveFinding.CreateId(
            input.TenantId,
            input.PolicyId,
            input.ComponentPurl,
            input.AdvisoryId);
        var contentHash = EffectiveFinding.ComputeContentHash(
            input.Status,
            input.Severity,
            input.RuleName,
            input.VexStatus,
            input.Annotations);
        var now = _timeProvider.GetUtcNow();

        // Try to get existing finding
        var existing = await _store.GetByIdAsync(
            input.TenantId,
            input.PolicyId,
            findingId,
            cancellationToken).ConfigureAwait(false);

        if (existing is null)
        {
            // Create new finding at history version 1.
            var newFinding = CreateFinding(input, findingId, contentHash, now, historyVersion: 1);
            await _store.UpsertFindingAsync(newFinding, cancellationToken).ConfigureAwait(false);

            // Append creation history
            var historyEntry = CreateHistoryEntry(
                findingId,
                input,
                version: 1,
                EffectiveFindingChangeType.Created,
                previousStatus: null,
                previousSeverity: null,
                previousContentHash: null,
                newContentHash: contentHash,
                now);
            await _store.AppendHistoryAsync(historyEntry, cancellationToken).ConfigureAwait(false);

            return new MaterializeFindingResult
            {
                FindingId = findingId,
                WasCreated = true,
                WasUpdated = false,
                HistoryVersion = 1,
                ChangeType = EffectiveFindingChangeType.Created
            };
        }

        // Check if content changed (exact, case-sensitive hash comparison).
        if (string.Equals(existing.ContentHash, contentHash, StringComparison.Ordinal))
        {
            // No change - skip update
            return new MaterializeFindingResult
            {
                FindingId = findingId,
                WasCreated = false,
                WasUpdated = false,
                HistoryVersion = existing.HistoryVersion,
                ChangeType = null
            };
        }

        // Determine change type
        var changeType = DetermineChangeType(existing, input);
        var newVersion = existing.HistoryVersion + 1;

        // Update finding; CreatedAt is preserved from the existing record and
        // only UpdatedAt moves to "now".
        var updatedFinding = CreateFinding(input, findingId, contentHash, existing.CreatedAt, newVersion) with
        {
            UpdatedAt = now
        };
        await _store.UpsertFindingAsync(updatedFinding, cancellationToken).ConfigureAwait(false);

        // Append history entry
        var updateHistory = CreateHistoryEntry(
            findingId,
            input,
            newVersion,
            changeType,
            existing.Status,
            existing.Severity,
            existing.ContentHash,
            contentHash,
            now);
        await _store.AppendHistoryAsync(updateHistory, cancellationToken).ConfigureAwait(false);

        return new MaterializeFindingResult
        {
            FindingId = findingId,
            WasCreated = false,
            WasUpdated = true,
            HistoryVersion = newVersion,
            ChangeType = changeType
        };
    }

    /// <summary>
    /// Materializes a batch of effective findings with deterministic ordering.
    /// Inputs are processed sequentially after sorting by tenant/policy/purl/advisory
    /// so repeated runs over the same inputs write in the same order.
    /// Per-input failures are counted (not rethrown); cancellation propagates.
    /// </summary>
    public async Task<MaterializeBatchResult> MaterializeBatchAsync(
        IEnumerable<MaterializeFindingInput> inputs,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(inputs);
        var stopwatch = Stopwatch.StartNew();

        // Process in deterministic order
        var orderedInputs = inputs
            .OrderBy(i => i.TenantId, StringComparer.OrdinalIgnoreCase)
            .ThenBy(i => i.PolicyId, StringComparer.OrdinalIgnoreCase)
            .ThenBy(i => i.ComponentPurl, StringComparer.OrdinalIgnoreCase)
            .ThenBy(i => i.AdvisoryId, StringComparer.OrdinalIgnoreCase)
            .ToList();

        var results = new List<MaterializeFindingResult>();
        var created = 0;
        var updated = 0;
        var unchanged = 0;
        var errors = 0;

        foreach (var input in orderedInputs)
        {
            try
            {
                var result = await MaterializeAsync(input, cancellationToken).ConfigureAwait(false);
                results.Add(result);
                if (result.WasCreated)
                {
                    created++;
                }
                else if (result.WasUpdated)
                {
                    updated++;
                }
                else
                {
                    unchanged++;
                }
            }
            catch (OperationCanceledException)
            {
                // Cancellation is not a per-input error; abort the batch.
                throw;
            }
            catch
            {
                // Best-effort batch: failures are tallied but the failing input
                // and exception are not surfaced in the result.
                errors++;
            }
        }

        stopwatch.Stop();
        return new MaterializeBatchResult
        {
            TotalInputs = orderedInputs.Count,
            Created = created,
            Updated = updated,
            Unchanged = unchanged,
            Errors = errors,
            ProcessingTimeMs = stopwatch.ElapsedMilliseconds,
            Results = results.ToImmutableArray()
        };
    }

    // Builds the stored finding record; UpdatedAt starts equal to createdAt and
    // is overwritten via `with` on the update path.
    private static EffectiveFinding CreateFinding(
        MaterializeFindingInput input,
        string findingId,
        string contentHash,
        DateTimeOffset createdAt,
        long historyVersion)
    {
        return new EffectiveFinding
        {
            Id = findingId,
            TenantId = input.TenantId.ToLowerInvariant(),
            PolicyId = input.PolicyId,
            PolicyVersion = input.PolicyVersion,
            ComponentPurl = input.ComponentPurl,
            ComponentName = input.ComponentName,
            ComponentVersion = input.ComponentVersion,
            AdvisoryId = input.AdvisoryId,
            AdvisorySource = input.AdvisorySource,
            Status = input.Status,
            Severity = input.Severity,
            RuleName = input.RuleName,
            VexStatus = input.VexStatus,
            VexJustification = input.VexJustification,
            Annotations = input.Annotations ?? ImmutableDictionary<string, string>.Empty,
            HistoryVersion = historyVersion,
            PolicyRunId = input.PolicyRunId,
            TraceId = input.TraceId,
            SpanId = input.SpanId,
            CreatedAt = createdAt,
            UpdatedAt = createdAt,
            ContentHash = contentHash
        };
    }

    // Builds the append-only history record for one change.
    private static EffectiveFindingHistoryEntry CreateHistoryEntry(
        string findingId,
        MaterializeFindingInput input,
        long version,
        EffectiveFindingChangeType changeType,
        string? previousStatus,
        string? previousSeverity,
        string? previousContentHash,
        string newContentHash,
        DateTimeOffset occurredAt)
    {
        return new EffectiveFindingHistoryEntry
        {
            Id = EffectiveFindingHistoryEntry.CreateId(findingId, version),
            TenantId = input.TenantId.ToLowerInvariant(),
            FindingId = findingId,
            PolicyId = input.PolicyId,
            Version = version,
            ChangeType = changeType,
            PreviousStatus = previousStatus,
            NewStatus = input.Status,
            PreviousSeverity = previousSeverity,
            NewSeverity = input.Severity,
            // Property name carries a known typo; JSON name is correct.
            PreviContentHash = previousContentHash,
            NewContentHash = newContentHash,
            PolicyRunId = input.PolicyRunId,
            TraceId = input.TraceId,
            OccurredAt = occurredAt
        };
    }

    // Classifies an update by the first differing field, checked in priority
    // order: status > severity > VEX status > policy version > annotations.
    // Only one change type is recorded even when several fields differ.
    private static EffectiveFindingChangeType DetermineChangeType(
        EffectiveFinding existing,
        MaterializeFindingInput input)
    {
        // Check for status change
        if (!string.Equals(existing.Status, input.Status, StringComparison.OrdinalIgnoreCase))
        {
            return EffectiveFindingChangeType.StatusChanged;
        }
        // Check for severity change
        if (!string.Equals(existing.Severity, input.Severity, StringComparison.OrdinalIgnoreCase))
        {
            return EffectiveFindingChangeType.SeverityChanged;
        }
        // Check for VEX change
        if (!string.Equals(existing.VexStatus, input.VexStatus, StringComparison.OrdinalIgnoreCase))
        {
            return EffectiveFindingChangeType.VexApplied;
        }
        // Check for policy version change
        if (existing.PolicyVersion != input.PolicyVersion)
        {
            return EffectiveFindingChangeType.PolicyVersionChanged;
        }
        // Default to annotations changed (content hash differed, nothing above did).
        return EffectiveFindingChangeType.AnnotationsChanged;
    }
}
/// <summary>
/// In-memory implementation of effective finding store for testing.
/// All state lives behind a single lock; findings are keyed by
/// "{tenant}:{policy}:{id}" and history is a plain append-only list.
/// </summary>
public sealed class InMemoryEffectiveFindingStore : IEffectiveFindingStore
{
    private readonly object _gate = new();
    private readonly Dictionary<string, EffectiveFinding> _findingsByKey = new(StringComparer.OrdinalIgnoreCase);
    private readonly List<EffectiveFindingHistoryEntry> _historyEntries = new();

    /// <summary>Looks up a finding by id, or null when absent.</summary>
    public Task<EffectiveFinding?> GetByIdAsync(
        string tenantId,
        string policyId,
        string findingId,
        CancellationToken cancellationToken)
    {
        var lookupKey = BuildKey(tenantId.ToLowerInvariant(), policyId, findingId);
        EffectiveFinding? match;
        lock (_gate)
        {
            _findingsByKey.TryGetValue(lookupKey, out match);
        }
        return Task.FromResult(match);
    }

    /// <summary>Inserts or replaces a finding, keyed by tenant/policy/id.</summary>
    public Task UpsertFindingAsync(EffectiveFinding finding, CancellationToken cancellationToken)
    {
        var storageKey = BuildKey(finding.TenantId, finding.PolicyId, finding.Id);
        lock (_gate)
        {
            _findingsByKey[storageKey] = finding;
        }
        return Task.CompletedTask;
    }

    /// <summary>Appends a history entry; entries are never modified afterwards.</summary>
    public Task AppendHistoryAsync(EffectiveFindingHistoryEntry entry, CancellationToken cancellationToken)
    {
        lock (_gate)
        {
            _historyEntries.Add(entry);
        }
        return Task.CompletedTask;
    }

    /// <summary>Collection name for findings of the given policy.</summary>
    public string GetFindingsCollectionName(string policyId)
        => "effective_finding_" + policyId.ToLowerInvariant();

    /// <summary>Collection name for history of the given policy.</summary>
    public string GetHistoryCollectionName(string policyId)
        => "effective_finding_history_" + policyId.ToLowerInvariant();

    /// <summary>Snapshot of every stored finding (test helper).</summary>
    public IReadOnlyList<EffectiveFinding> GetAllFindings()
    {
        lock (_gate)
        {
            return new List<EffectiveFinding>(_findingsByKey.Values);
        }
    }

    /// <summary>Snapshot of the full history list (test helper).</summary>
    public IReadOnlyList<EffectiveFindingHistoryEntry> GetAllHistory()
    {
        lock (_gate)
        {
            return new List<EffectiveFindingHistoryEntry>(_historyEntries);
        }
    }

    /// <summary>History entries for one finding, ordered by version (test helper).</summary>
    public IReadOnlyList<EffectiveFindingHistoryEntry> GetHistoryForFinding(string findingId)
    {
        lock (_gate)
        {
            return _historyEntries
                .Where(entry => entry.FindingId == findingId)
                .OrderBy(entry => entry.Version)
                .ToList();
        }
    }

    // Composes the dictionary key; comparison is case-insensitive at the dictionary level.
    private static string BuildKey(string tenantId, string policyId, string findingId)
        => $"{tenantId}:{policyId}:{findingId}";
}

View File

@@ -1,5 +1,6 @@
using System.Collections.ObjectModel;
using StellaOps.Auth.Abstractions;
using StellaOps.Policy.Engine.ReachabilityFacts;
using StellaOps.Policy.Engine.Telemetry;
namespace StellaOps.Policy.Engine.Options;
@@ -27,6 +28,8 @@ public sealed class PolicyEngineOptions
public PolicyEngineRiskProfileOptions RiskProfile { get; } = new();
public ReachabilityFactsCacheOptions ReachabilityCache { get; } = new();
public void Validate()
{
Authority.Validate();

View File

@@ -16,6 +16,7 @@ using StellaOps.Policy.Engine.Streaming;
using StellaOps.Policy.Engine.Telemetry;
using StellaOps.AirGap.Policy;
using StellaOps.Policy.Engine.Orchestration;
using StellaOps.Policy.Engine.ReachabilityFacts;
var builder = WebApplication.CreateBuilder(args);
@@ -116,8 +117,13 @@ builder.Services.AddSingleton<PolicyEvaluationAttestationService>();
builder.Services.AddSingleton<IncidentModeService>();
builder.Services.AddSingleton<RiskProfileConfigurationService>();
builder.Services.AddSingleton<StellaOps.Policy.RiskProfile.Lifecycle.RiskProfileLifecycleService>();
builder.Services.AddSingleton<StellaOps.Policy.RiskProfile.Scope.ScopeAttachmentService>();
builder.Services.AddSingleton<StellaOps.Policy.RiskProfile.Overrides.OverrideService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Scoring.IRiskScoringJobStore, StellaOps.Policy.Engine.Scoring.InMemoryRiskScoringJobStore>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Scoring.RiskScoringTriggerService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Simulation.RiskSimulationService>();
builder.Services.AddSingleton<StellaOps.Policy.RiskProfile.Export.ProfileExportService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Events.ProfileEventPublisher>();
builder.Services.AddHostedService<IncidentModeExpirationWorker>();
builder.Services.AddHostedService<PolicyEngineBootstrapWorker>();
builder.Services.AddSingleton<StellaOps.PolicyDsl.PolicyCompiler>();
@@ -148,6 +154,10 @@ builder.Services.AddSingleton<StellaOps.Policy.Engine.Violations.IViolationEvent
builder.Services.AddSingleton<StellaOps.Policy.Engine.Violations.ViolationEventService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Violations.SeverityFusionService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Violations.ConflictHandlingService>();
builder.Services.AddSingleton<StellaOps.Policy.Engine.Services.PolicyDecisionService>();
builder.Services.AddSingleton<IReachabilityFactsStore, InMemoryReachabilityFactsStore>();
builder.Services.AddSingleton<IReachabilityFactsOverlayCache, InMemoryReachabilityFactsOverlayCache>();
builder.Services.AddSingleton<ReachabilityFactsJoiningService>();
builder.Services.AddHttpContextAccessor();
builder.Services.AddRouting(options => options.LowercaseUrls = true);
@@ -205,7 +215,13 @@ app.MapPolicyWorker();
app.MapLedgerExport();
app.MapSnapshots();
app.MapViolations();
app.MapPolicyDecisions();
app.MapRiskProfiles();
app.MapRiskProfileSchema();
app.MapScopeAttachments();
app.MapRiskSimulation();
app.MapOverrides();
app.MapProfileExport();
app.MapProfileEvents();
app.Run();

View File

@@ -0,0 +1,270 @@
using System.Diagnostics;
using Microsoft.Extensions.Logging;
using StellaOps.Policy.Engine.Telemetry;
namespace StellaOps.Policy.Engine.ReachabilityFacts;
/// <summary>
/// Service for joining reachability facts with policy evaluation inputs.
/// Provides efficient batch lookups with caching and metrics.
/// </summary>
/// <remarks>
/// Read path is cache-first with store fallback; store results are written back to
/// the cache. All reads are keyed by (tenant, component PURL, advisory id).
/// </remarks>
public sealed class ReachabilityFactsJoiningService
{
    private readonly IReachabilityFactsStore _store;
    private readonly IReachabilityFactsOverlayCache _cache;
    private readonly ILogger<ReachabilityFactsJoiningService> _logger;
    // NOTE(review): injected and null-checked but never read by any method in this
    // class — either wire it into time-dependent logic or remove it. TODO confirm.
    private readonly TimeProvider _timeProvider;
    /// <summary>
    /// Creates the joining service. All dependencies are required.
    /// </summary>
    public ReachabilityFactsJoiningService(
        IReachabilityFactsStore store,
        IReachabilityFactsOverlayCache cache,
        ILogger<ReachabilityFactsJoiningService> logger,
        TimeProvider timeProvider)
    {
        _store = store ?? throw new ArgumentNullException(nameof(store));
        _cache = cache ?? throw new ArgumentNullException(nameof(cache));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
    }
    /// <summary>
    /// Gets reachability facts for a batch of component-advisory pairs.
    /// Uses cache-first strategy with store fallback.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="items">List of component-advisory pairs. Duplicates are collapsed before lookup.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Batch result with facts and cache statistics.</returns>
    public async Task<ReachabilityFactsBatch> GetFactsBatchAsync(
        string tenantId,
        IReadOnlyList<ReachabilityFactsRequest> items,
        CancellationToken cancellationToken = default)
    {
        using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity(
            "reachability_facts.batch_lookup",
            ActivityKind.Internal);
        activity?.SetTag("tenant", tenantId);
        activity?.SetTag("batch_size", items.Count);
        // Distinct() here means the returned hit/miss counts reflect unique keys,
        // not the raw item count.
        var keys = items
            .Select(i => new ReachabilityFactKey(tenantId, i.ComponentPurl, i.AdvisoryId))
            .Distinct()
            .ToList();
        // Try cache first
        var cacheResult = await _cache.GetBatchAsync(keys, cancellationToken).ConfigureAwait(false);
        ReachabilityFactsTelemetry.RecordCacheHits(cacheResult.CacheHits);
        ReachabilityFactsTelemetry.RecordCacheMisses(cacheResult.CacheMisses);
        activity?.SetTag("cache_hits", cacheResult.CacheHits);
        activity?.SetTag("cache_misses", cacheResult.CacheMisses);
        if (cacheResult.NotFound.Count == 0)
        {
            // All items found in cache
            return cacheResult;
        }
        // Fetch missing items from store
        var storeResults = await _store.GetBatchAsync(cacheResult.NotFound, cancellationToken)
            .ConfigureAwait(false);
        activity?.SetTag("store_hits", storeResults.Count);
        // Populate cache with store results
        if (storeResults.Count > 0)
        {
            await _cache.SetBatchAsync(storeResults, cancellationToken).ConfigureAwait(false);
        }
        // Merge results: store results layered over cache hits (keys are disjoint,
        // since the store was only asked for the cache's NotFound keys).
        var allFound = new Dictionary<ReachabilityFactKey, ReachabilityFact>(cacheResult.Found);
        foreach (var (key, fact) in storeResults)
        {
            allFound[key] = fact;
        }
        var stillNotFound = cacheResult.NotFound
            .Where(k => !storeResults.ContainsKey(k))
            .ToList();
        _logger.LogDebug(
            "Reachability facts lookup: {Total} requested, {CacheHits} cache hits, {StoreFetched} from store, {NotFound} not found",
            keys.Count,
            cacheResult.CacheHits,
            storeResults.Count,
            stillNotFound.Count);
        return new ReachabilityFactsBatch
        {
            Found = allFound,
            NotFound = stillNotFound,
            CacheHits = cacheResult.CacheHits,
            CacheMisses = cacheResult.CacheMisses,
        };
    }
    /// <summary>
    /// Gets a single reachability fact. Cache-first; a store hit is written back
    /// to the cache before returning. Returns null when the fact is unknown.
    /// </summary>
    public async Task<ReachabilityFact?> GetFactAsync(
        string tenantId,
        string componentPurl,
        string advisoryId,
        CancellationToken cancellationToken = default)
    {
        var key = new ReachabilityFactKey(tenantId, componentPurl, advisoryId);
        // Try cache first
        var (cached, cacheHit) = await _cache.GetAsync(key, cancellationToken).ConfigureAwait(false);
        if (cacheHit)
        {
            ReachabilityFactsTelemetry.RecordCacheHits(1);
            return cached;
        }
        ReachabilityFactsTelemetry.RecordCacheMisses(1);
        // Fall back to store
        var fact = await _store.GetAsync(tenantId, componentPurl, advisoryId, cancellationToken)
            .ConfigureAwait(false);
        if (fact != null)
        {
            await _cache.SetAsync(key, fact, cancellationToken).ConfigureAwait(false);
        }
        return fact;
    }
    /// <summary>
    /// Enriches signal context with reachability facts.
    /// Always writes a "reachability" entry: either the fact's values, or a
    /// default "unknown" record when no fact exists.
    /// </summary>
    /// <param name="tenantId">Tenant identifier.</param>
    /// <param name="componentPurl">Component PURL.</param>
    /// <param name="advisoryId">Advisory ID.</param>
    /// <param name="signals">Signal context to enrich. The "reachability" key is overwritten.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if reachability fact was found and applied.</returns>
    public async Task<bool> EnrichSignalsAsync(
        string tenantId,
        string componentPurl,
        string advisoryId,
        IDictionary<string, object?> signals,
        CancellationToken cancellationToken = default)
    {
        var fact = await GetFactAsync(tenantId, componentPurl, advisoryId, cancellationToken)
            .ConfigureAwait(false);
        if (fact == null)
        {
            // Set default unknown state
            signals["reachability"] = new Dictionary<string, object?>(StringComparer.Ordinal)
            {
                ["state"] = "unknown",
                ["confidence"] = 0m,
                ["score"] = 0m,
                ["has_runtime_evidence"] = false,
            };
            return false;
        }
        // Enum names are lower-cased here so policy expressions can match the
        // snake-less lowercase form (e.g. "reachable", "underinvestigation").
        signals["reachability"] = new Dictionary<string, object?>(StringComparer.Ordinal)
        {
            ["state"] = fact.State.ToString().ToLowerInvariant(),
            ["confidence"] = fact.Confidence,
            ["score"] = fact.Score,
            ["has_runtime_evidence"] = fact.HasRuntimeEvidence,
            ["source"] = fact.Source,
            ["method"] = fact.Method.ToString().ToLowerInvariant(),
        };
        ReachabilityFactsTelemetry.RecordFactApplied(fact.State.ToString().ToLowerInvariant());
        return true;
    }
    /// <summary>
    /// Saves a new reachability fact and updates the cache.
    /// Store write happens first; the cache is only updated after it succeeds.
    /// </summary>
    public async Task SaveFactAsync(
        ReachabilityFact fact,
        CancellationToken cancellationToken = default)
    {
        await _store.SaveAsync(fact, cancellationToken).ConfigureAwait(false);
        var key = new ReachabilityFactKey(fact.TenantId, fact.ComponentPurl, fact.AdvisoryId);
        await _cache.SetAsync(key, fact, cancellationToken).ConfigureAwait(false);
        _logger.LogDebug(
            "Saved reachability fact: {TenantId}/{ComponentPurl}/{AdvisoryId} = {State} ({Confidence:P0})",
            fact.TenantId,
            fact.ComponentPurl,
            fact.AdvisoryId,
            fact.State,
            fact.Confidence);
    }
    /// <summary>
    /// Invalidates cache entries when reachability facts are updated externally.
    /// Only the overlay cache is touched; the store is not modified.
    /// </summary>
    public Task InvalidateCacheAsync(
        string tenantId,
        string componentPurl,
        string advisoryId,
        CancellationToken cancellationToken = default)
    {
        var key = new ReachabilityFactKey(tenantId, componentPurl, advisoryId);
        return _cache.InvalidateAsync(key, cancellationToken);
    }
    /// <summary>
    /// Gets cache statistics.
    /// </summary>
    public ReachabilityFactsCacheStats GetCacheStats() => _cache.GetStats();
}
/// <summary>
/// Request item for batch reachability facts lookup.
/// </summary>
/// <param name="ComponentPurl">Package URL identifying the component.</param>
/// <param name="AdvisoryId">Advisory identifier (e.g. a CVE or GHSA id).</param>
public sealed record ReachabilityFactsRequest(string ComponentPurl, string AdvisoryId);
/// <summary>
/// Telemetry for reachability facts operations.
/// Delegates to PolicyEngineTelemetry for centralized metrics.
/// </summary>
public static class ReachabilityFactsTelemetry
{
    /// <summary>
    /// Records cache hits.
    /// </summary>
    public static void RecordCacheHits(int count) =>
        PolicyEngineTelemetry.RecordReachabilityCacheHits(count);

    /// <summary>
    /// Records cache misses.
    /// </summary>
    public static void RecordCacheMisses(int count) =>
        PolicyEngineTelemetry.RecordReachabilityCacheMisses(count);

    /// <summary>
    /// Records a reachability fact being applied.
    /// </summary>
    public static void RecordFactApplied(string state) =>
        PolicyEngineTelemetry.RecordReachabilityApplied(state);

    /// <summary>
    /// Gets the current cache hit ratio from stats.
    /// </summary>
    public static double GetCacheHitRatio(ReachabilityFactsCacheStats stats) =>
        stats.HitRatio;
}

View File

@@ -0,0 +1,258 @@
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Engine.ReachabilityFacts;
/// <summary>
/// Represents a reachability fact for a component-vulnerability pair.
/// Immutable record; identity is the (tenant, component PURL, advisory) triple.
/// </summary>
public sealed record ReachabilityFact
{
    /// <summary>
    /// Unique identifier for this reachability fact.
    /// </summary>
    [JsonPropertyName("id")]
    public required string Id { get; init; }
    /// <summary>
    /// Tenant identifier.
    /// </summary>
    [JsonPropertyName("tenant_id")]
    public required string TenantId { get; init; }
    /// <summary>
    /// Component PURL this fact applies to.
    /// </summary>
    [JsonPropertyName("component_purl")]
    public required string ComponentPurl { get; init; }
    /// <summary>
    /// Vulnerability/advisory identifier (CVE, GHSA, etc.).
    /// </summary>
    [JsonPropertyName("advisory_id")]
    public required string AdvisoryId { get; init; }
    /// <summary>
    /// Reachability state (reachable, unreachable, unknown, under_investigation).
    /// </summary>
    [JsonPropertyName("state")]
    public required ReachabilityState State { get; init; }
    /// <summary>
    /// Confidence score (0.0 to 1.0).
    /// </summary>
    [JsonPropertyName("confidence")]
    public required decimal Confidence { get; init; }
    /// <summary>
    /// Reachability score (0.0 to 1.0, higher = more reachable).
    /// Defaults to 0 when not supplied.
    /// </summary>
    [JsonPropertyName("score")]
    public decimal Score { get; init; }
    /// <summary>
    /// Whether this fact has runtime evidence backing it.
    /// </summary>
    [JsonPropertyName("has_runtime_evidence")]
    public bool HasRuntimeEvidence { get; init; }
    /// <summary>
    /// Source of the reachability analysis.
    /// </summary>
    [JsonPropertyName("source")]
    public required string Source { get; init; }
    /// <summary>
    /// Analysis method used (static, dynamic, hybrid).
    /// </summary>
    [JsonPropertyName("method")]
    public required AnalysisMethod Method { get; init; }
    /// <summary>
    /// Reference to the call graph or evidence artifact.
    /// </summary>
    [JsonPropertyName("evidence_ref")]
    public string? EvidenceRef { get; init; }
    /// <summary>
    /// Content hash of the analysis evidence.
    /// </summary>
    [JsonPropertyName("evidence_hash")]
    public string? EvidenceHash { get; init; }
    /// <summary>
    /// Timestamp when this fact was computed.
    /// </summary>
    [JsonPropertyName("computed_at")]
    public required DateTimeOffset ComputedAt { get; init; }
    /// <summary>
    /// Timestamp when this fact expires and should be recomputed.
    /// Null means the fact does not expire.
    /// </summary>
    [JsonPropertyName("expires_at")]
    public DateTimeOffset? ExpiresAt { get; init; }
    /// <summary>
    /// Additional metadata.
    /// </summary>
    /// <remarks>
    /// NOTE(review): this is a mutable dictionary on an otherwise immutable record;
    /// consider IReadOnlyDictionary if shared-instance mutation is a concern.
    /// </remarks>
    [JsonPropertyName("metadata")]
    public Dictionary<string, object?>? Metadata { get; init; }
}
/// <summary>
/// Reachability state enumeration aligned with VEX status semantics.
/// </summary>
/// <remarks>
/// NOTE(review): System.Text.Json's string-enum converter ignores
/// [JsonPropertyName] on enum members, so values serialize as the member names
/// ("Reachable", "UnderInvestigation") rather than the snake_case names below,
/// and "under_investigation" will not deserialize. Renaming enum members needs a
/// naming policy on the converter or [JsonStringEnumMemberName] (.NET 9+) —
/// confirm the target framework and intended wire format.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<ReachabilityState>))]
public enum ReachabilityState
{
    /// <summary>
    /// The vulnerable code path is reachable from application entry points.
    /// </summary>
    [JsonPropertyName("reachable")]
    Reachable,
    /// <summary>
    /// The vulnerable code path is not reachable from application entry points.
    /// </summary>
    [JsonPropertyName("unreachable")]
    Unreachable,
    /// <summary>
    /// Reachability status is unknown or could not be determined.
    /// </summary>
    [JsonPropertyName("unknown")]
    Unknown,
    /// <summary>
    /// Reachability is under investigation and may change.
    /// </summary>
    [JsonPropertyName("under_investigation")]
    UnderInvestigation,
}
/// <summary>
/// Analysis method enumeration.
/// </summary>
/// <remarks>
/// NOTE(review): as with <see cref="ReachabilityState"/>, [JsonPropertyName] on
/// enum members is ignored by the string-enum converter; members serialize as
/// their C# names ("Static", "Dynamic", ...). Confirm the intended wire format.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<AnalysisMethod>))]
public enum AnalysisMethod
{
    /// <summary>
    /// Static analysis (call graph, data flow).
    /// </summary>
    [JsonPropertyName("static")]
    Static,
    /// <summary>
    /// Dynamic analysis (runtime profiling, instrumentation).
    /// </summary>
    [JsonPropertyName("dynamic")]
    Dynamic,
    /// <summary>
    /// Hybrid approach combining static and dynamic analysis.
    /// </summary>
    [JsonPropertyName("hybrid")]
    Hybrid,
    /// <summary>
    /// Manual assessment or expert judgment.
    /// </summary>
    [JsonPropertyName("manual")]
    Manual,
}
/// <summary>
/// Query parameters for fetching reachability facts.
/// Optional filters combine with AND; a null filter means "no restriction".
/// </summary>
public sealed record ReachabilityFactsQuery
{
    /// <summary>
    /// Tenant identifier (required).
    /// </summary>
    public required string TenantId { get; init; }
    /// <summary>
    /// Component PURLs to query (optional filter).
    /// </summary>
    public IReadOnlyList<string>? ComponentPurls { get; init; }
    /// <summary>
    /// Advisory IDs to query (optional filter).
    /// </summary>
    public IReadOnlyList<string>? AdvisoryIds { get; init; }
    /// <summary>
    /// Filter by reachability states (optional).
    /// </summary>
    public IReadOnlyList<ReachabilityState>? States { get; init; }
    /// <summary>
    /// Minimum confidence threshold (optional, inclusive).
    /// </summary>
    public decimal? MinConfidence { get; init; }
    /// <summary>
    /// Include expired facts (default: false).
    /// </summary>
    public bool IncludeExpired { get; init; }
    /// <summary>
    /// Maximum number of results. Defaults to 1000.
    /// </summary>
    public int Limit { get; init; } = 1000;
    /// <summary>
    /// Skip count for pagination. Applied before <see cref="Limit"/>.
    /// </summary>
    public int Skip { get; init; }
}
/// <summary>
/// Composite key for caching reachability facts.
/// </summary>
public readonly record struct ReachabilityFactKey(string TenantId, string ComponentPurl, string AdvisoryId)
{
    /// <summary>
    /// Creates a cache key string from this composite key.
    /// Format: "rf:{tenant}:{purl}:{advisory}".
    /// </summary>
    public string ToCacheKey() => $"rf:{TenantId}:{ComponentPurl}:{AdvisoryId}";

    /// <summary>
    /// Parses a cache key back into a composite key.
    /// </summary>
    /// <remarks>
    /// Component PURLs always contain ':' (e.g. "pkg:npm/left-pad@1.0.0"), so a naive
    /// left-to-right split on ':' would truncate the PURL and fail to round-trip
    /// <see cref="ToCacheKey"/>. Instead the tenant is taken as the first segment and
    /// the advisory as the last; everything in between is the PURL. This assumes
    /// tenant ids and advisory ids themselves contain no ':' — TODO confirm.
    /// </remarks>
    /// <returns>The parsed key, or null if the input is not a valid cache key.</returns>
    public static ReachabilityFactKey? FromCacheKey(string key)
    {
        const string prefix = "rf:";
        if (string.IsNullOrEmpty(key) || !key.StartsWith(prefix, StringComparison.Ordinal))
        {
            return null;
        }

        var body = key.AsSpan(prefix.Length);
        var firstColon = body.IndexOf(':');
        var lastColon = body.LastIndexOf(':');
        if (firstColon < 0 || lastColon <= firstColon)
        {
            // Need at least three segments: tenant, purl, advisory.
            return null;
        }

        return new ReachabilityFactKey(
            body[..firstColon].ToString(),
            body[(firstColon + 1)..lastColon].ToString(),
            body[(lastColon + 1)..].ToString());
    }
}
/// <summary>
/// Batch lookup result for reachability facts.
/// <see cref="Found"/> and <see cref="NotFound"/> together partition the
/// requested keys.
/// </summary>
public sealed record ReachabilityFactsBatch
{
    /// <summary>
    /// Facts that were found.
    /// </summary>
    public required IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact> Found { get; init; }
    /// <summary>
    /// Keys that were not found.
    /// </summary>
    public required IReadOnlyList<ReachabilityFactKey> NotFound { get; init; }
    /// <summary>
    /// Number of cache hits.
    /// </summary>
    public int CacheHits { get; init; }
    /// <summary>
    /// Number of cache misses that required store lookup.
    /// </summary>
    public int CacheMisses { get; init; }
}

View File

@@ -0,0 +1,333 @@
using System.Collections.Concurrent;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Policy.Engine.Options;
using StellaOps.Policy.Engine.Telemetry;
namespace StellaOps.Policy.Engine.ReachabilityFacts;
/// <summary>
/// Interface for the reachability facts overlay cache.
/// Provides fast in-memory/Redis caching layer above the persistent store.
/// </summary>
/// <remarks>
/// Implementations are expected to be safe for concurrent use — presumably, since
/// the in-memory implementation is; confirm for any distributed variant.
/// </remarks>
public interface IReachabilityFactsOverlayCache
{
    /// <summary>
    /// Gets a reachability fact from the cache.
    /// CacheHit is false when the entry is absent or expired.
    /// </summary>
    Task<(ReachabilityFact? Fact, bool CacheHit)> GetAsync(
        ReachabilityFactKey key,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets multiple reachability facts from the cache.
    /// </summary>
    Task<ReachabilityFactsBatch> GetBatchAsync(
        IReadOnlyList<ReachabilityFactKey> keys,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Sets a reachability fact in the cache, replacing any existing entry.
    /// </summary>
    Task SetAsync(
        ReachabilityFactKey key,
        ReachabilityFact fact,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Sets multiple reachability facts in the cache.
    /// </summary>
    Task SetBatchAsync(
        IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact> facts,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Invalidates a cache entry.
    /// </summary>
    Task InvalidateAsync(
        ReachabilityFactKey key,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Invalidates all cache entries for a tenant.
    /// </summary>
    Task InvalidateTenantAsync(
        string tenantId,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets cache statistics.
    /// </summary>
    ReachabilityFactsCacheStats GetStats();
}
/// <summary>
/// Cache statistics.
/// </summary>
public sealed record ReachabilityFactsCacheStats
{
    /// <summary>Total number of lookups served.</summary>
    public long TotalRequests { get; init; }
    /// <summary>Lookups satisfied by a live cache entry.</summary>
    public long CacheHits { get; init; }
    /// <summary>Lookups that found no entry (or only an expired one).</summary>
    public long CacheMisses { get; init; }
    /// <summary>Hits as a fraction of total requests; 0 when no requests have been made.</summary>
    public double HitRatio => TotalRequests > 0 ? (double)CacheHits / TotalRequests : 0;
    /// <summary>Number of entries currently stored.</summary>
    public long ItemCount { get; init; }
    /// <summary>Number of entries removed by eviction (expired or capacity).</summary>
    public long EvictionCount { get; init; }
}
/// <summary>
/// In-memory implementation of the reachability facts overlay cache.
/// Uses a time-based eviction strategy with configurable TTL.
/// </summary>
/// <remarks>
/// Thread-safe: entries live in a ConcurrentDictionary and stats counters are
/// updated via Interlocked. Eviction in <see cref="EnsureCapacity"/> is best-effort
/// and may briefly overshoot under concurrent writes.
/// </remarks>
public sealed class InMemoryReachabilityFactsOverlayCache : IReachabilityFactsOverlayCache
{
    private readonly ConcurrentDictionary<string, CacheEntry> _cache;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<InMemoryReachabilityFactsOverlayCache> _logger;
    private readonly TimeSpan _defaultTtl;
    private readonly int _maxItems;
    // Stats counters; read back with Interlocked.Read in GetStats.
    private long _totalRequests;
    private long _cacheHits;
    private long _cacheMisses;
    private long _evictionCount;

    /// <summary>
    /// Creates the cache, reading TTL and capacity from <see cref="ReachabilityFactsCacheOptions"/>
    /// (falls back to option defaults when PolicyEngineOptions is not supplied).
    /// </summary>
    public InMemoryReachabilityFactsOverlayCache(
        ILogger<InMemoryReachabilityFactsOverlayCache> logger,
        TimeProvider timeProvider,
        IOptions<PolicyEngineOptions> options)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _cache = new ConcurrentDictionary<string, CacheEntry>(StringComparer.Ordinal);
        var cacheOptions = options?.Value.ReachabilityCache ?? new ReachabilityFactsCacheOptions();
        _defaultTtl = TimeSpan.FromMinutes(cacheOptions.DefaultTtlMinutes);
        _maxItems = cacheOptions.MaxItems;
    }

    /// <summary>
    /// Gets a single fact; a present-but-expired entry counts as a miss and is removed.
    /// </summary>
    public Task<(ReachabilityFact? Fact, bool CacheHit)> GetAsync(
        ReachabilityFactKey key,
        CancellationToken cancellationToken = default)
    {
        Interlocked.Increment(ref _totalRequests);
        var cacheKey = key.ToCacheKey();
        var now = _timeProvider.GetUtcNow();
        if (_cache.TryGetValue(cacheKey, out var entry) && entry.ExpiresAt > now)
        {
            Interlocked.Increment(ref _cacheHits);
            return Task.FromResult<(ReachabilityFact?, bool)>((entry.Fact, true));
        }
        Interlocked.Increment(ref _cacheMisses);
        // Remove expired entry if present
        if (entry != null)
        {
            _cache.TryRemove(cacheKey, out _);
        }
        return Task.FromResult<(ReachabilityFact?, bool)>((null, false));
    }

    /// <summary>
    /// Batch lookup built on <see cref="GetAsync"/>; the returned hit/miss counts are
    /// per-batch (the global counters are updated by the per-key calls).
    /// </summary>
    public async Task<ReachabilityFactsBatch> GetBatchAsync(
        IReadOnlyList<ReachabilityFactKey> keys,
        CancellationToken cancellationToken = default)
    {
        var found = new Dictionary<ReachabilityFactKey, ReachabilityFact>();
        var notFound = new List<ReachabilityFactKey>();
        var cacheHits = 0;
        var cacheMisses = 0;
        foreach (var key in keys)
        {
            var (fact, hit) = await GetAsync(key, cancellationToken).ConfigureAwait(false);
            if (fact != null)
            {
                found[key] = fact;
                cacheHits++;
            }
            else
            {
                notFound.Add(key);
                cacheMisses++;
            }
        }
        return new ReachabilityFactsBatch
        {
            Found = found,
            NotFound = notFound,
            CacheHits = cacheHits,
            CacheMisses = cacheMisses,
        };
    }

    /// <summary>
    /// Stores a fact, using the fact's own ExpiresAt when it is in the future,
    /// otherwise the default TTL.
    /// </summary>
    /// <remarks>
    /// NOTE(review): a fact whose ExpiresAt is already in the past is still cached
    /// for the default TTL — confirm this is intended.
    /// </remarks>
    public Task SetAsync(
        ReachabilityFactKey key,
        ReachabilityFact fact,
        CancellationToken cancellationToken = default)
    {
        EnsureCapacity();
        var cacheKey = key.ToCacheKey();
        var now = _timeProvider.GetUtcNow();
        var ttl = fact.ExpiresAt.HasValue && fact.ExpiresAt.Value > now
            ? fact.ExpiresAt.Value - now
            : _defaultTtl;
        var entry = new CacheEntry(fact, now.Add(ttl));
        _cache[cacheKey] = entry;
        return Task.CompletedTask;
    }

    /// <summary>
    /// Stores several facts in one pass; TTL per entry follows the same rule as
    /// <see cref="SetAsync"/>.
    /// </summary>
    public Task SetBatchAsync(
        IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact> facts,
        CancellationToken cancellationToken = default)
    {
        EnsureCapacity(facts.Count);
        var now = _timeProvider.GetUtcNow();
        foreach (var (key, fact) in facts)
        {
            var cacheKey = key.ToCacheKey();
            var ttl = fact.ExpiresAt.HasValue && fact.ExpiresAt.Value > now
                ? fact.ExpiresAt.Value - now
                : _defaultTtl;
            var entry = new CacheEntry(fact, now.Add(ttl));
            _cache[cacheKey] = entry;
        }
        return Task.CompletedTask;
    }

    /// <summary>Removes a single entry if present.</summary>
    public Task InvalidateAsync(ReachabilityFactKey key, CancellationToken cancellationToken = default)
    {
        var cacheKey = key.ToCacheKey();
        _cache.TryRemove(cacheKey, out _);
        return Task.CompletedTask;
    }

    /// <summary>
    /// Removes every entry whose key belongs to the given tenant
    /// (cache keys are "rf:{tenant}:...").
    /// </summary>
    public Task InvalidateTenantAsync(string tenantId, CancellationToken cancellationToken = default)
    {
        var prefix = $"rf:{tenantId}:";
        var keysToRemove = _cache.Keys.Where(k => k.StartsWith(prefix, StringComparison.Ordinal)).ToList();
        foreach (var key in keysToRemove)
        {
            _cache.TryRemove(key, out _);
        }
        _logger.LogDebug("Invalidated {Count} cache entries for tenant {TenantId}", keysToRemove.Count, tenantId);
        return Task.CompletedTask;
    }

    /// <summary>Returns a snapshot of the stats counters.</summary>
    public ReachabilityFactsCacheStats GetStats()
    {
        return new ReachabilityFactsCacheStats
        {
            TotalRequests = Interlocked.Read(ref _totalRequests),
            CacheHits = Interlocked.Read(ref _cacheHits),
            CacheMisses = Interlocked.Read(ref _cacheMisses),
            ItemCount = _cache.Count,
            EvictionCount = Interlocked.Read(ref _evictionCount),
        };
    }

    /// <summary>
    /// Makes room for the given number of additional items: first drops expired
    /// entries, then the entries closest to expiry, plus a 10% headroom so eviction
    /// does not run on every insert.
    /// </summary>
    private void EnsureCapacity(int additionalItems = 1)
    {
        if (_cache.Count + additionalItems <= _maxItems)
        {
            return;
        }
        var now = _timeProvider.GetUtcNow();
        var itemsToRemove = _cache.Count + additionalItems - _maxItems + (_maxItems / 10); // Remove 10% extra
        // First, remove expired items
        var expiredKeys = _cache
            .Where(kvp => kvp.Value.ExpiresAt <= now)
            .Select(kvp => kvp.Key)
            .ToList();
        foreach (var key in expiredKeys)
        {
            if (_cache.TryRemove(key, out _))
            {
                Interlocked.Increment(ref _evictionCount);
                itemsToRemove--;
            }
        }
        if (itemsToRemove <= 0)
        {
            return;
        }
        // Then, remove oldest items by expiration time
        var oldestKeys = _cache
            .OrderBy(kvp => kvp.Value.ExpiresAt)
            .Take(itemsToRemove)
            .Select(kvp => kvp.Key)
            .ToList();
        foreach (var key in oldestKeys)
        {
            if (_cache.TryRemove(key, out _))
            {
                Interlocked.Increment(ref _evictionCount);
            }
        }
        _logger.LogDebug(
            "Evicted {EvictedCount} cache entries (expired: {ExpiredCount}, oldest: {OldestCount})",
            expiredKeys.Count + oldestKeys.Count,
            expiredKeys.Count,
            oldestKeys.Count);
    }

    // Fact plus its absolute expiry instant, as computed at insert time.
    private sealed record CacheEntry(ReachabilityFact Fact, DateTimeOffset ExpiresAt);
}
/// <summary>
/// Configuration options for the reachability facts cache.
/// </summary>
/// <remarks>
/// The Redis-related settings are not read by the in-memory implementation;
/// presumably they are reserved for a distributed cache layer — confirm before use.
/// </remarks>
public sealed class ReachabilityFactsCacheOptions
{
    /// <summary>
    /// Default TTL for cache entries in minutes. Defaults to 15.
    /// </summary>
    public int DefaultTtlMinutes { get; set; } = 15;
    /// <summary>
    /// Maximum number of items in the cache. Defaults to 100,000.
    /// </summary>
    public int MaxItems { get; set; } = 100000;
    /// <summary>
    /// Whether to enable Redis as a distributed cache layer.
    /// </summary>
    public bool EnableRedis { get; set; }
    /// <summary>
    /// Redis connection string.
    /// </summary>
    public string? RedisConnectionString { get; set; }
    /// <summary>
    /// Redis key prefix for reachability facts.
    /// </summary>
    public string RedisKeyPrefix { get; set; } = "stellaops:rf:";
}

View File

@@ -0,0 +1,213 @@
using System.Collections.Concurrent;
namespace StellaOps.Policy.Engine.ReachabilityFacts;
/// <summary>
/// Store interface for reachability facts persistence.
/// </summary>
public interface IReachabilityFactsStore
{
    /// <summary>
    /// Gets a single reachability fact by key. Returns null when not found.
    /// </summary>
    Task<ReachabilityFact?> GetAsync(
        string tenantId,
        string componentPurl,
        string advisoryId,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets multiple reachability facts by keys.
    /// The result contains only the keys that were found.
    /// </summary>
    Task<IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact>> GetBatchAsync(
        IReadOnlyList<ReachabilityFactKey> keys,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Queries reachability facts with filtering.
    /// </summary>
    Task<IReadOnlyList<ReachabilityFact>> QueryAsync(
        ReachabilityFactsQuery query,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Saves or updates a reachability fact (upsert semantics).
    /// </summary>
    Task SaveAsync(
        ReachabilityFact fact,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Saves multiple reachability facts.
    /// </summary>
    Task SaveBatchAsync(
        IReadOnlyList<ReachabilityFact> facts,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Deletes a reachability fact. No-op when the key does not exist.
    /// </summary>
    Task DeleteAsync(
        string tenantId,
        string componentPurl,
        string advisoryId,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets the count of facts for a tenant.
    /// </summary>
    Task<long> CountAsync(
        string tenantId,
        CancellationToken cancellationToken = default);
}
/// <summary>
/// In-memory implementation of the reachability facts store for development and testing.
/// Backed by a ConcurrentDictionary keyed by (tenant, purl, advisory).
/// </summary>
public sealed class InMemoryReachabilityFactsStore : IReachabilityFactsStore
{
    private readonly ConcurrentDictionary<ReachabilityFactKey, ReachabilityFact> _facts = new();
    private readonly TimeProvider _timeProvider;

    public InMemoryReachabilityFactsStore(TimeProvider? timeProvider = null)
        => _timeProvider = timeProvider ?? TimeProvider.System;

    /// <summary>Returns the fact for the given key, or null when absent.</summary>
    public Task<ReachabilityFact?> GetAsync(
        string tenantId,
        string componentPurl,
        string advisoryId,
        CancellationToken cancellationToken = default)
    {
        var found = _facts.TryGetValue(
            new ReachabilityFactKey(tenantId, componentPurl, advisoryId),
            out var fact);
        return Task.FromResult(found ? fact : null);
    }

    /// <summary>Returns only the requested keys that exist in the store.</summary>
    public Task<IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact>> GetBatchAsync(
        IReadOnlyList<ReachabilityFactKey> keys,
        CancellationToken cancellationToken = default)
    {
        var hits = new Dictionary<ReachabilityFactKey, ReachabilityFact>();
        foreach (var candidate in keys)
        {
            if (_facts.TryGetValue(candidate, out var fact))
            {
                hits[candidate] = fact;
            }
        }
        return Task.FromResult<IReadOnlyDictionary<ReachabilityFactKey, ReachabilityFact>>(hits);
    }

    /// <summary>
    /// Applies the query's optional filters (AND-combined), orders newest-first by
    /// ComputedAt, then pages with Skip/Limit.
    /// </summary>
    public Task<IReadOnlyList<ReachabilityFact>> QueryAsync(
        ReachabilityFactsQuery query,
        CancellationToken cancellationToken = default)
    {
        var now = _timeProvider.GetUtcNow();

        bool Matches(ReachabilityFact f) =>
            f.TenantId == query.TenantId
            && (query.ComponentPurls == null || query.ComponentPurls.Contains(f.ComponentPurl))
            && (query.AdvisoryIds == null || query.AdvisoryIds.Contains(f.AdvisoryId))
            && (query.States == null || query.States.Contains(f.State))
            && (!query.MinConfidence.HasValue || f.Confidence >= query.MinConfidence.Value)
            && (query.IncludeExpired || !f.ExpiresAt.HasValue || f.ExpiresAt > now);

        var page = _facts.Values
            .Where(Matches)
            .OrderByDescending(f => f.ComputedAt)
            .Skip(query.Skip)
            .Take(query.Limit)
            .ToList();
        return Task.FromResult<IReadOnlyList<ReachabilityFact>>(page);
    }

    /// <summary>Upserts a single fact.</summary>
    public Task SaveAsync(ReachabilityFact fact, CancellationToken cancellationToken = default)
    {
        _facts[new ReachabilityFactKey(fact.TenantId, fact.ComponentPurl, fact.AdvisoryId)] = fact;
        return Task.CompletedTask;
    }

    /// <summary>Upserts each fact in turn; later entries win on key collision.</summary>
    public Task SaveBatchAsync(IReadOnlyList<ReachabilityFact> facts, CancellationToken cancellationToken = default)
    {
        foreach (var fact in facts)
        {
            _facts[new ReachabilityFactKey(fact.TenantId, fact.ComponentPurl, fact.AdvisoryId)] = fact;
        }
        return Task.CompletedTask;
    }

    /// <summary>Removes a fact if present; no error when absent.</summary>
    public Task DeleteAsync(
        string tenantId,
        string componentPurl,
        string advisoryId,
        CancellationToken cancellationToken = default)
    {
        _facts.TryRemove(new ReachabilityFactKey(tenantId, componentPurl, advisoryId), out _);
        return Task.CompletedTask;
    }

    /// <summary>Counts facts belonging to the given tenant.</summary>
    public Task<long> CountAsync(string tenantId, CancellationToken cancellationToken = default)
    {
        var matching = _facts.Values.Count(f => f.TenantId == tenantId);
        return Task.FromResult((long)matching);
    }
}
/// <summary>
/// Index definitions for MongoDB reachability_facts collection.
/// </summary>
public static class ReachabilityFactsIndexes
{
    /// <summary>
    /// Primary compound index for efficient lookups.
    /// </summary>
    public const string PrimaryIndex = "tenant_component_advisory";

    /// <summary>
    /// Index for querying by tenant and state.
    /// </summary>
    public const string TenantStateIndex = "tenant_state_computed";

    /// <summary>
    /// Index for TTL expiration.
    /// </summary>
    public const string ExpirationIndex = "expires_at_ttl";

    /// <summary>
    /// Gets the index definitions for creating MongoDB indexes.
    /// </summary>
    public static IReadOnlyList<ReachabilityIndexDefinition> GetIndexDefinitions()
    {
        // Unique key: one fact per (tenant, component, advisory) triple.
        var primary = new ReachabilityIndexDefinition(
            PrimaryIndex,
            new[] { "tenant_id", "component_purl", "advisory_id" },
            Unique: true);
        // Supports tenant dashboards filtered by state, newest first.
        var tenantState = new ReachabilityIndexDefinition(
            TenantStateIndex,
            new[] { "tenant_id", "state", "computed_at" },
            Unique: false);
        // ExpireAfterSeconds: 0 means documents expire exactly at their expires_at value.
        var expiration = new ReachabilityIndexDefinition(
            ExpirationIndex,
            new[] { "expires_at" },
            Unique: false,
            ExpireAfterSeconds: 0);
        return new[] { primary, tenantState, expiration };
    }
}
/// <summary>
/// Index definition for MongoDB collection.
/// </summary>
/// <param name="Name">Index name as registered in MongoDB.</param>
/// <param name="Fields">Ordered list of field names making up the (compound) index.</param>
/// <param name="Unique">Whether the index enforces uniqueness.</param>
/// <param name="ExpireAfterSeconds">TTL in seconds for a TTL index; null for non-TTL indexes.</param>
public sealed record ReachabilityIndexDefinition(
    string Name,
    IReadOnlyList<string> Fields,
    bool Unique,
    int? ExpireAfterSeconds = null);

View File

@@ -0,0 +1,308 @@
using System.Collections.Immutable;
namespace StellaOps.Policy.Engine.SelectionJoin;
/// <summary>
/// PURL equivalence table for mapping package identifiers across ecosystems.
/// Enables matching when the same package has different identifiers in
/// different sources (e.g., npm vs GitHub advisory database naming).
/// </summary>
public sealed class PurlEquivalenceTable
{
private readonly ImmutableDictionary<string, ImmutableHashSet<string>> _equivalenceGroups;
private readonly ImmutableDictionary<string, string> _canonicalMapping;
    // Private: instances are created via Empty or FromGroups so that the
    // normalization invariant (trimmed, lower-cased keys) is established in one place.
    private PurlEquivalenceTable(
        ImmutableDictionary<string, ImmutableHashSet<string>> equivalenceGroups,
        ImmutableDictionary<string, string> canonicalMapping)
    {
        _equivalenceGroups = equivalenceGroups;
        _canonicalMapping = canonicalMapping;
    }
    /// <summary>
    /// Creates an empty equivalence table.
    /// Safe to share as a singleton: the table is immutable.
    /// </summary>
    public static PurlEquivalenceTable Empty { get; } = new(
        ImmutableDictionary<string, ImmutableHashSet<string>>.Empty,
        ImmutableDictionary<string, string>.Empty);
/// <summary>
/// Creates an equivalence table from a list of equivalence groups.
/// Each group contains PURLs that should be considered equivalent.
/// </summary>
public static PurlEquivalenceTable FromGroups(IEnumerable<IEnumerable<string>> groups)
{
var equivalenceBuilder = ImmutableDictionary.CreateBuilder<string, ImmutableHashSet<string>>(
StringComparer.OrdinalIgnoreCase);
var canonicalBuilder = ImmutableDictionary.CreateBuilder<string, string>(
StringComparer.OrdinalIgnoreCase);
foreach (var group in groups)
{
var normalizedGroup = group
.Where(p => !string.IsNullOrWhiteSpace(p))
.Select(p => p.Trim().ToLowerInvariant())
.Distinct()
.OrderBy(p => p, StringComparer.Ordinal)
.ToImmutableHashSet(StringComparer.OrdinalIgnoreCase);
if (normalizedGroup.Count < 2)
{
continue;
}
// First item (lexicographically) is the canonical form
var canonical = normalizedGroup.First();
foreach (var purl in normalizedGroup)
{
equivalenceBuilder[purl] = normalizedGroup;
canonicalBuilder[purl] = canonical;
}
}
return new PurlEquivalenceTable(
equivalenceBuilder.ToImmutable(),
canonicalBuilder.ToImmutable());
}
/// <summary>
/// Gets the canonical form of a PURL, or the original if not in the table.
/// </summary>
public string GetCanonical(string purl)
{
if (string.IsNullOrWhiteSpace(purl))
{
return string.Empty;
}
var normalized = purl.Trim().ToLowerInvariant();
return _canonicalMapping.TryGetValue(normalized, out var canonical)
? canonical
: normalized;
}
/// <summary>
/// Gets all equivalent PURLs for a given PURL.
/// </summary>
public IReadOnlySet<string> GetEquivalents(string purl)
{
if (string.IsNullOrWhiteSpace(purl))
{
return ImmutableHashSet<string>.Empty;
}
var normalized = purl.Trim().ToLowerInvariant();
return _equivalenceGroups.TryGetValue(normalized, out var group)
? group
: ImmutableHashSet.Create(StringComparer.OrdinalIgnoreCase, normalized);
}
/// <summary>
/// Checks if two PURLs are equivalent.
/// </summary>
public bool AreEquivalent(string purl1, string purl2)
{
if (string.IsNullOrWhiteSpace(purl1) || string.IsNullOrWhiteSpace(purl2))
{
return false;
}
var norm1 = purl1.Trim().ToLowerInvariant();
var norm2 = purl2.Trim().ToLowerInvariant();
if (string.Equals(norm1, norm2, StringComparison.Ordinal))
{
return true;
}
var canonical1 = GetCanonical(norm1);
var canonical2 = GetCanonical(norm2);
return string.Equals(canonical1, canonical2, StringComparison.Ordinal);
}
/// <summary>
/// Number of equivalence groups in the table.
/// </summary>
public int GroupCount => _equivalenceGroups
.Values
.Select(g => g.First())
.Distinct()
.Count();
/// <summary>
/// Total number of PURLs in the table.
/// </summary>
public int TotalEntries => _canonicalMapping.Count;
}
/// <summary>
/// Static utilities for PURL equivalence matching.
/// </summary>
public static class PurlEquivalence
{
    /// <summary>
    /// Extracts the package key from a PURL (removes version suffix).
    /// Example: "pkg:npm/lodash@4.17.21" → "pkg:npm/lodash"
    /// </summary>
    public static string ExtractPackageKey(string purl)
    {
        if (string.IsNullOrWhiteSpace(purl))
        {
            return string.Empty;
        }

        var candidate = purl.Trim();
        var versionSeparator = candidate.LastIndexOf('@');
        if (versionSeparator <= 0)
        {
            // No '@' at all, or '@' at position 0 — nothing to strip.
            return candidate;
        }

        // A scoped package (e.g. pkg:npm/@scope/package@1.0.0) contains an
        // earlier '@'; in that case the last '@' must be the version separator.
        var isScoped = candidate.IndexOf('@') < versionSeparator;
        if (isScoped)
        {
            return candidate[..versionSeparator];
        }

        // Otherwise only strip when the suffix plausibly looks like a version
        // (starts with a digit or a lowercase 'v').
        var suffix = candidate[(versionSeparator + 1)..];
        var looksLikeVersion = suffix.Length > 0 && (char.IsDigit(suffix[0]) || suffix[0] == 'v');
        return looksLikeVersion ? candidate[..versionSeparator] : candidate;
    }

    /// <summary>
    /// Extracts the ecosystem from a PURL.
    /// Example: "pkg:npm/lodash@4.17.21" → "npm"
    /// </summary>
    public static string? ExtractEcosystem(string purl)
    {
        var remainder = StripPkgPrefix(purl);
        if (remainder is null)
        {
            return null;
        }

        var slash = remainder.IndexOf('/');
        return slash > 0 ? remainder[..slash] : null;
    }

    /// <summary>
    /// Extracts the namespace from a PURL (if present).
    /// Example: "pkg:npm/@scope/package@1.0.0" → "@scope"
    /// </summary>
    public static string? ExtractNamespace(string purl)
    {
        var remainder = StripPkgPrefix(purl);
        if (remainder is null)
        {
            return null;
        }

        var ecosystemSlash = remainder.IndexOf('/');
        if (ecosystemSlash < 0)
        {
            return null;
        }

        // A second '/' after the ecosystem segment means a namespace is present.
        var afterEcosystem = remainder[(ecosystemSlash + 1)..];
        var namespaceSlash = afterEcosystem.IndexOf('/');
        return namespaceSlash > 0 ? afterEcosystem[..namespaceSlash] : null;
    }

    /// <summary>
    /// Extracts the package name from a PURL.
    /// Example: "pkg:npm/@scope/package@1.0.0" → "package"
    /// </summary>
    public static string? ExtractName(string purl)
    {
        var packageKey = ExtractPackageKey(purl);
        if (string.IsNullOrWhiteSpace(packageKey))
        {
            return null;
        }

        var lastSlash = packageKey.LastIndexOf('/');
        return lastSlash < 0 ? null : packageKey[(lastSlash + 1)..];
    }

    /// <summary>
    /// Computes match confidence between two PURLs.
    /// Returns 1.0 for exact match, 0.95 for equivalence-table match, 0.8 for
    /// package key match, 0.5 for same ecosystem and name, 0.0 for no match.
    /// </summary>
    public static double ComputeMatchConfidence(string purl1, string purl2, PurlEquivalenceTable? equivalenceTable = null)
    {
        if (string.IsNullOrWhiteSpace(purl1) || string.IsNullOrWhiteSpace(purl2))
        {
            return 0.0;
        }

        var left = purl1.Trim().ToLowerInvariant();
        var right = purl2.Trim().ToLowerInvariant();

        // Exact match.
        if (string.Equals(left, right, StringComparison.Ordinal))
        {
            return 1.0;
        }

        // Equivalence table match.
        if (equivalenceTable?.AreEquivalent(left, right) == true)
        {
            return 0.95;
        }

        // Package key match (same package, different version).
        var leftKey = ExtractPackageKey(left);
        if (!string.IsNullOrEmpty(leftKey) &&
            string.Equals(leftKey, ExtractPackageKey(right), StringComparison.OrdinalIgnoreCase))
        {
            return 0.8;
        }

        // Same ecosystem and name (different namespace).
        var leftEcosystem = ExtractEcosystem(left);
        var leftName = ExtractName(left);
        var sameEcosystemAndName =
            !string.IsNullOrEmpty(leftEcosystem) &&
            string.Equals(leftEcosystem, ExtractEcosystem(right), StringComparison.OrdinalIgnoreCase) &&
            !string.IsNullOrEmpty(leftName) &&
            string.Equals(leftName, ExtractName(right), StringComparison.OrdinalIgnoreCase);

        return sameEcosystemAndName ? 0.5 : 0.0;
    }

    // Returns the text after the "pkg:" scheme (case-insensitive), or null when
    // the input is blank or does not carry the prefix.
    private static string? StripPkgPrefix(string purl)
    {
        if (string.IsNullOrWhiteSpace(purl))
        {
            return null;
        }

        var trimmed = purl.Trim();
        return trimmed.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase)
            ? trimmed[4..]
            : null;
    }
}

View File

@@ -0,0 +1,192 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Policy.Engine.SelectionJoin;
/// <summary>
/// Represents an SBOM component for selection joining.
/// </summary>
/// <param name="Purl">Package URL (e.g., pkg:npm/lodash@4.17.21).</param>
/// <param name="Name">Component name.</param>
/// <param name="Version">Component version.</param>
/// <param name="Ecosystem">Package ecosystem (npm, maven, pypi, etc.); null when unknown.</param>
/// <param name="Metadata">Additional component metadata.</param>
public sealed record SbomComponentInput(
    string Purl,
    string Name,
    string Version,
    string? Ecosystem,
    ImmutableDictionary<string, string> Metadata)
{
    /// <summary>
    /// Extracts the package key from the PURL (removes version suffix).
    /// Computed on each access via <see cref="PurlEquivalence.ExtractPackageKey"/>.
    /// </summary>
    public string PackageKey => PurlEquivalence.ExtractPackageKey(Purl);
}
/// <summary>
/// Represents an advisory linkset reference for selection joining.
/// </summary>
/// <param name="AdvisoryId">Advisory identifier (CVE, GHSA, etc.).</param>
/// <param name="Source">Advisory source.</param>
/// <param name="Purls">Affected PURLs from the advisory.</param>
/// <param name="Cpes">Affected CPEs from the advisory.</param>
/// <param name="Aliases">Advisory aliases (e.g., CVE-2021-1234, GHSA-xxxx).</param>
/// <param name="Confidence">Linkset confidence score; null when the source provides
/// none. Presumably in [0, 1] — confirm against linkset producers.</param>
public sealed record AdvisoryLinksetInput(
    string AdvisoryId,
    string Source,
    ImmutableArray<string> Purls,
    ImmutableArray<string> Cpes,
    ImmutableArray<string> Aliases,
    double? Confidence);
/// <summary>
/// Represents a VEX linkset reference for selection joining.
/// </summary>
/// <param name="LinksetId">VEX linkset identifier.</param>
/// <param name="VulnerabilityId">Vulnerability identifier.</param>
/// <param name="ProductKey">Product key (PURL or CPE).</param>
/// <param name="Status">VEX status (not_affected, affected, fixed, under_investigation).</param>
/// <param name="Justification">VEX justification; null when the status carries none.</param>
/// <param name="Confidence">Linkset confidence level.</param>
public sealed record VexLinksetInput(
    string LinksetId,
    string VulnerabilityId,
    string ProductKey,
    string Status,
    string? Justification,
    VexConfidenceLevel Confidence);
/// <summary>
/// VEX confidence level enumeration.
/// Numeric values are ordered so that a higher value means higher confidence.
/// </summary>
public enum VexConfidenceLevel
{
    /// <summary>Low confidence.</summary>
    Low = 0,
    /// <summary>Medium confidence.</summary>
    Medium = 1,
    /// <summary>High confidence.</summary>
    High = 2
}
/// <summary>
/// Represents a resolved SBOM↔Advisory↔VEX tuple.
/// </summary>
/// <param name="TupleId">Deterministic identifier for this tuple (see <see cref="CreateTupleId"/>).</param>
/// <param name="Component">The SBOM component.</param>
/// <param name="Advisory">The matched advisory linkset.</param>
/// <param name="Vex">The matched VEX linkset (if any).</param>
/// <param name="MatchType">How the match was determined.</param>
/// <param name="MatchConfidence">Overall confidence in the match.</param>
public sealed record SelectionJoinTuple(
    string TupleId,
    SbomComponentInput Component,
    AdvisoryLinksetInput Advisory,
    VexLinksetInput? Vex,
    SelectionMatchType MatchType,
    double MatchConfidence)
{
    /// <summary>
    /// Creates a deterministic tuple ID from the key components.
    /// Tenant and PURL are trimmed and lower-cased before hashing; the advisory
    /// ID is only trimmed, so advisory IDs differing in case yield different IDs.
    /// Output format: "tuple:sha256:" followed by 64 lowercase hex characters.
    /// </summary>
    public static string CreateTupleId(string tenantId, string componentPurl, string advisoryId)
    {
        // Null-tolerant normalization keeps the ID stable for equivalent inputs.
        var normalizedTenant = (tenantId ?? string.Empty).Trim().ToLowerInvariant();
        var normalizedPurl = (componentPurl ?? string.Empty).Trim().ToLowerInvariant();
        var normalizedAdvisory = (advisoryId ?? string.Empty).Trim();
        // '|' delimits the segments; the SHA-256 digest of the UTF-8 bytes is the ID body.
        var input = $"{normalizedTenant}|{normalizedPurl}|{normalizedAdvisory}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        return $"tuple:sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}
/// <summary>
/// How the selection match was determined.
/// </summary>
public enum SelectionMatchType
{
    /// <summary>Exact PURL match.</summary>
    ExactPurl,
    /// <summary>Package key match (same package, different version).</summary>
    PackageKeyMatch,
    /// <summary>CPE vendor/product match.</summary>
    CpeMatch,
    /// <summary>Alias-based match.</summary>
    AliasMatch,
    /// <summary>Equivalence table match.</summary>
    EquivalenceMatch,
    /// <summary>No direct match, linked via advisory reference.</summary>
    IndirectMatch
}
/// <summary>
/// Input for a selection join batch operation.
/// </summary>
/// <param name="TenantId">Tenant identifier.</param>
/// <param name="BatchId">Unique batch identifier for tracing; echoed back in the result.</param>
/// <param name="Components">SBOM components to match.</param>
/// <param name="Advisories">Advisory linksets to match against.</param>
/// <param name="VexLinksets">VEX linksets to include.</param>
/// <param name="EquivalenceTable">Optional PURL equivalence mappings; null falls back to an empty table.</param>
/// <param name="Options">Batch processing options.</param>
public sealed record SelectionJoinBatchInput(
    string TenantId,
    string BatchId,
    ImmutableArray<SbomComponentInput> Components,
    ImmutableArray<AdvisoryLinksetInput> Advisories,
    ImmutableArray<VexLinksetInput> VexLinksets,
    PurlEquivalenceTable? EquivalenceTable,
    SelectionJoinOptions Options);
/// <summary>
/// Options for selection join batch processing.
/// </summary>
/// <param name="MaxBatchSize">Maximum items per batch for deterministic chunking;
/// non-positive values are treated as 1000 by <see cref="SelectionJoinService"/>.</param>
/// <param name="IncludeIndirectMatches">Include indirect matches via advisory references.
/// NOTE(review): not currently honored by <see cref="SelectionJoinService"/> — confirm intent.</param>
/// <param name="MinConfidenceThreshold">Minimum confidence to include in results.</param>
public sealed record SelectionJoinOptions(
    int MaxBatchSize = 1000,
    bool IncludeIndirectMatches = false,
    double MinConfidenceThreshold = 0.0);
/// <summary>
/// Result of a selection join batch operation.
/// </summary>
/// <param name="BatchId">Batch identifier for tracing (copied from the input).</param>
/// <param name="Tuples">Resolved tuples.</param>
/// <param name="UnmatchedComponents">Components with no advisory matches.</param>
/// <param name="Statistics">Batch statistics.</param>
public sealed record SelectionJoinBatchResult(
    string BatchId,
    ImmutableArray<SelectionJoinTuple> Tuples,
    ImmutableArray<SbomComponentInput> UnmatchedComponents,
    SelectionJoinStatistics Statistics);
/// <summary>
/// Statistics for a selection join batch.
/// </summary>
/// <param name="TotalComponents">Total components in input.</param>
/// <param name="TotalAdvisories">Total advisories in input.</param>
/// <param name="MatchedTuples">Number of matched tuples.</param>
/// <param name="ExactPurlMatches">Exact PURL matches.</param>
/// <param name="PackageKeyMatches">Package key matches.</param>
/// <param name="CpeMatches">CPE matches.</param>
/// <param name="EquivalenceMatches">Equivalence table matches.</param>
/// <param name="VexOverlays">Tuples with VEX overlays.</param>
/// <param name="ProcessingTimeMs">Processing time in milliseconds.</param>
public sealed record SelectionJoinStatistics(
    int TotalComponents,
    int TotalAdvisories,
    int MatchedTuples,
    int ExactPurlMatches,
    int PackageKeyMatches,
    int CpeMatches,
    int EquivalenceMatches,
    int VexOverlays,
    long ProcessingTimeMs);

View File

@@ -0,0 +1,390 @@
using System.Collections.Immutable;
using System.Diagnostics;
namespace StellaOps.Policy.Engine.SelectionJoin;
/// <summary>
/// Service for resolving SBOM↔Advisory↔VEX tuples using linksets and PURL equivalence.
/// All operations are deterministic: given identical inputs, produces identical outputs.
/// </summary>
public sealed class SelectionJoinService
{
    /// <summary>
    /// Resolves SBOM components against advisory and VEX linksets.
    /// Uses deterministic batching for large datasets: components are sorted,
    /// chunked, matched, and the results re-sorted before being returned.
    /// </summary>
    /// <param name="input">Components, advisories, VEX linksets, optional equivalence table, and options.</param>
    /// <returns>
    /// Tuples sorted by component PURL then advisory ID, unmatched components sorted
    /// by PURL, and batch statistics. Note: components whose every candidate falls
    /// below <see cref="SelectionJoinOptions.MinConfidenceThreshold"/> produce no
    /// tuples but are NOT reported as unmatched (only zero-candidate components are).
    /// </returns>
    /// <exception cref="ArgumentNullException">When <paramref name="input"/> is null.</exception>
    public SelectionJoinBatchResult ResolveTuples(SelectionJoinBatchInput input)
    {
        ArgumentNullException.ThrowIfNull(input);
        var stopwatch = Stopwatch.StartNew();
        var equivalenceTable = input.EquivalenceTable ?? PurlEquivalenceTable.Empty;
        var options = input.Options;

        // Build lookup indexes once so matching is O(1) per probe.
        var advisoryIndex = BuildAdvisoryIndex(input.Advisories);
        var vexIndex = BuildVexIndex(input.VexLinksets);

        // Process components in deterministic order regardless of input order.
        var orderedComponents = input.Components
            .OrderBy(c => c.Purl, StringComparer.OrdinalIgnoreCase)
            .ThenBy(c => c.Name, StringComparer.OrdinalIgnoreCase)
            .ToImmutableArray();

        var tuples = new List<SelectionJoinTuple>();
        var unmatched = new List<SbomComponentInput>();
        var stats = new SelectionJoinStatsBuilder
        {
            TotalComponents = orderedComponents.Length,
            TotalAdvisories = input.Advisories.Length,
        };

        // Chunked processing keeps peak memory bounded for large inputs.
        var batches = CreateDeterministicBatches(orderedComponents, options.MaxBatchSize);
        foreach (var batch in batches)
        {
            ProcessBatch(
                batch,
                input.TenantId,
                advisoryIndex,
                vexIndex,
                equivalenceTable,
                options,
                tuples,
                unmatched,
                stats);
        }

        stopwatch.Stop();
        stats.ProcessingTimeMs = stopwatch.ElapsedMilliseconds;

        // Re-sort accumulated results so the output ordering is a total, stable order.
        var sortedTuples = tuples
            .OrderBy(t => t.Component.Purl, StringComparer.OrdinalIgnoreCase)
            .ThenBy(t => t.Advisory.AdvisoryId, StringComparer.OrdinalIgnoreCase)
            .ToImmutableArray();
        var sortedUnmatched = unmatched
            .OrderBy(c => c.Purl, StringComparer.OrdinalIgnoreCase)
            .ToImmutableArray();

        return new SelectionJoinBatchResult(
            input.BatchId,
            sortedTuples,
            sortedUnmatched,
            stats.Build());
    }

    /// <summary>
    /// Matches each component in a batch against the advisory index, overlays a
    /// VEX statement when one applies, and appends tuples / unmatched entries
    /// while updating statistics.
    /// </summary>
    private static void ProcessBatch(
        IReadOnlyList<SbomComponentInput> components,
        string tenantId,
        AdvisoryIndex advisoryIndex,
        VexIndex vexIndex,
        PurlEquivalenceTable equivalenceTable,
        SelectionJoinOptions options,
        List<SelectionJoinTuple> tuples,
        List<SbomComponentInput> unmatched,
        SelectionJoinStatsBuilder stats)
    {
        foreach (var component in components)
        {
            var matches = FindAdvisoryMatches(component, advisoryIndex, equivalenceTable, options);
            if (matches.Count == 0)
            {
                unmatched.Add(component);
                continue;
            }

            foreach (var (advisory, matchType, confidence) in matches)
            {
                // Candidates below the threshold are dropped silently.
                if (confidence < options.MinConfidenceThreshold)
                {
                    continue;
                }

                // Overlay the VEX statement for this vulnerability/product, if any.
                var vex = FindVexMatch(component, advisory, vexIndex);
                var tupleId = SelectionJoinTuple.CreateTupleId(
                    tenantId,
                    component.Purl,
                    advisory.AdvisoryId);
                tuples.Add(new SelectionJoinTuple(
                    tupleId,
                    component,
                    advisory,
                    vex,
                    matchType,
                    confidence));

                stats.MatchedTuples++;
                switch (matchType)
                {
                    case SelectionMatchType.ExactPurl:
                        stats.ExactPurlMatches++;
                        break;
                    case SelectionMatchType.PackageKeyMatch:
                        stats.PackageKeyMatches++;
                        break;
                    case SelectionMatchType.CpeMatch:
                        stats.CpeMatches++;
                        break;
                    case SelectionMatchType.EquivalenceMatch:
                        stats.EquivalenceMatches++;
                        break;
                }

                if (vex is not null)
                {
                    stats.VexOverlays++;
                }
            }
        }
    }

    /// <summary>
    /// Finds candidate advisories for a component in three passes of decreasing
    /// priority: exact PURL (base confidence 1.0), package key (0.8), equivalence
    /// table (0.9 applied to equivalent purls' package keys). Each advisory is
    /// reported at most once, by the first pass that finds it; results are sorted
    /// by confidence descending with advisory ID as tie-breaker.
    /// NOTE(review): <paramref name="options"/> is currently unused here, and
    /// CpeMatch / AliasMatch / IndirectMatch are never produced — confirm whether
    /// those strategies are still planned.
    /// </summary>
    private static IReadOnlyList<(AdvisoryLinksetInput Advisory, SelectionMatchType MatchType, double Confidence)> FindAdvisoryMatches(
        SbomComponentInput component,
        AdvisoryIndex index,
        PurlEquivalenceTable equivalenceTable,
        SelectionJoinOptions options)
    {
        var matches = new List<(AdvisoryLinksetInput, SelectionMatchType, double)>();
        // O(1) duplicate suppression across (and within) the match passes.
        // Replaces the previous List.Any scans, which were quadratic in the number
        // of matches, and also prevents duplicate tuples (with identical tuple IDs)
        // when an advisory lists the same purl more than once.
        var matchedIds = new HashSet<string>(StringComparer.Ordinal);
        var componentPurl = component.Purl.ToLowerInvariant();
        var componentKey = component.PackageKey.ToLowerInvariant();

        // 1. Exact PURL match (highest confidence).
        if (index.ByExactPurl.TryGetValue(componentPurl, out var exactMatches))
        {
            foreach (var advisory in exactMatches)
            {
                if (matchedIds.Add(advisory.AdvisoryId))
                {
                    matches.Add((advisory, SelectionMatchType.ExactPurl, ComputeFinalConfidence(1.0, advisory.Confidence)));
                }
            }
        }

        // 2. Package key match (same package, possibly different version).
        if (index.ByPackageKey.TryGetValue(componentKey, out var keyMatches))
        {
            foreach (var advisory in keyMatches)
            {
                if (matchedIds.Add(advisory.AdvisoryId))
                {
                    matches.Add((advisory, SelectionMatchType.PackageKeyMatch, ComputeFinalConfidence(0.8, advisory.Confidence)));
                }
            }
        }

        // 3. Equivalence table match: probe the package keys of all equivalent purls.
        foreach (var equivalent in equivalenceTable.GetEquivalents(componentPurl))
        {
            if (string.Equals(equivalent, componentPurl, StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            var equivalentKey = PurlEquivalence.ExtractPackageKey(equivalent).ToLowerInvariant();
            if (index.ByPackageKey.TryGetValue(equivalentKey, out var equivMatches))
            {
                foreach (var advisory in equivMatches)
                {
                    if (matchedIds.Add(advisory.AdvisoryId))
                    {
                        matches.Add((advisory, SelectionMatchType.EquivalenceMatch, ComputeFinalConfidence(0.9, advisory.Confidence)));
                    }
                }
            }
        }

        // Sort by confidence (descending) with advisory ID as tie-breaker so the
        // candidate order is deterministic regardless of index/set iteration order.
        return matches
            .OrderByDescending(m => m.Item3)
            .ThenBy(m => m.Item1.AdvisoryId, StringComparer.OrdinalIgnoreCase)
            .ToList();
    }

    /// <summary>
    /// Looks up a VEX statement for the advisory — first via each alias, then via
    /// the advisory's own ID — matching the component's exact PURL before falling
    /// back to its package key. Returns null when no statement applies.
    /// </summary>
    private static VexLinksetInput? FindVexMatch(
        SbomComponentInput component,
        AdvisoryLinksetInput advisory,
        VexIndex vexIndex)
    {
        var purlKey = component.Purl.ToLowerInvariant();
        var packageKey = component.PackageKey.ToLowerInvariant();

        foreach (var alias in advisory.Aliases)
        {
            var aliasKey = alias.ToLowerInvariant();
            if (vexIndex.ByVulnAndProduct.TryGetValue($"{aliasKey}|{purlKey}", out var vex))
            {
                return vex;
            }
            if (vexIndex.ByVulnAndPackageKey.TryGetValue($"{aliasKey}|{packageKey}", out vex))
            {
                return vex;
            }
        }

        // Fall back to the advisory's own identifier against the exact product key.
        var directKey = $"{advisory.AdvisoryId.ToLowerInvariant()}|{purlKey}";
        return vexIndex.ByVulnAndProduct.TryGetValue(directKey, out var directVex)
            ? directVex
            : null;
    }

    /// <summary>
    /// Combines the match-strategy confidence with the linkset's own confidence
    /// (defaulting to 1.0 when absent) using the geometric mean.
    /// </summary>
    private static double ComputeFinalConfidence(double matchConfidence, double? linksetConfidence)
        => Math.Sqrt(matchConfidence * (linksetConfidence ?? 1.0));

    /// <summary>
    /// Builds purl → advisories and package-key → advisories lookup tables.
    /// The package-key table holds each advisory at most once per key (dedup by
    /// advisory ID, as before, but via a hash set instead of list scans).
    /// </summary>
    private static AdvisoryIndex BuildAdvisoryIndex(ImmutableArray<AdvisoryLinksetInput> advisories)
    {
        var byExactPurl = new Dictionary<string, List<AdvisoryLinksetInput>>(StringComparer.OrdinalIgnoreCase);
        var byPackageKey = new Dictionary<string, List<AdvisoryLinksetInput>>(StringComparer.OrdinalIgnoreCase);
        // Advisory IDs already stored per package key; O(1) membership instead of
        // scanning the per-key list for every purl (previously quadratic per key).
        var idsPerPackageKey = new Dictionary<string, HashSet<string>>(StringComparer.OrdinalIgnoreCase);

        foreach (var advisory in advisories)
        {
            foreach (var purl in advisory.Purls)
            {
                var normalizedPurl = purl.ToLowerInvariant();

                if (!byExactPurl.TryGetValue(normalizedPurl, out var exactList))
                {
                    exactList = new List<AdvisoryLinksetInput>();
                    byExactPurl[normalizedPurl] = exactList;
                }
                exactList.Add(advisory);

                var packageKey = PurlEquivalence.ExtractPackageKey(normalizedPurl);
                if (string.IsNullOrEmpty(packageKey))
                {
                    continue;
                }

                if (!idsPerPackageKey.TryGetValue(packageKey, out var ids))
                {
                    ids = new HashSet<string>(StringComparer.Ordinal);
                    idsPerPackageKey[packageKey] = ids;
                }

                if (ids.Add(advisory.AdvisoryId))
                {
                    if (!byPackageKey.TryGetValue(packageKey, out var keyList))
                    {
                        keyList = new List<AdvisoryLinksetInput>();
                        byPackageKey[packageKey] = keyList;
                    }
                    keyList.Add(advisory);
                }
            }
        }

        return new AdvisoryIndex(
            byExactPurl.ToImmutableDictionary(
                kvp => kvp.Key,
                kvp => kvp.Value.ToImmutableArray(),
                StringComparer.OrdinalIgnoreCase),
            byPackageKey.ToImmutableDictionary(
                kvp => kvp.Key,
                kvp => kvp.Value.ToImmutableArray(),
                StringComparer.OrdinalIgnoreCase));
    }

    /// <summary>
    /// Builds "vuln|product" and "vuln|packageKey" → VEX lookup tables.
    /// On key collision the first statement (input order) wins, as before.
    /// </summary>
    private static VexIndex BuildVexIndex(ImmutableArray<VexLinksetInput> vexLinksets)
    {
        var byVulnAndProduct = new Dictionary<string, VexLinksetInput>(StringComparer.OrdinalIgnoreCase);
        var byVulnAndPackageKey = new Dictionary<string, VexLinksetInput>(StringComparer.OrdinalIgnoreCase);
        foreach (var vex in vexLinksets)
        {
            var vulnKey = vex.VulnerabilityId.ToLowerInvariant();
            var productKey = vex.ProductKey.ToLowerInvariant();
            var packageKey = PurlEquivalence.ExtractPackageKey(productKey);

            byVulnAndProduct.TryAdd($"{vulnKey}|{productKey}", vex);
            if (!string.IsNullOrEmpty(packageKey))
            {
                byVulnAndPackageKey.TryAdd($"{vulnKey}|{packageKey}", vex);
            }
        }
        return new VexIndex(
            byVulnAndProduct.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase),
            byVulnAndPackageKey.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase));
    }

    /// <summary>
    /// Splits the (already sorted) component array into contiguous chunks of at
    /// most <paramref name="batchSize"/> items. Non-positive sizes fall back to 1000.
    /// </summary>
    private static IReadOnlyList<IReadOnlyList<SbomComponentInput>> CreateDeterministicBatches(
        ImmutableArray<SbomComponentInput> components,
        int batchSize)
    {
        if (batchSize <= 0)
        {
            batchSize = 1000;
        }

        var batches = new List<IReadOnlyList<SbomComponentInput>>();
        for (var offset = 0; offset < components.Length; offset += batchSize)
        {
            var count = Math.Min(batchSize, components.Length - offset);
            var batch = new List<SbomComponentInput>(count);
            for (var j = 0; j < count; j++)
            {
                batch.Add(components[offset + j]);
            }
            batches.Add(batch);
        }
        return batches;
    }

    // Lookup tables built once per batch: purl → advisories and package key → advisories.
    private sealed record AdvisoryIndex(
        ImmutableDictionary<string, ImmutableArray<AdvisoryLinksetInput>> ByExactPurl,
        ImmutableDictionary<string, ImmutableArray<AdvisoryLinksetInput>> ByPackageKey);

    // Lookup tables built once per batch: "vuln|product" and "vuln|packageKey" → VEX.
    private sealed record VexIndex(
        ImmutableDictionary<string, VexLinksetInput> ByVulnAndProduct,
        ImmutableDictionary<string, VexLinksetInput> ByVulnAndPackageKey);

    // Mutable accumulator for batch statistics; frozen via Build().
    private sealed class SelectionJoinStatsBuilder
    {
        public int TotalComponents { get; set; }
        public int TotalAdvisories { get; set; }
        public int MatchedTuples { get; set; }
        public int ExactPurlMatches { get; set; }
        public int PackageKeyMatches { get; set; }
        public int CpeMatches { get; set; }
        public int EquivalenceMatches { get; set; }
        public int VexOverlays { get; set; }
        public long ProcessingTimeMs { get; set; }

        public SelectionJoinStatistics Build() => new(
            TotalComponents,
            TotalAdvisories,
            MatchedTuples,
            ExactPurlMatches,
            PackageKeyMatches,
            CpeMatches,
            EquivalenceMatches,
            VexOverlays,
            ProcessingTimeMs);
    }
}

View File

@@ -0,0 +1,212 @@
using StellaOps.Policy.Engine.Domain;
using StellaOps.Policy.Engine.Violations;
namespace StellaOps.Policy.Engine.Services;
/// <summary>
/// API/SDK utilities for consumers to request policy decisions with source evidence summaries (POLICY-ENGINE-40-003).
/// Combines policy evaluation with severity fusion, conflict detection, and evidence summaries.
/// </summary>
internal sealed class PolicyDecisionService
{
    private readonly ViolationEventService _eventService;
    private readonly SeverityFusionService _fusionService;
    private readonly ConflictHandlingService _conflictService;
    private readonly EvidenceSummaryService _evidenceService;

    public PolicyDecisionService(
        ViolationEventService eventService,
        SeverityFusionService fusionService,
        ConflictHandlingService conflictService,
        EvidenceSummaryService evidenceService)
    {
        _eventService = eventService ?? throw new ArgumentNullException(nameof(eventService));
        _fusionService = fusionService ?? throw new ArgumentNullException(nameof(fusionService));
        _conflictService = conflictService ?? throw new ArgumentNullException(nameof(conflictService));
        _evidenceService = evidenceService ?? throw new ArgumentNullException(nameof(evidenceService));
    }

    /// <summary>
    /// Request policy decisions with source evidence summaries for a given snapshot.
    /// </summary>
    /// <param name="request">Snapshot ID plus optional tenant/component/advisory filters.</param>
    /// <param name="cancellationToken">Token flowed to the underlying services.</param>
    /// <returns>Decisions ordered by component PURL, advisory ID, then tenant ID, plus summary statistics.</returns>
    /// <exception cref="ArgumentNullException">When <paramref name="request"/> is null.</exception>
    /// <exception cref="ArgumentException">When the request has no snapshot ID.</exception>
    public async Task<PolicyDecisionResponse> GetDecisionsAsync(
        PolicyDecisionRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        if (string.IsNullOrWhiteSpace(request.SnapshotId))
        {
            throw new ArgumentException("snapshot_id is required", nameof(request));
        }

        // 1. Emit violation events from snapshot.
        var eventRequest = new ViolationEventRequest(request.SnapshotId);
        await _eventService.EmitAsync(eventRequest, cancellationToken).ConfigureAwait(false);

        // 2. Get fused severities with sources.
        var fused = await _fusionService.FuseAsync(request.SnapshotId, cancellationToken).ConfigureAwait(false);

        // 3. Compute conflicts.
        var conflicts = await _conflictService.ComputeAsync(request.SnapshotId, fused, cancellationToken).ConfigureAwait(false);

        // 4. Build decision items with evidence summaries.
        var decisions = BuildDecisionItems(request, fused, conflicts);

        // 5. Build summary statistics.
        var summary = BuildSummary(decisions, fused);

        return new PolicyDecisionResponse(
            SnapshotId: request.SnapshotId,
            Decisions: decisions,
            Summary: summary);
    }

    /// <summary>
    /// Projects fusion results into decision items, applying the request's
    /// tenant/component/advisory filters and attaching conflict counts and
    /// (optionally) evidence summaries. Output is deterministically ordered.
    /// </summary>
    private IReadOnlyList<PolicyDecisionItem> BuildDecisionItems(
        PolicyDecisionRequest request,
        IReadOnlyList<SeverityFusionResult> fused,
        IReadOnlyList<ConflictRecord> conflicts)
    {
        // Conflict count per (component, advisory) pair.
        var conflictLookup = conflicts
            .GroupBy(c => (c.ComponentPurl, c.AdvisoryId))
            .ToDictionary(
                g => g.Key,
                g => g.Sum(c => c.Conflicts.Count));

        var items = new List<PolicyDecisionItem>(fused.Count);
        foreach (var fusion in fused)
        {
            // Apply filters if specified. Note: component PURL is matched
            // case-sensitively, unlike tenant and advisory.
            if (!string.IsNullOrWhiteSpace(request.TenantId) &&
                !string.Equals(fusion.TenantId, request.TenantId, StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }
            if (!string.IsNullOrWhiteSpace(request.ComponentPurl) &&
                !string.Equals(fusion.ComponentPurl, request.ComponentPurl, StringComparison.Ordinal))
            {
                continue;
            }
            if (!string.IsNullOrWhiteSpace(request.AdvisoryId) &&
                !string.Equals(fusion.AdvisoryId, request.AdvisoryId, StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            // Build top sources (limited by MaxSources). The trailing ThenBy on
            // source name gives rank assignment a total order, so ties on
            // score/weight cannot make ranks depend on input ordering — matching
            // the deterministic-output convention used elsewhere in the engine.
            var topSources = fusion.Sources
                .OrderByDescending(s => s.Score)
                .ThenByDescending(s => s.Weight)
                .ThenBy(s => s.Source, StringComparer.Ordinal)
                .Take(request.MaxSources)
                .Select((s, index) => new PolicyDecisionSource(
                    Source: s.Source,
                    Weight: s.Weight,
                    Severity: s.Severity,
                    Score: s.Score,
                    Rank: index + 1))
                .ToList();

            // Build evidence summary if requested.
            PolicyDecisionEvidence? evidence = null;
            if (request.IncludeEvidence)
            {
                evidence = BuildEvidence(fusion);
            }

            // Get conflict count for this component/advisory pair.
            var conflictKey = (fusion.ComponentPurl, fusion.AdvisoryId);
            var conflictCount = conflictLookup.GetValueOrDefault(conflictKey, 0);

            // Derive status from severity.
            var status = DeriveStatus(fusion.SeverityFused);

            items.Add(new PolicyDecisionItem(
                TenantId: fusion.TenantId,
                ComponentPurl: fusion.ComponentPurl,
                AdvisoryId: fusion.AdvisoryId,
                SeverityFused: fusion.SeverityFused,
                Score: fusion.Score,
                Status: status,
                TopSources: topSources,
                Evidence: evidence,
                ConflictCount: conflictCount,
                ReasonCodes: fusion.ReasonCodes));
        }

        // Return deterministically ordered results.
        return items
            .OrderBy(i => i.ComponentPurl, StringComparer.Ordinal)
            .ThenBy(i => i.AdvisoryId, StringComparer.Ordinal)
            .ThenBy(i => i.TenantId, StringComparer.Ordinal)
            .ToList();
    }

    /// <summary>
    /// Builds the evidence summary for a fused result via the evidence service.
    /// NOTE(review): the "evidence hash" passed along is a composite key
    /// ("purl|advisory|snapshot"), not a digest — confirm that is what
    /// EvidenceSummaryService expects.
    /// </summary>
    private PolicyDecisionEvidence BuildEvidence(SeverityFusionResult fusion)
    {
        // Deterministic composite identifier for the fusion result.
        var evidenceHash = $"{fusion.ComponentPurl}|{fusion.AdvisoryId}|{fusion.SnapshotId}";
        var evidenceRequest = new EvidenceSummaryRequest(
            EvidenceHash: evidenceHash,
            FilePath: fusion.ComponentPurl,
            Digest: null,
            IngestedAt: null,
            ConnectorId: fusion.Sources.FirstOrDefault()?.Source);
        var response = _evidenceService.Summarize(evidenceRequest);
        return new PolicyDecisionEvidence(
            Headline: response.Summary.Headline,
            Severity: response.Summary.Severity,
            Locator: new PolicyDecisionLocator(
                FilePath: response.Summary.Locator.FilePath,
                Digest: response.Summary.Locator.Digest),
            Signals: response.Summary.Signals);
    }

    /// <summary>
    /// Aggregates decision items and fusion results into response-level statistics.
    /// </summary>
    private static PolicyDecisionSummary BuildSummary(
        IReadOnlyList<PolicyDecisionItem> decisions,
        IReadOnlyList<SeverityFusionResult> fused)
    {
        // Count decisions by severity.
        var severityCounts = decisions
            .GroupBy(d => d.SeverityFused, StringComparer.OrdinalIgnoreCase)
            .ToDictionary(
                g => g.Key,
                g => g.Count(),
                StringComparer.OrdinalIgnoreCase);

        // Calculate total conflicts.
        var totalConflicts = decisions.Sum(d => d.ConflictCount);

        // Aggregate source ranks across all fused results (unfiltered input).
        // The trailing ThenBy on source name makes the ranking a total order.
        var sourceStats = fused
            .SelectMany(f => f.Sources)
            .GroupBy(s => s.Source, StringComparer.OrdinalIgnoreCase)
            .Select(g => new PolicyDecisionSourceRank(
                Source: g.Key,
                TotalWeight: g.Sum(s => s.Weight),
                DecisionCount: g.Count(),
                AverageScore: g.Average(s => s.Score)))
            .OrderByDescending(r => r.TotalWeight)
            .ThenByDescending(r => r.AverageScore)
            .ThenBy(r => r.Source, StringComparer.Ordinal)
            .ToList();

        return new PolicyDecisionSummary(
            TotalDecisions: decisions.Count,
            TotalConflicts: totalConflicts,
            SeverityCounts: severityCounts,
            TopSeveritySources: sourceStats);
    }

    /// <summary>
    /// Maps a fused severity string (case-insensitive) to a decision status:
    /// critical/high → "violation", medium → "warn", anything else → "ok".
    /// </summary>
    private static string DeriveStatus(string severity) => severity.ToLowerInvariant() switch
    {
        "critical" => "violation",
        "high" => "violation",
        "medium" => "warn",
        _ => "ok"
    };
}

View File

@@ -198,10 +198,11 @@ public sealed class RiskProfileConfigurationService
var validation = _validator.Validate(json);
if (!validation.IsValid)
{
var errorMessages = validation.Errors?.Values ?? Enumerable.Empty<string>();
_logger.LogWarning(
"Risk profile file '{File}' failed validation: {Errors}",
file,
string.Join("; ", validation.Message ?? "Unknown error"));
string.Join("; ", errorMessages.Any() ? errorMessages : new[] { "Unknown error" }));
continue;
}
}

View File

@@ -0,0 +1,140 @@
using System.Text.Json.Serialization;
using StellaOps.Policy.RiskProfile.Models;
namespace StellaOps.Policy.Engine.Simulation;
/// <summary>
/// Request to run a risk simulation.
/// </summary>
/// <param name="ProfileId">Identifier of the risk profile to simulate against.</param>
/// <param name="ProfileVersion">Optional profile version; semantics of null (presumably
/// "latest") are resolved by the simulation service — confirm against the handler.</param>
/// <param name="Findings">Findings to score in this simulation run.</param>
/// <param name="IncludeContributions">Requests per-signal contributions in the result.</param>
/// <param name="IncludeDistribution">Requests the score distribution in the result.</param>
/// <param name="Mode">Simulation mode; defaults to <see cref="SimulationMode.Full"/>.</param>
public sealed record RiskSimulationRequest(
    [property: JsonPropertyName("profile_id")] string ProfileId,
    [property: JsonPropertyName("profile_version")] string? ProfileVersion,
    [property: JsonPropertyName("findings")] IReadOnlyList<SimulationFinding> Findings,
    [property: JsonPropertyName("include_contributions")] bool IncludeContributions = true,
    [property: JsonPropertyName("include_distribution")] bool IncludeDistribution = true,
    [property: JsonPropertyName("simulation_mode")] SimulationMode Mode = SimulationMode.Full);
/// <summary>
/// A finding to include in the simulation.
/// </summary>
/// <param name="FindingId">Caller-supplied finding identifier, echoed back in scores.</param>
/// <param name="ComponentPurl">Optional component PURL the finding relates to.</param>
/// <param name="AdvisoryId">Optional advisory identifier the finding relates to.</param>
/// <param name="Signals">Raw signal values keyed by signal name.
/// NOTE: a mutable Dictionary participates in record equality by reference,
/// so two findings with identical signal content are not Equal.</param>
public sealed record SimulationFinding(
    [property: JsonPropertyName("finding_id")] string FindingId,
    [property: JsonPropertyName("component_purl")] string? ComponentPurl,
    [property: JsonPropertyName("advisory_id")] string? AdvisoryId,
    [property: JsonPropertyName("signals")] Dictionary<string, object?> Signals);
/// <summary>
/// Simulation mode.
/// </summary>
/// <remarks>
/// Serialized as a lowercase string ("full", "quick", "whatif").
/// <see cref="JsonPropertyNameAttribute"/> is ignored on enum members by
/// <see cref="JsonStringEnumConverter{T}"/>; <c>JsonStringEnumMemberName</c>
/// is the attribute the converter actually honors, so the intended wire
/// names are applied here.
/// </remarks>
[JsonConverter(typeof(JsonStringEnumConverter<SimulationMode>))]
public enum SimulationMode
{
    /// <summary>
    /// Run full simulation with all computations.
    /// </summary>
    [JsonStringEnumMemberName("full")]
    Full,

    /// <summary>
    /// Quick estimation without detailed breakdowns.
    /// </summary>
    [JsonStringEnumMemberName("quick")]
    Quick,

    /// <summary>
    /// What-if analysis with hypothetical changes.
    /// </summary>
    [JsonStringEnumMemberName("whatif")]
    WhatIf
}
/// <summary>
/// Result of a risk simulation.
/// </summary>
/// <param name="SimulationId">Unique identifier generated for this simulation run.</param>
/// <param name="ProfileId">Identifier of the profile that was applied.</param>
/// <param name="ProfileVersion">Version of the profile that was applied.</param>
/// <param name="ProfileHash">Content hash of the applied profile, for reproducibility checks.</param>
/// <param name="Timestamp">Time the simulation completed (UTC).</param>
/// <param name="FindingScores">Per-finding computed scores, in input order.</param>
/// <param name="Distribution">Score distribution; null when not requested.</param>
/// <param name="TopMovers">Highest-scoring findings with their primary drivers; null when contributions were not requested.</param>
/// <param name="AggregateMetrics">Summary statistics across all findings.</param>
/// <param name="ExecutionTimeMs">Wall-clock duration of the simulation in milliseconds.</param>
public sealed record RiskSimulationResult(
    [property: JsonPropertyName("simulation_id")] string SimulationId,
    [property: JsonPropertyName("profile_id")] string ProfileId,
    [property: JsonPropertyName("profile_version")] string ProfileVersion,
    [property: JsonPropertyName("profile_hash")] string ProfileHash,
    [property: JsonPropertyName("timestamp")] DateTimeOffset Timestamp,
    [property: JsonPropertyName("finding_scores")] IReadOnlyList<FindingScore> FindingScores,
    [property: JsonPropertyName("distribution")] RiskDistribution? Distribution,
    [property: JsonPropertyName("top_movers")] IReadOnlyList<TopMover>? TopMovers,
    [property: JsonPropertyName("aggregate_metrics")] AggregateRiskMetrics AggregateMetrics,
    [property: JsonPropertyName("execution_time_ms")] double ExecutionTimeMs);
/// <summary>
/// Computed risk score for a finding.
/// </summary>
/// <param name="FindingId">Identifier of the scored finding.</param>
/// <param name="RawScore">Unbounded weighted sum of signal contributions.</param>
/// <param name="NormalizedScore">Score normalized to the 0–100 range.</param>
/// <param name="Severity">Severity derived from the normalized score (possibly adjusted by overrides).</param>
/// <param name="RecommendedAction">Action derived from severity (possibly adjusted by overrides).</param>
/// <param name="Contributions">Per-signal contributions; null when contributions were not requested.</param>
/// <param name="OverridesApplied">Overrides applied during scoring; null when none were applied or contributions were not requested.</param>
public sealed record FindingScore(
    [property: JsonPropertyName("finding_id")] string FindingId,
    [property: JsonPropertyName("raw_score")] double RawScore,
    [property: JsonPropertyName("normalized_score")] double NormalizedScore,
    [property: JsonPropertyName("severity")] RiskSeverity Severity,
    [property: JsonPropertyName("action")] RiskAction RecommendedAction,
    [property: JsonPropertyName("contributions")] IReadOnlyList<SignalContribution>? Contributions,
    [property: JsonPropertyName("overrides_applied")] IReadOnlyList<AppliedOverride>? OverridesApplied);
/// <summary>
/// Contribution of a signal to the risk score.
/// </summary>
/// <param name="SignalName">Name of the contributing signal.</param>
/// <param name="SignalValue">Raw signal value as supplied in the request.</param>
/// <param name="Weight">Weight assigned to the signal by the profile.</param>
/// <param name="Contribution">Numeric value × weight; the signal's absolute share of the raw score.</param>
/// <param name="ContributionPercentage">Contribution as a percentage of the total raw score.</param>
public sealed record SignalContribution(
    [property: JsonPropertyName("signal_name")] string SignalName,
    [property: JsonPropertyName("signal_value")] object? SignalValue,
    [property: JsonPropertyName("weight")] double Weight,
    [property: JsonPropertyName("contribution")] double Contribution,
    [property: JsonPropertyName("contribution_percentage")] double ContributionPercentage);
/// <summary>
/// An override that was applied during scoring.
/// </summary>
/// <param name="OverrideType">Kind of override, e.g. "severity" or "decision".</param>
/// <param name="Predicate">The signal predicate that matched and triggered the override.</param>
/// <param name="OriginalValue">Value before the override was applied.</param>
/// <param name="AppliedValue">Value after the override was applied.</param>
/// <param name="Reason">Optional human-readable reason from the profile.</param>
public sealed record AppliedOverride(
    [property: JsonPropertyName("override_type")] string OverrideType,
    [property: JsonPropertyName("predicate")] Dictionary<string, object> Predicate,
    [property: JsonPropertyName("original_value")] object? OriginalValue,
    [property: JsonPropertyName("applied_value")] object? AppliedValue,
    [property: JsonPropertyName("reason")] string? Reason);
/// <summary>
/// Distribution of risk scores across findings.
/// </summary>
/// <param name="Buckets">Histogram buckets over the normalized 0–100 score range.</param>
/// <param name="Percentiles">Percentile label ("p25", "p50", …) → normalized score.</param>
/// <param name="SeverityBreakdown">Lowercase severity name → count of findings.</param>
public sealed record RiskDistribution(
    [property: JsonPropertyName("buckets")] IReadOnlyList<RiskBucket> Buckets,
    [property: JsonPropertyName("percentiles")] Dictionary<string, double> Percentiles,
    [property: JsonPropertyName("severity_breakdown")] Dictionary<string, int> SeverityBreakdown);
/// <summary>
/// A bucket in the risk distribution.
/// </summary>
/// <param name="RangeMin">Inclusive lower bound of the bucket's score range.</param>
/// <param name="RangeMax">Upper bound of the bucket's score range.</param>
/// <param name="Count">Number of findings whose normalized score falls in this bucket.</param>
/// <param name="Percentage">Bucket count as a percentage of all scored findings.</param>
public sealed record RiskBucket(
    [property: JsonPropertyName("range_min")] double RangeMin,
    [property: JsonPropertyName("range_max")] double RangeMax,
    [property: JsonPropertyName("count")] int Count,
    [property: JsonPropertyName("percentage")] double Percentage);
/// <summary>
/// A top mover in risk scoring (highest impact findings).
/// </summary>
/// <param name="FindingId">Identifier of the finding.</param>
/// <param name="ComponentPurl">Package URL of the affected component, when known.</param>
/// <param name="Score">Normalized score of the finding.</param>
/// <param name="Severity">Severity of the finding.</param>
/// <param name="PrimaryDriver">Name of the signal contributing most to the score ("unknown" when contributions are unavailable).</param>
/// <param name="DriverContribution">Percentage of the score attributable to the primary driver.</param>
public sealed record TopMover(
    [property: JsonPropertyName("finding_id")] string FindingId,
    [property: JsonPropertyName("component_purl")] string? ComponentPurl,
    [property: JsonPropertyName("score")] double Score,
    [property: JsonPropertyName("severity")] RiskSeverity Severity,
    [property: JsonPropertyName("primary_driver")] string PrimaryDriver,
    [property: JsonPropertyName("driver_contribution")] double DriverContribution);
/// <summary>
/// Aggregate risk metrics across all findings.
/// </summary>
/// <param name="TotalFindings">Number of findings scored.</param>
/// <param name="MeanScore">Mean of the normalized scores (rounded to 2 decimals).</param>
/// <param name="MedianScore">Median of the normalized scores (rounded to 2 decimals).</param>
/// <param name="StdDeviation">Population standard deviation of the normalized scores (rounded to 2 decimals).</param>
/// <param name="MaxScore">Highest normalized score.</param>
/// <param name="MinScore">Lowest normalized score.</param>
/// <param name="CriticalCount">Findings with Critical severity.</param>
/// <param name="HighCount">Findings with High severity.</param>
/// <param name="MediumCount">Findings with Medium severity.</param>
/// <param name="LowCount">Findings with Low severity.</param>
/// <param name="InformationalCount">Findings with Informational severity.</param>
public sealed record AggregateRiskMetrics(
    [property: JsonPropertyName("total_findings")] int TotalFindings,
    [property: JsonPropertyName("mean_score")] double MeanScore,
    [property: JsonPropertyName("median_score")] double MedianScore,
    [property: JsonPropertyName("std_deviation")] double StdDeviation,
    [property: JsonPropertyName("max_score")] double MaxScore,
    [property: JsonPropertyName("min_score")] double MinScore,
    [property: JsonPropertyName("critical_count")] int CriticalCount,
    [property: JsonPropertyName("high_count")] int HighCount,
    [property: JsonPropertyName("medium_count")] int MediumCount,
    [property: JsonPropertyName("low_count")] int LowCount,
    [property: JsonPropertyName("informational_count")] int InformationalCount);

View File

@@ -0,0 +1,461 @@
using System.Diagnostics;
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Policy.Engine.Services;
using StellaOps.Policy.Engine.Telemetry;
using StellaOps.Policy.RiskProfile.Hashing;
using StellaOps.Policy.RiskProfile.Models;
namespace StellaOps.Policy.Engine.Simulation;
/// <summary>
/// Service for running risk simulations with score distributions and contribution breakdowns.
/// </summary>
public sealed class RiskSimulationService
{
    private readonly ILogger<RiskSimulationService> _logger;
    private readonly TimeProvider _timeProvider;
    private readonly RiskProfileConfigurationService _profileService;
    private readonly RiskProfileHasher _hasher;

    // Percentile levels reported in the distribution (p25..p99).
    private static readonly double[] PercentileLevels = { 0.25, 0.50, 0.75, 0.90, 0.95, 0.99 };
    private const int TopMoverCount = 10;
    private const int BucketCount = 10;

    public RiskSimulationService(
        ILogger<RiskSimulationService> logger,
        TimeProvider timeProvider,
        RiskProfileConfigurationService profileService)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _profileService = profileService ?? throw new ArgumentNullException(nameof(profileService));
        _hasher = new RiskProfileHasher();
    }

    /// <summary>
    /// Runs a risk simulation: scores every finding against the requested profile and
    /// optionally computes contribution breakdowns, distribution, and top movers.
    /// </summary>
    /// <param name="request">Simulation request describing the profile and findings.</param>
    /// <returns>The simulation result with per-finding scores and aggregate metrics.</returns>
    /// <exception cref="InvalidOperationException">The requested profile does not exist.</exception>
    public RiskSimulationResult Simulate(RiskSimulationRequest request)
    {
        ArgumentNullException.ThrowIfNull(request);

        using var activity = PolicyEngineTelemetry.ActivitySource.StartActivity("risk_simulation.run");
        activity?.SetTag("profile.id", request.ProfileId);
        activity?.SetTag("finding.count", request.Findings.Count);

        var sw = Stopwatch.StartNew();

        var profile = _profileService.GetProfile(request.ProfileId);
        if (profile == null)
        {
            throw new InvalidOperationException($"Risk profile '{request.ProfileId}' not found.");
        }

        var profileHash = _hasher.ComputeHash(profile);
        var simulationId = GenerateSimulationId(request, profileHash);

        var findingScores = request.Findings
            .Select(f => ComputeFindingScore(f, profile, request.IncludeContributions))
            .ToList();

        var distribution = request.IncludeDistribution
            ? ComputeDistribution(findingScores)
            : null;

        // Top movers need per-signal contributions to name the primary driver.
        var topMovers = request.IncludeContributions
            ? ComputeTopMovers(findingScores, request.Findings)
            : null;

        var aggregateMetrics = ComputeAggregateMetrics(findingScores);

        sw.Stop();
        _logger.LogInformation(
            "Risk simulation {SimulationId} completed for {FindingCount} findings in {ElapsedMs}ms",
            simulationId, request.Findings.Count, sw.Elapsed.TotalMilliseconds);
        PolicyEngineTelemetry.RiskSimulationsRun.Add(1);

        return new RiskSimulationResult(
            SimulationId: simulationId,
            ProfileId: profile.Id,
            ProfileVersion: profile.Version,
            ProfileHash: profileHash,
            Timestamp: _timeProvider.GetUtcNow(),
            FindingScores: findingScores.AsReadOnly(),
            Distribution: distribution,
            TopMovers: topMovers,
            AggregateMetrics: aggregateMetrics,
            ExecutionTimeMs: sw.Elapsed.TotalMilliseconds);
    }

    /// <summary>
    /// Scores a single finding: weighted sum of signal values, normalization to 0-100,
    /// then severity and decision overrides from the profile (first matching override wins).
    /// </summary>
    private FindingScore ComputeFindingScore(
        SimulationFinding finding,
        RiskProfileModel profile,
        bool includeContributions)
    {
        var contributions = new List<SignalContribution>();
        var overridesApplied = new List<AppliedOverride>();
        var rawScore = 0.0;

        // Compute score from signals and weights; signals absent from the finding
        // simply do not contribute.
        foreach (var signal in profile.Signals)
        {
            if (!finding.Signals.TryGetValue(signal.Name, out var signalValue))
            {
                continue;
            }

            var numericValue = ConvertToNumeric(signalValue, signal.Type);
            var weight = profile.Weights.GetValueOrDefault(signal.Name, 0.0);
            var contribution = numericValue * weight;
            rawScore += contribution;

            if (includeContributions)
            {
                contributions.Add(new SignalContribution(
                    SignalName: signal.Name,
                    SignalValue: signalValue,
                    Weight: weight,
                    Contribution: contribution,
                    ContributionPercentage: 0.0)); // Percentage is filled in once the total is known.
            }
        }

        // Normalize score to the 0-100 range (raw weighted sums are expected near 0-10).
        var normalizedScore = Math.Clamp(rawScore * 10, 0, 100);

        // Apply severity overrides; only the first matching predicate is applied.
        var severity = DetermineSeverity(normalizedScore);
        foreach (var severityOverride in profile.Overrides.Severity)
        {
            if (MatchesPredicate(finding.Signals, severityOverride.When))
            {
                var originalSeverity = severity;
                severity = severityOverride.Set;
                if (includeContributions)
                {
                    overridesApplied.Add(new AppliedOverride(
                        OverrideType: "severity",
                        Predicate: severityOverride.When,
                        OriginalValue: originalSeverity.ToString(),
                        AppliedValue: severity.ToString(),
                        Reason: null));
                }
                break;
            }
        }

        // Apply decision overrides; again, first match wins.
        var recommendedAction = DetermineAction(severity);
        foreach (var decisionOverride in profile.Overrides.Decisions)
        {
            if (MatchesPredicate(finding.Signals, decisionOverride.When))
            {
                var originalAction = recommendedAction;
                recommendedAction = decisionOverride.Action;
                if (includeContributions)
                {
                    overridesApplied.Add(new AppliedOverride(
                        OverrideType: "decision",
                        Predicate: decisionOverride.When,
                        OriginalValue: originalAction.ToString(),
                        AppliedValue: recommendedAction.ToString(),
                        Reason: decisionOverride.Reason));
                }
                break;
            }
        }

        // Update contribution percentages now that the total raw score is known.
        if (includeContributions && rawScore > 0)
        {
            contributions = contributions
                .Select(c => c with { ContributionPercentage = (c.Contribution / rawScore) * 100 })
                .ToList();
        }

        return new FindingScore(
            FindingId: finding.FindingId,
            RawScore: rawScore,
            NormalizedScore: normalizedScore,
            Severity: severity,
            RecommendedAction: recommendedAction,
            Contributions: includeContributions ? contributions.AsReadOnly() : null,
            OverridesApplied: includeContributions && overridesApplied.Count > 0
                ? overridesApplied.AsReadOnly()
                : null);
    }

    /// <summary>
    /// Converts a raw signal value (primitive, string, or JsonElement) to a number
    /// according to the signal's declared type. Unknown shapes map to 0.0.
    /// String parsing uses the invariant culture so wire data is interpreted
    /// identically regardless of host locale.
    /// </summary>
    private static double ConvertToNumeric(object? value, RiskSignalType signalType)
    {
        if (value == null)
        {
            return 0.0;
        }
        return signalType switch
        {
            RiskSignalType.Boolean => value switch
            {
                bool b => b ? 1.0 : 0.0,
                JsonElement je when je.ValueKind == JsonValueKind.True => 1.0,
                JsonElement je when je.ValueKind == JsonValueKind.False => 0.0,
                string s when bool.TryParse(s, out var b) => b ? 1.0 : 0.0,
                _ => 0.0
            },
            RiskSignalType.Numeric => value switch
            {
                double d => d,
                float f => f,
                int i => i,
                long l => l,
                decimal dec => (double)dec,
                JsonElement je when je.TryGetDouble(out var d) => d,
                string s when double.TryParse(s, NumberStyles.Float, CultureInfo.InvariantCulture, out var d) => d,
                _ => 0.0
            },
            RiskSignalType.Categorical => value switch
            {
                string s => MapCategoricalToNumeric(s),
                JsonElement je when je.ValueKind == JsonValueKind.String => MapCategoricalToNumeric(je.GetString() ?? ""),
                _ => 0.0
            },
            _ => 0.0
        };
    }

    /// <summary>
    /// Maps well-known categorical labels to a 0-1 scale; unrecognized labels map to 0.5.
    /// </summary>
    private static double MapCategoricalToNumeric(string category)
    {
        return category.ToLowerInvariant() switch
        {
            "none" or "unknown" => 0.0,
            "indirect" or "low" => 0.3,
            "direct" or "medium" => 0.6,
            "high" or "critical" => 1.0,
            _ => 0.5
        };
    }

    /// <summary>
    /// Derives severity from a normalized (0-100) score using fixed thresholds.
    /// </summary>
    private static RiskSeverity DetermineSeverity(double score)
    {
        return score switch
        {
            >= 90 => RiskSeverity.Critical,
            >= 70 => RiskSeverity.High,
            >= 40 => RiskSeverity.Medium,
            >= 10 => RiskSeverity.Low,
            _ => RiskSeverity.Informational
        };
    }

    /// <summary>
    /// Derives the default recommended action from severity (Critical/High deny,
    /// Medium review, everything else allow).
    /// </summary>
    private static RiskAction DetermineAction(RiskSeverity severity)
    {
        return severity switch
        {
            RiskSeverity.Critical => RiskAction.Deny,
            RiskSeverity.High => RiskAction.Deny,
            RiskSeverity.Medium => RiskAction.Review,
            _ => RiskAction.Allow
        };
    }

    /// <summary>
    /// Returns true when every key in the predicate exists in the finding's signals
    /// with an equal value (missing keys fail the match).
    /// </summary>
    private static bool MatchesPredicate(Dictionary<string, object?> signals, Dictionary<string, object> predicate)
    {
        foreach (var (key, expected) in predicate)
        {
            if (!signals.TryGetValue(key, out var actual))
            {
                return false;
            }
            if (!ValuesEqual(actual, expected))
            {
                return false;
            }
        }
        return true;
    }

    /// <summary>
    /// Equality that tolerates JsonElement on either side by unwrapping it to a
    /// primitive (string/double/bool) before comparing with <see cref="object.Equals(object?, object?)"/>.
    /// </summary>
    private static bool ValuesEqual(object? a, object? b)
    {
        if (a == null && b == null) return true;
        if (a == null || b == null) return false;

        // Two JsonElements: compare their raw JSON text directly.
        if (a is JsonElement jeA && b is JsonElement jeB)
        {
            return jeA.GetRawText() == jeB.GetRawText();
        }
        if (a is JsonElement je)
        {
            a = je.ValueKind switch
            {
                JsonValueKind.String => je.GetString(),
                JsonValueKind.Number => je.GetDouble(),
                JsonValueKind.True => true,
                JsonValueKind.False => false,
                _ => je.GetRawText()
            };
        }
        if (b is JsonElement jeb)
        {
            b = jeb.ValueKind switch
            {
                JsonValueKind.String => jeb.GetString(),
                JsonValueKind.Number => jeb.GetDouble(),
                JsonValueKind.True => true,
                JsonValueKind.False => false,
                _ => jeb.GetRawText()
            };
        }
        return Equals(a, b);
    }

    /// <summary>
    /// Builds the histogram, percentiles, and severity breakdown over normalized scores.
    /// Buckets are half-open [min, max) except the last, which is closed so a maximal
    /// score of exactly 100 is counted.
    /// </summary>
    private static RiskDistribution ComputeDistribution(List<FindingScore> scores)
    {
        if (scores.Count == 0)
        {
            return new RiskDistribution(
                Buckets: Array.Empty<RiskBucket>(),
                Percentiles: new Dictionary<string, double>(),
                SeverityBreakdown: new Dictionary<string, int>
                {
                    ["critical"] = 0,
                    ["high"] = 0,
                    ["medium"] = 0,
                    ["low"] = 0,
                    ["informational"] = 0
                });
        }

        var normalizedScores = scores.Select(s => s.NormalizedScore).OrderBy(x => x).ToList();

        // Compute buckets. Math.Clamp in scoring allows a score of exactly 100, so the
        // final bucket must include its upper bound or such scores would be dropped.
        var buckets = new List<RiskBucket>();
        var bucketSize = 100.0 / BucketCount;
        for (var i = 0; i < BucketCount; i++)
        {
            var rangeMin = i * bucketSize;
            var rangeMax = (i + 1) * bucketSize;
            var isLastBucket = i == BucketCount - 1;
            var count = normalizedScores.Count(
                s => s >= rangeMin && (s < rangeMax || (isLastBucket && s <= rangeMax)));
            buckets.Add(new RiskBucket(
                RangeMin: rangeMin,
                RangeMax: rangeMax,
                Count: count,
                Percentage: (double)count / scores.Count * 100));
        }

        // Compute percentiles via truncated-index nearest rank on the sorted scores.
        var percentiles = new Dictionary<string, double>();
        foreach (var level in PercentileLevels)
        {
            var index = (int)(level * (normalizedScores.Count - 1));
            percentiles[$"p{(int)(level * 100)}"] = normalizedScores[index];
        }

        // Severity breakdown, guaranteed to contain every severity key.
        var severityBreakdown = scores
            .GroupBy(s => s.Severity.ToString().ToLowerInvariant())
            .ToDictionary(g => g.Key, g => g.Count());
        foreach (var sev in new[] { "critical", "high", "medium", "low", "informational" })
        {
            severityBreakdown.TryAdd(sev, 0);
        }

        return new RiskDistribution(
            Buckets: buckets.AsReadOnly(),
            Percentiles: percentiles,
            SeverityBreakdown: severityBreakdown);
    }

    /// <summary>
    /// Picks the top-scoring findings and names each one's dominant signal.
    /// Duplicate finding ids are tolerated (first occurrence wins) instead of throwing.
    /// </summary>
    private static IReadOnlyList<TopMover> ComputeTopMovers(
        List<FindingScore> scores,
        IReadOnlyList<SimulationFinding> findings)
    {
        // ToDictionary would throw on duplicate FindingIds; build the lookup tolerantly.
        var findingLookup = new Dictionary<string, SimulationFinding>(StringComparer.OrdinalIgnoreCase);
        foreach (var finding in findings)
        {
            findingLookup.TryAdd(finding.FindingId, finding);
        }

        return scores
            .OrderByDescending(s => s.NormalizedScore)
            .Take(TopMoverCount)
            .Select(s =>
            {
                var finding = findingLookup.GetValueOrDefault(s.FindingId);
                var primaryContribution = s.Contributions?
                    .OrderByDescending(c => c.ContributionPercentage)
                    .FirstOrDefault();
                return new TopMover(
                    FindingId: s.FindingId,
                    ComponentPurl: finding?.ComponentPurl,
                    Score: s.NormalizedScore,
                    Severity: s.Severity,
                    PrimaryDriver: primaryContribution?.SignalName ?? "unknown",
                    DriverContribution: primaryContribution?.ContributionPercentage ?? 0);
            })
            .ToList()
            .AsReadOnly();
    }

    /// <summary>
    /// Computes summary statistics (mean, median, population standard deviation,
    /// min/max, severity counts) over the normalized scores. Empty input yields
    /// an all-zero metrics record.
    /// </summary>
    private static AggregateRiskMetrics ComputeAggregateMetrics(List<FindingScore> scores)
    {
        if (scores.Count == 0)
        {
            return new AggregateRiskMetrics(
                TotalFindings: 0,
                MeanScore: 0,
                MedianScore: 0,
                StdDeviation: 0,
                MaxScore: 0,
                MinScore: 0,
                CriticalCount: 0,
                HighCount: 0,
                MediumCount: 0,
                LowCount: 0,
                InformationalCount: 0);
        }

        var normalizedScores = scores.Select(s => s.NormalizedScore).ToList();
        var mean = normalizedScores.Average();
        var sortedScores = normalizedScores.OrderBy(x => x).ToList();
        var median = sortedScores.Count % 2 == 0
            ? (sortedScores[sortedScores.Count / 2 - 1] + sortedScores[sortedScores.Count / 2]) / 2
            : sortedScores[sortedScores.Count / 2];
        var variance = normalizedScores.Average(s => Math.Pow(s - mean, 2));
        var stdDev = Math.Sqrt(variance);

        return new AggregateRiskMetrics(
            TotalFindings: scores.Count,
            MeanScore: Math.Round(mean, 2),
            MedianScore: Math.Round(median, 2),
            StdDeviation: Math.Round(stdDev, 2),
            MaxScore: normalizedScores.Max(),
            MinScore: normalizedScores.Min(),
            CriticalCount: scores.Count(s => s.Severity == RiskSeverity.Critical),
            HighCount: scores.Count(s => s.Severity == RiskSeverity.High),
            MediumCount: scores.Count(s => s.Severity == RiskSeverity.Medium),
            LowCount: scores.Count(s => s.Severity == RiskSeverity.Low),
            InformationalCount: scores.Count(s => s.Severity == RiskSeverity.Informational));
    }

    /// <summary>
    /// Generates a short unique simulation id. A GUID is folded into the seed so ids
    /// are unique across repeated runs of the same request (intentionally nondeterministic).
    /// </summary>
    private static string GenerateSimulationId(RiskSimulationRequest request, string profileHash)
    {
        var seed = $"{request.ProfileId}|{profileHash}|{request.Findings.Count}|{Guid.NewGuid()}";
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(seed));
        return $"rsim-{Convert.ToHexStringLower(hash)[..16]}";
    }
}

View File

@@ -139,10 +139,7 @@ public sealed class IncidentModeSampler : Sampler
// During incident mode, always sample
if (_incidentModeService.IsActive)
{
return new SamplingResult(
SamplingDecision.RecordAndSample,
samplingParameters.Tags,
samplingParameters.Links);
return new SamplingResult(SamplingDecision.RecordAndSample);
}
// Otherwise, use the base sampler

View File

@@ -35,9 +35,9 @@ public static class PolicyEngineTelemetry
// Gauge: policy_run_queue_depth{tenant}
private static readonly ObservableGauge<int> PolicyRunQueueDepthGauge =
Meter.CreateObservableGauge(
Meter.CreateObservableGauge<int>(
"policy_run_queue_depth",
observeValue: () => QueueDepthObservations,
observeValues: () => QueueDepthObservations ?? Enumerable.Empty<Measurement<int>>(),
unit: "jobs",
description: "Current depth of pending policy run jobs per tenant.");
@@ -148,17 +148,17 @@ public static class PolicyEngineTelemetry
// Gauge: policy_concurrent_evaluations{tenant}
private static readonly ObservableGauge<int> ConcurrentEvaluationsGauge =
Meter.CreateObservableGauge(
Meter.CreateObservableGauge<int>(
"policy_concurrent_evaluations",
observeValue: () => ConcurrentEvaluationsObservations,
observeValues: () => ConcurrentEvaluationsObservations ?? Enumerable.Empty<Measurement<int>>(),
unit: "evaluations",
description: "Current number of concurrent policy evaluations.");
// Gauge: policy_worker_utilization
private static readonly ObservableGauge<double> WorkerUtilizationGauge =
Meter.CreateObservableGauge(
Meter.CreateObservableGauge<double>(
"policy_worker_utilization",
observeValue: () => WorkerUtilizationObservations,
observeValues: () => WorkerUtilizationObservations ?? Enumerable.Empty<Measurement<double>>(),
unit: "ratio",
description: "Worker pool utilization ratio (0.0 to 1.0).");
@@ -168,17 +168,17 @@ public static class PolicyEngineTelemetry
// Gauge: policy_slo_burn_rate{slo_name}
private static readonly ObservableGauge<double> SloBurnRateGauge =
Meter.CreateObservableGauge(
Meter.CreateObservableGauge<double>(
"policy_slo_burn_rate",
observeValue: () => SloBurnRateObservations,
observeValues: () => SloBurnRateObservations ?? Enumerable.Empty<Measurement<double>>(),
unit: "ratio",
description: "SLO burn rate over configured window.");
// Gauge: policy_error_budget_remaining{slo_name}
private static readonly ObservableGauge<double> ErrorBudgetRemainingGauge =
Meter.CreateObservableGauge(
Meter.CreateObservableGauge<double>(
"policy_error_budget_remaining",
observeValue: () => ErrorBudgetObservations,
observeValues: () => ErrorBudgetObservations ?? Enumerable.Empty<Measurement<double>>(),
unit: "ratio",
description: "Remaining error budget as ratio (0.0 to 1.0).");
@@ -265,6 +265,143 @@ public static class PolicyEngineTelemetry
#endregion
#region Risk Simulation and Events Metrics
// Counter: policy_risk_simulations_run_total
private static readonly Counter<long> RiskSimulationsRunCounter =
Meter.CreateCounter<long>(
"policy_risk_simulations_run_total",
unit: "simulations",
description: "Total risk simulations executed.");
// Counter: policy_profile_events_published_total
private static readonly Counter<long> ProfileEventsPublishedCounter =
Meter.CreateCounter<long>(
"policy_profile_events_published_total",
unit: "events",
description: "Total profile lifecycle events published.");
/// <summary>
/// Counter for risk simulations run.
/// </summary>
public static Counter<long> RiskSimulationsRun => RiskSimulationsRunCounter;
/// <summary>
/// Counter for profile events published.
/// </summary>
public static Counter<long> ProfileEventsPublished => ProfileEventsPublishedCounter;
#endregion
#region Reachability Metrics
// Counter: policy_reachability_applied_total{state}
private static readonly Counter<long> ReachabilityAppliedCounter =
Meter.CreateCounter<long>(
"policy_reachability_applied_total",
unit: "facts",
description: "Total reachability facts applied during policy evaluation.");
// Counter: policy_reachability_cache_hits_total
private static readonly Counter<long> ReachabilityCacheHitsCounter =
Meter.CreateCounter<long>(
"policy_reachability_cache_hits_total",
unit: "hits",
description: "Total reachability facts cache hits.");
// Counter: policy_reachability_cache_misses_total
private static readonly Counter<long> ReachabilityCacheMissesCounter =
Meter.CreateCounter<long>(
"policy_reachability_cache_misses_total",
unit: "misses",
description: "Total reachability facts cache misses.");
// Gauge: policy_reachability_cache_hit_ratio
private static readonly ObservableGauge<double> ReachabilityCacheHitRatioGauge =
Meter.CreateObservableGauge<double>(
"policy_reachability_cache_hit_ratio",
observeValues: () => ReachabilityCacheHitRatioObservations ?? Enumerable.Empty<Measurement<double>>(),
unit: "ratio",
description: "Reachability facts cache hit ratio (0.0 to 1.0).");
// Counter: policy_reachability_lookups_total{outcome}
private static readonly Counter<long> ReachabilityLookupsCounter =
Meter.CreateCounter<long>(
"policy_reachability_lookups_total",
unit: "lookups",
description: "Total reachability facts lookup operations.");
// Histogram: policy_reachability_lookup_seconds
private static readonly Histogram<double> ReachabilityLookupSecondsHistogram =
Meter.CreateHistogram<double>(
"policy_reachability_lookup_seconds",
unit: "s",
description: "Duration of reachability facts lookup operations.");
private static IEnumerable<Measurement<double>> ReachabilityCacheHitRatioObservations = Enumerable.Empty<Measurement<double>>();
/// <summary>
/// Records one or more reachability facts applied during evaluation,
/// tagged with the normalized reachability state.
/// </summary>
/// <param name="state">Reachability state (reachable, unreachable, unknown, under_investigation).</param>
/// <param name="count">Number of facts.</param>
public static void RecordReachabilityApplied(string state, long count = 1)
    => ReachabilityAppliedCounter.Add(count, new TagList
    {
        { "state", NormalizeTag(state) },
    });
/// <summary>
/// Adds to the reachability cache hit counter.
/// </summary>
/// <param name="count">Number of hits.</param>
public static void RecordReachabilityCacheHits(long count) => ReachabilityCacheHitsCounter.Add(count);
/// <summary>
/// Adds to the reachability cache miss counter.
/// </summary>
/// <param name="count">Number of misses.</param>
public static void RecordReachabilityCacheMisses(long count) => ReachabilityCacheMissesCounter.Add(count);
/// <summary>
/// Records a reachability lookup operation: increments the lookup counter by the
/// batch size and records the duration, both tagged with the normalized outcome.
/// </summary>
/// <param name="outcome">Outcome (found, not_found, error).</param>
/// <param name="seconds">Duration in seconds.</param>
/// <param name="batchSize">Number of items looked up.</param>
public static void RecordReachabilityLookup(string outcome, double seconds, int batchSize)
{
    var outcomeTag = new KeyValuePair<string, object?>("outcome", NormalizeTag(outcome));
    ReachabilityLookupsCounter.Add(batchSize, outcomeTag);
    ReachabilityLookupSecondsHistogram.Record(seconds, outcomeTag);
}
/// <summary>
/// Registers a callback to observe reachability cache hit ratio.
/// The callback is wrapped in a deferred sequence so it is re-invoked on every
/// gauge observation; the previous implementation invoked it once at registration
/// time, freezing the measurements at that snapshot whenever the callback builds
/// a fresh sequence per call.
/// </summary>
/// <param name="observeFunc">Function that returns current cache hit ratio measurements.</param>
public static void RegisterReachabilityCacheHitRatioObservation(Func<IEnumerable<Measurement<double>>> observeFunc)
{
    ArgumentNullException.ThrowIfNull(observeFunc);
    ReachabilityCacheHitRatioObservations = Defer(observeFunc);

    // Iterator local function: each enumeration re-executes the body, so the
    // callback runs fresh every time the gauge is collected.
    static IEnumerable<Measurement<double>> Defer(Func<IEnumerable<Measurement<double>>> callback)
    {
        foreach (var measurement in callback())
        {
            yield return measurement;
        }
    }
}
#endregion
// Storage for observable gauge observations
private static IEnumerable<Measurement<int>> QueueDepthObservations = Enumerable.Empty<Measurement<int>>();
private static IEnumerable<Measurement<int>> ConcurrentEvaluationsObservations = Enumerable.Empty<Measurement<int>>();