partly or unimplemented features - now implemented
This commit is contained in:
@@ -0,0 +1,529 @@
|
||||
// <copyright file="DeltaIfPresentEndpoints.cs" company="StellaOps">
|
||||
// SPDX-License-Identifier: BUSL-1.1
|
||||
// Sprint: SPRINT_20260208_043_Policy_delta_if_present_calculations_for_missing_signals (TSF-004)
|
||||
// </copyright>
|
||||
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.AspNetCore.Builder;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.Routing;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Endpoints;
|
||||
|
||||
/// <summary>
|
||||
/// API endpoints for delta-if-present calculations (TSF-004).
|
||||
/// Shows hypothetical score changes when missing signals are filled with assumed values.
|
||||
/// </summary>
|
||||
public static class DeltaIfPresentEndpoints
|
||||
{
|
||||
/// <summary>
|
||||
/// Maps delta-if-present endpoints.
|
||||
/// </summary>
|
||||
public static IEndpointRouteBuilder MapDeltaIfPresentEndpoints(this IEndpointRouteBuilder endpoints)
|
||||
{
|
||||
var group = endpoints.MapGroup("/api/v1/policy/delta-if-present")
|
||||
.WithTags("Delta If Present")
|
||||
.WithOpenApi();
|
||||
|
||||
// Calculate single signal delta
|
||||
group.MapPost("/signal", CalculateSingleSignalDeltaAsync)
|
||||
.WithName("CalculateSingleSignalDelta")
|
||||
.WithSummary("Calculate hypothetical score change for a single signal")
|
||||
.WithDescription("Shows what the trust score would be if a specific missing signal had a particular value")
|
||||
.Produces<SingleSignalDeltaResponse>(StatusCodes.Status200OK)
|
||||
.Produces<ProblemDetails>(StatusCodes.Status400BadRequest)
|
||||
.RequireAuthorization("PolicyViewer");
|
||||
|
||||
// Calculate full gap analysis
|
||||
group.MapPost("/analysis", CalculateFullAnalysisAsync)
|
||||
.WithName("CalculateFullGapAnalysis")
|
||||
.WithSummary("Calculate full gap analysis for all missing signals")
|
||||
.WithDescription("Analyzes all signal gaps with best/worst/prior case scenarios and prioritization by impact")
|
||||
.Produces<FullAnalysisResponse>(StatusCodes.Status200OK)
|
||||
.Produces<ProblemDetails>(StatusCodes.Status400BadRequest)
|
||||
.RequireAuthorization("PolicyViewer");
|
||||
|
||||
// Calculate score bounds
|
||||
group.MapPost("/bounds", CalculateScoreBoundsAsync)
|
||||
.WithName("CalculateScoreBounds")
|
||||
.WithSummary("Calculate minimum and maximum possible scores")
|
||||
.WithDescription("Computes the range of possible trust scores given current gaps")
|
||||
.Produces<ScoreBoundsResponse>(StatusCodes.Status200OK)
|
||||
.Produces<ProblemDetails>(StatusCodes.Status400BadRequest)
|
||||
.RequireAuthorization("PolicyViewer");
|
||||
|
||||
return endpoints;
|
||||
}
|
||||
|
||||
private static IResult CalculateSingleSignalDeltaAsync(
|
||||
[FromBody] SingleSignalDeltaRequest request,
|
||||
IDeltaIfPresentCalculator calculator,
|
||||
ILogger<DeltaIfPresentEndpoints> logger)
|
||||
{
|
||||
if (request.Snapshot is null)
|
||||
{
|
||||
return Results.BadRequest(new ProblemDetails
|
||||
{
|
||||
Title = "Invalid request",
|
||||
Detail = "Snapshot is required"
|
||||
});
|
||||
}
|
||||
|
||||
if (string.IsNullOrEmpty(request.SignalName))
|
||||
{
|
||||
return Results.BadRequest(new ProblemDetails
|
||||
{
|
||||
Title = "Invalid request",
|
||||
Detail = "SignalName is required"
|
||||
});
|
||||
}
|
||||
|
||||
logger.LogDebug(
|
||||
"Calculating single signal delta for {Signal} with assumed value {Value}",
|
||||
request.SignalName,
|
||||
request.AssumedValue);
|
||||
|
||||
var result = calculator.CalculateSingleSignalDelta(
|
||||
request.Snapshot,
|
||||
request.SignalName,
|
||||
request.AssumedValue,
|
||||
request.CustomWeights);
|
||||
|
||||
return Results.Ok(new SingleSignalDeltaResponse
|
||||
{
|
||||
Signal = result.Signal,
|
||||
CurrentScore = result.CurrentScore,
|
||||
HypotheticalScore = result.HypotheticalScore,
|
||||
ScoreDelta = result.Delta,
|
||||
AssumedValue = result.AssumedValue,
|
||||
SignalWeight = result.SignalWeight,
|
||||
CurrentEntropy = result.CurrentEntropy,
|
||||
HypotheticalEntropy = result.HypotheticalEntropy,
|
||||
EntropyDelta = result.EntropyDelta
|
||||
});
|
||||
}
|
||||
|
||||
private static IResult CalculateFullAnalysisAsync(
|
||||
[FromBody] FullAnalysisRequest request,
|
||||
IDeltaIfPresentCalculator calculator,
|
||||
ILogger<DeltaIfPresentEndpoints> logger)
|
||||
{
|
||||
if (request.Snapshot is null)
|
||||
{
|
||||
return Results.BadRequest(new ProblemDetails
|
||||
{
|
||||
Title = "Invalid request",
|
||||
Detail = "Snapshot is required"
|
||||
});
|
||||
}
|
||||
|
||||
logger.LogDebug(
|
||||
"Calculating full gap analysis for CVE {Cve}, PURL {Purl}",
|
||||
request.Snapshot.Cve,
|
||||
request.Snapshot.Purl);
|
||||
|
||||
var analysis = calculator.CalculateFullAnalysis(request.Snapshot, request.CustomWeights);
|
||||
|
||||
var gaps = analysis.GapAnalysis.Select(g => new GapAnalysisItemResponse
|
||||
{
|
||||
Signal = g.BestCase.Signal,
|
||||
GapReason = g.GapReason.ToString(),
|
||||
BestCase = MapDeltaResult(g.BestCase),
|
||||
WorstCase = MapDeltaResult(g.WorstCase),
|
||||
PriorCase = MapDeltaResult(g.PriorCase),
|
||||
MaxImpact = g.MaxImpact
|
||||
}).ToList();
|
||||
|
||||
return Results.Ok(new FullAnalysisResponse
|
||||
{
|
||||
Cve = request.Snapshot.Cve,
|
||||
Purl = request.Snapshot.Purl,
|
||||
CurrentScore = analysis.CurrentScore,
|
||||
CurrentEntropy = analysis.CurrentEntropy,
|
||||
GapAnalysis = gaps,
|
||||
PrioritizedGaps = analysis.PrioritizedGaps.ToList(),
|
||||
ComputedAt = analysis.ComputedAt
|
||||
});
|
||||
}
|
||||
|
||||
private static IResult CalculateScoreBoundsAsync(
|
||||
[FromBody] ScoreBoundsRequest request,
|
||||
IDeltaIfPresentCalculator calculator,
|
||||
ILogger<DeltaIfPresentEndpoints> logger)
|
||||
{
|
||||
if (request.Snapshot is null)
|
||||
{
|
||||
return Results.BadRequest(new ProblemDetails
|
||||
{
|
||||
Title = "Invalid request",
|
||||
Detail = "Snapshot is required"
|
||||
});
|
||||
}
|
||||
|
||||
logger.LogDebug(
|
||||
"Calculating score bounds for CVE {Cve}, PURL {Purl}",
|
||||
request.Snapshot.Cve,
|
||||
request.Snapshot.Purl);
|
||||
|
||||
var bounds = calculator.CalculateScoreBounds(request.Snapshot, request.CustomWeights);
|
||||
|
||||
return Results.Ok(new ScoreBoundsResponse
|
||||
{
|
||||
Cve = request.Snapshot.Cve,
|
||||
Purl = request.Snapshot.Purl,
|
||||
CurrentScore = bounds.CurrentScore,
|
||||
CurrentEntropy = bounds.CurrentEntropy,
|
||||
MinimumScore = bounds.MinimumScore,
|
||||
MaximumScore = bounds.MaximumScore,
|
||||
Range = bounds.Range,
|
||||
GapCount = bounds.GapCount,
|
||||
MissingWeightPercentage = bounds.MissingWeightPercentage,
|
||||
ComputedAt = bounds.ComputedAt
|
||||
});
|
||||
}
|
||||
|
||||
private static DeltaResultResponse MapDeltaResult(DeltaIfPresentResult result)
|
||||
{
|
||||
return new DeltaResultResponse
|
||||
{
|
||||
AssumedValue = result.AssumedValue,
|
||||
HypotheticalScore = result.HypotheticalScore,
|
||||
ScoreDelta = result.Delta,
|
||||
HypotheticalEntropy = result.HypotheticalEntropy,
|
||||
EntropyDelta = result.EntropyDelta
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
#region Request DTOs
|
||||
|
||||
/// <summary>
|
||||
/// Request to calculate delta for a single signal.
|
||||
/// </summary>
|
||||
public sealed record SingleSignalDeltaRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// The current signal snapshot.
|
||||
/// </summary>
|
||||
[JsonPropertyName("snapshot")]
|
||||
public required SignalSnapshot Snapshot { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Name of the signal to simulate (VEX, EPSS, Reachability, Runtime, Backport, SBOMLineage).
|
||||
/// </summary>
|
||||
[JsonPropertyName("signal_name")]
|
||||
public required string SignalName { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The assumed value for the signal (0.0 to 1.0 where 0 = lowest risk, 1 = highest risk).
|
||||
/// </summary>
|
||||
[JsonPropertyName("assumed_value")]
|
||||
public double AssumedValue { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional custom signal weights. If not provided, defaults are used.
|
||||
/// </summary>
|
||||
[JsonPropertyName("custom_weights")]
|
||||
public SignalWeights? CustomWeights { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request to calculate full gap analysis.
|
||||
/// </summary>
|
||||
public sealed record FullAnalysisRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// The current signal snapshot.
|
||||
/// </summary>
|
||||
[JsonPropertyName("snapshot")]
|
||||
public required SignalSnapshot Snapshot { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional custom signal weights. If not provided, defaults are used.
|
||||
/// </summary>
|
||||
[JsonPropertyName("custom_weights")]
|
||||
public SignalWeights? CustomWeights { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request to calculate score bounds.
|
||||
/// </summary>
|
||||
public sealed record ScoreBoundsRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// The current signal snapshot.
|
||||
/// </summary>
|
||||
[JsonPropertyName("snapshot")]
|
||||
public required SignalSnapshot Snapshot { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional custom signal weights. If not provided, defaults are used.
|
||||
/// </summary>
|
||||
[JsonPropertyName("custom_weights")]
|
||||
public SignalWeights? CustomWeights { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Response DTOs
|
||||
|
||||
/// <summary>
|
||||
/// Response for single signal delta calculation.
|
||||
/// </summary>
|
||||
public sealed record SingleSignalDeltaResponse
|
||||
{
|
||||
/// <summary>
|
||||
/// Name of the signal analyzed.
|
||||
/// </summary>
|
||||
[JsonPropertyName("signal")]
|
||||
public required string Signal { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Current trust score.
|
||||
/// </summary>
|
||||
[JsonPropertyName("current_score")]
|
||||
public double CurrentScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Hypothetical score if the signal had the assumed value.
|
||||
/// </summary>
|
||||
[JsonPropertyName("hypothetical_score")]
|
||||
public double HypotheticalScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Change in score (hypothetical - current).
|
||||
/// </summary>
|
||||
[JsonPropertyName("score_delta")]
|
||||
public double ScoreDelta { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The assumed value used for simulation.
|
||||
/// </summary>
|
||||
[JsonPropertyName("assumed_value")]
|
||||
public double AssumedValue { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Weight of the signal in scoring.
|
||||
/// </summary>
|
||||
[JsonPropertyName("signal_weight")]
|
||||
public double SignalWeight { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Current entropy (uncertainty).
|
||||
/// </summary>
|
||||
[JsonPropertyName("current_entropy")]
|
||||
public double CurrentEntropy { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Hypothetical entropy after adding the signal.
|
||||
/// </summary>
|
||||
[JsonPropertyName("hypothetical_entropy")]
|
||||
public double HypotheticalEntropy { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Change in entropy (negative = less uncertainty).
|
||||
/// </summary>
|
||||
[JsonPropertyName("entropy_delta")]
|
||||
public double EntropyDelta { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Response for full gap analysis.
|
||||
/// </summary>
|
||||
public sealed record FullAnalysisResponse
|
||||
{
|
||||
/// <summary>
|
||||
/// CVE identifier.
|
||||
/// </summary>
|
||||
[JsonPropertyName("cve")]
|
||||
public required string Cve { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Package URL.
|
||||
/// </summary>
|
||||
[JsonPropertyName("purl")]
|
||||
public required string Purl { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Current trust score.
|
||||
/// </summary>
|
||||
[JsonPropertyName("current_score")]
|
||||
public double CurrentScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Current entropy (uncertainty).
|
||||
/// </summary>
|
||||
[JsonPropertyName("current_entropy")]
|
||||
public double CurrentEntropy { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Analysis of each signal gap with best/worst/prior cases.
|
||||
/// </summary>
|
||||
[JsonPropertyName("gap_analysis")]
|
||||
public required IReadOnlyList<GapAnalysisItemResponse> GapAnalysis { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Signals prioritized by maximum impact (highest first).
|
||||
/// </summary>
|
||||
[JsonPropertyName("prioritized_gaps")]
|
||||
public required IReadOnlyList<string> PrioritizedGaps { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Timestamp when analysis was computed.
|
||||
/// </summary>
|
||||
[JsonPropertyName("computed_at")]
|
||||
public DateTimeOffset ComputedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Individual gap analysis result.
|
||||
/// </summary>
|
||||
public sealed record GapAnalysisItemResponse
|
||||
{
|
||||
/// <summary>
|
||||
/// Name of the signal.
|
||||
/// </summary>
|
||||
[JsonPropertyName("signal")]
|
||||
public required string Signal { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Reason for the gap.
|
||||
/// </summary>
|
||||
[JsonPropertyName("gap_reason")]
|
||||
public required string GapReason { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Best case scenario (lowest risk assumption).
|
||||
/// </summary>
|
||||
[JsonPropertyName("best_case")]
|
||||
public required DeltaResultResponse BestCase { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Worst case scenario (highest risk assumption).
|
||||
/// </summary>
|
||||
[JsonPropertyName("worst_case")]
|
||||
public required DeltaResultResponse WorstCase { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Prior case scenario (prior probability assumption).
|
||||
/// </summary>
|
||||
[JsonPropertyName("prior_case")]
|
||||
public required DeltaResultResponse PriorCase { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Maximum possible score impact (worst - best).
|
||||
/// </summary>
|
||||
[JsonPropertyName("max_impact")]
|
||||
public double MaxImpact { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Delta result for a specific scenario.
|
||||
/// </summary>
|
||||
public sealed record DeltaResultResponse
|
||||
{
|
||||
/// <summary>
|
||||
/// Assumed value for the signal.
|
||||
/// </summary>
|
||||
[JsonPropertyName("assumed_value")]
|
||||
public double AssumedValue { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Hypothetical score with assumed value.
|
||||
/// </summary>
|
||||
[JsonPropertyName("hypothetical_score")]
|
||||
public double HypotheticalScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Change in score.
|
||||
/// </summary>
|
||||
[JsonPropertyName("score_delta")]
|
||||
public double ScoreDelta { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Hypothetical entropy with assumed value.
|
||||
/// </summary>
|
||||
[JsonPropertyName("hypothetical_entropy")]
|
||||
public double HypotheticalEntropy { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Change in entropy.
|
||||
/// </summary>
|
||||
[JsonPropertyName("entropy_delta")]
|
||||
public double EntropyDelta { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Response for score bounds calculation.
|
||||
/// </summary>
|
||||
public sealed record ScoreBoundsResponse
|
||||
{
|
||||
/// <summary>
|
||||
/// CVE identifier.
|
||||
/// </summary>
|
||||
[JsonPropertyName("cve")]
|
||||
public required string Cve { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Package URL.
|
||||
/// </summary>
|
||||
[JsonPropertyName("purl")]
|
||||
public required string Purl { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Current trust score.
|
||||
/// </summary>
|
||||
[JsonPropertyName("current_score")]
|
||||
public double CurrentScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Current entropy (uncertainty).
|
||||
/// </summary>
|
||||
[JsonPropertyName("current_entropy")]
|
||||
public double CurrentEntropy { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Minimum possible score (all gaps at best case).
|
||||
/// </summary>
|
||||
[JsonPropertyName("minimum_score")]
|
||||
public double MinimumScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Maximum possible score (all gaps at worst case).
|
||||
/// </summary>
|
||||
[JsonPropertyName("maximum_score")]
|
||||
public double MaximumScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Range of possible scores.
|
||||
/// </summary>
|
||||
[JsonPropertyName("range")]
|
||||
public double Range { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of signal gaps.
|
||||
/// </summary>
|
||||
[JsonPropertyName("gap_count")]
|
||||
public int GapCount { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Percentage of total weight that is missing.
|
||||
/// </summary>
|
||||
[JsonPropertyName("missing_weight_percentage")]
|
||||
public double MissingWeightPercentage { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Timestamp when bounds were computed.
|
||||
/// </summary>
|
||||
[JsonPropertyName("computed_at")]
|
||||
public DateTimeOffset ComputedAt { get; init; }
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
// Logger interface for typed logging
|
||||
internal sealed class DeltaIfPresentEndpoints { }
|
||||
@@ -0,0 +1,114 @@
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Combined score result that integrates impact and uncertainty scores.
|
||||
/// </summary>
|
||||
public sealed record CombinedImpactScore
|
||||
{
|
||||
/// <summary>Impact score from multi-factor calculation.</summary>
|
||||
[JsonPropertyName("impact")]
|
||||
public required ImpactScore Impact { get; init; }
|
||||
|
||||
/// <summary>Uncertainty score from entropy calculation.</summary>
|
||||
[JsonPropertyName("uncertainty")]
|
||||
public required UncertaintyScore Uncertainty { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Effective priority score combining impact and uncertainty.
|
||||
/// Higher uncertainty reduces the effective priority.
|
||||
/// Formula: impact * (1 - uncertainty_entropy * uncertainty_penalty_factor)
|
||||
/// </summary>
|
||||
[JsonPropertyName("effective_priority")]
|
||||
public required double EffectivePriority { get; init; }
|
||||
|
||||
/// <summary>Basis points representation of effective priority (0-10000).</summary>
|
||||
[JsonPropertyName("effective_priority_basis_points")]
|
||||
public required int EffectivePriorityBasisPoints { get; init; }
|
||||
|
||||
/// <summary>When this combined score was calculated (UTC).</summary>
|
||||
[JsonPropertyName("calculated_at")]
|
||||
public required DateTimeOffset CalculatedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Interface for combined impact-uncertainty score calculation.
|
||||
/// </summary>
|
||||
public interface ICombinedImpactCalculator
|
||||
{
|
||||
/// <summary>
|
||||
/// Calculates combined impact-uncertainty score for prioritization.
|
||||
/// </summary>
|
||||
/// <param name="impactContext">Impact context with environment, data sensitivity, etc.</param>
|
||||
/// <param name="signalSnapshot">Signal snapshot for uncertainty calculation.</param>
|
||||
/// <param name="uncertaintyPenaltyFactor">How much uncertainty reduces priority (default 0.5).</param>
|
||||
/// <returns>Combined score with impact, uncertainty, and effective priority.</returns>
|
||||
CombinedImpactScore Calculate(
|
||||
ImpactContext impactContext,
|
||||
SignalSnapshot signalSnapshot,
|
||||
double uncertaintyPenaltyFactor = 0.5);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calculates combined impact-uncertainty scores for unknown triage.
|
||||
/// Integrates ImpactScoreCalculator with UncertaintyScoreCalculator for
|
||||
/// a unified prioritization signal.
|
||||
/// </summary>
|
||||
public sealed class CombinedImpactCalculator : ICombinedImpactCalculator
|
||||
{
|
||||
private readonly IImpactScoreCalculator _impactCalculator;
|
||||
private readonly IUncertaintyScoreCalculator _uncertaintyCalculator;
|
||||
private readonly ILogger<CombinedImpactCalculator> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public CombinedImpactCalculator(
|
||||
IImpactScoreCalculator impactCalculator,
|
||||
IUncertaintyScoreCalculator uncertaintyCalculator,
|
||||
ILogger<CombinedImpactCalculator> logger,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_impactCalculator = impactCalculator;
|
||||
_uncertaintyCalculator = uncertaintyCalculator;
|
||||
_logger = logger;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public CombinedImpactScore Calculate(
|
||||
ImpactContext impactContext,
|
||||
SignalSnapshot signalSnapshot,
|
||||
double uncertaintyPenaltyFactor = 0.5)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(impactContext);
|
||||
ArgumentNullException.ThrowIfNull(signalSnapshot);
|
||||
|
||||
// Calculate individual scores
|
||||
var impact = _impactCalculator.Calculate(impactContext);
|
||||
var uncertainty = _uncertaintyCalculator.Calculate(signalSnapshot);
|
||||
|
||||
// Effective priority = impact * (1 - uncertainty * penalty)
|
||||
// When entropy is high, priority is reduced
|
||||
var penaltyFactor = Math.Clamp(uncertaintyPenaltyFactor, 0.0, 1.0);
|
||||
var effectivePriority = impact.Score * (1.0 - uncertainty.Entropy * penaltyFactor);
|
||||
effectivePriority = Math.Clamp(effectivePriority, 0.0, 1.0);
|
||||
var effectivePriorityBasisPoints = (int)Math.Round(effectivePriority * 10000);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Calculated combined score: impact={Impact:F4}, uncertainty={Uncertainty:F4}, effective={Effective:F4} (penalty_factor={PenaltyFactor:F2})",
|
||||
impact.Score,
|
||||
uncertainty.Entropy,
|
||||
effectivePriority,
|
||||
penaltyFactor);
|
||||
|
||||
return new CombinedImpactScore
|
||||
{
|
||||
Impact = impact,
|
||||
Uncertainty = uncertainty,
|
||||
EffectivePriority = effectivePriority,
|
||||
EffectivePriorityBasisPoints = effectivePriorityBasisPoints,
|
||||
CalculatedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,346 @@
|
||||
// <copyright file="DeltaIfPresentCalculator.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the BUSL-1.1.
|
||||
// </copyright>
|
||||
|
||||
using StellaOps.Policy.Determinization.Evidence;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using System.Diagnostics.Metrics;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Calculates hypothetical score changes if missing signals were present.
|
||||
/// Implements TSF-004: Delta-If-Present calculations for policy decision support.
|
||||
/// </summary>
|
||||
public sealed class DeltaIfPresentCalculator : IDeltaIfPresentCalculator
|
||||
{
|
||||
private static readonly Meter Meter = new("StellaOps.Policy.Determinization");
|
||||
private static readonly Counter<long> DeltaCalculationsCounter = Meter.CreateCounter<long>(
|
||||
"stellaops_determinization_delta_if_present_calculations_total",
|
||||
description: "Total delta-if-present calculations performed");
|
||||
|
||||
private readonly ILogger<DeltaIfPresentCalculator> _logger;
|
||||
private readonly IUncertaintyScoreCalculator _uncertaintyCalculator;
|
||||
private readonly TrustScoreAggregator _trustAggregator;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
// Default prior values for signals when simulating (moderate/neutral assumptions)
|
||||
private static readonly IReadOnlyDictionary<string, double> DefaultPriors = new Dictionary<string, double>
|
||||
{
|
||||
["VEX"] = 0.5, // Neutral: under_investigation
|
||||
["EPSS"] = 0.3, // Below median EPSS score
|
||||
["Reachability"] = 0.5, // Unknown reachability
|
||||
["Runtime"] = 0.3, // Likely not detected at runtime
|
||||
["Backport"] = 0.5, // Unknown backport status
|
||||
["SBOMLineage"] = 0.5 // Neutral lineage contribution
|
||||
};
|
||||
|
||||
public DeltaIfPresentCalculator(
|
||||
ILogger<DeltaIfPresentCalculator> logger,
|
||||
IUncertaintyScoreCalculator uncertaintyCalculator,
|
||||
TrustScoreAggregator trustAggregator,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_logger = logger;
|
||||
_uncertaintyCalculator = uncertaintyCalculator;
|
||||
_trustAggregator = trustAggregator;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
public DeltaIfPresentResult CalculateSingleSignalDelta(
|
||||
SignalSnapshot snapshot,
|
||||
string signal,
|
||||
double assumedValue,
|
||||
SignalWeights? weights = null)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(snapshot);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(signal);
|
||||
|
||||
var effectiveWeights = weights ?? SignalWeights.Default;
|
||||
var signalWeight = GetSignalWeight(signal, effectiveWeights);
|
||||
|
||||
// Calculate current state
|
||||
var currentUncertainty = _uncertaintyCalculator.Calculate(snapshot, effectiveWeights);
|
||||
var currentScore = _trustAggregator.Aggregate(snapshot, currentUncertainty, effectiveWeights);
|
||||
|
||||
// Create hypothetical snapshot with the signal present
|
||||
var hypotheticalSnapshot = CreateHypotheticalSnapshot(snapshot, signal, assumedValue);
|
||||
|
||||
// Calculate hypothetical state
|
||||
var hypotheticalUncertainty = _uncertaintyCalculator.Calculate(hypotheticalSnapshot, effectiveWeights);
|
||||
var hypotheticalScore = _trustAggregator.Aggregate(hypotheticalSnapshot, hypotheticalUncertainty, effectiveWeights);
|
||||
|
||||
DeltaCalculationsCounter.Add(1,
|
||||
new KeyValuePair<string, object?>("signal", signal),
|
||||
new KeyValuePair<string, object?>("cve", snapshot.Cve));
|
||||
|
||||
_logger.LogDebug(
|
||||
"Delta-if-present for {Signal}={Value:F2}: score {Current:F4} -> {Hypothetical:F4} (delta={Delta:+0.0000;-0.0000})",
|
||||
signal, assumedValue, currentScore, hypotheticalScore, hypotheticalScore - currentScore);
|
||||
|
||||
return new DeltaIfPresentResult
|
||||
{
|
||||
Signal = signal,
|
||||
CurrentScore = currentScore,
|
||||
HypotheticalScore = hypotheticalScore,
|
||||
AssumedValue = assumedValue,
|
||||
SignalWeight = signalWeight,
|
||||
CurrentEntropy = currentUncertainty.Entropy,
|
||||
HypotheticalEntropy = hypotheticalUncertainty.Entropy
|
||||
};
|
||||
}
|
||||
|
||||
public DeltaIfPresentAnalysis CalculateFullAnalysis(
|
||||
SignalSnapshot snapshot,
|
||||
SignalWeights? weights = null)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(snapshot);
|
||||
|
||||
var effectiveWeights = weights ?? SignalWeights.Default;
|
||||
|
||||
// Calculate current state
|
||||
var currentUncertainty = _uncertaintyCalculator.Calculate(snapshot, effectiveWeights);
|
||||
var currentScore = _trustAggregator.Aggregate(snapshot, currentUncertainty, effectiveWeights);
|
||||
|
||||
var gapAnalysis = new List<SignalDeltaScenarios>();
|
||||
|
||||
// Analyze each gap
|
||||
foreach (var gap in currentUncertainty.Gaps)
|
||||
{
|
||||
var priorValue = DefaultPriors.GetValueOrDefault(gap.Signal, 0.5);
|
||||
|
||||
var bestCase = CalculateSingleSignalDelta(snapshot, gap.Signal, 0.0, effectiveWeights);
|
||||
var worstCase = CalculateSingleSignalDelta(snapshot, gap.Signal, 1.0, effectiveWeights);
|
||||
var priorCase = CalculateSingleSignalDelta(snapshot, gap.Signal, priorValue, effectiveWeights);
|
||||
|
||||
gapAnalysis.Add(new SignalDeltaScenarios
|
||||
{
|
||||
Signal = gap.Signal,
|
||||
Weight = gap.Weight,
|
||||
GapReason = gap.Reason,
|
||||
BestCase = bestCase,
|
||||
WorstCase = worstCase,
|
||||
PriorCase = priorCase
|
||||
});
|
||||
}
|
||||
|
||||
// Prioritize gaps by maximum potential impact
|
||||
var prioritized = gapAnalysis
|
||||
.OrderByDescending(g => g.MaxImpact)
|
||||
.Select(g => g.Signal)
|
||||
.ToList();
|
||||
|
||||
_logger.LogInformation(
|
||||
"Delta-if-present analysis for {Cve}/{Purl}: {GapCount} gaps, prioritized: [{Priority}]",
|
||||
snapshot.Cve, snapshot.Purl, gapAnalysis.Count,
|
||||
string.Join(", ", prioritized.Take(3)));
|
||||
|
||||
return new DeltaIfPresentAnalysis
|
||||
{
|
||||
CurrentScore = currentScore,
|
||||
CurrentEntropy = currentUncertainty.Entropy,
|
||||
GapAnalysis = gapAnalysis,
|
||||
PrioritizedGaps = prioritized,
|
||||
ComputedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
}
|
||||
|
||||
public ScoreBounds CalculateScoreBounds(
|
||||
SignalSnapshot snapshot,
|
||||
SignalWeights? weights = null)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(snapshot);
|
||||
|
||||
var effectiveWeights = weights ?? SignalWeights.Default;
|
||||
|
||||
// Calculate current state
|
||||
var currentUncertainty = _uncertaintyCalculator.Calculate(snapshot, effectiveWeights);
|
||||
var currentScore = _trustAggregator.Aggregate(snapshot, currentUncertainty, effectiveWeights);
|
||||
|
||||
if (currentUncertainty.Gaps.Count == 0)
|
||||
{
|
||||
// No gaps - current score is the only possibility
|
||||
return new ScoreBounds
|
||||
{
|
||||
CurrentScore = currentScore,
|
||||
MinimumScore = currentScore,
|
||||
MaximumScore = currentScore,
|
||||
CurrentEntropy = currentUncertainty.Entropy,
|
||||
GapCount = 0,
|
||||
MissingWeightPercentage = 0.0
|
||||
};
|
||||
}
|
||||
|
||||
// Create best-case snapshot (all missing signals at low-risk values)
|
||||
var bestSnapshot = snapshot;
|
||||
foreach (var gap in currentUncertainty.Gaps)
|
||||
{
|
||||
bestSnapshot = CreateHypotheticalSnapshot(bestSnapshot, gap.Signal, 0.0);
|
||||
}
|
||||
|
||||
// Create worst-case snapshot (all missing signals at high-risk values)
|
||||
var worstSnapshot = snapshot;
|
||||
foreach (var gap in currentUncertainty.Gaps)
|
||||
{
|
||||
worstSnapshot = CreateHypotheticalSnapshot(worstSnapshot, gap.Signal, 1.0);
|
||||
}
|
||||
|
||||
// Calculate bounds
|
||||
var bestUncertainty = _uncertaintyCalculator.Calculate(bestSnapshot, effectiveWeights);
|
||||
var worstUncertainty = _uncertaintyCalculator.Calculate(worstSnapshot, effectiveWeights);
|
||||
|
||||
var maxScore = _trustAggregator.Aggregate(bestSnapshot, bestUncertainty, effectiveWeights);
|
||||
var minScore = _trustAggregator.Aggregate(worstSnapshot, worstUncertainty, effectiveWeights);
|
||||
|
||||
// Calculate missing weight percentage
|
||||
var missingWeight = currentUncertainty.Gaps.Sum(g => g.Weight);
|
||||
var totalWeight = effectiveWeights.TotalWeight;
|
||||
var missingPercentage = totalWeight > 0 ? (missingWeight / totalWeight) * 100.0 : 0.0;
|
||||
|
||||
_logger.LogDebug(
|
||||
"Score bounds for {Cve}: current={Current:F4}, min={Min:F4}, max={Max:F4}, range={Range:F4}",
|
||||
snapshot.Cve, currentScore, minScore, maxScore, maxScore - minScore);
|
||||
|
||||
return new ScoreBounds
|
||||
{
|
||||
CurrentScore = currentScore,
|
||||
MinimumScore = minScore,
|
||||
MaximumScore = maxScore,
|
||||
CurrentEntropy = currentUncertainty.Entropy,
|
||||
GapCount = currentUncertainty.Gaps.Count,
|
||||
MissingWeightPercentage = missingPercentage
|
||||
};
|
||||
}
|
||||
|
||||
private static double GetSignalWeight(string signal, SignalWeights weights)
|
||||
{
|
||||
return signal.ToUpperInvariant() switch
|
||||
{
|
||||
"VEX" => weights.VexWeight,
|
||||
"EPSS" => weights.EpssWeight,
|
||||
"REACHABILITY" => weights.ReachabilityWeight,
|
||||
"RUNTIME" => weights.RuntimeWeight,
|
||||
"BACKPORT" => weights.BackportWeight,
|
||||
"SBOMLINEAGE" or "SBOM" => weights.SbomLineageWeight,
|
||||
_ => 0.0
|
||||
};
|
||||
}
|
||||
|
||||
private SignalSnapshot CreateHypotheticalSnapshot(
|
||||
SignalSnapshot original,
|
||||
string signal,
|
||||
double normalizedValue)
|
||||
{
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
|
||||
return signal.ToUpperInvariant() switch
|
||||
{
|
||||
"VEX" => original with
|
||||
{
|
||||
Vex = SignalState<VexClaimSummary>.Queried(
|
||||
CreateHypotheticalVex(normalizedValue), now)
|
||||
},
|
||||
"EPSS" => original with
|
||||
{
|
||||
Epss = SignalState<EpssEvidence>.Queried(
|
||||
CreateHypotheticalEpss(normalizedValue), now)
|
||||
},
|
||||
"REACHABILITY" => original with
|
||||
{
|
||||
Reachability = SignalState<ReachabilityEvidence>.Queried(
|
||||
CreateHypotheticalReachability(normalizedValue), now)
|
||||
},
|
||||
"RUNTIME" => original with
|
||||
{
|
||||
Runtime = SignalState<RuntimeEvidence>.Queried(
|
||||
CreateHypotheticalRuntime(normalizedValue), now)
|
||||
},
|
||||
"BACKPORT" => original with
|
||||
{
|
||||
Backport = SignalState<BackportEvidence>.Queried(
|
||||
CreateHypotheticalBackport(normalizedValue), now)
|
||||
},
|
||||
"SBOMLINEAGE" or "SBOM" => original with
|
||||
{
|
||||
Sbom = SignalState<SbomLineageEvidence>.Queried(
|
||||
CreateHypotheticalSbom(normalizedValue), now)
|
||||
},
|
||||
_ => original
|
||||
};
|
||||
}
|
||||
|
||||
private static VexClaimSummary CreateHypotheticalVex(double normalizedValue)
|
||||
{
|
||||
// Map 0.0-1.0 to VEX status
|
||||
var status = normalizedValue switch
|
||||
{
|
||||
< 0.25 => "not_affected",
|
||||
< 0.50 => "under_investigation",
|
||||
< 0.75 => "under_investigation",
|
||||
_ => "affected"
|
||||
};
|
||||
|
||||
return new VexClaimSummary
|
||||
{
|
||||
Status = status,
|
||||
Source = "hypothetical",
|
||||
DocumentId = "delta-if-present-simulation",
|
||||
Timestamp = DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
|
||||
private static EpssEvidence CreateHypotheticalEpss(double normalizedValue)
|
||||
{
|
||||
return new EpssEvidence
|
||||
{
|
||||
Epss = normalizedValue,
|
||||
Percentile = normalizedValue * 100.0,
|
||||
Date = DateOnly.FromDateTime(DateTime.UtcNow)
|
||||
};
|
||||
}
|
||||
|
||||
private static ReachabilityEvidence CreateHypotheticalReachability(double normalizedValue)
|
||||
{
|
||||
var status = normalizedValue >= 0.5
|
||||
? ReachabilityStatus.Reachable
|
||||
: ReachabilityStatus.Unreachable;
|
||||
|
||||
return new ReachabilityEvidence
|
||||
{
|
||||
Status = status,
|
||||
Confidence = 1.0 - Math.Abs(normalizedValue - 0.5) * 2,
|
||||
PathCount = normalizedValue >= 0.5 ? 1 : 0,
|
||||
Source = "hypothetical"
|
||||
};
|
||||
}
|
||||
|
||||
private static RuntimeEvidence CreateHypotheticalRuntime(double normalizedValue)
|
||||
{
|
||||
return new RuntimeEvidence
|
||||
{
|
||||
Detected = normalizedValue >= 0.5,
|
||||
Source = "hypothetical",
|
||||
Timestamp = DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
|
||||
private static BackportEvidence CreateHypotheticalBackport(double normalizedValue)
|
||||
{
|
||||
return new BackportEvidence
|
||||
{
|
||||
Detected = normalizedValue < 0.5, // Backport = lower risk
|
||||
Source = "hypothetical",
|
||||
Timestamp = DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
|
||||
private static SbomLineageEvidence CreateHypotheticalSbom(double normalizedValue)
|
||||
{
|
||||
return new SbomLineageEvidence
|
||||
{
|
||||
Present = true,
|
||||
Depth = (int)(normalizedValue * 5),
|
||||
Source = "hypothetical"
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,192 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EwsCalculator.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Unified Evidence-Weighted Score calculator implementation.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Diagnostics.Metrics;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
|
||||
/// Unified calculator for Evidence-Weighted Scores (EWS).
|
||||
/// Orchestrates 6-dimension normalization, weighting, and guardrails.
|
||||
/// </summary>
|
||||
public sealed class EwsCalculator : IEwsCalculator
|
||||
{
|
||||
private static readonly Meter Meter = new("StellaOps.Policy.Determinization.EWS");
|
||||
private static readonly Histogram<int> EwsScoreHistogram = Meter.CreateHistogram<int>(
|
||||
"stellaops_ews_score",
|
||||
unit: "score",
|
||||
description: "Evidence-Weighted Score distribution (0-100)");
|
||||
private static readonly Counter<int> GuardrailsAppliedCounter = Meter.CreateCounter<int>(
|
||||
"stellaops_ews_guardrails_applied",
|
||||
description: "Count of guardrails applied to EWS scores");
|
||||
|
||||
private readonly ImmutableDictionary<EwsDimension, IEwsDimensionNormalizer> _normalizers;
|
||||
private readonly IGuardrailsEngine _guardrailsEngine;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly ILogger<EwsCalculator> _logger;
|
||||
|
||||
public EwsCalculator(
|
||||
IEnumerable<IEwsDimensionNormalizer> normalizers,
|
||||
IGuardrailsEngine guardrailsEngine,
|
||||
TimeProvider? timeProvider = null,
|
||||
ILogger<EwsCalculator>? logger = null)
|
||||
{
|
||||
_normalizers = normalizers.ToImmutableDictionary(n => n.Dimension);
|
||||
_guardrailsEngine = guardrailsEngine;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<EwsCalculator>.Instance;
|
||||
|
||||
ValidateNormalizers();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a default EwsCalculator with all standard normalizers.
|
||||
/// </summary>
|
||||
public static EwsCalculator CreateDefault(
|
||||
TimeProvider? timeProvider = null,
|
||||
ILogger<EwsCalculator>? logger = null)
|
||||
{
|
||||
var normalizers = new IEwsDimensionNormalizer[]
|
||||
{
|
||||
new ReachabilityNormalizer(),
|
||||
new RuntimeSignalsNormalizer(),
|
||||
new BackportEvidenceNormalizer(),
|
||||
new ExploitabilityNormalizer(),
|
||||
new SourceConfidenceNormalizer(),
|
||||
new MitigationStatusNormalizer()
|
||||
};
|
||||
|
||||
return new EwsCalculator(
|
||||
normalizers,
|
||||
new GuardrailsEngine(),
|
||||
timeProvider,
|
||||
logger);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public EwsCompositeScore Calculate(
|
||||
EwsSignalInput signal,
|
||||
EwsDimensionWeights? weights = null,
|
||||
EwsGuardrails? guardrails = null)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(signal);
|
||||
|
||||
var effectiveWeights = weights ?? EwsDimensionWeights.Default;
|
||||
var effectiveGuardrails = guardrails ?? EwsGuardrails.Default;
|
||||
|
||||
// Validate weights
|
||||
if (!effectiveWeights.IsNormalized())
|
||||
{
|
||||
_logger.LogWarning(
|
||||
"EWS dimension weights are not normalized (total={Total:F4}); results may be unexpected",
|
||||
effectiveWeights.TotalWeight);
|
||||
}
|
||||
|
||||
// Calculate all dimension scores
|
||||
var dimensionScores = new List<EwsDimensionScore>();
|
||||
foreach (EwsDimension dimension in Enum.GetValues<EwsDimension>())
|
||||
{
|
||||
var dimScore = CalculateDimension(dimension, signal, effectiveWeights.GetWeight(dimension));
|
||||
dimensionScores.Add(dimScore);
|
||||
}
|
||||
|
||||
var dimensions = dimensionScores.ToImmutableArray();
|
||||
|
||||
// Calculate raw composite score (weighted sum)
|
||||
var rawScore = (int)Math.Round(dimensions.Sum(d => d.WeightedContribution));
|
||||
rawScore = Math.Clamp(rawScore, 0, 100);
|
||||
|
||||
// Apply guardrails
|
||||
var guardrailsResult = _guardrailsEngine.Apply(rawScore, signal, dimensions, effectiveGuardrails);
|
||||
|
||||
// Calculate overall confidence (weighted average)
|
||||
var confidence = dimensions.Sum(d => d.Confidence * d.Weight);
|
||||
|
||||
// Determine if manual review is needed
|
||||
var needsReview = confidence < effectiveGuardrails.MinConfidenceThreshold;
|
||||
|
||||
var result = new EwsCompositeScore
|
||||
{
|
||||
Score = guardrailsResult.AdjustedScore,
|
||||
RawScore = rawScore,
|
||||
Confidence = confidence,
|
||||
Dimensions = dimensions,
|
||||
AppliedGuardrails = guardrailsResult.AppliedGuardrails,
|
||||
NeedsReview = needsReview,
|
||||
CalculatedAt = _timeProvider.GetUtcNow(),
|
||||
CveId = signal.CveId,
|
||||
Purl = signal.Purl
|
||||
};
|
||||
|
||||
// Emit metrics
|
||||
EwsScoreHistogram.Record(result.Score,
|
||||
new KeyValuePair<string, object?>("risk_tier", result.RiskTier),
|
||||
new KeyValuePair<string, object?>("guardrails_applied", guardrailsResult.WasModified));
|
||||
|
||||
if (guardrailsResult.WasModified)
|
||||
{
|
||||
GuardrailsAppliedCounter.Add(guardrailsResult.AppliedGuardrails.Length);
|
||||
}
|
||||
|
||||
_logger.LogDebug(
|
||||
"Calculated EWS: score={Score} (raw={RawScore}), confidence={Confidence:P0}, tier={Tier}, guardrails={Guardrails}",
|
||||
result.Score,
|
||||
result.RawScore,
|
||||
result.Confidence,
|
||||
result.RiskTier,
|
||||
string.Join(",", guardrailsResult.AppliedGuardrails));
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public EwsDimensionScore CalculateDimension(
|
||||
EwsDimension dimension,
|
||||
EwsSignalInput signal,
|
||||
double weight)
|
||||
{
|
||||
var normalizer = GetNormalizer(dimension);
|
||||
|
||||
var score = normalizer.Normalize(signal);
|
||||
var confidence = normalizer.GetConfidence(signal);
|
||||
var explanation = normalizer.GetExplanation(signal, score);
|
||||
|
||||
return new EwsDimensionScore
|
||||
{
|
||||
Dimension = dimension,
|
||||
Score = score,
|
||||
Confidence = confidence,
|
||||
Weight = weight,
|
||||
Explanation = explanation
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public IEwsDimensionNormalizer GetNormalizer(EwsDimension dimension)
|
||||
{
|
||||
if (_normalizers.TryGetValue(dimension, out var normalizer))
|
||||
{
|
||||
return normalizer;
|
||||
}
|
||||
|
||||
throw new InvalidOperationException($"No normalizer registered for dimension {dimension}");
|
||||
}
|
||||
|
||||
private void ValidateNormalizers()
|
||||
{
|
||||
foreach (EwsDimension dimension in Enum.GetValues<EwsDimension>())
|
||||
{
|
||||
if (!_normalizers.ContainsKey(dimension))
|
||||
{
|
||||
throw new InvalidOperationException(
|
||||
$"Missing normalizer for dimension {dimension}. All 6 dimensions must have normalizers.");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,101 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EwsDimension.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Defines the 6 canonical dimensions for EWS scoring.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
|
||||
/// The 6 canonical dimensions for Evidence-Weighted Score (EWS) model.
|
||||
/// Each dimension maps specific signal inputs to a normalized 0-100 score.
|
||||
/// </summary>
|
||||
[JsonConverter(typeof(JsonStringEnumConverter))]
|
||||
public enum EwsDimension
|
||||
{
|
||||
/// <summary>
|
||||
/// RCH - Reachability dimension.
|
||||
/// Measures whether vulnerable code paths are reachable from entrypoints.
|
||||
/// Input: Reachability tier (R0-R4), call graph analysis, runtime traces.
|
||||
/// </summary>
|
||||
Reachability = 0,
|
||||
|
||||
/// <summary>
|
||||
/// RTS - Runtime Signals dimension.
|
||||
/// Measures evidence from runtime detection and observability.
|
||||
/// Input: Runtime telemetry, instrumentation coverage, APM signals.
|
||||
/// </summary>
|
||||
RuntimeSignals = 1,
|
||||
|
||||
/// <summary>
|
||||
/// BKP - Backport Evidence dimension.
|
||||
/// Measures evidence of patched code in affected packages.
|
||||
/// Input: Backport detection, binary diff analysis, vendor advisories.
|
||||
/// </summary>
|
||||
BackportEvidence = 2,
|
||||
|
||||
/// <summary>
|
||||
/// XPL - Exploitability dimension.
|
||||
/// Measures likelihood and maturity of exploitation.
|
||||
/// Input: EPSS, KEV status, exploit kit availability, PoC age.
|
||||
/// </summary>
|
||||
Exploitability = 3,
|
||||
|
||||
/// <summary>
|
||||
/// SRC - Source Confidence dimension.
|
||||
/// Measures confidence in SBOM and dependency lineage.
|
||||
/// Input: SBOM completeness, verified signatures, attestations.
|
||||
/// </summary>
|
||||
SourceConfidence = 4,
|
||||
|
||||
/// <summary>
|
||||
/// MIT - Mitigation Status dimension.
|
||||
/// Measures VEX status and compensating controls.
|
||||
/// Input: VEX statements, workarounds applied, network controls.
|
||||
/// </summary>
|
||||
MitigationStatus = 5
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Short codes for dimension serialization and display.
|
||||
/// </summary>
|
||||
public static class EwsDimensionCodes
|
||||
{
|
||||
public const string Reachability = "RCH";
|
||||
public const string RuntimeSignals = "RTS";
|
||||
public const string BackportEvidence = "BKP";
|
||||
public const string Exploitability = "XPL";
|
||||
public const string SourceConfidence = "SRC";
|
||||
public const string MitigationStatus = "MIT";
|
||||
|
||||
/// <summary>
|
||||
/// Gets the short code for a dimension.
|
||||
/// </summary>
|
||||
public static string ToCode(this EwsDimension dimension) => dimension switch
|
||||
{
|
||||
EwsDimension.Reachability => Reachability,
|
||||
EwsDimension.RuntimeSignals => RuntimeSignals,
|
||||
EwsDimension.BackportEvidence => BackportEvidence,
|
||||
EwsDimension.Exploitability => Exploitability,
|
||||
EwsDimension.SourceConfidence => SourceConfidence,
|
||||
EwsDimension.MitigationStatus => MitigationStatus,
|
||||
_ => throw new ArgumentOutOfRangeException(nameof(dimension), dimension, "Unknown dimension")
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Parses a short code to a dimension.
|
||||
/// </summary>
|
||||
public static EwsDimension? FromCode(string code) => code?.ToUpperInvariant() switch
|
||||
{
|
||||
Reachability => EwsDimension.Reachability,
|
||||
RuntimeSignals => EwsDimension.RuntimeSignals,
|
||||
BackportEvidence => EwsDimension.BackportEvidence,
|
||||
Exploitability => EwsDimension.Exploitability,
|
||||
SourceConfidence => EwsDimension.SourceConfidence,
|
||||
MitigationStatus => EwsDimension.MitigationStatus,
|
||||
_ => null
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,298 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EwsModels.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Result models for Evidence-Weighted Score calculation.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
|
||||
/// Individual dimension score from normalization.
|
||||
/// </summary>
|
||||
public sealed record EwsDimensionScore
|
||||
{
|
||||
/// <summary>
|
||||
/// The dimension this score represents.
|
||||
/// </summary>
|
||||
[JsonPropertyName("dimension")]
|
||||
public required EwsDimension Dimension { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Short dimension code (RCH, RTS, BKP, XPL, SRC, MIT).
|
||||
/// </summary>
|
||||
[JsonPropertyName("code")]
|
||||
public string Code => Dimension.ToCode();
|
||||
|
||||
/// <summary>
|
||||
/// Normalized score in range [0, 100].
|
||||
/// </summary>
|
||||
[JsonPropertyName("score")]
|
||||
public required int Score { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Confidence level for this score (0.0 to 1.0).
|
||||
/// </summary>
|
||||
[JsonPropertyName("confidence")]
|
||||
public required double Confidence { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Weight applied to this dimension in composite calculation.
|
||||
/// </summary>
|
||||
[JsonPropertyName("weight")]
|
||||
public required double Weight { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Weighted contribution to composite score.
|
||||
/// </summary>
|
||||
[JsonPropertyName("weighted_contribution")]
|
||||
public double WeightedContribution => Score * Weight;
|
||||
|
||||
/// <summary>
|
||||
/// Human-readable explanation of how the score was derived.
|
||||
/// </summary>
|
||||
[JsonPropertyName("explanation")]
|
||||
public required string Explanation { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether this score is based on actual evidence or assumptions.
|
||||
/// </summary>
|
||||
[JsonPropertyName("is_evidence_based")]
|
||||
public bool IsEvidenceBased => Confidence >= 0.5;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Weights for each dimension in the 6-dimension EWS model.
|
||||
/// </summary>
|
||||
public sealed record EwsDimensionWeights
|
||||
{
|
||||
/// <summary>
|
||||
/// Weight for RCH (Reachability) dimension.
|
||||
/// </summary>
|
||||
[JsonPropertyName("rch")]
|
||||
public double Reachability { get; init; } = 0.25;
|
||||
|
||||
/// <summary>
|
||||
/// Weight for RTS (Runtime Signals) dimension.
|
||||
/// </summary>
|
||||
[JsonPropertyName("rts")]
|
||||
public double RuntimeSignals { get; init; } = 0.15;
|
||||
|
||||
/// <summary>
|
||||
/// Weight for BKP (Backport Evidence) dimension.
|
||||
/// </summary>
|
||||
[JsonPropertyName("bkp")]
|
||||
public double BackportEvidence { get; init; } = 0.10;
|
||||
|
||||
/// <summary>
|
||||
/// Weight for XPL (Exploitability) dimension.
|
||||
/// </summary>
|
||||
[JsonPropertyName("xpl")]
|
||||
public double Exploitability { get; init; } = 0.20;
|
||||
|
||||
/// <summary>
|
||||
/// Weight for SRC (Source Confidence) dimension.
|
||||
/// </summary>
|
||||
[JsonPropertyName("src")]
|
||||
public double SourceConfidence { get; init; } = 0.10;
|
||||
|
||||
/// <summary>
|
||||
/// Weight for MIT (Mitigation Status) dimension.
|
||||
/// </summary>
|
||||
[JsonPropertyName("mit")]
|
||||
public double MitigationStatus { get; init; } = 0.20;
|
||||
|
||||
/// <summary>
|
||||
/// Default weights as per advisory recommendations.
|
||||
/// </summary>
|
||||
public static EwsDimensionWeights Default => new();
|
||||
|
||||
/// <summary>
|
||||
/// Legacy 6-dimension weights for backward compatibility.
|
||||
/// </summary>
|
||||
public static EwsDimensionWeights Legacy => new()
|
||||
{
|
||||
Reachability = 0.20,
|
||||
RuntimeSignals = 0.10,
|
||||
BackportEvidence = 0.15,
|
||||
Exploitability = 0.25,
|
||||
SourceConfidence = 0.10,
|
||||
MitigationStatus = 0.20
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Gets the weight for a specific dimension.
|
||||
/// </summary>
|
||||
public double GetWeight(EwsDimension dimension) => dimension switch
|
||||
{
|
||||
EwsDimension.Reachability => Reachability,
|
||||
EwsDimension.RuntimeSignals => RuntimeSignals,
|
||||
EwsDimension.BackportEvidence => BackportEvidence,
|
||||
EwsDimension.Exploitability => Exploitability,
|
||||
EwsDimension.SourceConfidence => SourceConfidence,
|
||||
EwsDimension.MitigationStatus => MitigationStatus,
|
||||
_ => 0.0
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Sum of all weights (should equal 1.0 for normalized calculations).
|
||||
/// </summary>
|
||||
public double TotalWeight =>
|
||||
Reachability + RuntimeSignals + BackportEvidence +
|
||||
Exploitability + SourceConfidence + MitigationStatus;
|
||||
|
||||
/// <summary>
|
||||
/// Validates that weights sum to approximately 1.0.
|
||||
/// </summary>
|
||||
public bool IsNormalized(double tolerance = 0.001) =>
|
||||
Math.Abs(TotalWeight - 1.0) < tolerance;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Guardrails configuration for EWS scoring.
|
||||
/// Defines caps and floors to prevent extreme scores.
|
||||
/// </summary>
|
||||
public sealed record EwsGuardrails
|
||||
{
|
||||
/// <summary>
|
||||
/// Maximum score for "not_affected" VEX status (cap).
|
||||
/// Prevents fully mitigated items from being flagged as high risk.
|
||||
/// </summary>
|
||||
[JsonPropertyName("not_affected_cap")]
|
||||
public int NotAffectedCap { get; init; } = 25;
|
||||
|
||||
/// <summary>
|
||||
/// Minimum score when runtime evidence shows active usage (floor).
|
||||
/// Ensures actively used vulnerable code is never fully suppressed.
|
||||
/// </summary>
|
||||
[JsonPropertyName("runtime_floor")]
|
||||
public int RuntimeFloor { get; init; } = 30;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum score for speculative findings (no evidence, all assumptions).
|
||||
/// Prevents assumption-based findings from dominating triage.
|
||||
/// </summary>
|
||||
[JsonPropertyName("speculative_cap")]
|
||||
public int SpeculativeCap { get; init; } = 60;
|
||||
|
||||
/// <summary>
|
||||
/// Minimum score when CVE is in KEV (floor).
|
||||
/// Known exploited vulnerabilities always require attention.
|
||||
/// </summary>
|
||||
[JsonPropertyName("kev_floor")]
|
||||
public int KevFloor { get; init; } = 70;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum score for backported findings (cap).
|
||||
/// Confirmed backports should not be high priority.
|
||||
/// </summary>
|
||||
[JsonPropertyName("backported_cap")]
|
||||
public int BackportedCap { get; init; } = 20;
|
||||
|
||||
/// <summary>
|
||||
/// Minimum overall confidence to trust the composite score.
|
||||
/// Below this, the score should be flagged for manual review.
|
||||
/// </summary>
|
||||
[JsonPropertyName("min_confidence_threshold")]
|
||||
public double MinConfidenceThreshold { get; init; } = 0.3;
|
||||
|
||||
/// <summary>
|
||||
/// Default guardrails configuration.
|
||||
/// </summary>
|
||||
public static EwsGuardrails Default => new();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Composite Evidence-Weighted Score result.
|
||||
/// </summary>
|
||||
public sealed record EwsCompositeScore
|
||||
{
|
||||
/// <summary>
|
||||
/// Final weighted composite score [0, 100].
|
||||
/// </summary>
|
||||
[JsonPropertyName("score")]
|
||||
public required int Score { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Score before guardrails were applied.
|
||||
/// </summary>
|
||||
[JsonPropertyName("raw_score")]
|
||||
public required int RawScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Basis points representation (0-10000) for deterministic storage.
|
||||
/// </summary>
|
||||
[JsonPropertyName("basis_points")]
|
||||
public int BasisPoints => Score * 100;
|
||||
|
||||
/// <summary>
|
||||
/// Overall confidence in the composite score (0.0 to 1.0).
|
||||
/// Weighted average of dimension confidences.
|
||||
/// </summary>
|
||||
[JsonPropertyName("confidence")]
|
||||
public required double Confidence { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Individual dimension scores.
|
||||
/// </summary>
|
||||
[JsonPropertyName("dimensions")]
|
||||
public required ImmutableArray<EwsDimensionScore> Dimensions { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Guardrails that were applied.
|
||||
/// </summary>
|
||||
[JsonPropertyName("applied_guardrails")]
|
||||
public required ImmutableArray<string> AppliedGuardrails { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether guardrails modified the score.
|
||||
/// </summary>
|
||||
[JsonPropertyName("guardrails_applied")]
|
||||
public bool GuardrailsApplied => Score != RawScore;
|
||||
|
||||
/// <summary>
|
||||
/// Whether manual review is recommended due to low confidence.
|
||||
/// </summary>
|
||||
[JsonPropertyName("needs_review")]
|
||||
public required bool NeedsReview { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When this score was calculated (UTC).
|
||||
/// </summary>
|
||||
[JsonPropertyName("calculated_at")]
|
||||
public required DateTimeOffset CalculatedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// CVE identifier this score relates to.
|
||||
/// </summary>
|
||||
[JsonPropertyName("cve_id")]
|
||||
public string? CveId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Package URL (purl) this score relates to.
|
||||
/// </summary>
|
||||
[JsonPropertyName("purl")]
|
||||
public string? Purl { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Gets a dimension score by dimension type.
|
||||
/// </summary>
|
||||
public EwsDimensionScore? GetDimension(EwsDimension dimension) =>
|
||||
Dimensions.FirstOrDefault(d => d.Dimension == dimension);
|
||||
|
||||
/// <summary>
|
||||
/// Gets a risk tier based on the score.
|
||||
/// </summary>
|
||||
[JsonPropertyName("risk_tier")]
|
||||
public string RiskTier => Score switch
|
||||
{
|
||||
>= 80 => "Critical",
|
||||
>= 60 => "High",
|
||||
>= 40 => "Medium",
|
||||
>= 20 => "Low",
|
||||
_ => "Informational"
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,221 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EwsSignalInput.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Signal input model for EWS dimension normalization.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
|
||||
/// Raw signal inputs for Evidence-Weighted Score calculation.
|
||||
/// Contains all signals that feed into the 6-dimension model.
|
||||
/// </summary>
|
||||
public sealed record EwsSignalInput
|
||||
{
|
||||
// -------------------------------------------------------------------------
|
||||
// RCH (Reachability) signals
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
/// <summary>
|
||||
/// Reachability tier from static analysis (R0=unreachable to R4=reachable).
|
||||
/// </summary>
|
||||
[JsonPropertyName("reachability_tier")]
|
||||
public int? ReachabilityTier { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Call graph analysis confidence (0.0 to 1.0).
|
||||
/// </summary>
|
||||
[JsonPropertyName("call_graph_confidence")]
|
||||
public double? CallGraphConfidence { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether runtime trace confirmed the path.
|
||||
/// </summary>
|
||||
[JsonPropertyName("runtime_trace_confirmed")]
|
||||
public bool? RuntimeTraceConfirmed { get; init; }
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// RTS (Runtime Signals) signals
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
/// <summary>
|
||||
/// Runtime instrumentation coverage percentage (0.0 to 1.0).
|
||||
/// </summary>
|
||||
[JsonPropertyName("instrumentation_coverage")]
|
||||
public double? InstrumentationCoverage { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of runtime invocations observed in the past period.
|
||||
/// </summary>
|
||||
[JsonPropertyName("runtime_invocation_count")]
|
||||
public int? RuntimeInvocationCount { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether APM signals indicate active usage.
|
||||
/// </summary>
|
||||
[JsonPropertyName("apm_active_usage")]
|
||||
public bool? ApmActiveUsage { get; init; }
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// BKP (Backport Evidence) signals
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
/// <summary>
|
||||
/// Whether backport was detected via binary analysis.
|
||||
/// </summary>
|
||||
[JsonPropertyName("backport_detected")]
|
||||
public bool? BackportDetected { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Backport confidence score from binary diff (0.0 to 1.0).
|
||||
/// </summary>
|
||||
[JsonPropertyName("backport_confidence")]
|
||||
public double? BackportConfidence { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether vendor advisory confirms backport.
|
||||
/// </summary>
|
||||
[JsonPropertyName("vendor_backport_confirmed")]
|
||||
public bool? VendorBackportConfirmed { get; init; }
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// XPL (Exploitability) signals
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
/// <summary>
|
||||
/// EPSS probability (0.0 to 1.0).
|
||||
/// </summary>
|
||||
[JsonPropertyName("epss_probability")]
|
||||
public double? EpssProbability { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether the CVE is in KEV (Known Exploited Vulnerabilities).
|
||||
/// </summary>
|
||||
[JsonPropertyName("is_in_kev")]
|
||||
public bool? IsInKev { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether an exploit kit is available.
|
||||
/// </summary>
|
||||
[JsonPropertyName("exploit_kit_available")]
|
||||
public bool? ExploitKitAvailable { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Age of the public PoC in days (null if no PoC).
|
||||
/// </summary>
|
||||
[JsonPropertyName("poc_age_days")]
|
||||
public int? PocAgeDays { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// CVSS base score (0.0 to 10.0).
|
||||
/// </summary>
|
||||
[JsonPropertyName("cvss_base_score")]
|
||||
public double? CvssBaseScore { get; init; }
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// SRC (Source Confidence) signals
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
/// <summary>
|
||||
/// SBOM completeness percentage (0.0 to 1.0).
|
||||
/// </summary>
|
||||
[JsonPropertyName("sbom_completeness")]
|
||||
public double? SbomCompleteness { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether SBOM has verified signatures.
|
||||
/// </summary>
|
||||
[JsonPropertyName("sbom_signed")]
|
||||
public bool? SbomSigned { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of valid attestations.
|
||||
/// </summary>
|
||||
[JsonPropertyName("attestation_count")]
|
||||
public int? AttestationCount { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether dependency lineage is verified.
|
||||
/// </summary>
|
||||
[JsonPropertyName("lineage_verified")]
|
||||
public bool? LineageVerified { get; init; }
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// MIT (Mitigation Status) signals
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
/// <summary>
|
||||
/// VEX status string (not_affected, affected, fixed, under_investigation).
|
||||
/// </summary>
|
||||
[JsonPropertyName("vex_status")]
|
||||
public string? VexStatus { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// VEX justification string.
|
||||
/// </summary>
|
||||
[JsonPropertyName("vex_justification")]
|
||||
public string? VexJustification { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether a workaround is applied.
|
||||
/// </summary>
|
||||
[JsonPropertyName("workaround_applied")]
|
||||
public bool? WorkaroundApplied { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether network controls mitigate the vulnerability.
|
||||
/// </summary>
|
||||
[JsonPropertyName("network_controls_applied")]
|
||||
public bool? NetworkControlsApplied { get; init; }
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Metadata
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
/// <summary>
|
||||
/// Timestamp when these signals were collected.
|
||||
/// </summary>
|
||||
[JsonPropertyName("collected_at")]
|
||||
public DateTimeOffset? CollectedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// CVE identifier this input relates to.
|
||||
/// </summary>
|
||||
[JsonPropertyName("cve_id")]
|
||||
public string? CveId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Package URL (purl) this input relates to.
|
||||
/// </summary>
|
||||
[JsonPropertyName("purl")]
|
||||
public string? Purl { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Additional signals as key-value pairs for extensibility.
|
||||
/// </summary>
|
||||
[JsonPropertyName("additional_signals")]
|
||||
public ImmutableDictionary<string, object?>? AdditionalSignals { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Creates an empty signal input (all assumptions mode).
|
||||
/// </summary>
|
||||
public static EwsSignalInput Empty => new();
|
||||
|
||||
/// <summary>
|
||||
/// Checks if a signal is present for the specified dimension.
|
||||
/// </summary>
|
||||
public bool HasSignalForDimension(EwsDimension dimension) => dimension switch
|
||||
{
|
||||
EwsDimension.Reachability => ReachabilityTier.HasValue || CallGraphConfidence.HasValue || RuntimeTraceConfirmed.HasValue,
|
||||
EwsDimension.RuntimeSignals => InstrumentationCoverage.HasValue || RuntimeInvocationCount.HasValue || ApmActiveUsage.HasValue,
|
||||
EwsDimension.BackportEvidence => BackportDetected.HasValue || BackportConfidence.HasValue || VendorBackportConfirmed.HasValue,
|
||||
EwsDimension.Exploitability => EpssProbability.HasValue || IsInKev.HasValue || ExploitKitAvailable.HasValue || PocAgeDays.HasValue || CvssBaseScore.HasValue,
|
||||
EwsDimension.SourceConfidence => SbomCompleteness.HasValue || SbomSigned.HasValue || AttestationCount.HasValue || LineageVerified.HasValue,
|
||||
EwsDimension.MitigationStatus => VexStatus != null || WorkaroundApplied.HasValue || NetworkControlsApplied.HasValue,
|
||||
_ => false
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,109 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// GuardrailsEngine.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Implementation of guardrails enforcement for EWS scoring.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
|
||||
/// Applies guardrails (caps and floors) to EWS scores.
|
||||
/// Guardrails prevent extreme scores and ensure business logic constraints.
|
||||
/// </summary>
|
||||
public sealed class GuardrailsEngine : IGuardrailsEngine
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public GuardrailsResult Apply(
|
||||
int rawScore,
|
||||
EwsSignalInput signal,
|
||||
ImmutableArray<EwsDimensionScore> dimensions,
|
||||
EwsGuardrails guardrails)
|
||||
{
|
||||
var score = rawScore;
|
||||
var applied = new List<string>();
|
||||
|
||||
// Check for KEV floor first (highest priority)
|
||||
if (signal.IsInKev == true && score < guardrails.KevFloor)
|
||||
{
|
||||
score = guardrails.KevFloor;
|
||||
applied.Add($"kev_floor:{guardrails.KevFloor}");
|
||||
}
|
||||
|
||||
// Check for backport cap
|
||||
if ((signal.BackportDetected == true || signal.VendorBackportConfirmed == true)
|
||||
&& score > guardrails.BackportedCap)
|
||||
{
|
||||
score = guardrails.BackportedCap;
|
||||
applied.Add($"backported_cap:{guardrails.BackportedCap}");
|
||||
}
|
||||
|
||||
// Check for not_affected cap
|
||||
if (IsNotAffected(signal) && score > guardrails.NotAffectedCap)
|
||||
{
|
||||
score = guardrails.NotAffectedCap;
|
||||
applied.Add($"not_affected_cap:{guardrails.NotAffectedCap}");
|
||||
}
|
||||
|
||||
// Check for runtime floor (if runtime shows active usage)
|
||||
if (HasActiveRuntimeUsage(signal) && score < guardrails.RuntimeFloor)
|
||||
{
|
||||
score = guardrails.RuntimeFloor;
|
||||
applied.Add($"runtime_floor:{guardrails.RuntimeFloor}");
|
||||
}
|
||||
|
||||
// Check for speculative cap (all assumptions, low confidence)
|
||||
if (IsSpeculative(dimensions) && score > guardrails.SpeculativeCap)
|
||||
{
|
||||
score = guardrails.SpeculativeCap;
|
||||
applied.Add($"speculative_cap:{guardrails.SpeculativeCap}");
|
||||
}
|
||||
|
||||
return new GuardrailsResult
|
||||
{
|
||||
AdjustedScore = Math.Clamp(score, 0, 100),
|
||||
OriginalScore = rawScore,
|
||||
AppliedGuardrails = applied.ToImmutableArray()
|
||||
};
|
||||
}
|
||||
|
||||
private static bool IsNotAffected(EwsSignalInput signal)
|
||||
{
|
||||
return signal.VexStatus?.Equals("not_affected", StringComparison.OrdinalIgnoreCase) == true
|
||||
|| signal.VexStatus?.Equals("fixed", StringComparison.OrdinalIgnoreCase) == true;
|
||||
}
|
||||
|
||||
private static bool HasActiveRuntimeUsage(EwsSignalInput signal)
|
||||
{
|
||||
return signal.ApmActiveUsage == true
|
||||
|| (signal.RuntimeInvocationCount.HasValue && signal.RuntimeInvocationCount.Value > 0);
|
||||
}
|
||||
|
||||
private static bool IsSpeculative(ImmutableArray<EwsDimensionScore> dimensions)
|
||||
{
|
||||
if (dimensions.IsDefaultOrEmpty)
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
// Calculate weighted confidence
|
||||
var totalWeight = 0.0;
|
||||
var weightedConfidence = 0.0;
|
||||
|
||||
foreach (var dim in dimensions)
|
||||
{
|
||||
totalWeight += dim.Weight;
|
||||
weightedConfidence += dim.Confidence * dim.Weight;
|
||||
}
|
||||
|
||||
if (totalWeight > 0)
|
||||
{
|
||||
var avgConfidence = weightedConfidence / totalWeight;
|
||||
return avgConfidence < 0.3; // Less than 30% confidence = speculative
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,46 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IEwsCalculator.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Interface for the unified Evidence-Weighted Score calculator.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
|
||||
/// Unified calculator for Evidence-Weighted Scores (EWS).
|
||||
/// Orchestrates 6-dimension normalization, weighting, and guardrails.
|
||||
/// </summary>
|
||||
public interface IEwsCalculator
|
||||
{
|
||||
/// <summary>
|
||||
/// Calculates a composite EWS from raw signals.
|
||||
/// </summary>
|
||||
/// <param name="signal">The raw signal input.</param>
|
||||
/// <param name="weights">Optional custom weights (defaults to EwsDimensionWeights.Default).</param>
|
||||
/// <param name="guardrails">Optional guardrails configuration (defaults to EwsGuardrails.Default).</param>
|
||||
/// <returns>The composite EWS result.</returns>
|
||||
EwsCompositeScore Calculate(
|
||||
EwsSignalInput signal,
|
||||
EwsDimensionWeights? weights = null,
|
||||
EwsGuardrails? guardrails = null);
|
||||
|
||||
/// <summary>
|
||||
/// Calculates a single dimension score from raw signals.
|
||||
/// </summary>
|
||||
/// <param name="dimension">The dimension to calculate.</param>
|
||||
/// <param name="signal">The raw signal input.</param>
|
||||
/// <param name="weight">The weight to assign to this dimension.</param>
|
||||
/// <returns>The dimension score.</returns>
|
||||
EwsDimensionScore CalculateDimension(
|
||||
EwsDimension dimension,
|
||||
EwsSignalInput signal,
|
||||
double weight);
|
||||
|
||||
/// <summary>
|
||||
/// Gets the normalizer for a specific dimension.
|
||||
/// </summary>
|
||||
/// <param name="dimension">The dimension.</param>
|
||||
/// <returns>The normalizer for that dimension.</returns>
|
||||
IEwsDimensionNormalizer GetNormalizer(EwsDimension dimension);
|
||||
}
|
||||
@@ -0,0 +1,47 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IEwsDimensionNormalizer.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Pluggable interface for normalizing signal inputs to dimension scores.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
|
||||
/// Interface for normalizing raw signal inputs to a canonical 0-100 dimension score.
|
||||
/// Each dimension has its own normalizer implementation that handles the specific
|
||||
/// signal types and normalization logic for that dimension.
|
||||
/// </summary>
|
||||
public interface IEwsDimensionNormalizer
|
||||
{
|
||||
/// <summary>
|
||||
/// The dimension this normalizer handles.
|
||||
/// </summary>
|
||||
EwsDimension Dimension { get; }
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes a raw signal value to a dimension score in range [0, 100].
|
||||
/// </summary>
|
||||
/// <param name="signal">The raw signal input for this dimension.</param>
|
||||
/// <returns>Normalized score in range [0, 100], where:
|
||||
/// - 0 = lowest risk/impact (e.g., unreachable, fully mitigated)
|
||||
/// - 100 = highest risk/impact (e.g., reachable, actively exploited)
|
||||
/// </returns>
|
||||
int Normalize(EwsSignalInput signal);
|
||||
|
||||
/// <summary>
|
||||
/// Gets the confidence level for this normalization (0.0 to 1.0).
|
||||
/// Lower confidence when assumptions are made or data is missing.
|
||||
/// </summary>
|
||||
/// <param name="signal">The raw signal input for this dimension.</param>
|
||||
/// <returns>Confidence level from 0.0 (all assumptions) to 1.0 (verified evidence).</returns>
|
||||
double GetConfidence(EwsSignalInput signal);
|
||||
|
||||
/// <summary>
|
||||
/// Gets a human-readable explanation of how the score was derived.
|
||||
/// </summary>
|
||||
/// <param name="signal">The raw signal input for this dimension.</param>
|
||||
/// <param name="normalizedScore">The normalized score that was calculated.</param>
|
||||
/// <returns>Explanation suitable for audit and operator review.</returns>
|
||||
string GetExplanation(EwsSignalInput signal, int normalizedScore);
|
||||
}
|
||||
@@ -0,0 +1,57 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IGuardrailsEngine.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Interface for guardrails enforcement in EWS scoring.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
|
||||
/// Result of guardrails application.
|
||||
/// </summary>
|
||||
public sealed record GuardrailsResult
|
||||
{
|
||||
/// <summary>
|
||||
/// The adjusted score after applying guardrails.
|
||||
/// </summary>
|
||||
public required int AdjustedScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The original score before guardrails.
|
||||
/// </summary>
|
||||
public required int OriginalScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// List of guardrails that were applied.
|
||||
/// </summary>
|
||||
public required ImmutableArray<string> AppliedGuardrails { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether the score was modified.
|
||||
/// </summary>
|
||||
public bool WasModified => AdjustedScore != OriginalScore;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Engine for applying guardrails (caps and floors) to EWS scores.
|
||||
/// Guardrails prevent extreme scores in edge cases.
|
||||
/// </summary>
|
||||
public interface IGuardrailsEngine
|
||||
{
|
||||
/// <summary>
|
||||
/// Applies guardrails to a raw composite score.
|
||||
/// </summary>
|
||||
/// <param name="rawScore">The raw composite score before guardrails.</param>
|
||||
/// <param name="signal">The signal input that produced this score.</param>
|
||||
/// <param name="dimensions">The individual dimension scores.</param>
|
||||
/// <param name="guardrails">The guardrails configuration to apply.</param>
|
||||
/// <returns>The result with adjusted score and list of applied guardrails.</returns>
|
||||
GuardrailsResult Apply(
|
||||
int rawScore,
|
||||
EwsSignalInput signal,
|
||||
ImmutableArray<EwsDimensionScore> dimensions,
|
||||
EwsGuardrails guardrails);
|
||||
}
|
||||
@@ -0,0 +1,94 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// BackportEvidenceNormalizer.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Normalizer for BKP (Backport Evidence) dimension.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes backport evidence to the BKP dimension score.
|
||||
/// Higher score = more evidence of vulnerability being present (not backported).
|
||||
/// Lower score = strong evidence of backport (vulnerability patched).
|
||||
/// </summary>
|
||||
public sealed class BackportEvidenceNormalizer : IEwsDimensionNormalizer
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public EwsDimension Dimension => EwsDimension.BackportEvidence;
|
||||
|
||||
/// <inheritdoc />
|
||||
public int Normalize(EwsSignalInput signal)
|
||||
{
|
||||
// Vendor confirmation is strongest signal
|
||||
if (signal.VendorBackportConfirmed == true)
|
||||
{
|
||||
return 5; // Almost certainly patched
|
||||
}
|
||||
|
||||
// Binary analysis detected backport
|
||||
if (signal.BackportDetected == true)
|
||||
{
|
||||
if (signal.BackportConfidence.HasValue)
|
||||
{
|
||||
// Lower score = more likely patched
|
||||
return (int)((1.0 - signal.BackportConfidence.Value) * 30);
|
||||
}
|
||||
return 15; // Backport detected with unknown confidence
|
||||
}
|
||||
|
||||
// Binary analysis explicitly found no backport
|
||||
if (signal.BackportDetected == false)
|
||||
{
|
||||
if (signal.BackportConfidence.HasValue)
|
||||
{
|
||||
// Higher confidence in "no backport" = higher risk score
|
||||
return (int)(70 + signal.BackportConfidence.Value * 30);
|
||||
}
|
||||
return 80; // Likely vulnerable
|
||||
}
|
||||
|
||||
// No backport analysis performed - assume vulnerable (conservative)
|
||||
return 75;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public double GetConfidence(EwsSignalInput signal)
|
||||
{
|
||||
if (signal.VendorBackportConfirmed.HasValue)
|
||||
{
|
||||
return 0.95; // Vendor confirmation is highly reliable
|
||||
}
|
||||
|
||||
if (signal.BackportDetected.HasValue)
|
||||
{
|
||||
// Use backport confidence if available
|
||||
return signal.BackportConfidence ?? 0.6;
|
||||
}
|
||||
|
||||
return 0.2; // No analysis, low confidence
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public string GetExplanation(EwsSignalInput signal, int normalizedScore)
|
||||
{
|
||||
if (signal.VendorBackportConfirmed == true)
|
||||
{
|
||||
return "Vendor confirmed backport; vulnerability patched in this build";
|
||||
}
|
||||
|
||||
if (signal.BackportDetected == true)
|
||||
{
|
||||
var conf = signal.BackportConfidence?.ToString("P0") ?? "unknown";
|
||||
return $"Binary analysis detected backport with {conf} confidence";
|
||||
}
|
||||
|
||||
if (signal.BackportDetected == false)
|
||||
{
|
||||
var conf = signal.BackportConfidence?.ToString("P0") ?? "unknown";
|
||||
return $"Binary analysis found no backport evidence ({conf} confidence)";
|
||||
}
|
||||
|
||||
return "No backport analysis available; assuming vulnerable";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,152 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ExploitabilityNormalizer.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Normalizer for XPL (Exploitability) dimension.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes exploitability signals to the XPL dimension score.
|
||||
/// Maps EPSS, KEV, exploit availability, and CVSS to a 0-100 score.
|
||||
/// </summary>
|
||||
public sealed class ExploitabilityNormalizer : IEwsDimensionNormalizer
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public EwsDimension Dimension => EwsDimension.Exploitability;
|
||||
|
||||
/// <inheritdoc />
|
||||
public int Normalize(EwsSignalInput signal)
|
||||
{
|
||||
// KEV is the strongest signal
|
||||
if (signal.IsInKev == true)
|
||||
{
|
||||
return 100; // Known exploited = maximum exploitability
|
||||
}
|
||||
|
||||
var score = 0.0;
|
||||
var weights = 0.0;
|
||||
|
||||
// EPSS probability (most predictive)
|
||||
if (signal.EpssProbability.HasValue)
|
||||
{
|
||||
weights += 0.4;
|
||||
// EPSS is already 0-1, scale to 0-100
|
||||
// Apply slight non-linear scaling to emphasize high-EPSS items
|
||||
var epssScore = Math.Pow(signal.EpssProbability.Value, 0.7) * 100;
|
||||
score += epssScore * 0.4;
|
||||
}
|
||||
|
||||
// Exploit kit availability
|
||||
if (signal.ExploitKitAvailable == true)
|
||||
{
|
||||
weights += 0.25;
|
||||
score += 90 * 0.25; // Very high if exploit kit exists
|
||||
}
|
||||
else if (signal.ExploitKitAvailable == false)
|
||||
{
|
||||
weights += 0.25;
|
||||
score += 20 * 0.25; // Lower if explicitly no kit
|
||||
}
|
||||
|
||||
// PoC age (older PoC = more likely weaponized)
|
||||
if (signal.PocAgeDays.HasValue)
|
||||
{
|
||||
weights += 0.15;
|
||||
var pocScore = signal.PocAgeDays.Value switch
|
||||
{
|
||||
<= 7 => 60, // Fresh PoC
|
||||
<= 30 => 75, // 1 month old
|
||||
<= 90 => 85, // 3 months old
|
||||
<= 365 => 90, // 1 year old
|
||||
_ => 95 // Very old = likely weaponized
|
||||
};
|
||||
score += pocScore * 0.15;
|
||||
}
|
||||
|
||||
// CVSS base score
|
||||
if (signal.CvssBaseScore.HasValue)
|
||||
{
|
||||
weights += 0.2;
|
||||
// Map 0-10 to 0-100
|
||||
score += signal.CvssBaseScore.Value * 10 * 0.2;
|
||||
}
|
||||
|
||||
if (weights > 0)
|
||||
{
|
||||
return (int)Math.Round(score / weights);
|
||||
}
|
||||
|
||||
// No signals - default to moderate exploitability based on CVSS if available
|
||||
return 50;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public double GetConfidence(EwsSignalInput signal)
|
||||
{
|
||||
if (signal.IsInKev == true)
|
||||
{
|
||||
return 1.0; // Absolute certainty
|
||||
}
|
||||
|
||||
var confidence = 0.0;
|
||||
|
||||
if (signal.EpssProbability.HasValue)
|
||||
{
|
||||
confidence = Math.Max(confidence, 0.85);
|
||||
}
|
||||
|
||||
if (signal.ExploitKitAvailable.HasValue)
|
||||
{
|
||||
confidence = Math.Max(confidence, 0.9);
|
||||
}
|
||||
|
||||
if (signal.PocAgeDays.HasValue)
|
||||
{
|
||||
confidence = Math.Max(confidence, 0.7);
|
||||
}
|
||||
|
||||
if (signal.CvssBaseScore.HasValue)
|
||||
{
|
||||
confidence = Math.Max(confidence, 0.5); // CVSS alone is less predictive
|
||||
}
|
||||
|
||||
return confidence > 0 ? confidence : 0.3; // Low confidence if no data
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public string GetExplanation(EwsSignalInput signal, int normalizedScore)
|
||||
{
|
||||
var parts = new List<string>();
|
||||
|
||||
if (signal.IsInKev == true)
|
||||
{
|
||||
parts.Add("CVE is in CISA KEV (Known Exploited Vulnerabilities)");
|
||||
}
|
||||
|
||||
if (signal.EpssProbability.HasValue)
|
||||
{
|
||||
parts.Add($"EPSS probability {signal.EpssProbability.Value:P2}");
|
||||
}
|
||||
|
||||
if (signal.ExploitKitAvailable == true)
|
||||
{
|
||||
parts.Add("exploit kit available");
|
||||
}
|
||||
|
||||
if (signal.PocAgeDays.HasValue)
|
||||
{
|
||||
parts.Add($"PoC available for {signal.PocAgeDays.Value} days");
|
||||
}
|
||||
|
||||
if (signal.CvssBaseScore.HasValue)
|
||||
{
|
||||
parts.Add($"CVSS base score {signal.CvssBaseScore.Value:F1}");
|
||||
}
|
||||
|
||||
return parts.Count > 0
|
||||
? string.Join(", ", parts)
|
||||
: "No exploitability signals; assuming moderate risk";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,118 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// MitigationStatusNormalizer.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Normalizer for MIT (Mitigation Status) dimension.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes mitigation status signals to the MIT dimension score.
|
||||
/// Lower score = strong mitigation in place (low residual risk).
|
||||
/// Higher score = no mitigation or vulnerable status.
|
||||
/// </summary>
|
||||
public sealed class MitigationStatusNormalizer : IEwsDimensionNormalizer
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public EwsDimension Dimension => EwsDimension.MitigationStatus;
|
||||
|
||||
/// <inheritdoc />
|
||||
public int Normalize(EwsSignalInput signal)
|
||||
{
|
||||
// VEX status is the primary signal
|
||||
var baseScore = ParseVexStatus(signal.VexStatus);
|
||||
|
||||
// Adjust for workarounds
|
||||
if (signal.WorkaroundApplied == true)
|
||||
{
|
||||
baseScore = Math.Max(0, baseScore - 30);
|
||||
}
|
||||
|
||||
// Adjust for network controls
|
||||
if (signal.NetworkControlsApplied == true)
|
||||
{
|
||||
baseScore = Math.Max(0, baseScore - 20);
|
||||
}
|
||||
|
||||
return Math.Clamp(baseScore, 0, 100);
|
||||
}
|
||||
|
||||
private static int ParseVexStatus(string? vexStatus)
|
||||
{
|
||||
return vexStatus?.ToLowerInvariant() switch
|
||||
{
|
||||
"not_affected" => 5, // Confirmed not affected
|
||||
"fixed" => 10, // Fix applied
|
||||
"under_investigation" => 60, // Unknown yet
|
||||
"affected" => 90, // Confirmed vulnerable
|
||||
"exploitable" => 100, // Actively exploitable
|
||||
null => 75, // No VEX = assume affected
|
||||
_ => 75 // Unknown status = assume affected
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public double GetConfidence(EwsSignalInput signal)
|
||||
{
|
||||
if (!string.IsNullOrEmpty(signal.VexStatus))
|
||||
{
|
||||
// VEX status provides good confidence
|
||||
var conf = signal.VexStatus.ToLowerInvariant() switch
|
||||
{
|
||||
"not_affected" => 0.9,
|
||||
"fixed" => 0.85,
|
||||
"affected" => 0.85,
|
||||
"exploitable" => 0.95,
|
||||
"under_investigation" => 0.4,
|
||||
_ => 0.5
|
||||
};
|
||||
|
||||
// Boost confidence if we also have justification
|
||||
if (!string.IsNullOrEmpty(signal.VexJustification))
|
||||
{
|
||||
conf = Math.Min(1.0, conf + 0.1);
|
||||
}
|
||||
|
||||
return conf;
|
||||
}
|
||||
|
||||
// No VEX but have compensating controls
|
||||
if (signal.WorkaroundApplied.HasValue || signal.NetworkControlsApplied.HasValue)
|
||||
{
|
||||
return 0.6;
|
||||
}
|
||||
|
||||
return 0.2; // No mitigation data
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public string GetExplanation(EwsSignalInput signal, int normalizedScore)
|
||||
{
|
||||
var parts = new List<string>();
|
||||
|
||||
if (!string.IsNullOrEmpty(signal.VexStatus))
|
||||
{
|
||||
parts.Add($"VEX status: {signal.VexStatus}");
|
||||
|
||||
if (!string.IsNullOrEmpty(signal.VexJustification))
|
||||
{
|
||||
parts.Add($"justification: {signal.VexJustification}");
|
||||
}
|
||||
}
|
||||
|
||||
if (signal.WorkaroundApplied == true)
|
||||
{
|
||||
parts.Add("workaround applied");
|
||||
}
|
||||
|
||||
if (signal.NetworkControlsApplied == true)
|
||||
{
|
||||
parts.Add("network controls in place");
|
||||
}
|
||||
|
||||
return parts.Count > 0
|
||||
? string.Join(", ", parts)
|
||||
: "No mitigation status available; assuming affected";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,122 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ReachabilityNormalizer.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Normalizer for RCH (Reachability) dimension.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes reachability signals to the RCH dimension score.
|
||||
/// Maps R0-R4 tiers and call graph confidence to a 0-100 score.
|
||||
/// </summary>
|
||||
public sealed class ReachabilityNormalizer : IEwsDimensionNormalizer
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public EwsDimension Dimension => EwsDimension.Reachability;
|
||||
|
||||
/// <inheritdoc />
|
||||
public int Normalize(EwsSignalInput signal)
|
||||
{
|
||||
// Reachability tier takes precedence
|
||||
if (signal.ReachabilityTier.HasValue)
|
||||
{
|
||||
var tierScore = signal.ReachabilityTier.Value switch
|
||||
{
|
||||
0 => 0, // R0: Unreachable
|
||||
1 => 20, // R1: Present in dependency but not imported
|
||||
2 => 40, // R2: Imported but not called
|
||||
3 => 70, // R3: Called but not reachable from entrypoint
|
||||
4 => 100, // R4: Reachable from entrypoint
|
||||
_ => 50 // Unknown tier - moderate assumption
|
||||
};
|
||||
|
||||
// Adjust by call graph confidence if available
|
||||
if (signal.CallGraphConfidence.HasValue)
|
||||
{
|
||||
// Higher confidence = trust the tier more
|
||||
// Lower confidence = pull toward middle (50)
|
||||
var confidence = signal.CallGraphConfidence.Value;
|
||||
tierScore = (int)(tierScore * confidence + 50 * (1 - confidence));
|
||||
}
|
||||
|
||||
// Runtime trace confirmation boosts the score if reachable
|
||||
if (signal.RuntimeTraceConfirmed == true && tierScore >= 70)
|
||||
{
|
||||
tierScore = Math.Min(100, tierScore + 15);
|
||||
}
|
||||
|
||||
return Math.Clamp(tierScore, 0, 100);
|
||||
}
|
||||
|
||||
// Fall back to call graph confidence only
|
||||
if (signal.CallGraphConfidence.HasValue)
|
||||
{
|
||||
// High confidence but no tier = assume moderate reachability
|
||||
return (int)(50 * signal.CallGraphConfidence.Value) + 25;
|
||||
}
|
||||
|
||||
// Runtime trace only
|
||||
if (signal.RuntimeTraceConfirmed == true)
|
||||
{
|
||||
return 85; // Strong evidence of reachability
|
||||
}
|
||||
|
||||
// No signals - assume reachable (conservative)
|
||||
return 75;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public double GetConfidence(EwsSignalInput signal)
|
||||
{
|
||||
if (signal.ReachabilityTier.HasValue)
|
||||
{
|
||||
// Tier with call graph confidence
|
||||
if (signal.CallGraphConfidence.HasValue)
|
||||
{
|
||||
return Math.Min(1.0, 0.7 + signal.CallGraphConfidence.Value * 0.3);
|
||||
}
|
||||
return 0.7; // Tier alone
|
||||
}
|
||||
|
||||
if (signal.CallGraphConfidence.HasValue)
|
||||
{
|
||||
return signal.CallGraphConfidence.Value * 0.6;
|
||||
}
|
||||
|
||||
if (signal.RuntimeTraceConfirmed == true)
|
||||
{
|
||||
return 0.9; // High confidence from runtime
|
||||
}
|
||||
|
||||
return 0.2; // No evidence, pure assumption
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public string GetExplanation(EwsSignalInput signal, int normalizedScore)
|
||||
{
|
||||
if (signal.ReachabilityTier.HasValue)
|
||||
{
|
||||
var tierName = signal.ReachabilityTier.Value switch
|
||||
{
|
||||
0 => "unreachable",
|
||||
1 => "in-dependency-not-imported",
|
||||
2 => "imported-not-called",
|
||||
3 => "called-not-entrypoint-reachable",
|
||||
4 => "entrypoint-reachable",
|
||||
_ => "unknown-tier"
|
||||
};
|
||||
|
||||
var confidence = signal.CallGraphConfidence?.ToString("P0") ?? "unknown";
|
||||
return $"Reachability tier R{signal.ReachabilityTier.Value} ({tierName}), call graph confidence {confidence}";
|
||||
}
|
||||
|
||||
if (signal.RuntimeTraceConfirmed == true)
|
||||
{
|
||||
return "Runtime trace confirmed reachability";
|
||||
}
|
||||
|
||||
return "No reachability analysis; assumed reachable (conservative)";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,116 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// RuntimeSignalsNormalizer.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Normalizer for RTS (Runtime Signals) dimension.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes runtime signals to the RTS dimension score.
|
||||
/// Higher score = more evidence of runtime activity.
|
||||
/// </summary>
|
||||
public sealed class RuntimeSignalsNormalizer : IEwsDimensionNormalizer
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public EwsDimension Dimension => EwsDimension.RuntimeSignals;
|
||||
|
||||
/// <inheritdoc />
|
||||
public int Normalize(EwsSignalInput signal)
|
||||
{
|
||||
var score = 0.0;
|
||||
var weights = 0.0;
|
||||
|
||||
// Instrumentation coverage
|
||||
if (signal.InstrumentationCoverage.HasValue)
|
||||
{
|
||||
// Higher coverage = more confidence in runtime data
|
||||
// If coverage is high but no invocations, that's good (not used)
|
||||
// If coverage is low, we can't trust the data
|
||||
weights += 0.3;
|
||||
score += signal.InstrumentationCoverage.Value * 0.3;
|
||||
}
|
||||
|
||||
// Runtime invocation count
|
||||
if (signal.RuntimeInvocationCount.HasValue)
|
||||
{
|
||||
weights += 0.4;
|
||||
// Logarithmic scale for invocations
|
||||
// 0 = 0, 1-10 = 25, 11-100 = 50, 101-1000 = 75, 1000+ = 100
|
||||
var invScore = signal.RuntimeInvocationCount.Value switch
|
||||
{
|
||||
0 => 0.0,
|
||||
<= 10 => 0.25,
|
||||
<= 100 => 0.5,
|
||||
<= 1000 => 0.75,
|
||||
_ => 1.0
|
||||
};
|
||||
score += invScore * 0.4;
|
||||
}
|
||||
|
||||
// APM active usage
|
||||
if (signal.ApmActiveUsage.HasValue)
|
||||
{
|
||||
weights += 0.3;
|
||||
score += (signal.ApmActiveUsage.Value ? 1.0 : 0.0) * 0.3;
|
||||
}
|
||||
|
||||
if (weights > 0)
|
||||
{
|
||||
return (int)Math.Round(score / weights * 100);
|
||||
}
|
||||
|
||||
// No runtime signals - assume moderate risk (we don't know)
|
||||
return 50;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public double GetConfidence(EwsSignalInput signal)
|
||||
{
|
||||
var confidence = 0.0;
|
||||
|
||||
if (signal.InstrumentationCoverage.HasValue)
|
||||
{
|
||||
// Coverage itself tells us confidence
|
||||
confidence = Math.Max(confidence, signal.InstrumentationCoverage.Value);
|
||||
}
|
||||
|
||||
if (signal.RuntimeInvocationCount.HasValue)
|
||||
{
|
||||
confidence = Math.Max(confidence, 0.8); // Good data point
|
||||
}
|
||||
|
||||
if (signal.ApmActiveUsage.HasValue)
|
||||
{
|
||||
confidence = Math.Max(confidence, 0.7);
|
||||
}
|
||||
|
||||
return confidence > 0 ? confidence : 0.2; // Low if no data
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public string GetExplanation(EwsSignalInput signal, int normalizedScore)
|
||||
{
|
||||
var parts = new List<string>();
|
||||
|
||||
if (signal.InstrumentationCoverage.HasValue)
|
||||
{
|
||||
parts.Add($"instrumentation coverage {signal.InstrumentationCoverage.Value:P0}");
|
||||
}
|
||||
|
||||
if (signal.RuntimeInvocationCount.HasValue)
|
||||
{
|
||||
parts.Add($"{signal.RuntimeInvocationCount.Value} runtime invocations observed");
|
||||
}
|
||||
|
||||
if (signal.ApmActiveUsage.HasValue)
|
||||
{
|
||||
parts.Add(signal.ApmActiveUsage.Value ? "APM shows active usage" : "APM shows no active usage");
|
||||
}
|
||||
|
||||
return parts.Count > 0
|
||||
? string.Join(", ", parts)
|
||||
: "No runtime signals available; assuming moderate activity";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,138 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// SourceConfidenceNormalizer.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Normalizer for SRC (Source Confidence) dimension.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes source confidence signals to the SRC dimension score.
|
||||
/// Higher score = less confidence in source data (higher uncertainty risk).
|
||||
/// Lower score = high confidence in source data.
|
||||
/// </summary>
|
||||
public sealed class SourceConfidenceNormalizer : IEwsDimensionNormalizer
|
||||
{
|
||||
/// <inheritdoc />
|
||||
public EwsDimension Dimension => EwsDimension.SourceConfidence;
|
||||
|
||||
/// <inheritdoc />
|
||||
public int Normalize(EwsSignalInput signal)
|
||||
{
|
||||
// This dimension is inverted: high confidence in source = low risk
|
||||
// We calculate confidence then invert
|
||||
|
||||
var confidenceScore = 0.0;
|
||||
var weights = 0.0;
|
||||
|
||||
// SBOM completeness
|
||||
if (signal.SbomCompleteness.HasValue)
|
||||
{
|
||||
weights += 0.35;
|
||||
confidenceScore += signal.SbomCompleteness.Value * 0.35;
|
||||
}
|
||||
|
||||
// SBOM signed
|
||||
if (signal.SbomSigned.HasValue)
|
||||
{
|
||||
weights += 0.25;
|
||||
confidenceScore += (signal.SbomSigned.Value ? 1.0 : 0.0) * 0.25;
|
||||
}
|
||||
|
||||
// Attestation count
|
||||
if (signal.AttestationCount.HasValue)
|
||||
{
|
||||
weights += 0.2;
|
||||
// More attestations = more confidence (diminishing returns)
|
||||
var attScore = signal.AttestationCount.Value switch
|
||||
{
|
||||
0 => 0.0,
|
||||
1 => 0.5,
|
||||
2 => 0.7,
|
||||
3 => 0.85,
|
||||
_ => 1.0
|
||||
};
|
||||
confidenceScore += attScore * 0.2;
|
||||
}
|
||||
|
||||
// Lineage verified
|
||||
if (signal.LineageVerified.HasValue)
|
||||
{
|
||||
weights += 0.2;
|
||||
confidenceScore += (signal.LineageVerified.Value ? 1.0 : 0.0) * 0.2;
|
||||
}
|
||||
|
||||
if (weights > 0)
|
||||
{
|
||||
var normalizedConfidence = confidenceScore / weights;
|
||||
// Invert: high confidence = low score (low risk from source uncertainty)
|
||||
return (int)Math.Round((1.0 - normalizedConfidence) * 100);
|
||||
}
|
||||
|
||||
// No source signals - assume high uncertainty
|
||||
return 80;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public double GetConfidence(EwsSignalInput signal)
|
||||
{
|
||||
var hasData = signal.SbomCompleteness.HasValue ||
|
||||
signal.SbomSigned.HasValue ||
|
||||
signal.AttestationCount.HasValue ||
|
||||
signal.LineageVerified.HasValue;
|
||||
|
||||
if (!hasData)
|
||||
{
|
||||
return 0.2;
|
||||
}
|
||||
|
||||
// Count how many signals we have
|
||||
var signalCount = 0;
|
||||
if (signal.SbomCompleteness.HasValue) signalCount++;
|
||||
if (signal.SbomSigned.HasValue) signalCount++;
|
||||
if (signal.AttestationCount.HasValue) signalCount++;
|
||||
if (signal.LineageVerified.HasValue) signalCount++;
|
||||
|
||||
// More signals = higher confidence in our assessment
|
||||
return 0.4 + (signalCount * 0.15);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public string GetExplanation(EwsSignalInput signal, int normalizedScore)
|
||||
{
|
||||
var parts = new List<string>();
|
||||
|
||||
if (signal.SbomCompleteness.HasValue)
|
||||
{
|
||||
parts.Add($"SBOM completeness {signal.SbomCompleteness.Value:P0}");
|
||||
}
|
||||
|
||||
if (signal.SbomSigned == true)
|
||||
{
|
||||
parts.Add("SBOM is signed");
|
||||
}
|
||||
else if (signal.SbomSigned == false)
|
||||
{
|
||||
parts.Add("SBOM is not signed");
|
||||
}
|
||||
|
||||
if (signal.AttestationCount.HasValue)
|
||||
{
|
||||
parts.Add($"{signal.AttestationCount.Value} attestation(s) available");
|
||||
}
|
||||
|
||||
if (signal.LineageVerified == true)
|
||||
{
|
||||
parts.Add("dependency lineage verified");
|
||||
}
|
||||
else if (signal.LineageVerified == false)
|
||||
{
|
||||
parts.Add("dependency lineage not verified");
|
||||
}
|
||||
|
||||
return parts.Count > 0
|
||||
? string.Join(", ", parts)
|
||||
: "No source confidence signals; assuming high uncertainty";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,217 @@
|
||||
// <copyright file="IDeltaIfPresentCalculator.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the BUSL-1.1.
|
||||
// </copyright>
|
||||
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Calculates hypothetical score changes if missing signals were present with various assumed values.
|
||||
/// This enables "what-if" analysis to help operators prioritize signal collection efforts.
|
||||
/// </summary>
|
||||
public interface IDeltaIfPresentCalculator
|
||||
{
|
||||
/// <summary>
|
||||
/// Calculate the hypothetical trust score delta if a specific missing signal were present.
|
||||
/// </summary>
|
||||
/// <param name="snapshot">Current signal snapshot.</param>
|
||||
/// <param name="signal">The missing signal to simulate.</param>
|
||||
/// <param name="assumedValue">The assumed value for the signal (0.0-1.0 normalized score).</param>
|
||||
/// <param name="weights">Optional signal weights.</param>
|
||||
/// <returns>Delta calculation result showing impact.</returns>
|
||||
DeltaIfPresentResult CalculateSingleSignalDelta(
|
||||
SignalSnapshot snapshot,
|
||||
string signal,
|
||||
double assumedValue,
|
||||
SignalWeights? weights = null);
|
||||
|
||||
/// <summary>
|
||||
/// Calculate hypothetical impacts for all missing signals at multiple assumed values.
|
||||
/// </summary>
|
||||
/// <param name="snapshot">Current signal snapshot.</param>
|
||||
/// <param name="weights">Optional signal weights.</param>
|
||||
/// <returns>Full delta-if-present analysis for all gaps.</returns>
|
||||
DeltaIfPresentAnalysis CalculateFullAnalysis(
|
||||
SignalSnapshot snapshot,
|
||||
SignalWeights? weights = null);
|
||||
|
||||
/// <summary>
|
||||
/// Calculate the best-case and worst-case score bounds if all missing signals were present.
|
||||
/// </summary>
|
||||
/// <param name="snapshot">Current signal snapshot.</param>
|
||||
/// <param name="weights">Optional signal weights.</param>
|
||||
/// <returns>Score bounds with completeness impact.</returns>
|
||||
ScoreBounds CalculateScoreBounds(
|
||||
SignalSnapshot snapshot,
|
||||
SignalWeights? weights = null);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of a single signal delta-if-present calculation.
|
||||
/// </summary>
|
||||
public sealed record DeltaIfPresentResult
|
||||
{
|
||||
/// <summary>
|
||||
/// The signal that was simulated as present.
|
||||
/// </summary>
|
||||
public required string Signal { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The current score without this signal.
|
||||
/// </summary>
|
||||
public required double CurrentScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The hypothetical score with this signal present at the assumed value.
|
||||
/// </summary>
|
||||
public required double HypotheticalScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The delta (hypothetical - current). Positive means score would increase.
|
||||
/// </summary>
|
||||
public double Delta => HypotheticalScore - CurrentScore;
|
||||
|
||||
/// <summary>
|
||||
/// The assumed value used for the simulation.
|
||||
/// </summary>
|
||||
public required double AssumedValue { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The weight of this signal in the scoring model.
|
||||
/// </summary>
|
||||
public required double SignalWeight { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Current entropy before adding signal.
|
||||
/// </summary>
|
||||
public required double CurrentEntropy { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Hypothetical entropy after adding signal.
|
||||
/// </summary>
|
||||
public required double HypotheticalEntropy { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Change in entropy (negative means entropy would decrease = less uncertainty).
|
||||
/// </summary>
|
||||
public double EntropyDelta => HypotheticalEntropy - CurrentEntropy;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Complete analysis of all missing signals with delta-if-present calculations.
|
||||
/// </summary>
|
||||
public sealed record DeltaIfPresentAnalysis
|
||||
{
|
||||
/// <summary>
|
||||
/// Current aggregate score.
|
||||
/// </summary>
|
||||
public required double CurrentScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Current entropy (uncertainty).
|
||||
/// </summary>
|
||||
public required double CurrentEntropy { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// List of missing signals with their potential impact at different assumed values.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<SignalDeltaScenarios> GapAnalysis { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Prioritized list of signals by maximum potential impact.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<string> PrioritizedGaps { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When this analysis was computed.
|
||||
/// </summary>
|
||||
public required DateTimeOffset ComputedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Delta scenarios for a single missing signal at various assumed values.
|
||||
/// </summary>
|
||||
public sealed record SignalDeltaScenarios
|
||||
{
|
||||
/// <summary>
|
||||
/// Signal name.
|
||||
/// </summary>
|
||||
public required string Signal { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Signal weight in scoring model.
|
||||
/// </summary>
|
||||
public required double Weight { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Why this signal is missing.
|
||||
/// </summary>
|
||||
public required SignalGapReason GapReason { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Delta if signal present with best-case value (lowest risk contribution).
|
||||
/// </summary>
|
||||
public required DeltaIfPresentResult BestCase { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Delta if signal present with worst-case value (highest risk contribution).
|
||||
/// </summary>
|
||||
public required DeltaIfPresentResult WorstCase { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Delta if signal present with prior/expected value.
|
||||
/// </summary>
|
||||
public required DeltaIfPresentResult PriorCase { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Maximum absolute delta magnitude across all scenarios.
|
||||
/// </summary>
|
||||
public double MaxImpact => Math.Max(Math.Abs(BestCase.Delta), Math.Abs(WorstCase.Delta));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Best-case and worst-case score bounds if all missing signals were present.
|
||||
/// </summary>
|
||||
public sealed record ScoreBounds
|
||||
{
|
||||
/// <summary>
|
||||
/// Current score with missing signals.
|
||||
/// </summary>
|
||||
public required double CurrentScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Minimum possible score (all missing signals at worst-case values).
|
||||
/// </summary>
|
||||
public required double MinimumScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Maximum possible score (all missing signals at best-case values).
|
||||
/// </summary>
|
||||
public required double MaximumScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Score range (max - min).
|
||||
/// </summary>
|
||||
public double Range => MaximumScore - MinimumScore;
|
||||
|
||||
/// <summary>
|
||||
/// Current entropy.
|
||||
/// </summary>
|
||||
public required double CurrentEntropy { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Entropy if all signals were present (would be 0).
|
||||
/// </summary>
|
||||
public double CompleteEntropy => 0.0;
|
||||
|
||||
/// <summary>
|
||||
/// Number of missing signals.
|
||||
/// </summary>
|
||||
public required int GapCount { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Percentage of score weight that is missing.
|
||||
/// </summary>
|
||||
public required double MissingWeightPercentage { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,35 @@
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Interface for impact score calculation.
|
||||
/// </summary>
|
||||
public interface IImpactScoreCalculator
|
||||
{
|
||||
/// <summary>
|
||||
/// Calculates the multi-factor impact score for unknowns.
|
||||
/// </summary>
|
||||
/// <param name="context">Impact context with environment, data sensitivity, fleet prevalence, SLA tier, and CVSS.</param>
|
||||
/// <param name="weights">Optional custom weights (uses defaults if null).</param>
|
||||
/// <returns>Calculated impact score with all component scores.</returns>
|
||||
ImpactScore Calculate(ImpactContext context, ImpactFactorWeights? weights = null);
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes an environment type to a score [0.0, 1.0].
|
||||
/// </summary>
|
||||
double NormalizeEnvironment(EnvironmentType environment);
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes a data sensitivity level to a score [0.0, 1.0].
|
||||
/// </summary>
|
||||
double NormalizeDataSensitivity(DataSensitivity sensitivity);
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes an SLA tier to a score [0.0, 1.0].
|
||||
/// </summary>
|
||||
double NormalizeSlaTier(SlaTier tier);
|
||||
|
||||
/// <summary>
|
||||
/// Normalizes a CVSS score [0.0, 10.0] to a score [0.0, 1.0].
|
||||
/// </summary>
|
||||
double NormalizeCvss(double cvssScore);
|
||||
}
|
||||
@@ -0,0 +1,42 @@
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Configurable weights for impact scoring factors.
|
||||
/// All weights are normalized to sum to 1.0.
|
||||
/// </summary>
|
||||
public sealed record ImpactFactorWeights
|
||||
{
|
||||
/// <summary>Default weights following advisory recommendations.</summary>
|
||||
public static readonly ImpactFactorWeights Default = new()
|
||||
{
|
||||
EnvironmentExposureWeight = 0.20,
|
||||
DataSensitivityWeight = 0.20,
|
||||
FleetPrevalenceWeight = 0.15,
|
||||
SlaTierWeight = 0.15,
|
||||
CvssSeverityWeight = 0.30
|
||||
};
|
||||
|
||||
/// <summary>Weight for environment exposure factor (prod/stage/dev).</summary>
|
||||
public required double EnvironmentExposureWeight { get; init; }
|
||||
|
||||
/// <summary>Weight for data sensitivity factor (PII, financial, etc.).</summary>
|
||||
public required double DataSensitivityWeight { get; init; }
|
||||
|
||||
/// <summary>Weight for fleet prevalence factor (how many assets affected).</summary>
|
||||
public required double FleetPrevalenceWeight { get; init; }
|
||||
|
||||
/// <summary>Weight for SLA tier factor (business criticality).</summary>
|
||||
public required double SlaTierWeight { get; init; }
|
||||
|
||||
/// <summary>Weight for CVSS severity factor.</summary>
|
||||
public required double CvssSeverityWeight { get; init; }
|
||||
|
||||
/// <summary>Sum of all weights (should equal 1.0 for normalized calculations).</summary>
|
||||
public double TotalWeight =>
|
||||
EnvironmentExposureWeight + DataSensitivityWeight + FleetPrevalenceWeight +
|
||||
SlaTierWeight + CvssSeverityWeight;
|
||||
|
||||
/// <summary>Validates that weights sum to approximately 1.0.</summary>
|
||||
public bool IsNormalized(double tolerance = 0.001) =>
|
||||
Math.Abs(TotalWeight - 1.0) < tolerance;
|
||||
}
|
||||
@@ -0,0 +1,177 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Environment type classification for exposure scoring.
|
||||
/// </summary>
|
||||
public enum EnvironmentType
|
||||
{
|
||||
/// <summary>Development environment - lowest exposure.</summary>
|
||||
Development = 0,
|
||||
|
||||
/// <summary>Testing/QA environment.</summary>
|
||||
Testing = 1,
|
||||
|
||||
/// <summary>Staging/Pre-production environment.</summary>
|
||||
Staging = 2,
|
||||
|
||||
/// <summary>Production environment - highest exposure.</summary>
|
||||
Production = 3
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Data sensitivity classification for impact scoring.
|
||||
/// </summary>
|
||||
public enum DataSensitivity
|
||||
{
|
||||
/// <summary>Public or non-sensitive data.</summary>
|
||||
Public = 0,
|
||||
|
||||
/// <summary>Internal/company-confidential data.</summary>
|
||||
Internal = 1,
|
||||
|
||||
/// <summary>Contains PII (Personally Identifiable Information).</summary>
|
||||
Pii = 2,
|
||||
|
||||
/// <summary>Contains financial data.</summary>
|
||||
Financial = 3,
|
||||
|
||||
/// <summary>Contains healthcare/PHI data.</summary>
|
||||
Healthcare = 4,
|
||||
|
||||
/// <summary>Contains classified/government data.</summary>
|
||||
Classified = 5
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// SLA tier for business criticality scoring.
|
||||
/// </summary>
|
||||
public enum SlaTier
|
||||
{
|
||||
/// <summary>Non-critical - can tolerate extended downtime.</summary>
|
||||
NonCritical = 0,
|
||||
|
||||
/// <summary>Standard - normal business operations.</summary>
|
||||
Standard = 1,
|
||||
|
||||
/// <summary>Important - customer-facing or revenue-impacting.</summary>
|
||||
Important = 2,
|
||||
|
||||
/// <summary>Critical - core business functionality.</summary>
|
||||
Critical = 3,
|
||||
|
||||
/// <summary>Mission-critical - business cannot operate without.</summary>
|
||||
MissionCritical = 4
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Input context for impact scoring calculation.
|
||||
/// </summary>
|
||||
public sealed record ImpactContext
|
||||
{
|
||||
/// <summary>Environment where the component is deployed.</summary>
|
||||
[JsonPropertyName("environment")]
|
||||
public required EnvironmentType Environment { get; init; }
|
||||
|
||||
/// <summary>Highest data sensitivity level accessed by the component.</summary>
|
||||
[JsonPropertyName("data_sensitivity")]
|
||||
public required DataSensitivity DataSensitivity { get; init; }
|
||||
|
||||
/// <summary>Proportion of fleet affected (0.0-1.0).</summary>
|
||||
[JsonPropertyName("fleet_prevalence")]
|
||||
public required double FleetPrevalence { get; init; }
|
||||
|
||||
/// <summary>SLA tier of the affected service.</summary>
|
||||
[JsonPropertyName("sla_tier")]
|
||||
public required SlaTier SlaTier { get; init; }
|
||||
|
||||
/// <summary>CVSS base score (0.0-10.0).</summary>
|
||||
[JsonPropertyName("cvss_score")]
|
||||
public required double CvssScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Creates a default context for unknowns (conservative scoring).
|
||||
/// </summary>
|
||||
public static ImpactContext DefaultForUnknowns() => new()
|
||||
{
|
||||
Environment = EnvironmentType.Production, // Assume worst-case
|
||||
DataSensitivity = DataSensitivity.Internal, // Conservative default
|
||||
FleetPrevalence = 0.5, // Assume moderate prevalence
|
||||
SlaTier = SlaTier.Standard, // Standard tier
|
||||
CvssScore = 5.0 // Medium severity default
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of impact score calculation.
|
||||
/// </summary>
|
||||
public sealed record ImpactScore
|
||||
{
|
||||
/// <summary>Final weighted impact score [0.0, 1.0].</summary>
|
||||
[JsonPropertyName("score")]
|
||||
public required double Score { get; init; }
|
||||
|
||||
/// <summary>Basis points representation (0-10000) for deterministic storage.</summary>
|
||||
[JsonPropertyName("basis_points")]
|
||||
public required int BasisPoints { get; init; }
|
||||
|
||||
/// <summary>Environment exposure component score [0.0, 1.0].</summary>
|
||||
[JsonPropertyName("env_exposure")]
|
||||
public required double EnvironmentExposure { get; init; }
|
||||
|
||||
/// <summary>Data sensitivity component score [0.0, 1.0].</summary>
|
||||
[JsonPropertyName("data_sensitivity")]
|
||||
public required double DataSensitivityScore { get; init; }
|
||||
|
||||
/// <summary>Fleet prevalence component score [0.0, 1.0].</summary>
|
||||
[JsonPropertyName("fleet_prevalence")]
|
||||
public required double FleetPrevalenceScore { get; init; }
|
||||
|
||||
/// <summary>SLA tier component score [0.0, 1.0].</summary>
|
||||
[JsonPropertyName("sla_tier")]
|
||||
public required double SlaTierScore { get; init; }
|
||||
|
||||
/// <summary>CVSS severity component score [0.0, 1.0].</summary>
|
||||
[JsonPropertyName("cvss_severity")]
|
||||
public required double CvssSeverityScore { get; init; }
|
||||
|
||||
/// <summary>When this score was calculated (UTC).</summary>
|
||||
[JsonPropertyName("calculated_at")]
|
||||
public required DateTimeOffset CalculatedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Creates an impact score from component scores and weights.
|
||||
/// </summary>
|
||||
public static ImpactScore Create(
|
||||
double envExposure,
|
||||
double dataSensitivity,
|
||||
double fleetPrevalence,
|
||||
double slaTier,
|
||||
double cvssSeverity,
|
||||
ImpactFactorWeights weights,
|
||||
DateTimeOffset calculatedAt)
|
||||
{
|
||||
var score =
|
||||
envExposure * weights.EnvironmentExposureWeight +
|
||||
dataSensitivity * weights.DataSensitivityWeight +
|
||||
fleetPrevalence * weights.FleetPrevalenceWeight +
|
||||
slaTier * weights.SlaTierWeight +
|
||||
cvssSeverity * weights.CvssSeverityWeight;
|
||||
|
||||
var clampedScore = Math.Clamp(score, 0.0, 1.0);
|
||||
var basisPoints = (int)Math.Round(clampedScore * 10000);
|
||||
|
||||
return new ImpactScore
|
||||
{
|
||||
Score = clampedScore,
|
||||
BasisPoints = basisPoints,
|
||||
EnvironmentExposure = envExposure,
|
||||
DataSensitivityScore = dataSensitivity,
|
||||
FleetPrevalenceScore = fleetPrevalence,
|
||||
SlaTierScore = slaTier,
|
||||
CvssSeverityScore = cvssSeverity,
|
||||
CalculatedAt = calculatedAt
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,127 @@
|
||||
using System.Diagnostics.Metrics;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Calculates multi-factor impact scores for unknowns using the formula:
|
||||
/// impact = w_env * EnvExposure + w_data * DataSensitivity + w_fleet * FleetPrevalence + w_sla * SLATier + w_cvss * CVSSSeverity
|
||||
/// </summary>
|
||||
public sealed class ImpactScoreCalculator : IImpactScoreCalculator
|
||||
{
|
||||
private static readonly Meter Meter = new("StellaOps.Policy.Determinization");
|
||||
private static readonly Histogram<double> ImpactHistogram = Meter.CreateHistogram<double>(
|
||||
"stellaops_determinization_impact_score",
|
||||
unit: "ratio",
|
||||
description: "Impact score for unknowns (0.0 = minimal impact, 1.0 = critical impact)");
|
||||
|
||||
private readonly ILogger<ImpactScoreCalculator> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public ImpactScoreCalculator(ILogger<ImpactScoreCalculator> logger, TimeProvider? timeProvider = null)
|
||||
{
|
||||
_logger = logger;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public ImpactScore Calculate(ImpactContext context, ImpactFactorWeights? weights = null)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(context);
|
||||
|
||||
var effectiveWeights = weights ?? ImpactFactorWeights.Default;
|
||||
|
||||
// Validate weights
|
||||
if (!effectiveWeights.IsNormalized())
|
||||
{
|
||||
_logger.LogWarning(
|
||||
"Impact factor weights are not normalized (total={Total:F4}); results may be unexpected",
|
||||
effectiveWeights.TotalWeight);
|
||||
}
|
||||
|
||||
// Normalize each dimension
|
||||
var envScore = NormalizeEnvironment(context.Environment);
|
||||
var dataScore = NormalizeDataSensitivity(context.DataSensitivity);
|
||||
var fleetScore = Math.Clamp(context.FleetPrevalence, 0.0, 1.0);
|
||||
var slaScore = NormalizeSlaTier(context.SlaTier);
|
||||
var cvssScore = NormalizeCvss(context.CvssScore);
|
||||
|
||||
// Create result with all components
|
||||
var result = ImpactScore.Create(
|
||||
envScore,
|
||||
dataScore,
|
||||
fleetScore,
|
||||
slaScore,
|
||||
cvssScore,
|
||||
effectiveWeights,
|
||||
_timeProvider.GetUtcNow());
|
||||
|
||||
_logger.LogDebug(
|
||||
"Calculated impact score {Score:F4} (basis points={BasisPoints}) from env={Env:F2}, data={Data:F2}, fleet={Fleet:F2}, sla={Sla:F2}, cvss={Cvss:F2}",
|
||||
result.Score,
|
||||
result.BasisPoints,
|
||||
envScore,
|
||||
dataScore,
|
||||
fleetScore,
|
||||
slaScore,
|
||||
cvssScore);
|
||||
|
||||
// Emit metric
|
||||
ImpactHistogram.Record(result.Score,
|
||||
new KeyValuePair<string, object?>("environment", context.Environment.ToString()),
|
||||
new KeyValuePair<string, object?>("data_sensitivity", context.DataSensitivity.ToString()));
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public double NormalizeEnvironment(EnvironmentType environment)
|
||||
{
|
||||
// Development = 0.0, Production = 1.0
|
||||
return environment switch
|
||||
{
|
||||
EnvironmentType.Development => 0.0,
|
||||
EnvironmentType.Testing => 0.33,
|
||||
EnvironmentType.Staging => 0.66,
|
||||
EnvironmentType.Production => 1.0,
|
||||
_ => 0.5 // Unknown defaults to moderate
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public double NormalizeDataSensitivity(DataSensitivity sensitivity)
|
||||
{
|
||||
// Public = 0.0, Classified = 1.0
|
||||
return sensitivity switch
|
||||
{
|
||||
DataSensitivity.Public => 0.0,
|
||||
DataSensitivity.Internal => 0.2,
|
||||
DataSensitivity.Pii => 0.5,
|
||||
DataSensitivity.Financial => 0.7,
|
||||
DataSensitivity.Healthcare => 0.8,
|
||||
DataSensitivity.Classified => 1.0,
|
||||
_ => 0.5 // Unknown defaults to moderate
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public double NormalizeSlaTier(SlaTier tier)
|
||||
{
|
||||
// NonCritical = 0.0, MissionCritical = 1.0
|
||||
return tier switch
|
||||
{
|
||||
SlaTier.NonCritical => 0.0,
|
||||
SlaTier.Standard => 0.25,
|
||||
SlaTier.Important => 0.5,
|
||||
SlaTier.Critical => 0.75,
|
||||
SlaTier.MissionCritical => 1.0,
|
||||
_ => 0.5 // Unknown defaults to moderate
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public double NormalizeCvss(double cvssScore)
|
||||
{
|
||||
// CVSS 0.0-10.0 -> 0.0-1.0
|
||||
return Math.Clamp(cvssScore / 10.0, 0.0, 1.0);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,160 @@
|
||||
using StellaOps.Policy.Determinization.Evidence;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using StellaOps.Policy.Scoring;
|
||||
using StellaOps.Policy.TrustLattice;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Score.v1 predicate format for DSSE-signable attestation.
|
||||
/// Contains all scoring dimensions in a single, deterministic payload.
|
||||
/// All numeric scores use basis points (0-10000) for bit-exact determinism.
|
||||
/// </summary>
|
||||
public sealed record ScoreV1Predicate
|
||||
{
|
||||
/// <summary>
|
||||
/// Predicate type URI for DSSE/In-Toto attestations.
|
||||
/// </summary>
|
||||
public const string PredicateType = "https://stella-ops.org/predicates/score/v1";
|
||||
|
||||
/// <summary>
|
||||
/// Artifact being scored (PURL or component identifier).
|
||||
/// </summary>
|
||||
public required string ArtifactId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Vulnerability identifier if applicable (CVE, GHSA, etc.).
|
||||
/// </summary>
|
||||
public string? VulnerabilityId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Final trust score in basis points (0-10000).
|
||||
/// </summary>
|
||||
public required int TrustScoreBps { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Risk tier derived from trust score.
|
||||
/// </summary>
|
||||
public required string Tier { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Lattice verdict from K4 logic evaluation.
|
||||
/// </summary>
|
||||
public required K4Value LatticeVerdict { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Uncertainty entropy in basis points (0-10000).
|
||||
/// </summary>
|
||||
public required int UncertaintyBps { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Individual dimension scores in basis points.
|
||||
/// </summary>
|
||||
public required ScoreDimensionsBps Dimensions { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Weights used for this scoring (in basis points).
|
||||
/// </summary>
|
||||
public required WeightsBps WeightsUsed { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Policy digest (SHA-256) for reproducibility.
|
||||
/// </summary>
|
||||
public required string PolicyDigest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Timestamp when score was computed (UTC).
|
||||
/// </summary>
|
||||
public required DateTimeOffset ComputedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Tenant/namespace scope.
|
||||
/// </summary>
|
||||
public string? TenantId { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Individual scoring dimension values in basis points.
|
||||
/// </summary>
|
||||
public sealed record ScoreDimensionsBps
|
||||
{
|
||||
/// <summary>
|
||||
/// Base severity score (from CVSS or equivalent) in basis points.
|
||||
/// </summary>
|
||||
public required int BaseSeverityBps { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Reachability score in basis points.
|
||||
/// </summary>
|
||||
public required int ReachabilityBps { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Evidence quality score in basis points.
|
||||
/// </summary>
|
||||
public required int EvidenceBps { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Provenance/supply-chain score in basis points.
|
||||
/// </summary>
|
||||
public required int ProvenanceBps { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// EPSS score in basis points (if available).
|
||||
/// </summary>
|
||||
public int? EpssBps { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// VEX status score in basis points (if available).
|
||||
/// </summary>
|
||||
public int? VexBps { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Risk tier enumeration for categorizing trust scores.
|
||||
/// </summary>
|
||||
public enum RiskTier
|
||||
{
|
||||
Info = 0,
|
||||
Low = 1,
|
||||
Medium = 2,
|
||||
High = 3,
|
||||
Critical = 4
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request for computing a trust score.
|
||||
/// </summary>
|
||||
public sealed record TrustScoreRequest
|
||||
{
|
||||
public required string ArtifactId { get; init; }
|
||||
public string? VulnerabilityId { get; init; }
|
||||
public string? TenantId { get; init; }
|
||||
public SignalSnapshot? Signals { get; init; }
|
||||
public ScorePolicy? PolicyOverride { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result from trust score computation with full explainability.
|
||||
/// </summary>
|
||||
public sealed record TrustScoreResult
|
||||
{
|
||||
/// <summary>
|
||||
/// The Score.v1 predicate suitable for attestation signing.
|
||||
/// </summary>
|
||||
public required ScoreV1Predicate Predicate { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Signal snapshot used for computation.
|
||||
/// </summary>
|
||||
public required SignalSnapshot SignalsUsed { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether the score computation succeeded.
|
||||
/// </summary>
|
||||
public required bool Success { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Error message if computation failed.
|
||||
/// </summary>
|
||||
public string? Error { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
namespace StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
|
||||
/// <summary>
|
||||
/// Provides observations to the triage queue evaluator.
|
||||
/// Implementations may read from a database, cache, or in-memory store.
|
||||
/// </summary>
|
||||
public interface ITriageObservationSource
|
||||
{
|
||||
/// <summary>
|
||||
/// Retrieve observations that are candidates for triage evaluation.
|
||||
/// The source should return observations that have not been evaluated recently
|
||||
/// (based on <see cref="TriageQueueOptions.MinEvaluationIntervalMinutes"/>).
|
||||
/// </summary>
|
||||
/// <param name="tenantId">Optional tenant filter. Null returns all tenants.</param>
|
||||
/// <param name="maxItems">Maximum number of observations to return.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Candidate observations.</returns>
|
||||
Task<IReadOnlyList<TriageObservation>> GetCandidatesAsync(
|
||||
string? tenantId = null,
|
||||
int maxItems = 500,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
namespace StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
|
||||
/// <summary>
|
||||
/// Evaluates a batch of observations and produces a priority-sorted triage queue.
|
||||
/// </summary>
|
||||
public interface ITriageQueueEvaluator
|
||||
{
|
||||
/// <summary>
|
||||
/// Evaluate observations and produce a triage queue snapshot sorted by decay urgency.
|
||||
/// </summary>
|
||||
/// <param name="observations">Observations to evaluate.</param>
|
||||
/// <param name="now">Reference time for decay calculation (deterministic).</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Sorted triage queue snapshot.</returns>
|
||||
Task<TriageQueueSnapshot> EvaluateAsync(
|
||||
IReadOnlyList<TriageObservation> observations,
|
||||
DateTimeOffset now,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Evaluate a single observation and determine if it should be queued.
|
||||
/// </summary>
|
||||
/// <param name="observation">The observation to evaluate.</param>
|
||||
/// <param name="now">Reference time for decay calculation.</param>
|
||||
/// <returns>Triage item, or null if the observation does not need triage.</returns>
|
||||
TriageItem? EvaluateSingle(TriageObservation observation, DateTimeOffset now);
|
||||
}
|
||||
@@ -0,0 +1,18 @@
|
||||
namespace StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
|
||||
/// <summary>
|
||||
/// Sink for stale observations that need re-analysis.
|
||||
/// Implementations may enqueue to an in-memory channel, message bus, or database table.
|
||||
/// </summary>
|
||||
public interface ITriageReanalysisSink
|
||||
{
|
||||
/// <summary>
|
||||
/// Enqueue stale observations for re-analysis.
|
||||
/// </summary>
|
||||
/// <param name="items">Triage items to re-analyse (already filtered to stale/approaching).</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Number of items successfully enqueued.</returns>
|
||||
Task<int> EnqueueAsync(
|
||||
IReadOnlyList<TriageItem> items,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
@@ -0,0 +1,94 @@
|
||||
using System.Collections.Concurrent;
|
||||
using System.Diagnostics.Metrics;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
|
||||
/// <summary>
|
||||
/// In-memory implementation of <see cref="ITriageReanalysisSink"/>.
|
||||
/// Stores enqueued items in a thread-safe collection for consumption by re-analysis workers.
|
||||
/// Suitable for single-node deployments, testing, and offline/air-gap scenarios.
|
||||
/// </summary>
|
||||
public sealed class InMemoryTriageReanalysisSink : ITriageReanalysisSink
|
||||
{
|
||||
private static readonly Meter Meter = new("StellaOps.Policy.Determinization");
|
||||
|
||||
private static readonly Counter<long> EnqueuedCounter = Meter.CreateCounter<long>(
|
||||
"stellaops_triage_inmemory_enqueued_total",
|
||||
unit: "{items}",
|
||||
description: "Items enqueued in the in-memory triage sink");
|
||||
|
||||
private static readonly Counter<long> DequeuedCounter = Meter.CreateCounter<long>(
|
||||
"stellaops_triage_inmemory_dequeued_total",
|
||||
unit: "{items}",
|
||||
description: "Items dequeued from the in-memory triage sink");
|
||||
|
||||
private readonly ConcurrentQueue<TriageItem> _queue = new();
|
||||
private readonly ILogger<InMemoryTriageReanalysisSink> _logger;
|
||||
|
||||
public InMemoryTriageReanalysisSink(ILogger<InMemoryTriageReanalysisSink> logger)
|
||||
{
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<int> EnqueueAsync(
|
||||
IReadOnlyList<TriageItem> items,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(items);
|
||||
|
||||
var enqueued = 0;
|
||||
foreach (var item in items)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
_queue.Enqueue(item);
|
||||
enqueued++;
|
||||
}
|
||||
|
||||
EnqueuedCounter.Add(enqueued);
|
||||
_logger.LogDebug("Enqueued {Count} triage items (queue depth: {Depth})", enqueued, _queue.Count);
|
||||
|
||||
return Task.FromResult(enqueued);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Try to dequeue the next item for re-analysis.
|
||||
/// </summary>
|
||||
/// <param name="item">The dequeued item, if available.</param>
|
||||
/// <returns>True if an item was dequeued.</returns>
|
||||
public bool TryDequeue(out TriageItem? item)
|
||||
{
|
||||
var result = _queue.TryDequeue(out item);
|
||||
if (result)
|
||||
DequeuedCounter.Add(1);
|
||||
return result;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Drain all pending items.
|
||||
/// </summary>
|
||||
/// <returns>All pending triage items.</returns>
|
||||
public IReadOnlyList<TriageItem> DrainAll()
|
||||
{
|
||||
var items = new List<TriageItem>();
|
||||
while (_queue.TryDequeue(out var item))
|
||||
{
|
||||
items.Add(item);
|
||||
}
|
||||
|
||||
if (items.Count > 0)
|
||||
DequeuedCounter.Add(items.Count);
|
||||
|
||||
return items;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Current queue depth.
|
||||
/// </summary>
|
||||
public int Count => _queue.Count;
|
||||
|
||||
/// <summary>
|
||||
/// Peek at all pending items without removing them.
|
||||
/// </summary>
|
||||
public IReadOnlyList<TriageItem> PeekAll() => _queue.ToArray();
|
||||
}
|
||||
@@ -0,0 +1,172 @@
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
|
||||
/// <summary>
|
||||
/// Priority classification for triage items based on decay urgency.
|
||||
/// </summary>
|
||||
public enum TriagePriority
|
||||
{
|
||||
/// <summary>No action needed — observation is fresh.</summary>
|
||||
None = 0,
|
||||
|
||||
/// <summary>Observation approaching staleness (decay multiplier 0.50–0.70).</summary>
|
||||
Low = 1,
|
||||
|
||||
/// <summary>Observation is stale (decay multiplier 0.30–0.50).</summary>
|
||||
Medium = 2,
|
||||
|
||||
/// <summary>Observation is heavily decayed (decay multiplier 0.10–0.30).</summary>
|
||||
High = 3,
|
||||
|
||||
/// <summary>Observation at or near floor — effectively no confidence (decay multiplier ≤ 0.10).</summary>
|
||||
Critical = 4
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a single unknown observation queued for triage.
|
||||
/// </summary>
|
||||
public sealed record TriageItem
|
||||
{
|
||||
/// <summary>CVE identifier.</summary>
|
||||
[JsonPropertyName("cve")]
|
||||
public required string Cve { get; init; }
|
||||
|
||||
/// <summary>Component PURL.</summary>
|
||||
[JsonPropertyName("purl")]
|
||||
public required string Purl { get; init; }
|
||||
|
||||
/// <summary>Tenant identifier for multi-tenant isolation.</summary>
|
||||
[JsonPropertyName("tenant_id")]
|
||||
public required string TenantId { get; init; }
|
||||
|
||||
/// <summary>The observation decay state.</summary>
|
||||
[JsonPropertyName("observation_decay")]
|
||||
public required ObservationDecay Decay { get; init; }
|
||||
|
||||
/// <summary>Current decay multiplier at evaluation time.</summary>
|
||||
[JsonPropertyName("current_multiplier")]
|
||||
public required double CurrentMultiplier { get; init; }
|
||||
|
||||
/// <summary>Computed triage priority based on decay urgency.</summary>
|
||||
[JsonPropertyName("priority")]
|
||||
public required TriagePriority Priority { get; init; }
|
||||
|
||||
/// <summary>Age in days since last refresh at evaluation time.</summary>
|
||||
[JsonPropertyName("age_days")]
|
||||
public required double AgeDays { get; init; }
|
||||
|
||||
/// <summary>Days until the observation crosses the staleness threshold (negative if already stale).</summary>
|
||||
[JsonPropertyName("days_until_stale")]
|
||||
public required double DaysUntilStale { get; init; }
|
||||
|
||||
/// <summary>When this triage item was evaluated (UTC).</summary>
|
||||
[JsonPropertyName("evaluated_at")]
|
||||
public required DateTimeOffset EvaluatedAt { get; init; }
|
||||
|
||||
/// <summary>Optional signal gaps contributing to uncertainty.</summary>
|
||||
[JsonPropertyName("signal_gaps")]
|
||||
public IReadOnlyList<SignalGap> SignalGaps { get; init; } = [];
|
||||
|
||||
/// <summary>Recommended next action for the operator.</summary>
|
||||
[JsonPropertyName("recommended_action")]
|
||||
public string? RecommendedAction { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of evaluating a batch of observations for triage.
|
||||
/// </summary>
|
||||
public sealed record TriageQueueSnapshot
|
||||
{
|
||||
/// <summary>Items sorted by priority (Critical first) then by days-until-stale ascending.</summary>
|
||||
[JsonPropertyName("items")]
|
||||
public required IReadOnlyList<TriageItem> Items { get; init; }
|
||||
|
||||
/// <summary>Total observations evaluated.</summary>
|
||||
[JsonPropertyName("total_evaluated")]
|
||||
public required int TotalEvaluated { get; init; }
|
||||
|
||||
/// <summary>Count of items that are already stale.</summary>
|
||||
[JsonPropertyName("stale_count")]
|
||||
public required int StaleCount { get; init; }
|
||||
|
||||
/// <summary>Count of items approaching staleness (Low priority).</summary>
|
||||
[JsonPropertyName("approaching_count")]
|
||||
public required int ApproachingCount { get; init; }
|
||||
|
||||
/// <summary>When this snapshot was computed (UTC).</summary>
|
||||
[JsonPropertyName("evaluated_at")]
|
||||
public required DateTimeOffset EvaluatedAt { get; init; }
|
||||
|
||||
/// <summary>Summary statistics by priority tier.</summary>
|
||||
[JsonPropertyName("priority_summary")]
|
||||
public required IReadOnlyDictionary<TriagePriority, int> PrioritySummary { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Configuration for triage queue evaluation thresholds.
|
||||
/// </summary>
|
||||
public sealed record TriageQueueOptions
|
||||
{
|
||||
/// <summary>Default section name in appsettings.json.</summary>
|
||||
public const string SectionName = "Determinization:TriageQueue";
|
||||
|
||||
/// <summary>
|
||||
/// Multiplier threshold for "approaching staleness" (Low priority).
|
||||
/// Observations with decay multiplier below this but above staleness are flagged.
|
||||
/// Default: 0.70
|
||||
/// </summary>
|
||||
public double ApproachingThreshold { get; init; } = 0.70;
|
||||
|
||||
/// <summary>
|
||||
/// Multiplier threshold for High priority.
|
||||
/// Default: 0.30
|
||||
/// </summary>
|
||||
public double HighPriorityThreshold { get; init; } = 0.30;
|
||||
|
||||
/// <summary>
|
||||
/// Multiplier threshold for Critical priority.
|
||||
/// Default: 0.10
|
||||
/// </summary>
|
||||
public double CriticalPriorityThreshold { get; init; } = 0.10;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum number of items to include in a snapshot.
|
||||
/// Default: 500
|
||||
/// </summary>
|
||||
public int MaxSnapshotItems { get; init; } = 500;
|
||||
|
||||
/// <summary>
|
||||
/// Whether to include non-stale observations that are approaching staleness.
|
||||
/// Default: true
|
||||
/// </summary>
|
||||
public bool IncludeApproaching { get; init; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Minimum interval between triage evaluations for the same observation in minutes.
|
||||
/// Default: 60
|
||||
/// </summary>
|
||||
public int MinEvaluationIntervalMinutes { get; init; } = 60;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents an observation to be evaluated for triage.
|
||||
/// </summary>
|
||||
public sealed record TriageObservation
|
||||
{
|
||||
/// <summary>CVE identifier.</summary>
|
||||
public required string Cve { get; init; }
|
||||
|
||||
/// <summary>Component PURL.</summary>
|
||||
public required string Purl { get; init; }
|
||||
|
||||
/// <summary>Tenant identifier.</summary>
|
||||
public required string TenantId { get; init; }
|
||||
|
||||
/// <summary>Decay state of the observation.</summary>
|
||||
public required ObservationDecay Decay { get; init; }
|
||||
|
||||
/// <summary>Optional signal gaps from the most recent uncertainty evaluation.</summary>
|
||||
public IReadOnlyList<SignalGap> SignalGaps { get; init; } = [];
|
||||
}
|
||||
@@ -0,0 +1,227 @@
|
||||
using System.Diagnostics.Metrics;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
|
||||
/// <summary>
|
||||
/// Evaluates observations for decay-based triage and produces priority-sorted snapshots.
|
||||
/// All calculations are deterministic given the same inputs and reference time.
|
||||
/// </summary>
|
||||
public sealed class TriageQueueEvaluator : ITriageQueueEvaluator
|
||||
{
|
||||
private static readonly Meter Meter = new("StellaOps.Policy.Determinization");
|
||||
|
||||
private static readonly Counter<long> ItemsEvaluatedCounter = Meter.CreateCounter<long>(
|
||||
"stellaops_triage_items_evaluated_total",
|
||||
unit: "{items}",
|
||||
description: "Total observations evaluated for triage");
|
||||
|
||||
private static readonly Counter<long> ItemsQueuedCounter = Meter.CreateCounter<long>(
|
||||
"stellaops_triage_items_queued_total",
|
||||
unit: "{items}",
|
||||
description: "Observations added to triage queue");
|
||||
|
||||
private static readonly Histogram<double> DecayMultiplierHistogram = Meter.CreateHistogram<double>(
|
||||
"stellaops_triage_decay_multiplier",
|
||||
unit: "ratio",
|
||||
description: "Decay multiplier distribution of triage items");
|
||||
|
||||
private readonly ILogger<TriageQueueEvaluator> _logger;
|
||||
private readonly TriageQueueOptions _options;
|
||||
|
||||
public TriageQueueEvaluator(
|
||||
ILogger<TriageQueueEvaluator> logger,
|
||||
IOptions<TriageQueueOptions> options)
|
||||
{
|
||||
_logger = logger;
|
||||
_options = options.Value;
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<TriageQueueSnapshot> EvaluateAsync(
|
||||
IReadOnlyList<TriageObservation> observations,
|
||||
DateTimeOffset now,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(observations);
|
||||
|
||||
var triageItems = new List<TriageItem>();
|
||||
|
||||
foreach (var obs in observations)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
|
||||
var item = EvaluateSingle(obs, now);
|
||||
if (item is not null)
|
||||
{
|
||||
triageItems.Add(item);
|
||||
}
|
||||
}
|
||||
|
||||
// Sort: Critical first, then by days-until-stale ascending (most urgent first)
|
||||
var sorted = triageItems
|
||||
.OrderByDescending(i => i.Priority)
|
||||
.ThenBy(i => i.DaysUntilStale)
|
||||
.ThenBy(i => i.Cve, StringComparer.Ordinal)
|
||||
.ThenBy(i => i.Purl, StringComparer.Ordinal)
|
||||
.Take(_options.MaxSnapshotItems)
|
||||
.ToList();
|
||||
|
||||
// Compute summary
|
||||
var prioritySummary = new Dictionary<TriagePriority, int>();
|
||||
foreach (var priority in Enum.GetValues<TriagePriority>())
|
||||
{
|
||||
var count = sorted.Count(i => i.Priority == priority);
|
||||
if (count > 0)
|
||||
prioritySummary[priority] = count;
|
||||
}
|
||||
|
||||
var staleCount = sorted.Count(i => i.DaysUntilStale < 0);
|
||||
var approachingCount = sorted.Count(i => i.Priority == TriagePriority.Low);
|
||||
|
||||
var snapshot = new TriageQueueSnapshot
|
||||
{
|
||||
Items = sorted,
|
||||
TotalEvaluated = observations.Count,
|
||||
StaleCount = staleCount,
|
||||
ApproachingCount = approachingCount,
|
||||
EvaluatedAt = now,
|
||||
PrioritySummary = prioritySummary
|
||||
};
|
||||
|
||||
// Emit metrics
|
||||
ItemsEvaluatedCounter.Add(observations.Count);
|
||||
ItemsQueuedCounter.Add(sorted.Count);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Triage evaluation: {Total} observations, {Queued} queued ({Stale} stale, {Approaching} approaching)",
|
||||
observations.Count,
|
||||
sorted.Count,
|
||||
staleCount,
|
||||
approachingCount);
|
||||
|
||||
return Task.FromResult(snapshot);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public TriageItem? EvaluateSingle(TriageObservation observation, DateTimeOffset now)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(observation);
|
||||
|
||||
var decay = observation.Decay;
|
||||
var multiplier = decay.CalculateDecay(now);
|
||||
var ageDays = (now - decay.RefreshedAt).TotalDays;
|
||||
var isStale = decay.CheckIsStale(now);
|
||||
var priority = ClassifyPriority(multiplier, decay.StalenessThreshold);
|
||||
|
||||
// Skip if not stale and not approaching (unless IncludeApproaching is true)
|
||||
if (priority == TriagePriority.None)
|
||||
return null;
|
||||
|
||||
if (priority == TriagePriority.Low && !_options.IncludeApproaching)
|
||||
return null;
|
||||
|
||||
var daysUntilStale = CalculateDaysUntilStale(
|
||||
decay.RefreshedAt,
|
||||
decay.HalfLifeDays,
|
||||
decay.StalenessThreshold,
|
||||
decay.Floor,
|
||||
now);
|
||||
|
||||
var recommendedAction = DetermineRecommendedAction(priority, observation.SignalGaps);
|
||||
|
||||
// Emit per-item metric
|
||||
DecayMultiplierHistogram.Record(multiplier,
|
||||
new KeyValuePair<string, object?>("priority", priority.ToString()),
|
||||
new KeyValuePair<string, object?>("tenant_id", observation.TenantId));
|
||||
|
||||
return new TriageItem
|
||||
{
|
||||
Cve = observation.Cve,
|
||||
Purl = observation.Purl,
|
||||
TenantId = observation.TenantId,
|
||||
Decay = decay,
|
||||
CurrentMultiplier = multiplier,
|
||||
Priority = priority,
|
||||
AgeDays = Math.Max(0.0, ageDays),
|
||||
DaysUntilStale = daysUntilStale,
|
||||
EvaluatedAt = now,
|
||||
SignalGaps = observation.SignalGaps,
|
||||
RecommendedAction = recommendedAction
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Classifies triage priority based on current decay multiplier.
|
||||
/// </summary>
|
||||
internal TriagePriority ClassifyPriority(double multiplier, double stalenessThreshold)
|
||||
{
|
||||
if (multiplier <= _options.CriticalPriorityThreshold)
|
||||
return TriagePriority.Critical;
|
||||
|
||||
if (multiplier <= _options.HighPriorityThreshold)
|
||||
return TriagePriority.High;
|
||||
|
||||
if (multiplier <= stalenessThreshold)
|
||||
return TriagePriority.Medium;
|
||||
|
||||
if (multiplier <= _options.ApproachingThreshold)
|
||||
return TriagePriority.Low;
|
||||
|
||||
return TriagePriority.None;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calculates days until the observation crosses the staleness threshold.
|
||||
/// Negative values indicate the observation is already stale.
|
||||
/// Formula: days = -halfLife * ln(threshold) / ln(2), solving exp(-ln(2) * days / halfLife) = threshold
|
||||
/// </summary>
|
||||
internal static double CalculateDaysUntilStale(
|
||||
DateTimeOffset refreshedAt,
|
||||
double halfLifeDays,
|
||||
double stalenessThreshold,
|
||||
double floor,
|
||||
DateTimeOffset now)
|
||||
{
|
||||
// If floor >= threshold, the observation can never become stale via decay alone
|
||||
if (floor >= stalenessThreshold)
|
||||
return double.MaxValue;
|
||||
|
||||
// Days at which multiplier crosses threshold:
|
||||
// threshold = exp(-ln(2) * d / halfLife)
|
||||
// ln(threshold) = -ln(2) * d / halfLife
|
||||
// d = -halfLife * ln(threshold) / ln(2)
|
||||
var daysToThreshold = -halfLifeDays * Math.Log(stalenessThreshold) / Math.Log(2.0);
|
||||
var currentAgeDays = (now - refreshedAt).TotalDays;
|
||||
|
||||
return daysToThreshold - currentAgeDays;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines a recommended action based on priority and signal gaps.
|
||||
/// </summary>
|
||||
private static string? DetermineRecommendedAction(TriagePriority priority, IReadOnlyList<SignalGap> gaps)
|
||||
{
|
||||
if (gaps.Count > 0)
|
||||
{
|
||||
var missingSignals = string.Join(", ", gaps.Select(g => g.Signal));
|
||||
return priority switch
|
||||
{
|
||||
TriagePriority.Critical => $"URGENT: Re-analyse immediately. Missing signals: {missingSignals}",
|
||||
TriagePriority.High => $"Re-analyse soon. Missing signals: {missingSignals}",
|
||||
TriagePriority.Medium => $"Schedule re-analysis. Missing signals: {missingSignals}",
|
||||
TriagePriority.Low => $"Monitor — approaching staleness. Missing signals: {missingSignals}",
|
||||
_ => null
|
||||
};
|
||||
}
|
||||
|
||||
return priority switch
|
||||
{
|
||||
TriagePriority.Critical => "URGENT: Re-analyse immediately — evidence has decayed to floor",
|
||||
TriagePriority.High => "Re-analyse soon — evidence is heavily decayed",
|
||||
TriagePriority.Medium => "Schedule re-analysis — observation is stale",
|
||||
TriagePriority.Low => "Monitor — observation is approaching staleness",
|
||||
_ => null
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,139 @@
|
||||
using System.Diagnostics.Metrics;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
|
||||
/// <summary>
|
||||
/// Background service that periodically evaluates observations for decay-based staleness
|
||||
/// and enqueues stale unknowns for re-analysis.
|
||||
///
|
||||
/// This service is the event-driven mechanism that bridges ObservationDecay.CheckIsStale()
|
||||
/// with the re-analysis pipeline, fulfilling the automated re-analysis triggering requirement.
|
||||
/// </summary>
|
||||
public sealed class UnknownTriageQueueService
|
||||
{
|
||||
private static readonly Meter Meter = new("StellaOps.Policy.Determinization");
|
||||
|
||||
private static readonly Counter<long> CyclesCounter = Meter.CreateCounter<long>(
|
||||
"stellaops_triage_cycles_total",
|
||||
unit: "{cycles}",
|
||||
description: "Total triage evaluation cycles executed");
|
||||
|
||||
private static readonly Counter<long> EnqueuedCounter = Meter.CreateCounter<long>(
|
||||
"stellaops_triage_reanalysis_enqueued_total",
|
||||
unit: "{items}",
|
||||
description: "Total items enqueued for re-analysis");
|
||||
|
||||
private static readonly Histogram<double> CycleDurationHistogram = Meter.CreateHistogram<double>(
|
||||
"stellaops_triage_cycle_duration_seconds",
|
||||
unit: "s",
|
||||
description: "Duration of triage evaluation cycles");
|
||||
|
||||
private readonly ITriageQueueEvaluator _evaluator;
|
||||
private readonly ITriageObservationSource _source;
|
||||
private readonly ITriageReanalysisSink _sink;
|
||||
private readonly ILogger<UnknownTriageQueueService> _logger;
|
||||
private readonly TriageQueueOptions _options;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public UnknownTriageQueueService(
|
||||
ITriageQueueEvaluator evaluator,
|
||||
ITriageObservationSource source,
|
||||
ITriageReanalysisSink sink,
|
||||
ILogger<UnknownTriageQueueService> logger,
|
||||
IOptions<TriageQueueOptions> options,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_evaluator = evaluator;
|
||||
_source = source;
|
||||
_sink = sink;
|
||||
_logger = logger;
|
||||
_options = options.Value;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Execute a single triage cycle: fetch candidates, evaluate, enqueue stale items.
|
||||
/// This method is designed to be called by a background host, timer, or scheduler.
|
||||
/// </summary>
|
||||
/// <param name="tenantId">Optional tenant filter.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>The triage snapshot from this cycle.</returns>
|
||||
public async Task<TriageQueueSnapshot> ExecuteCycleAsync(
|
||||
string? tenantId = null,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
var sw = System.Diagnostics.Stopwatch.StartNew();
|
||||
|
||||
_logger.LogInformation(
|
||||
"Starting triage cycle at {Now:O} for tenant {TenantId}",
|
||||
now,
|
||||
tenantId ?? "(all)");
|
||||
|
||||
try
|
||||
{
|
||||
// 1. Fetch candidate observations
|
||||
var candidates = await _source.GetCandidatesAsync(
|
||||
tenantId,
|
||||
_options.MaxSnapshotItems,
|
||||
cancellationToken);
|
||||
|
||||
_logger.LogDebug("Fetched {Count} candidate observations", candidates.Count);
|
||||
|
||||
// 2. Evaluate for triage
|
||||
var snapshot = await _evaluator.EvaluateAsync(candidates, now, cancellationToken);
|
||||
|
||||
// 3. Enqueue stale items for re-analysis (Medium, High, Critical)
|
||||
var reanalysisItems = snapshot.Items
|
||||
.Where(i => i.Priority >= TriagePriority.Medium)
|
||||
.ToList();
|
||||
|
||||
var enqueued = 0;
|
||||
if (reanalysisItems.Count > 0)
|
||||
{
|
||||
enqueued = await _sink.EnqueueAsync(reanalysisItems, cancellationToken);
|
||||
EnqueuedCounter.Add(enqueued);
|
||||
}
|
||||
|
||||
sw.Stop();
|
||||
CyclesCounter.Add(1);
|
||||
CycleDurationHistogram.Record(sw.Elapsed.TotalSeconds,
|
||||
new KeyValuePair<string, object?>("tenant_id", tenantId ?? "all"));
|
||||
|
||||
_logger.LogInformation(
|
||||
"Triage cycle complete: {Evaluated} evaluated, {Queued} queued, {Enqueued} enqueued for re-analysis ({Duration:F2}s)",
|
||||
snapshot.TotalEvaluated,
|
||||
snapshot.Items.Count,
|
||||
enqueued,
|
||||
sw.Elapsed.TotalSeconds);
|
||||
|
||||
return snapshot;
|
||||
}
|
||||
catch (OperationCanceledException)
|
||||
{
|
||||
_logger.LogWarning("Triage cycle cancelled");
|
||||
throw;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Triage cycle failed");
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Evaluate a specific set of observations (for on-demand triage, e.g. CLI/API).
|
||||
/// Does not enqueue — returns the snapshot for the caller to act on.
|
||||
/// </summary>
|
||||
/// <param name="observations">Observations to evaluate.</param>
|
||||
/// <param name="now">Reference time.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Triage queue snapshot.</returns>
|
||||
public Task<TriageQueueSnapshot> EvaluateOnDemandAsync(
|
||||
IReadOnlyList<TriageObservation> observations,
|
||||
DateTimeOffset now,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
return _evaluator.EvaluateAsync(observations, now, cancellationToken);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,340 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Policy.Determinization.Evidence;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using StellaOps.Policy.Scoring;
|
||||
using StellaOps.Policy.TrustLattice;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Unified facade composing TrustScoreAggregator + K4Lattice + ScorePolicy into a single
|
||||
/// deterministic scoring pipeline. Entry point for computing trust scores with full
|
||||
/// explainability and attestation-ready output.
|
||||
/// </summary>
|
||||
public interface ITrustScoreAlgebraFacade
|
||||
{
|
||||
/// <summary>
|
||||
/// Compute a complete trust score for an artifact.
|
||||
/// </summary>
|
||||
/// <param name="request">Scoring request with artifact, signals, and optional policy override.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Complete scoring result with Score.v1 predicate.</returns>
|
||||
Task<TrustScoreResult> ComputeTrustScoreAsync(
|
||||
TrustScoreRequest request,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Compute trust score synchronously (for batch/offline use).
|
||||
/// </summary>
|
||||
TrustScoreResult ComputeTrustScore(TrustScoreRequest request);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Implementation of the trust score algebra facade.
|
||||
/// Composes all scoring components into a deterministic pipeline.
|
||||
/// </summary>
|
||||
public sealed class TrustScoreAlgebraFacade : ITrustScoreAlgebraFacade
|
||||
{
|
||||
private readonly TrustScoreAggregator _aggregator;
|
||||
private readonly UncertaintyScoreCalculator _uncertaintyCalculator;
|
||||
private readonly ILogger<TrustScoreAlgebraFacade> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly JsonSerializerOptions _jsonOptions;
|
||||
|
||||
public TrustScoreAlgebraFacade(
|
||||
TrustScoreAggregator aggregator,
|
||||
UncertaintyScoreCalculator uncertaintyCalculator,
|
||||
ILogger<TrustScoreAlgebraFacade>? logger = null,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_aggregator = aggregator ?? throw new ArgumentNullException(nameof(aggregator));
|
||||
_uncertaintyCalculator = uncertaintyCalculator ?? throw new ArgumentNullException(nameof(uncertaintyCalculator));
|
||||
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<TrustScoreAlgebraFacade>.Instance;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_jsonOptions = new JsonSerializerOptions
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<TrustScoreResult> ComputeTrustScoreAsync(
|
||||
TrustScoreRequest request,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
// Scoring is CPU-bound and deterministic; run synchronously
|
||||
var result = ComputeTrustScore(request);
|
||||
return Task.FromResult(result);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public TrustScoreResult ComputeTrustScore(TrustScoreRequest request)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(request.ArtifactId);
|
||||
|
||||
try
|
||||
{
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
var policy = request.PolicyOverride ?? ScorePolicy.Default;
|
||||
var signals = request.Signals ?? SignalSnapshot.Empty(
|
||||
request.VulnerabilityId ?? "UNKNOWN",
|
||||
request.ArtifactId,
|
||||
now);
|
||||
|
||||
// Step 1: Calculate uncertainty score
|
||||
var uncertaintyScore = _uncertaintyCalculator.Calculate(signals);
|
||||
|
||||
// Step 2: Aggregate signals using weighted formula
|
||||
var trustScore = _aggregator.Aggregate(signals, uncertaintyScore);
|
||||
|
||||
// Step 3: Compute K4 lattice verdict
|
||||
var latticeVerdict = ComputeLatticeVerdict(signals);
|
||||
|
||||
// Step 4: Extract dimension scores
|
||||
var dimensions = ExtractDimensions(signals, policy);
|
||||
|
||||
// Step 5: Compute weighted final score in basis points
|
||||
var finalBps = ComputeWeightedScoreBps(dimensions, policy.WeightsBps);
|
||||
|
||||
// Step 6: Determine risk tier
|
||||
var tier = DetermineRiskTier(finalBps);
|
||||
|
||||
// Step 7: Compute policy digest
|
||||
var policyDigest = ComputePolicyDigest(policy);
|
||||
|
||||
// Step 8: Build Score.v1 predicate
|
||||
var predicate = new ScoreV1Predicate
|
||||
{
|
||||
ArtifactId = request.ArtifactId,
|
||||
VulnerabilityId = request.VulnerabilityId,
|
||||
TrustScoreBps = finalBps,
|
||||
Tier = tier.ToString(),
|
||||
LatticeVerdict = latticeVerdict,
|
||||
UncertaintyBps = ToBasisPoints(uncertaintyScore.Entropy),
|
||||
Dimensions = dimensions,
|
||||
WeightsUsed = policy.WeightsBps,
|
||||
PolicyDigest = policyDigest,
|
||||
ComputedAt = now,
|
||||
TenantId = request.TenantId
|
||||
};
|
||||
|
||||
_logger.LogDebug(
|
||||
"Computed trust score for {ArtifactId}: {ScoreBps}bps ({Tier}), lattice={Verdict}",
|
||||
request.ArtifactId, finalBps, tier, latticeVerdict);
|
||||
|
||||
return new TrustScoreResult
|
||||
{
|
||||
Predicate = predicate,
|
||||
SignalsUsed = signals,
|
||||
Success = true
|
||||
};
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to compute trust score for {ArtifactId}", request.ArtifactId);
|
||||
return new TrustScoreResult
|
||||
{
|
||||
Predicate = CreateEmptyPredicate(request),
|
||||
SignalsUsed = request.Signals ?? SignalSnapshot.Empty(
|
||||
request.VulnerabilityId ?? "UNKNOWN",
|
||||
request.ArtifactId,
|
||||
_timeProvider.GetUtcNow()),
|
||||
Success = false,
|
||||
Error = ex.Message
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compute K4 lattice verdict from signal states.
|
||||
/// </summary>
|
||||
private static K4Value ComputeLatticeVerdict(SignalSnapshot signals)
|
||||
{
|
||||
var values = new List<K4Value>();
|
||||
|
||||
// Map each signal to K4 value
|
||||
if (!signals.Vex.IsNotQueried)
|
||||
{
|
||||
values.Add(signals.Vex.Value?.Status?.ToLowerInvariant() switch
|
||||
{
|
||||
"affected" => K4Value.True, // Vulnerability confirmed
|
||||
"not_affected" => K4Value.False, // Vulnerability not present
|
||||
"fixed" => K4Value.False, // Fixed = not vulnerable
|
||||
"under_investigation" => K4Value.Unknown,
|
||||
_ => K4Value.Unknown
|
||||
});
|
||||
}
|
||||
|
||||
if (!signals.Reachability.IsNotQueried)
|
||||
{
|
||||
values.Add(signals.Reachability.Value?.Status switch
|
||||
{
|
||||
ReachabilityStatus.Reachable => K4Value.True,
|
||||
ReachabilityStatus.Unreachable => K4Value.False,
|
||||
ReachabilityStatus.Unknown => K4Value.Unknown,
|
||||
_ => K4Value.Unknown
|
||||
});
|
||||
}
|
||||
|
||||
if (!signals.Epss.IsNotQueried && signals.Epss.Value is not null)
|
||||
{
|
||||
// High EPSS = likely exploitable
|
||||
values.Add(signals.Epss.Value.Epss >= 0.5 ? K4Value.True : K4Value.False);
|
||||
}
|
||||
|
||||
// Join all values using K4 lattice
|
||||
return K4Lattice.JoinAll(values);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extract dimension scores from signals.
|
||||
/// </summary>
|
||||
private static ScoreDimensionsBps ExtractDimensions(SignalSnapshot signals, ScorePolicy policy)
|
||||
{
|
||||
// Base severity from CVSS or default
|
||||
var baseSeverityBps = 5000; // Default to medium if no CVSS
|
||||
|
||||
// Reachability
|
||||
var reachabilityBps = signals.Reachability.Value?.Status switch
|
||||
{
|
||||
ReachabilityStatus.Reachable => 10000,
|
||||
ReachabilityStatus.Unreachable => 0,
|
||||
_ => 5000 // Unknown = mid-range
|
||||
};
|
||||
|
||||
// Evidence quality (based on how many signals are present)
|
||||
var signalCount = CountPresentSignals(signals);
|
||||
var evidenceBps = signalCount switch
|
||||
{
|
||||
>= 5 => 9000,
|
||||
4 => 7500,
|
||||
3 => 6000,
|
||||
2 => 4000,
|
||||
1 => 2000,
|
||||
_ => 1000
|
||||
};
|
||||
|
||||
// Provenance (SBOM lineage quality)
|
||||
var provenanceBps = signals.Sbom.Value is not null ? 8000 : 3000;
|
||||
|
||||
// Optional dimensions
|
||||
int? epssBps = signals.Epss.Value is not null
|
||||
? ToBasisPoints(signals.Epss.Value.Epss)
|
||||
: null;
|
||||
|
||||
int? vexBps = signals.Vex.Value?.Status?.ToLowerInvariant() switch
|
||||
{
|
||||
"affected" => 10000,
|
||||
"under_investigation" => 7000,
|
||||
"fixed" => 1000,
|
||||
"not_affected" => 0,
|
||||
_ => null
|
||||
};
|
||||
|
||||
return new ScoreDimensionsBps
|
||||
{
|
||||
BaseSeverityBps = baseSeverityBps,
|
||||
ReachabilityBps = reachabilityBps,
|
||||
EvidenceBps = evidenceBps,
|
||||
ProvenanceBps = provenanceBps,
|
||||
EpssBps = epssBps,
|
||||
VexBps = vexBps
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compute final weighted score in basis points.
|
||||
/// </summary>
|
||||
private static int ComputeWeightedScoreBps(ScoreDimensionsBps dimensions, WeightsBps weights)
|
||||
{
|
||||
// Weighted average: Σ(dimension * weight) / Σ(weights)
|
||||
// Since weights sum to 10000, we can use: Σ(dimension * weight) / 10000
|
||||
|
||||
long weighted =
|
||||
(long)dimensions.BaseSeverityBps * weights.BaseSeverity +
|
||||
(long)dimensions.ReachabilityBps * weights.Reachability +
|
||||
(long)dimensions.EvidenceBps * weights.Evidence +
|
||||
(long)dimensions.ProvenanceBps * weights.Provenance;
|
||||
|
||||
var result = (int)(weighted / 10000);
|
||||
return Math.Clamp(result, 0, 10000);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determine risk tier from basis point score.
|
||||
/// </summary>
|
||||
private static RiskTier DetermineRiskTier(int scoreBps)
|
||||
{
|
||||
return scoreBps switch
|
||||
{
|
||||
>= 9000 => RiskTier.Critical,
|
||||
>= 7000 => RiskTier.High,
|
||||
>= 4000 => RiskTier.Medium,
|
||||
>= 1000 => RiskTier.Low,
|
||||
_ => RiskTier.Info
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compute SHA-256 digest of policy for reproducibility.
|
||||
/// </summary>
|
||||
private string ComputePolicyDigest(ScorePolicy policy)
|
||||
{
|
||||
var json = JsonSerializer.Serialize(policy, _jsonOptions);
|
||||
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(json));
|
||||
return Convert.ToHexStringLower(bytes);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Count present (non-null, non-queried) signals.
|
||||
/// </summary>
|
||||
private static int CountPresentSignals(SignalSnapshot signals)
|
||||
{
|
||||
var count = 0;
|
||||
if (!signals.Vex.IsNotQueried && signals.Vex.Value is not null) count++;
|
||||
if (!signals.Epss.IsNotQueried && signals.Epss.Value is not null) count++;
|
||||
if (!signals.Reachability.IsNotQueried && signals.Reachability.Value is not null) count++;
|
||||
if (!signals.Runtime.IsNotQueried && signals.Runtime.Value is not null) count++;
|
||||
if (!signals.Backport.IsNotQueried && signals.Backport.Value is not null) count++;
|
||||
if (!signals.Sbom.IsNotQueried && signals.Sbom.Value is not null) count++;
|
||||
return count;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Convert a 0.0-1.0 double to basis points.
|
||||
/// </summary>
|
||||
private static int ToBasisPoints(double value) =>
|
||||
Math.Clamp((int)(value * 10000), 0, 10000);
|
||||
|
||||
/// <summary>
|
||||
/// Create empty predicate for error cases.
|
||||
/// </summary>
|
||||
private ScoreV1Predicate CreateEmptyPredicate(TrustScoreRequest request)
|
||||
{
|
||||
return new ScoreV1Predicate
|
||||
{
|
||||
ArtifactId = request.ArtifactId,
|
||||
VulnerabilityId = request.VulnerabilityId,
|
||||
TrustScoreBps = 0,
|
||||
Tier = RiskTier.Info.ToString(),
|
||||
LatticeVerdict = K4Value.Unknown,
|
||||
UncertaintyBps = 10000,
|
||||
Dimensions = new ScoreDimensionsBps
|
||||
{
|
||||
BaseSeverityBps = 0,
|
||||
ReachabilityBps = 0,
|
||||
EvidenceBps = 0,
|
||||
ProvenanceBps = 0
|
||||
},
|
||||
WeightsUsed = WeightsBps.Default,
|
||||
PolicyDigest = "error",
|
||||
ComputedAt = _timeProvider.GetUtcNow(),
|
||||
TenantId = request.TenantId
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,59 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IWeightManifestLoader.cs
|
||||
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
|
||||
// Task: T1 - Weight manifest loader interface
|
||||
// Description: Contract for discovering, loading, validating, and selecting
|
||||
// versioned weight manifests from the file system.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
/// <summary>
|
||||
/// Discovers, loads, validates, and selects versioned weight manifests.
|
||||
/// </summary>
|
||||
public interface IWeightManifestLoader
|
||||
{
|
||||
/// <summary>
|
||||
/// Lists all available weight manifests discovered in the configured directory,
|
||||
/// sorted by <c>effectiveFrom</c> descending (most recent first).
|
||||
/// </summary>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>All discovered manifest load results.</returns>
|
||||
Task<ImmutableArray<WeightManifestLoadResult>> ListAsync(CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Loads and validates a specific manifest file by path.
|
||||
/// </summary>
|
||||
/// <param name="filePath">Absolute or relative path to the manifest file.</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>Load result containing the manifest and hash verification status.</returns>
|
||||
Task<WeightManifestLoadResult> LoadAsync(string filePath, CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Selects the manifest effective for a given reference date.
|
||||
/// Picks the most recent manifest where <c>effectiveFrom ≤ referenceDate</c>.
|
||||
/// </summary>
|
||||
/// <param name="referenceDate">The date to select for (typically <c>DateTimeOffset.UtcNow</c>).</param>
|
||||
/// <param name="cancellationToken">Cancellation token.</param>
|
||||
/// <returns>The applicable manifest, or <c>null</c> if none is effective.</returns>
|
||||
Task<WeightManifestLoadResult?> SelectEffectiveAsync(
|
||||
DateTimeOffset referenceDate,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Validates a manifest: schema version, weight normalization, content hash.
|
||||
/// </summary>
|
||||
/// <param name="result">The load result to validate.</param>
|
||||
/// <returns>Validation issues found (empty if valid).</returns>
|
||||
ImmutableArray<string> Validate(WeightManifestLoadResult result);
|
||||
|
||||
/// <summary>
|
||||
/// Computes a diff between two manifests, comparing weight values and thresholds.
|
||||
/// </summary>
|
||||
/// <param name="from">Source (older) manifest.</param>
|
||||
/// <param name="to">Target (newer) manifest.</param>
|
||||
/// <returns>Diff summary.</returns>
|
||||
WeightManifestDiff Diff(WeightManifestDocument from, WeightManifestDocument to);
|
||||
}
|
||||
@@ -0,0 +1,277 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// WeightManifestCommands.cs
|
||||
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
|
||||
// Task: T1 - CLI weight management commands
|
||||
// Description: Service-level commands that back the `stella weights` CLI:
|
||||
// list, validate, diff, activate, hash. Each produces a
|
||||
// deterministic, serializable result model.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
/// <summary>
|
||||
/// Provides the backing logic for CLI weight management commands:
|
||||
/// <c>stella weights list</c>, <c>stella weights validate</c>,
|
||||
/// <c>stella weights diff</c>, <c>stella weights activate</c>,
|
||||
/// <c>stella weights hash</c>.
|
||||
/// </summary>
|
||||
public sealed class WeightManifestCommands
|
||||
{
|
||||
private readonly IWeightManifestLoader _loader;
|
||||
|
||||
public WeightManifestCommands(IWeightManifestLoader loader)
|
||||
{
|
||||
_loader = loader ?? throw new ArgumentNullException(nameof(loader));
|
||||
}
|
||||
|
||||
// ── stella weights list ──────────────────────────────────────────────
|
||||
|
||||
/// <summary>
|
||||
/// Lists all discovered weight manifests with their versions,
|
||||
/// effective dates, profiles, and hash status.
|
||||
/// </summary>
|
||||
public async Task<WeightsListResult> ListAsync(CancellationToken ct = default)
|
||||
{
|
||||
var manifests = await _loader.ListAsync(ct).ConfigureAwait(false);
|
||||
|
||||
var entries = manifests.Select(r => new WeightsListEntry
|
||||
{
|
||||
Version = r.Manifest.Version,
|
||||
EffectiveFrom = r.Manifest.EffectiveFrom,
|
||||
Profile = r.Manifest.Profile,
|
||||
HashStatus = r.Manifest.HasComputedHash
|
||||
? (r.HashVerified ? "verified" : "mismatch")
|
||||
: "auto",
|
||||
SourcePath = r.SourcePath,
|
||||
Description = r.Manifest.Description
|
||||
}).ToImmutableArray();
|
||||
|
||||
return new WeightsListResult { Entries = entries };
|
||||
}
|
||||
|
||||
// ── stella weights validate ──────────────────────────────────────────
|
||||
|
||||
/// <summary>
|
||||
/// Validates a specific manifest file or all discovered manifests.
|
||||
/// </summary>
|
||||
/// <param name="filePath">
|
||||
/// If specified, validate only this file. Otherwise validate all discovered manifests.
|
||||
/// </param>
|
||||
public async Task<WeightsValidateResult> ValidateAsync(
|
||||
string? filePath = null,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var results = new List<WeightsValidateEntry>();
|
||||
|
||||
if (!string.IsNullOrEmpty(filePath))
|
||||
{
|
||||
var loadResult = await _loader.LoadAsync(filePath, ct).ConfigureAwait(false);
|
||||
var issues = _loader.Validate(loadResult);
|
||||
results.Add(new WeightsValidateEntry
|
||||
{
|
||||
Version = loadResult.Manifest.Version,
|
||||
SourcePath = loadResult.SourcePath,
|
||||
Issues = issues,
|
||||
IsValid = issues.IsEmpty
|
||||
});
|
||||
}
|
||||
else
|
||||
{
|
||||
var all = await _loader.ListAsync(ct).ConfigureAwait(false);
|
||||
foreach (var loadResult in all)
|
||||
{
|
||||
var issues = _loader.Validate(loadResult);
|
||||
results.Add(new WeightsValidateEntry
|
||||
{
|
||||
Version = loadResult.Manifest.Version,
|
||||
SourcePath = loadResult.SourcePath,
|
||||
Issues = issues,
|
||||
IsValid = issues.IsEmpty
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return new WeightsValidateResult
|
||||
{
|
||||
Entries = [.. results],
|
||||
AllValid = results.TrueForAll(e => e.IsValid)
|
||||
};
|
||||
}
|
||||
|
||||
// ── stella weights diff ──────────────────────────────────────────────
|
||||
|
||||
/// <summary>
|
||||
/// Diffs two manifest files, or two versions by version identifier.
|
||||
/// </summary>
|
||||
public async Task<WeightManifestDiff> DiffAsync(
|
||||
string fromPath,
|
||||
string toPath,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(fromPath);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(toPath);
|
||||
|
||||
var from = await _loader.LoadAsync(fromPath, ct).ConfigureAwait(false);
|
||||
var to = await _loader.LoadAsync(toPath, ct).ConfigureAwait(false);
|
||||
|
||||
return _loader.Diff(from.Manifest, to.Manifest);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Diffs two manifests by version string (searches the discovered set).
|
||||
/// </summary>
|
||||
public async Task<WeightManifestDiff> DiffByVersionAsync(
|
||||
string fromVersion,
|
||||
string toVersion,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(fromVersion);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(toVersion);
|
||||
|
||||
var all = await _loader.ListAsync(ct).ConfigureAwait(false);
|
||||
|
||||
var from = all.FirstOrDefault(r =>
|
||||
string.Equals(r.Manifest.Version, fromVersion, StringComparison.OrdinalIgnoreCase));
|
||||
var to = all.FirstOrDefault(r =>
|
||||
string.Equals(r.Manifest.Version, toVersion, StringComparison.OrdinalIgnoreCase));
|
||||
|
||||
if (from is null)
|
||||
throw new WeightManifestLoadException($"Manifest version '{fromVersion}' not found.");
|
||||
if (to is null)
|
||||
throw new WeightManifestLoadException($"Manifest version '{toVersion}' not found.");
|
||||
|
||||
return _loader.Diff(from.Manifest, to.Manifest);
|
||||
}
|
||||
|
||||
// ── stella weights activate ──────────────────────────────────────────
|
||||
|
||||
/// <summary>
|
||||
/// Selects the currently active (effective) manifest for a given date.
|
||||
/// </summary>
|
||||
public async Task<WeightsActivateResult> ActivateAsync(
|
||||
DateTimeOffset? referenceDate = null,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var date = referenceDate ?? DateTimeOffset.UtcNow;
|
||||
var result = await _loader.SelectEffectiveAsync(date, ct).ConfigureAwait(false);
|
||||
|
||||
if (result is null)
|
||||
{
|
||||
return new WeightsActivateResult
|
||||
{
|
||||
Found = false,
|
||||
ReferenceDate = date,
|
||||
Version = null,
|
||||
SourcePath = null,
|
||||
ContentHash = null
|
||||
};
|
||||
}
|
||||
|
||||
return new WeightsActivateResult
|
||||
{
|
||||
Found = true,
|
||||
ReferenceDate = date,
|
||||
Version = result.Manifest.Version,
|
||||
SourcePath = result.SourcePath,
|
||||
ContentHash = result.ComputedHash,
|
||||
EffectiveFrom = result.Manifest.EffectiveFrom,
|
||||
Profile = result.Manifest.Profile
|
||||
};
|
||||
}
|
||||
|
||||
// ── stella weights hash ──────────────────────────────────────────────
|
||||
|
||||
/// <summary>
|
||||
/// Computes the content hash for a manifest file and optionally replaces
|
||||
/// the "sha256:auto" placeholder in-place.
|
||||
/// </summary>
|
||||
/// <param name="filePath">Path to the manifest file.</param>
|
||||
/// <param name="writeBack">If true, writes the computed hash back to the file.</param>
|
||||
public async Task<WeightsHashResult> HashAsync(
|
||||
string filePath,
|
||||
bool writeBack = false,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
|
||||
|
||||
var resolvedPath = Path.GetFullPath(filePath);
|
||||
var json = await File.ReadAllTextAsync(resolvedPath, ct).ConfigureAwait(false);
|
||||
|
||||
var computedHash = WeightManifestHashComputer.ComputeFromJson(json);
|
||||
var hasPlaceholder = json.Contains(
|
||||
WeightManifestConstants.AutoHashPlaceholder, StringComparison.Ordinal);
|
||||
|
||||
string? updatedJson = null;
|
||||
if (writeBack && hasPlaceholder)
|
||||
{
|
||||
var (updated, _) = WeightManifestHashComputer.ReplaceAutoHash(json);
|
||||
updatedJson = updated;
|
||||
await File.WriteAllTextAsync(resolvedPath, updatedJson, ct).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
return new WeightsHashResult
|
||||
{
|
||||
SourcePath = resolvedPath,
|
||||
ComputedHash = computedHash,
|
||||
HadPlaceholder = hasPlaceholder,
|
||||
WrittenBack = writeBack && hasPlaceholder
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// ── CLI result models ────────────────────────────────────────────────────────
|
||||
|
||||
/// <summary>Result of <c>stella weights list</c>.</summary>
|
||||
public sealed record WeightsListResult
|
||||
{
|
||||
public required ImmutableArray<WeightsListEntry> Entries { get; init; }
|
||||
}
|
||||
|
||||
public sealed record WeightsListEntry
|
||||
{
|
||||
public required string Version { get; init; }
|
||||
public required DateTimeOffset EffectiveFrom { get; init; }
|
||||
public required string Profile { get; init; }
|
||||
public required string HashStatus { get; init; }
|
||||
public required string SourcePath { get; init; }
|
||||
public string? Description { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>Result of <c>stella weights validate</c>.</summary>
|
||||
public sealed record WeightsValidateResult
|
||||
{
|
||||
public required ImmutableArray<WeightsValidateEntry> Entries { get; init; }
|
||||
public required bool AllValid { get; init; }
|
||||
}
|
||||
|
||||
public sealed record WeightsValidateEntry
|
||||
{
|
||||
public required string Version { get; init; }
|
||||
public required string SourcePath { get; init; }
|
||||
public required ImmutableArray<string> Issues { get; init; }
|
||||
public required bool IsValid { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>Result of <c>stella weights activate</c>.</summary>
|
||||
public sealed record WeightsActivateResult
|
||||
{
|
||||
public required bool Found { get; init; }
|
||||
public required DateTimeOffset ReferenceDate { get; init; }
|
||||
public string? Version { get; init; }
|
||||
public string? SourcePath { get; init; }
|
||||
public string? ContentHash { get; init; }
|
||||
public DateTimeOffset? EffectiveFrom { get; init; }
|
||||
public string? Profile { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>Result of <c>stella weights hash</c>.</summary>
|
||||
public sealed record WeightsHashResult
|
||||
{
|
||||
public required string SourcePath { get; init; }
|
||||
public required string ComputedHash { get; init; }
|
||||
public required bool HadPlaceholder { get; init; }
|
||||
public required bool WrittenBack { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,185 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// WeightManifestHashComputer.cs
|
||||
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
|
||||
// Task: T1 - Content hash auto-compute
|
||||
// Description: Deterministic SHA-256 content hash computation for weight
|
||||
// manifests. Hashes the canonical content (excluding the
|
||||
// contentHash field itself) to produce a stable digest.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
/// <summary>
|
||||
/// Computes deterministic SHA-256 content hashes for weight manifests.
|
||||
/// The hash covers all content except the <c>contentHash</c> field itself.
|
||||
/// </summary>
|
||||
public static class WeightManifestHashComputer
|
||||
{
|
||||
private static readonly JsonSerializerOptions CanonicalOptions = new()
|
||||
{
|
||||
WriteIndented = false,
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Computes the SHA-256 content hash for a manifest's raw JSON content.
|
||||
/// The <c>contentHash</c> field is excluded from the hash input to allow
|
||||
/// the hash to be embedded in the same document it covers.
|
||||
/// </summary>
|
||||
/// <param name="jsonContent">Raw JSON content of the manifest file.</param>
|
||||
/// <returns>Hash in "sha256:<hex>" format.</returns>
|
||||
public static string ComputeFromJson(string jsonContent)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(jsonContent);
|
||||
|
||||
// Parse JSON, remove contentHash field, re-serialize canonically
|
||||
var doc = JsonDocument.Parse(jsonContent);
|
||||
var canonical = BuildCanonicalContent(doc.RootElement);
|
||||
|
||||
var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
|
||||
return $"{WeightManifestConstants.HashPrefix}{Convert.ToHexStringLower(hashBytes)}";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Computes the SHA-256 content hash for a deserialized manifest.
|
||||
/// Re-serializes with the <c>contentHash</c> set to the placeholder,
|
||||
/// then hashes the canonical form.
|
||||
/// </summary>
|
||||
/// <param name="manifest">The manifest document to hash.</param>
|
||||
/// <returns>Hash in "sha256:<hex>" format.</returns>
|
||||
public static string ComputeFromManifest(WeightManifestDocument manifest)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(manifest);
|
||||
|
||||
// Serialize with placeholder to ensure contentHash doesn't affect the result
|
||||
var withPlaceholder = manifest with
|
||||
{
|
||||
ContentHash = WeightManifestConstants.AutoHashPlaceholder
|
||||
};
|
||||
|
||||
var json = JsonSerializer.Serialize(withPlaceholder, CanonicalOptions);
|
||||
return ComputeFromJson(json);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verifies that a manifest's stored content hash matches its computed hash.
|
||||
/// </summary>
|
||||
/// <param name="jsonContent">Raw JSON content of the manifest file.</param>
|
||||
/// <param name="storedHash">The hash stored in the manifest's contentHash field.</param>
|
||||
/// <returns>True if the hashes match.</returns>
|
||||
public static bool Verify(string jsonContent, string storedHash)
|
||||
{
|
||||
if (string.IsNullOrEmpty(storedHash)
|
||||
|| storedHash.Equals(WeightManifestConstants.AutoHashPlaceholder, StringComparison.Ordinal))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
var computed = ComputeFromJson(jsonContent);
|
||||
return computed.Equals(storedHash, StringComparison.Ordinal);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Replaces the "sha256:auto" placeholder in raw JSON with the computed hash.
|
||||
/// Returns the updated JSON content and the computed hash.
|
||||
/// </summary>
|
||||
/// <param name="jsonContent">Raw JSON with contentHash placeholder.</param>
|
||||
/// <returns>Tuple of (updatedJson, computedHash).</returns>
|
||||
public static (string UpdatedJson, string ComputedHash) ReplaceAutoHash(string jsonContent)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(jsonContent);
|
||||
|
||||
var computedHash = ComputeFromJson(jsonContent);
|
||||
|
||||
var updatedJson = jsonContent.Replace(
|
||||
$"\"{WeightManifestConstants.AutoHashPlaceholder}\"",
|
||||
$"\"{computedHash}\"",
|
||||
StringComparison.Ordinal);
|
||||
|
||||
return (updatedJson, computedHash);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Builds a canonical JSON string from a <see cref="JsonElement"/>,
|
||||
/// excluding the <c>contentHash</c> field for hash stability.
|
||||
/// Properties are sorted alphabetically for determinism.
|
||||
/// </summary>
|
||||
private static string BuildCanonicalContent(JsonElement root)
|
||||
{
|
||||
using var stream = new MemoryStream();
|
||||
using var writer = new Utf8JsonWriter(stream, new JsonWriterOptions
|
||||
{
|
||||
Indented = false,
|
||||
SkipValidation = false
|
||||
});
|
||||
|
||||
WriteCanonical(writer, root, excludeField: "contentHash");
|
||||
writer.Flush();
|
||||
|
||||
return Encoding.UTF8.GetString(stream.ToArray());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Recursively writes JSON with sorted property keys and an optional excluded field.
|
||||
/// </summary>
|
||||
private static void WriteCanonical(Utf8JsonWriter writer, JsonElement element, string? excludeField = null)
|
||||
{
|
||||
switch (element.ValueKind)
|
||||
{
|
||||
case JsonValueKind.Object:
|
||||
writer.WriteStartObject();
|
||||
|
||||
// Sort properties alphabetically for deterministic output
|
||||
var properties = element.EnumerateObject()
|
||||
.Where(p => !string.Equals(p.Name, excludeField, StringComparison.Ordinal))
|
||||
.OrderBy(p => p.Name, StringComparer.Ordinal)
|
||||
.ToList();
|
||||
|
||||
foreach (var property in properties)
|
||||
{
|
||||
writer.WritePropertyName(property.Name);
|
||||
WriteCanonical(writer, property.Value);
|
||||
}
|
||||
|
||||
writer.WriteEndObject();
|
||||
break;
|
||||
|
||||
case JsonValueKind.Array:
|
||||
writer.WriteStartArray();
|
||||
foreach (var item in element.EnumerateArray())
|
||||
{
|
||||
WriteCanonical(writer, item);
|
||||
}
|
||||
writer.WriteEndArray();
|
||||
break;
|
||||
|
||||
case JsonValueKind.String:
|
||||
writer.WriteStringValue(element.GetString());
|
||||
break;
|
||||
|
||||
case JsonValueKind.Number:
|
||||
if (element.TryGetInt64(out var longValue))
|
||||
writer.WriteNumberValue(longValue);
|
||||
else
|
||||
writer.WriteNumberValue(element.GetDouble());
|
||||
break;
|
||||
|
||||
case JsonValueKind.True:
|
||||
writer.WriteBooleanValue(true);
|
||||
break;
|
||||
|
||||
case JsonValueKind.False:
|
||||
writer.WriteBooleanValue(false);
|
||||
break;
|
||||
|
||||
case JsonValueKind.Null:
|
||||
writer.WriteNullValue();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,403 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// WeightManifestLoader.cs
|
||||
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
|
||||
// Task: T1 - Weight manifest loader implementation
|
||||
// Description: File-system-based weight manifest discovery, loading,
|
||||
// validation, selection by effectiveFrom date, and diffing.
|
||||
// Deterministic and offline-friendly (no network calls).
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Diagnostics;
|
||||
using System.Diagnostics.Metrics;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
/// <summary>
|
||||
/// Configuration options for the weight manifest loader.
|
||||
/// </summary>
|
||||
public sealed record WeightManifestLoaderOptions
|
||||
{
|
||||
/// <summary>Configuration section name.</summary>
|
||||
public const string SectionName = "Determinization:WeightManifest";
|
||||
|
||||
/// <summary>
|
||||
/// Base directory to discover manifests in.
|
||||
/// Defaults to <c>etc/weights</c> relative to the application root.
|
||||
/// </summary>
|
||||
public string ManifestDirectory { get; init; } = WeightManifestConstants.DefaultManifestDirectory;
|
||||
|
||||
/// <summary>
|
||||
/// Glob pattern for manifest files.
|
||||
/// </summary>
|
||||
public string FilePattern { get; init; } = WeightManifestConstants.DefaultGlobPattern;
|
||||
|
||||
/// <summary>
|
||||
/// Whether to require valid content hashes (reject "sha256:auto").
|
||||
/// In production this should be true; in development, false is acceptable.
|
||||
/// </summary>
|
||||
public bool RequireComputedHash { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether to fail on hash mismatch (true) or log a warning (false).
|
||||
/// </summary>
|
||||
public bool StrictHashVerification { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// File-system-based weight manifest loader with deterministic behavior.
|
||||
/// Discovers manifests from a configured directory, validates them,
|
||||
/// computes/verifies content hashes, and selects by effectiveFrom date.
|
||||
/// </summary>
|
||||
public sealed class WeightManifestLoader : IWeightManifestLoader
|
||||
{
|
||||
private static readonly Meter Meter = new("StellaOps.Policy.Determinization.WeightManifest", "1.0.0");
|
||||
private static readonly Counter<long> ManifestsLoaded = Meter.CreateCounter<long>(
|
||||
"stellaops.weight_manifest.loaded_total", "manifests", "Total manifests loaded");
|
||||
private static readonly Counter<long> ManifestsValidated = Meter.CreateCounter<long>(
|
||||
"stellaops.weight_manifest.validated_total", "manifests", "Total manifests validated");
|
||||
private static readonly Counter<long> HashMismatches = Meter.CreateCounter<long>(
|
||||
"stellaops.weight_manifest.hash_mismatch_total", "errors", "Content hash mismatches detected");
|
||||
private static readonly Counter<long> ValidationErrors = Meter.CreateCounter<long>(
|
||||
"stellaops.weight_manifest.validation_error_total", "errors", "Validation errors encountered");
|
||||
|
||||
private static readonly JsonSerializerOptions DeserializeOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
ReadCommentHandling = JsonCommentHandling.Skip,
|
||||
AllowTrailingCommas = true
|
||||
};
|
||||
|
||||
private readonly WeightManifestLoaderOptions _options;
|
||||
private readonly ILogger<WeightManifestLoader> _logger;
|
||||
|
||||
public WeightManifestLoader(
|
||||
IOptions<WeightManifestLoaderOptions> options,
|
||||
ILogger<WeightManifestLoader> logger)
|
||||
{
|
||||
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<ImmutableArray<WeightManifestLoadResult>> ListAsync(
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var directory = ResolveManifestDirectory();
|
||||
if (!Directory.Exists(directory))
|
||||
{
|
||||
_logger.LogWarning("Weight manifest directory not found: {Directory}", directory);
|
||||
return [];
|
||||
}
|
||||
|
||||
var files = Directory.GetFiles(directory, _options.FilePattern)
|
||||
.OrderBy(f => f, StringComparer.Ordinal)
|
||||
.ToList();
|
||||
|
||||
if (files.Count == 0)
|
||||
{
|
||||
_logger.LogWarning("No weight manifest files found in {Directory}", directory);
|
||||
return [];
|
||||
}
|
||||
|
||||
var results = new List<WeightManifestLoadResult>(files.Count);
|
||||
foreach (var file in files)
|
||||
{
|
||||
cancellationToken.ThrowIfCancellationRequested();
|
||||
try
|
||||
{
|
||||
var result = await LoadCoreAsync(file, cancellationToken).ConfigureAwait(false);
|
||||
results.Add(result);
|
||||
}
|
||||
catch (WeightManifestLoadException ex)
|
||||
{
|
||||
_logger.LogWarning(ex, "Skipping invalid manifest: {File}", file);
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by effectiveFrom descending (most recent first)
|
||||
return [.. results.OrderByDescending(r => r.Manifest.EffectiveFrom)];
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public Task<WeightManifestLoadResult> LoadAsync(
|
||||
string filePath,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(filePath);
|
||||
return LoadCoreAsync(filePath, cancellationToken);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public async Task<WeightManifestLoadResult?> SelectEffectiveAsync(
|
||||
DateTimeOffset referenceDate,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
var all = await ListAsync(cancellationToken).ConfigureAwait(false);
|
||||
if (all.IsEmpty)
|
||||
return null;
|
||||
|
||||
// Already sorted by effectiveFrom descending; pick first where effectiveFrom <= referenceDate
|
||||
return all.FirstOrDefault(r => r.Manifest.EffectiveFrom <= referenceDate);
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public ImmutableArray<string> Validate(WeightManifestLoadResult result)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(result);
|
||||
|
||||
var issues = new List<string>();
|
||||
var manifest = result.Manifest;
|
||||
|
||||
// Schema version check
|
||||
if (!string.Equals(manifest.SchemaVersion, WeightManifestConstants.SupportedSchemaVersion,
|
||||
StringComparison.Ordinal))
|
||||
{
|
||||
issues.Add(
|
||||
$"Unsupported schema version '{manifest.SchemaVersion}'. Expected '{WeightManifestConstants.SupportedSchemaVersion}'.");
|
||||
}
|
||||
|
||||
// Version field
|
||||
if (string.IsNullOrWhiteSpace(manifest.Version))
|
||||
{
|
||||
issues.Add("Version field is required.");
|
||||
}
|
||||
|
||||
// Content hash
|
||||
if (_options.RequireComputedHash && !manifest.HasComputedHash)
|
||||
{
|
||||
issues.Add("Content hash is required but manifest contains placeholder 'sha256:auto'.");
|
||||
}
|
||||
|
||||
if (manifest.HasComputedHash && !result.HashVerified)
|
||||
{
|
||||
issues.Add(
|
||||
$"Content hash mismatch: stored={manifest.ContentHash}, computed={result.ComputedHash}.");
|
||||
}
|
||||
|
||||
// Legacy weight normalization
|
||||
if (manifest.Weights.Legacy.Count > 0)
|
||||
{
|
||||
var legacySum = manifest.Weights.Legacy.Values.Sum();
|
||||
if (Math.Abs(legacySum - 1.0) > 0.001)
|
||||
{
|
||||
issues.Add($"Legacy weights sum to {legacySum:F4}, expected 1.0.");
|
||||
}
|
||||
}
|
||||
|
||||
// Advisory weight normalization
|
||||
if (manifest.Weights.Advisory.Count > 0)
|
||||
{
|
||||
var advisorySum = manifest.Weights.Advisory.Values.Sum();
|
||||
if (Math.Abs(advisorySum - 1.0) > 0.001)
|
||||
{
|
||||
issues.Add($"Advisory weights sum to {advisorySum:F4}, expected 1.0.");
|
||||
}
|
||||
}
|
||||
|
||||
// Signal weights for entropy normalization
|
||||
if (manifest.SignalWeightsForEntropy.Count > 0)
|
||||
{
|
||||
var signalSum = manifest.SignalWeightsForEntropy.Values.Sum();
|
||||
if (Math.Abs(signalSum - 1.0) > 0.001)
|
||||
{
|
||||
issues.Add($"Signal weights for entropy sum to {signalSum:F4}, expected 1.0.");
|
||||
}
|
||||
}
|
||||
|
||||
ManifestsValidated.Add(1);
|
||||
if (issues.Count > 0)
|
||||
{
|
||||
ValidationErrors.Add(issues.Count);
|
||||
}
|
||||
|
||||
return [.. issues];
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
public WeightManifestDiff Diff(WeightManifestDocument from, WeightManifestDocument to)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(from);
|
||||
ArgumentNullException.ThrowIfNull(to);
|
||||
|
||||
var diffs = new List<WeightManifestFieldDiff>();
|
||||
|
||||
// Compare scalar fields
|
||||
CompareScalar(diffs, "version", from.Version, to.Version);
|
||||
CompareScalar(diffs, "profile", from.Profile, to.Profile);
|
||||
CompareScalar(diffs, "effectiveFrom", from.EffectiveFrom.ToString("O"), to.EffectiveFrom.ToString("O"));
|
||||
|
||||
// Compare legacy weights
|
||||
CompareWeightDictionary(diffs, "weights.legacy", from.Weights.Legacy, to.Weights.Legacy);
|
||||
|
||||
// Compare advisory weights
|
||||
CompareWeightDictionary(diffs, "weights.advisory", from.Weights.Advisory, to.Weights.Advisory);
|
||||
|
||||
// Compare signal weights for entropy
|
||||
CompareWeightDictionary(diffs, "signalWeightsForEntropy",
|
||||
from.SignalWeightsForEntropy, to.SignalWeightsForEntropy);
|
||||
|
||||
// Compare bucket thresholds
|
||||
if (from.Buckets is not null && to.Buckets is not null)
|
||||
{
|
||||
CompareScalar(diffs, "buckets.actNowMin",
|
||||
from.Buckets.ActNowMin.ToString(), to.Buckets.ActNowMin.ToString());
|
||||
CompareScalar(diffs, "buckets.scheduleNextMin",
|
||||
from.Buckets.ScheduleNextMin.ToString(), to.Buckets.ScheduleNextMin.ToString());
|
||||
CompareScalar(diffs, "buckets.investigateMin",
|
||||
from.Buckets.InvestigateMin.ToString(), to.Buckets.InvestigateMin.ToString());
|
||||
}
|
||||
|
||||
// Compare determinization thresholds
|
||||
if (from.DeterminizationThresholds is not null && to.DeterminizationThresholds is not null)
|
||||
{
|
||||
CompareScalar(diffs, "determinizationThresholds.manualReviewEntropy",
|
||||
from.DeterminizationThresholds.ManualReviewEntropy.ToString("F4"),
|
||||
to.DeterminizationThresholds.ManualReviewEntropy.ToString("F4"));
|
||||
CompareScalar(diffs, "determinizationThresholds.refreshEntropy",
|
||||
from.DeterminizationThresholds.RefreshEntropy.ToString("F4"),
|
||||
to.DeterminizationThresholds.RefreshEntropy.ToString("F4"));
|
||||
}
|
||||
|
||||
return new WeightManifestDiff
|
||||
{
|
||||
FromVersion = from.Version,
|
||||
ToVersion = to.Version,
|
||||
Differences = [.. diffs]
|
||||
};
|
||||
}
|
||||
|
||||
// ── Private helpers ──────────────────────────────────────────────────
|
||||
|
||||
private async Task<WeightManifestLoadResult> LoadCoreAsync(
|
||||
string filePath,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var resolvedPath = Path.GetFullPath(filePath);
|
||||
if (!File.Exists(resolvedPath))
|
||||
{
|
||||
throw new WeightManifestLoadException($"Weight manifest file not found: {resolvedPath}");
|
||||
}
|
||||
|
||||
var json = await File.ReadAllTextAsync(resolvedPath, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
WeightManifestDocument manifest;
|
||||
try
|
||||
{
|
||||
manifest = JsonSerializer.Deserialize<WeightManifestDocument>(json, DeserializeOptions)
|
||||
?? throw new WeightManifestLoadException(
|
||||
$"Failed to deserialize weight manifest from {resolvedPath}: empty document");
|
||||
}
|
||||
catch (JsonException ex)
|
||||
{
|
||||
throw new WeightManifestLoadException(
|
||||
$"JSON parse error in {resolvedPath}: {ex.Message}", ex);
|
||||
}
|
||||
|
||||
// Compute content hash
|
||||
var computedHash = WeightManifestHashComputer.ComputeFromJson(json);
|
||||
var hashVerified = manifest.HasComputedHash
|
||||
&& computedHash.Equals(manifest.ContentHash, StringComparison.Ordinal);
|
||||
|
||||
if (manifest.HasComputedHash && !hashVerified)
|
||||
{
|
||||
HashMismatches.Add(1);
|
||||
var message =
|
||||
$"Content hash mismatch for {resolvedPath}: stored={manifest.ContentHash}, computed={computedHash}";
|
||||
|
||||
if (_options.StrictHashVerification)
|
||||
{
|
||||
throw new WeightManifestLoadException(message);
|
||||
}
|
||||
|
||||
_logger.LogWarning("{Message}", message);
|
||||
}
|
||||
|
||||
ManifestsLoaded.Add(1);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Loaded weight manifest {Version} from {Path} (hash verified: {HashVerified})",
|
||||
manifest.Version, resolvedPath, hashVerified);
|
||||
|
||||
return new WeightManifestLoadResult
|
||||
{
|
||||
Manifest = manifest,
|
||||
SourcePath = resolvedPath,
|
||||
HashVerified = hashVerified,
|
||||
ComputedHash = computedHash
|
||||
};
|
||||
}
|
||||
|
||||
private string ResolveManifestDirectory()
|
||||
{
|
||||
var dir = _options.ManifestDirectory;
|
||||
if (Path.IsPathRooted(dir))
|
||||
return dir;
|
||||
|
||||
// Resolve relative to current directory (application root)
|
||||
return Path.GetFullPath(dir, AppContext.BaseDirectory);
|
||||
}
|
||||
|
||||
private static void CompareScalar(
|
||||
List<WeightManifestFieldDiff> diffs, string path, string? from, string? to)
|
||||
{
|
||||
if (!string.Equals(from, to, StringComparison.Ordinal))
|
||||
{
|
||||
diffs.Add(new WeightManifestFieldDiff { Path = path, OldValue = from, NewValue = to });
|
||||
}
|
||||
}
|
||||
|
||||
private static void CompareWeightDictionary(
|
||||
List<WeightManifestFieldDiff> diffs,
|
||||
string prefix,
|
||||
ImmutableDictionary<string, double> from,
|
||||
ImmutableDictionary<string, double> to)
|
||||
{
|
||||
var allKeys = from.Keys.Union(to.Keys).Order().ToList();
|
||||
foreach (var key in allKeys)
|
||||
{
|
||||
var hasFrom = from.TryGetValue(key, out var fromVal);
|
||||
var hasTo = to.TryGetValue(key, out var toVal);
|
||||
|
||||
if (!hasFrom)
|
||||
{
|
||||
diffs.Add(new WeightManifestFieldDiff
|
||||
{
|
||||
Path = $"{prefix}.{key}",
|
||||
OldValue = null,
|
||||
NewValue = toVal.ToString("F4")
|
||||
});
|
||||
}
|
||||
else if (!hasTo)
|
||||
{
|
||||
diffs.Add(new WeightManifestFieldDiff
|
||||
{
|
||||
Path = $"{prefix}.{key}",
|
||||
OldValue = fromVal.ToString("F4"),
|
||||
NewValue = null
|
||||
});
|
||||
}
|
||||
else if (Math.Abs(fromVal - toVal) > 0.0001)
|
||||
{
|
||||
diffs.Add(new WeightManifestFieldDiff
|
||||
{
|
||||
Path = $"{prefix}.{key}",
|
||||
OldValue = fromVal.ToString("F4"),
|
||||
NewValue = toVal.ToString("F4")
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Exception thrown when weight manifest loading or validation fails.
|
||||
/// </summary>
|
||||
public sealed class WeightManifestLoadException : Exception
|
||||
{
|
||||
public WeightManifestLoadException(string message) : base(message) { }
|
||||
public WeightManifestLoadException(string message, Exception inner) : base(message, inner) { }
|
||||
}
|
||||
@@ -0,0 +1,278 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// WeightManifestModels.cs
|
||||
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
|
||||
// Task: T1 - Versioned weight manifest models
|
||||
// Description: Immutable models for weight manifests with content-addressed
|
||||
// hashing, versioning, and deterministic serialization.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
/// <summary>
|
||||
/// Immutable representation of a versioned weight manifest file.
|
||||
/// </summary>
|
||||
public sealed record WeightManifestDocument
|
||||
{
|
||||
/// <summary>JSON Schema URI.</summary>
|
||||
[JsonPropertyName("$schema")]
|
||||
public string? Schema { get; init; }
|
||||
|
||||
/// <summary>Schema version (e.g. "1.0.0").</summary>
|
||||
[JsonPropertyName("schemaVersion")]
|
||||
public required string SchemaVersion { get; init; }
|
||||
|
||||
/// <summary>Manifest version identifier (e.g. "v2026-01-22").</summary>
|
||||
[JsonPropertyName("version")]
|
||||
public required string Version { get; init; }
|
||||
|
||||
/// <summary>UTC date from which this manifest is effective.</summary>
|
||||
[JsonPropertyName("effectiveFrom")]
|
||||
public required DateTimeOffset EffectiveFrom { get; init; }
|
||||
|
||||
/// <summary>Profile name (e.g. "production", "staging").</summary>
|
||||
[JsonPropertyName("profile")]
|
||||
public string Profile { get; init; } = "production";
|
||||
|
||||
/// <summary>Human-readable description of this manifest.</summary>
|
||||
[JsonPropertyName("description")]
|
||||
public string? Description { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Content hash in "sha256:<hex>" format.
|
||||
/// The placeholder "sha256:auto" means the hash has not been computed yet.
|
||||
/// </summary>
|
||||
[JsonPropertyName("contentHash")]
|
||||
public required string ContentHash { get; init; }
|
||||
|
||||
/// <summary>Legacy 6-dimension EWS weights.</summary>
|
||||
[JsonPropertyName("weights")]
|
||||
public required WeightManifestWeights Weights { get; init; }
|
||||
|
||||
/// <summary>Dimension human-readable names.</summary>
|
||||
[JsonPropertyName("dimensionNames")]
|
||||
public ImmutableDictionary<string, string> DimensionNames { get; init; } =
|
||||
ImmutableDictionary<string, string>.Empty;
|
||||
|
||||
/// <summary>Dimensions that subtract from risk score.</summary>
|
||||
[JsonPropertyName("subtractiveDimensions")]
|
||||
public ImmutableArray<string> SubtractiveDimensions { get; init; } = [];
|
||||
|
||||
/// <summary>Guardrail configurations.</summary>
|
||||
[JsonPropertyName("guardrails")]
|
||||
public WeightManifestGuardrails? Guardrails { get; init; }
|
||||
|
||||
/// <summary>Bucket boundaries for action tiers.</summary>
|
||||
[JsonPropertyName("buckets")]
|
||||
public WeightManifestBuckets? Buckets { get; init; }
|
||||
|
||||
/// <summary>Determinization thresholds for entropy-based triage.</summary>
|
||||
[JsonPropertyName("determinizationThresholds")]
|
||||
public WeightManifestDeterminizationThresholds? DeterminizationThresholds { get; init; }
|
||||
|
||||
/// <summary>Signal weights for entropy calculation (maps to <see cref="SignalWeights"/>).</summary>
|
||||
[JsonPropertyName("signalWeightsForEntropy")]
|
||||
public ImmutableDictionary<string, double> SignalWeightsForEntropy { get; init; } =
|
||||
ImmutableDictionary<string, double>.Empty;
|
||||
|
||||
/// <summary>Provenance metadata.</summary>
|
||||
[JsonPropertyName("metadata")]
|
||||
public WeightManifestMetadata? Metadata { get; init; }
|
||||
|
||||
/// <summary>Whether the content hash is a computed hash vs. the placeholder.</summary>
|
||||
[JsonIgnore]
|
||||
public bool HasComputedHash => !string.IsNullOrEmpty(ContentHash)
|
||||
&& !ContentHash.Equals(WeightManifestConstants.AutoHashPlaceholder, StringComparison.Ordinal);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Multi-profile weights block (legacy + advisory).
|
||||
/// </summary>
|
||||
public sealed record WeightManifestWeights
|
||||
{
|
||||
/// <summary>Legacy 6-dimension weights.</summary>
|
||||
[JsonPropertyName("legacy")]
|
||||
public ImmutableDictionary<string, double> Legacy { get; init; } =
|
||||
ImmutableDictionary<string, double>.Empty;
|
||||
|
||||
/// <summary>Advisory weights.</summary>
|
||||
[JsonPropertyName("advisory")]
|
||||
public ImmutableDictionary<string, double> Advisory { get; init; } =
|
||||
ImmutableDictionary<string, double>.Empty;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Guardrail configuration from a weight manifest.
|
||||
/// </summary>
|
||||
public sealed record WeightManifestGuardrails
|
||||
{
|
||||
[JsonPropertyName("notAffectedCap")]
|
||||
public GuardrailRule? NotAffectedCap { get; init; }
|
||||
|
||||
[JsonPropertyName("runtimeFloor")]
|
||||
public GuardrailRule? RuntimeFloor { get; init; }
|
||||
|
||||
[JsonPropertyName("speculativeCap")]
|
||||
public GuardrailRule? SpeculativeCap { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Individual guardrail rule.
|
||||
/// </summary>
|
||||
public sealed record GuardrailRule
|
||||
{
|
||||
[JsonPropertyName("enabled")]
|
||||
public bool Enabled { get; init; }
|
||||
|
||||
[JsonPropertyName("maxScore")]
|
||||
public int? MaxScore { get; init; }
|
||||
|
||||
[JsonPropertyName("minScore")]
|
||||
public int? MinScore { get; init; }
|
||||
|
||||
[JsonPropertyName("requiresBkpMin")]
|
||||
public double? RequiresBkpMin { get; init; }
|
||||
|
||||
[JsonPropertyName("requiresRtsMax")]
|
||||
public double? RequiresRtsMax { get; init; }
|
||||
|
||||
[JsonPropertyName("requiresRtsMin")]
|
||||
public double? RequiresRtsMin { get; init; }
|
||||
|
||||
[JsonPropertyName("requiresRchMax")]
|
||||
public double? RequiresRchMax { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Action bucket boundaries.
|
||||
/// </summary>
|
||||
public sealed record WeightManifestBuckets
|
||||
{
|
||||
[JsonPropertyName("actNowMin")]
|
||||
public int ActNowMin { get; init; } = 90;
|
||||
|
||||
[JsonPropertyName("scheduleNextMin")]
|
||||
public int ScheduleNextMin { get; init; } = 70;
|
||||
|
||||
[JsonPropertyName("investigateMin")]
|
||||
public int InvestigateMin { get; init; } = 40;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Entropy-based determinization thresholds.
|
||||
/// </summary>
|
||||
public sealed record WeightManifestDeterminizationThresholds
|
||||
{
|
||||
[JsonPropertyName("manualReviewEntropy")]
|
||||
public double ManualReviewEntropy { get; init; } = 0.60;
|
||||
|
||||
[JsonPropertyName("refreshEntropy")]
|
||||
public double RefreshEntropy { get; init; } = 0.40;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Provenance metadata for audit trail.
|
||||
/// </summary>
|
||||
public sealed record WeightManifestMetadata
|
||||
{
|
||||
[JsonPropertyName("createdBy")]
|
||||
public string? CreatedBy { get; init; }
|
||||
|
||||
[JsonPropertyName("createdAt")]
|
||||
public DateTimeOffset? CreatedAt { get; init; }
|
||||
|
||||
[JsonPropertyName("changelog")]
|
||||
public ImmutableArray<ChangelogEntry> Changelog { get; init; } = [];
|
||||
|
||||
[JsonPropertyName("notes")]
|
||||
public ImmutableArray<string> Notes { get; init; } = [];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Changelog entry for manifest versioning audit.
|
||||
/// </summary>
|
||||
public sealed record ChangelogEntry
|
||||
{
|
||||
[JsonPropertyName("version")]
|
||||
public string? Version { get; init; }
|
||||
|
||||
[JsonPropertyName("date")]
|
||||
public string? Date { get; init; }
|
||||
|
||||
[JsonPropertyName("changes")]
|
||||
public ImmutableArray<string> Changes { get; init; } = [];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Constants for the weight manifest system.
|
||||
/// </summary>
|
||||
public static class WeightManifestConstants
|
||||
{
|
||||
/// <summary>Placeholder that signals "compute hash at build/load time".</summary>
|
||||
public const string AutoHashPlaceholder = "sha256:auto";
|
||||
|
||||
/// <summary>Prefix for content hashes.</summary>
|
||||
public const string HashPrefix = "sha256:";
|
||||
|
||||
/// <summary>Supported schema version.</summary>
|
||||
public const string SupportedSchemaVersion = "1.0.0";
|
||||
|
||||
/// <summary>Default glob pattern for discovering manifest files.</summary>
|
||||
public const string DefaultGlobPattern = "*.weights.json";
|
||||
|
||||
/// <summary>Default manifest directory (relative to application root).</summary>
|
||||
public const string DefaultManifestDirectory = "etc/weights";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of loading and validating a weight manifest.
|
||||
/// </summary>
|
||||
public sealed record WeightManifestLoadResult
|
||||
{
|
||||
/// <summary>The loaded and validated manifest.</summary>
|
||||
public required WeightManifestDocument Manifest { get; init; }
|
||||
|
||||
/// <summary>File path the manifest was loaded from.</summary>
|
||||
public required string SourcePath { get; init; }
|
||||
|
||||
/// <summary>Whether the content hash was verified (vs computed fresh).</summary>
|
||||
public required bool HashVerified { get; init; }
|
||||
|
||||
/// <summary>Computed content hash (may differ from manifest if auto).</summary>
|
||||
public required string ComputedHash { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of comparing two weight manifests.
|
||||
/// </summary>
|
||||
public sealed record WeightManifestDiff
|
||||
{
|
||||
/// <summary>Source (older) manifest version.</summary>
|
||||
public required string FromVersion { get; init; }
|
||||
|
||||
/// <summary>Target (newer) manifest version.</summary>
|
||||
public required string ToVersion { get; init; }
|
||||
|
||||
/// <summary>Individual field differences.</summary>
|
||||
public required ImmutableArray<WeightManifestFieldDiff> Differences { get; init; }
|
||||
|
||||
/// <summary>Whether any differences exist.</summary>
|
||||
public bool HasDifferences => !Differences.IsEmpty;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Individual field difference between two manifests.
|
||||
/// </summary>
|
||||
public sealed record WeightManifestFieldDiff
|
||||
{
|
||||
/// <summary>Dot-delimited path to the changed field.</summary>
|
||||
public required string Path { get; init; }
|
||||
|
||||
/// <summary>Previous value (serialized as string).</summary>
|
||||
public required string? OldValue { get; init; }
|
||||
|
||||
/// <summary>New value (serialized as string).</summary>
|
||||
public required string? NewValue { get; init; }
|
||||
}
|
||||
@@ -3,6 +3,9 @@ using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Policy.Determinization.Scoring;
|
||||
using StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
using StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
using StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
namespace StellaOps.Policy.Determinization;
|
||||
|
||||
@@ -43,8 +46,26 @@ public static class ServiceCollectionExtensions
|
||||
services.TryAddSingleton<DecayedConfidenceCalculator>();
|
||||
services.TryAddSingleton<IDecayedConfidenceCalculator>(sp => sp.GetRequiredService<DecayedConfidenceCalculator>());
|
||||
|
||||
services.TryAddSingleton<ImpactScoreCalculator>();
|
||||
services.TryAddSingleton<IImpactScoreCalculator>(sp => sp.GetRequiredService<ImpactScoreCalculator>());
|
||||
|
||||
services.TryAddSingleton<CombinedImpactCalculator>();
|
||||
services.TryAddSingleton<ICombinedImpactCalculator>(sp => sp.GetRequiredService<CombinedImpactCalculator>());
|
||||
|
||||
services.TryAddSingleton<TrustScoreAggregator>();
|
||||
|
||||
services.TryAddSingleton<TrustScoreAlgebraFacade>();
|
||||
services.TryAddSingleton<ITrustScoreAlgebraFacade>(sp => sp.GetRequiredService<TrustScoreAlgebraFacade>());
|
||||
|
||||
// EWS: 6-dimension Evidence-Weighted Score model
|
||||
RegisterEwsServices(services);
|
||||
|
||||
// Triage: decay-based staleness evaluation and re-analysis queue
|
||||
RegisterTriageServices(services);
|
||||
|
||||
// Weight Manifests: versioned weight discovery, validation, selection
|
||||
RegisterWeightManifestServices(services);
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
@@ -66,8 +87,76 @@ public static class ServiceCollectionExtensions
|
||||
services.TryAddSingleton<DecayedConfidenceCalculator>();
|
||||
services.TryAddSingleton<IDecayedConfidenceCalculator>(sp => sp.GetRequiredService<DecayedConfidenceCalculator>());
|
||||
|
||||
services.TryAddSingleton<ImpactScoreCalculator>();
|
||||
services.TryAddSingleton<IImpactScoreCalculator>(sp => sp.GetRequiredService<ImpactScoreCalculator>());
|
||||
|
||||
services.TryAddSingleton<CombinedImpactCalculator>();
|
||||
services.TryAddSingleton<ICombinedImpactCalculator>(sp => sp.GetRequiredService<CombinedImpactCalculator>());
|
||||
|
||||
services.TryAddSingleton<TrustScoreAggregator>();
|
||||
|
||||
services.TryAddSingleton<TrustScoreAlgebraFacade>();
|
||||
services.TryAddSingleton<ITrustScoreAlgebraFacade>(sp => sp.GetRequiredService<TrustScoreAlgebraFacade>());
|
||||
|
||||
// TSF-004: Delta-if-present calculator for hypothetical score simulations
|
||||
services.TryAddSingleton<DeltaIfPresentCalculator>();
|
||||
services.TryAddSingleton<IDeltaIfPresentCalculator>(sp => sp.GetRequiredService<DeltaIfPresentCalculator>());
|
||||
|
||||
// EWS: 6-dimension Evidence-Weighted Score model
|
||||
RegisterEwsServices(services);
|
||||
|
||||
// Triage: decay-based staleness evaluation and re-analysis queue
|
||||
RegisterTriageServices(services);
|
||||
|
||||
// Weight Manifests: versioned weight discovery, validation, selection
|
||||
RegisterWeightManifestServices(services);
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
private static void RegisterEwsServices(IServiceCollection services)
|
||||
{
|
||||
// Register all 6 dimension normalizers (AddSingleton, not TryAdd,
|
||||
// so IEnumerable<IEwsDimensionNormalizer> resolves all of them)
|
||||
services.AddSingleton<IEwsDimensionNormalizer, ReachabilityNormalizer>();
|
||||
services.AddSingleton<IEwsDimensionNormalizer, RuntimeSignalsNormalizer>();
|
||||
services.AddSingleton<IEwsDimensionNormalizer, BackportEvidenceNormalizer>();
|
||||
services.AddSingleton<IEwsDimensionNormalizer, ExploitabilityNormalizer>();
|
||||
services.AddSingleton<IEwsDimensionNormalizer, SourceConfidenceNormalizer>();
|
||||
services.AddSingleton<IEwsDimensionNormalizer, MitigationStatusNormalizer>();
|
||||
|
||||
// Register guardrails engine
|
||||
services.TryAddSingleton<IGuardrailsEngine, GuardrailsEngine>();
|
||||
|
||||
// Register unified EWS calculator
|
||||
services.TryAddSingleton<IEwsCalculator, EwsCalculator>();
|
||||
}
|
||||
|
||||
private static void RegisterTriageServices(IServiceCollection services)
|
||||
{
|
||||
// Register triage options (defaults if not bound to config)
|
||||
services.AddOptions<TriageQueueOptions>();
|
||||
|
||||
// Register evaluator
|
||||
services.TryAddSingleton<ITriageQueueEvaluator, TriageQueueEvaluator>();
|
||||
|
||||
// Register in-memory sink as default (can be overridden by host-level registration)
|
||||
services.TryAddSingleton<InMemoryTriageReanalysisSink>();
|
||||
services.TryAddSingleton<ITriageReanalysisSink>(sp => sp.GetRequiredService<InMemoryTriageReanalysisSink>());
|
||||
|
||||
// Register the triage queue service
|
||||
services.TryAddSingleton<UnknownTriageQueueService>();
|
||||
}
|
||||
|
||||
private static void RegisterWeightManifestServices(IServiceCollection services)
|
||||
{
|
||||
// Register loader options (defaults if not bound to config)
|
||||
services.AddOptions<WeightManifestLoaderOptions>();
|
||||
|
||||
// Register manifest loader
|
||||
services.TryAddSingleton<IWeightManifestLoader, WeightManifestLoader>();
|
||||
|
||||
// Register CLI command service
|
||||
services.TryAddSingleton<WeightManifestCommands>();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,452 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProofGraphBuilder.cs
|
||||
// Sprint: SPRINT_20260208_049_Policy_proof_studio_ux
|
||||
// Task: T1 - Proof graph builder
|
||||
// Description: Constructs proof graphs from verdict rationale data.
|
||||
// Deterministic: same inputs always produce same graph with
|
||||
// content-addressed ID. Supports counterfactual overlay nodes.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
|
||||
namespace StellaOps.Policy.Explainability;
|
||||
|
||||
/// <summary>
|
||||
/// Builds proof graphs from verdict rationale components.
|
||||
/// </summary>
|
||||
public interface IProofGraphBuilder
|
||||
{
|
||||
/// <summary>
|
||||
/// Builds a complete proof graph from a verdict rationale and
|
||||
/// optional score breakdown data.
|
||||
/// </summary>
|
||||
ProofGraph Build(ProofGraphInput input);
|
||||
|
||||
/// <summary>
|
||||
/// Adds a counterfactual overlay to an existing proof graph,
|
||||
/// showing how scores would change under hypothetical conditions.
|
||||
/// </summary>
|
||||
ProofGraph AddCounterfactualOverlay(
|
||||
ProofGraph baseGraph,
|
||||
CounterfactualScenario scenario);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Input data for building a proof graph.
|
||||
/// </summary>
|
||||
public sealed record ProofGraphInput
|
||||
{
|
||||
/// <summary>The verdict rationale to visualize.</summary>
|
||||
public required VerdictRationale Rationale { get; init; }
|
||||
|
||||
/// <summary>Per-factor score breakdown, if available.</summary>
|
||||
public ScoreBreakdownDashboard? ScoreBreakdown { get; init; }
|
||||
|
||||
/// <summary>Reference time for graph computation.</summary>
|
||||
public required DateTimeOffset ComputedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A counterfactual scenario for what-if analysis.
|
||||
/// </summary>
|
||||
public sealed record CounterfactualScenario
|
||||
{
|
||||
/// <summary>Scenario label.</summary>
|
||||
[JsonPropertyName("label")]
|
||||
public required string Label { get; init; }
|
||||
|
||||
/// <summary>Factor overrides (factorId → hypothetical score).</summary>
|
||||
[JsonPropertyName("factor_overrides")]
|
||||
public required ImmutableDictionary<string, int> FactorOverrides { get; init; }
|
||||
|
||||
/// <summary>Resulting composite score under this scenario.</summary>
|
||||
[JsonPropertyName("resulting_score")]
|
||||
public int? ResultingScore { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Deterministic proof graph builder.
|
||||
/// </summary>
|
||||
public sealed class ProofGraphBuilder : IProofGraphBuilder
|
||||
{
|
||||
private readonly ILogger<ProofGraphBuilder> _logger;
|
||||
|
||||
public ProofGraphBuilder(ILogger<ProofGraphBuilder> logger)
|
||||
{
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
public ProofGraph Build(ProofGraphInput input)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(input);
|
||||
|
||||
var nodes = new List<ProofGraphNode>();
|
||||
var edges = new List<ProofGraphEdge>();
|
||||
|
||||
// 1. Create verdict root node (depth 0)
|
||||
var verdictNodeId = $"verdict:{input.Rationale.VerdictRef.AttestationId}";
|
||||
nodes.Add(new ProofGraphNode
|
||||
{
|
||||
Id = verdictNodeId,
|
||||
Label = $"Verdict: {input.Rationale.Decision.Verdict}",
|
||||
Type = ProofNodeType.Verdict,
|
||||
Confidence = input.Rationale.Decision.Score.HasValue
|
||||
? input.Rationale.Decision.Score.Value / 100.0
|
||||
: null,
|
||||
ScoreContribution = input.Rationale.Decision.Score,
|
||||
Depth = 0
|
||||
});
|
||||
|
||||
// 2. Create policy rule node (depth 1)
|
||||
var policyNodeId = $"policy:{input.Rationale.PolicyClause.ClauseId}";
|
||||
nodes.Add(new ProofGraphNode
|
||||
{
|
||||
Id = policyNodeId,
|
||||
Label = input.Rationale.PolicyClause.RuleDescription,
|
||||
Type = ProofNodeType.PolicyRule,
|
||||
Depth = 1
|
||||
});
|
||||
edges.Add(new ProofGraphEdge
|
||||
{
|
||||
Source = policyNodeId,
|
||||
Target = verdictNodeId,
|
||||
Relation = ProofEdgeRelation.Gates,
|
||||
Label = "Policy evaluation"
|
||||
});
|
||||
|
||||
// 3. Create score computation nodes from breakdown (depth 2)
|
||||
if (input.ScoreBreakdown is not null)
|
||||
{
|
||||
foreach (var factor in input.ScoreBreakdown.Factors)
|
||||
{
|
||||
var factorNodeId = $"score:{factor.FactorId}";
|
||||
nodes.Add(new ProofGraphNode
|
||||
{
|
||||
Id = factorNodeId,
|
||||
Label = $"{factor.FactorName} ({factor.RawScore})",
|
||||
Type = ProofNodeType.ScoreComputation,
|
||||
Confidence = factor.Confidence,
|
||||
ScoreContribution = factor.WeightedContribution,
|
||||
Depth = 2,
|
||||
Metadata = ImmutableDictionary<string, string>.Empty
|
||||
.Add("weight", factor.Weight.ToString("F2"))
|
||||
.Add("raw_score", factor.RawScore.ToString())
|
||||
});
|
||||
edges.Add(new ProofGraphEdge
|
||||
{
|
||||
Source = factorNodeId,
|
||||
Target = verdictNodeId,
|
||||
Relation = ProofEdgeRelation.ContributesScore,
|
||||
Weight = factor.Weight,
|
||||
Label = $"{factor.Weight:P0} weight"
|
||||
});
|
||||
}
|
||||
|
||||
// 3b. Guardrail nodes (depth 1, override verdict)
|
||||
foreach (var guardrail in input.ScoreBreakdown.GuardrailsApplied)
|
||||
{
|
||||
var guardrailNodeId = $"guardrail:{guardrail.GuardrailName}";
|
||||
nodes.Add(new ProofGraphNode
|
||||
{
|
||||
Id = guardrailNodeId,
|
||||
Label = $"Guardrail: {guardrail.GuardrailName} ({guardrail.ScoreBefore}→{guardrail.ScoreAfter})",
|
||||
Type = ProofNodeType.Guardrail,
|
||||
Depth = 1,
|
||||
Metadata = ImmutableDictionary<string, string>.Empty
|
||||
.Add("reason", guardrail.Reason)
|
||||
});
|
||||
edges.Add(new ProofGraphEdge
|
||||
{
|
||||
Source = guardrailNodeId,
|
||||
Target = verdictNodeId,
|
||||
Relation = ProofEdgeRelation.GuardrailApplied,
|
||||
Label = guardrail.Reason
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Create evidence leaf nodes (depth 3)
|
||||
var leafNodeIds = new List<string>();
|
||||
|
||||
// Reachability evidence
|
||||
if (input.Rationale.Evidence.Reachability is not null)
|
||||
{
|
||||
var reachNodeId = $"evidence:reachability:{input.Rationale.Evidence.Cve}";
|
||||
nodes.Add(new ProofGraphNode
|
||||
{
|
||||
Id = reachNodeId,
|
||||
Label = $"Reachability: {input.Rationale.Evidence.Reachability.VulnerableFunction ?? "analyzed"}",
|
||||
Type = ProofNodeType.ReachabilityAnalysis,
|
||||
Depth = 3,
|
||||
Metadata = ImmutableDictionary<string, string>.Empty
|
||||
.Add("entry_point", input.Rationale.Evidence.Reachability.EntryPoint ?? "unknown")
|
||||
});
|
||||
edges.Add(new ProofGraphEdge
|
||||
{
|
||||
Source = reachNodeId,
|
||||
Target = TryFindScoreNode(nodes, "rch") ?? policyNodeId,
|
||||
Relation = ProofEdgeRelation.ProvidesEvidence,
|
||||
Label = "Reachability signal"
|
||||
});
|
||||
leafNodeIds.Add(reachNodeId);
|
||||
}
|
||||
|
||||
// VEX statement evidence
|
||||
if (input.Rationale.Attestations.VexStatements?.Count > 0)
|
||||
{
|
||||
for (int i = 0; i < input.Rationale.Attestations.VexStatements.Count; i++)
|
||||
{
|
||||
var vex = input.Rationale.Attestations.VexStatements[i];
|
||||
var vexNodeId = $"evidence:vex:{vex.Id}";
|
||||
nodes.Add(new ProofGraphNode
|
||||
{
|
||||
Id = vexNodeId,
|
||||
Label = $"VEX: {vex.Summary ?? vex.Id}",
|
||||
Type = ProofNodeType.VexStatement,
|
||||
Digest = vex.Digest,
|
||||
Depth = 3
|
||||
});
|
||||
edges.Add(new ProofGraphEdge
|
||||
{
|
||||
Source = vexNodeId,
|
||||
Target = policyNodeId,
|
||||
Relation = ProofEdgeRelation.Attests,
|
||||
Label = "VEX statement"
|
||||
});
|
||||
leafNodeIds.Add(vexNodeId);
|
||||
}
|
||||
}
|
||||
|
||||
// Provenance attestation
|
||||
if (input.Rationale.Attestations.Provenance is not null)
|
||||
{
|
||||
var provNodeId = $"evidence:provenance:{input.Rationale.Attestations.Provenance.Id}";
|
||||
nodes.Add(new ProofGraphNode
|
||||
{
|
||||
Id = provNodeId,
|
||||
Label = $"Provenance: {input.Rationale.Attestations.Provenance.Summary ?? "verified"}",
|
||||
Type = ProofNodeType.Provenance,
|
||||
Digest = input.Rationale.Attestations.Provenance.Digest,
|
||||
Depth = 3
|
||||
});
|
||||
edges.Add(new ProofGraphEdge
|
||||
{
|
||||
Source = provNodeId,
|
||||
Target = policyNodeId,
|
||||
Relation = ProofEdgeRelation.Attests,
|
||||
Label = "Provenance attestation"
|
||||
});
|
||||
leafNodeIds.Add(provNodeId);
|
||||
}
|
||||
|
||||
// Path witness
|
||||
if (input.Rationale.Attestations.PathWitness is not null)
|
||||
{
|
||||
var pathNodeId = $"evidence:pathwitness:{input.Rationale.Attestations.PathWitness.Id}";
|
||||
nodes.Add(new ProofGraphNode
|
||||
{
|
||||
Id = pathNodeId,
|
||||
Label = $"Path Witness: {input.Rationale.Attestations.PathWitness.Summary ?? "verified"}",
|
||||
Type = ProofNodeType.ReachabilityAnalysis,
|
||||
Digest = input.Rationale.Attestations.PathWitness.Digest,
|
||||
Depth = 3
|
||||
});
|
||||
edges.Add(new ProofGraphEdge
|
||||
{
|
||||
Source = pathNodeId,
|
||||
Target = TryFindScoreNode(nodes, "rch") ?? policyNodeId,
|
||||
Relation = ProofEdgeRelation.Attests,
|
||||
Label = "Path witness attestation"
|
||||
});
|
||||
leafNodeIds.Add(pathNodeId);
|
||||
}
|
||||
|
||||
// 5. Build critical paths (leaf → root)
|
||||
var criticalPaths = BuildCriticalPaths(nodes, edges, verdictNodeId, leafNodeIds);
|
||||
|
||||
// 6. Compute content-addressed graph ID
|
||||
var graphId = ComputeGraphId(nodes, edges);
|
||||
|
||||
var graph = new ProofGraph
|
||||
{
|
||||
GraphId = graphId,
|
||||
VerdictRef = input.Rationale.VerdictRef,
|
||||
Nodes = [.. nodes],
|
||||
Edges = [.. edges],
|
||||
CriticalPaths = [.. criticalPaths],
|
||||
RootNodeId = verdictNodeId,
|
||||
LeafNodeIds = [.. leafNodeIds],
|
||||
ComputedAt = input.ComputedAt
|
||||
};
|
||||
|
||||
_logger.LogDebug(
|
||||
"Built proof graph {GraphId} with {NodeCount} nodes, {EdgeCount} edges, {PathCount} paths",
|
||||
graphId, nodes.Count, edges.Count, criticalPaths.Count);
|
||||
|
||||
return graph;
|
||||
}
|
||||
|
||||
public ProofGraph AddCounterfactualOverlay(
|
||||
ProofGraph baseGraph,
|
||||
CounterfactualScenario scenario)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(baseGraph);
|
||||
ArgumentNullException.ThrowIfNull(scenario);
|
||||
|
||||
var nodes = baseGraph.Nodes.ToList();
|
||||
var edges = baseGraph.Edges.ToList();
|
||||
|
||||
// Add a counterfactual hypothesis node
|
||||
var cfNodeId = $"counterfactual:{scenario.Label.Replace(" ", "_").ToLowerInvariant()}";
|
||||
nodes.Add(new ProofGraphNode
|
||||
{
|
||||
Id = cfNodeId,
|
||||
Label = $"What-If: {scenario.Label}",
|
||||
Type = ProofNodeType.Counterfactual,
|
||||
ScoreContribution = scenario.ResultingScore,
|
||||
Depth = 0,
|
||||
Metadata = scenario.FactorOverrides
|
||||
.ToImmutableDictionary(kv => $"override_{kv.Key}", kv => kv.Value.ToString())
|
||||
});
|
||||
|
||||
// Connect overridden factors to the counterfactual node
|
||||
foreach (var (factorId, _) in scenario.FactorOverrides)
|
||||
{
|
||||
var existingNode = nodes.FirstOrDefault(n => n.Id == $"score:{factorId}");
|
||||
if (existingNode is not null)
|
||||
{
|
||||
edges.Add(new ProofGraphEdge
|
||||
{
|
||||
Source = existingNode.Id,
|
||||
Target = cfNodeId,
|
||||
Relation = ProofEdgeRelation.Overrides,
|
||||
Label = $"What-if override: {factorId}"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
var newGraphId = ComputeGraphId(nodes, edges);
|
||||
|
||||
return baseGraph with
|
||||
{
|
||||
GraphId = newGraphId,
|
||||
Nodes = [.. nodes],
|
||||
Edges = [.. edges]
|
||||
};
|
||||
}
|
||||
|
||||
// ── Private helpers ──────────────────────────────────────────────────
|
||||
|
||||
private static string? TryFindScoreNode(List<ProofGraphNode> nodes, string factorCode)
|
||||
{
|
||||
return nodes.FirstOrDefault(n => n.Id == $"score:{factorCode}")?.Id;
|
||||
}
|
||||
|
||||
private static List<ProofGraphPath> BuildCriticalPaths(
|
||||
List<ProofGraphNode> nodes,
|
||||
List<ProofGraphEdge> edges,
|
||||
string rootId,
|
||||
List<string> leafIds)
|
||||
{
|
||||
var paths = new List<ProofGraphPath>();
|
||||
|
||||
// Build adjacency list (reverse: from target to source for tracing back)
|
||||
var reverseAdj = edges
|
||||
.GroupBy(e => e.Target)
|
||||
.ToDictionary(g => g.Key, g => g.Select(e => (e.Source, e.Weight)).ToList());
|
||||
|
||||
// Forward adjacency for tracing leaf to root
|
||||
var forwardAdj = edges
|
||||
.GroupBy(e => e.Source)
|
||||
.ToDictionary(g => g.Key, g => g.Select(e => (e.Target, e.Weight)).ToList());
|
||||
|
||||
foreach (var leafId in leafIds)
|
||||
{
|
||||
var path = FindPathBfs(forwardAdj, leafId, rootId);
|
||||
if (path.Count > 0)
|
||||
{
|
||||
// Calculate path confidence as product of edge weights
|
||||
var confidence = 1.0;
|
||||
for (int i = 0; i < path.Count - 1; i++)
|
||||
{
|
||||
var edge = edges.FirstOrDefault(e =>
|
||||
e.Source == path[i] && e.Target == path[i + 1]);
|
||||
if (edge is not null)
|
||||
{
|
||||
confidence *= edge.Weight;
|
||||
}
|
||||
}
|
||||
|
||||
var leafNode = nodes.FirstOrDefault(n => n.Id == leafId);
|
||||
paths.Add(new ProofGraphPath
|
||||
{
|
||||
NodeIds = [.. path],
|
||||
PathConfidence = confidence,
|
||||
Description = $"{leafNode?.Label ?? leafId} → verdict"
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Mark highest-confidence path as critical
|
||||
if (paths.Count > 0)
|
||||
{
|
||||
var maxConfidence = paths.Max(p => p.PathConfidence);
|
||||
for (int i = 0; i < paths.Count; i++)
|
||||
{
|
||||
if (Math.Abs(paths[i].PathConfidence - maxConfidence) < 0.0001)
|
||||
{
|
||||
paths[i] = paths[i] with { IsCritical = true };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return paths;
|
||||
}
|
||||
|
||||
private static List<string> FindPathBfs(
|
||||
Dictionary<string, List<(string Target, double Weight)>> adj,
|
||||
string from,
|
||||
string to)
|
||||
{
|
||||
var visited = new HashSet<string>();
|
||||
var queue = new Queue<List<string>>();
|
||||
queue.Enqueue([from]);
|
||||
|
||||
while (queue.Count > 0)
|
||||
{
|
||||
var path = queue.Dequeue();
|
||||
var current = path[^1];
|
||||
|
||||
if (current == to)
|
||||
return path;
|
||||
|
||||
if (!visited.Add(current))
|
||||
continue;
|
||||
|
||||
if (adj.TryGetValue(current, out var neighbors))
|
||||
{
|
||||
foreach (var (target, _) in neighbors.OrderBy(n => n.Target, StringComparer.Ordinal))
|
||||
{
|
||||
if (!visited.Contains(target))
|
||||
{
|
||||
queue.Enqueue([.. path, target]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return [];
|
||||
}
|
||||
|
||||
private static string ComputeGraphId(List<ProofGraphNode> nodes, List<ProofGraphEdge> edges)
|
||||
{
|
||||
// Deterministic: sort nodes by ID, edges by source+target
|
||||
var sortedNodes = string.Join("|", nodes.OrderBy(n => n.Id).Select(n => n.Id));
|
||||
var sortedEdges = string.Join("|", edges
|
||||
.OrderBy(e => e.Source).ThenBy(e => e.Target)
|
||||
.Select(e => $"{e.Source}->{e.Target}"));
|
||||
var content = $"{sortedNodes}:{sortedEdges}";
|
||||
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content));
|
||||
return $"pg:sha256:{Convert.ToHexStringLower(hash)}";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,204 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProofGraphModels.cs
|
||||
// Sprint: SPRINT_20260208_049_Policy_proof_studio_ux
|
||||
// Task: T1 - Proof graph visualization models
|
||||
// Description: Directed acyclic graph representation of the full evidence
|
||||
// chain backing a verdict. Nodes represent evidence artifacts,
|
||||
// edges represent derivation/dependency relationships, and
|
||||
// paths show the full chain from source evidence to verdict.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Explainability;
|
||||
|
||||
/// <summary>
|
||||
/// Complete directed acyclic graph representing the evidence chain
|
||||
/// from source artifacts to a final verdict decision.
|
||||
/// </summary>
|
||||
public sealed record ProofGraph
|
||||
{
|
||||
/// <summary>Content-addressed graph identifier.</summary>
|
||||
[JsonPropertyName("graph_id")]
|
||||
public required string GraphId { get; init; }
|
||||
|
||||
/// <summary>Reference to the verdict this graph explains.</summary>
|
||||
[JsonPropertyName("verdict_ref")]
|
||||
public required VerdictReference VerdictRef { get; init; }
|
||||
|
||||
/// <summary>All nodes in the proof graph.</summary>
|
||||
[JsonPropertyName("nodes")]
|
||||
public required ImmutableArray<ProofGraphNode> Nodes { get; init; }
|
||||
|
||||
/// <summary>All edges in the proof graph.</summary>
|
||||
[JsonPropertyName("edges")]
|
||||
public required ImmutableArray<ProofGraphEdge> Edges { get; init; }
|
||||
|
||||
/// <summary>Critical paths from source evidence to verdict.</summary>
|
||||
[JsonPropertyName("critical_paths")]
|
||||
public required ImmutableArray<ProofGraphPath> CriticalPaths { get; init; }
|
||||
|
||||
/// <summary>Root node ID (the verdict node).</summary>
|
||||
[JsonPropertyName("root_node_id")]
|
||||
public required string RootNodeId { get; init; }
|
||||
|
||||
/// <summary>Leaf node IDs (source evidence).</summary>
|
||||
[JsonPropertyName("leaf_node_ids")]
|
||||
public required ImmutableArray<string> LeafNodeIds { get; init; }
|
||||
|
||||
/// <summary>When the graph was computed.</summary>
|
||||
[JsonPropertyName("computed_at")]
|
||||
public required DateTimeOffset ComputedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A node in the proof graph representing an evidence artifact,
|
||||
/// intermediate computation, or the final verdict.
|
||||
/// </summary>
|
||||
public sealed record ProofGraphNode
|
||||
{
|
||||
/// <summary>Unique node identifier.</summary>
|
||||
[JsonPropertyName("id")]
|
||||
public required string Id { get; init; }
|
||||
|
||||
/// <summary>Human-readable label for display.</summary>
|
||||
[JsonPropertyName("label")]
|
||||
public required string Label { get; init; }
|
||||
|
||||
/// <summary>Node type classification.</summary>
|
||||
[JsonPropertyName("type")]
|
||||
public required ProofNodeType Type { get; init; }
|
||||
|
||||
/// <summary>Confidence score at this node (0.0 to 1.0).</summary>
|
||||
[JsonPropertyName("confidence")]
|
||||
public double? Confidence { get; init; }
|
||||
|
||||
/// <summary>Score contribution of this node to the verdict.</summary>
|
||||
[JsonPropertyName("score_contribution")]
|
||||
public double? ScoreContribution { get; init; }
|
||||
|
||||
/// <summary>Content digest of the underlying artifact.</summary>
|
||||
[JsonPropertyName("digest")]
|
||||
public string? Digest { get; init; }
|
||||
|
||||
/// <summary>Additional metadata for display.</summary>
|
||||
[JsonPropertyName("metadata")]
|
||||
public ImmutableDictionary<string, string> Metadata { get; init; } =
|
||||
ImmutableDictionary<string, string>.Empty;
|
||||
|
||||
/// <summary>Visual depth in the graph (0 = verdict root).</summary>
|
||||
[JsonPropertyName("depth")]
|
||||
public int Depth { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Classification of proof graph nodes.
|
||||
/// </summary>
|
||||
[JsonConverter(typeof(JsonStringEnumConverter))]
|
||||
public enum ProofNodeType
|
||||
{
|
||||
/// <summary>Final verdict decision.</summary>
|
||||
Verdict,
|
||||
|
||||
/// <summary>Policy rule evaluation.</summary>
|
||||
PolicyRule,
|
||||
|
||||
/// <summary>Scoring computation (e.g., EWS dimension).</summary>
|
||||
ScoreComputation,
|
||||
|
||||
/// <summary>VEX statement evidence.</summary>
|
||||
VexStatement,
|
||||
|
||||
/// <summary>Reachability analysis result.</summary>
|
||||
ReachabilityAnalysis,
|
||||
|
||||
/// <summary>SBOM lineage evidence.</summary>
|
||||
SbomEvidence,
|
||||
|
||||
/// <summary>Provenance attestation.</summary>
|
||||
Provenance,
|
||||
|
||||
/// <summary>Runtime signal observation.</summary>
|
||||
RuntimeSignal,
|
||||
|
||||
/// <summary>EPSS/CVSS advisory data.</summary>
|
||||
AdvisoryData,
|
||||
|
||||
/// <summary>Guardrail rule application.</summary>
|
||||
Guardrail,
|
||||
|
||||
/// <summary>Counterfactual hypothesis node.</summary>
|
||||
Counterfactual
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A directed edge in the proof graph showing derivation.
|
||||
/// </summary>
|
||||
public sealed record ProofGraphEdge
|
||||
{
|
||||
/// <summary>Source node ID (evidence provider).</summary>
|
||||
[JsonPropertyName("source")]
|
||||
public required string Source { get; init; }
|
||||
|
||||
/// <summary>Target node ID (evidence consumer).</summary>
|
||||
[JsonPropertyName("target")]
|
||||
public required string Target { get; init; }
|
||||
|
||||
/// <summary>Relationship type.</summary>
|
||||
[JsonPropertyName("relation")]
|
||||
public required ProofEdgeRelation Relation { get; init; }
|
||||
|
||||
/// <summary>Weight/importance of this edge (0.0 to 1.0).</summary>
|
||||
[JsonPropertyName("weight")]
|
||||
public double Weight { get; init; } = 1.0;
|
||||
|
||||
/// <summary>Human-readable label for the edge.</summary>
|
||||
[JsonPropertyName("label")]
|
||||
public string? Label { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Types of relationships between proof graph nodes.
|
||||
/// </summary>
|
||||
[JsonConverter(typeof(JsonStringEnumConverter))]
|
||||
public enum ProofEdgeRelation
|
||||
{
|
||||
/// <summary>Source provides input evidence to target.</summary>
|
||||
ProvidesEvidence,
|
||||
|
||||
/// <summary>Source score contributes to target aggregate.</summary>
|
||||
ContributesScore,
|
||||
|
||||
/// <summary>Source evaluation gates target decision.</summary>
|
||||
Gates,
|
||||
|
||||
/// <summary>Source attestation supports target claim.</summary>
|
||||
Attests,
|
||||
|
||||
/// <summary>Source overrides target under certain conditions.</summary>
|
||||
Overrides,
|
||||
|
||||
/// <summary>Source guardrail modifies target score.</summary>
|
||||
GuardrailApplied
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A path through the proof graph from a leaf evidence node
|
||||
/// to the root verdict node.
|
||||
/// </summary>
|
||||
public sealed record ProofGraphPath
|
||||
{
|
||||
/// <summary>Ordered node IDs from leaf to root.</summary>
|
||||
[JsonPropertyName("node_ids")]
|
||||
public required ImmutableArray<string> NodeIds { get; init; }
|
||||
|
||||
/// <summary>Cumulative confidence along this path.</summary>
|
||||
[JsonPropertyName("path_confidence")]
|
||||
public required double PathConfidence { get; init; }
|
||||
|
||||
/// <summary>Whether this path is the highest-confidence path.</summary>
|
||||
[JsonPropertyName("is_critical")]
|
||||
public bool IsCritical { get; init; }
|
||||
|
||||
/// <summary>Human-readable description of this evidence chain.</summary>
|
||||
[JsonPropertyName("description")]
|
||||
public required string Description { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,272 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProofStudioService.cs
|
||||
// Sprint: SPRINT_20260208_049_Policy_proof_studio_ux
|
||||
// Task: T2 - Integration service wiring proof graph + score breakdown
|
||||
// Description: Orchestrates proof graph construction and score breakdown
|
||||
// composition from existing policy engine data models.
|
||||
// Bridges ScoreExplanation (Policy.Scoring) and VerdictRationale
|
||||
// (Explainability) into the proof studio visualization models.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Diagnostics.Metrics;
|
||||
|
||||
namespace StellaOps.Policy.Explainability;
|
||||
|
||||
/// <summary>
|
||||
/// Integration surface for the Proof Studio UX.
|
||||
/// Composes proof graphs and score breakdowns from existing
|
||||
/// policy engine results.
|
||||
/// </summary>
|
||||
public interface IProofStudioService
|
||||
{
|
||||
/// <summary>
|
||||
/// Builds a full proof studio view from a verdict rationale and
|
||||
/// optional per-factor score explanation data.
|
||||
/// </summary>
|
||||
ProofStudioView Compose(ProofStudioRequest request);
|
||||
|
||||
/// <summary>
|
||||
/// Applies a counterfactual scenario to an existing proof studio view,
|
||||
/// returning a new view with the overlay applied.
|
||||
/// </summary>
|
||||
ProofStudioView ApplyCounterfactual(
|
||||
ProofStudioView current,
|
||||
CounterfactualScenario scenario);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request to compose a proof studio view.
|
||||
/// </summary>
|
||||
public sealed record ProofStudioRequest
|
||||
{
|
||||
/// <summary>Verdict rationale from the explainability module.</summary>
|
||||
public required VerdictRationale Rationale { get; init; }
|
||||
|
||||
/// <summary>Per-factor score explanations from scoring engine.</summary>
|
||||
public IReadOnlyList<ScoreFactorInput>? ScoreFactors { get; init; }
|
||||
|
||||
/// <summary>Composite score (0-100).</summary>
|
||||
public int? CompositeScore { get; init; }
|
||||
|
||||
/// <summary>Action bucket label.</summary>
|
||||
public string? ActionBucket { get; init; }
|
||||
|
||||
/// <summary>Guardrail applications, if any.</summary>
|
||||
public IReadOnlyList<GuardrailInput>? Guardrails { get; init; }
|
||||
|
||||
/// <summary>Entropy value (0-1).</summary>
|
||||
public double? Entropy { get; init; }
|
||||
|
||||
/// <summary>Whether manual review is required.</summary>
|
||||
public bool NeedsReview { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Score factor input from the scoring engine.
|
||||
/// </summary>
|
||||
public sealed record ScoreFactorInput
|
||||
{
|
||||
/// <summary>Factor identifier (e.g., "reachability", "evidence").</summary>
|
||||
public required string Factor { get; init; }
|
||||
|
||||
/// <summary>Raw factor value (0-100).</summary>
|
||||
public required int Value { get; init; }
|
||||
|
||||
/// <summary>Weight applied to this factor (0-1).</summary>
|
||||
public double Weight { get; init; }
|
||||
|
||||
/// <summary>Confidence in this factor's accuracy (0-1).</summary>
|
||||
public double Confidence { get; init; } = 1.0;
|
||||
|
||||
/// <summary>Human-readable explanation.</summary>
|
||||
public required string Reason { get; init; }
|
||||
|
||||
/// <summary>Whether this factor is subtractive.</summary>
|
||||
public bool IsSubtractive { get; init; }
|
||||
|
||||
/// <summary>Contributing evidence digests.</summary>
|
||||
public IReadOnlyList<string>? ContributingDigests { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Guardrail application input from the scoring engine.
|
||||
/// </summary>
|
||||
public sealed record GuardrailInput
|
||||
{
|
||||
public required string Name { get; init; }
|
||||
public int ScoreBefore { get; init; }
|
||||
public int ScoreAfter { get; init; }
|
||||
public required string Reason { get; init; }
|
||||
public IReadOnlyList<string>? Conditions { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Complete proof studio view combining graph and dashboard.
|
||||
/// </summary>
|
||||
public sealed record ProofStudioView
|
||||
{
|
||||
/// <summary>The proof graph DAG.</summary>
|
||||
[JsonPropertyName("proof_graph")]
|
||||
public required ProofGraph ProofGraph { get; init; }
|
||||
|
||||
/// <summary>The score breakdown dashboard.</summary>
|
||||
[JsonPropertyName("score_breakdown")]
|
||||
public ScoreBreakdownDashboard? ScoreBreakdown { get; init; }
|
||||
|
||||
/// <summary>When this view was composed.</summary>
|
||||
[JsonPropertyName("composed_at")]
|
||||
public required DateTimeOffset ComposedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Default implementation of <see cref="IProofStudioService"/>.
|
||||
/// </summary>
|
||||
public sealed class ProofStudioService : IProofStudioService
|
||||
{
|
||||
private readonly IProofGraphBuilder _graphBuilder;
|
||||
private readonly ILogger<ProofStudioService> _logger;
|
||||
private readonly Counter<long> _viewsComposed;
|
||||
private readonly Counter<long> _counterfactualsApplied;
|
||||
|
||||
public ProofStudioService(
|
||||
IProofGraphBuilder graphBuilder,
|
||||
ILogger<ProofStudioService> logger,
|
||||
IMeterFactory meterFactory)
|
||||
{
|
||||
_graphBuilder = graphBuilder;
|
||||
_logger = logger;
|
||||
|
||||
var meter = meterFactory.Create("StellaOps.Policy.Explainability.ProofStudio");
|
||||
_viewsComposed = meter.CreateCounter<long>(
|
||||
"stellaops.proofstudio.views_composed_total",
|
||||
description: "Total proof studio views composed");
|
||||
_counterfactualsApplied = meter.CreateCounter<long>(
|
||||
"stellaops.proofstudio.counterfactuals_applied_total",
|
||||
description: "Total counterfactual scenarios applied");
|
||||
}
|
||||
|
||||
public ProofStudioView Compose(ProofStudioRequest request)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
|
||||
// Build score breakdown dashboard if factor data is available
|
||||
ScoreBreakdownDashboard? dashboard = null;
|
||||
if (request.ScoreFactors is { Count: > 0 })
|
||||
{
|
||||
dashboard = BuildDashboard(request, now);
|
||||
}
|
||||
|
||||
// Build proof graph
|
||||
var graphInput = new ProofGraphInput
|
||||
{
|
||||
Rationale = request.Rationale,
|
||||
ScoreBreakdown = dashboard,
|
||||
ComputedAt = now
|
||||
};
|
||||
|
||||
var proofGraph = _graphBuilder.Build(graphInput);
|
||||
|
||||
_viewsComposed.Add(1);
|
||||
_logger.LogDebug(
|
||||
"Composed proof studio view {GraphId} with {HasDashboard} dashboard",
|
||||
proofGraph.GraphId, dashboard is not null);
|
||||
|
||||
return new ProofStudioView
|
||||
{
|
||||
ProofGraph = proofGraph,
|
||||
ScoreBreakdown = dashboard,
|
||||
ComposedAt = now
|
||||
};
|
||||
}
|
||||
|
||||
public ProofStudioView ApplyCounterfactual(
|
||||
ProofStudioView current,
|
||||
CounterfactualScenario scenario)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(current);
|
||||
ArgumentNullException.ThrowIfNull(scenario);
|
||||
|
||||
var overlayGraph = _graphBuilder.AddCounterfactualOverlay(
|
||||
current.ProofGraph, scenario);
|
||||
|
||||
_counterfactualsApplied.Add(1);
|
||||
_logger.LogDebug(
|
||||
"Applied counterfactual '{Label}' to graph {GraphId}",
|
||||
scenario.Label, current.ProofGraph.GraphId);
|
||||
|
||||
return current with
|
||||
{
|
||||
ProofGraph = overlayGraph,
|
||||
ComposedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
|
||||
// ── Private helpers ──────────────────────────────────────────────────
|
||||
|
||||
private static ScoreBreakdownDashboard BuildDashboard(
|
||||
ProofStudioRequest request,
|
||||
DateTimeOffset computedAt)
|
||||
{
|
||||
var factors = request.ScoreFactors!
|
||||
.Select(f => new FactorContribution
|
||||
{
|
||||
FactorId = f.Factor,
|
||||
FactorName = FormatFactorName(f.Factor),
|
||||
RawScore = f.Value,
|
||||
Weight = f.Weight,
|
||||
Confidence = f.Confidence,
|
||||
IsSubtractive = f.IsSubtractive,
|
||||
EvidenceSource = f.ContributingDigests?.FirstOrDefault(),
|
||||
Explanation = f.Reason
|
||||
})
|
||||
.ToImmutableArray();
|
||||
|
||||
var guardrails = (request.Guardrails ?? [])
|
||||
.Select(g => new GuardrailApplication
|
||||
{
|
||||
GuardrailName = g.Name,
|
||||
ScoreBefore = g.ScoreBefore,
|
||||
ScoreAfter = g.ScoreAfter,
|
||||
Reason = g.Reason,
|
||||
Conditions = g.Conditions is not null
|
||||
? [.. g.Conditions]
|
||||
: []
|
||||
})
|
||||
.ToImmutableArray();
|
||||
|
||||
return new ScoreBreakdownDashboard
|
||||
{
|
||||
DashboardId = $"dash:{Guid.CreateVersion7():N}",
|
||||
VerdictRef = request.Rationale.VerdictRef,
|
||||
CompositeScore = request.CompositeScore ?? 0,
|
||||
ActionBucket = request.ActionBucket ?? "Unknown",
|
||||
Factors = factors,
|
||||
GuardrailsApplied = guardrails,
|
||||
PreGuardrailScore = request.CompositeScore ?? 0,
|
||||
Entropy = request.Entropy ?? 0.0,
|
||||
NeedsReview = request.NeedsReview,
|
||||
ComputedAt = computedAt
|
||||
};
|
||||
}
|
||||
|
||||
private static string FormatFactorName(string factorId)
|
||||
{
|
||||
return factorId switch
|
||||
{
|
||||
"reachability" or "rch" => "Reachability",
|
||||
"evidence" or "evd" => "Evidence",
|
||||
"provenance" or "prv" => "Provenance",
|
||||
"baseSeverity" or "sev" => "Base Severity",
|
||||
"runtimeSignal" or "rts" => "Runtime Signal",
|
||||
"mitigation" or "mit" => "Mitigation",
|
||||
"exploit" or "exp" => "Exploit Maturity",
|
||||
"temporal" or "tmp" => "Temporal",
|
||||
_ => factorId.Length > 0
|
||||
? char.ToUpperInvariant(factorId[0]) + factorId[1..]
|
||||
: factorId
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,131 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ScoreBreakdownDashboard.cs
|
||||
// Sprint: SPRINT_20260208_049_Policy_proof_studio_ux
|
||||
// Task: T1 - Score breakdown dashboard data models
|
||||
// Description: Per-factor score breakdown for dashboard visualization.
|
||||
// Produces chart-ready data showing how each scoring dimension
|
||||
// contributes to the final verdict score.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Policy.Explainability;
|
||||
|
||||
/// <summary>
|
||||
/// Complete score breakdown for dashboard visualization, showing
|
||||
/// per-factor contributions to the final verdict score.
|
||||
/// </summary>
|
||||
public sealed record ScoreBreakdownDashboard
|
||||
{
|
||||
/// <summary>Content-addressed dashboard identifier.</summary>
|
||||
[JsonPropertyName("dashboard_id")]
|
||||
public required string DashboardId { get; init; }
|
||||
|
||||
/// <summary>Reference to the verdict being broken down.</summary>
|
||||
[JsonPropertyName("verdict_ref")]
|
||||
public required VerdictReference VerdictRef { get; init; }
|
||||
|
||||
/// <summary>Overall composite score (0-100).</summary>
|
||||
[JsonPropertyName("composite_score")]
|
||||
public required int CompositeScore { get; init; }
|
||||
|
||||
/// <summary>Action bucket label (e.g., "Act Now", "Schedule Next").</summary>
|
||||
[JsonPropertyName("action_bucket")]
|
||||
public required string ActionBucket { get; init; }
|
||||
|
||||
/// <summary>Per-factor contribution breakdown for chart rendering.</summary>
|
||||
[JsonPropertyName("factors")]
|
||||
public required ImmutableArray<FactorContribution> Factors { get; init; }
|
||||
|
||||
/// <summary>Guardrails that were applied, if any.</summary>
|
||||
[JsonPropertyName("guardrails_applied")]
|
||||
public ImmutableArray<GuardrailApplication> GuardrailsApplied { get; init; } = [];
|
||||
|
||||
/// <summary>Score before guardrails were applied.</summary>
|
||||
[JsonPropertyName("pre_guardrail_score")]
|
||||
public int? PreGuardrailScore { get; init; }
|
||||
|
||||
/// <summary>Entropy level for determinization decisions.</summary>
|
||||
[JsonPropertyName("entropy")]
|
||||
public double? Entropy { get; init; }
|
||||
|
||||
/// <summary>Whether this verdict needs manual review based on entropy.</summary>
|
||||
[JsonPropertyName("needs_review")]
|
||||
public bool NeedsReview { get; init; }
|
||||
|
||||
/// <summary>When the breakdown was computed.</summary>
|
||||
[JsonPropertyName("computed_at")]
|
||||
public required DateTimeOffset ComputedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Individual factor contribution to the composite score.
|
||||
/// </summary>
|
||||
public sealed record FactorContribution
|
||||
{
|
||||
/// <summary>Factor identifier (e.g., "rch", "rts", "bkp").</summary>
|
||||
[JsonPropertyName("factor_id")]
|
||||
public required string FactorId { get; init; }
|
||||
|
||||
/// <summary>Human-readable factor name.</summary>
|
||||
[JsonPropertyName("factor_name")]
|
||||
public required string FactorName { get; init; }
|
||||
|
||||
/// <summary>Raw normalized score for this factor (0-100).</summary>
|
||||
[JsonPropertyName("raw_score")]
|
||||
public required int RawScore { get; init; }
|
||||
|
||||
/// <summary>Weight assigned to this factor (0.0-1.0).</summary>
|
||||
[JsonPropertyName("weight")]
|
||||
public required double Weight { get; init; }
|
||||
|
||||
/// <summary>Weighted contribution to composite score.</summary>
|
||||
[JsonPropertyName("weighted_contribution")]
|
||||
public double WeightedContribution => RawScore * Weight;
|
||||
|
||||
/// <summary>Confidence level for this factor (0.0-1.0).</summary>
|
||||
[JsonPropertyName("confidence")]
|
||||
public required double Confidence { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether this is a subtractive factor (reduces risk).
|
||||
/// </summary>
|
||||
[JsonPropertyName("is_subtractive")]
|
||||
public bool IsSubtractive { get; init; }
|
||||
|
||||
/// <summary>Source of the evidence for this factor.</summary>
|
||||
[JsonPropertyName("evidence_source")]
|
||||
public string? EvidenceSource { get; init; }
|
||||
|
||||
/// <summary>Human-readable explanation of the score.</summary>
|
||||
[JsonPropertyName("explanation")]
|
||||
public required string Explanation { get; init; }
|
||||
|
||||
/// <summary>Percentage of composite that this factor contributes.</summary>
|
||||
[JsonPropertyName("percentage_of_total")]
|
||||
public double PercentageOfTotal { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Record of a guardrail being applied to the score.
|
||||
/// </summary>
|
||||
public sealed record GuardrailApplication
|
||||
{
|
||||
/// <summary>Guardrail name (e.g., "notAffectedCap", "runtimeFloor").</summary>
|
||||
[JsonPropertyName("guardrail_name")]
|
||||
public required string GuardrailName { get; init; }
|
||||
|
||||
/// <summary>Score before this guardrail.</summary>
|
||||
[JsonPropertyName("score_before")]
|
||||
public required int ScoreBefore { get; init; }
|
||||
|
||||
/// <summary>Score after this guardrail.</summary>
|
||||
[JsonPropertyName("score_after")]
|
||||
public required int ScoreAfter { get; init; }
|
||||
|
||||
/// <summary>Human-readable reason the guardrail triggered.</summary>
|
||||
[JsonPropertyName("reason")]
|
||||
public required string Reason { get; init; }
|
||||
|
||||
/// <summary>Conditions that caused the guardrail to fire.</summary>
|
||||
[JsonPropertyName("conditions")]
|
||||
public ImmutableArray<string> Conditions { get; init; } = [];
|
||||
}
|
||||
@@ -7,6 +7,8 @@ public static class ExplainabilityServiceCollectionExtensions
|
||||
public static IServiceCollection AddVerdictExplainability(this IServiceCollection services)
|
||||
{
|
||||
services.AddSingleton<IVerdictRationaleRenderer, VerdictRationaleRenderer>();
|
||||
services.AddSingleton<IProofGraphBuilder, ProofGraphBuilder>();
|
||||
services.AddSingleton<IProofStudioService, ProofStudioService>();
|
||||
return services;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,153 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IPolicyDiffMerge.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - Policy diff/merge interface
|
||||
// Description: Interface for diffing and merging PolicyPackDocuments.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
|
||||
namespace StellaOps.Policy.Interop.Abstractions;
|
||||
|
||||
/// <summary>
|
||||
/// Computes structural diffs between two PolicyPackDocuments and merges packs.
|
||||
/// </summary>
|
||||
public interface IPolicyDiffMerge
|
||||
{
|
||||
/// <summary>
|
||||
/// Computes a structural diff between two policy pack documents.
|
||||
/// Returns a list of changes (additions, removals, modifications).
|
||||
/// </summary>
|
||||
PolicyDiffResult Diff(PolicyPackDocument baseline, PolicyPackDocument updated);
|
||||
|
||||
/// <summary>
|
||||
/// Merges two policy pack documents according to the specified strategy.
|
||||
/// </summary>
|
||||
PolicyMergeResult Merge(
|
||||
PolicyPackDocument baseDoc,
|
||||
PolicyPackDocument overlay,
|
||||
PolicyMergeStrategy strategy = PolicyMergeStrategy.OverlayWins);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of a policy diff operation.
|
||||
/// </summary>
|
||||
public sealed record PolicyDiffResult
|
||||
{
|
||||
/// <summary>Whether the two documents are identical.</summary>
|
||||
public required bool AreIdentical { get; init; }
|
||||
|
||||
/// <summary>Ordered list of changes between baseline and updated.</summary>
|
||||
public IReadOnlyList<PolicyChange> Changes { get; init; } = [];
|
||||
|
||||
/// <summary>Summary statistics about the diff.</summary>
|
||||
public required PolicyDiffSummary Summary { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A single change between two policy documents.
|
||||
/// </summary>
|
||||
public sealed record PolicyChange
|
||||
{
|
||||
/// <summary>Type of change.</summary>
|
||||
public required PolicyChangeType ChangeType { get; init; }
|
||||
|
||||
/// <summary>Category: "gate", "rule", "setting", "metadata".</summary>
|
||||
public required string Category { get; init; }
|
||||
|
||||
/// <summary>Path to the changed element (e.g., "gates[cvss-threshold].config.threshold").</summary>
|
||||
public required string Path { get; init; }
|
||||
|
||||
/// <summary>Previous value (null for additions).</summary>
|
||||
public object? OldValue { get; init; }
|
||||
|
||||
/// <summary>New value (null for removals).</summary>
|
||||
public object? NewValue { get; init; }
|
||||
|
||||
/// <summary>Human-readable description of the change.</summary>
|
||||
public required string Description { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Type of policy change.
|
||||
/// </summary>
|
||||
public enum PolicyChangeType
|
||||
{
|
||||
/// <summary>Element was added in the updated document.</summary>
|
||||
Added,
|
||||
|
||||
/// <summary>Element was removed in the updated document.</summary>
|
||||
Removed,
|
||||
|
||||
/// <summary>Element was modified between documents.</summary>
|
||||
Modified
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Summary statistics for a policy diff.
|
||||
/// </summary>
|
||||
public sealed record PolicyDiffSummary
|
||||
{
|
||||
/// <summary>Number of additions.</summary>
|
||||
public int Additions { get; init; }
|
||||
|
||||
/// <summary>Number of removals.</summary>
|
||||
public int Removals { get; init; }
|
||||
|
||||
/// <summary>Number of modifications.</summary>
|
||||
public int Modifications { get; init; }
|
||||
|
||||
/// <summary>Total number of changes.</summary>
|
||||
public int Total => Additions + Removals + Modifications;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Strategy for merging two policy pack documents.
|
||||
/// </summary>
|
||||
public enum PolicyMergeStrategy
|
||||
{
|
||||
/// <summary>Overlay values win on conflict.</summary>
|
||||
OverlayWins,
|
||||
|
||||
/// <summary>Base values win on conflict.</summary>
|
||||
BaseWins,
|
||||
|
||||
/// <summary>Fail on any conflict.</summary>
|
||||
FailOnConflict
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of a policy merge operation.
|
||||
/// </summary>
|
||||
public sealed record PolicyMergeResult
|
||||
{
|
||||
/// <summary>Whether the merge succeeded.</summary>
|
||||
public required bool Success { get; init; }
|
||||
|
||||
/// <summary>Merged document (null if failed).</summary>
|
||||
public PolicyPackDocument? Document { get; init; }
|
||||
|
||||
/// <summary>Conflicts encountered during merge (empty if OverlayWins/BaseWins).</summary>
|
||||
public IReadOnlyList<PolicyMergeConflict> Conflicts { get; init; } = [];
|
||||
|
||||
/// <summary>Diagnostics from the merge operation.</summary>
|
||||
public IReadOnlyList<PolicyDiagnostic> Diagnostics { get; init; } = [];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A conflict encountered during policy merge.
|
||||
/// </summary>
|
||||
public sealed record PolicyMergeConflict
|
||||
{
|
||||
/// <summary>Path to the conflicting element.</summary>
|
||||
public required string Path { get; init; }
|
||||
|
||||
/// <summary>Value from the base document.</summary>
|
||||
public object? BaseValue { get; init; }
|
||||
|
||||
/// <summary>Value from the overlay document.</summary>
|
||||
public object? OverlayValue { get; init; }
|
||||
|
||||
/// <summary>Human-readable description of the conflict.</summary>
|
||||
public required string Description { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,43 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IPolicyYamlExporter.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - YAML export interface
|
||||
// Description: Interface for YAML export of PolicyPackDocuments.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
|
||||
namespace StellaOps.Policy.Interop.Abstractions;
|
||||
|
||||
/// <summary>
|
||||
/// Exports native C# policy packs to YAML format.
|
||||
/// </summary>
|
||||
public interface IPolicyYamlExporter
|
||||
{
|
||||
/// <summary>
|
||||
/// Exports the given policy pack document to canonical YAML format.
|
||||
/// The output is deterministic: same input produces byte-identical output.
|
||||
/// </summary>
|
||||
Task<YamlExportResult> ExportToYamlAsync(
|
||||
PolicyPackDocument document,
|
||||
PolicyExportRequest request,
|
||||
CancellationToken ct = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of a YAML export operation.
|
||||
/// </summary>
|
||||
public sealed record YamlExportResult
|
||||
{
|
||||
/// <summary>Whether export succeeded.</summary>
|
||||
public required bool Success { get; init; }
|
||||
|
||||
/// <summary>Generated YAML content.</summary>
|
||||
public required string YamlContent { get; init; }
|
||||
|
||||
/// <summary>SHA-256 digest of the generated YAML.</summary>
|
||||
public string? Digest { get; init; }
|
||||
|
||||
/// <summary>Warnings generated during export.</summary>
|
||||
public IReadOnlyList<string> Warnings { get; init; } = [];
|
||||
}
|
||||
@@ -320,12 +320,14 @@ public static class PolicyFormats
|
||||
{
|
||||
public const string Json = "json";
|
||||
public const string Rego = "rego";
|
||||
public const string Yaml = "yaml";
|
||||
|
||||
public static readonly IReadOnlyList<string> All = [Json, Rego];
|
||||
public static readonly IReadOnlyList<string> All = [Json, Rego, Yaml];
|
||||
|
||||
public static bool IsValid(string format) =>
|
||||
string.Equals(format, Json, StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(format, Rego, StringComparison.OrdinalIgnoreCase);
|
||||
string.Equals(format, Rego, StringComparison.OrdinalIgnoreCase) ||
|
||||
string.Equals(format, Yaml, StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.DependencyInjection.Extensions;
|
||||
using StellaOps.Policy.Interop.Abstractions;
|
||||
using StellaOps.Policy.Interop.DiffMerge;
|
||||
using StellaOps.Policy.Interop.Export;
|
||||
using StellaOps.Policy.Interop.Import;
|
||||
|
||||
namespace StellaOps.Policy.Interop.DependencyInjection;
|
||||
|
||||
@@ -10,13 +14,26 @@ public static class PolicyInteropServiceCollectionExtensions
|
||||
{
|
||||
/// <summary>
|
||||
/// Adds Policy Interop services to the service collection.
|
||||
/// Registers: IPolicyExporter, IPolicyImporter, IPolicyValidator,
|
||||
/// Registers: IPolicyExporter, IPolicyImporter (JSON + YAML),
|
||||
/// IPolicyYamlExporter, IPolicyDiffMerge, IPolicyValidator,
|
||||
/// IPolicyEvaluator, IRegoCodeGenerator, IRemediationResolver.
|
||||
/// </summary>
|
||||
public static IServiceCollection AddPolicyInterop(this IServiceCollection services)
|
||||
{
|
||||
// Implementations are registered in TASK-02..05 when created.
|
||||
// This extension point ensures consistent DI wiring.
|
||||
// JSON export/import
|
||||
services.TryAddSingleton<IPolicyExporter, JsonPolicyExporter>();
|
||||
services.TryAddSingleton<JsonPolicyImporter>();
|
||||
|
||||
// YAML export/import
|
||||
services.TryAddSingleton<IPolicyYamlExporter, YamlPolicyExporter>();
|
||||
services.TryAddSingleton<YamlPolicyImporter>();
|
||||
|
||||
// Register both importers as IPolicyImporter (JSON is the primary/default)
|
||||
services.TryAddSingleton<IPolicyImporter, JsonPolicyImporter>();
|
||||
|
||||
// Policy diff/merge engine
|
||||
services.TryAddSingleton<IPolicyDiffMerge, PolicyDiffMergeEngine>();
|
||||
|
||||
return services;
|
||||
}
|
||||
|
||||
|
||||
@@ -0,0 +1,639 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PolicyDiffMergeEngine.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - Policy diff/merge implementation
|
||||
// Description: Structural diff and merge engine for PolicyPackDocuments.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using StellaOps.Policy.Interop.Abstractions;
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
|
||||
namespace StellaOps.Policy.Interop.DiffMerge;
|
||||
|
||||
/// <summary>
|
||||
/// Computes structural diffs and merges for PolicyPackDocuments.
|
||||
/// All operations are deterministic and offline-safe.
|
||||
/// </summary>
|
||||
public sealed class PolicyDiffMergeEngine : IPolicyDiffMerge
|
||||
{
|
||||
/// <inheritdoc/>
|
||||
public PolicyDiffResult Diff(PolicyPackDocument baseline, PolicyPackDocument updated)
|
||||
{
|
||||
var changes = new List<PolicyChange>();
|
||||
|
||||
// Diff metadata
|
||||
DiffMetadata(baseline.Metadata, updated.Metadata, changes);
|
||||
|
||||
// Diff settings
|
||||
DiffSettings(baseline.Spec.Settings, updated.Spec.Settings, changes);
|
||||
|
||||
// Diff gates
|
||||
DiffGates(baseline.Spec.Gates, updated.Spec.Gates, changes);
|
||||
|
||||
// Diff rules
|
||||
DiffRules(baseline.Spec.Rules, updated.Spec.Rules, changes);
|
||||
|
||||
var summary = new PolicyDiffSummary
|
||||
{
|
||||
Additions = changes.Count(c => c.ChangeType == PolicyChangeType.Added),
|
||||
Removals = changes.Count(c => c.ChangeType == PolicyChangeType.Removed),
|
||||
Modifications = changes.Count(c => c.ChangeType == PolicyChangeType.Modified)
|
||||
};
|
||||
|
||||
return new PolicyDiffResult
|
||||
{
|
||||
AreIdentical = changes.Count == 0,
|
||||
Changes = changes,
|
||||
Summary = summary
|
||||
};
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public PolicyMergeResult Merge(
|
||||
PolicyPackDocument baseDoc,
|
||||
PolicyPackDocument overlay,
|
||||
PolicyMergeStrategy strategy = PolicyMergeStrategy.OverlayWins)
|
||||
{
|
||||
var conflicts = new List<PolicyMergeConflict>();
|
||||
var diagnostics = new List<PolicyDiagnostic>();
|
||||
|
||||
// Merge metadata (overlay wins for description, version)
|
||||
var mergedMeta = MergeMetadata(baseDoc.Metadata, overlay.Metadata, strategy, conflicts);
|
||||
|
||||
// Merge settings
|
||||
var mergedSettings = MergeSettings(baseDoc.Spec.Settings, overlay.Spec.Settings, strategy, conflicts);
|
||||
|
||||
// Merge gates
|
||||
var mergedGates = MergeGates(baseDoc.Spec.Gates, overlay.Spec.Gates, strategy, conflicts);
|
||||
|
||||
// Merge rules
|
||||
var mergedRules = MergeRules(baseDoc.Spec.Rules, overlay.Spec.Rules, strategy, conflicts);
|
||||
|
||||
// Fail on conflicts if strategy demands it
|
||||
if (strategy == PolicyMergeStrategy.FailOnConflict && conflicts.Count > 0)
|
||||
{
|
||||
diagnostics.Add(new PolicyDiagnostic
|
||||
{
|
||||
Severity = PolicyDiagnostic.Severities.Error,
|
||||
Code = "MERGE_CONFLICT",
|
||||
Message = $"Merge failed: {conflicts.Count} conflict(s) found."
|
||||
});
|
||||
|
||||
return new PolicyMergeResult
|
||||
{
|
||||
Success = false,
|
||||
Document = null,
|
||||
Conflicts = conflicts,
|
||||
Diagnostics = diagnostics
|
||||
};
|
||||
}
|
||||
|
||||
var merged = new PolicyPackDocument
|
||||
{
|
||||
ApiVersion = baseDoc.ApiVersion,
|
||||
Kind = baseDoc.Kind,
|
||||
Metadata = mergedMeta,
|
||||
Spec = new PolicyPackSpec
|
||||
{
|
||||
Settings = mergedSettings,
|
||||
Gates = mergedGates,
|
||||
Rules = mergedRules
|
||||
}
|
||||
};
|
||||
|
||||
if (conflicts.Count > 0)
|
||||
{
|
||||
diagnostics.Add(new PolicyDiagnostic
|
||||
{
|
||||
Severity = PolicyDiagnostic.Severities.Warning,
|
||||
Code = "MERGE_CONFLICTS_RESOLVED",
|
||||
Message = $"{conflicts.Count} conflict(s) resolved using {strategy} strategy."
|
||||
});
|
||||
}
|
||||
|
||||
return new PolicyMergeResult
|
||||
{
|
||||
Success = true,
|
||||
Document = merged,
|
||||
Conflicts = conflicts,
|
||||
Diagnostics = diagnostics
|
||||
};
|
||||
}
|
||||
|
||||
#region Diff Methods
|
||||
|
||||
private static void DiffMetadata(
|
||||
PolicyPackMetadata baseline, PolicyPackMetadata updated, List<PolicyChange> changes)
|
||||
{
|
||||
if (baseline.Name != updated.Name)
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Modified,
|
||||
Category = "metadata",
|
||||
Path = "metadata.name",
|
||||
OldValue = baseline.Name,
|
||||
NewValue = updated.Name,
|
||||
Description = $"Name changed from '{baseline.Name}' to '{updated.Name}'."
|
||||
});
|
||||
}
|
||||
|
||||
if (baseline.Version != updated.Version)
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Modified,
|
||||
Category = "metadata",
|
||||
Path = "metadata.version",
|
||||
OldValue = baseline.Version,
|
||||
NewValue = updated.Version,
|
||||
Description = $"Version changed from '{baseline.Version}' to '{updated.Version}'."
|
||||
});
|
||||
}
|
||||
|
||||
if (baseline.Description != updated.Description)
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Modified,
|
||||
Category = "metadata",
|
||||
Path = "metadata.description",
|
||||
OldValue = baseline.Description,
|
||||
NewValue = updated.Description,
|
||||
Description = "Description changed."
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private static void DiffSettings(
|
||||
PolicyPackSettings baseline, PolicyPackSettings updated, List<PolicyChange> changes)
|
||||
{
|
||||
if (baseline.DefaultAction != updated.DefaultAction)
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Modified,
|
||||
Category = "setting",
|
||||
Path = "spec.settings.defaultAction",
|
||||
OldValue = baseline.DefaultAction,
|
||||
NewValue = updated.DefaultAction,
|
||||
Description = $"Default action changed from '{baseline.DefaultAction}' to '{updated.DefaultAction}'."
|
||||
});
|
||||
}
|
||||
|
||||
if (Math.Abs(baseline.UnknownsThreshold - updated.UnknownsThreshold) > 1e-10)
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Modified,
|
||||
Category = "setting",
|
||||
Path = "spec.settings.unknownsThreshold",
|
||||
OldValue = baseline.UnknownsThreshold,
|
||||
NewValue = updated.UnknownsThreshold,
|
||||
Description = $"Unknowns threshold changed from {baseline.UnknownsThreshold} to {updated.UnknownsThreshold}."
|
||||
});
|
||||
}
|
||||
|
||||
if (baseline.StopOnFirstFailure != updated.StopOnFirstFailure)
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Modified,
|
||||
Category = "setting",
|
||||
Path = "spec.settings.stopOnFirstFailure",
|
||||
OldValue = baseline.StopOnFirstFailure,
|
||||
NewValue = updated.StopOnFirstFailure,
|
||||
Description = $"StopOnFirstFailure changed from {baseline.StopOnFirstFailure} to {updated.StopOnFirstFailure}."
|
||||
});
|
||||
}
|
||||
|
||||
if (baseline.DeterministicMode != updated.DeterministicMode)
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Modified,
|
||||
Category = "setting",
|
||||
Path = "spec.settings.deterministicMode",
|
||||
OldValue = baseline.DeterministicMode,
|
||||
NewValue = updated.DeterministicMode,
|
||||
Description = $"DeterministicMode changed from {baseline.DeterministicMode} to {updated.DeterministicMode}."
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private static void DiffGates(
|
||||
IReadOnlyList<PolicyGateDefinition> baselineGates,
|
||||
IReadOnlyList<PolicyGateDefinition> updatedGates,
|
||||
List<PolicyChange> changes)
|
||||
{
|
||||
var baseMap = baselineGates.ToDictionary(g => g.Id);
|
||||
var updatedMap = updatedGates.ToDictionary(g => g.Id);
|
||||
|
||||
// Removed gates
|
||||
foreach (var id in baseMap.Keys.Except(updatedMap.Keys).OrderBy(k => k, StringComparer.Ordinal))
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Removed,
|
||||
Category = "gate",
|
||||
Path = $"spec.gates[{id}]",
|
||||
OldValue = baseMap[id].Type,
|
||||
NewValue = null,
|
||||
Description = $"Gate '{id}' ({baseMap[id].Type}) removed."
|
||||
});
|
||||
}
|
||||
|
||||
// Added gates
|
||||
foreach (var id in updatedMap.Keys.Except(baseMap.Keys).OrderBy(k => k, StringComparer.Ordinal))
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Added,
|
||||
Category = "gate",
|
||||
Path = $"spec.gates[{id}]",
|
||||
OldValue = null,
|
||||
NewValue = updatedMap[id].Type,
|
||||
Description = $"Gate '{id}' ({updatedMap[id].Type}) added."
|
||||
});
|
||||
}
|
||||
|
||||
// Modified gates
|
||||
foreach (var id in baseMap.Keys.Intersect(updatedMap.Keys).OrderBy(k => k, StringComparer.Ordinal))
|
||||
{
|
||||
var baseGate = baseMap[id];
|
||||
var updatedGate = updatedMap[id];
|
||||
|
||||
if (baseGate.Enabled != updatedGate.Enabled)
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Modified,
|
||||
Category = "gate",
|
||||
Path = $"spec.gates[{id}].enabled",
|
||||
OldValue = baseGate.Enabled,
|
||||
NewValue = updatedGate.Enabled,
|
||||
Description = $"Gate '{id}' enabled changed from {baseGate.Enabled} to {updatedGate.Enabled}."
|
||||
});
|
||||
}
|
||||
|
||||
if (baseGate.Type != updatedGate.Type)
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Modified,
|
||||
Category = "gate",
|
||||
Path = $"spec.gates[{id}].type",
|
||||
OldValue = baseGate.Type,
|
||||
NewValue = updatedGate.Type,
|
||||
Description = $"Gate '{id}' type changed from '{baseGate.Type}' to '{updatedGate.Type}'."
|
||||
});
|
||||
}
|
||||
|
||||
// Diff config values
|
||||
DiffDictionary(baseGate.Config, updatedGate.Config, $"spec.gates[{id}].config", "gate", changes);
|
||||
}
|
||||
}
|
||||
|
||||
private static void DiffRules(
|
||||
IReadOnlyList<PolicyRuleDefinition> baselineRules,
|
||||
IReadOnlyList<PolicyRuleDefinition> updatedRules,
|
||||
List<PolicyChange> changes)
|
||||
{
|
||||
var baseMap = baselineRules.ToDictionary(r => r.Name);
|
||||
var updatedMap = updatedRules.ToDictionary(r => r.Name);
|
||||
|
||||
// Removed rules
|
||||
foreach (var name in baseMap.Keys.Except(updatedMap.Keys).OrderBy(k => k, StringComparer.Ordinal))
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Removed,
|
||||
Category = "rule",
|
||||
Path = $"spec.rules[{name}]",
|
||||
OldValue = baseMap[name].Action,
|
||||
NewValue = null,
|
||||
Description = $"Rule '{name}' removed."
|
||||
});
|
||||
}
|
||||
|
||||
// Added rules
|
||||
foreach (var name in updatedMap.Keys.Except(baseMap.Keys).OrderBy(k => k, StringComparer.Ordinal))
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Added,
|
||||
Category = "rule",
|
||||
Path = $"spec.rules[{name}]",
|
||||
OldValue = null,
|
||||
NewValue = updatedMap[name].Action,
|
||||
Description = $"Rule '{name}' added."
|
||||
});
|
||||
}
|
||||
|
||||
// Modified rules
|
||||
foreach (var name in baseMap.Keys.Intersect(updatedMap.Keys).OrderBy(k => k, StringComparer.Ordinal))
|
||||
{
|
||||
var baseRule = baseMap[name];
|
||||
var updatedRule = updatedMap[name];
|
||||
|
||||
if (baseRule.Action != updatedRule.Action)
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Modified,
|
||||
Category = "rule",
|
||||
Path = $"spec.rules[{name}].action",
|
||||
OldValue = baseRule.Action,
|
||||
NewValue = updatedRule.Action,
|
||||
Description = $"Rule '{name}' action changed from '{baseRule.Action}' to '{updatedRule.Action}'."
|
||||
});
|
||||
}
|
||||
|
||||
if (baseRule.Priority != updatedRule.Priority)
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Modified,
|
||||
Category = "rule",
|
||||
Path = $"spec.rules[{name}].priority",
|
||||
OldValue = baseRule.Priority,
|
||||
NewValue = updatedRule.Priority,
|
||||
Description = $"Rule '{name}' priority changed from {baseRule.Priority} to {updatedRule.Priority}."
|
||||
});
|
||||
}
|
||||
|
||||
DiffDictionary(baseRule.Match, updatedRule.Match, $"spec.rules[{name}].match", "rule", changes);
|
||||
}
|
||||
}
|
||||
|
||||
private static void DiffDictionary(
|
||||
IReadOnlyDictionary<string, object?> baseDict,
|
||||
IReadOnlyDictionary<string, object?> updatedDict,
|
||||
string pathPrefix,
|
||||
string category,
|
||||
List<PolicyChange> changes)
|
||||
{
|
||||
var allKeys = baseDict.Keys.Union(updatedDict.Keys).OrderBy(k => k, StringComparer.Ordinal);
|
||||
|
||||
foreach (var key in allKeys)
|
||||
{
|
||||
var hasBase = baseDict.TryGetValue(key, out var baseVal);
|
||||
var hasUpdated = updatedDict.TryGetValue(key, out var updatedVal);
|
||||
|
||||
if (hasBase && !hasUpdated)
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Removed,
|
||||
Category = category,
|
||||
Path = $"{pathPrefix}.{key}",
|
||||
OldValue = baseVal,
|
||||
NewValue = null,
|
||||
Description = $"Config key '{key}' removed."
|
||||
});
|
||||
}
|
||||
else if (!hasBase && hasUpdated)
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Added,
|
||||
Category = category,
|
||||
Path = $"{pathPrefix}.{key}",
|
||||
OldValue = null,
|
||||
NewValue = updatedVal,
|
||||
Description = $"Config key '{key}' added with value '{updatedVal}'."
|
||||
});
|
||||
}
|
||||
else if (hasBase && hasUpdated && !ValuesEqual(baseVal, updatedVal))
|
||||
{
|
||||
changes.Add(new PolicyChange
|
||||
{
|
||||
ChangeType = PolicyChangeType.Modified,
|
||||
Category = category,
|
||||
Path = $"{pathPrefix}.{key}",
|
||||
OldValue = baseVal,
|
||||
NewValue = updatedVal,
|
||||
Description = $"Config key '{key}' changed from '{baseVal}' to '{updatedVal}'."
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static bool ValuesEqual(object? a, object? b)
|
||||
{
|
||||
if (a is null && b is null) return true;
|
||||
if (a is null || b is null) return false;
|
||||
|
||||
// Handle JsonElement comparison (from System.Text.Json deserialization)
|
||||
if (a is JsonElement ja && b is JsonElement jb)
|
||||
return ja.GetRawText() == jb.GetRawText();
|
||||
|
||||
return a.Equals(b);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Merge Methods
|
||||
|
||||
private static PolicyPackMetadata MergeMetadata(
|
||||
PolicyPackMetadata baseM,
|
||||
PolicyPackMetadata overlay,
|
||||
PolicyMergeStrategy strategy,
|
||||
List<PolicyMergeConflict> conflicts)
|
||||
{
|
||||
var name = ResolveConflict(baseM.Name, overlay.Name, "metadata.name", strategy, conflicts);
|
||||
var version = ResolveConflict(baseM.Version, overlay.Version, "metadata.version", strategy, conflicts);
|
||||
var description = overlay.Description ?? baseM.Description;
|
||||
|
||||
return new PolicyPackMetadata
|
||||
{
|
||||
Name = name ?? baseM.Name,
|
||||
Version = version ?? baseM.Version,
|
||||
Description = description,
|
||||
Digest = null, // Digest will be recomputed on export
|
||||
CreatedAt = baseM.CreatedAt,
|
||||
ExportedFrom = baseM.ExportedFrom,
|
||||
Parent = overlay.Parent ?? baseM.Parent,
|
||||
Environment = overlay.Environment ?? baseM.Environment
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyPackSettings MergeSettings(
|
||||
PolicyPackSettings baseS,
|
||||
PolicyPackSettings overlay,
|
||||
PolicyMergeStrategy strategy,
|
||||
List<PolicyMergeConflict> conflicts)
|
||||
{
|
||||
var defaultAction = ResolveConflict(
|
||||
baseS.DefaultAction, overlay.DefaultAction,
|
||||
"spec.settings.defaultAction", strategy, conflicts) ?? baseS.DefaultAction;
|
||||
|
||||
return new PolicyPackSettings
|
||||
{
|
||||
DefaultAction = defaultAction,
|
||||
UnknownsThreshold = strategy == PolicyMergeStrategy.BaseWins
|
||||
? baseS.UnknownsThreshold
|
||||
: overlay.UnknownsThreshold,
|
||||
StopOnFirstFailure = strategy == PolicyMergeStrategy.BaseWins
|
||||
? baseS.StopOnFirstFailure
|
||||
: overlay.StopOnFirstFailure,
|
||||
DeterministicMode = strategy == PolicyMergeStrategy.BaseWins
|
||||
? baseS.DeterministicMode
|
||||
: overlay.DeterministicMode
|
||||
};
|
||||
}
|
||||
|
||||
private static List<PolicyGateDefinition> MergeGates(
|
||||
IReadOnlyList<PolicyGateDefinition> baseGates,
|
||||
IReadOnlyList<PolicyGateDefinition> overlayGates,
|
||||
PolicyMergeStrategy strategy,
|
||||
List<PolicyMergeConflict> conflicts)
|
||||
{
|
||||
var baseMap = baseGates.ToDictionary(g => g.Id);
|
||||
var overlayMap = overlayGates.ToDictionary(g => g.Id);
|
||||
var result = new List<PolicyGateDefinition>();
|
||||
|
||||
// Include all base gates (potentially overridden)
|
||||
foreach (var gate in baseGates)
|
||||
{
|
||||
if (overlayMap.TryGetValue(gate.Id, out var overlayGate))
|
||||
{
|
||||
// Gate exists in both: merge configs
|
||||
var mergedConfig = MergeDictionaries(
|
||||
gate.Config, overlayGate.Config,
|
||||
$"spec.gates[{gate.Id}].config", strategy, conflicts);
|
||||
|
||||
var mergedGate = strategy == PolicyMergeStrategy.BaseWins
|
||||
? gate with { Config = mergedConfig }
|
||||
: overlayGate with { Config = mergedConfig };
|
||||
|
||||
result.Add(mergedGate);
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Add(gate);
|
||||
}
|
||||
}
|
||||
|
||||
// Add overlay-only gates
|
||||
foreach (var gate in overlayGates.Where(g => !baseMap.ContainsKey(g.Id)))
|
||||
{
|
||||
result.Add(gate);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private static List<PolicyRuleDefinition> MergeRules(
|
||||
IReadOnlyList<PolicyRuleDefinition> baseRules,
|
||||
IReadOnlyList<PolicyRuleDefinition> overlayRules,
|
||||
PolicyMergeStrategy strategy,
|
||||
List<PolicyMergeConflict> conflicts)
|
||||
{
|
||||
var baseMap = baseRules.ToDictionary(r => r.Name);
|
||||
var overlayMap = overlayRules.ToDictionary(r => r.Name);
|
||||
var result = new List<PolicyRuleDefinition>();
|
||||
|
||||
// Include all base rules (potentially overridden)
|
||||
foreach (var rule in baseRules)
|
||||
{
|
||||
if (overlayMap.TryGetValue(rule.Name, out var overlayRule))
|
||||
{
|
||||
// Rule exists in both: pick winner
|
||||
if (strategy == PolicyMergeStrategy.FailOnConflict &&
|
||||
(rule.Action != overlayRule.Action || rule.Priority != overlayRule.Priority))
|
||||
{
|
||||
conflicts.Add(new PolicyMergeConflict
|
||||
{
|
||||
Path = $"spec.rules[{rule.Name}]",
|
||||
BaseValue = $"action={rule.Action}, priority={rule.Priority}",
|
||||
OverlayValue = $"action={overlayRule.Action}, priority={overlayRule.Priority}",
|
||||
Description = $"Rule '{rule.Name}' differs between base and overlay."
|
||||
});
|
||||
result.Add(rule); // Keep base on conflict
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Add(strategy == PolicyMergeStrategy.BaseWins ? rule : overlayRule);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
result.Add(rule);
|
||||
}
|
||||
}
|
||||
|
||||
// Add overlay-only rules
|
||||
foreach (var rule in overlayRules.Where(r => !baseMap.ContainsKey(r.Name)))
|
||||
{
|
||||
result.Add(rule);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private static Dictionary<string, object?> MergeDictionaries(
|
||||
IReadOnlyDictionary<string, object?> baseDict,
|
||||
IReadOnlyDictionary<string, object?> overlayDict,
|
||||
string pathPrefix,
|
||||
PolicyMergeStrategy strategy,
|
||||
List<PolicyMergeConflict> conflicts)
|
||||
{
|
||||
var result = new Dictionary<string, object?>(baseDict);
|
||||
|
||||
foreach (var (key, overlayVal) in overlayDict)
|
||||
{
|
||||
if (result.TryGetValue(key, out var baseVal) && !ValuesEqual(baseVal, overlayVal))
|
||||
{
|
||||
if (strategy == PolicyMergeStrategy.FailOnConflict)
|
||||
{
|
||||
conflicts.Add(new PolicyMergeConflict
|
||||
{
|
||||
Path = $"{pathPrefix}.{key}",
|
||||
BaseValue = baseVal,
|
||||
OverlayValue = overlayVal,
|
||||
Description = $"Config key '{key}' differs: base='{baseVal}', overlay='{overlayVal}'."
|
||||
});
|
||||
}
|
||||
else if (strategy == PolicyMergeStrategy.OverlayWins)
|
||||
{
|
||||
result[key] = overlayVal;
|
||||
}
|
||||
// BaseWins: keep existing value
|
||||
}
|
||||
else if (!result.ContainsKey(key))
|
||||
{
|
||||
result[key] = overlayVal;
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private static string? ResolveConflict(
|
||||
string baseVal,
|
||||
string overlayVal,
|
||||
string path,
|
||||
PolicyMergeStrategy strategy,
|
||||
List<PolicyMergeConflict> conflicts)
|
||||
{
|
||||
if (baseVal == overlayVal) return baseVal;
|
||||
|
||||
if (strategy == PolicyMergeStrategy.FailOnConflict)
|
||||
{
|
||||
conflicts.Add(new PolicyMergeConflict
|
||||
{
|
||||
Path = path,
|
||||
BaseValue = baseVal,
|
||||
OverlayValue = overlayVal,
|
||||
Description = $"Conflict at '{path}': base='{baseVal}', overlay='{overlayVal}'."
|
||||
});
|
||||
return baseVal;
|
||||
}
|
||||
|
||||
return strategy == PolicyMergeStrategy.OverlayWins ? overlayVal : baseVal;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,265 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// YamlPolicyExporter.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - YAML export support for PolicyPackDocument
|
||||
// Description: Exports PolicyPackDocuments to canonical YAML format.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Policy.Interop.Abstractions;
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using YamlDotNet.Serialization;
|
||||
using YamlDotNet.Serialization.NamingConventions;
|
||||
|
||||
namespace StellaOps.Policy.Interop.Export;
|
||||
|
||||
/// <summary>
|
||||
/// Exports PolicyPackDocuments to canonical YAML format.
|
||||
/// Output is deterministic: same input produces byte-identical output
|
||||
/// (camelCase keys, sorted properties, consistent formatting).
|
||||
/// </summary>
|
||||
public sealed class YamlPolicyExporter : IPolicyYamlExporter
|
||||
{
|
||||
private static readonly ISerializer YamlSerializer = new SerializerBuilder()
|
||||
.WithNamingConvention(CamelCaseNamingConvention.Instance)
|
||||
.DisableAliases()
|
||||
.ConfigureDefaultValuesHandling(DefaultValuesHandling.OmitNull)
|
||||
.Build();
|
||||
|
||||
private static readonly IDeserializer YamlDeserializer = new DeserializerBuilder()
|
||||
.WithNamingConvention(CamelCaseNamingConvention.Instance)
|
||||
.Build();
|
||||
|
||||
/// <inheritdoc/>
|
||||
public Task<YamlExportResult> ExportToYamlAsync(
|
||||
PolicyPackDocument document,
|
||||
PolicyExportRequest request,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var exported = document;
|
||||
|
||||
// Apply environment filter if specified
|
||||
if (request.Environment is not null)
|
||||
{
|
||||
exported = FilterByEnvironment(exported, request.Environment);
|
||||
}
|
||||
|
||||
// Strip remediation if not requested
|
||||
if (!request.IncludeRemediation)
|
||||
{
|
||||
exported = StripRemediation(exported);
|
||||
}
|
||||
|
||||
// Serialize to YAML
|
||||
var yamlContent = SerializeToYaml(exported);
|
||||
|
||||
// Compute digest
|
||||
var digest = ComputeDigest(yamlContent);
|
||||
|
||||
return Task.FromResult(new YamlExportResult
|
||||
{
|
||||
Success = true,
|
||||
YamlContent = yamlContent,
|
||||
Digest = digest,
|
||||
Warnings = []
|
||||
});
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Serializes a PolicyPackDocument to canonical YAML string.
|
||||
/// </summary>
|
||||
public static string SerializeToYaml(PolicyPackDocument document)
|
||||
{
|
||||
// Convert to an intermediate dictionary to ensure consistent output
|
||||
var intermediate = ConvertToSerializable(document);
|
||||
return YamlSerializer.Serialize(intermediate);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Serializes a PolicyPackDocument to canonical YAML bytes (UTF-8).
|
||||
/// </summary>
|
||||
public static byte[] SerializeCanonical(PolicyPackDocument document)
|
||||
{
|
||||
return Encoding.UTF8.GetBytes(SerializeToYaml(document));
|
||||
}
|
||||
|
||||
private static object ConvertToSerializable(PolicyPackDocument doc)
|
||||
{
|
||||
var result = new SortedDictionary<string, object?>(StringComparer.Ordinal)
|
||||
{
|
||||
["apiVersion"] = doc.ApiVersion,
|
||||
["kind"] = doc.Kind,
|
||||
["metadata"] = ConvertMetadata(doc.Metadata),
|
||||
["spec"] = ConvertSpec(doc.Spec)
|
||||
};
|
||||
return result;
|
||||
}
|
||||
|
||||
private static object ConvertMetadata(PolicyPackMetadata meta)
|
||||
{
|
||||
var result = new SortedDictionary<string, object?>(StringComparer.Ordinal)
|
||||
{
|
||||
["name"] = meta.Name,
|
||||
["version"] = meta.Version
|
||||
};
|
||||
|
||||
if (meta.Description is not null) result["description"] = meta.Description;
|
||||
if (meta.Digest is not null) result["digest"] = meta.Digest;
|
||||
if (meta.CreatedAt is not null) result["createdAt"] = meta.CreatedAt.Value.ToString("O");
|
||||
if (meta.Parent is not null) result["parent"] = meta.Parent;
|
||||
if (meta.Environment is not null) result["environment"] = meta.Environment;
|
||||
|
||||
if (meta.ExportedFrom is not null)
|
||||
{
|
||||
var provenance = new SortedDictionary<string, object?>(StringComparer.Ordinal)
|
||||
{
|
||||
["engine"] = meta.ExportedFrom.Engine,
|
||||
["engineVersion"] = meta.ExportedFrom.EngineVersion
|
||||
};
|
||||
if (meta.ExportedFrom.ExportedAt is not null)
|
||||
provenance["exportedAt"] = meta.ExportedFrom.ExportedAt.Value.ToString("O");
|
||||
result["exportedFrom"] = provenance;
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private static object ConvertSpec(PolicyPackSpec spec)
|
||||
{
|
||||
var result = new SortedDictionary<string, object?>(StringComparer.Ordinal)
|
||||
{
|
||||
["settings"] = new SortedDictionary<string, object?>(StringComparer.Ordinal)
|
||||
{
|
||||
["defaultAction"] = spec.Settings.DefaultAction,
|
||||
["deterministicMode"] = spec.Settings.DeterministicMode,
|
||||
["stopOnFirstFailure"] = spec.Settings.StopOnFirstFailure,
|
||||
["unknownsThreshold"] = spec.Settings.UnknownsThreshold
|
||||
},
|
||||
["gates"] = spec.Gates.Select(ConvertGate).ToList(),
|
||||
["rules"] = spec.Rules.Select(ConvertRule).ToList()
|
||||
};
|
||||
return result;
|
||||
}
|
||||
|
||||
private static object ConvertGate(PolicyGateDefinition gate)
|
||||
{
|
||||
var result = new SortedDictionary<string, object?>(StringComparer.Ordinal)
|
||||
{
|
||||
["id"] = gate.Id,
|
||||
["type"] = gate.Type,
|
||||
["enabled"] = gate.Enabled
|
||||
};
|
||||
|
||||
if (gate.Config.Count > 0)
|
||||
result["config"] = new SortedDictionary<string, object?>(gate.Config.ToDictionary(kv => kv.Key, kv => kv.Value), StringComparer.Ordinal);
|
||||
|
||||
if (gate.Environments is not null)
|
||||
{
|
||||
var envs = new SortedDictionary<string, object?>(StringComparer.Ordinal);
|
||||
foreach (var (env, cfg) in gate.Environments.OrderBy(e => e.Key, StringComparer.Ordinal))
|
||||
{
|
||||
envs[env] = new SortedDictionary<string, object?>(cfg.ToDictionary(kv => kv.Key, kv => kv.Value), StringComparer.Ordinal);
|
||||
}
|
||||
result["environments"] = envs;
|
||||
}
|
||||
|
||||
if (gate.Remediation is not null)
|
||||
result["remediation"] = ConvertRemediation(gate.Remediation);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private static object ConvertRule(PolicyRuleDefinition rule)
|
||||
{
|
||||
var result = new SortedDictionary<string, object?>(StringComparer.Ordinal)
|
||||
{
|
||||
["name"] = rule.Name,
|
||||
["action"] = rule.Action,
|
||||
["priority"] = rule.Priority
|
||||
};
|
||||
|
||||
if (rule.Match.Count > 0)
|
||||
result["match"] = new SortedDictionary<string, object?>(rule.Match.ToDictionary(kv => kv.Key, kv => kv.Value), StringComparer.Ordinal);
|
||||
|
||||
if (rule.Remediation is not null)
|
||||
result["remediation"] = ConvertRemediation(rule.Remediation);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private static object ConvertRemediation(RemediationHint hint)
|
||||
{
|
||||
var result = new SortedDictionary<string, object?>(StringComparer.Ordinal)
|
||||
{
|
||||
["code"] = hint.Code,
|
||||
["title"] = hint.Title
|
||||
};
|
||||
if (hint.Description is not null) result["description"] = hint.Description;
|
||||
if (hint.Actions.Count > 0)
|
||||
{
|
||||
result["actions"] = hint.Actions.Select(a =>
|
||||
{
|
||||
var actionDict = new SortedDictionary<string, object?>(StringComparer.Ordinal)
|
||||
{
|
||||
["type"] = a.Type
|
||||
};
|
||||
if (a.Description is not null) actionDict["description"] = a.Description;
|
||||
if (a.Command is not null) actionDict["command"] = a.Command;
|
||||
return (object)actionDict;
|
||||
}).ToList();
|
||||
}
|
||||
if (hint.References.Count > 0)
|
||||
{
|
||||
result["references"] = hint.References.Select(r =>
|
||||
{
|
||||
var refDict = new SortedDictionary<string, object?>(StringComparer.Ordinal)
|
||||
{
|
||||
["title"] = r.Title
|
||||
};
|
||||
if (r.Url is not null) refDict["url"] = r.Url;
|
||||
return (object)refDict;
|
||||
}).ToList();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
private static PolicyPackDocument FilterByEnvironment(PolicyPackDocument doc, string environment)
|
||||
{
|
||||
var filteredGates = doc.Spec.Gates.Select(g =>
|
||||
{
|
||||
if (g.Environments is null || !g.Environments.ContainsKey(environment))
|
||||
return g;
|
||||
|
||||
var envConfig = g.Environments[environment];
|
||||
var mergedConfig = new Dictionary<string, object?>(g.Config);
|
||||
foreach (var (key, value) in envConfig)
|
||||
{
|
||||
mergedConfig[key] = value;
|
||||
}
|
||||
|
||||
return g with { Config = mergedConfig, Environments = null };
|
||||
}).ToList();
|
||||
|
||||
return doc with
|
||||
{
|
||||
Spec = doc.Spec with { Gates = filteredGates }
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyPackDocument StripRemediation(PolicyPackDocument doc)
|
||||
{
|
||||
var gates = doc.Spec.Gates.Select(g => g with { Remediation = null }).ToList();
|
||||
var rules = doc.Spec.Rules.Select(r => r with { Remediation = null }).ToList();
|
||||
return doc with
|
||||
{
|
||||
Spec = doc.Spec with { Gates = gates, Rules = rules }
|
||||
};
|
||||
}
|
||||
|
||||
private static string ComputeDigest(string content)
|
||||
{
|
||||
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(content));
|
||||
return $"sha256:{Convert.ToHexStringLower(hash)}";
|
||||
}
|
||||
}
|
||||
@@ -45,6 +45,15 @@ public static class FormatDetector
|
||||
return PolicyFormats.Rego;
|
||||
}
|
||||
|
||||
// YAML detection: starts with apiVersion: or --- or has YAML-like key: value structure
|
||||
if (trimmed.StartsWith("---", StringComparison.Ordinal) ||
|
||||
trimmed.StartsWith("apiVersion:", StringComparison.Ordinal) ||
|
||||
(trimmed.Contains("apiVersion:", StringComparison.Ordinal) &&
|
||||
trimmed.Contains("kind:", StringComparison.Ordinal)))
|
||||
{
|
||||
return PolicyFormats.Yaml;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -58,6 +67,7 @@ public static class FormatDetector
|
||||
{
|
||||
".json" => PolicyFormats.Json,
|
||||
".rego" => PolicyFormats.Rego,
|
||||
".yaml" or ".yml" => PolicyFormats.Yaml,
|
||||
_ => null
|
||||
};
|
||||
}
|
||||
|
||||
@@ -0,0 +1,137 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// YamlPolicyImporter.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - YAML import support for PolicyPackDocument
|
||||
// Description: Imports PolicyPackDocuments from YAML format.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Policy.Interop.Abstractions;
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using YamlDotNet.Core;
|
||||
using YamlDotNet.Serialization;
|
||||
using YamlDotNet.Serialization.NamingConventions;
|
||||
|
||||
namespace StellaOps.Policy.Interop.Import;
|
||||
|
||||
/// <summary>
|
||||
/// Imports PolicyPack v2 YAML documents into the native model.
|
||||
/// Converts YAML to JSON intermediary then delegates to the JSON importer for validation.
|
||||
/// This ensures consistent validation behavior across all import formats.
|
||||
/// </summary>
|
||||
public sealed class YamlPolicyImporter : IPolicyImporter
|
||||
{
|
||||
private static readonly IDeserializer YamlDeserializer = new DeserializerBuilder()
|
||||
.WithNamingConvention(CamelCaseNamingConvention.Instance)
|
||||
.Build();
|
||||
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNameCaseInsensitive = true,
|
||||
AllowTrailingCommas = true,
|
||||
ReadCommentHandling = JsonCommentHandling.Skip
|
||||
};
|
||||
|
||||
private readonly JsonPolicyImporter _jsonImporter = new();
|
||||
|
||||
/// <inheritdoc/>
|
||||
public async Task<PolicyImportResult> ImportAsync(
|
||||
Stream policyStream,
|
||||
PolicyImportOptions options,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
using var reader = new StreamReader(policyStream, Encoding.UTF8);
|
||||
var content = await reader.ReadToEndAsync(ct);
|
||||
return await ImportFromStringAsync(content, options, ct);
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public Task<PolicyImportResult> ImportFromStringAsync(
|
||||
string content,
|
||||
PolicyImportOptions options,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
// Parse YAML into object graph
|
||||
object? yamlObject;
|
||||
try
|
||||
{
|
||||
yamlObject = YamlDeserializer.Deserialize<object>(content);
|
||||
}
|
||||
catch (YamlException ex)
|
||||
{
|
||||
return Task.FromResult(new PolicyImportResult
|
||||
{
|
||||
Success = false,
|
||||
DetectedFormat = PolicyFormats.Yaml,
|
||||
Diagnostics =
|
||||
[
|
||||
new PolicyDiagnostic
|
||||
{
|
||||
Severity = PolicyDiagnostic.Severities.Error,
|
||||
Code = "YAML_PARSE_ERROR",
|
||||
Message = $"YAML parse error at line {ex.Start.Line}, column {ex.Start.Column}: {ex.Message}",
|
||||
Location = $"line {ex.Start.Line}, column {ex.Start.Column}"
|
||||
}
|
||||
]
|
||||
});
|
||||
}
|
||||
|
||||
if (yamlObject is null)
|
||||
{
|
||||
return Task.FromResult(new PolicyImportResult
|
||||
{
|
||||
Success = false,
|
||||
DetectedFormat = PolicyFormats.Yaml,
|
||||
Diagnostics =
|
||||
[
|
||||
new PolicyDiagnostic
|
||||
{
|
||||
Severity = PolicyDiagnostic.Severities.Error,
|
||||
Code = "YAML_EMPTY",
|
||||
Message = "YAML document is empty or null."
|
||||
}
|
||||
]
|
||||
});
|
||||
}
|
||||
|
||||
// Convert YAML object graph to JSON string (YamlDotNet -> System.Text.Json roundtrip)
|
||||
string jsonContent;
|
||||
try
|
||||
{
|
||||
jsonContent = JsonSerializer.Serialize(yamlObject, JsonOptions);
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
return Task.FromResult(new PolicyImportResult
|
||||
{
|
||||
Success = false,
|
||||
DetectedFormat = PolicyFormats.Yaml,
|
||||
Diagnostics =
|
||||
[
|
||||
new PolicyDiagnostic
|
||||
{
|
||||
Severity = PolicyDiagnostic.Severities.Error,
|
||||
Code = "YAML_CONVERSION_ERROR",
|
||||
Message = $"Failed to convert YAML to JSON: {ex.Message}"
|
||||
}
|
||||
]
|
||||
});
|
||||
}
|
||||
|
||||
// Delegate to JSON importer for validation and deserialization
|
||||
var jsonOptions = options with { Format = PolicyFormats.Json };
|
||||
var result = _jsonImporter.ImportFromStringAsync(jsonContent, jsonOptions, ct);
|
||||
|
||||
return result.ContinueWith(t =>
|
||||
{
|
||||
var importResult = t.Result;
|
||||
|
||||
// Update detected format to YAML
|
||||
return importResult with
|
||||
{
|
||||
DetectedFormat = PolicyFormats.Yaml
|
||||
};
|
||||
}, ct, TaskContinuationOptions.ExecuteSynchronously, TaskScheduler.Default);
|
||||
}
|
||||
}
|
||||
@@ -13,6 +13,7 @@
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
|
||||
<PackageReference Include="Microsoft.Extensions.Options" />
|
||||
<PackageReference Include="JsonSchema.Net" />
|
||||
<PackageReference Include="YamlDotNet" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -0,0 +1,421 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PolicyDiffMergeEngineTests.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - Tests for diff/merge engine
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Policy.Interop.Abstractions;
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
using StellaOps.Policy.Interop.DiffMerge;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Interop.Tests.DiffMerge;
|
||||
|
||||
public sealed class PolicyDiffMergeEngineTests
|
||||
{
|
||||
private readonly PolicyDiffMergeEngine _engine = new();
|
||||
|
||||
private static PolicyPackDocument LoadGoldenFixture()
|
||||
{
|
||||
var fixturePath = Path.Combine(AppContext.BaseDirectory, "Fixtures", "golden-policy-pack-v2.json");
|
||||
var json = File.ReadAllText(fixturePath);
|
||||
return JsonSerializer.Deserialize<PolicyPackDocument>(json,
|
||||
new JsonSerializerOptions { PropertyNameCaseInsensitive = true })!;
|
||||
}
|
||||
|
||||
private static PolicyPackDocument CreateMinimalDoc(
|
||||
string name = "test", string version = "1.0.0", string defaultAction = "block")
|
||||
{
|
||||
return new PolicyPackDocument
|
||||
{
|
||||
ApiVersion = PolicyPackDocument.ApiVersionV2,
|
||||
Kind = PolicyPackDocument.KindPolicyPack,
|
||||
Metadata = new PolicyPackMetadata { Name = name, Version = version },
|
||||
Spec = new PolicyPackSpec
|
||||
{
|
||||
Settings = new PolicyPackSettings { DefaultAction = defaultAction },
|
||||
Gates = [],
|
||||
Rules = []
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
#region Diff Tests
|
||||
|
||||
[Fact]
|
||||
public void Diff_IdenticalDocuments_ReturnsNoChanges()
|
||||
{
|
||||
var doc = LoadGoldenFixture();
|
||||
|
||||
var result = _engine.Diff(doc, doc);
|
||||
|
||||
result.AreIdentical.Should().BeTrue();
|
||||
result.Changes.Should().BeEmpty();
|
||||
result.Summary.Total.Should().Be(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Diff_MetadataVersionChange_DetectsModification()
|
||||
{
|
||||
var baseline = CreateMinimalDoc(version: "1.0.0");
|
||||
var updated = baseline with
|
||||
{
|
||||
Metadata = baseline.Metadata with { Version = "2.0.0" }
|
||||
};
|
||||
|
||||
var result = _engine.Diff(baseline, updated);
|
||||
|
||||
result.AreIdentical.Should().BeFalse();
|
||||
result.Summary.Modifications.Should().Be(1);
|
||||
result.Changes.Should().ContainSingle(c =>
|
||||
c.Path == "metadata.version" && c.ChangeType == PolicyChangeType.Modified);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Diff_SettingsChange_DetectsDefaultActionModification()
|
||||
{
|
||||
var baseline = CreateMinimalDoc(defaultAction: "block");
|
||||
var updated = baseline with
|
||||
{
|
||||
Spec = baseline.Spec with
|
||||
{
|
||||
Settings = baseline.Spec.Settings with { DefaultAction = "warn" }
|
||||
}
|
||||
};
|
||||
|
||||
var result = _engine.Diff(baseline, updated);
|
||||
|
||||
result.AreIdentical.Should().BeFalse();
|
||||
result.Changes.Should().ContainSingle(c =>
|
||||
c.Path == "spec.settings.defaultAction" &&
|
||||
c.OldValue!.ToString() == "block" &&
|
||||
c.NewValue!.ToString() == "warn");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Diff_GateAdded_DetectsAddition()
|
||||
{
|
||||
var baseline = CreateMinimalDoc();
|
||||
var updated = baseline with
|
||||
{
|
||||
Spec = baseline.Spec with
|
||||
{
|
||||
Gates =
|
||||
[
|
||||
new PolicyGateDefinition
|
||||
{
|
||||
Id = "new-gate",
|
||||
Type = "CvssThresholdGate"
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
|
||||
var result = _engine.Diff(baseline, updated);
|
||||
|
||||
result.Summary.Additions.Should().Be(1);
|
||||
result.Changes.Should().ContainSingle(c =>
|
||||
c.ChangeType == PolicyChangeType.Added && c.Category == "gate");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Diff_GateRemoved_DetectsRemoval()
|
||||
{
|
||||
var baseline = CreateMinimalDoc() with
|
||||
{
|
||||
Spec = new PolicyPackSpec
|
||||
{
|
||||
Settings = new PolicyPackSettings { DefaultAction = "block" },
|
||||
Gates =
|
||||
[
|
||||
new PolicyGateDefinition
|
||||
{
|
||||
Id = "old-gate",
|
||||
Type = "SbomPresenceGate"
|
||||
}
|
||||
],
|
||||
Rules = []
|
||||
}
|
||||
};
|
||||
var updated = baseline with
|
||||
{
|
||||
Spec = baseline.Spec with { Gates = [] }
|
||||
};
|
||||
|
||||
var result = _engine.Diff(baseline, updated);
|
||||
|
||||
result.Summary.Removals.Should().Be(1);
|
||||
result.Changes.Should().ContainSingle(c =>
|
||||
c.ChangeType == PolicyChangeType.Removed && c.Category == "gate");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Diff_RuleActionChanged_DetectsModification()
|
||||
{
|
||||
var rule = new PolicyRuleDefinition
|
||||
{
|
||||
Name = "test-rule",
|
||||
Action = "block",
|
||||
Priority = 10
|
||||
};
|
||||
|
||||
var baseline = CreateMinimalDoc() with
|
||||
{
|
||||
Spec = new PolicyPackSpec
|
||||
{
|
||||
Settings = new PolicyPackSettings { DefaultAction = "block" },
|
||||
Gates = [],
|
||||
Rules = [rule]
|
||||
}
|
||||
};
|
||||
var updated = baseline with
|
||||
{
|
||||
Spec = baseline.Spec with
|
||||
{
|
||||
Rules = [rule with { Action = "warn" }]
|
||||
}
|
||||
};
|
||||
|
||||
var result = _engine.Diff(baseline, updated);
|
||||
|
||||
result.Changes.Should().Contain(c =>
|
||||
c.Path == "spec.rules[test-rule].action" && c.ChangeType == PolicyChangeType.Modified);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Diff_GateConfigChanged_DetectsConfigModification()
|
||||
{
|
||||
var gate = new PolicyGateDefinition
|
||||
{
|
||||
Id = "cvss-gate",
|
||||
Type = "CvssThresholdGate",
|
||||
Config = new Dictionary<string, object?> { ["threshold"] = (JsonElement)JsonDocument.Parse("7.0").RootElement.Clone() }
|
||||
};
|
||||
|
||||
var baseline = CreateMinimalDoc() with
|
||||
{
|
||||
Spec = new PolicyPackSpec
|
||||
{
|
||||
Settings = new PolicyPackSettings { DefaultAction = "block" },
|
||||
Gates = [gate],
|
||||
Rules = []
|
||||
}
|
||||
};
|
||||
|
||||
var updatedGate = gate with
|
||||
{
|
||||
Config = new Dictionary<string, object?> { ["threshold"] = (JsonElement)JsonDocument.Parse("9.0").RootElement.Clone() }
|
||||
};
|
||||
var updated = baseline with
|
||||
{
|
||||
Spec = baseline.Spec with { Gates = [updatedGate] }
|
||||
};
|
||||
|
||||
var result = _engine.Diff(baseline, updated);
|
||||
|
||||
result.Changes.Should().Contain(c =>
|
||||
c.Path == "spec.gates[cvss-gate].config.threshold" && c.ChangeType == PolicyChangeType.Modified);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Diff_GoldenFixture_AgainstItself_IsIdentical()
|
||||
{
|
||||
var doc = LoadGoldenFixture();
|
||||
|
||||
var result = _engine.Diff(doc, doc);
|
||||
|
||||
result.AreIdentical.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Diff_MultipleChanges_ReturnsCorrectSummary()
|
||||
{
|
||||
var baseline = CreateMinimalDoc(name: "base", version: "1.0.0", defaultAction: "block");
|
||||
var updated = CreateMinimalDoc(name: "updated", version: "2.0.0", defaultAction: "warn");
|
||||
|
||||
var result = _engine.Diff(baseline, updated);
|
||||
|
||||
result.Summary.Modifications.Should().Be(3); // name, version, defaultAction
|
||||
result.Summary.Total.Should().Be(3);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Merge Tests
|
||||
|
||||
[Fact]
|
||||
public void Merge_IdenticalDocuments_ReturnsIdenticalResult()
|
||||
{
|
||||
var doc = CreateMinimalDoc();
|
||||
|
||||
var result = _engine.Merge(doc, doc);
|
||||
|
||||
result.Success.Should().BeTrue();
|
||||
result.Document.Should().NotBeNull();
|
||||
result.Conflicts.Should().BeEmpty();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Merge_OverlayWins_OverlayValuesPreferred()
|
||||
{
|
||||
var baseDoc = CreateMinimalDoc(defaultAction: "block");
|
||||
var overlay = CreateMinimalDoc(defaultAction: "warn");
|
||||
|
||||
var result = _engine.Merge(baseDoc, overlay, PolicyMergeStrategy.OverlayWins);
|
||||
|
||||
result.Success.Should().BeTrue();
|
||||
result.Document!.Spec.Settings.DefaultAction.Should().Be("warn");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Merge_BaseWins_BaseValuesPreferred()
|
||||
{
|
||||
var baseDoc = CreateMinimalDoc(defaultAction: "block");
|
||||
var overlay = CreateMinimalDoc(defaultAction: "warn");
|
||||
|
||||
var result = _engine.Merge(baseDoc, overlay, PolicyMergeStrategy.BaseWins);
|
||||
|
||||
result.Success.Should().BeTrue();
|
||||
result.Document!.Spec.Settings.DefaultAction.Should().Be("block");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Merge_FailOnConflict_ReportsConflicts()
|
||||
{
|
||||
var baseDoc = CreateMinimalDoc(defaultAction: "block");
|
||||
var overlay = CreateMinimalDoc(defaultAction: "warn");
|
||||
|
||||
var result = _engine.Merge(baseDoc, overlay, PolicyMergeStrategy.FailOnConflict);
|
||||
|
||||
result.Success.Should().BeFalse();
|
||||
result.Conflicts.Should().NotBeEmpty();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Merge_OverlayAddsNewGate_GateIncluded()
|
||||
{
|
||||
var baseDoc = CreateMinimalDoc();
|
||||
var overlay = baseDoc with
|
||||
{
|
||||
Spec = baseDoc.Spec with
|
||||
{
|
||||
Gates =
|
||||
[
|
||||
new PolicyGateDefinition
|
||||
{
|
||||
Id = "overlay-gate",
|
||||
Type = "CvssThresholdGate"
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
|
||||
var result = _engine.Merge(baseDoc, overlay);
|
||||
|
||||
result.Success.Should().BeTrue();
|
||||
result.Document!.Spec.Gates.Should().ContainSingle(g => g.Id == "overlay-gate");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Merge_OverlayAddsNewRule_RuleIncluded()
|
||||
{
|
||||
var baseDoc = CreateMinimalDoc();
|
||||
var overlay = baseDoc with
|
||||
{
|
||||
Spec = baseDoc.Spec with
|
||||
{
|
||||
Rules =
|
||||
[
|
||||
new PolicyRuleDefinition
|
||||
{
|
||||
Name = "overlay-rule",
|
||||
Action = "warn",
|
||||
Priority = 50
|
||||
}
|
||||
]
|
||||
}
|
||||
};
|
||||
|
||||
var result = _engine.Merge(baseDoc, overlay);
|
||||
|
||||
result.Success.Should().BeTrue();
|
||||
result.Document!.Spec.Rules.Should().ContainSingle(r => r.Name == "overlay-rule");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Merge_BothHaveGates_MergesAllGates()
|
||||
{
|
||||
var baseDoc = CreateMinimalDoc() with
|
||||
{
|
||||
Spec = new PolicyPackSpec
|
||||
{
|
||||
Settings = new PolicyPackSettings { DefaultAction = "block" },
|
||||
Gates =
|
||||
[
|
||||
new PolicyGateDefinition { Id = "base-gate", Type = "SbomPresenceGate" }
|
||||
],
|
||||
Rules = []
|
||||
}
|
||||
};
|
||||
var overlay = CreateMinimalDoc() with
|
||||
{
|
||||
Spec = new PolicyPackSpec
|
||||
{
|
||||
Settings = new PolicyPackSettings { DefaultAction = "block" },
|
||||
Gates =
|
||||
[
|
||||
new PolicyGateDefinition { Id = "overlay-gate", Type = "CvssThresholdGate" }
|
||||
],
|
||||
Rules = []
|
||||
}
|
||||
};
|
||||
|
||||
var result = _engine.Merge(baseDoc, overlay);
|
||||
|
||||
result.Success.Should().BeTrue();
|
||||
result.Document!.Spec.Gates.Should().HaveCount(2);
|
||||
result.Document.Spec.Gates.Should().Contain(g => g.Id == "base-gate");
|
||||
result.Document.Spec.Gates.Should().Contain(g => g.Id == "overlay-gate");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Merge_OverlayWins_OverridesMatchingGate()
|
||||
{
|
||||
var gate = new PolicyGateDefinition
|
||||
{
|
||||
Id = "shared-gate",
|
||||
Type = "CvssThresholdGate",
|
||||
Enabled = true
|
||||
};
|
||||
|
||||
var baseDoc = CreateMinimalDoc() with
|
||||
{
|
||||
Spec = new PolicyPackSpec
|
||||
{
|
||||
Settings = new PolicyPackSettings { DefaultAction = "block" },
|
||||
Gates = [gate],
|
||||
Rules = []
|
||||
}
|
||||
};
|
||||
var overlay = CreateMinimalDoc() with
|
||||
{
|
||||
Spec = new PolicyPackSpec
|
||||
{
|
||||
Settings = new PolicyPackSettings { DefaultAction = "block" },
|
||||
Gates = [gate with { Enabled = false }],
|
||||
Rules = []
|
||||
}
|
||||
};
|
||||
|
||||
var result = _engine.Merge(baseDoc, overlay, PolicyMergeStrategy.OverlayWins);
|
||||
|
||||
result.Success.Should().BeTrue();
|
||||
result.Document!.Spec.Gates.Should().ContainSingle(g =>
|
||||
g.Id == "shared-gate" && !g.Enabled);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,151 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// YamlPolicyExporterTests.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - Tests for YAML export
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
using StellaOps.Policy.Interop.Export;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Interop.Tests.Export;
|
||||
|
||||
public sealed class YamlPolicyExporterTests
|
||||
{
|
||||
private readonly YamlPolicyExporter _exporter = new();
|
||||
|
||||
private static PolicyPackDocument LoadGoldenFixture()
|
||||
{
|
||||
var fixturePath = Path.Combine(AppContext.BaseDirectory, "Fixtures", "golden-policy-pack-v2.json");
|
||||
var json = File.ReadAllText(fixturePath);
|
||||
return JsonSerializer.Deserialize<PolicyPackDocument>(json,
|
||||
new JsonSerializerOptions { PropertyNameCaseInsensitive = true })!;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExportToYaml_ProducesValidOutput()
|
||||
{
|
||||
var doc = LoadGoldenFixture();
|
||||
var request = new PolicyExportRequest { Format = PolicyFormats.Yaml };
|
||||
|
||||
var result = await _exporter.ExportToYamlAsync(doc, request);
|
||||
|
||||
result.Success.Should().BeTrue();
|
||||
result.YamlContent.Should().NotBeNullOrEmpty();
|
||||
result.Digest.Should().StartWith("sha256:");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExportToYaml_ContainsApiVersionAndKind()
|
||||
{
|
||||
var doc = LoadGoldenFixture();
|
||||
var request = new PolicyExportRequest { Format = PolicyFormats.Yaml };
|
||||
|
||||
var result = await _exporter.ExportToYamlAsync(doc, request);
|
||||
|
||||
result.YamlContent.Should().Contain("apiVersion: policy.stellaops.io/v2");
|
||||
result.YamlContent.Should().Contain("kind: PolicyPack");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExportToYaml_IsDeterministic()
|
||||
{
|
||||
var doc = LoadGoldenFixture();
|
||||
var request = new PolicyExportRequest { Format = PolicyFormats.Yaml };
|
||||
|
||||
var result1 = await _exporter.ExportToYamlAsync(doc, request);
|
||||
var result2 = await _exporter.ExportToYamlAsync(doc, request);
|
||||
|
||||
result1.Digest.Should().Be(result2.Digest);
|
||||
result1.YamlContent.Should().Be(result2.YamlContent);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExportToYaml_WithEnvironment_MergesConfig()
|
||||
{
|
||||
var doc = LoadGoldenFixture();
|
||||
var request = new PolicyExportRequest { Format = PolicyFormats.Yaml, Environment = "staging" };
|
||||
|
||||
var result = await _exporter.ExportToYamlAsync(doc, request);
|
||||
|
||||
// Environment-specific config is merged; environments key should not appear
|
||||
result.YamlContent.Should().NotContain("environments:");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExportToYaml_WithoutRemediation_StripsHints()
|
||||
{
|
||||
var doc = LoadGoldenFixture();
|
||||
var request = new PolicyExportRequest { Format = PolicyFormats.Yaml, IncludeRemediation = false };
|
||||
|
||||
var result = await _exporter.ExportToYamlAsync(doc, request);
|
||||
|
||||
result.YamlContent.Should().NotContain("remediation:");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SerializeCanonical_ProducesDeterministicBytes()
|
||||
{
|
||||
var doc = LoadGoldenFixture();
|
||||
|
||||
var bytes1 = YamlPolicyExporter.SerializeCanonical(doc);
|
||||
var bytes2 = YamlPolicyExporter.SerializeCanonical(doc);
|
||||
|
||||
bytes1.Should().BeEquivalentTo(bytes2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SerializeToYaml_PreservesGateIds()
|
||||
{
|
||||
var doc = LoadGoldenFixture();
|
||||
|
||||
var yaml = YamlPolicyExporter.SerializeToYaml(doc);
|
||||
|
||||
yaml.Should().Contain("cvss-threshold");
|
||||
yaml.Should().Contain("signature-required");
|
||||
yaml.Should().Contain("evidence-freshness");
|
||||
yaml.Should().Contain("sbom-presence");
|
||||
yaml.Should().Contain("minimum-confidence");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SerializeToYaml_PreservesRuleNames()
|
||||
{
|
||||
var doc = LoadGoldenFixture();
|
||||
|
||||
var yaml = YamlPolicyExporter.SerializeToYaml(doc);
|
||||
|
||||
yaml.Should().Contain("require-dsse-signature");
|
||||
yaml.Should().Contain("require-rekor-proof");
|
||||
yaml.Should().Contain("require-sbom-digest");
|
||||
yaml.Should().Contain("require-freshness-tst");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SerializeToYaml_MinimalDocument_Succeeds()
|
||||
{
|
||||
var doc = new PolicyPackDocument
|
||||
{
|
||||
ApiVersion = PolicyPackDocument.ApiVersionV2,
|
||||
Kind = PolicyPackDocument.KindPolicyPack,
|
||||
Metadata = new PolicyPackMetadata
|
||||
{
|
||||
Name = "minimal",
|
||||
Version = "1.0.0"
|
||||
},
|
||||
Spec = new PolicyPackSpec
|
||||
{
|
||||
Settings = new PolicyPackSettings { DefaultAction = "allow" },
|
||||
Gates = [],
|
||||
Rules = []
|
||||
}
|
||||
};
|
||||
|
||||
var yaml = YamlPolicyExporter.SerializeToYaml(doc);
|
||||
|
||||
yaml.Should().Contain("name: minimal");
|
||||
yaml.Should().Contain("defaultAction: allow");
|
||||
}
|
||||
}
|
||||
@@ -79,8 +79,30 @@ public class FormatDetectorTests
|
||||
[Fact]
|
||||
public void DetectFromExtension_UnknownExtension_ReturnsNull()
|
||||
{
|
||||
FormatDetector.DetectFromExtension("policy.yaml").Should().BeNull();
|
||||
FormatDetector.DetectFromExtension("policy.txt").Should().BeNull();
|
||||
FormatDetector.DetectFromExtension("policy.xml").Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void DetectFromExtension_YamlFile_ReturnsYaml()
|
||||
{
|
||||
FormatDetector.DetectFromExtension("policy.yaml").Should().Be(PolicyFormats.Yaml);
|
||||
FormatDetector.DetectFromExtension("policy.yml").Should().Be(PolicyFormats.Yaml);
|
||||
FormatDetector.DetectFromExtension("/path/to/my-policy.yaml").Should().Be(PolicyFormats.Yaml);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Detect_YamlContent_WithApiVersion_ReturnsYaml()
|
||||
{
|
||||
var content = "apiVersion: policy.stellaops.io/v2\nkind: PolicyPack\n";
|
||||
FormatDetector.Detect(content).Should().Be(PolicyFormats.Yaml);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Detect_YamlContent_WithDocumentSeparator_ReturnsYaml()
|
||||
{
|
||||
var content = "---\napiVersion: policy.stellaops.io/v2\n";
|
||||
FormatDetector.Detect(content).Should().Be(PolicyFormats.Yaml);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
|
||||
@@ -0,0 +1,150 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// YamlPolicyImporterTests.cs
|
||||
// Sprint: SPRINT_20260208_048_Policy_policy_interop_framework
|
||||
// Task: T1 - Tests for YAML import
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Policy.Interop.Contracts;
|
||||
using StellaOps.Policy.Interop.Export;
|
||||
using StellaOps.Policy.Interop.Import;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Interop.Tests.Import;
|
||||
|
||||
public sealed class YamlPolicyImporterTests
|
||||
{
|
||||
private readonly YamlPolicyImporter _importer = new();
|
||||
|
||||
private static PolicyPackDocument LoadGoldenFixture()
|
||||
{
|
||||
var fixturePath = Path.Combine(AppContext.BaseDirectory, "Fixtures", "golden-policy-pack-v2.json");
|
||||
var json = File.ReadAllText(fixturePath);
|
||||
return JsonSerializer.Deserialize<PolicyPackDocument>(json,
|
||||
new JsonSerializerOptions { PropertyNameCaseInsensitive = true })!;
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ImportFromYaml_ValidDocument_Succeeds()
|
||||
{
|
||||
// Export golden fixture to YAML, then re-import
|
||||
var original = LoadGoldenFixture();
|
||||
var yaml = YamlPolicyExporter.SerializeToYaml(original);
|
||||
|
||||
var result = await _importer.ImportFromStringAsync(yaml,
|
||||
new PolicyImportOptions { Format = PolicyFormats.Yaml });
|
||||
|
||||
result.Success.Should().BeTrue();
|
||||
result.DetectedFormat.Should().Be(PolicyFormats.Yaml);
|
||||
result.Document.Should().NotBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ImportFromYaml_PreservesApiVersion()
|
||||
{
|
||||
var original = LoadGoldenFixture();
|
||||
var yaml = YamlPolicyExporter.SerializeToYaml(original);
|
||||
|
||||
var result = await _importer.ImportFromStringAsync(yaml,
|
||||
new PolicyImportOptions { Format = PolicyFormats.Yaml });
|
||||
|
||||
result.Document!.ApiVersion.Should().Be(PolicyPackDocument.ApiVersionV2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ImportFromYaml_PreservesGateCount()
|
||||
{
|
||||
var original = LoadGoldenFixture();
|
||||
var yaml = YamlPolicyExporter.SerializeToYaml(original);
|
||||
|
||||
var result = await _importer.ImportFromStringAsync(yaml,
|
||||
new PolicyImportOptions { Format = PolicyFormats.Yaml });
|
||||
|
||||
result.GateCount.Should().Be(original.Spec.Gates.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ImportFromYaml_PreservesRuleCount()
|
||||
{
|
||||
var original = LoadGoldenFixture();
|
||||
var yaml = YamlPolicyExporter.SerializeToYaml(original);
|
||||
|
||||
var result = await _importer.ImportFromStringAsync(yaml,
|
||||
new PolicyImportOptions { Format = PolicyFormats.Yaml });
|
||||
|
||||
result.RuleCount.Should().Be(original.Spec.Rules.Count);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ImportFromYaml_InvalidYaml_ReturnsDiagnostic()
|
||||
{
|
||||
var invalidYaml = "invalid: yaml:\n bad: [\nincomplete";
|
||||
|
||||
var result = await _importer.ImportFromStringAsync(invalidYaml,
|
||||
new PolicyImportOptions { Format = PolicyFormats.Yaml });
|
||||
|
||||
result.Success.Should().BeFalse();
|
||||
result.DetectedFormat.Should().Be(PolicyFormats.Yaml);
|
||||
result.Diagnostics.Should().Contain(d => d.Code == "YAML_PARSE_ERROR");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ImportFromYaml_EmptyContent_ReturnsDiagnostic()
|
||||
{
|
||||
var result = await _importer.ImportFromStringAsync("",
|
||||
new PolicyImportOptions { Format = PolicyFormats.Yaml });
|
||||
|
||||
result.Success.Should().BeFalse();
|
||||
result.DetectedFormat.Should().Be(PolicyFormats.Yaml);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ImportFromYaml_PreservesMetadataName()
|
||||
{
|
||||
var original = LoadGoldenFixture();
|
||||
var yaml = YamlPolicyExporter.SerializeToYaml(original);
|
||||
|
||||
var result = await _importer.ImportFromStringAsync(yaml,
|
||||
new PolicyImportOptions { Format = PolicyFormats.Yaml });
|
||||
|
||||
result.Document!.Metadata.Name.Should().Be(original.Metadata.Name);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ImportFromYaml_MinimalDocument_Succeeds()
|
||||
{
|
||||
var yaml = """
|
||||
apiVersion: policy.stellaops.io/v2
|
||||
kind: PolicyPack
|
||||
metadata:
|
||||
name: test-minimal
|
||||
version: "1.0.0"
|
||||
spec:
|
||||
settings:
|
||||
defaultAction: allow
|
||||
gates: []
|
||||
rules: []
|
||||
""";
|
||||
|
||||
var result = await _importer.ImportFromStringAsync(yaml,
|
||||
new PolicyImportOptions { Format = PolicyFormats.Yaml });
|
||||
|
||||
result.Success.Should().BeTrue();
|
||||
result.Document!.Metadata.Name.Should().Be("test-minimal");
|
||||
result.Document.Spec.Settings.DefaultAction.Should().Be("allow");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ImportFromYaml_Stream_Succeeds()
|
||||
{
|
||||
var original = LoadGoldenFixture();
|
||||
var yaml = YamlPolicyExporter.SerializeToYaml(original);
|
||||
using var stream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(yaml));
|
||||
|
||||
var result = await _importer.ImportAsync(stream,
|
||||
new PolicyImportOptions { Format = PolicyFormats.Yaml });
|
||||
|
||||
result.Success.Should().BeTrue();
|
||||
}
|
||||
}
|
||||
@@ -14,6 +14,7 @@
|
||||
<PackageReference Include="FluentAssertions" />
|
||||
<PackageReference Include="NSubstitute" />
|
||||
<PackageReference Include="JsonSchema.Net" />
|
||||
<PackageReference Include="YamlDotNet" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -0,0 +1,220 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Policy.Determinization.Evidence;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using StellaOps.Policy.Determinization.Scoring;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Tests.Scoring;
|
||||
|
||||
public class CombinedImpactCalculatorTests
|
||||
{
|
||||
private readonly CombinedImpactCalculator _calculator;
|
||||
|
||||
public CombinedImpactCalculatorTests()
|
||||
{
|
||||
var impactCalculator = new ImpactScoreCalculator(NullLogger<ImpactScoreCalculator>.Instance);
|
||||
var uncertaintyCalculator = new UncertaintyScoreCalculator(NullLogger<UncertaintyScoreCalculator>.Instance);
|
||||
_calculator = new CombinedImpactCalculator(
|
||||
impactCalculator,
|
||||
uncertaintyCalculator,
|
||||
NullLogger<CombinedImpactCalculator>.Instance);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_HighImpactLowUncertainty_ReturnsHighPriority()
|
||||
{
|
||||
// Arrange
|
||||
var impactContext = new ImpactContext
|
||||
{
|
||||
Environment = EnvironmentType.Production,
|
||||
DataSensitivity = DataSensitivity.Healthcare,
|
||||
FleetPrevalence = 0.9,
|
||||
SlaTier = SlaTier.MissionCritical,
|
||||
CvssScore = 9.8
|
||||
};
|
||||
var signalSnapshot = CreateFullSnapshot();
|
||||
|
||||
// Act
|
||||
var result = _calculator.Calculate(impactContext, signalSnapshot);
|
||||
|
||||
// Assert
|
||||
result.Impact.Score.Should().BeGreaterThan(0.8);
|
||||
result.Uncertainty.Entropy.Should().Be(0.0);
|
||||
result.EffectivePriority.Should().BeGreaterThan(0.8);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_HighImpactHighUncertainty_ReducesPriority()
|
||||
{
|
||||
// Arrange
|
||||
var impactContext = new ImpactContext
|
||||
{
|
||||
Environment = EnvironmentType.Production,
|
||||
DataSensitivity = DataSensitivity.Healthcare,
|
||||
FleetPrevalence = 0.9,
|
||||
SlaTier = SlaTier.MissionCritical,
|
||||
CvssScore = 9.8
|
||||
};
|
||||
var signalSnapshot = SignalSnapshot.Empty("CVE-2024-1234", "pkg:maven/test@1.0", DateTimeOffset.UtcNow);
|
||||
|
||||
// Act
|
||||
var result = _calculator.Calculate(impactContext, signalSnapshot, uncertaintyPenaltyFactor: 0.5);
|
||||
|
||||
// Assert
|
||||
result.Impact.Score.Should().BeGreaterThan(0.8);
|
||||
result.Uncertainty.Entropy.Should().Be(1.0);
|
||||
// Effective = impact * (1 - 1.0 * 0.5) = impact * 0.5
|
||||
result.EffectivePriority.Should().BeLessThan(result.Impact.Score);
|
||||
result.EffectivePriority.Should().BeApproximately(result.Impact.Score * 0.5, 0.01);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_LowImpactLowUncertainty_ReturnsLowPriority()
|
||||
{
|
||||
// Arrange
|
||||
var impactContext = new ImpactContext
|
||||
{
|
||||
Environment = EnvironmentType.Development,
|
||||
DataSensitivity = DataSensitivity.Public,
|
||||
FleetPrevalence = 0.1,
|
||||
SlaTier = SlaTier.NonCritical,
|
||||
CvssScore = 2.0
|
||||
};
|
||||
var signalSnapshot = CreateFullSnapshot();
|
||||
|
||||
// Act
|
||||
var result = _calculator.Calculate(impactContext, signalSnapshot);
|
||||
|
||||
// Assert
|
||||
result.Impact.Score.Should().BeLessThan(0.2);
|
||||
result.Uncertainty.Entropy.Should().Be(0.0);
|
||||
result.EffectivePriority.Should().BeLessThan(0.2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_ZeroPenaltyFactor_IgnoresUncertainty()
|
||||
{
|
||||
// Arrange
|
||||
var impactContext = new ImpactContext
|
||||
{
|
||||
Environment = EnvironmentType.Production,
|
||||
DataSensitivity = DataSensitivity.Healthcare,
|
||||
FleetPrevalence = 0.9,
|
||||
SlaTier = SlaTier.MissionCritical,
|
||||
CvssScore = 9.8
|
||||
};
|
||||
var signalSnapshot = SignalSnapshot.Empty("CVE-2024-1234", "pkg:maven/test@1.0", DateTimeOffset.UtcNow);
|
||||
|
||||
// Act
|
||||
var result = _calculator.Calculate(impactContext, signalSnapshot, uncertaintyPenaltyFactor: 0.0);
|
||||
|
||||
// Assert
|
||||
result.EffectivePriority.Should().BeApproximately(result.Impact.Score, 0.001);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_FullPenaltyFactor_MaximumReduction()
|
||||
{
|
||||
// Arrange
|
||||
var impactContext = new ImpactContext
|
||||
{
|
||||
Environment = EnvironmentType.Production,
|
||||
DataSensitivity = DataSensitivity.Healthcare,
|
||||
FleetPrevalence = 0.9,
|
||||
SlaTier = SlaTier.MissionCritical,
|
||||
CvssScore = 9.8
|
||||
};
|
||||
var signalSnapshot = SignalSnapshot.Empty("CVE-2024-1234", "pkg:maven/test@1.0", DateTimeOffset.UtcNow);
|
||||
|
||||
// Act
|
||||
var result = _calculator.Calculate(impactContext, signalSnapshot, uncertaintyPenaltyFactor: 1.0);
|
||||
|
||||
// Assert
|
||||
// With 100% entropy and 100% penalty, effective priority = impact * (1 - 1.0) = 0
|
||||
result.EffectivePriority.Should().BeApproximately(0.0, 0.001);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_IsDeterministic_SameInputSameOutput()
|
||||
{
|
||||
// Arrange
|
||||
var impactContext = new ImpactContext
|
||||
{
|
||||
Environment = EnvironmentType.Staging,
|
||||
DataSensitivity = DataSensitivity.Pii,
|
||||
FleetPrevalence = 0.5,
|
||||
SlaTier = SlaTier.Important,
|
||||
CvssScore = 7.5
|
||||
};
|
||||
var signalSnapshot = CreatePartialSnapshot();
|
||||
|
||||
// Act
|
||||
var result1 = _calculator.Calculate(impactContext, signalSnapshot);
|
||||
var result2 = _calculator.Calculate(impactContext, signalSnapshot);
|
||||
|
||||
// Assert
|
||||
result1.EffectivePriority.Should().Be(result2.EffectivePriority);
|
||||
result1.EffectivePriorityBasisPoints.Should().Be(result2.EffectivePriorityBasisPoints);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_BasisPointsCalculatedCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
var impactContext = ImpactContext.DefaultForUnknowns();
|
||||
var signalSnapshot = CreateFullSnapshot();
|
||||
|
||||
// Act
|
||||
var result = _calculator.Calculate(impactContext, signalSnapshot);
|
||||
|
||||
// Assert
|
||||
var expectedBasisPoints = (int)Math.Round(result.EffectivePriority * 10000);
|
||||
result.EffectivePriorityBasisPoints.Should().Be(expectedBasisPoints);
|
||||
}
|
||||
|
||||
private SignalSnapshot CreateFullSnapshot()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
return new SignalSnapshot
|
||||
{
|
||||
Cve = "CVE-2024-1234",
|
||||
Purl = "pkg:maven/test@1.0",
|
||||
Vex = SignalState<VexClaimSummary>.Queried(
|
||||
new VexClaimSummary { Status = "affected", Confidence = 0.95, StatementCount = 3, ComputedAt = now }, now),
|
||||
Epss = SignalState<EpssEvidence>.Queried(
|
||||
new EpssEvidence { Cve = "CVE-2024-1234", Epss = 0.5, Percentile = 0.8, PublishedAt = now }, now),
|
||||
Reachability = SignalState<ReachabilityEvidence>.Queried(
|
||||
new ReachabilityEvidence { Status = ReachabilityStatus.Reachable, AnalyzedAt = now }, now),
|
||||
Runtime = SignalState<RuntimeEvidence>.Queried(
|
||||
new RuntimeEvidence { Detected = true, DetectedAt = now }, now),
|
||||
Backport = SignalState<BackportEvidence>.Queried(
|
||||
new BackportEvidence { Detected = false, AnalyzedAt = now }, now),
|
||||
Sbom = SignalState<SbomLineageEvidence>.Queried(
|
||||
new SbomLineageEvidence { HasLineage = true, AnalyzedAt = now }, now),
|
||||
Cvss = SignalState<CvssEvidence>.Queried(
|
||||
new CvssEvidence { Version = "3.1", BaseScore = 9.8, Severity = "CRITICAL", Source = "NVD", PublishedAt = now }, now),
|
||||
SnapshotAt = now
|
||||
};
|
||||
}
|
||||
|
||||
private SignalSnapshot CreatePartialSnapshot()
|
||||
{
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
return new SignalSnapshot
|
||||
{
|
||||
Cve = "CVE-2024-1234",
|
||||
Purl = "pkg:maven/test@1.0",
|
||||
Vex = SignalState<VexClaimSummary>.Queried(
|
||||
new VexClaimSummary { Status = "affected", Confidence = 0.95, StatementCount = 3, ComputedAt = now }, now),
|
||||
Epss = SignalState<EpssEvidence>.Queried(
|
||||
new EpssEvidence { Cve = "CVE-2024-1234", Epss = 0.5, Percentile = 0.8, PublishedAt = now }, now),
|
||||
Reachability = SignalState<ReachabilityEvidence>.NotQueried(),
|
||||
Runtime = SignalState<RuntimeEvidence>.NotQueried(),
|
||||
Backport = SignalState<BackportEvidence>.NotQueried(),
|
||||
Sbom = SignalState<SbomLineageEvidence>.NotQueried(),
|
||||
Cvss = SignalState<CvssEvidence>.NotQueried(),
|
||||
SnapshotAt = now
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,262 @@
|
||||
// <copyright file="DeltaIfPresentCalculatorTests.cs" company="StellaOps">
|
||||
// Copyright (c) StellaOps. Licensed under the BUSL-1.1.
|
||||
// </copyright>
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Policy.Determinization.Evidence;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using StellaOps.Policy.Determinization.Scoring;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Tests.Scoring;
|
||||
|
||||
public sealed class DeltaIfPresentCalculatorTests
|
||||
{
|
||||
private readonly FakeTimeProvider _timeProvider = new();
|
||||
private readonly UncertaintyScoreCalculator _uncertaintyCalculator;
|
||||
private readonly TrustScoreAggregator _trustAggregator;
|
||||
private readonly DeltaIfPresentCalculator _calculator;
|
||||
|
||||
public DeltaIfPresentCalculatorTests()
|
||||
{
|
||||
_uncertaintyCalculator = new UncertaintyScoreCalculator(
|
||||
NullLogger<UncertaintyScoreCalculator>.Instance);
|
||||
_trustAggregator = new TrustScoreAggregator(
|
||||
NullLogger<TrustScoreAggregator>.Instance);
|
||||
_calculator = new DeltaIfPresentCalculator(
|
||||
NullLogger<DeltaIfPresentCalculator>.Instance,
|
||||
_uncertaintyCalculator,
|
||||
_trustAggregator,
|
||||
_timeProvider);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculateSingleSignalDelta_VexSignal_ReturnsExpectedDelta()
|
||||
{
|
||||
// Arrange
|
||||
var snapshot = CreatePartialSnapshot();
|
||||
|
||||
// Act
|
||||
var result = _calculator.CalculateSingleSignalDelta(snapshot, "VEX", 0.0);
|
||||
|
||||
// Assert
|
||||
result.Signal.Should().Be("VEX");
|
||||
result.AssumedValue.Should().Be(0.0);
|
||||
result.SignalWeight.Should().Be(0.25);
|
||||
result.HypotheticalEntropy.Should().BeLessThan(result.CurrentEntropy);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculateSingleSignalDelta_HighRiskValue_IncreasesScore()
|
||||
{
|
||||
// Arrange
|
||||
var snapshot = CreatePartialSnapshot();
|
||||
|
||||
// Act
|
||||
var lowRisk = _calculator.CalculateSingleSignalDelta(snapshot, "EPSS", 0.0);
|
||||
var highRisk = _calculator.CalculateSingleSignalDelta(snapshot, "EPSS", 1.0);
|
||||
|
||||
// Assert
|
||||
highRisk.HypotheticalScore.Should().BeGreaterThan(lowRisk.HypotheticalScore);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculateSingleSignalDelta_AddsSignal_DecreasesEntropy()
|
||||
{
|
||||
// Arrange
|
||||
var snapshot = CreatePartialSnapshot();
|
||||
|
||||
// Act
|
||||
var result = _calculator.CalculateSingleSignalDelta(snapshot, "Runtime", 0.5);
|
||||
|
||||
// Assert
|
||||
result.EntropyDelta.Should().BeLessThan(0);
|
||||
result.HypotheticalEntropy.Should().BeLessThan(result.CurrentEntropy);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculateFullAnalysis_ReturnsAllGaps()
|
||||
{
|
||||
// Arrange
|
||||
var snapshot = CreatePartialSnapshot();
|
||||
|
||||
// Act
|
||||
var analysis = _calculator.CalculateFullAnalysis(snapshot);
|
||||
|
||||
// Assert
|
||||
analysis.GapAnalysis.Should().HaveCountGreaterThan(0);
|
||||
analysis.PrioritizedGaps.Should().NotBeEmpty();
|
||||
analysis.ComputedAt.Should().Be(_timeProvider.GetUtcNow());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculateFullAnalysis_PrioritizesByMaxImpact()
|
||||
{
|
||||
// Arrange
|
||||
var snapshot = CreateEmptySnapshot();
|
||||
|
||||
// Act
|
||||
var analysis = _calculator.CalculateFullAnalysis(snapshot);
|
||||
|
||||
// Assert - VEX and Reachability have highest weights (0.25 each)
|
||||
var topPriority = analysis.PrioritizedGaps.Take(2);
|
||||
topPriority.Should().Contain(s => s == "VEX" || s == "Reachability");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculateFullAnalysis_IncludesBestWorstPriorCases()
|
||||
{
|
||||
// Arrange
|
||||
var snapshot = CreatePartialSnapshot();
|
||||
|
||||
// Act
|
||||
var analysis = _calculator.CalculateFullAnalysis(snapshot);
|
||||
|
||||
// Assert
|
||||
foreach (var gap in analysis.GapAnalysis)
|
||||
{
|
||||
gap.BestCase.Should().NotBeNull();
|
||||
gap.WorstCase.Should().NotBeNull();
|
||||
gap.PriorCase.Should().NotBeNull();
|
||||
|
||||
gap.BestCase.AssumedValue.Should().Be(0.0);
|
||||
gap.WorstCase.AssumedValue.Should().Be(1.0);
|
||||
gap.MaxImpact.Should().BeGreaterOrEqualTo(0.0);
|
||||
}
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculateScoreBounds_NoGaps_ReturnsSingleValue()
|
||||
{
|
||||
// Arrange
|
||||
var snapshot = CreateFullSnapshot();
|
||||
|
||||
// Act
|
||||
var bounds = _calculator.CalculateScoreBounds(snapshot);
|
||||
|
||||
// Assert
|
||||
bounds.GapCount.Should().Be(0);
|
||||
bounds.Range.Should().Be(0.0);
|
||||
bounds.MinimumScore.Should().Be(bounds.MaximumScore);
|
||||
bounds.MissingWeightPercentage.Should().Be(0.0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculateScoreBounds_WithGaps_ReturnsRange()
|
||||
{
|
||||
// Arrange
|
||||
var snapshot = CreatePartialSnapshot();
|
||||
|
||||
// Act
|
||||
var bounds = _calculator.CalculateScoreBounds(snapshot);
|
||||
|
||||
// Assert
|
||||
bounds.GapCount.Should().BeGreaterThan(0);
|
||||
bounds.Range.Should().BeGreaterThan(0.0);
|
||||
bounds.MaximumScore.Should().BeGreaterThanOrEqualTo(bounds.MinimumScore);
|
||||
bounds.MissingWeightPercentage.Should().BeGreaterThan(0.0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculateScoreBounds_EmptySnapshot_ReturnsFullRange()
|
||||
{
|
||||
// Arrange
|
||||
var snapshot = CreateEmptySnapshot();
|
||||
|
||||
// Act
|
||||
var bounds = _calculator.CalculateScoreBounds(snapshot);
|
||||
|
||||
// Assert
|
||||
bounds.GapCount.Should().Be(6); // All 6 signals missing
|
||||
bounds.CurrentEntropy.Should().Be(1.0);
|
||||
bounds.MissingWeightPercentage.Should().Be(100.0);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("VEX", 0.25)]
|
||||
[InlineData("EPSS", 0.15)]
|
||||
[InlineData("Reachability", 0.25)]
|
||||
[InlineData("Runtime", 0.15)]
|
||||
[InlineData("Backport", 0.10)]
|
||||
[InlineData("SBOMLineage", 0.10)]
|
||||
public void CalculateSingleSignalDelta_CorrectWeightPerSignal(string signal, double expectedWeight)
|
||||
{
|
||||
// Arrange
|
||||
var snapshot = CreateEmptySnapshot();
|
||||
|
||||
// Act
|
||||
var result = _calculator.CalculateSingleSignalDelta(snapshot, signal, 0.5);
|
||||
|
||||
// Assert
|
||||
result.SignalWeight.Should().Be(expectedWeight);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculateSingleSignalDelta_DeterministicOutput()
|
||||
{
|
||||
// Arrange
|
||||
var snapshot = CreatePartialSnapshot();
|
||||
|
||||
// Act - Run twice
|
||||
var result1 = _calculator.CalculateSingleSignalDelta(snapshot, "VEX", 0.5);
|
||||
var result2 = _calculator.CalculateSingleSignalDelta(snapshot, "VEX", 0.5);
|
||||
|
||||
// Assert - Results should be identical
|
||||
result1.CurrentScore.Should().Be(result2.CurrentScore);
|
||||
result1.HypotheticalScore.Should().Be(result2.HypotheticalScore);
|
||||
result1.CurrentEntropy.Should().Be(result2.CurrentEntropy);
|
||||
result1.HypotheticalEntropy.Should().Be(result2.HypotheticalEntropy);
|
||||
}
|
||||
|
||||
private SignalSnapshot CreateEmptySnapshot()
|
||||
{
|
||||
return SignalSnapshot.Empty("CVE-2024-1234", "pkg:maven/test@1.0", _timeProvider.GetUtcNow());
|
||||
}
|
||||
|
||||
private SignalSnapshot CreatePartialSnapshot()
|
||||
{
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
return new SignalSnapshot
|
||||
{
|
||||
Cve = "CVE-2024-1234",
|
||||
Purl = "pkg:maven/test@1.0",
|
||||
Vex = SignalState<VexClaimSummary>.NotQueried(),
|
||||
Epss = SignalState<EpssEvidence>.NotQueried(),
|
||||
Reachability = SignalState<ReachabilityEvidence>.Queried(
|
||||
new ReachabilityEvidence { Status = ReachabilityStatus.Reachable, AnalyzedAt = now }, now),
|
||||
Runtime = SignalState<RuntimeEvidence>.NotQueried(),
|
||||
Backport = SignalState<BackportEvidence>.NotQueried(),
|
||||
Sbom = SignalState<SbomLineageEvidence>.Queried(
|
||||
new SbomLineageEvidence { SbomDigest = "sha256:abc", Format = "SPDX", ComponentCount = 150, GeneratedAt = now, HasProvenance = true }, now),
|
||||
Cvss = SignalState<CvssEvidence>.NotQueried(),
|
||||
SnapshotAt = now
|
||||
};
|
||||
}
|
||||
|
||||
private SignalSnapshot CreateFullSnapshot()
|
||||
{
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
return new SignalSnapshot
|
||||
{
|
||||
Cve = "CVE-2024-1234",
|
||||
Purl = "pkg:maven/test@1.0",
|
||||
Vex = SignalState<VexClaimSummary>.Queried(
|
||||
new VexClaimSummary { Status = "affected", Confidence = 0.95, StatementCount = 3, ComputedAt = now }, now),
|
||||
Epss = SignalState<EpssEvidence>.Queried(
|
||||
new EpssEvidence { Cve = "CVE-2024-1234", Epss = 0.5, Percentile = 0.8, PublishedAt = now }, now),
|
||||
Reachability = SignalState<ReachabilityEvidence>.Queried(
|
||||
new ReachabilityEvidence { Status = ReachabilityStatus.Reachable, AnalyzedAt = now }, now),
|
||||
Runtime = SignalState<RuntimeEvidence>.Queried(
|
||||
new RuntimeEvidence { Detected = true, Source = "test", ObservationStart = now.AddDays(-7), ObservationEnd = now, Confidence = 0.9 }, now),
|
||||
Backport = SignalState<BackportEvidence>.Queried(
|
||||
new BackportEvidence { Detected = false, Source = "test", DetectedAt = now, Confidence = 0.85 }, now),
|
||||
Sbom = SignalState<SbomLineageEvidence>.Queried(
|
||||
new SbomLineageEvidence { SbomDigest = "sha256:abc", Format = "SPDX", ComponentCount = 150, GeneratedAt = now, HasProvenance = true }, now),
|
||||
Cvss = SignalState<CvssEvidence>.Queried(
|
||||
new CvssEvidence { Vector = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", Version = "3.1", BaseScore = 9.8, Severity = "CRITICAL", Source = "NVD", PublishedAt = now }, now),
|
||||
SnapshotAt = now
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,400 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EwsCalculatorTests.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Unit tests for EWS calculator.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Tests.Scoring;
|
||||
|
||||
public sealed class EwsCalculatorTests
|
||||
{
|
||||
private readonly EwsCalculator _calculator;
|
||||
private readonly FakeTimeProvider _timeProvider;
|
||||
|
||||
public EwsCalculatorTests()
|
||||
{
|
||||
_timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2026-02-08T12:00:00Z"));
|
||||
_calculator = EwsCalculator.CreateDefault(_timeProvider);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_WithEmptySignal_ReturnsConservativeScore()
|
||||
{
|
||||
// Arrange
|
||||
var signal = EwsSignalInput.Empty;
|
||||
|
||||
// Act
|
||||
var result = _calculator.Calculate(signal);
|
||||
|
||||
// Assert
|
||||
Assert.NotNull(result);
|
||||
Assert.InRange(result.Score, 40, 80); // Conservative assumptions
|
||||
Assert.Equal(6, result.Dimensions.Length); // All 6 dimensions
|
||||
Assert.True(result.NeedsReview); // Low confidence triggers review
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_WithHighRiskSignals_ReturnsHighScore()
|
||||
{
|
||||
// Arrange
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
ReachabilityTier = 4, // R4: Reachable from entrypoint
|
||||
IsInKev = true, // Known exploited
|
||||
EpssProbability = 0.85,
|
||||
VexStatus = "affected",
|
||||
CveId = "CVE-2024-12345"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _calculator.Calculate(signal);
|
||||
|
||||
// Assert
|
||||
Assert.InRange(result.Score, 70, 100); // KEV floor should kick in
|
||||
Assert.Equal("Critical", result.RiskTier);
|
||||
Assert.Contains(result.AppliedGuardrails, g => g.StartsWith("kev_floor"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_WithMitigatedSignals_ReturnsLowScore()
|
||||
{
|
||||
// Arrange
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
ReachabilityTier = 0, // R0: Unreachable
|
||||
BackportDetected = true,
|
||||
BackportConfidence = 0.95,
|
||||
VexStatus = "not_affected",
|
||||
VexJustification = "Component not used in this deployment"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _calculator.Calculate(signal);
|
||||
|
||||
// Assert
|
||||
Assert.InRange(result.Score, 0, 25); // not_affected cap
|
||||
Assert.Equal("Informational", result.RiskTier);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_AllDimensionsPopulated_ReturnsCorrectStructure()
|
||||
{
|
||||
// Arrange
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
ReachabilityTier = 2,
|
||||
CallGraphConfidence = 0.8,
|
||||
InstrumentationCoverage = 0.7,
|
||||
RuntimeInvocationCount = 50,
|
||||
BackportDetected = false,
|
||||
EpssProbability = 0.25,
|
||||
CvssBaseScore = 7.5,
|
||||
SbomCompleteness = 0.9,
|
||||
SbomSigned = true,
|
||||
VexStatus = "under_investigation",
|
||||
CveId = "CVE-2024-99999",
|
||||
Purl = "pkg:npm/example@1.0.0"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _calculator.Calculate(signal);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(6, result.Dimensions.Length);
|
||||
|
||||
// Check each dimension is present
|
||||
Assert.Contains(result.Dimensions, d => d.Dimension == EwsDimension.Reachability);
|
||||
Assert.Contains(result.Dimensions, d => d.Dimension == EwsDimension.RuntimeSignals);
|
||||
Assert.Contains(result.Dimensions, d => d.Dimension == EwsDimension.BackportEvidence);
|
||||
Assert.Contains(result.Dimensions, d => d.Dimension == EwsDimension.Exploitability);
|
||||
Assert.Contains(result.Dimensions, d => d.Dimension == EwsDimension.SourceConfidence);
|
||||
Assert.Contains(result.Dimensions, d => d.Dimension == EwsDimension.MitigationStatus);
|
||||
|
||||
// Check metadata propagated
|
||||
Assert.Equal("CVE-2024-99999", result.CveId);
|
||||
Assert.Equal("pkg:npm/example@1.0.0", result.Purl);
|
||||
Assert.Equal(_timeProvider.GetUtcNow(), result.CalculatedAt);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_CustomWeights_UsesProvidedWeights()
|
||||
{
|
||||
// Arrange
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
IsInKev = false,
|
||||
EpssProbability = 0.9 // High exploitability
|
||||
};
|
||||
|
||||
var exploitHeavyWeights = new EwsDimensionWeights
|
||||
{
|
||||
Reachability = 0.1,
|
||||
RuntimeSignals = 0.1,
|
||||
BackportEvidence = 0.05,
|
||||
Exploitability = 0.5, // Heavy weight on exploitability
|
||||
SourceConfidence = 0.1,
|
||||
MitigationStatus = 0.15
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _calculator.Calculate(signal, exploitHeavyWeights);
|
||||
|
||||
// Assert
|
||||
var xplDim = result.GetDimension(EwsDimension.Exploitability);
|
||||
Assert.NotNull(xplDim);
|
||||
Assert.Equal(0.5, xplDim.Weight);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_SpeculativeScore_AppliesCap()
|
||||
{
|
||||
// Arrange - no real evidence, just defaults/assumptions
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
CveId = "CVE-2024-00001"
|
||||
};
|
||||
|
||||
var guardrails = new EwsGuardrails
|
||||
{
|
||||
SpeculativeCap = 55
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _calculator.Calculate(signal, guardrails: guardrails);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.Score <= 55, $"Score {result.Score} should be capped at speculative cap 55");
|
||||
Assert.True(result.NeedsReview); // Should need review due to low confidence
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_Deterministic_SameInputsProduceSameOutput()
|
||||
{
|
||||
// Arrange
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
ReachabilityTier = 3,
|
||||
EpssProbability = 0.45,
|
||||
VexStatus = "affected"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result1 = _calculator.Calculate(signal);
|
||||
var result2 = _calculator.Calculate(signal);
|
||||
|
||||
// Assert - should be identical
|
||||
Assert.Equal(result1.Score, result2.Score);
|
||||
Assert.Equal(result1.RawScore, result2.RawScore);
|
||||
Assert.Equal(result1.Confidence, result2.Confidence);
|
||||
Assert.Equal(result1.CalculatedAt, result2.CalculatedAt);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculateDimension_ReachabilityR4_ReturnsHighScore()
|
||||
{
|
||||
// Arrange
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
ReachabilityTier = 4,
|
||||
CallGraphConfidence = 0.9
|
||||
};
|
||||
|
||||
// Act
|
||||
var dimScore = _calculator.CalculateDimension(EwsDimension.Reachability, signal, 0.25);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(EwsDimension.Reachability, dimScore.Dimension);
|
||||
Assert.Equal("RCH", dimScore.Code);
|
||||
Assert.InRange(dimScore.Score, 90, 100);
|
||||
Assert.True(dimScore.Confidence > 0.7);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculateDimension_ReachabilityR0_ReturnsLowScore()
|
||||
{
|
||||
// Arrange
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
ReachabilityTier = 0,
|
||||
CallGraphConfidence = 0.95
|
||||
};
|
||||
|
||||
// Act
|
||||
var dimScore = _calculator.CalculateDimension(EwsDimension.Reachability, signal, 0.25);
|
||||
|
||||
// Assert
|
||||
Assert.InRange(dimScore.Score, 0, 10);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetNormalizer_AllDimensions_ReturnsNonNull()
|
||||
{
|
||||
foreach (EwsDimension dimension in Enum.GetValues<EwsDimension>())
|
||||
{
|
||||
var normalizer = _calculator.GetNormalizer(dimension);
|
||||
Assert.NotNull(normalizer);
|
||||
Assert.Equal(dimension, normalizer.Dimension);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class EwsDimensionCodesTests
|
||||
{
|
||||
[Theory]
|
||||
[InlineData(EwsDimension.Reachability, "RCH")]
|
||||
[InlineData(EwsDimension.RuntimeSignals, "RTS")]
|
||||
[InlineData(EwsDimension.BackportEvidence, "BKP")]
|
||||
[InlineData(EwsDimension.Exploitability, "XPL")]
|
||||
[InlineData(EwsDimension.SourceConfidence, "SRC")]
|
||||
[InlineData(EwsDimension.MitigationStatus, "MIT")]
|
||||
public void ToCode_ReturnsCorrectCode(EwsDimension dimension, string expectedCode)
|
||||
{
|
||||
Assert.Equal(expectedCode, dimension.ToCode());
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("RCH", EwsDimension.Reachability)]
|
||||
[InlineData("rch", EwsDimension.Reachability)]
|
||||
[InlineData("XPL", EwsDimension.Exploitability)]
|
||||
[InlineData("MIT", EwsDimension.MitigationStatus)]
|
||||
public void FromCode_ReturnsCorrectDimension(string code, EwsDimension expected)
|
||||
{
|
||||
var result = EwsDimensionCodes.FromCode(code);
|
||||
Assert.Equal(expected, result);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData("INVALID")]
|
||||
[InlineData("")]
|
||||
[InlineData(null)]
|
||||
public void FromCode_InvalidCode_ReturnsNull(string? code)
|
||||
{
|
||||
var result = EwsDimensionCodes.FromCode(code!);
|
||||
Assert.Null(result);
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class EwsDimensionWeightsTests
|
||||
{
|
||||
[Fact]
|
||||
public void Default_IsNormalized()
|
||||
{
|
||||
var weights = EwsDimensionWeights.Default;
|
||||
Assert.True(weights.IsNormalized());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Legacy_IsNormalized()
|
||||
{
|
||||
var weights = EwsDimensionWeights.Legacy;
|
||||
Assert.True(weights.IsNormalized());
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetWeight_ReturnsCorrectWeights()
|
||||
{
|
||||
var weights = new EwsDimensionWeights
|
||||
{
|
||||
Reachability = 0.3,
|
||||
RuntimeSignals = 0.1,
|
||||
BackportEvidence = 0.1,
|
||||
Exploitability = 0.25,
|
||||
SourceConfidence = 0.1,
|
||||
MitigationStatus = 0.15
|
||||
};
|
||||
|
||||
Assert.Equal(0.3, weights.GetWeight(EwsDimension.Reachability));
|
||||
Assert.Equal(0.25, weights.GetWeight(EwsDimension.Exploitability));
|
||||
Assert.True(weights.IsNormalized());
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class EwsGuardrailsTests
|
||||
{
|
||||
[Fact]
|
||||
public void Default_HasReasonableValues()
|
||||
{
|
||||
var guardrails = EwsGuardrails.Default;
|
||||
|
||||
Assert.InRange(guardrails.NotAffectedCap, 10, 50);
|
||||
Assert.InRange(guardrails.RuntimeFloor, 20, 50);
|
||||
Assert.InRange(guardrails.SpeculativeCap, 50, 70);
|
||||
Assert.InRange(guardrails.KevFloor, 60, 90);
|
||||
Assert.InRange(guardrails.BackportedCap, 10, 30);
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class GuardrailsEngineTests
|
||||
{
|
||||
private readonly GuardrailsEngine _engine = new();
|
||||
|
||||
[Fact]
|
||||
public void Apply_KevFloor_RaisesScoreForKnownExploited()
|
||||
{
|
||||
// Arrange
|
||||
var signal = new EwsSignalInput { IsInKev = true };
|
||||
var guardrails = new EwsGuardrails { KevFloor = 70 };
|
||||
|
||||
// Act
|
||||
var result = _engine.Apply(50, signal, [], guardrails);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(70, result.AdjustedScore);
|
||||
Assert.Contains("kev_floor:70", result.AppliedGuardrails);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Apply_BackportedCap_LowersScoreForBackported()
|
||||
{
|
||||
// Arrange
|
||||
var signal = new EwsSignalInput { BackportDetected = true };
|
||||
var guardrails = new EwsGuardrails { BackportedCap = 20 };
|
||||
|
||||
// Act
|
||||
var result = _engine.Apply(75, signal, [], guardrails);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(20, result.AdjustedScore);
|
||||
Assert.Contains("backported_cap:20", result.AppliedGuardrails);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Apply_NotAffectedCap_LowersScoreForMitigated()
|
||||
{
|
||||
// Arrange
|
||||
var signal = new EwsSignalInput { VexStatus = "not_affected" };
|
||||
var guardrails = new EwsGuardrails { NotAffectedCap = 25 };
|
||||
|
||||
// Act
|
||||
var result = _engine.Apply(60, signal, [], guardrails);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(25, result.AdjustedScore);
|
||||
Assert.Contains("not_affected_cap:25", result.AppliedGuardrails);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Apply_NoGuardrailsTriggered_ReturnsOriginalScore()
|
||||
{
|
||||
// Arrange
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
VexStatus = "affected",
|
||||
IsInKev = false,
|
||||
BackportDetected = false
|
||||
};
|
||||
var guardrails = EwsGuardrails.Default;
|
||||
|
||||
// Act
|
||||
var result = _engine.Apply(55, signal, [], guardrails);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(55, result.AdjustedScore);
|
||||
Assert.Empty(result.AppliedGuardrails);
|
||||
Assert.False(result.WasModified);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,286 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EwsNormalizerTests.cs
|
||||
// Sprint: SPRINT_20260208_045_Policy_evidence_weighted_score_model
|
||||
// Task: T1 - Evidence-Weighted Score (EWS) Model (6-Dimension Scoring)
|
||||
// Description: Unit tests for individual dimension normalizers.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Policy.Determinization.Scoring.EvidenceWeightedScoring;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Tests.Scoring;
|
||||
|
||||
public sealed class ReachabilityNormalizerTests
|
||||
{
|
||||
private readonly ReachabilityNormalizer _normalizer = new();
|
||||
|
||||
[Theory]
|
||||
[InlineData(0, 0, 10)] // R0: Unreachable
|
||||
[InlineData(1, 15, 30)] // R1: In dependency
|
||||
[InlineData(2, 35, 50)] // R2: Imported not called
|
||||
[InlineData(3, 60, 80)] // R3: Called not entrypoint
|
||||
[InlineData(4, 90, 100)] // R4: Reachable
|
||||
public void Normalize_ReachabilityTier_ReturnsExpectedRange(int tier, int minScore, int maxScore)
|
||||
{
|
||||
var signal = new EwsSignalInput { ReachabilityTier = tier };
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.InRange(score, minScore, maxScore);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_RuntimeTraceConfirmed_BoostsScore()
|
||||
{
|
||||
var signalWithTrace = new EwsSignalInput
|
||||
{
|
||||
ReachabilityTier = 3,
|
||||
RuntimeTraceConfirmed = true
|
||||
};
|
||||
var signalWithoutTrace = new EwsSignalInput
|
||||
{
|
||||
ReachabilityTier = 3
|
||||
};
|
||||
|
||||
var scoreWith = _normalizer.Normalize(signalWithTrace);
|
||||
var scoreWithout = _normalizer.Normalize(signalWithoutTrace);
|
||||
|
||||
Assert.True(scoreWith > scoreWithout);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetConfidence_NoSignals_ReturnsLowConfidence()
|
||||
{
|
||||
var signal = EwsSignalInput.Empty;
|
||||
var confidence = _normalizer.GetConfidence(signal);
|
||||
Assert.True(confidence < 0.3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetExplanation_ReturnsNonEmptyString()
|
||||
{
|
||||
var signal = new EwsSignalInput { ReachabilityTier = 3 };
|
||||
var explanation = _normalizer.GetExplanation(signal, 70);
|
||||
Assert.False(string.IsNullOrWhiteSpace(explanation));
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class ExploitabilityNormalizerTests
|
||||
{
|
||||
private readonly ExploitabilityNormalizer _normalizer = new();
|
||||
|
||||
[Fact]
|
||||
public void Normalize_InKev_ReturnsMaximumScore()
|
||||
{
|
||||
var signal = new EwsSignalInput { IsInKev = true };
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.Equal(100, score);
|
||||
}
|
||||
|
||||
[Theory]
|
||||
[InlineData(0.0, 0, 20)]
|
||||
[InlineData(0.5, 40, 70)]
|
||||
[InlineData(0.9, 70, 95)]
|
||||
[InlineData(1.0, 85, 100)]
|
||||
public void Normalize_EpssProbability_ScalesAppropriately(double epss, int minScore, int maxScore)
|
||||
{
|
||||
var signal = new EwsSignalInput { EpssProbability = epss };
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.InRange(score, minScore, maxScore);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_ExploitKitAvailable_HighScore()
|
||||
{
|
||||
var signal = new EwsSignalInput { ExploitKitAvailable = true };
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.True(score >= 70);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GetConfidence_InKev_ReturnsMaximumConfidence()
|
||||
{
|
||||
var signal = new EwsSignalInput { IsInKev = true };
|
||||
var confidence = _normalizer.GetConfidence(signal);
|
||||
Assert.Equal(1.0, confidence);
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class BackportEvidenceNormalizerTests
|
||||
{
|
||||
private readonly BackportEvidenceNormalizer _normalizer = new();
|
||||
|
||||
[Fact]
|
||||
public void Normalize_VendorBackportConfirmed_ReturnsVeryLowScore()
|
||||
{
|
||||
var signal = new EwsSignalInput { VendorBackportConfirmed = true };
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.InRange(score, 0, 10);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_BackportDetectedWithHighConfidence_ReturnsLowScore()
|
||||
{
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
BackportDetected = true,
|
||||
BackportConfidence = 0.9
|
||||
};
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.InRange(score, 0, 15);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_NoBackportWithHighConfidence_ReturnsHighScore()
|
||||
{
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
BackportDetected = false,
|
||||
BackportConfidence = 0.9
|
||||
};
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.True(score >= 90);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_NoAnalysis_AssumesVulnerable()
|
||||
{
|
||||
var signal = EwsSignalInput.Empty;
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.True(score >= 70);
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class MitigationStatusNormalizerTests
|
||||
{
|
||||
private readonly MitigationStatusNormalizer _normalizer = new();
|
||||
|
||||
[Theory]
|
||||
[InlineData("not_affected", 0, 10)]
|
||||
[InlineData("fixed", 5, 15)]
|
||||
[InlineData("under_investigation", 50, 70)]
|
||||
[InlineData("affected", 85, 95)]
|
||||
[InlineData("exploitable", 95, 100)]
|
||||
public void Normalize_VexStatus_ReturnsExpectedRange(string status, int minScore, int maxScore)
|
||||
{
|
||||
var signal = new EwsSignalInput { VexStatus = status };
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.InRange(score, minScore, maxScore);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_WorkaroundApplied_ReducesScore()
|
||||
{
|
||||
var signalWithWorkaround = new EwsSignalInput
|
||||
{
|
||||
VexStatus = "affected",
|
||||
WorkaroundApplied = true
|
||||
};
|
||||
var signalWithoutWorkaround = new EwsSignalInput
|
||||
{
|
||||
VexStatus = "affected"
|
||||
};
|
||||
|
||||
var scoreWith = _normalizer.Normalize(signalWithWorkaround);
|
||||
var scoreWithout = _normalizer.Normalize(signalWithoutWorkaround);
|
||||
|
||||
Assert.True(scoreWith < scoreWithout);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_NetworkControlsApplied_ReducesScore()
|
||||
{
|
||||
var signalWithControls = new EwsSignalInput
|
||||
{
|
||||
VexStatus = "affected",
|
||||
NetworkControlsApplied = true
|
||||
};
|
||||
var signalWithoutControls = new EwsSignalInput
|
||||
{
|
||||
VexStatus = "affected"
|
||||
};
|
||||
|
||||
var scoreWith = _normalizer.Normalize(signalWithControls);
|
||||
var scoreWithout = _normalizer.Normalize(signalWithoutControls);
|
||||
|
||||
Assert.True(scoreWith < scoreWithout);
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class RuntimeSignalsNormalizerTests
|
||||
{
|
||||
private readonly RuntimeSignalsNormalizer _normalizer = new();
|
||||
|
||||
[Fact]
|
||||
public void Normalize_HighInstrumentationNoInvocations_LowScore()
|
||||
{
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
InstrumentationCoverage = 0.9,
|
||||
RuntimeInvocationCount = 0,
|
||||
ApmActiveUsage = false
|
||||
};
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.InRange(score, 0, 30);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_HighInvocationCount_HighScore()
|
||||
{
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
RuntimeInvocationCount = 5000
|
||||
};
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.True(score >= 70);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_ApmActiveUsage_HighScore()
|
||||
{
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
ApmActiveUsage = true
|
||||
};
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.True(score >= 70);
|
||||
}
|
||||
}
|
||||
|
||||
public sealed class SourceConfidenceNormalizerTests
|
||||
{
|
||||
private readonly SourceConfidenceNormalizer _normalizer = new();
|
||||
|
||||
[Fact]
|
||||
public void Normalize_HighConfidenceSource_LowRiskScore()
|
||||
{
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
SbomCompleteness = 0.95,
|
||||
SbomSigned = true,
|
||||
AttestationCount = 3,
|
||||
LineageVerified = true
|
||||
};
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.InRange(score, 0, 20); // Low risk from source uncertainty
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_LowConfidenceSource_HighRiskScore()
|
||||
{
|
||||
var signal = new EwsSignalInput
|
||||
{
|
||||
SbomCompleteness = 0.3,
|
||||
SbomSigned = false,
|
||||
AttestationCount = 0,
|
||||
LineageVerified = false
|
||||
};
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.True(score >= 60); // High risk from source uncertainty
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Normalize_NoSignals_AssumesHighUncertainty()
|
||||
{
|
||||
var signal = EwsSignalInput.Empty;
|
||||
var score = _normalizer.Normalize(signal);
|
||||
Assert.True(score >= 70);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,260 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.Policy.Determinization.Scoring;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Tests.Scoring;
|
||||
|
||||
public class ImpactScoreCalculatorTests
|
||||
{
|
||||
private readonly ImpactScoreCalculator _calculator;
|
||||
|
||||
public ImpactScoreCalculatorTests()
|
||||
{
|
||||
_calculator = new ImpactScoreCalculator(NullLogger<ImpactScoreCalculator>.Instance);
|
||||
}
|
||||
|
||||
#region Calculate Tests
|
||||
|
||||
[Fact]
|
||||
public void Calculate_ProductionHighSensitivityCriticalSla_ReturnsHighScore()
|
||||
{
|
||||
// Arrange
|
||||
var context = new ImpactContext
|
||||
{
|
||||
Environment = EnvironmentType.Production,
|
||||
DataSensitivity = DataSensitivity.Healthcare,
|
||||
FleetPrevalence = 0.9,
|
||||
SlaTier = SlaTier.MissionCritical,
|
||||
CvssScore = 9.8
|
||||
};
|
||||
|
||||
// Act
|
||||
var score = _calculator.Calculate(context);
|
||||
|
||||
// Assert
|
||||
score.Score.Should().BeGreaterThan(0.8);
|
||||
score.BasisPoints.Should().BeGreaterThan(8000);
|
||||
score.EnvironmentExposure.Should().Be(1.0);
|
||||
score.CvssSeverityScore.Should().BeApproximately(0.98, 0.01);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_DevelopmentLowSensitivity_ReturnsLowScore()
|
||||
{
|
||||
// Arrange
|
||||
var context = new ImpactContext
|
||||
{
|
||||
Environment = EnvironmentType.Development,
|
||||
DataSensitivity = DataSensitivity.Public,
|
||||
FleetPrevalence = 0.1,
|
||||
SlaTier = SlaTier.NonCritical,
|
||||
CvssScore = 2.0
|
||||
};
|
||||
|
||||
// Act
|
||||
var score = _calculator.Calculate(context);
|
||||
|
||||
// Assert
|
||||
score.Score.Should().BeLessThan(0.2);
|
||||
score.BasisPoints.Should().BeLessThan(2000);
|
||||
score.EnvironmentExposure.Should().Be(0.0);
|
||||
score.DataSensitivityScore.Should().Be(0.0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_DefaultForUnknowns_ReturnsModerateScore()
|
||||
{
|
||||
// Arrange
|
||||
var context = ImpactContext.DefaultForUnknowns();
|
||||
|
||||
// Act
|
||||
var score = _calculator.Calculate(context);
|
||||
|
||||
// Assert - default context assumes production, internal data, 0.5 fleet, standard SLA, CVSS 5.0
|
||||
score.Score.Should().BeInRange(0.3, 0.6);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_CustomWeights_UsesProvidedWeights()
|
||||
{
|
||||
// Arrange
|
||||
var context = new ImpactContext
|
||||
{
|
||||
Environment = EnvironmentType.Production,
|
||||
DataSensitivity = DataSensitivity.Classified,
|
||||
FleetPrevalence = 1.0,
|
||||
SlaTier = SlaTier.MissionCritical,
|
||||
CvssScore = 10.0
|
||||
};
|
||||
|
||||
// All weights on CVSS, should return 1.0
|
||||
var weights = new ImpactFactorWeights
|
||||
{
|
||||
EnvironmentExposureWeight = 0.0,
|
||||
DataSensitivityWeight = 0.0,
|
||||
FleetPrevalenceWeight = 0.0,
|
||||
SlaTierWeight = 0.0,
|
||||
CvssSeverityWeight = 1.0
|
||||
};
|
||||
|
||||
// Act
|
||||
var score = _calculator.Calculate(context, weights);
|
||||
|
||||
// Assert
|
||||
score.Score.Should().BeApproximately(1.0, 0.001);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Calculate_IsDeterministic_SameInputSameOutput()
|
||||
{
|
||||
// Arrange
|
||||
var context = new ImpactContext
|
||||
{
|
||||
Environment = EnvironmentType.Staging,
|
||||
DataSensitivity = DataSensitivity.Pii,
|
||||
FleetPrevalence = 0.5,
|
||||
SlaTier = SlaTier.Important,
|
||||
CvssScore = 7.5
|
||||
};
|
||||
|
||||
// Act
|
||||
var score1 = _calculator.Calculate(context);
|
||||
var score2 = _calculator.Calculate(context);
|
||||
|
||||
// Assert
|
||||
score1.Score.Should().Be(score2.Score);
|
||||
score1.BasisPoints.Should().Be(score2.BasisPoints);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region NormalizeEnvironment Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData(EnvironmentType.Development, 0.0)]
|
||||
[InlineData(EnvironmentType.Testing, 0.33)]
|
||||
[InlineData(EnvironmentType.Staging, 0.66)]
|
||||
[InlineData(EnvironmentType.Production, 1.0)]
|
||||
public void NormalizeEnvironment_ReturnsExpectedScore(EnvironmentType env, double expected)
|
||||
{
|
||||
// Act
|
||||
var score = _calculator.NormalizeEnvironment(env);
|
||||
|
||||
// Assert
|
||||
score.Should().BeApproximately(expected, 0.01);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region NormalizeDataSensitivity Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData(DataSensitivity.Public, 0.0)]
|
||||
[InlineData(DataSensitivity.Internal, 0.2)]
|
||||
[InlineData(DataSensitivity.Pii, 0.5)]
|
||||
[InlineData(DataSensitivity.Financial, 0.7)]
|
||||
[InlineData(DataSensitivity.Healthcare, 0.8)]
|
||||
[InlineData(DataSensitivity.Classified, 1.0)]
|
||||
public void NormalizeDataSensitivity_ReturnsExpectedScore(DataSensitivity sensitivity, double expected)
|
||||
{
|
||||
// Act
|
||||
var score = _calculator.NormalizeDataSensitivity(sensitivity);
|
||||
|
||||
// Assert
|
||||
score.Should().BeApproximately(expected, 0.01);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region NormalizeSlaTier Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData(SlaTier.NonCritical, 0.0)]
|
||||
[InlineData(SlaTier.Standard, 0.25)]
|
||||
[InlineData(SlaTier.Important, 0.5)]
|
||||
[InlineData(SlaTier.Critical, 0.75)]
|
||||
[InlineData(SlaTier.MissionCritical, 1.0)]
|
||||
public void NormalizeSlaTier_ReturnsExpectedScore(SlaTier tier, double expected)
|
||||
{
|
||||
// Act
|
||||
var score = _calculator.NormalizeSlaTier(tier);
|
||||
|
||||
// Assert
|
||||
score.Should().BeApproximately(expected, 0.01);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region NormalizeCvss Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData(0.0, 0.0)]
|
||||
[InlineData(5.0, 0.5)]
|
||||
[InlineData(10.0, 1.0)]
|
||||
[InlineData(-1.0, 0.0)] // Clamped
|
||||
[InlineData(15.0, 1.0)] // Clamped
|
||||
public void NormalizeCvss_ReturnsExpectedScore(double cvss, double expected)
|
||||
{
|
||||
// Act
|
||||
var score = _calculator.NormalizeCvss(cvss);
|
||||
|
||||
// Assert
|
||||
score.Should().BeApproximately(expected, 0.01);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region ImpactFactorWeights Tests
|
||||
|
||||
[Fact]
|
||||
public void ImpactFactorWeights_Default_IsNormalized()
|
||||
{
|
||||
// Act & Assert
|
||||
ImpactFactorWeights.Default.IsNormalized().Should().BeTrue();
|
||||
ImpactFactorWeights.Default.TotalWeight.Should().BeApproximately(1.0, 0.001);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ImpactFactorWeights_Custom_TotalWeightCalculated()
|
||||
{
|
||||
// Arrange
|
||||
var weights = new ImpactFactorWeights
|
||||
{
|
||||
EnvironmentExposureWeight = 0.1,
|
||||
DataSensitivityWeight = 0.2,
|
||||
FleetPrevalenceWeight = 0.3,
|
||||
SlaTierWeight = 0.15,
|
||||
CvssSeverityWeight = 0.25
|
||||
};
|
||||
|
||||
// Act & Assert
|
||||
weights.TotalWeight.Should().BeApproximately(1.0, 0.001);
|
||||
weights.IsNormalized().Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region ImpactScore Tests
|
||||
|
||||
[Fact]
|
||||
public void ImpactScore_Create_CalculatesBasisPointsCorrectly()
|
||||
{
|
||||
// Arrange & Act
|
||||
var score = ImpactScore.Create(
|
||||
envExposure: 1.0,
|
||||
dataSensitivity: 0.5,
|
||||
fleetPrevalence: 0.5,
|
||||
slaTier: 0.5,
|
||||
cvssSeverity: 0.5,
|
||||
ImpactFactorWeights.Default,
|
||||
DateTimeOffset.UtcNow);
|
||||
|
||||
// Assert
|
||||
// Score = 1.0*0.2 + 0.5*0.2 + 0.5*0.15 + 0.5*0.15 + 0.5*0.3 = 0.2 + 0.1 + 0.075 + 0.075 + 0.15 = 0.6
|
||||
score.Score.Should().BeApproximately(0.6, 0.01);
|
||||
score.BasisPoints.Should().Be(6000);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,357 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Tests.Scoring.Triage;
|
||||
|
||||
public sealed class TriageQueueEvaluatorTests
|
||||
{
|
||||
private static readonly DateTimeOffset ReferenceTime = new(2026, 2, 8, 12, 0, 0, TimeSpan.Zero);
|
||||
|
||||
private readonly TriageQueueEvaluator _evaluator;
|
||||
private readonly TriageQueueOptions _options;
|
||||
|
||||
public TriageQueueEvaluatorTests()
|
||||
{
|
||||
_options = new TriageQueueOptions();
|
||||
_evaluator = new TriageQueueEvaluator(
|
||||
NullLogger<TriageQueueEvaluator>.Instance,
|
||||
Options.Create(_options));
|
||||
}
|
||||
|
||||
#region EvaluateSingle Tests
|
||||
|
||||
[Fact]
|
||||
public void EvaluateSingle_FreshObservation_ReturnsNull()
|
||||
{
|
||||
var obs = CreateObservation(ageDays: 0);
|
||||
|
||||
var result = _evaluator.EvaluateSingle(obs, ReferenceTime);
|
||||
|
||||
result.Should().BeNull("fresh observation should not be queued");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EvaluateSingle_SlightlyAged_ReturnsNull()
|
||||
{
|
||||
// 5 days old with 14-day half-life => multiplier ≈ 0.78, above approaching threshold 0.70
|
||||
var obs = CreateObservation(ageDays: 5);
|
||||
|
||||
var result = _evaluator.EvaluateSingle(obs, ReferenceTime);
|
||||
|
||||
result.Should().BeNull("multiplier 0.78 is above approaching threshold 0.70");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EvaluateSingle_ApproachingStaleness_ReturnsLowPriority()
|
||||
{
|
||||
// 8 days old with 14-day half-life => multiplier ≈ 0.67, between 0.50 and 0.70
|
||||
var obs = CreateObservation(ageDays: 8);
|
||||
|
||||
var result = _evaluator.EvaluateSingle(obs, ReferenceTime);
|
||||
|
||||
result.Should().NotBeNull();
|
||||
result!.Priority.Should().Be(TriagePriority.Low);
|
||||
result.CurrentMultiplier.Should().BeApproximately(0.67, 0.05);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EvaluateSingle_Stale_ReturnsMediumPriority()
|
||||
{
|
||||
// 14 days old (one half-life) => multiplier = 0.50, at staleness threshold
|
||||
// Actually 15 days => multiplier ≈ 0.48, below 0.50 => Medium
|
||||
var obs = CreateObservation(ageDays: 15);
|
||||
|
||||
var result = _evaluator.EvaluateSingle(obs, ReferenceTime);
|
||||
|
||||
result.Should().NotBeNull();
|
||||
result!.Priority.Should().Be(TriagePriority.Medium);
|
||||
result.DaysUntilStale.Should().BeNegative("already stale");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EvaluateSingle_HeavilyDecayed_ReturnsHighPriority()
|
||||
{
|
||||
// 28 days (two half-lives) => multiplier ≈ 0.25
|
||||
var obs = CreateObservation(ageDays: 28);
|
||||
|
||||
var result = _evaluator.EvaluateSingle(obs, ReferenceTime);
|
||||
|
||||
result.Should().NotBeNull();
|
||||
result!.Priority.Should().Be(TriagePriority.High);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EvaluateSingle_AtFloor_ReturnsCriticalPriority()
|
||||
{
|
||||
// 100 days => multiplier at floor (0.35 but compare to 0.10 threshold)
|
||||
// With floor=0.35, actual multiplier can't go below 0.35
|
||||
// Need floor < CriticalThreshold to get Critical
|
||||
// Use custom decay with floor=0.05
|
||||
var decay = ObservationDecay.WithSettings(
|
||||
ReferenceTime.AddDays(-200),
|
||||
ReferenceTime.AddDays(-200),
|
||||
halfLifeDays: 14.0,
|
||||
floor: 0.05,
|
||||
stalenessThreshold: 0.50);
|
||||
var obs = new TriageObservation
|
||||
{
|
||||
Cve = "CVE-2026-9999",
|
||||
Purl = "pkg:npm/test@1.0.0",
|
||||
TenantId = "tenant-1",
|
||||
Decay = decay
|
||||
};
|
||||
|
||||
var result = _evaluator.EvaluateSingle(obs, ReferenceTime);
|
||||
|
||||
result.Should().NotBeNull();
|
||||
result!.Priority.Should().Be(TriagePriority.Critical);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EvaluateSingle_PreservesCveAndPurl()
|
||||
{
|
||||
var obs = CreateObservation(ageDays: 20, cve: "CVE-2026-1234", purl: "pkg:maven/org.example/lib@2.0");
|
||||
|
||||
var result = _evaluator.EvaluateSingle(obs, ReferenceTime);
|
||||
|
||||
result.Should().NotBeNull();
|
||||
result!.Cve.Should().Be("CVE-2026-1234");
|
||||
result.Purl.Should().Be("pkg:maven/org.example/lib@2.0");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EvaluateSingle_SetsEvaluatedAt()
|
||||
{
|
||||
var obs = CreateObservation(ageDays: 20);
|
||||
|
||||
var result = _evaluator.EvaluateSingle(obs, ReferenceTime);
|
||||
|
||||
result.Should().NotBeNull();
|
||||
result!.EvaluatedAt.Should().Be(ReferenceTime);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EvaluateSingle_WithSignalGaps_SetsRecommendedAction()
|
||||
{
|
||||
var gaps = new List<SignalGap>
|
||||
{
|
||||
new() { Signal = "EPSS", Reason = SignalGapReason.NotQueried, Weight = 0.20 },
|
||||
new() { Signal = "VEX", Reason = SignalGapReason.NotAvailable, Weight = 0.30 }
|
||||
};
|
||||
var obs = CreateObservation(ageDays: 20, gaps: gaps);
|
||||
|
||||
var result = _evaluator.EvaluateSingle(obs, ReferenceTime);
|
||||
|
||||
result.Should().NotBeNull();
|
||||
result!.RecommendedAction.Should().Contain("EPSS");
|
||||
result.RecommendedAction.Should().Contain("VEX");
|
||||
result.SignalGaps.Should().HaveCount(2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void EvaluateSingle_ApproachingDisabled_ReturnsNull()
|
||||
{
|
||||
var options = new TriageQueueOptions { IncludeApproaching = false };
|
||||
var evaluator = new TriageQueueEvaluator(
|
||||
NullLogger<TriageQueueEvaluator>.Instance,
|
||||
Options.Create(options));
|
||||
|
||||
var obs = CreateObservation(ageDays: 8); // approaching but not stale
|
||||
|
||||
var result = evaluator.EvaluateSingle(obs, ReferenceTime);
|
||||
|
||||
result.Should().BeNull("approaching items should be excluded when IncludeApproaching=false");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region EvaluateAsync Tests
|
||||
|
||||
[Fact]
|
||||
public async Task EvaluateAsync_EmptyList_ReturnsEmptySnapshot()
|
||||
{
|
||||
var snapshot = await _evaluator.EvaluateAsync([], ReferenceTime);
|
||||
|
||||
snapshot.Items.Should().BeEmpty();
|
||||
snapshot.TotalEvaluated.Should().Be(0);
|
||||
snapshot.StaleCount.Should().Be(0);
|
||||
snapshot.ApproachingCount.Should().Be(0);
|
||||
snapshot.EvaluatedAt.Should().Be(ReferenceTime);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EvaluateAsync_MixedObservations_SortsByPriorityThenUrgency()
|
||||
{
|
||||
var observations = new List<TriageObservation>
|
||||
{
|
||||
CreateObservation(ageDays: 8, cve: "CVE-A"), // Low (approaching)
|
||||
CreateObservation(ageDays: 20, cve: "CVE-B"), // Medium (stale)
|
||||
CreateObservation(ageDays: 30, cve: "CVE-C"), // High (heavily decayed)
|
||||
CreateObservation(ageDays: 2, cve: "CVE-D"), // None (fresh)
|
||||
};
|
||||
|
||||
var snapshot = await _evaluator.EvaluateAsync(observations, ReferenceTime);
|
||||
|
||||
snapshot.TotalEvaluated.Should().Be(4);
|
||||
snapshot.Items.Should().HaveCount(3, "fresh observation should be excluded");
|
||||
snapshot.Items[0].Priority.Should().Be(TriagePriority.High, "highest priority first");
|
||||
snapshot.Items[1].Priority.Should().Be(TriagePriority.Medium);
|
||||
snapshot.Items[2].Priority.Should().Be(TriagePriority.Low);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EvaluateAsync_SamePriority_SortsByDaysUntilStale()
|
||||
{
|
||||
var observations = new List<TriageObservation>
|
||||
{
|
||||
CreateObservation(ageDays: 16, cve: "CVE-X"), // Medium, more stale
|
||||
CreateObservation(ageDays: 15, cve: "CVE-Y"), // Medium, less stale
|
||||
};
|
||||
|
||||
var snapshot = await _evaluator.EvaluateAsync(observations, ReferenceTime);
|
||||
|
||||
snapshot.Items.Should().HaveCount(2);
|
||||
// Both Medium, sorted by daysUntilStale ascending (most negative first)
|
||||
snapshot.Items[0].DaysUntilStale.Should().BeLessThan(snapshot.Items[1].DaysUntilStale);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EvaluateAsync_PrioritySummary_IsCorrect()
|
||||
{
|
||||
var observations = new List<TriageObservation>
|
||||
{
|
||||
CreateObservation(ageDays: 8, cve: "CVE-1"), // Low
|
||||
CreateObservation(ageDays: 9, cve: "CVE-2"), // Low
|
||||
CreateObservation(ageDays: 20, cve: "CVE-3"), // Medium
|
||||
};
|
||||
|
||||
var snapshot = await _evaluator.EvaluateAsync(observations, ReferenceTime);
|
||||
|
||||
snapshot.PrioritySummary.Should().ContainKey(TriagePriority.Low);
|
||||
snapshot.PrioritySummary[TriagePriority.Low].Should().Be(2);
|
||||
snapshot.PrioritySummary.Should().ContainKey(TriagePriority.Medium);
|
||||
snapshot.PrioritySummary[TriagePriority.Medium].Should().Be(1);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EvaluateAsync_RespectsMaxSnapshotItems()
|
||||
{
|
||||
var options = new TriageQueueOptions { MaxSnapshotItems = 2 };
|
||||
var evaluator = new TriageQueueEvaluator(
|
||||
NullLogger<TriageQueueEvaluator>.Instance,
|
||||
Options.Create(options));
|
||||
|
||||
var observations = Enumerable.Range(0, 10)
|
||||
.Select(i => CreateObservation(ageDays: 15 + i, cve: $"CVE-{i:D4}"))
|
||||
.ToList();
|
||||
|
||||
var snapshot = await evaluator.EvaluateAsync(observations, ReferenceTime);
|
||||
|
||||
snapshot.Items.Should().HaveCount(2);
|
||||
snapshot.TotalEvaluated.Should().Be(10);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EvaluateAsync_Deterministic_SameInputsSameOutput()
|
||||
{
|
||||
var observations = new List<TriageObservation>
|
||||
{
|
||||
CreateObservation(ageDays: 10, cve: "CVE-A"),
|
||||
CreateObservation(ageDays: 20, cve: "CVE-B"),
|
||||
CreateObservation(ageDays: 30, cve: "CVE-C"),
|
||||
};
|
||||
|
||||
var snapshot1 = await _evaluator.EvaluateAsync(observations, ReferenceTime);
|
||||
var snapshot2 = await _evaluator.EvaluateAsync(observations, ReferenceTime);
|
||||
|
||||
snapshot1.Items.Count.Should().Be(snapshot2.Items.Count);
|
||||
for (var i = 0; i < snapshot1.Items.Count; i++)
|
||||
{
|
||||
snapshot1.Items[i].Cve.Should().Be(snapshot2.Items[i].Cve);
|
||||
snapshot1.Items[i].Priority.Should().Be(snapshot2.Items[i].Priority);
|
||||
snapshot1.Items[i].CurrentMultiplier.Should().Be(snapshot2.Items[i].CurrentMultiplier);
|
||||
snapshot1.Items[i].DaysUntilStale.Should().Be(snapshot2.Items[i].DaysUntilStale);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region ClassifyPriority Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData(0.95, TriagePriority.None)]
|
||||
[InlineData(0.80, TriagePriority.None)]
|
||||
[InlineData(0.65, TriagePriority.Low)]
|
||||
[InlineData(0.55, TriagePriority.Low)]
|
||||
[InlineData(0.45, TriagePriority.Medium)]
|
||||
[InlineData(0.25, TriagePriority.High)]
|
||||
[InlineData(0.08, TriagePriority.Critical)]
|
||||
[InlineData(0.00, TriagePriority.Critical)]
|
||||
public void ClassifyPriority_ReturnsExpectedTier(double multiplier, TriagePriority expected)
|
||||
{
|
||||
var result = _evaluator.ClassifyPriority(multiplier, stalenessThreshold: 0.50);
|
||||
|
||||
result.Should().Be(expected);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region CalculateDaysUntilStale Tests
|
||||
|
||||
[Fact]
|
||||
public void CalculateDaysUntilStale_FreshObservation_ReturnsPositive()
|
||||
{
|
||||
var refreshedAt = ReferenceTime;
|
||||
var result = TriageQueueEvaluator.CalculateDaysUntilStale(
|
||||
refreshedAt, halfLifeDays: 14.0, stalenessThreshold: 0.50, floor: 0.35, ReferenceTime);
|
||||
|
||||
result.Should().BeApproximately(14.0, 0.1, "one half-life until 0.50 threshold");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculateDaysUntilStale_AlreadyStale_ReturnsNegative()
|
||||
{
|
||||
var refreshedAt = ReferenceTime.AddDays(-20);
|
||||
var result = TriageQueueEvaluator.CalculateDaysUntilStale(
|
||||
refreshedAt, halfLifeDays: 14.0, stalenessThreshold: 0.50, floor: 0.35, ReferenceTime);
|
||||
|
||||
result.Should().BeNegative("observation is past staleness threshold");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void CalculateDaysUntilStale_FloorAboveThreshold_ReturnsMaxValue()
|
||||
{
|
||||
var result = TriageQueueEvaluator.CalculateDaysUntilStale(
|
||||
ReferenceTime, halfLifeDays: 14.0, stalenessThreshold: 0.30, floor: 0.50, ReferenceTime);
|
||||
|
||||
result.Should().Be(double.MaxValue, "floor prevents reaching threshold");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static TriageObservation CreateObservation(
|
||||
double ageDays,
|
||||
string cve = "CVE-2026-0001",
|
||||
string purl = "pkg:npm/test@1.0.0",
|
||||
string tenantId = "tenant-1",
|
||||
IReadOnlyList<SignalGap>? gaps = null)
|
||||
{
|
||||
var refreshedAt = ReferenceTime.AddDays(-ageDays);
|
||||
return new TriageObservation
|
||||
{
|
||||
Cve = cve,
|
||||
Purl = purl,
|
||||
TenantId = tenantId,
|
||||
Decay = ObservationDecay.Create(refreshedAt, refreshedAt),
|
||||
SignalGaps = gaps ?? []
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,228 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using Moq;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using StellaOps.Policy.Determinization.Scoring.Triage;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Tests.Scoring.Triage;
|
||||
|
||||
public sealed class UnknownTriageQueueServiceTests
|
||||
{
|
||||
private static readonly DateTimeOffset ReferenceTime = new(2026, 2, 8, 12, 0, 0, TimeSpan.Zero);
|
||||
|
||||
private readonly TriageQueueOptions _options = new();
|
||||
private readonly TriageQueueEvaluator _evaluator;
|
||||
private readonly Mock<ITriageObservationSource> _sourceMock;
|
||||
private readonly InMemoryTriageReanalysisSink _sink;
|
||||
private readonly UnknownTriageQueueService _service;
|
||||
|
||||
public UnknownTriageQueueServiceTests()
|
||||
{
|
||||
_evaluator = new TriageQueueEvaluator(
|
||||
NullLogger<TriageQueueEvaluator>.Instance,
|
||||
Options.Create(_options));
|
||||
|
||||
_sourceMock = new Mock<ITriageObservationSource>();
|
||||
_sink = new InMemoryTriageReanalysisSink(NullLogger<InMemoryTriageReanalysisSink>.Instance);
|
||||
|
||||
var fakeTimeProvider = new FakeTimeProvider(ReferenceTime);
|
||||
|
||||
_service = new UnknownTriageQueueService(
|
||||
_evaluator,
|
||||
_sourceMock.Object,
|
||||
_sink,
|
||||
NullLogger<UnknownTriageQueueService>.Instance,
|
||||
Options.Create(_options),
|
||||
fakeTimeProvider);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExecuteCycleAsync_NoCandidates_ReturnsEmptySnapshot()
|
||||
{
|
||||
_sourceMock
|
||||
.Setup(s => s.GetCandidatesAsync(null, It.IsAny<int>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([]);
|
||||
|
||||
var snapshot = await _service.ExecuteCycleAsync();
|
||||
|
||||
snapshot.Items.Should().BeEmpty();
|
||||
snapshot.TotalEvaluated.Should().Be(0);
|
||||
_sink.Count.Should().Be(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExecuteCycleAsync_StaleObservations_EnqueuedToSink()
|
||||
{
|
||||
var candidates = new List<TriageObservation>
|
||||
{
|
||||
CreateObservation(ageDays: 20, cve: "CVE-STALE-1"), // Medium
|
||||
CreateObservation(ageDays: 30, cve: "CVE-STALE-2"), // High
|
||||
};
|
||||
|
||||
_sourceMock
|
||||
.Setup(s => s.GetCandidatesAsync(null, It.IsAny<int>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(candidates);
|
||||
|
||||
var snapshot = await _service.ExecuteCycleAsync();
|
||||
|
||||
snapshot.Items.Should().HaveCount(2);
|
||||
_sink.Count.Should().Be(2, "both stale items should be enqueued");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExecuteCycleAsync_OnlyApproaching_NotEnqueued()
|
||||
{
|
||||
var candidates = new List<TriageObservation>
|
||||
{
|
||||
CreateObservation(ageDays: 8, cve: "CVE-APPROACH"), // Low priority
|
||||
};
|
||||
|
||||
_sourceMock
|
||||
.Setup(s => s.GetCandidatesAsync(null, It.IsAny<int>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(candidates);
|
||||
|
||||
var snapshot = await _service.ExecuteCycleAsync();
|
||||
|
||||
snapshot.Items.Should().HaveCount(1, "approaching item is in snapshot");
|
||||
_sink.Count.Should().Be(0, "approaching items are not enqueued for re-analysis");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExecuteCycleAsync_MixedStaleAndFresh_OnlyStaleEnqueued()
|
||||
{
|
||||
var candidates = new List<TriageObservation>
|
||||
{
|
||||
CreateObservation(ageDays: 2, cve: "CVE-FRESH"), // None
|
||||
CreateObservation(ageDays: 8, cve: "CVE-APPROACH"), // Low
|
||||
CreateObservation(ageDays: 20, cve: "CVE-STALE"), // Medium
|
||||
};
|
||||
|
||||
_sourceMock
|
||||
.Setup(s => s.GetCandidatesAsync(null, It.IsAny<int>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync(candidates);
|
||||
|
||||
var snapshot = await _service.ExecuteCycleAsync();
|
||||
|
||||
snapshot.TotalEvaluated.Should().Be(3);
|
||||
_sink.Count.Should().Be(1, "only medium+ items are enqueued");
|
||||
|
||||
var enqueued = _sink.DrainAll();
|
||||
enqueued[0].Cve.Should().Be("CVE-STALE");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ExecuteCycleAsync_WithTenantFilter_PassesToSource()
|
||||
{
|
||||
_sourceMock
|
||||
.Setup(s => s.GetCandidatesAsync("tenant-42", It.IsAny<int>(), It.IsAny<CancellationToken>()))
|
||||
.ReturnsAsync([]);
|
||||
|
||||
await _service.ExecuteCycleAsync(tenantId: "tenant-42");
|
||||
|
||||
_sourceMock.Verify(
|
||||
s => s.GetCandidatesAsync("tenant-42", It.IsAny<int>(), It.IsAny<CancellationToken>()),
|
||||
Times.Once);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task EvaluateOnDemandAsync_DoesNotEnqueue()
|
||||
{
|
||||
var observations = new List<TriageObservation>
|
||||
{
|
||||
CreateObservation(ageDays: 20, cve: "CVE-DEMAND"),
|
||||
};
|
||||
|
||||
var snapshot = await _service.EvaluateOnDemandAsync(observations, ReferenceTime);
|
||||
|
||||
snapshot.Items.Should().HaveCount(1);
|
||||
_sink.Count.Should().Be(0, "on-demand evaluation should not auto-enqueue");
|
||||
}
|
||||
|
||||
#region InMemoryTriageReanalysisSink Tests
|
||||
|
||||
[Fact]
|
||||
public async Task InMemorySink_EnqueueAndDrain()
|
||||
{
|
||||
var items = new List<TriageItem>
|
||||
{
|
||||
CreateTriageItem("CVE-1", TriagePriority.Medium),
|
||||
CreateTriageItem("CVE-2", TriagePriority.High),
|
||||
};
|
||||
|
||||
var enqueued = await _sink.EnqueueAsync(items);
|
||||
|
||||
enqueued.Should().Be(2);
|
||||
_sink.Count.Should().Be(2);
|
||||
|
||||
var drained = _sink.DrainAll();
|
||||
drained.Should().HaveCount(2);
|
||||
_sink.Count.Should().Be(0);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void InMemorySink_TryDequeue_EmptyQueue_ReturnsFalse()
|
||||
{
|
||||
var result = _sink.TryDequeue(out var item);
|
||||
|
||||
result.Should().BeFalse();
|
||||
item.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task InMemorySink_PeekAll_DoesNotRemove()
|
||||
{
|
||||
await _sink.EnqueueAsync([CreateTriageItem("CVE-PEEK", TriagePriority.Critical)]);
|
||||
|
||||
var peeked = _sink.PeekAll();
|
||||
peeked.Should().HaveCount(1);
|
||||
_sink.Count.Should().Be(1, "peek should not remove items");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helpers
|
||||
|
||||
private static TriageObservation CreateObservation(double ageDays, string cve = "CVE-2026-0001")
|
||||
{
|
||||
var refreshedAt = ReferenceTime.AddDays(-ageDays);
|
||||
return new TriageObservation
|
||||
{
|
||||
Cve = cve,
|
||||
Purl = "pkg:npm/test@1.0.0",
|
||||
TenantId = "tenant-1",
|
||||
Decay = ObservationDecay.Create(refreshedAt, refreshedAt),
|
||||
};
|
||||
}
|
||||
|
||||
private static TriageItem CreateTriageItem(string cve, TriagePriority priority) => new()
|
||||
{
|
||||
Cve = cve,
|
||||
Purl = "pkg:npm/test@1.0.0",
|
||||
TenantId = "tenant-1",
|
||||
Decay = ObservationDecay.Fresh(ReferenceTime),
|
||||
CurrentMultiplier = 0.5,
|
||||
Priority = priority,
|
||||
AgeDays = 10,
|
||||
DaysUntilStale = -5,
|
||||
EvaluatedAt = ReferenceTime,
|
||||
};
|
||||
|
||||
/// <summary>
|
||||
/// Fake TimeProvider for deterministic testing.
|
||||
/// </summary>
|
||||
private sealed class FakeTimeProvider : TimeProvider
|
||||
{
|
||||
private readonly DateTimeOffset _now;
|
||||
|
||||
public FakeTimeProvider(DateTimeOffset now)
|
||||
{
|
||||
_now = now;
|
||||
}
|
||||
|
||||
public override DateTimeOffset GetUtcNow() => _now;
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,380 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Policy.Determinization.Evidence;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using StellaOps.Policy.Determinization.Scoring;
|
||||
using StellaOps.Policy.Scoring;
|
||||
using StellaOps.Policy.TrustLattice;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Tests.Scoring;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for TrustScoreAlgebraFacade.
|
||||
/// Verifies the unified scoring pipeline produces deterministic, attestation-ready results.
|
||||
/// </summary>
|
||||
public sealed class TrustScoreAlgebraFacadeTests
|
||||
{
|
||||
private readonly FakeTimeProvider _timeProvider = new(new DateTimeOffset(2026, 1, 15, 12, 0, 0, TimeSpan.Zero));
|
||||
|
||||
private TrustScoreAlgebraFacade CreateFacade()
|
||||
{
|
||||
var aggregator = new TrustScoreAggregator(NullLogger<TrustScoreAggregator>.Instance);
|
||||
var uncertaintyCalculator = new UncertaintyScoreCalculator();
|
||||
return new TrustScoreAlgebraFacade(
|
||||
aggregator,
|
||||
uncertaintyCalculator,
|
||||
NullLogger<TrustScoreAlgebraFacade>.Instance,
|
||||
_timeProvider);
|
||||
}
|
||||
|
||||
#region Basic Computation Tests
|
||||
|
||||
[Fact]
|
||||
public void ComputeTrustScore_EmptySignals_ReturnsValidPredicate()
|
||||
{
|
||||
// Arrange
|
||||
var facade = CreateFacade();
|
||||
var request = new TrustScoreRequest
|
||||
{
|
||||
ArtifactId = "pkg:maven/test@1.0",
|
||||
VulnerabilityId = "CVE-2024-1234"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = facade.ComputeTrustScore(request);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.Predicate.Should().NotBeNull();
|
||||
result.Predicate.ArtifactId.Should().Be("pkg:maven/test@1.0");
|
||||
result.Predicate.VulnerabilityId.Should().Be("CVE-2024-1234");
|
||||
result.Predicate.TrustScoreBps.Should().BeInRange(0, 10000);
|
||||
result.Predicate.LatticeVerdict.Should().Be(K4Value.Unknown);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeTrustScore_WithSignals_ReturnsCalculatedScore()
|
||||
{
|
||||
// Arrange
|
||||
var facade = CreateFacade();
|
||||
var signals = SignalSnapshot.Empty("CVE-2024-1234", "pkg:maven/test@1.0", _timeProvider.GetUtcNow())
|
||||
with
|
||||
{
|
||||
Reachability = SignalState<ReachabilityEvidence>.Present(
|
||||
new ReachabilityEvidence(ReachabilityStatus.Reachable, 0, 0, null)),
|
||||
Vex = SignalState<VexClaimSummary>.Present(
|
||||
new VexClaimSummary("affected", null, null, null, null, null))
|
||||
};
|
||||
|
||||
var request = new TrustScoreRequest
|
||||
{
|
||||
ArtifactId = "pkg:maven/test@1.0",
|
||||
VulnerabilityId = "CVE-2024-1234",
|
||||
Signals = signals
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = facade.ComputeTrustScore(request);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.Predicate.Dimensions.ReachabilityBps.Should().Be(10000); // Reachable = max score
|
||||
result.Predicate.Dimensions.VexBps.Should().Be(10000); // Affected = max risk
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeTrustScore_UnreachableVulnerability_LowerScore()
|
||||
{
|
||||
// Arrange
|
||||
var facade = CreateFacade();
|
||||
var signals = SignalSnapshot.Empty("CVE-2024-1234", "pkg:maven/test@1.0", _timeProvider.GetUtcNow())
|
||||
with
|
||||
{
|
||||
Reachability = SignalState<ReachabilityEvidence>.Present(
|
||||
new ReachabilityEvidence(ReachabilityStatus.Unreachable, 0, 0, null)),
|
||||
Vex = SignalState<VexClaimSummary>.Present(
|
||||
new VexClaimSummary("affected", null, null, null, null, null))
|
||||
};
|
||||
|
||||
var request = new TrustScoreRequest
|
||||
{
|
||||
ArtifactId = "pkg:maven/test@1.0",
|
||||
VulnerabilityId = "CVE-2024-1234",
|
||||
Signals = signals
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = facade.ComputeTrustScore(request);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.Predicate.Dimensions.ReachabilityBps.Should().Be(0); // Unreachable = no risk from reachability
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region K4 Lattice Tests
|
||||
|
||||
[Fact]
|
||||
public void ComputeTrustScore_ConflictingSignals_ReturnsConflict()
|
||||
{
|
||||
// Arrange: VEX says not_affected, EPSS says high probability
|
||||
var facade = CreateFacade();
|
||||
var signals = SignalSnapshot.Empty("CVE-2024-1234", "pkg:maven/test@1.0", _timeProvider.GetUtcNow())
|
||||
with
|
||||
{
|
||||
Vex = SignalState<VexClaimSummary>.Present(
|
||||
new VexClaimSummary("not_affected", null, null, null, null, null)),
|
||||
Epss = SignalState<EpssEvidence>.Present(
|
||||
new EpssEvidence(0.85, 0.95)) // High EPSS = True in K4
|
||||
};
|
||||
|
||||
var request = new TrustScoreRequest
|
||||
{
|
||||
ArtifactId = "pkg:maven/test@1.0",
|
||||
VulnerabilityId = "CVE-2024-1234",
|
||||
Signals = signals
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = facade.ComputeTrustScore(request);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.Predicate.LatticeVerdict.Should().Be(K4Value.Conflict);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeTrustScore_AllTrueSignals_ReturnsTrueVerdict()
|
||||
{
|
||||
// Arrange
|
||||
var facade = CreateFacade();
|
||||
var signals = SignalSnapshot.Empty("CVE-2024-1234", "pkg:maven/test@1.0", _timeProvider.GetUtcNow())
|
||||
with
|
||||
{
|
||||
Vex = SignalState<VexClaimSummary>.Present(
|
||||
new VexClaimSummary("affected", null, null, null, null, null)),
|
||||
Reachability = SignalState<ReachabilityEvidence>.Present(
|
||||
new ReachabilityEvidence(ReachabilityStatus.Reachable, 0, 0, null)),
|
||||
Epss = SignalState<EpssEvidence>.Present(
|
||||
new EpssEvidence(0.75, 0.90))
|
||||
};
|
||||
|
||||
var request = new TrustScoreRequest
|
||||
{
|
||||
ArtifactId = "pkg:maven/test@1.0",
|
||||
Signals = signals
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = facade.ComputeTrustScore(request);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.Predicate.LatticeVerdict.Should().Be(K4Value.True);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Score.v1 Predicate Format Tests
|
||||
|
||||
[Fact]
|
||||
public void ComputeTrustScore_ReturnsCorrectPredicateType()
|
||||
{
|
||||
// Assert
|
||||
ScoreV1Predicate.PredicateType.Should().Be("https://stella-ops.org/predicates/score/v1");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeTrustScore_IncludesAllRequiredFields()
|
||||
{
|
||||
// Arrange
|
||||
var facade = CreateFacade();
|
||||
var request = new TrustScoreRequest
|
||||
{
|
||||
ArtifactId = "pkg:maven/test@1.0",
|
||||
VulnerabilityId = "CVE-2024-1234",
|
||||
TenantId = "tenant-123"
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = facade.ComputeTrustScore(request);
|
||||
|
||||
// Assert
|
||||
result.Predicate.ArtifactId.Should().NotBeNullOrEmpty();
|
||||
result.Predicate.TrustScoreBps.Should().BeInRange(0, 10000);
|
||||
result.Predicate.Tier.Should().NotBeNullOrEmpty();
|
||||
result.Predicate.UncertaintyBps.Should().BeInRange(0, 10000);
|
||||
result.Predicate.Dimensions.Should().NotBeNull();
|
||||
result.Predicate.WeightsUsed.Should().NotBeNull();
|
||||
result.Predicate.PolicyDigest.Should().NotBeNullOrEmpty();
|
||||
result.Predicate.ComputedAt.Should().Be(_timeProvider.GetUtcNow());
|
||||
result.Predicate.TenantId.Should().Be("tenant-123");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeTrustScore_PolicyDigest_IsDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var facade = CreateFacade();
|
||||
var request = new TrustScoreRequest { ArtifactId = "pkg:maven/test@1.0" };
|
||||
|
||||
// Act
|
||||
var result1 = facade.ComputeTrustScore(request);
|
||||
var result2 = facade.ComputeTrustScore(request);
|
||||
|
||||
// Assert
|
||||
result1.Predicate.PolicyDigest.Should().Be(result2.Predicate.PolicyDigest);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Basis Point Arithmetic Tests
|
||||
|
||||
[Fact]
|
||||
public void ComputeTrustScore_WeightsSumTo10000()
|
||||
{
|
||||
// Arrange
|
||||
var facade = CreateFacade();
|
||||
var request = new TrustScoreRequest { ArtifactId = "pkg:maven/test@1.0" };
|
||||
|
||||
// Act
|
||||
var result = facade.ComputeTrustScore(request);
|
||||
|
||||
// Assert
|
||||
var weights = result.Predicate.WeightsUsed;
|
||||
var sum = weights.BaseSeverity + weights.Reachability + weights.Evidence + weights.Provenance;
|
||||
sum.Should().Be(10000, "weights must sum to 10000 basis points");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeTrustScore_FinalScoreWithinBounds()
|
||||
{
|
||||
// Arrange
|
||||
var facade = CreateFacade();
|
||||
|
||||
// Test multiple scenarios
|
||||
var scenarios = new[]
|
||||
{
|
||||
new TrustScoreRequest { ArtifactId = "pkg:a@1.0" },
|
||||
new TrustScoreRequest { ArtifactId = "pkg:b@1.0", VulnerabilityId = "CVE-2024-1234" },
|
||||
};
|
||||
|
||||
foreach (var request in scenarios)
|
||||
{
|
||||
// Act
|
||||
var result = facade.ComputeTrustScore(request);
|
||||
|
||||
// Assert
|
||||
result.Predicate.TrustScoreBps.Should().BeInRange(0, 10000);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Risk Tier Tests
|
||||
|
||||
[Theory]
|
||||
[InlineData(9500, "Critical")]
|
||||
[InlineData(8000, "High")]
|
||||
[InlineData(5000, "Medium")]
|
||||
[InlineData(2000, "Low")]
|
||||
[InlineData(500, "Info")]
|
||||
public void RiskTier_MapsCorrectly(int scoreBps, string expectedTier)
|
||||
{
|
||||
// The tier is derived from the score; verify tier naming
|
||||
var tier = scoreBps switch
|
||||
{
|
||||
>= 9000 => RiskTier.Critical,
|
||||
>= 7000 => RiskTier.High,
|
||||
>= 4000 => RiskTier.Medium,
|
||||
>= 1000 => RiskTier.Low,
|
||||
_ => RiskTier.Info
|
||||
};
|
||||
|
||||
tier.ToString().Should().Be(expectedTier);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Determinism Tests
|
||||
|
||||
[Fact]
|
||||
public void ComputeTrustScore_SameInputs_ProducesSameOutputs()
|
||||
{
|
||||
// Arrange
|
||||
var facade = CreateFacade();
|
||||
var signals = SignalSnapshot.Empty("CVE-2024-1234", "pkg:maven/test@1.0", _timeProvider.GetUtcNow())
|
||||
with
|
||||
{
|
||||
Epss = SignalState<EpssEvidence>.Present(new EpssEvidence(0.35, 0.65)),
|
||||
Reachability = SignalState<ReachabilityEvidence>.Present(
|
||||
new ReachabilityEvidence(ReachabilityStatus.Reachable, 2, 5, null))
|
||||
};
|
||||
|
||||
var request = new TrustScoreRequest
|
||||
{
|
||||
ArtifactId = "pkg:maven/test@1.0",
|
||||
VulnerabilityId = "CVE-2024-1234",
|
||||
Signals = signals
|
||||
};
|
||||
|
||||
// Act
|
||||
var result1 = facade.ComputeTrustScore(request);
|
||||
var result2 = facade.ComputeTrustScore(request);
|
||||
|
||||
// Assert
|
||||
result1.Predicate.TrustScoreBps.Should().Be(result2.Predicate.TrustScoreBps);
|
||||
result1.Predicate.LatticeVerdict.Should().Be(result2.Predicate.LatticeVerdict);
|
||||
result1.Predicate.Dimensions.Should().BeEquivalentTo(result2.Predicate.Dimensions);
|
||||
result1.Predicate.PolicyDigest.Should().Be(result2.Predicate.PolicyDigest);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Async API Tests
|
||||
|
||||
[Fact]
|
||||
public async Task ComputeTrustScoreAsync_ReturnsResult()
|
||||
{
|
||||
// Arrange
|
||||
var facade = CreateFacade();
|
||||
var request = new TrustScoreRequest { ArtifactId = "pkg:maven/test@1.0" };
|
||||
|
||||
// Act
|
||||
var result = await facade.ComputeTrustScoreAsync(request);
|
||||
|
||||
// Assert
|
||||
result.Success.Should().BeTrue();
|
||||
result.Predicate.Should().NotBeNull();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Error Handling Tests
|
||||
|
||||
[Fact]
|
||||
public void ComputeTrustScore_NullArtifactId_Throws()
|
||||
{
|
||||
// Arrange
|
||||
var facade = CreateFacade();
|
||||
var request = new TrustScoreRequest { ArtifactId = null! };
|
||||
|
||||
// Act & Assert
|
||||
Assert.Throws<ArgumentException>(() => facade.ComputeTrustScore(request));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeTrustScore_EmptyArtifactId_Throws()
|
||||
{
|
||||
// Arrange
|
||||
var facade = CreateFacade();
|
||||
var request = new TrustScoreRequest { ArtifactId = "" };
|
||||
|
||||
// Act & Assert
|
||||
Assert.Throws<ArgumentException>(() => facade.ComputeTrustScore(request));
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,234 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// WeightManifestCommandsTests.cs
|
||||
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
|
||||
// Task: T1 - Unit tests for CLI weight commands
|
||||
// Description: Tests for list, validate, diff, activate, and hash commands.
|
||||
// Uses temp directories for offline, deterministic execution.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Tests.Scoring.WeightManifest;
|
||||
|
||||
public sealed class WeightManifestCommandsTests : IDisposable
|
||||
{
|
||||
private readonly string _tempDir;
|
||||
private readonly WeightManifestLoader _loader;
|
||||
private readonly WeightManifestCommands _commands;
|
||||
|
||||
public WeightManifestCommandsTests()
|
||||
{
|
||||
_tempDir = Path.Combine(Path.GetTempPath(), $"stella-wm-cmd-{Guid.NewGuid():N}");
|
||||
Directory.CreateDirectory(_tempDir);
|
||||
|
||||
var options = Options.Create(new WeightManifestLoaderOptions
|
||||
{
|
||||
ManifestDirectory = _tempDir,
|
||||
RequireComputedHash = false,
|
||||
StrictHashVerification = false
|
||||
});
|
||||
|
||||
_loader = new WeightManifestLoader(options, NullLogger<WeightManifestLoader>.Instance);
|
||||
_commands = new WeightManifestCommands(_loader);
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (Directory.Exists(_tempDir))
|
||||
{
|
||||
Directory.Delete(_tempDir, recursive: true);
|
||||
}
|
||||
}
|
||||
|
||||
private string WriteManifest(string filename, string version, string effectiveFrom,
|
||||
double rch = 0.50, double mit = 0.50)
|
||||
{
|
||||
var path = Path.Combine(_tempDir, filename);
|
||||
var json = $$"""
|
||||
{
|
||||
"schemaVersion": "1.0.0",
|
||||
"version": "{{version}}",
|
||||
"effectiveFrom": "{{effectiveFrom}}",
|
||||
"profile": "production",
|
||||
"contentHash": "sha256:auto",
|
||||
"weights": {
|
||||
"legacy": { "rch": {{rch}}, "mit": {{mit}} },
|
||||
"advisory": {}
|
||||
}
|
||||
}
|
||||
""";
|
||||
File.WriteAllText(path, json);
|
||||
return path;
|
||||
}
|
||||
|
||||
// ── ListAsync ────────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public async Task ListAsync_EmptyDirectory_ReturnsEmptyEntries()
|
||||
{
|
||||
var result = await _commands.ListAsync();
|
||||
Assert.True(result.Entries.IsEmpty);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ListAsync_ReturnsAllManifests()
|
||||
{
|
||||
WriteManifest("a.weights.json", "v1", "2026-01-01T00:00:00Z");
|
||||
WriteManifest("b.weights.json", "v2", "2026-02-01T00:00:00Z");
|
||||
|
||||
var result = await _commands.ListAsync();
|
||||
|
||||
Assert.Equal(2, result.Entries.Length);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ListAsync_ReportsHashStatus()
|
||||
{
|
||||
WriteManifest("auto.weights.json", "v1", "2026-01-01T00:00:00Z");
|
||||
|
||||
var result = await _commands.ListAsync();
|
||||
|
||||
Assert.Equal("auto", result.Entries[0].HashStatus);
|
||||
}
|
||||
|
||||
// ── ValidateAsync ────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateAsync_AllManifests_AllValid()
|
||||
{
|
||||
WriteManifest("a.weights.json", "v1", "2026-01-01T00:00:00Z");
|
||||
WriteManifest("b.weights.json", "v2", "2026-02-01T00:00:00Z");
|
||||
|
||||
var result = await _commands.ValidateAsync();
|
||||
|
||||
Assert.True(result.AllValid);
|
||||
Assert.Equal(2, result.Entries.Length);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateAsync_SpecificFile_ValidatesOnly()
|
||||
{
|
||||
var path = WriteManifest("a.weights.json", "v1", "2026-01-01T00:00:00Z");
|
||||
|
||||
var result = await _commands.ValidateAsync(path);
|
||||
|
||||
Assert.Single(result.Entries);
|
||||
Assert.True(result.Entries[0].IsValid);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ValidateAsync_InvalidManifest_ReportsIssues()
|
||||
{
|
||||
var path = WriteManifest("bad.weights.json", "v1", "2026-01-01T00:00:00Z",
|
||||
rch: 0.90, mit: 0.90); // Sum > 1.0
|
||||
|
||||
var result = await _commands.ValidateAsync(path);
|
||||
|
||||
Assert.False(result.AllValid);
|
||||
Assert.False(result.Entries[0].IsValid);
|
||||
Assert.Contains(result.Entries[0].Issues, i => i.Contains("Legacy weights sum"));
|
||||
}
|
||||
|
||||
// ── DiffAsync ────────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public async Task DiffAsync_TwoFiles_ReturnsDiff()
|
||||
{
|
||||
var path1 = WriteManifest("a.weights.json", "v1", "2026-01-01T00:00:00Z", rch: 0.30, mit: 0.70);
|
||||
var path2 = WriteManifest("b.weights.json", "v2", "2026-02-01T00:00:00Z", rch: 0.50, mit: 0.50);
|
||||
|
||||
var diff = await _commands.DiffAsync(path1, path2);
|
||||
|
||||
Assert.True(diff.HasDifferences);
|
||||
Assert.Equal("v1", diff.FromVersion);
|
||||
Assert.Equal("v2", diff.ToVersion);
|
||||
Assert.Contains(diff.Differences, d => d.Path == "weights.legacy.rch");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DiffByVersionAsync_FindsByVersionString()
|
||||
{
|
||||
WriteManifest("a.weights.json", "v2026-01-01", "2026-01-01T00:00:00Z", rch: 0.30, mit: 0.70);
|
||||
WriteManifest("b.weights.json", "v2026-02-01", "2026-02-01T00:00:00Z", rch: 0.50, mit: 0.50);
|
||||
|
||||
var diff = await _commands.DiffByVersionAsync("v2026-01-01", "v2026-02-01");
|
||||
|
||||
Assert.True(diff.HasDifferences);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task DiffByVersionAsync_MissingVersion_Throws()
|
||||
{
|
||||
WriteManifest("a.weights.json", "v1", "2026-01-01T00:00:00Z");
|
||||
|
||||
await Assert.ThrowsAsync<WeightManifestLoadException>(() =>
|
||||
_commands.DiffByVersionAsync("v1", "v-nonexistent"));
|
||||
}
|
||||
|
||||
// ── ActivateAsync ────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public async Task ActivateAsync_NoManifests_ReturnsNotFound()
|
||||
{
|
||||
var result = await _commands.ActivateAsync(DateTimeOffset.UtcNow);
|
||||
|
||||
Assert.False(result.Found);
|
||||
Assert.Null(result.Version);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ActivateAsync_SelectsCorrectManifest()
|
||||
{
|
||||
WriteManifest("a.weights.json", "v2026-01-01", "2026-01-01T00:00:00Z");
|
||||
WriteManifest("b.weights.json", "v2026-02-01", "2026-02-01T00:00:00Z");
|
||||
|
||||
var result = await _commands.ActivateAsync(DateTimeOffset.Parse("2026-01-15T00:00:00Z"));
|
||||
|
||||
Assert.True(result.Found);
|
||||
Assert.Equal("v2026-01-01", result.Version);
|
||||
}
|
||||
|
||||
// ── HashAsync ────────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public async Task HashAsync_ComputesHash()
|
||||
{
|
||||
var path = WriteManifest("test.weights.json", "v1", "2026-01-01T00:00:00Z");
|
||||
|
||||
var result = await _commands.HashAsync(path);
|
||||
|
||||
Assert.StartsWith("sha256:", result.ComputedHash);
|
||||
Assert.True(result.HadPlaceholder);
|
||||
Assert.False(result.WrittenBack);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task HashAsync_WriteBack_ReplacesPlaceholder()
|
||||
{
|
||||
var path = WriteManifest("test.weights.json", "v1", "2026-01-01T00:00:00Z");
|
||||
|
||||
var result = await _commands.HashAsync(path, writeBack: true);
|
||||
|
||||
Assert.True(result.WrittenBack);
|
||||
|
||||
var updatedContent = File.ReadAllText(path);
|
||||
Assert.DoesNotContain("sha256:auto", updatedContent);
|
||||
Assert.Contains(result.ComputedHash, updatedContent);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task HashAsync_WriteBack_Idempotent()
|
||||
{
|
||||
var path = WriteManifest("test.weights.json", "v1", "2026-01-01T00:00:00Z");
|
||||
|
||||
var result1 = await _commands.HashAsync(path, writeBack: true);
|
||||
var result2 = await _commands.HashAsync(path, writeBack: false);
|
||||
|
||||
// Hash computed from already-replaced content should be the same
|
||||
Assert.Equal(result1.ComputedHash, result2.ComputedHash);
|
||||
Assert.False(result2.HadPlaceholder); // Placeholder is gone now
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,215 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// WeightManifestHashComputerTests.cs
|
||||
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
|
||||
// Task: T1 - Unit tests for content hash computation
|
||||
// Description: Deterministic tests for SHA-256 content hashing of weight
|
||||
// manifests, including canonical serialization and auto-replace.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Tests.Scoring.WeightManifest;
|
||||
|
||||
public sealed class WeightManifestHashComputerTests
|
||||
{
|
||||
private const string MinimalManifest = """
|
||||
{
|
||||
"schemaVersion": "1.0.0",
|
||||
"version": "v2026-01-01",
|
||||
"effectiveFrom": "2026-01-01T00:00:00Z",
|
||||
"contentHash": "sha256:auto",
|
||||
"weights": {
|
||||
"legacy": { "rch": 0.50, "mit": 0.50 },
|
||||
"advisory": {}
|
||||
}
|
||||
}
|
||||
""";
|
||||
|
||||
// ── Determinism ──────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void ComputeFromJson_IsDeterministic_SameInputSameHash()
|
||||
{
|
||||
var hash1 = WeightManifestHashComputer.ComputeFromJson(MinimalManifest);
|
||||
var hash2 = WeightManifestHashComputer.ComputeFromJson(MinimalManifest);
|
||||
|
||||
Assert.Equal(hash1, hash2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeFromJson_ReturnsCorrectFormat()
|
||||
{
|
||||
var hash = WeightManifestHashComputer.ComputeFromJson(MinimalManifest);
|
||||
|
||||
Assert.StartsWith("sha256:", hash);
|
||||
Assert.Equal(71, hash.Length); // "sha256:" (7) + 64 hex chars
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeFromJson_ExcludesContentHashField()
|
||||
{
|
||||
// Two manifests identical except for contentHash should produce same hash
|
||||
var manifestWithAuto = """
|
||||
{
|
||||
"schemaVersion": "1.0.0",
|
||||
"version": "v2026-01-01",
|
||||
"contentHash": "sha256:auto",
|
||||
"weights": { "legacy": {}, "advisory": {} }
|
||||
}
|
||||
""";
|
||||
|
||||
var manifestWithDifferentHash = """
|
||||
{
|
||||
"schemaVersion": "1.0.0",
|
||||
"version": "v2026-01-01",
|
||||
"contentHash": "sha256:aaaa",
|
||||
"weights": { "legacy": {}, "advisory": {} }
|
||||
}
|
||||
""";
|
||||
|
||||
var hash1 = WeightManifestHashComputer.ComputeFromJson(manifestWithAuto);
|
||||
var hash2 = WeightManifestHashComputer.ComputeFromJson(manifestWithDifferentHash);
|
||||
|
||||
Assert.Equal(hash1, hash2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeFromJson_DifferentContent_DifferentHash()
|
||||
{
|
||||
var manifestA = """
|
||||
{ "schemaVersion": "1.0.0", "version": "v1", "contentHash": "sha256:auto",
|
||||
"weights": { "legacy": { "rch": 0.30 }, "advisory": {} } }
|
||||
""";
|
||||
|
||||
var manifestB = """
|
||||
{ "schemaVersion": "1.0.0", "version": "v1", "contentHash": "sha256:auto",
|
||||
"weights": { "legacy": { "rch": 0.70 }, "advisory": {} } }
|
||||
""";
|
||||
|
||||
var hashA = WeightManifestHashComputer.ComputeFromJson(manifestA);
|
||||
var hashB = WeightManifestHashComputer.ComputeFromJson(manifestB);
|
||||
|
||||
Assert.NotEqual(hashA, hashB);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeFromJson_PropertyOrderDoesNotMatter()
|
||||
{
|
||||
// JSON with properties in different order should produce same hash
|
||||
var manifestOrdered = """
|
||||
{
|
||||
"schemaVersion": "1.0.0",
|
||||
"version": "v1",
|
||||
"contentHash": "sha256:auto",
|
||||
"weights": { "legacy": { "a": 0.5, "b": 0.5 }, "advisory": {} }
|
||||
}
|
||||
""";
|
||||
|
||||
var manifestReversed = """
|
||||
{
|
||||
"weights": { "advisory": {}, "legacy": { "b": 0.5, "a": 0.5 } },
|
||||
"contentHash": "sha256:auto",
|
||||
"version": "v1",
|
||||
"schemaVersion": "1.0.0"
|
||||
}
|
||||
""";
|
||||
|
||||
var hash1 = WeightManifestHashComputer.ComputeFromJson(manifestOrdered);
|
||||
var hash2 = WeightManifestHashComputer.ComputeFromJson(manifestReversed);
|
||||
|
||||
Assert.Equal(hash1, hash2);
|
||||
}
|
||||
|
||||
// ── Verify ───────────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void Verify_AutoPlaceholder_ReturnsFalse()
|
||||
{
|
||||
Assert.False(WeightManifestHashComputer.Verify(MinimalManifest, "sha256:auto"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Verify_EmptyHash_ReturnsFalse()
|
||||
{
|
||||
Assert.False(WeightManifestHashComputer.Verify(MinimalManifest, ""));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Verify_CorrectHash_ReturnsTrue()
|
||||
{
|
||||
var hash = WeightManifestHashComputer.ComputeFromJson(MinimalManifest);
|
||||
Assert.True(WeightManifestHashComputer.Verify(MinimalManifest, hash));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Verify_WrongHash_ReturnsFalse()
|
||||
{
|
||||
Assert.False(WeightManifestHashComputer.Verify(
|
||||
MinimalManifest, "sha256:0000000000000000000000000000000000000000000000000000000000000000"));
|
||||
}
|
||||
|
||||
// ── ReplaceAutoHash ──────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void ReplaceAutoHash_ReplacesPlaceholder()
|
||||
{
|
||||
var (updatedJson, computedHash) = WeightManifestHashComputer.ReplaceAutoHash(MinimalManifest);
|
||||
|
||||
Assert.DoesNotContain("sha256:auto", updatedJson);
|
||||
Assert.Contains(computedHash, updatedJson);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ReplaceAutoHash_ComputedHashVerifies()
|
||||
{
|
||||
var (updatedJson, computedHash) = WeightManifestHashComputer.ReplaceAutoHash(MinimalManifest);
|
||||
|
||||
// After replacement, the hash stored in the JSON should match
|
||||
// what ComputeFromJson produces for the original content
|
||||
Assert.True(WeightManifestHashComputer.Verify(updatedJson, computedHash));
|
||||
}
|
||||
|
||||
// ── ComputeFromManifest ──────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void ComputeFromManifest_ProducesSameHashAsComputeFromJson()
|
||||
{
|
||||
// Build a manifest that matches our minimal JSON
|
||||
var manifest = new WeightManifestDocument
|
||||
{
|
||||
SchemaVersion = "1.0.0",
|
||||
Version = "v2026-01-01",
|
||||
EffectiveFrom = DateTimeOffset.Parse("2026-01-01T00:00:00Z"),
|
||||
ContentHash = "sha256:auto",
|
||||
Weights = new WeightManifestWeights()
|
||||
};
|
||||
|
||||
// Both paths should produce a valid sha256 hash
|
||||
var hashFromManifest = WeightManifestHashComputer.ComputeFromManifest(manifest);
|
||||
Assert.StartsWith("sha256:", hashFromManifest);
|
||||
Assert.Equal(71, hashFromManifest.Length);
|
||||
}
|
||||
|
||||
// ── Edge cases ───────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void ComputeFromJson_ThrowsOnEmpty()
|
||||
{
|
||||
Assert.Throws<ArgumentException>(() =>
|
||||
WeightManifestHashComputer.ComputeFromJson(""));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeFromJson_ThrowsOnNull()
|
||||
{
|
||||
Assert.Throws<ArgumentException>(() =>
|
||||
WeightManifestHashComputer.ComputeFromJson(null!));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeFromManifest_ThrowsOnNull()
|
||||
{
|
||||
Assert.Throws<ArgumentNullException>(() =>
|
||||
WeightManifestHashComputer.ComputeFromManifest(null!));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,448 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// WeightManifestLoaderTests.cs
|
||||
// Sprint: SPRINT_20260208_051_Policy_versioned_weight_manifests
|
||||
// Task: T1 - Unit tests for manifest loader
|
||||
// Description: Tests for file-based manifest discovery, loading, validation,
|
||||
// effectiveFrom selection, and diff computation. Uses temp dirs
|
||||
// for offline, deterministic test execution.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Policy.Determinization.Scoring.WeightManifest;
|
||||
|
||||
namespace StellaOps.Policy.Determinization.Tests.Scoring.WeightManifest;
|
||||
|
||||
public sealed class WeightManifestLoaderTests : IDisposable
|
||||
{
|
||||
private readonly string _tempDir;
|
||||
private readonly WeightManifestLoader _loader;
|
||||
|
||||
public WeightManifestLoaderTests()
|
||||
{
|
||||
_tempDir = Path.Combine(Path.GetTempPath(), $"stella-wm-test-{Guid.NewGuid():N}");
|
||||
Directory.CreateDirectory(_tempDir);
|
||||
|
||||
var options = Options.Create(new WeightManifestLoaderOptions
|
||||
{
|
||||
ManifestDirectory = _tempDir,
|
||||
RequireComputedHash = false,
|
||||
StrictHashVerification = false
|
||||
});
|
||||
|
||||
_loader = new WeightManifestLoader(options, NullLogger<WeightManifestLoader>.Instance);
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (Directory.Exists(_tempDir))
|
||||
{
|
||||
Directory.Delete(_tempDir, recursive: true);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Helpers ──────────────────────────────────────────────────────────
|
||||
|
||||
private string WriteManifest(string filename, string version, string effectiveFrom,
|
||||
double rch = 0.50, double mit = 0.50, string contentHash = "sha256:auto")
|
||||
{
|
||||
var path = Path.Combine(_tempDir, filename);
|
||||
var json = $$"""
|
||||
{
|
||||
"schemaVersion": "1.0.0",
|
||||
"version": "{{version}}",
|
||||
"effectiveFrom": "{{effectiveFrom}}",
|
||||
"profile": "production",
|
||||
"contentHash": "{{contentHash}}",
|
||||
"weights": {
|
||||
"legacy": { "rch": {{rch}}, "mit": {{mit}} },
|
||||
"advisory": {}
|
||||
}
|
||||
}
|
||||
""";
|
||||
File.WriteAllText(path, json);
|
||||
return path;
|
||||
}
|
||||
|
||||
// ── ListAsync ────────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public async Task ListAsync_EmptyDirectory_ReturnsEmpty()
|
||||
{
|
||||
var results = await _loader.ListAsync();
|
||||
Assert.True(results.IsEmpty);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ListAsync_DiscoversSingleManifest()
|
||||
{
|
||||
WriteManifest("v2026-01-01.weights.json", "v2026-01-01", "2026-01-01T00:00:00Z");
|
||||
|
||||
var results = await _loader.ListAsync();
|
||||
|
||||
Assert.Single(results);
|
||||
Assert.Equal("v2026-01-01", results[0].Manifest.Version);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ListAsync_MultipleManifests_SortedByEffectiveFromDescending()
|
||||
{
|
||||
WriteManifest("v2026-01-01.weights.json", "v2026-01-01", "2026-01-01T00:00:00Z");
|
||||
WriteManifest("v2026-02-01.weights.json", "v2026-02-01", "2026-02-01T00:00:00Z");
|
||||
WriteManifest("v2026-01-15.weights.json", "v2026-01-15", "2026-01-15T00:00:00Z");
|
||||
|
||||
var results = await _loader.ListAsync();
|
||||
|
||||
Assert.Equal(3, results.Length);
|
||||
Assert.Equal("v2026-02-01", results[0].Manifest.Version);
|
||||
Assert.Equal("v2026-01-15", results[1].Manifest.Version);
|
||||
Assert.Equal("v2026-01-01", results[2].Manifest.Version);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ListAsync_SkipsInvalidFiles()
|
||||
{
|
||||
WriteManifest("valid.weights.json", "v1", "2026-01-01T00:00:00Z");
|
||||
File.WriteAllText(Path.Combine(_tempDir, "invalid.weights.json"), "not valid json {{{");
|
||||
|
||||
var results = await _loader.ListAsync();
|
||||
|
||||
Assert.Single(results);
|
||||
Assert.Equal("v1", results[0].Manifest.Version);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ListAsync_NonexistentDirectory_ReturnsEmpty()
|
||||
{
|
||||
var options = Options.Create(new WeightManifestLoaderOptions
|
||||
{
|
||||
ManifestDirectory = Path.Combine(_tempDir, "nonexistent")
|
||||
});
|
||||
var loader = new WeightManifestLoader(options, NullLogger<WeightManifestLoader>.Instance);
|
||||
|
||||
var results = await loader.ListAsync();
|
||||
|
||||
Assert.True(results.IsEmpty);
|
||||
}
|
||||
|
||||
// ── LoadAsync ────────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public async Task LoadAsync_ValidFile_ReturnsLoadResult()
|
||||
{
|
||||
var path = WriteManifest("test.weights.json", "v1", "2026-01-01T00:00:00Z");
|
||||
|
||||
var result = await _loader.LoadAsync(path);
|
||||
|
||||
Assert.Equal("v1", result.Manifest.Version);
|
||||
Assert.Equal("1.0.0", result.Manifest.SchemaVersion);
|
||||
Assert.StartsWith("sha256:", result.ComputedHash);
|
||||
Assert.False(result.HashVerified); // auto placeholder, not computed
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task LoadAsync_WithComputedHash_VerifiesCorrectly()
|
||||
{
|
||||
var path = WriteManifest("test.weights.json", "v1", "2026-01-01T00:00:00Z");
|
||||
|
||||
// Load, compute hash, rewrite with correct hash
|
||||
var json = File.ReadAllText(path);
|
||||
var (updatedJson, computedHash) = WeightManifestHashComputer.ReplaceAutoHash(json);
|
||||
File.WriteAllText(path, updatedJson);
|
||||
|
||||
var result = await _loader.LoadAsync(path);
|
||||
|
||||
Assert.True(result.HashVerified);
|
||||
Assert.Equal(computedHash, result.ComputedHash);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task LoadAsync_NonexistentFile_Throws()
|
||||
{
|
||||
await Assert.ThrowsAsync<WeightManifestLoadException>(() =>
|
||||
_loader.LoadAsync(Path.Combine(_tempDir, "missing.json")));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task LoadAsync_InvalidJson_Throws()
|
||||
{
|
||||
var path = Path.Combine(_tempDir, "bad.json");
|
||||
File.WriteAllText(path, "not json");
|
||||
|
||||
await Assert.ThrowsAsync<WeightManifestLoadException>(() =>
|
||||
_loader.LoadAsync(path));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task LoadAsync_StrictMode_HashMismatch_Throws()
|
||||
{
|
||||
var path = WriteManifest("test.weights.json", "v1", "2026-01-01T00:00:00Z",
|
||||
contentHash: "sha256:0000000000000000000000000000000000000000000000000000000000000000");
|
||||
|
||||
var strictOptions = Options.Create(new WeightManifestLoaderOptions
|
||||
{
|
||||
ManifestDirectory = _tempDir,
|
||||
StrictHashVerification = true
|
||||
});
|
||||
var strictLoader = new WeightManifestLoader(strictOptions, NullLogger<WeightManifestLoader>.Instance);
|
||||
|
||||
await Assert.ThrowsAsync<WeightManifestLoadException>(() =>
|
||||
strictLoader.LoadAsync(path));
|
||||
}
|
||||
|
||||
// ── SelectEffectiveAsync ─────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public async Task SelectEffectiveAsync_NoManifests_ReturnsNull()
|
||||
{
|
||||
var result = await _loader.SelectEffectiveAsync(DateTimeOffset.UtcNow);
|
||||
Assert.Null(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SelectEffectiveAsync_SelectsMostRecentEffective()
|
||||
{
|
||||
WriteManifest("a.weights.json", "v2026-01-01", "2026-01-01T00:00:00Z");
|
||||
WriteManifest("b.weights.json", "v2026-02-01", "2026-02-01T00:00:00Z");
|
||||
WriteManifest("c.weights.json", "v2026-03-01", "2026-03-01T00:00:00Z");
|
||||
|
||||
var referenceDate = DateTimeOffset.Parse("2026-02-15T00:00:00Z");
|
||||
var result = await _loader.SelectEffectiveAsync(referenceDate);
|
||||
|
||||
Assert.NotNull(result);
|
||||
Assert.Equal("v2026-02-01", result.Manifest.Version);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SelectEffectiveAsync_DateBeforeAll_ReturnsNull()
|
||||
{
|
||||
WriteManifest("a.weights.json", "v2026-06-01", "2026-06-01T00:00:00Z");
|
||||
|
||||
var referenceDate = DateTimeOffset.Parse("2026-01-01T00:00:00Z");
|
||||
var result = await _loader.SelectEffectiveAsync(referenceDate);
|
||||
|
||||
Assert.Null(result);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SelectEffectiveAsync_ExactDate_Matches()
|
||||
{
|
||||
WriteManifest("a.weights.json", "v2026-01-15", "2026-01-15T00:00:00Z");
|
||||
|
||||
var referenceDate = DateTimeOffset.Parse("2026-01-15T00:00:00Z");
|
||||
var result = await _loader.SelectEffectiveAsync(referenceDate);
|
||||
|
||||
Assert.NotNull(result);
|
||||
Assert.Equal("v2026-01-15", result.Manifest.Version);
|
||||
}
|
||||
|
||||
// ── Validate ─────────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public async Task Validate_ValidManifest_NoIssues()
|
||||
{
|
||||
var path = WriteManifest("valid.weights.json", "v1", "2026-01-01T00:00:00Z");
|
||||
var result = await _loader.LoadAsync(path);
|
||||
|
||||
var issues = _loader.Validate(result);
|
||||
|
||||
Assert.True(issues.IsEmpty);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Validate_UnsupportedSchema_ReportsIssue()
|
||||
{
|
||||
var path = Path.Combine(_tempDir, "bad-schema.weights.json");
|
||||
File.WriteAllText(path, """
|
||||
{
|
||||
"schemaVersion": "2.0.0",
|
||||
"version": "v1",
|
||||
"effectiveFrom": "2026-01-01T00:00:00Z",
|
||||
"contentHash": "sha256:auto",
|
||||
"weights": { "legacy": {}, "advisory": {} }
|
||||
}
|
||||
""");
|
||||
|
||||
var result = await _loader.LoadAsync(path);
|
||||
var issues = _loader.Validate(result);
|
||||
|
||||
Assert.Single(issues);
|
||||
Assert.Contains("Unsupported schema version", issues[0]);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Validate_UnnormalizedLegacyWeights_ReportsIssue()
|
||||
{
|
||||
var path = WriteManifest("bad-weights.weights.json", "v1", "2026-01-01T00:00:00Z",
|
||||
rch: 0.80, mit: 0.80); // Sum = 1.60
|
||||
|
||||
var result = await _loader.LoadAsync(path);
|
||||
var issues = _loader.Validate(result);
|
||||
|
||||
Assert.Contains(issues, i => i.Contains("Legacy weights sum"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task Validate_RequireComputedHash_AutoPlaceholder_ReportsIssue()
|
||||
{
|
||||
var path = WriteManifest("auto.weights.json", "v1", "2026-01-01T00:00:00Z");
|
||||
|
||||
var strictOptions = Options.Create(new WeightManifestLoaderOptions
|
||||
{
|
||||
ManifestDirectory = _tempDir,
|
||||
RequireComputedHash = true
|
||||
});
|
||||
var strictLoader = new WeightManifestLoader(strictOptions, NullLogger<WeightManifestLoader>.Instance);
|
||||
|
||||
var result = await strictLoader.LoadAsync(path);
|
||||
var issues = strictLoader.Validate(result);
|
||||
|
||||
Assert.Contains(issues, i => i.Contains("sha256:auto"));
|
||||
}
|
||||
|
||||
// ── Diff ─────────────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void Diff_IdenticalManifests_NoDifferences()
|
||||
{
|
||||
var manifest = new WeightManifestDocument
|
||||
{
|
||||
SchemaVersion = "1.0.0",
|
||||
Version = "v1",
|
||||
EffectiveFrom = DateTimeOffset.Parse("2026-01-01T00:00:00Z"),
|
||||
ContentHash = "sha256:auto",
|
||||
Weights = new WeightManifestWeights
|
||||
{
|
||||
Legacy = ImmutableDictionary<string, double>.Empty.Add("rch", 0.50).Add("mit", 0.50),
|
||||
Advisory = ImmutableDictionary<string, double>.Empty
|
||||
}
|
||||
};
|
||||
|
||||
var diff = _loader.Diff(manifest, manifest);
|
||||
|
||||
Assert.False(diff.HasDifferences);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Diff_DifferentVersions_ShowsDifference()
|
||||
{
|
||||
var from = new WeightManifestDocument
|
||||
{
|
||||
SchemaVersion = "1.0.0",
|
||||
Version = "v1",
|
||||
EffectiveFrom = DateTimeOffset.Parse("2026-01-01T00:00:00Z"),
|
||||
ContentHash = "sha256:auto",
|
||||
Weights = new WeightManifestWeights()
|
||||
};
|
||||
|
||||
var to = from with { Version = "v2" };
|
||||
|
||||
var diff = _loader.Diff(from, to);
|
||||
|
||||
Assert.True(diff.HasDifferences);
|
||||
Assert.Contains(diff.Differences, d => d.Path == "version" && d.OldValue == "v1" && d.NewValue == "v2");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Diff_DifferentWeights_ShowsDifferences()
|
||||
{
|
||||
var from = new WeightManifestDocument
|
||||
{
|
||||
SchemaVersion = "1.0.0",
|
||||
Version = "v1",
|
||||
EffectiveFrom = DateTimeOffset.Parse("2026-01-01T00:00:00Z"),
|
||||
ContentHash = "sha256:auto",
|
||||
Weights = new WeightManifestWeights
|
||||
{
|
||||
Legacy = ImmutableDictionary<string, double>.Empty.Add("rch", 0.30),
|
||||
Advisory = ImmutableDictionary<string, double>.Empty
|
||||
}
|
||||
};
|
||||
|
||||
var to = from with
|
||||
{
|
||||
Version = "v2",
|
||||
Weights = new WeightManifestWeights
|
||||
{
|
||||
Legacy = ImmutableDictionary<string, double>.Empty.Add("rch", 0.50),
|
||||
Advisory = ImmutableDictionary<string, double>.Empty
|
||||
}
|
||||
};
|
||||
|
||||
var diff = _loader.Diff(from, to);
|
||||
|
||||
Assert.True(diff.HasDifferences);
|
||||
Assert.Contains(diff.Differences, d => d.Path == "weights.legacy.rch");
|
||||
Assert.Equal("v1", diff.FromVersion);
|
||||
Assert.Equal("v2", diff.ToVersion);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Diff_AddedWeight_ShowsAsNewField()
|
||||
{
|
||||
var from = new WeightManifestDocument
|
||||
{
|
||||
SchemaVersion = "1.0.0",
|
||||
Version = "v1",
|
||||
EffectiveFrom = DateTimeOffset.Parse("2026-01-01T00:00:00Z"),
|
||||
ContentHash = "sha256:auto",
|
||||
Weights = new WeightManifestWeights
|
||||
{
|
||||
Legacy = ImmutableDictionary<string, double>.Empty.Add("rch", 0.30),
|
||||
Advisory = ImmutableDictionary<string, double>.Empty
|
||||
}
|
||||
};
|
||||
|
||||
var to = from with
|
||||
{
|
||||
Version = "v2",
|
||||
Weights = new WeightManifestWeights
|
||||
{
|
||||
Legacy = ImmutableDictionary<string, double>.Empty
|
||||
.Add("rch", 0.30)
|
||||
.Add("mit", 0.20),
|
||||
Advisory = ImmutableDictionary<string, double>.Empty
|
||||
}
|
||||
};
|
||||
|
||||
var diff = _loader.Diff(from, to);
|
||||
|
||||
Assert.True(diff.HasDifferences);
|
||||
var mitDiff = diff.Differences.First(d => d.Path == "weights.legacy.mit");
|
||||
Assert.Null(mitDiff.OldValue);
|
||||
Assert.NotNull(mitDiff.NewValue);
|
||||
}
|
||||
|
||||
// ── WeightManifestDocument model ─────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void HasComputedHash_AutoPlaceholder_ReturnsFalse()
|
||||
{
|
||||
var manifest = new WeightManifestDocument
|
||||
{
|
||||
SchemaVersion = "1.0.0",
|
||||
Version = "v1",
|
||||
EffectiveFrom = DateTimeOffset.UtcNow,
|
||||
ContentHash = "sha256:auto",
|
||||
Weights = new WeightManifestWeights()
|
||||
};
|
||||
|
||||
Assert.False(manifest.HasComputedHash);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void HasComputedHash_RealHash_ReturnsTrue()
|
||||
{
|
||||
var manifest = new WeightManifestDocument
|
||||
{
|
||||
SchemaVersion = "1.0.0",
|
||||
Version = "v1",
|
||||
EffectiveFrom = DateTimeOffset.UtcNow,
|
||||
ContentHash = "sha256:abcdef0123456789abcdef0123456789abcdef0123456789abcdef0123456789",
|
||||
Weights = new WeightManifestWeights()
|
||||
};
|
||||
|
||||
Assert.True(manifest.HasComputedHash);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,281 @@
|
||||
// <copyright file="DeltaIfPresentIntegrationTests.cs" company="StellaOps">
|
||||
// SPDX-License-Identifier: BUSL-1.1
|
||||
// Sprint: SPRINT_20260208_043_Policy_delta_if_present_calculations_for_missing_signals (TSF-004)
|
||||
// Task: T2 - Wire API/CLI/UI integration tests
|
||||
// </copyright>
|
||||
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Configuration;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Time.Testing;
|
||||
using StellaOps.Policy.Determinization;
|
||||
using StellaOps.Policy.Determinization.Evidence;
|
||||
using StellaOps.Policy.Determinization.Models;
|
||||
using StellaOps.Policy.Determinization.Scoring;
|
||||
using StellaOps.Policy.Engine.DependencyInjection;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Policy.Engine.Tests.Integration;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for delta-if-present service DI wiring and functionality.
|
||||
/// </summary>
|
||||
[Trait("Category", "Integration")]
|
||||
[Trait("Sprint", "20260208.043")]
|
||||
[Trait("Task", "T2")]
|
||||
public sealed class DeltaIfPresentIntegrationTests
|
||||
{
|
||||
private readonly FakeTimeProvider _timeProvider = new();
|
||||
|
||||
private static ServiceCollection CreateServicesWithConfiguration()
|
||||
{
|
||||
var services = new ServiceCollection();
|
||||
var configuration = new ConfigurationBuilder()
|
||||
.AddInMemoryCollection()
|
||||
.Build();
|
||||
services.AddSingleton<IConfiguration>(configuration);
|
||||
return services;
|
||||
}
|
||||
|
||||
#region DI Wiring Tests
|
||||
|
||||
[Fact(DisplayName = "AddDeterminization registers IDeltaIfPresentCalculator")]
|
||||
public void AddDeterminization_RegistersDeltaIfPresentCalculator()
|
||||
{
|
||||
// Arrange
|
||||
var services = CreateServicesWithConfiguration();
|
||||
|
||||
// Act
|
||||
services.AddLogging();
|
||||
services.AddSingleton<TimeProvider>(_timeProvider);
|
||||
services.AddDeterminization();
|
||||
var provider = services.BuildServiceProvider();
|
||||
|
||||
// Assert
|
||||
var calculator = provider.GetService<IDeltaIfPresentCalculator>();
|
||||
calculator.Should().NotBeNull();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "DeltaIfPresentCalculator is registered as singleton")]
|
||||
public void DeltaIfPresentCalculator_IsRegisteredAsSingleton()
|
||||
{
|
||||
// Arrange
|
||||
var services = CreateServicesWithConfiguration();
|
||||
services.AddLogging();
|
||||
services.AddSingleton<TimeProvider>(_timeProvider);
|
||||
services.AddDeterminization();
|
||||
var provider = services.BuildServiceProvider();
|
||||
|
||||
// Act
|
||||
var first = provider.GetService<IDeltaIfPresentCalculator>();
|
||||
var second = provider.GetService<IDeltaIfPresentCalculator>();
|
||||
|
||||
// Assert
|
||||
first.Should().BeSameAs(second);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "AddDeterminizationEngine also registers delta-if-present")]
|
||||
public void AddDeterminizationEngine_IncludesDeltaIfPresentCalculator()
|
||||
{
|
||||
// Arrange
|
||||
var services = CreateServicesWithConfiguration();
|
||||
services.AddLogging();
|
||||
services.AddSingleton<TimeProvider>(_timeProvider);
|
||||
services.AddDeterminizationEngine();
|
||||
var provider = services.BuildServiceProvider();
|
||||
|
||||
// Assert
|
||||
var calculator = provider.GetService<IDeltaIfPresentCalculator>();
|
||||
calculator.Should().NotBeNull();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region End-to-End Service Tests
|
||||
|
||||
[Fact(DisplayName = "CalculateSingleSignalDelta works through DI container")]
|
||||
public void CalculateSingleSignalDelta_WorksThroughDI()
|
||||
{
|
||||
// Arrange
|
||||
var services = CreateServicesWithConfiguration();
|
||||
services.AddLogging();
|
||||
services.AddSingleton<TimeProvider>(_timeProvider);
|
||||
services.AddDeterminization();
|
||||
var provider = services.BuildServiceProvider();
|
||||
|
||||
var calculator = provider.GetRequiredService<IDeltaIfPresentCalculator>();
|
||||
var snapshot = CreatePartialSnapshot();
|
||||
|
||||
// Act
|
||||
var result = calculator.CalculateSingleSignalDelta(snapshot, "VEX", 0.0);
|
||||
|
||||
// Assert
|
||||
result.Should().NotBeNull();
|
||||
result.Signal.Should().Be("VEX");
|
||||
result.HypotheticalEntropy.Should().BeLessThan(result.CurrentEntropy);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "CalculateFullAnalysis returns prioritized gaps")]
|
||||
public void CalculateFullAnalysis_ReturnsPrioritizedGaps()
|
||||
{
|
||||
// Arrange
|
||||
var services = CreateServicesWithConfiguration();
|
||||
services.AddLogging();
|
||||
services.AddSingleton<TimeProvider>(_timeProvider);
|
||||
services.AddDeterminization();
|
||||
var provider = services.BuildServiceProvider();
|
||||
|
||||
var calculator = provider.GetRequiredService<IDeltaIfPresentCalculator>();
|
||||
var snapshot = CreatePartialSnapshot();
|
||||
|
||||
// Act
|
||||
var analysis = calculator.CalculateFullAnalysis(snapshot);
|
||||
|
||||
// Assert
|
||||
analysis.Should().NotBeNull();
|
||||
analysis.GapAnalysis.Should().HaveCountGreaterThan(0);
|
||||
analysis.PrioritizedGaps.Should().NotBeEmpty();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "CalculateScoreBounds returns valid range")]
|
||||
public void CalculateScoreBounds_ReturnsValidRange()
|
||||
{
|
||||
// Arrange
|
||||
var services = CreateServicesWithConfiguration();
|
||||
services.AddLogging();
|
||||
services.AddSingleton<TimeProvider>(_timeProvider);
|
||||
services.AddDeterminization();
|
||||
var provider = services.BuildServiceProvider();
|
||||
|
||||
var calculator = provider.GetRequiredService<IDeltaIfPresentCalculator>();
|
||||
var snapshot = CreatePartialSnapshot();
|
||||
|
||||
// Act
|
||||
var bounds = calculator.CalculateScoreBounds(snapshot);
|
||||
|
||||
// Assert
|
||||
bounds.Should().NotBeNull();
|
||||
bounds.MinimumScore.Should().BeLessThanOrEqualTo(bounds.MaximumScore);
|
||||
bounds.Range.Should().BeGreaterThan(0);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Calculator produces deterministic results through DI")]
|
||||
public void Calculator_ProducesDeterministicResults()
|
||||
{
|
||||
// Arrange
|
||||
var services = CreateServicesWithConfiguration();
|
||||
services.AddLogging();
|
||||
services.AddSingleton<TimeProvider>(_timeProvider);
|
||||
services.AddDeterminization();
|
||||
var provider = services.BuildServiceProvider();
|
||||
|
||||
var calculator = provider.GetRequiredService<IDeltaIfPresentCalculator>();
|
||||
var snapshot = CreatePartialSnapshot();
|
||||
|
||||
// Act
|
||||
var result1 = calculator.CalculateSingleSignalDelta(snapshot, "EPSS", 0.5);
|
||||
var result2 = calculator.CalculateSingleSignalDelta(snapshot, "EPSS", 0.5);
|
||||
|
||||
// Assert - Results should be identical
|
||||
result1.CurrentScore.Should().Be(result2.CurrentScore);
|
||||
result1.HypotheticalScore.Should().Be(result2.HypotheticalScore);
|
||||
result1.CurrentEntropy.Should().Be(result2.CurrentEntropy);
|
||||
result1.HypotheticalEntropy.Should().Be(result2.HypotheticalEntropy);
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "All signals can be analyzed without exceptions")]
|
||||
public void AllSignals_CanBeAnalyzed()
|
||||
{
|
||||
// Arrange
|
||||
var services = CreateServicesWithConfiguration();
|
||||
services.AddLogging();
|
||||
services.AddSingleton<TimeProvider>(_timeProvider);
|
||||
services.AddDeterminization();
|
||||
var provider = services.BuildServiceProvider();
|
||||
|
||||
var calculator = provider.GetRequiredService<IDeltaIfPresentCalculator>();
|
||||
var snapshot = CreateEmptySnapshot();
|
||||
var signals = new[] { "VEX", "EPSS", "Reachability", "Runtime", "Backport", "SBOMLineage" };
|
||||
|
||||
// Act & Assert - All signals should be analyzable
|
||||
foreach (var signal in signals)
|
||||
{
|
||||
var result = calculator.CalculateSingleSignalDelta(snapshot, signal, 0.5);
|
||||
result.Signal.Should().Be(signal);
|
||||
result.SignalWeight.Should().BeGreaterThan(0);
|
||||
}
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Integration with Dependencies
|
||||
|
||||
[Fact(DisplayName = "Calculator uses injected UncertaintyScoreCalculator")]
|
||||
public void Calculator_UsesInjectedDependencies()
|
||||
{
|
||||
// Arrange
|
||||
var services = CreateServicesWithConfiguration();
|
||||
services.AddLogging();
|
||||
services.AddSingleton<TimeProvider>(_timeProvider);
|
||||
services.AddDeterminization();
|
||||
var provider = services.BuildServiceProvider();
|
||||
|
||||
// Act - Get both services
|
||||
var calculator = provider.GetRequiredService<IDeltaIfPresentCalculator>();
|
||||
var uncertaintyCalc = provider.GetRequiredService<IUncertaintyScoreCalculator>();
|
||||
|
||||
// Assert - Both should be available
|
||||
calculator.Should().NotBeNull();
|
||||
uncertaintyCalc.Should().NotBeNull();
|
||||
}
|
||||
|
||||
[Fact(DisplayName = "Calculator uses injected TrustScoreAggregator")]
|
||||
public void Calculator_UsesInjectedTrustAggregator()
|
||||
{
|
||||
// Arrange
|
||||
var services = CreateServicesWithConfiguration();
|
||||
services.AddLogging();
|
||||
services.AddSingleton<TimeProvider>(_timeProvider);
|
||||
services.AddDeterminization();
|
||||
var provider = services.BuildServiceProvider();
|
||||
|
||||
// Act - Get both services
|
||||
var calculator = provider.GetRequiredService<IDeltaIfPresentCalculator>();
|
||||
var aggregator = provider.GetRequiredService<TrustScoreAggregator>();
|
||||
|
||||
// Assert - Both should be available
|
||||
calculator.Should().NotBeNull();
|
||||
aggregator.Should().NotBeNull();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helpers
|
||||
|
||||
private SignalSnapshot CreateEmptySnapshot()
|
||||
{
|
||||
return SignalSnapshot.Empty("CVE-2024-1234", "pkg:maven/test@1.0", _timeProvider.GetUtcNow());
|
||||
}
|
||||
|
||||
private SignalSnapshot CreatePartialSnapshot()
|
||||
{
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
return new SignalSnapshot
|
||||
{
|
||||
Cve = "CVE-2024-1234",
|
||||
Purl = "pkg:maven/test@1.0",
|
||||
Vex = SignalState<VexClaimSummary>.NotQueried(),
|
||||
Epss = SignalState<EpssEvidence>.NotQueried(),
|
||||
Reachability = SignalState<ReachabilityEvidence>.Queried(
|
||||
new ReachabilityEvidence { Status = ReachabilityStatus.Reachable, AnalyzedAt = now }, now),
|
||||
Runtime = SignalState<RuntimeEvidence>.NotQueried(),
|
||||
Backport = SignalState<BackportEvidence>.NotQueried(),
|
||||
Sbom = SignalState<SbomLineageEvidence>.Queried(
|
||||
new SbomLineageEvidence { SbomDigest = "sha256:abc", Format = "SPDX", ComponentCount = 150, GeneratedAt = now, HasProvenance = true }, now),
|
||||
Cvss = SignalState<CvssEvidence>.NotQueried(),
|
||||
SnapshotAt = now
|
||||
};
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,509 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProofGraphBuilderTests.cs
|
||||
// Sprint: SPRINT_20260208_049_Policy_proof_studio_ux
|
||||
// Task: T1 - Unit tests for proof graph builder
|
||||
// Description: Deterministic tests for proof graph construction, path finding,
|
||||
// counterfactual overlays, and content-addressed IDs.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
|
||||
namespace StellaOps.Policy.Explainability.Tests;
|
||||
|
||||
public sealed class ProofGraphBuilderTests
|
||||
{
|
||||
private readonly ProofGraphBuilder _builder;
|
||||
|
||||
public ProofGraphBuilderTests()
|
||||
{
|
||||
_builder = new ProofGraphBuilder(NullLogger<ProofGraphBuilder>.Instance);
|
||||
}
|
||||
|
||||
// ── Helpers ──────────────────────────────────────────────────────────
|
||||
|
||||
private static VerdictRationale CreateTestRationale(
|
||||
string cve = "CVE-2026-0001",
|
||||
string verdict = "Affected",
|
||||
double? score = 75.0,
|
||||
bool includeReachability = true,
|
||||
bool includeVex = true,
|
||||
bool includeProvenance = true,
|
||||
bool includePathWitness = false)
|
||||
{
|
||||
return new VerdictRationale
|
||||
{
|
||||
RationaleId = "rat:sha256:test",
|
||||
VerdictRef = new VerdictReference
|
||||
{
|
||||
AttestationId = "att-001",
|
||||
ArtifactDigest = "sha256:abc123",
|
||||
PolicyId = "policy-001",
|
||||
Cve = cve,
|
||||
ComponentPurl = "pkg:npm/lodash@4.17.20"
|
||||
},
|
||||
Evidence = new RationaleEvidence
|
||||
{
|
||||
Cve = cve,
|
||||
Component = new ComponentIdentity
|
||||
{
|
||||
Purl = "pkg:npm/lodash@4.17.20",
|
||||
Name = "lodash",
|
||||
Version = "4.17.20",
|
||||
Ecosystem = "npm"
|
||||
},
|
||||
Reachability = includeReachability
|
||||
? new ReachabilityDetail
|
||||
{
|
||||
VulnerableFunction = "merge()",
|
||||
EntryPoint = "app.js",
|
||||
PathSummary = "app.js -> utils.js -> lodash.merge()"
|
||||
}
|
||||
: null,
|
||||
FormattedText = $"{cve} in lodash@4.17.20"
|
||||
},
|
||||
PolicyClause = new RationalePolicyClause
|
||||
{
|
||||
ClauseId = "S2.1",
|
||||
RuleDescription = "Block on reachable critical CVEs",
|
||||
Conditions = ["severity >= high", "reachability == direct"],
|
||||
FormattedText = "Policy S2.1: Block on reachable critical CVEs"
|
||||
},
|
||||
Attestations = new RationaleAttestations
|
||||
{
|
||||
PathWitness = includePathWitness
|
||||
? new AttestationReference
|
||||
{
|
||||
Id = "pw-001",
|
||||
Type = "path_witness",
|
||||
Digest = "sha256:pw1",
|
||||
Summary = "Path verified by static analysis"
|
||||
}
|
||||
: null,
|
||||
VexStatements = includeVex
|
||||
? [new AttestationReference
|
||||
{
|
||||
Id = "vex-001",
|
||||
Type = "vex",
|
||||
Digest = "sha256:vex1",
|
||||
Summary = "Vendor confirms affected"
|
||||
}]
|
||||
: null,
|
||||
Provenance = includeProvenance
|
||||
? new AttestationReference
|
||||
{
|
||||
Id = "prov-001",
|
||||
Type = "provenance",
|
||||
Digest = "sha256:prov1",
|
||||
Summary = "SLSA Level 3"
|
||||
}
|
||||
: null,
|
||||
FormattedText = "Attestations verified"
|
||||
},
|
||||
Decision = new RationaleDecision
|
||||
{
|
||||
Verdict = verdict,
|
||||
Score = score,
|
||||
Recommendation = "Upgrade to lodash@4.17.21",
|
||||
Mitigation = new MitigationGuidance
|
||||
{
|
||||
Action = "upgrade",
|
||||
Details = "Patch available in 4.17.21"
|
||||
},
|
||||
FormattedText = $"{verdict} (score {score:F2})"
|
||||
},
|
||||
GeneratedAt = new DateTimeOffset(2026, 2, 9, 12, 0, 0, TimeSpan.Zero),
|
||||
InputDigests = new RationaleInputDigests
|
||||
{
|
||||
VerdictDigest = "sha256:verdict1",
|
||||
PolicyDigest = "sha256:policy1",
|
||||
EvidenceDigest = "sha256:evidence1"
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static ScoreBreakdownDashboard CreateTestBreakdown()
|
||||
{
|
||||
return new ScoreBreakdownDashboard
|
||||
{
|
||||
DashboardId = "dash-001",
|
||||
VerdictRef = new VerdictReference
|
||||
{
|
||||
AttestationId = "att-001",
|
||||
ArtifactDigest = "sha256:abc123",
|
||||
PolicyId = "policy-001"
|
||||
},
|
||||
CompositeScore = 75,
|
||||
ActionBucket = "Schedule Next",
|
||||
Factors =
|
||||
[
|
||||
new FactorContribution
|
||||
{
|
||||
FactorId = "rch",
|
||||
FactorName = "Reachability",
|
||||
RawScore = 85,
|
||||
Weight = 0.30,
|
||||
Confidence = 0.90,
|
||||
Explanation = "Direct reachability confirmed"
|
||||
},
|
||||
new FactorContribution
|
||||
{
|
||||
FactorId = "rts",
|
||||
FactorName = "Runtime Signal",
|
||||
RawScore = 60,
|
||||
Weight = 0.25,
|
||||
Confidence = 0.70,
|
||||
Explanation = "Runtime detection moderate"
|
||||
},
|
||||
new FactorContribution
|
||||
{
|
||||
FactorId = "mit",
|
||||
FactorName = "Mitigation",
|
||||
RawScore = 30,
|
||||
Weight = 0.10,
|
||||
Confidence = 0.95,
|
||||
IsSubtractive = true,
|
||||
Explanation = "Patch available"
|
||||
}
|
||||
],
|
||||
GuardrailsApplied =
|
||||
[
|
||||
new GuardrailApplication
|
||||
{
|
||||
GuardrailName = "speculativeCap",
|
||||
ScoreBefore = 80,
|
||||
ScoreAfter = 45,
|
||||
Reason = "No runtime evidence, capped at 45",
|
||||
Conditions = ["rch == 0", "rts == 0"]
|
||||
}
|
||||
],
|
||||
PreGuardrailScore = 80,
|
||||
Entropy = 0.35,
|
||||
NeedsReview = false,
|
||||
ComputedAt = new DateTimeOffset(2026, 2, 9, 12, 0, 0, TimeSpan.Zero)
|
||||
};
|
||||
}
|
||||
|
||||
// ── Build basic graph ────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void Build_MinimalInput_CreatesGraph()
|
||||
{
|
||||
var rationale = CreateTestRationale(
|
||||
includeReachability: false,
|
||||
includeVex: false,
|
||||
includeProvenance: false);
|
||||
|
||||
var input = new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale,
|
||||
ComputedAt = rationale.GeneratedAt
|
||||
};
|
||||
|
||||
var graph = _builder.Build(input);
|
||||
|
||||
graph.Should().NotBeNull();
|
||||
graph.GraphId.Should().StartWith("pg:sha256:");
|
||||
graph.RootNodeId.Should().StartWith("verdict:");
|
||||
graph.Nodes.Should().HaveCountGreaterThanOrEqualTo(2); // verdict + policy
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Build_WithReachability_AddsEvidenceNode()
|
||||
{
|
||||
var rationale = CreateTestRationale(includeReachability: true);
|
||||
var input = new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale,
|
||||
ComputedAt = rationale.GeneratedAt
|
||||
};
|
||||
|
||||
var graph = _builder.Build(input);
|
||||
|
||||
graph.Nodes.Should().Contain(n => n.Type == ProofNodeType.ReachabilityAnalysis);
|
||||
graph.LeafNodeIds.Should().Contain(id => id.Contains("reachability"));
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Build_WithVex_AddsVexNode()
|
||||
{
|
||||
var rationale = CreateTestRationale(includeVex: true);
|
||||
var input = new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale,
|
||||
ComputedAt = rationale.GeneratedAt
|
||||
};
|
||||
|
||||
var graph = _builder.Build(input);
|
||||
|
||||
graph.Nodes.Should().Contain(n => n.Type == ProofNodeType.VexStatement);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Build_WithProvenance_AddsProvenanceNode()
|
||||
{
|
||||
var rationale = CreateTestRationale(includeProvenance: true);
|
||||
var input = new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale,
|
||||
ComputedAt = rationale.GeneratedAt
|
||||
};
|
||||
|
||||
var graph = _builder.Build(input);
|
||||
|
||||
graph.Nodes.Should().Contain(n => n.Type == ProofNodeType.Provenance);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Build_WithPathWitness_AddsPathWitnessNode()
|
||||
{
|
||||
var rationale = CreateTestRationale(includePathWitness: true);
|
||||
var input = new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale,
|
||||
ComputedAt = rationale.GeneratedAt
|
||||
};
|
||||
|
||||
var graph = _builder.Build(input);
|
||||
|
||||
graph.LeafNodeIds.Should().Contain(id => id.Contains("pathwitness"));
|
||||
}
|
||||
|
||||
// ── Score breakdown integration ──────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void Build_WithScoreBreakdown_AddsScoreNodes()
|
||||
{
|
||||
var rationale = CreateTestRationale();
|
||||
var breakdown = CreateTestBreakdown();
|
||||
var input = new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale,
|
||||
ScoreBreakdown = breakdown,
|
||||
ComputedAt = rationale.GeneratedAt
|
||||
};
|
||||
|
||||
var graph = _builder.Build(input);
|
||||
|
||||
graph.Nodes.Should().Contain(n => n.Id == "score:rch");
|
||||
graph.Nodes.Should().Contain(n => n.Id == "score:rts");
|
||||
graph.Nodes.Should().Contain(n => n.Id == "score:mit");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Build_WithGuardrails_AddsGuardrailNodes()
|
||||
{
|
||||
var rationale = CreateTestRationale();
|
||||
var breakdown = CreateTestBreakdown();
|
||||
var input = new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale,
|
||||
ScoreBreakdown = breakdown,
|
||||
ComputedAt = rationale.GeneratedAt
|
||||
};
|
||||
|
||||
var graph = _builder.Build(input);
|
||||
|
||||
graph.Nodes.Should().Contain(n => n.Type == ProofNodeType.Guardrail);
|
||||
graph.Edges.Should().Contain(e => e.Relation == ProofEdgeRelation.GuardrailApplied);
|
||||
}
|
||||
|
||||
// ── Determinism ──────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void Build_IsDeterministic_SameInputsSameGraphId()
|
||||
{
|
||||
var rationale = CreateTestRationale();
|
||||
var input = new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale,
|
||||
ComputedAt = rationale.GeneratedAt
|
||||
};
|
||||
|
||||
var graph1 = _builder.Build(input);
|
||||
var graph2 = _builder.Build(input);
|
||||
|
||||
graph1.GraphId.Should().Be(graph2.GraphId);
|
||||
graph1.Nodes.Length.Should().Be(graph2.Nodes.Length);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Build_DifferentInputs_DifferentGraphIds()
|
||||
{
|
||||
var rationale1 = CreateTestRationale(cve: "CVE-2026-0001");
|
||||
var rationale2 = CreateTestRationale(cve: "CVE-2026-0002");
|
||||
|
||||
var graph1 = _builder.Build(new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale1,
|
||||
ComputedAt = rationale1.GeneratedAt
|
||||
});
|
||||
var graph2 = _builder.Build(new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale2,
|
||||
ComputedAt = rationale2.GeneratedAt
|
||||
});
|
||||
|
||||
graph1.GraphId.Should().NotBe(graph2.GraphId);
|
||||
}
|
||||
|
||||
// ── Depth hierarchy ──────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void Build_NodeDepths_FollowHierarchy()
|
||||
{
|
||||
var rationale = CreateTestRationale();
|
||||
var breakdown = CreateTestBreakdown();
|
||||
var input = new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale,
|
||||
ScoreBreakdown = breakdown,
|
||||
ComputedAt = rationale.GeneratedAt
|
||||
};
|
||||
|
||||
var graph = _builder.Build(input);
|
||||
|
||||
var verdictNode = graph.Nodes.First(n => n.Type == ProofNodeType.Verdict);
|
||||
var policyNode = graph.Nodes.First(n => n.Type == ProofNodeType.PolicyRule);
|
||||
var scoreNodes = graph.Nodes.Where(n => n.Type == ProofNodeType.ScoreComputation);
|
||||
var leafNodes = graph.Nodes.Where(n => n.Depth == 3);
|
||||
|
||||
verdictNode.Depth.Should().Be(0);
|
||||
policyNode.Depth.Should().Be(1);
|
||||
scoreNodes.Should().AllSatisfy(n => n.Depth.Should().Be(2));
|
||||
leafNodes.Should().AllSatisfy(n => n.Depth.Should().Be(3));
|
||||
}
|
||||
|
||||
// ── Critical paths ───────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void Build_FullEvidence_HasCriticalPaths()
|
||||
{
|
||||
var rationale = CreateTestRationale(
|
||||
includeReachability: true,
|
||||
includeVex: true,
|
||||
includeProvenance: true);
|
||||
|
||||
var input = new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale,
|
||||
ComputedAt = rationale.GeneratedAt
|
||||
};
|
||||
|
||||
var graph = _builder.Build(input);
|
||||
|
||||
graph.CriticalPaths.Should().NotBeEmpty();
|
||||
graph.CriticalPaths.Should().Contain(p => p.IsCritical);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Build_CriticalPaths_StartFromLeafAndEndAtRoot()
|
||||
{
|
||||
var rationale = CreateTestRationale(includeVex: true);
|
||||
var input = new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale,
|
||||
ComputedAt = rationale.GeneratedAt
|
||||
};
|
||||
|
||||
var graph = _builder.Build(input);
|
||||
|
||||
foreach (var path in graph.CriticalPaths)
|
||||
{
|
||||
path.NodeIds.Should().NotBeEmpty();
|
||||
graph.LeafNodeIds.Should().Contain(path.NodeIds[0]);
|
||||
path.NodeIds[^1].Should().Be(graph.RootNodeId);
|
||||
}
|
||||
}
|
||||
|
||||
// ── Counterfactual overlay ───────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void AddCounterfactualOverlay_AddsCounterfactualNode()
|
||||
{
|
||||
var rationale = CreateTestRationale();
|
||||
var breakdown = CreateTestBreakdown();
|
||||
var baseGraph = _builder.Build(new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale,
|
||||
ScoreBreakdown = breakdown,
|
||||
ComputedAt = rationale.GeneratedAt
|
||||
});
|
||||
|
||||
var scenario = new CounterfactualScenario
|
||||
{
|
||||
Label = "Full Mitigation",
|
||||
FactorOverrides = ImmutableDictionary<string, int>.Empty
|
||||
.Add("mit", 100),
|
||||
ResultingScore = 50
|
||||
};
|
||||
|
||||
var overlayGraph = _builder.AddCounterfactualOverlay(baseGraph, scenario);
|
||||
|
||||
overlayGraph.Nodes.Should().Contain(n => n.Type == ProofNodeType.Counterfactual);
|
||||
overlayGraph.GraphId.Should().NotBe(baseGraph.GraphId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddCounterfactualOverlay_ConnectsOverriddenFactors()
|
||||
{
|
||||
var rationale = CreateTestRationale();
|
||||
var breakdown = CreateTestBreakdown();
|
||||
var baseGraph = _builder.Build(new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale,
|
||||
ScoreBreakdown = breakdown,
|
||||
ComputedAt = rationale.GeneratedAt
|
||||
});
|
||||
|
||||
var scenario = new CounterfactualScenario
|
||||
{
|
||||
Label = "Patch Applied",
|
||||
FactorOverrides = ImmutableDictionary<string, int>.Empty
|
||||
.Add("mit", 100)
|
||||
.Add("rch", 0),
|
||||
ResultingScore = 30
|
||||
};
|
||||
|
||||
var overlayGraph = _builder.AddCounterfactualOverlay(baseGraph, scenario);
|
||||
|
||||
overlayGraph.Edges.Should().Contain(e => e.Relation == ProofEdgeRelation.Overrides);
|
||||
}
|
||||
|
||||
// ── Edge cases ───────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void Build_ThrowsOnNullInput()
|
||||
{
|
||||
var act = () => _builder.Build(null!);
|
||||
act.Should().Throw<ArgumentNullException>();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddCounterfactualOverlay_ThrowsOnNullGraph()
|
||||
{
|
||||
var scenario = new CounterfactualScenario
|
||||
{
|
||||
Label = "test",
|
||||
FactorOverrides = ImmutableDictionary<string, int>.Empty
|
||||
};
|
||||
|
||||
var act = () => _builder.AddCounterfactualOverlay(null!, scenario);
|
||||
act.Should().Throw<ArgumentNullException>();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void AddCounterfactualOverlay_ThrowsOnNullScenario()
|
||||
{
|
||||
var rationale = CreateTestRationale();
|
||||
var graph = _builder.Build(new ProofGraphInput
|
||||
{
|
||||
Rationale = rationale,
|
||||
ComputedAt = rationale.GeneratedAt
|
||||
});
|
||||
|
||||
var act = () => _builder.AddCounterfactualOverlay(graph, null!);
|
||||
act.Should().Throw<ArgumentNullException>();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,277 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProofStudioServiceTests.cs
|
||||
// Sprint: SPRINT_20260208_049_Policy_proof_studio_ux
|
||||
// Task: T2 - Integration tests for proof studio service
|
||||
// Description: Tests for the ProofStudioService integration layer that
|
||||
// composes proof graphs and score breakdowns from policy
|
||||
// engine data.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
|
||||
namespace StellaOps.Policy.Explainability.Tests;
|
||||
|
||||
public sealed class ProofStudioServiceTests
|
||||
{
|
||||
private readonly IProofStudioService _service;
|
||||
|
||||
public ProofStudioServiceTests()
|
||||
{
|
||||
var services = new ServiceCollection();
|
||||
services.AddVerdictExplainability();
|
||||
services.AddLogging();
|
||||
services.AddMetrics();
|
||||
|
||||
var provider = services.BuildServiceProvider();
|
||||
_service = provider.GetRequiredService<IProofStudioService>();
|
||||
}
|
||||
|
||||
// ── Helpers ──────────────────────────────────────────────────────────
|
||||
|
||||
private static VerdictRationale CreateTestRationale(string cve = "CVE-2026-0001")
|
||||
{
|
||||
return new VerdictRationale
|
||||
{
|
||||
RationaleId = "rat:sha256:test",
|
||||
VerdictRef = new VerdictReference
|
||||
{
|
||||
AttestationId = "att-001",
|
||||
ArtifactDigest = "sha256:abc123",
|
||||
PolicyId = "policy-001",
|
||||
Cve = cve
|
||||
},
|
||||
Evidence = new RationaleEvidence
|
||||
{
|
||||
Cve = cve,
|
||||
Component = new ComponentIdentity
|
||||
{
|
||||
Purl = "pkg:npm/lodash@4.17.20",
|
||||
Name = "lodash",
|
||||
Version = "4.17.20",
|
||||
Ecosystem = "npm"
|
||||
},
|
||||
Reachability = new ReachabilityDetail
|
||||
{
|
||||
VulnerableFunction = "merge()",
|
||||
EntryPoint = "app.js",
|
||||
PathSummary = "app.js -> lodash.merge()"
|
||||
},
|
||||
FormattedText = $"{cve} in lodash@4.17.20"
|
||||
},
|
||||
PolicyClause = new RationalePolicyClause
|
||||
{
|
||||
ClauseId = "S2.1",
|
||||
RuleDescription = "Block on reachable critical CVEs",
|
||||
Conditions = ["severity >= high"],
|
||||
FormattedText = "Policy S2.1"
|
||||
},
|
||||
Attestations = new RationaleAttestations
|
||||
{
|
||||
VexStatements =
|
||||
[
|
||||
new AttestationReference
|
||||
{
|
||||
Id = "vex-001", Type = "vex",
|
||||
Digest = "sha256:vex1",
|
||||
Summary = "Vendor confirms affected"
|
||||
}
|
||||
],
|
||||
FormattedText = "Attestations verified"
|
||||
},
|
||||
Decision = new RationaleDecision
|
||||
{
|
||||
Verdict = "Affected",
|
||||
Score = 75.0,
|
||||
Recommendation = "Upgrade lodash",
|
||||
FormattedText = "Affected (score 75.00)"
|
||||
},
|
||||
GeneratedAt = new DateTimeOffset(2026, 2, 9, 12, 0, 0, TimeSpan.Zero),
|
||||
InputDigests = new RationaleInputDigests
|
||||
{
|
||||
VerdictDigest = "sha256:verdict1",
|
||||
PolicyDigest = "sha256:policy1"
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static ProofStudioRequest CreateFullRequest()
|
||||
{
|
||||
return new ProofStudioRequest
|
||||
{
|
||||
Rationale = CreateTestRationale(),
|
||||
CompositeScore = 75,
|
||||
ActionBucket = "Schedule Next",
|
||||
ScoreFactors =
|
||||
[
|
||||
new ScoreFactorInput
|
||||
{
|
||||
Factor = "reachability",
|
||||
Value = 85,
|
||||
Weight = 0.30,
|
||||
Confidence = 0.90,
|
||||
Reason = "Direct reachability confirmed"
|
||||
},
|
||||
new ScoreFactorInput
|
||||
{
|
||||
Factor = "evidence",
|
||||
Value = 60,
|
||||
Weight = 0.25,
|
||||
Confidence = 0.70,
|
||||
Reason = "Runtime evidence moderate"
|
||||
},
|
||||
new ScoreFactorInput
|
||||
{
|
||||
Factor = "mitigation",
|
||||
Value = 30,
|
||||
Weight = 0.10,
|
||||
Confidence = 0.95,
|
||||
IsSubtractive = true,
|
||||
Reason = "Patch available"
|
||||
}
|
||||
],
|
||||
Guardrails =
|
||||
[
|
||||
new GuardrailInput
|
||||
{
|
||||
Name = "speculativeCap",
|
||||
ScoreBefore = 80,
|
||||
ScoreAfter = 45,
|
||||
Reason = "No runtime evidence, capped",
|
||||
Conditions = ["rch == 0"]
|
||||
}
|
||||
],
|
||||
Entropy = 0.35,
|
||||
NeedsReview = false
|
||||
};
|
||||
}
|
||||
|
||||
// ── Compose tests ────────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void Compose_MinimalRequest_ReturnsView()
|
||||
{
|
||||
var request = new ProofStudioRequest
|
||||
{
|
||||
Rationale = CreateTestRationale()
|
||||
};
|
||||
|
||||
var view = _service.Compose(request);
|
||||
|
||||
view.Should().NotBeNull();
|
||||
view.ProofGraph.Should().NotBeNull();
|
||||
view.ProofGraph.GraphId.Should().StartWith("pg:sha256:");
|
||||
view.ScoreBreakdown.Should().BeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Compose_WithScoreFactors_BuildsDashboard()
|
||||
{
|
||||
var request = CreateFullRequest();
|
||||
|
||||
var view = _service.Compose(request);
|
||||
|
||||
view.ScoreBreakdown.Should().NotBeNull();
|
||||
view.ScoreBreakdown!.Factors.Should().HaveCount(3);
|
||||
view.ScoreBreakdown.CompositeScore.Should().Be(75);
|
||||
view.ScoreBreakdown.ActionBucket.Should().Be("Schedule Next");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Compose_WithGuardrails_IncludesGuardrailsInDashboard()
|
||||
{
|
||||
var request = CreateFullRequest();
|
||||
|
||||
var view = _service.Compose(request);
|
||||
|
||||
view.ScoreBreakdown!.GuardrailsApplied.Should().HaveCount(1);
|
||||
view.ScoreBreakdown.GuardrailsApplied[0].GuardrailName.Should().Be("speculativeCap");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Compose_FactorNamesAreFormatted()
|
||||
{
|
||||
var request = CreateFullRequest();
|
||||
|
||||
var view = _service.Compose(request);
|
||||
|
||||
var names = view.ScoreBreakdown!.Factors.Select(f => f.FactorName).ToArray();
|
||||
names.Should().Contain("Reachability");
|
||||
names.Should().Contain("Evidence");
|
||||
names.Should().Contain("Mitigation");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Compose_GraphContainsScoreNodes()
|
||||
{
|
||||
var request = CreateFullRequest();
|
||||
|
||||
var view = _service.Compose(request);
|
||||
|
||||
view.ProofGraph.Nodes.Should().Contain(n => n.Id == "score:reachability");
|
||||
view.ProofGraph.Nodes.Should().Contain(n => n.Id == "score:evidence");
|
||||
view.ProofGraph.Nodes.Should().Contain(n => n.Id == "score:mitigation");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void Compose_ThrowsOnNullRequest()
|
||||
{
|
||||
var act = () => _service.Compose(null!);
|
||||
act.Should().Throw<ArgumentNullException>();
|
||||
}
|
||||
|
||||
// ── Counterfactual tests ─────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void ApplyCounterfactual_AddsOverlay()
|
||||
{
|
||||
var request = CreateFullRequest();
|
||||
var view = _service.Compose(request);
|
||||
|
||||
var scenario = new CounterfactualScenario
|
||||
{
|
||||
Label = "Full Patch",
|
||||
FactorOverrides = ImmutableDictionary<string, int>.Empty
|
||||
.Add("mitigation", 100),
|
||||
ResultingScore = 50
|
||||
};
|
||||
|
||||
var updatedView = _service.ApplyCounterfactual(view, scenario);
|
||||
|
||||
updatedView.ProofGraph.Nodes.Should()
|
||||
.Contain(n => n.Type == ProofNodeType.Counterfactual);
|
||||
updatedView.ProofGraph.GraphId.Should()
|
||||
.NotBe(view.ProofGraph.GraphId);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ApplyCounterfactual_ThrowsOnNullView()
|
||||
{
|
||||
var scenario = new CounterfactualScenario
|
||||
{
|
||||
Label = "test",
|
||||
FactorOverrides = ImmutableDictionary<string, int>.Empty
|
||||
};
|
||||
|
||||
var act = () => _service.ApplyCounterfactual(null!, scenario);
|
||||
act.Should().Throw<ArgumentNullException>();
|
||||
}
|
||||
|
||||
// ── DI integration ───────────────────────────────────────────────────
|
||||
|
||||
[Fact]
|
||||
public void DI_ResolvesAllExplainabilityServices()
|
||||
{
|
||||
var services = new ServiceCollection();
|
||||
services.AddVerdictExplainability();
|
||||
services.AddLogging();
|
||||
services.AddMetrics();
|
||||
var provider = services.BuildServiceProvider();
|
||||
|
||||
provider.GetService<IVerdictRationaleRenderer>().Should().NotBeNull();
|
||||
provider.GetService<IProofGraphBuilder>().Should().NotBeNull();
|
||||
provider.GetService<IProofStudioService>().Should().NotBeNull();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="coverlet.collector" />
|
||||
<PackageReference Include="FluentAssertions" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\__Libraries\StellaOps.Policy.Explainability\StellaOps.Policy.Explainability.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
Reference in New Issue
Block a user