Add comprehensive security tests for OWASP A02, A05, A07, and A08 categories
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Findings Ledger CI / build-test (push) Has been cancelled
Findings Ledger CI / migration-validation (push) Has been cancelled
Findings Ledger CI / generate-manifest (push) Has been cancelled
Manifest Integrity / Validate Schema Integrity (push) Has been cancelled
Lighthouse CI / Lighthouse Audit (push) Has been cancelled
Lighthouse CI / Axe Accessibility Audit (push) Has been cancelled
Manifest Integrity / Validate Contract Documents (push) Has been cancelled
Manifest Integrity / Validate Pack Fixtures (push) Has been cancelled
Manifest Integrity / Audit SHA256SUMS Files (push) Has been cancelled
Manifest Integrity / Verify Merkle Roots (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Policy Simulation / policy-simulate (push) Has been cancelled
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Findings Ledger CI / build-test (push) Has been cancelled
Findings Ledger CI / migration-validation (push) Has been cancelled
Findings Ledger CI / generate-manifest (push) Has been cancelled
Manifest Integrity / Validate Schema Integrity (push) Has been cancelled
Lighthouse CI / Lighthouse Audit (push) Has been cancelled
Lighthouse CI / Axe Accessibility Audit (push) Has been cancelled
Manifest Integrity / Validate Contract Documents (push) Has been cancelled
Manifest Integrity / Validate Pack Fixtures (push) Has been cancelled
Manifest Integrity / Audit SHA256SUMS Files (push) Has been cancelled
Manifest Integrity / Verify Merkle Roots (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Policy Simulation / policy-simulate (push) Has been cancelled
- Implemented tests for Cryptographic Failures (A02) to ensure proper handling of sensitive data, secure algorithms, and key management.
- Added tests for Security Misconfiguration (A05) to validate production configurations, security headers, CORS settings, and feature management.
- Developed tests for Authentication Failures (A07) to enforce strong password policies, rate limiting, session management, and MFA support.
- Created tests for Software and Data Integrity Failures (A08) to verify artifact signatures, SBOM integrity, attestation chains, and feed updates.
This commit is contained in:
@@ -0,0 +1,165 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Gates;
|
||||
|
||||
/// <summary>
|
||||
/// Interface for gate detectors.
|
||||
/// </summary>
|
||||
public interface IGateDetector
|
||||
{
|
||||
/// <summary>The type of gate this detector finds.</summary>
|
||||
GateType GateType { get; }
|
||||
|
||||
/// <summary>Detects gates in the given call path.</summary>
|
||||
Task<IReadOnlyList<DetectedGate>> DetectAsync(
|
||||
CallPathContext context,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Context for gate detection on a call path.
|
||||
/// </summary>
|
||||
public sealed record CallPathContext
|
||||
{
|
||||
/// <summary>Symbols in the call path from entry to vulnerability.</summary>
|
||||
public required IReadOnlyList<string> CallPath { get; init; }
|
||||
|
||||
/// <summary>Source files associated with each symbol (if available).</summary>
|
||||
public IReadOnlyDictionary<string, string>? SourceFiles { get; init; }
|
||||
|
||||
/// <summary>AST or CFG data for deeper analysis (optional).</summary>
|
||||
public object? AstData { get; init; }
|
||||
|
||||
/// <summary>Language of the code being analyzed.</summary>
|
||||
public required string Language { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Composite gate detector that orchestrates all individual detectors.
|
||||
/// SPRINT_3405_0001_0001 - Task #7
|
||||
/// </summary>
|
||||
public sealed class CompositeGateDetector
|
||||
{
|
||||
private readonly IReadOnlyList<IGateDetector> _detectors;
|
||||
private readonly GateMultiplierConfig _config;
|
||||
private readonly ILogger<CompositeGateDetector> _logger;
|
||||
|
||||
public CompositeGateDetector(
|
||||
IEnumerable<IGateDetector> detectors,
|
||||
GateMultiplierConfig? config = null,
|
||||
ILogger<CompositeGateDetector>? logger = null)
|
||||
{
|
||||
_detectors = detectors?.ToList() ?? throw new ArgumentNullException(nameof(detectors));
|
||||
_config = config ?? GateMultiplierConfig.Default;
|
||||
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<CompositeGateDetector>.Instance;
|
||||
|
||||
if (_detectors.Count == 0)
|
||||
{
|
||||
_logger.LogWarning("CompositeGateDetector initialized with no detectors");
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Detects all gates in the given call path using all registered detectors.
|
||||
/// </summary>
|
||||
public async Task<GateDetectionResult> DetectAllAsync(
|
||||
CallPathContext context,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(context);
|
||||
|
||||
if (context.CallPath.Count == 0)
|
||||
{
|
||||
return GateDetectionResult.Empty;
|
||||
}
|
||||
|
||||
var allGates = new List<DetectedGate>();
|
||||
|
||||
// Run all detectors in parallel
|
||||
var tasks = _detectors.Select(async detector =>
|
||||
{
|
||||
try
|
||||
{
|
||||
var gates = await detector.DetectAsync(context, cancellationToken);
|
||||
return gates;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogWarning(ex,
|
||||
"Gate detector {DetectorType} failed for path with {PathLength} symbols",
|
||||
detector.GateType, context.CallPath.Count);
|
||||
return Array.Empty<DetectedGate>();
|
||||
}
|
||||
});
|
||||
|
||||
var results = await Task.WhenAll(tasks);
|
||||
|
||||
foreach (var gates in results)
|
||||
{
|
||||
allGates.AddRange(gates);
|
||||
}
|
||||
|
||||
// Deduplicate gates by symbol+type
|
||||
var uniqueGates = allGates
|
||||
.GroupBy(g => (g.GuardSymbol, g.Type))
|
||||
.Select(g => g.OrderByDescending(x => x.Confidence).First())
|
||||
.OrderByDescending(g => g.Confidence)
|
||||
.ToList();
|
||||
|
||||
// Calculate combined multiplier
|
||||
var combinedMultiplier = CalculateCombinedMultiplier(uniqueGates);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Detected {GateCount} gates on path, combined multiplier: {Multiplier}bps",
|
||||
uniqueGates.Count, combinedMultiplier);
|
||||
|
||||
return new GateDetectionResult
|
||||
{
|
||||
Gates = uniqueGates,
|
||||
CombinedMultiplierBps = combinedMultiplier,
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calculates the combined multiplier for all detected gates.
|
||||
/// Gates are multiplicative: auth(30%) * feature_flag(20%) = 6%
|
||||
/// </summary>
|
||||
private int CalculateCombinedMultiplier(IReadOnlyList<DetectedGate> gates)
|
||||
{
|
||||
if (gates.Count == 0)
|
||||
{
|
||||
return 10000; // 100% - no reduction
|
||||
}
|
||||
|
||||
// Start with 100% (10000 bps)
|
||||
double multiplier = 10000.0;
|
||||
|
||||
// Group gates by type and take the lowest multiplier per type
|
||||
// (multiple auth gates don't stack, but auth + feature_flag do)
|
||||
var gatesByType = gates
|
||||
.GroupBy(g => g.Type)
|
||||
.Select(g => g.Key);
|
||||
|
||||
foreach (var gateType in gatesByType)
|
||||
{
|
||||
var typeMultiplier = GetMultiplierForType(gateType);
|
||||
multiplier = multiplier * typeMultiplier / 10000.0;
|
||||
}
|
||||
|
||||
// Apply floor
|
||||
var result = (int)Math.Round(multiplier);
|
||||
return Math.Max(result, _config.MinimumMultiplierBps);
|
||||
}
|
||||
|
||||
private int GetMultiplierForType(GateType type)
|
||||
{
|
||||
return type switch
|
||||
{
|
||||
GateType.AuthRequired => _config.AuthRequiredMultiplierBps,
|
||||
GateType.FeatureFlag => _config.FeatureFlagMultiplierBps,
|
||||
GateType.AdminOnly => _config.AdminOnlyMultiplierBps,
|
||||
GateType.NonDefaultConfig => _config.NonDefaultConfigMultiplierBps,
|
||||
_ => 10000, // Unknown gate type - no reduction
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,258 @@
|
||||
using StellaOps.Scanner.Reachability.Gates;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Unit tests for gate detection and multiplier calculation.
|
||||
/// SPRINT_3405_0001_0001 - Tasks #13, #14, #15
|
||||
/// </summary>
|
||||
public sealed class GateDetectionTests
|
||||
{
|
||||
[Fact]
|
||||
public void GateDetectionResult_Empty_HasNoGates()
|
||||
{
|
||||
// Assert
|
||||
Assert.False(GateDetectionResult.Empty.HasGates);
|
||||
Assert.Empty(GateDetectionResult.Empty.Gates);
|
||||
Assert.Null(GateDetectionResult.Empty.PrimaryGate);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GateDetectionResult_WithGates_HasPrimaryGate()
|
||||
{
|
||||
// Arrange
|
||||
var gates = new[]
|
||||
{
|
||||
CreateGate(GateType.AuthRequired, 0.7),
|
||||
CreateGate(GateType.FeatureFlag, 0.9),
|
||||
};
|
||||
|
||||
var result = new GateDetectionResult { Gates = gates };
|
||||
|
||||
// Assert
|
||||
Assert.True(result.HasGates);
|
||||
Assert.Equal(2, result.Gates.Count);
|
||||
Assert.Equal(GateType.FeatureFlag, result.PrimaryGate?.Type); // Highest confidence
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void GateMultiplierConfig_Default_HasExpectedValues()
|
||||
{
|
||||
// Arrange
|
||||
var config = GateMultiplierConfig.Default;
|
||||
|
||||
// Assert
|
||||
Assert.Equal(3000, config.AuthRequiredMultiplierBps); // 30%
|
||||
Assert.Equal(2000, config.FeatureFlagMultiplierBps); // 20%
|
||||
Assert.Equal(1500, config.AdminOnlyMultiplierBps); // 15%
|
||||
Assert.Equal(5000, config.NonDefaultConfigMultiplierBps); // 50%
|
||||
Assert.Equal(500, config.MinimumMultiplierBps); // 5% floor
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CompositeGateDetector_NoDetectors_ReturnsEmpty()
|
||||
{
|
||||
// Arrange
|
||||
var detector = new CompositeGateDetector([]);
|
||||
var context = CreateContext(["main", "vulnerable_function"]);
|
||||
|
||||
// Act
|
||||
var result = await detector.DetectAllAsync(context);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.HasGates);
|
||||
Assert.Equal(10000, result.CombinedMultiplierBps); // 100%
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CompositeGateDetector_EmptyCallPath_ReturnsEmpty()
|
||||
{
|
||||
// Arrange
|
||||
var detector = new CompositeGateDetector([new MockAuthDetector()]);
|
||||
var context = CreateContext([]);
|
||||
|
||||
// Act
|
||||
var result = await detector.DetectAllAsync(context);
|
||||
|
||||
// Assert
|
||||
Assert.False(result.HasGates);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CompositeGateDetector_SingleGate_AppliesMultiplier()
|
||||
{
|
||||
// Arrange
|
||||
var authDetector = new MockAuthDetector(
|
||||
CreateGate(GateType.AuthRequired, 0.95));
|
||||
var detector = new CompositeGateDetector([authDetector]);
|
||||
var context = CreateContext(["main", "auth_check", "vulnerable"]);
|
||||
|
||||
// Act
|
||||
var result = await detector.DetectAllAsync(context);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.HasGates);
|
||||
Assert.Single(result.Gates);
|
||||
Assert.Equal(3000, result.CombinedMultiplierBps); // 30% from auth
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CompositeGateDetector_MultipleGateTypes_MultipliesMultipliers()
|
||||
{
|
||||
// Arrange
|
||||
var authDetector = new MockAuthDetector(
|
||||
CreateGate(GateType.AuthRequired, 0.9));
|
||||
var featureDetector = new MockFeatureFlagDetector(
|
||||
CreateGate(GateType.FeatureFlag, 0.8));
|
||||
|
||||
var detector = new CompositeGateDetector([authDetector, featureDetector]);
|
||||
var context = CreateContext(["main", "auth_check", "feature_check", "vulnerable"]);
|
||||
|
||||
// Act
|
||||
var result = await detector.DetectAllAsync(context);
|
||||
|
||||
// Assert
|
||||
Assert.True(result.HasGates);
|
||||
Assert.Equal(2, result.Gates.Count);
|
||||
// 30% * 20% = 6% (600 bps), but floor is 500 bps
|
||||
Assert.Equal(600, result.CombinedMultiplierBps);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CompositeGateDetector_DuplicateGates_Deduplicates()
|
||||
{
|
||||
// Arrange - two detectors finding same gate
|
||||
var authDetector1 = new MockAuthDetector(
|
||||
CreateGate(GateType.AuthRequired, 0.9, "checkAuth"));
|
||||
var authDetector2 = new MockAuthDetector(
|
||||
CreateGate(GateType.AuthRequired, 0.7, "checkAuth"));
|
||||
|
||||
var detector = new CompositeGateDetector([authDetector1, authDetector2]);
|
||||
var context = CreateContext(["main", "checkAuth", "vulnerable"]);
|
||||
|
||||
// Act
|
||||
var result = await detector.DetectAllAsync(context);
|
||||
|
||||
// Assert
|
||||
Assert.Single(result.Gates); // Deduplicated
|
||||
Assert.Equal(0.9, result.Gates[0].Confidence); // Kept higher confidence
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CompositeGateDetector_AllGateTypes_AppliesMinimumFloor()
|
||||
{
|
||||
// Arrange - all gate types = very low multiplier
|
||||
var detectors = new IGateDetector[]
|
||||
{
|
||||
new MockAuthDetector(CreateGate(GateType.AuthRequired, 0.9)),
|
||||
new MockFeatureFlagDetector(CreateGate(GateType.FeatureFlag, 0.9)),
|
||||
new MockAdminDetector(CreateGate(GateType.AdminOnly, 0.9)),
|
||||
new MockConfigDetector(CreateGate(GateType.NonDefaultConfig, 0.9)),
|
||||
};
|
||||
|
||||
var detector = new CompositeGateDetector(detectors);
|
||||
var context = CreateContext(["main", "auth", "feature", "admin", "config", "vulnerable"]);
|
||||
|
||||
// Act
|
||||
var result = await detector.DetectAllAsync(context);
|
||||
|
||||
// Assert
|
||||
Assert.Equal(4, result.Gates.Count);
|
||||
// 30% * 20% * 15% * 50% = 0.45%, but floor is 5% (500 bps)
|
||||
Assert.Equal(500, result.CombinedMultiplierBps);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task CompositeGateDetector_DetectorException_ContinuesWithOthers()
|
||||
{
|
||||
// Arrange
|
||||
var failingDetector = new FailingGateDetector();
|
||||
var authDetector = new MockAuthDetector(
|
||||
CreateGate(GateType.AuthRequired, 0.9));
|
||||
|
||||
var detector = new CompositeGateDetector([failingDetector, authDetector]);
|
||||
var context = CreateContext(["main", "vulnerable"]);
|
||||
|
||||
// Act
|
||||
var result = await detector.DetectAllAsync(context);
|
||||
|
||||
// Assert - should still get auth gate despite failing detector
|
||||
Assert.Single(result.Gates);
|
||||
Assert.Equal(GateType.AuthRequired, result.Gates[0].Type);
|
||||
}
|
||||
|
||||
private static DetectedGate CreateGate(GateType type, double confidence, string symbol = "guard_symbol")
|
||||
{
|
||||
return new DetectedGate
|
||||
{
|
||||
Type = type,
|
||||
Detail = $"{type} gate detected",
|
||||
GuardSymbol = symbol,
|
||||
Confidence = confidence,
|
||||
DetectionMethod = "mock",
|
||||
};
|
||||
}
|
||||
|
||||
private static CallPathContext CreateContext(string[] callPath)
|
||||
{
|
||||
return new CallPathContext
|
||||
{
|
||||
CallPath = callPath,
|
||||
Language = "csharp",
|
||||
};
|
||||
}
|
||||
|
||||
// Mock detectors for testing
|
||||
private class MockAuthDetector : IGateDetector
|
||||
{
|
||||
private readonly DetectedGate[] _gates;
|
||||
public GateType GateType => GateType.AuthRequired;
|
||||
|
||||
public MockAuthDetector(params DetectedGate[] gates) => _gates = gates;
|
||||
|
||||
public Task<IReadOnlyList<DetectedGate>> DetectAsync(CallPathContext context, CancellationToken ct)
|
||||
=> Task.FromResult<IReadOnlyList<DetectedGate>>(_gates);
|
||||
}
|
||||
|
||||
private class MockFeatureFlagDetector : IGateDetector
|
||||
{
|
||||
private readonly DetectedGate[] _gates;
|
||||
public GateType GateType => GateType.FeatureFlag;
|
||||
|
||||
public MockFeatureFlagDetector(params DetectedGate[] gates) => _gates = gates;
|
||||
|
||||
public Task<IReadOnlyList<DetectedGate>> DetectAsync(CallPathContext context, CancellationToken ct)
|
||||
=> Task.FromResult<IReadOnlyList<DetectedGate>>(_gates);
|
||||
}
|
||||
|
||||
private class MockAdminDetector : IGateDetector
|
||||
{
|
||||
private readonly DetectedGate[] _gates;
|
||||
public GateType GateType => GateType.AdminOnly;
|
||||
|
||||
public MockAdminDetector(params DetectedGate[] gates) => _gates = gates;
|
||||
|
||||
public Task<IReadOnlyList<DetectedGate>> DetectAsync(CallPathContext context, CancellationToken ct)
|
||||
=> Task.FromResult<IReadOnlyList<DetectedGate>>(_gates);
|
||||
}
|
||||
|
||||
private class MockConfigDetector : IGateDetector
|
||||
{
|
||||
private readonly DetectedGate[] _gates;
|
||||
public GateType GateType => GateType.NonDefaultConfig;
|
||||
|
||||
public MockConfigDetector(params DetectedGate[] gates) => _gates = gates;
|
||||
|
||||
public Task<IReadOnlyList<DetectedGate>> DetectAsync(CallPathContext context, CancellationToken ct)
|
||||
=> Task.FromResult<IReadOnlyList<DetectedGate>>(_gates);
|
||||
}
|
||||
|
||||
private class FailingGateDetector : IGateDetector
|
||||
{
|
||||
public GateType GateType => GateType.AuthRequired;
|
||||
|
||||
public Task<IReadOnlyList<DetectedGate>> DetectAsync(CallPathContext context, CancellationToken ct)
|
||||
=> throw new InvalidOperationException("Simulated detector failure");
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,325 @@
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Scanner.SmartDiff.Detection;
|
||||
|
||||
/// <summary>
|
||||
/// Detects material risk changes between two scan snapshots.
|
||||
/// Implements rules R1-R4 from the Smart-Diff advisory.
|
||||
/// Per Sprint 3500.3 - Smart-Diff Detection Rules.
|
||||
/// </summary>
|
||||
public sealed class MaterialRiskChangeDetector
|
||||
{
|
||||
private readonly MaterialRiskChangeOptions _options;
|
||||
|
||||
public MaterialRiskChangeDetector(MaterialRiskChangeOptions? options = null)
|
||||
{
|
||||
_options = options ?? MaterialRiskChangeOptions.Default;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compares two snapshots and returns all material changes.
|
||||
/// </summary>
|
||||
public MaterialRiskChangeResult Compare(
|
||||
RiskStateSnapshot previous,
|
||||
RiskStateSnapshot current)
|
||||
{
|
||||
if (previous.FindingKey != current.FindingKey)
|
||||
throw new ArgumentException("FindingKey mismatch between snapshots");
|
||||
|
||||
var changes = new List<DetectedChange>();
|
||||
|
||||
// Rule R1: Reachability Flip
|
||||
var r1 = EvaluateReachabilityFlip(previous, current);
|
||||
if (r1 is not null) changes.Add(r1);
|
||||
|
||||
// Rule R2: VEX Status Flip
|
||||
var r2 = EvaluateVexFlip(previous, current);
|
||||
if (r2 is not null) changes.Add(r2);
|
||||
|
||||
// Rule R3: Affected Range Boundary
|
||||
var r3 = EvaluateRangeBoundary(previous, current);
|
||||
if (r3 is not null) changes.Add(r3);
|
||||
|
||||
// Rule R4: Intelligence/Policy Flip
|
||||
var r4Changes = EvaluateIntelligenceFlip(previous, current);
|
||||
changes.AddRange(r4Changes);
|
||||
|
||||
var hasMaterialChange = changes.Count > 0;
|
||||
var priorityScore = hasMaterialChange ? ComputePriorityScore(changes, current) : 0;
|
||||
|
||||
return new MaterialRiskChangeResult(
|
||||
FindingKey: current.FindingKey,
|
||||
HasMaterialChange: hasMaterialChange,
|
||||
Changes: [.. changes],
|
||||
PriorityScore: priorityScore,
|
||||
PreviousStateHash: previous.ComputeStateHash(),
|
||||
CurrentStateHash: current.ComputeStateHash());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// R1: Reachability Flip - reachable changes false→true or true→false
|
||||
/// </summary>
|
||||
private DetectedChange? EvaluateReachabilityFlip(
|
||||
RiskStateSnapshot prev,
|
||||
RiskStateSnapshot curr)
|
||||
{
|
||||
if (prev.Reachable == curr.Reachable)
|
||||
return null;
|
||||
|
||||
// Skip if either is unknown
|
||||
if (prev.Reachable is null || curr.Reachable is null)
|
||||
return null;
|
||||
|
||||
var direction = curr.Reachable.Value
|
||||
? RiskDirection.Increased
|
||||
: RiskDirection.Decreased;
|
||||
|
||||
return new DetectedChange(
|
||||
Rule: DetectionRule.R1_ReachabilityFlip,
|
||||
ChangeType: MaterialChangeType.ReachabilityFlip,
|
||||
Direction: direction,
|
||||
Reason: $"Reachability changed from {prev.Reachable} to {curr.Reachable}",
|
||||
PreviousValue: prev.Reachable.ToString()!,
|
||||
CurrentValue: curr.Reachable.ToString()!,
|
||||
Weight: direction == RiskDirection.Increased
|
||||
? _options.ReachabilityFlipUpWeight
|
||||
: _options.ReachabilityFlipDownWeight);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// R2: VEX Status Flip - meaningful status transitions
|
||||
/// </summary>
|
||||
private DetectedChange? EvaluateVexFlip(
|
||||
RiskStateSnapshot prev,
|
||||
RiskStateSnapshot curr)
|
||||
{
|
||||
if (prev.VexStatus == curr.VexStatus)
|
||||
return null;
|
||||
|
||||
// Determine if this is a meaningful flip
|
||||
var (isMeaningful, direction) = ClassifyVexTransition(prev.VexStatus, curr.VexStatus);
|
||||
|
||||
if (!isMeaningful)
|
||||
return null;
|
||||
|
||||
return new DetectedChange(
|
||||
Rule: DetectionRule.R2_VexFlip,
|
||||
ChangeType: MaterialChangeType.VexFlip,
|
||||
Direction: direction,
|
||||
Reason: $"VEX status changed from {prev.VexStatus} to {curr.VexStatus}",
|
||||
PreviousValue: prev.VexStatus.ToString(),
|
||||
CurrentValue: curr.VexStatus.ToString(),
|
||||
Weight: direction == RiskDirection.Increased
|
||||
? _options.VexFlipToAffectedWeight
|
||||
: _options.VexFlipToNotAffectedWeight);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Classifies VEX status transitions as meaningful or not.
|
||||
/// </summary>
|
||||
private static (bool IsMeaningful, RiskDirection Direction) ClassifyVexTransition(
|
||||
VexStatusType from,
|
||||
VexStatusType to)
|
||||
{
|
||||
return (from, to) switch
|
||||
{
|
||||
// Risk increases
|
||||
(VexStatusType.NotAffected, VexStatusType.Affected) => (true, RiskDirection.Increased),
|
||||
(VexStatusType.Fixed, VexStatusType.Affected) => (true, RiskDirection.Increased),
|
||||
(VexStatusType.UnderInvestigation, VexStatusType.Affected) => (true, RiskDirection.Increased),
|
||||
|
||||
// Risk decreases
|
||||
(VexStatusType.Affected, VexStatusType.NotAffected) => (true, RiskDirection.Decreased),
|
||||
(VexStatusType.Affected, VexStatusType.Fixed) => (true, RiskDirection.Decreased),
|
||||
(VexStatusType.UnderInvestigation, VexStatusType.NotAffected) => (true, RiskDirection.Decreased),
|
||||
(VexStatusType.UnderInvestigation, VexStatusType.Fixed) => (true, RiskDirection.Decreased),
|
||||
|
||||
// Under investigation transitions (noteworthy but not scored)
|
||||
(VexStatusType.Affected, VexStatusType.UnderInvestigation) => (true, RiskDirection.Neutral),
|
||||
(VexStatusType.NotAffected, VexStatusType.UnderInvestigation) => (true, RiskDirection.Neutral),
|
||||
|
||||
// Unknown transitions (from unknown to known)
|
||||
(VexStatusType.Unknown, VexStatusType.Affected) => (true, RiskDirection.Increased),
|
||||
(VexStatusType.Unknown, VexStatusType.NotAffected) => (true, RiskDirection.Decreased),
|
||||
|
||||
// All other transitions are not meaningful
|
||||
_ => (false, RiskDirection.Neutral)
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// R3: Affected Range Boundary - component enters or exits affected version range
|
||||
/// </summary>
|
||||
private DetectedChange? EvaluateRangeBoundary(
|
||||
RiskStateSnapshot prev,
|
||||
RiskStateSnapshot curr)
|
||||
{
|
||||
if (prev.InAffectedRange == curr.InAffectedRange)
|
||||
return null;
|
||||
|
||||
// Skip if either is unknown
|
||||
if (prev.InAffectedRange is null || curr.InAffectedRange is null)
|
||||
return null;
|
||||
|
||||
var direction = curr.InAffectedRange.Value
|
||||
? RiskDirection.Increased
|
||||
: RiskDirection.Decreased;
|
||||
|
||||
return new DetectedChange(
|
||||
Rule: DetectionRule.R3_RangeBoundary,
|
||||
ChangeType: MaterialChangeType.RangeBoundary,
|
||||
Direction: direction,
|
||||
Reason: curr.InAffectedRange.Value
|
||||
? "Component version entered affected range"
|
||||
: "Component version exited affected range",
|
||||
PreviousValue: prev.InAffectedRange.ToString()!,
|
||||
CurrentValue: curr.InAffectedRange.ToString()!,
|
||||
Weight: direction == RiskDirection.Increased
|
||||
? _options.RangeEntryWeight
|
||||
: _options.RangeExitWeight);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// R4: Intelligence/Policy Flip - KEV, EPSS threshold, or policy decision changes
|
||||
/// </summary>
|
||||
private List<DetectedChange> EvaluateIntelligenceFlip(
|
||||
RiskStateSnapshot prev,
|
||||
RiskStateSnapshot curr)
|
||||
{
|
||||
var changes = new List<DetectedChange>();
|
||||
|
||||
// KEV change
|
||||
if (prev.Kev != curr.Kev)
|
||||
{
|
||||
var direction = curr.Kev ? RiskDirection.Increased : RiskDirection.Decreased;
|
||||
changes.Add(new DetectedChange(
|
||||
Rule: DetectionRule.R4_IntelligenceFlip,
|
||||
ChangeType: curr.Kev ? MaterialChangeType.KevAdded : MaterialChangeType.KevRemoved,
|
||||
Direction: direction,
|
||||
Reason: curr.Kev ? "Added to KEV catalog" : "Removed from KEV catalog",
|
||||
PreviousValue: prev.Kev.ToString(),
|
||||
CurrentValue: curr.Kev.ToString(),
|
||||
Weight: curr.Kev ? _options.KevAddedWeight : _options.KevRemovedWeight));
|
||||
}
|
||||
|
||||
// EPSS threshold crossing
|
||||
var epssChange = EvaluateEpssThreshold(prev.EpssScore, curr.EpssScore);
|
||||
if (epssChange is not null)
|
||||
{
|
||||
changes.Add(epssChange);
|
||||
}
|
||||
|
||||
// Policy decision flip
|
||||
if (prev.PolicyDecision != curr.PolicyDecision)
|
||||
{
|
||||
var policyChange = EvaluatePolicyFlip(prev.PolicyDecision, curr.PolicyDecision);
|
||||
if (policyChange is not null)
|
||||
{
|
||||
changes.Add(policyChange);
|
||||
}
|
||||
}
|
||||
|
||||
return changes;
|
||||
}
|
||||
|
||||
private DetectedChange? EvaluateEpssThreshold(double? prevScore, double? currScore)
|
||||
{
|
||||
if (prevScore is null || currScore is null)
|
||||
return null;
|
||||
|
||||
var prevAbove = prevScore.Value >= _options.EpssThreshold;
|
||||
var currAbove = currScore.Value >= _options.EpssThreshold;
|
||||
|
||||
if (prevAbove == currAbove)
|
||||
return null;
|
||||
|
||||
var direction = currAbove ? RiskDirection.Increased : RiskDirection.Decreased;
|
||||
|
||||
return new DetectedChange(
|
||||
Rule: DetectionRule.R4_IntelligenceFlip,
|
||||
ChangeType: MaterialChangeType.EpssThreshold,
|
||||
Direction: direction,
|
||||
Reason: currAbove
|
||||
? $"EPSS score crossed above threshold ({_options.EpssThreshold:P0})"
|
||||
: $"EPSS score dropped below threshold ({_options.EpssThreshold:P0})",
|
||||
PreviousValue: prevScore.Value.ToString("F4"),
|
||||
CurrentValue: currScore.Value.ToString("F4"),
|
||||
Weight: _options.EpssThresholdWeight);
|
||||
}
|
||||
|
||||
private DetectedChange? EvaluatePolicyFlip(PolicyDecisionType? prev, PolicyDecisionType? curr)
|
||||
{
|
||||
if (prev is null || curr is null)
|
||||
return null;
|
||||
|
||||
// Determine direction based on severity ordering: Allow < Warn < Block
|
||||
var direction = (prev.Value, curr.Value) switch
|
||||
{
|
||||
(PolicyDecisionType.Allow, PolicyDecisionType.Warn) => RiskDirection.Increased,
|
||||
(PolicyDecisionType.Allow, PolicyDecisionType.Block) => RiskDirection.Increased,
|
||||
(PolicyDecisionType.Warn, PolicyDecisionType.Block) => RiskDirection.Increased,
|
||||
(PolicyDecisionType.Block, PolicyDecisionType.Warn) => RiskDirection.Decreased,
|
||||
(PolicyDecisionType.Block, PolicyDecisionType.Allow) => RiskDirection.Decreased,
|
||||
(PolicyDecisionType.Warn, PolicyDecisionType.Allow) => RiskDirection.Decreased,
|
||||
_ => RiskDirection.Neutral
|
||||
};
|
||||
|
||||
if (direction == RiskDirection.Neutral)
|
||||
return null;
|
||||
|
||||
return new DetectedChange(
|
||||
Rule: DetectionRule.R4_IntelligenceFlip,
|
||||
ChangeType: MaterialChangeType.PolicyFlip,
|
||||
Direction: direction,
|
||||
Reason: $"Policy decision changed from {prev} to {curr}",
|
||||
PreviousValue: prev.Value.ToString(),
|
||||
CurrentValue: curr.Value.ToString(),
|
||||
Weight: _options.PolicyFlipWeight);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Computes priority score for a set of changes.
|
||||
/// Formula: base_severity × Σ(weight_i × direction_i) × confidence_factor
|
||||
/// </summary>
|
||||
private double ComputePriorityScore(List<DetectedChange> changes, RiskStateSnapshot current)
|
||||
{
|
||||
if (changes.Count == 0)
|
||||
return 0;
|
||||
|
||||
// Sum weighted changes
|
||||
var weightedSum = 0.0;
|
||||
foreach (var change in changes)
|
||||
{
|
||||
var directionMultiplier = change.Direction switch
|
||||
{
|
||||
RiskDirection.Increased => 1.0,
|
||||
RiskDirection.Decreased => -0.5,
|
||||
RiskDirection.Neutral => 0.0,
|
||||
_ => 0.0
|
||||
};
|
||||
weightedSum += change.Weight * directionMultiplier;
|
||||
}
|
||||
|
||||
// Base severity from EPSS or default
|
||||
var baseSeverity = current.EpssScore ?? 0.5;
|
||||
|
||||
// KEV boost
|
||||
var kevBoost = current.Kev ? 1.5 : 1.0;
|
||||
|
||||
// Confidence factor from lattice state
|
||||
var confidence = current.LatticeState switch
|
||||
{
|
||||
"certain_reachable" => 1.0,
|
||||
"likely_reachable" => 0.9,
|
||||
"uncertain" => 0.7,
|
||||
"likely_unreachable" => 0.5,
|
||||
"certain_unreachable" => 0.3,
|
||||
_ => 0.7
|
||||
};
|
||||
|
||||
var score = baseSeverity * weightedSum * kevBoost * confidence;
|
||||
|
||||
// Clamp to [-1, 1]
|
||||
return Math.Clamp(score, -1.0, 1.0);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,156 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.SmartDiff.Detection;
|
||||
|
||||
/// <summary>
|
||||
/// Result of material risk change detection.
|
||||
/// </summary>
|
||||
public sealed record MaterialRiskChangeResult(
|
||||
[property: JsonPropertyName("findingKey")] FindingKey FindingKey,
|
||||
[property: JsonPropertyName("hasMaterialChange")] bool HasMaterialChange,
|
||||
[property: JsonPropertyName("changes")] ImmutableArray<DetectedChange> Changes,
|
||||
[property: JsonPropertyName("priorityScore")] double PriorityScore,
|
||||
[property: JsonPropertyName("previousStateHash")] string PreviousStateHash,
|
||||
[property: JsonPropertyName("currentStateHash")] string CurrentStateHash);
|
||||
|
||||
/// <summary>
|
||||
/// A detected material change.
|
||||
/// </summary>
|
||||
public sealed record DetectedChange(
|
||||
[property: JsonPropertyName("rule")] DetectionRule Rule,
|
||||
[property: JsonPropertyName("changeType")] MaterialChangeType ChangeType,
|
||||
[property: JsonPropertyName("direction")] RiskDirection Direction,
|
||||
[property: JsonPropertyName("reason")] string Reason,
|
||||
[property: JsonPropertyName("previousValue")] string PreviousValue,
|
||||
[property: JsonPropertyName("currentValue")] string CurrentValue,
|
||||
[property: JsonPropertyName("weight")] double Weight);
|
||||
|
||||
/// <summary>
/// Detection rule identifiers (R1-R4).
/// Serialized as the short "R1".."R4" names.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<DetectionRule>))]
public enum DetectionRule
{
    /// <summary>R1: reachability status flipped between scans.</summary>
    [JsonStringEnumMemberName("R1")]
    R1_ReachabilityFlip,

    /// <summary>R2: VEX status flipped between scans.</summary>
    [JsonStringEnumMemberName("R2")]
    R2_VexFlip,

    /// <summary>R3: the component crossed an affected-version range boundary.</summary>
    [JsonStringEnumMemberName("R3")]
    R3_RangeBoundary,

    /// <summary>
    /// R4: a threat-intelligence signal flipped. EPSS threshold crossing is tied to R4
    /// via <see cref="MaterialRiskChangeOptions.EpssThreshold"/>; presumably KEV and
    /// policy flips also map here — confirm in the detector.
    /// </summary>
    [JsonStringEnumMemberName("R4")]
    R4_IntelligenceFlip
}

/// <summary>
/// Type of material change.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<MaterialChangeType>))]
public enum MaterialChangeType
{
    /// <summary>Reachability flipped (either direction).</summary>
    [JsonStringEnumMemberName("reachability_flip")]
    ReachabilityFlip,

    /// <summary>VEX status flipped (either direction).</summary>
    [JsonStringEnumMemberName("vex_flip")]
    VexFlip,

    /// <summary>Component entered or exited an affected-version range.</summary>
    [JsonStringEnumMemberName("range_boundary")]
    RangeBoundary,

    /// <summary>Vulnerability was added to the KEV catalog.</summary>
    [JsonStringEnumMemberName("kev_added")]
    KevAdded,

    /// <summary>Vulnerability was removed from the KEV catalog.</summary>
    [JsonStringEnumMemberName("kev_removed")]
    KevRemoved,

    /// <summary>EPSS score crossed the configured threshold.</summary>
    [JsonStringEnumMemberName("epss_threshold")]
    EpssThreshold,

    /// <summary>Policy decision flipped (e.g. allow &lt;-&gt; block).</summary>
    [JsonStringEnumMemberName("policy_flip")]
    PolicyFlip
}

/// <summary>
/// Direction of risk change.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<RiskDirection>))]
public enum RiskDirection
{
    /// <summary>Risk increased (worse).</summary>
    [JsonStringEnumMemberName("increased")]
    Increased,

    /// <summary>Risk decreased (better).</summary>
    [JsonStringEnumMemberName("decreased")]
    Decreased,

    /// <summary>Risk changed without a clear better/worse direction.</summary>
    [JsonStringEnumMemberName("neutral")]
    Neutral
}
|
||||
|
||||
/// <summary>
/// Configuration options for material risk change detection.
/// </summary>
/// <remarks>
/// All properties are init-only, so instances (including <see cref="Default"/>)
/// are immutable after construction and safe to share. The weights presumably
/// feed the detector's priority score — confirm against the detector implementation.
/// </remarks>
public sealed class MaterialRiskChangeOptions
{
    /// <summary>
    /// Default options instance. Immutable, so safe to share across detectors.
    /// </summary>
    public static readonly MaterialRiskChangeOptions Default = new();

    /// <summary>
    /// Weight for reachability flip (unreachable → reachable).
    /// </summary>
    public double ReachabilityFlipUpWeight { get; init; } = 1.0;

    /// <summary>
    /// Weight for reachability flip (reachable → unreachable).
    /// </summary>
    public double ReachabilityFlipDownWeight { get; init; } = 0.8;

    /// <summary>
    /// Weight for VEX flip to affected.
    /// </summary>
    public double VexFlipToAffectedWeight { get; init; } = 0.9;

    /// <summary>
    /// Weight for VEX flip to not_affected.
    /// </summary>
    public double VexFlipToNotAffectedWeight { get; init; } = 0.7;

    /// <summary>
    /// Weight for entering affected range.
    /// </summary>
    public double RangeEntryWeight { get; init; } = 0.8;

    /// <summary>
    /// Weight for exiting affected range.
    /// </summary>
    public double RangeExitWeight { get; init; } = 0.6;

    /// <summary>
    /// Weight for KEV addition.
    /// </summary>
    public double KevAddedWeight { get; init; } = 1.0;

    /// <summary>
    /// Weight for KEV removal.
    /// </summary>
    public double KevRemovedWeight { get; init; } = 0.5;

    /// <summary>
    /// Weight for EPSS threshold crossing.
    /// </summary>
    public double EpssThresholdWeight { get; init; } = 0.6;

    /// <summary>
    /// EPSS score threshold for R4 detection. Unlike the *Weight properties,
    /// this is a cut-off on the EPSS score itself, not a scoring weight.
    /// </summary>
    public double EpssThreshold { get; init; } = 0.5;

    /// <summary>
    /// Weight for policy decision flip.
    /// </summary>
    public double PolicyFlipWeight { get; init; } = 0.7;
}
|
||||
@@ -0,0 +1,107 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Globalization;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.SmartDiff.Detection;
|
||||
|
||||
/// <summary>
/// Captures the complete risk state for a finding at a point in time.
/// Used for cross-scan comparison.
/// Per Sprint 3500.3 - Smart-Diff Detection Rules.
/// </summary>
public sealed record RiskStateSnapshot(
    [property: JsonPropertyName("findingKey")] FindingKey FindingKey,
    [property: JsonPropertyName("scanId")] string ScanId,
    [property: JsonPropertyName("capturedAt")] DateTimeOffset CapturedAt,
    [property: JsonPropertyName("reachable")] bool? Reachable,
    [property: JsonPropertyName("latticeState")] string? LatticeState,
    [property: JsonPropertyName("vexStatus")] VexStatusType VexStatus,
    [property: JsonPropertyName("inAffectedRange")] bool? InAffectedRange,
    [property: JsonPropertyName("kev")] bool Kev,
    [property: JsonPropertyName("epssScore")] double? EpssScore,
    [property: JsonPropertyName("policyFlags")] ImmutableArray<string> PolicyFlags,
    [property: JsonPropertyName("policyDecision")] PolicyDecisionType? PolicyDecision,
    [property: JsonPropertyName("evidenceLinks")] ImmutableArray<EvidenceLink>? EvidenceLinks = null)
{
    /// <summary>
    /// Computes a deterministic hash for this snapshot (excluding timestamp).
    /// </summary>
    /// <remarks>
    /// The hash folds in the finding identity plus the risk-relevant scalar fields.
    /// ScanId, CapturedAt, LatticeState, PolicyFlags and EvidenceLinks are not part
    /// of the hash — NOTE(review): confirm excluding LatticeState/PolicyFlags is
    /// intentional, given that PolicyDecision is included.
    /// </remarks>
    /// <returns>Lower-case hex SHA-256 of the colon-joined field values.</returns>
    public string ComputeStateHash()
    {
        // Nullable fields serialize as the literal "null" so absent and present
        // values can never collide; EPSS is pinned to 4 decimals, invariant culture,
        // to keep the hash stable across machines.
        var parts = new[]
        {
            FindingKey.ToString(),
            Reachable?.ToString() ?? "null",
            VexStatus.ToString(),
            InAffectedRange?.ToString() ?? "null",
            Kev.ToString(),
            EpssScore?.ToString("F4", CultureInfo.InvariantCulture) ?? "null",
            PolicyDecision?.ToString() ?? "null",
        };

        var payload = string.Join(':', parts);
        var digest = SHA256.HashData(Encoding.UTF8.GetBytes(payload));
        return Convert.ToHexString(digest).ToLowerInvariant();
    }
}
|
||||
|
||||
/// <summary>
/// Key identifying a unique finding: the pair (vulnerability id, component purl).
/// </summary>
public sealed record FindingKey(
    [property: JsonPropertyName("vulnId")] string VulnId,
    [property: JsonPropertyName("componentPurl")] string ComponentPurl)
{
    /// <summary>Canonical "vulnId@componentPurl" form.</summary>
    public override string ToString() => string.Concat(VulnId, "@", ComponentPurl);
}
|
||||
|
||||
/// <summary>
/// Link to evidence supporting a state.
/// </summary>
/// <param name="Type">Evidence kind discriminator. Free-form string here; the allowed values are not visible in this file — confirm against producers.</param>
/// <param name="Uri">Location of the evidence document.</param>
/// <param name="Digest">Optional content digest pinning the evidence to an immutable version.</param>
public sealed record EvidenceLink(
    [property: JsonPropertyName("type")] string Type,
    [property: JsonPropertyName("uri")] string Uri,
    [property: JsonPropertyName("digest")] string? Digest = null);
|
||||
|
||||
/// <summary>
/// VEX status values. Serialized with snake_case wire names.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<VexStatusType>))]
public enum VexStatusType
{
    /// <summary>No VEX statement available for the finding.</summary>
    [JsonStringEnumMemberName("unknown")]
    Unknown,

    /// <summary>Product is affected by the vulnerability.</summary>
    [JsonStringEnumMemberName("affected")]
    Affected,

    /// <summary>Product is not affected by the vulnerability.</summary>
    [JsonStringEnumMemberName("not_affected")]
    NotAffected,

    /// <summary>Vulnerability has been fixed in the product.</summary>
    [JsonStringEnumMemberName("fixed")]
    Fixed,

    /// <summary>Impact is still being investigated.</summary>
    [JsonStringEnumMemberName("under_investigation")]
    UnderInvestigation
}

/// <summary>
/// Policy decision type.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<PolicyDecisionType>))]
public enum PolicyDecisionType
{
    /// <summary>Policy allows the artifact/finding.</summary>
    [JsonStringEnumMemberName("allow")]
    Allow,

    /// <summary>Policy allows with a warning.</summary>
    [JsonStringEnumMemberName("warn")]
    Warn,

    /// <summary>Policy blocks the artifact/finding.</summary>
    [JsonStringEnumMemberName("block")]
    Block
}
|
||||
@@ -0,0 +1,168 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.SmartDiff.Output;
|
||||
|
||||
/// <summary>
/// SARIF 2.1.0 log model for Smart-Diff output.
/// Per Sprint 3500.4 - Smart-Diff Binary Analysis.
/// </summary>
/// <param name="Version">SARIF format version, "2.1.0".</param>
/// <param name="Schema">URI of the SARIF JSON schema (serialized as "$schema").</param>
/// <param name="Runs">The analysis runs contained in this log.</param>
public sealed record SarifLog(
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("$schema")] string Schema,
    [property: JsonPropertyName("runs")] ImmutableArray<SarifRun> Runs);

/// <summary>
/// A single SARIF run representing one analysis execution.
/// </summary>
/// <param name="Tool">The tool that produced the run.</param>
/// <param name="Results">The findings produced by the run.</param>
/// <param name="Invocations">Optional invocation metadata (timings, success).</param>
/// <param name="Artifacts">Optional list of files referenced by the results.</param>
/// <param name="VersionControlProvenance">Optional VCS provenance for the analyzed code.</param>
public sealed record SarifRun(
    [property: JsonPropertyName("tool")] SarifTool Tool,
    [property: JsonPropertyName("results")] ImmutableArray<SarifResult> Results,
    [property: JsonPropertyName("invocations")] ImmutableArray<SarifInvocation>? Invocations = null,
    [property: JsonPropertyName("artifacts")] ImmutableArray<SarifArtifact>? Artifacts = null,
    [property: JsonPropertyName("versionControlProvenance")] ImmutableArray<SarifVersionControlDetails>? VersionControlProvenance = null);
|
||||
|
||||
/// <summary>
/// Tool information for the SARIF run.
/// </summary>
/// <param name="Driver">The primary analysis tool component.</param>
/// <param name="Extensions">Optional plug-in components that contributed to the run.</param>
public sealed record SarifTool(
    [property: JsonPropertyName("driver")] SarifToolComponent Driver,
    [property: JsonPropertyName("extensions")] ImmutableArray<SarifToolComponent>? Extensions = null);

/// <summary>
/// Tool component (driver or extension).
/// </summary>
/// <param name="Name">Component name.</param>
/// <param name="Version">Component version string.</param>
/// <param name="InformationUri">Optional URI with documentation for the component.</param>
/// <param name="Rules">Optional rule (reportingDescriptor) definitions published by the component.</param>
/// <param name="SupportedTaxonomies">Optional references to taxonomies (e.g. CWE) the component maps to.</param>
public sealed record SarifToolComponent(
    [property: JsonPropertyName("name")] string Name,
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("informationUri")] string? InformationUri = null,
    [property: JsonPropertyName("rules")] ImmutableArray<SarifReportingDescriptor>? Rules = null,
    [property: JsonPropertyName("supportedTaxonomies")] ImmutableArray<SarifToolComponentReference>? SupportedTaxonomies = null);

/// <summary>
/// Reference to a tool component.
/// </summary>
/// <param name="Name">Name of the referenced component.</param>
/// <param name="Guid">Optional stable GUID of the referenced component.</param>
public sealed record SarifToolComponentReference(
    [property: JsonPropertyName("name")] string Name,
    [property: JsonPropertyName("guid")] string? Guid = null);
|
||||
|
||||
/// <summary>
/// Rule definition (SARIF reportingDescriptor).
/// </summary>
/// <param name="Id">Stable rule identifier (e.g. "SDIFF001"); results reference rules by this id.</param>
/// <param name="Name">Optional human-readable rule name.</param>
/// <param name="ShortDescription">Optional one-line description.</param>
/// <param name="FullDescription">Optional detailed description.</param>
/// <param name="DefaultConfiguration">Optional default severity/enabled state.</param>
/// <param name="HelpUri">Optional link to rule documentation.</param>
public sealed record SarifReportingDescriptor(
    [property: JsonPropertyName("id")] string Id,
    [property: JsonPropertyName("name")] string? Name = null,
    [property: JsonPropertyName("shortDescription")] SarifMessage? ShortDescription = null,
    [property: JsonPropertyName("fullDescription")] SarifMessage? FullDescription = null,
    [property: JsonPropertyName("defaultConfiguration")] SarifReportingConfiguration? DefaultConfiguration = null,
    [property: JsonPropertyName("helpUri")] string? HelpUri = null);

/// <summary>
/// Rule configuration: default severity level and enabled flag.
/// </summary>
public sealed record SarifReportingConfiguration(
    [property: JsonPropertyName("level")] SarifLevel Level = SarifLevel.Warning,
    [property: JsonPropertyName("enabled")] bool Enabled = true);

/// <summary>
/// SARIF message with plain text and optional markdown rendering.
/// </summary>
public sealed record SarifMessage(
    [property: JsonPropertyName("text")] string Text,
    [property: JsonPropertyName("markdown")] string? Markdown = null);
|
||||
|
||||
/// <summary>
/// SARIF result severity level. Serialized with the lowercase SARIF wire names.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter<SarifLevel>))]
public enum SarifLevel
{
    /// <summary>No severity applies.</summary>
    [JsonStringEnumMemberName("none")]
    None,

    /// <summary>Informational finding.</summary>
    [JsonStringEnumMemberName("note")]
    Note,

    /// <summary>Potential problem.</summary>
    [JsonStringEnumMemberName("warning")]
    Warning,

    /// <summary>Serious problem.</summary>
    [JsonStringEnumMemberName("error")]
    Error
}
|
||||
|
||||
/// <summary>
/// A single result/finding.
/// </summary>
/// <param name="RuleId">Id of the rule that produced this result (e.g. "SDIFF001").</param>
/// <param name="Level">Severity assigned to this specific result.</param>
/// <param name="Message">Human-readable finding message.</param>
/// <param name="Locations">Optional physical/logical locations of the finding.</param>
/// <param name="Fingerprints">Optional stable fingerprints for result matching across runs.</param>
/// <param name="PartialFingerprints">Optional partial fingerprints (SARIF baseline matching).</param>
/// <param name="Properties">Optional free-form property bag.</param>
public sealed record SarifResult(
    [property: JsonPropertyName("ruleId")] string RuleId,
    [property: JsonPropertyName("level")] SarifLevel Level,
    [property: JsonPropertyName("message")] SarifMessage Message,
    [property: JsonPropertyName("locations")] ImmutableArray<SarifLocation>? Locations = null,
    [property: JsonPropertyName("fingerprints")] ImmutableDictionary<string, string>? Fingerprints = null,
    [property: JsonPropertyName("partialFingerprints")] ImmutableDictionary<string, string>? PartialFingerprints = null,
    [property: JsonPropertyName("properties")] ImmutableDictionary<string, object>? Properties = null);
|
||||
|
||||
/// <summary>
/// Location of a result: physical (file/region) and/or logical (namespace/class/function).
/// </summary>
public sealed record SarifLocation(
    [property: JsonPropertyName("physicalLocation")] SarifPhysicalLocation? PhysicalLocation = null,
    [property: JsonPropertyName("logicalLocations")] ImmutableArray<SarifLogicalLocation>? LogicalLocations = null);

/// <summary>
/// Physical file location: an artifact plus an optional region within it.
/// </summary>
public sealed record SarifPhysicalLocation(
    [property: JsonPropertyName("artifactLocation")] SarifArtifactLocation ArtifactLocation,
    [property: JsonPropertyName("region")] SarifRegion? Region = null);

/// <summary>
/// Artifact location (file path).
/// </summary>
/// <param name="Uri">Path or URI of the artifact.</param>
/// <param name="UriBaseId">Optional symbolic base that <paramref name="Uri"/> is relative to.</param>
/// <param name="Index">Optional index into the run's artifacts array.</param>
public sealed record SarifArtifactLocation(
    [property: JsonPropertyName("uri")] string Uri,
    [property: JsonPropertyName("uriBaseId")] string? UriBaseId = null,
    [property: JsonPropertyName("index")] int? Index = null);

/// <summary>
/// Region within a file. Line/column numbers follow SARIF conventions (1-based).
/// </summary>
public sealed record SarifRegion(
    [property: JsonPropertyName("startLine")] int? StartLine = null,
    [property: JsonPropertyName("startColumn")] int? StartColumn = null,
    [property: JsonPropertyName("endLine")] int? EndLine = null,
    [property: JsonPropertyName("endColumn")] int? EndColumn = null);

/// <summary>
/// Logical location (namespace, class, function).
/// </summary>
/// <param name="Name">Unqualified name of the program element.</param>
/// <param name="FullyQualifiedName">Optional fully qualified name.</param>
/// <param name="Kind">Optional element kind (e.g. "function", "type").</param>
public sealed record SarifLogicalLocation(
    [property: JsonPropertyName("name")] string Name,
    [property: JsonPropertyName("fullyQualifiedName")] string? FullyQualifiedName = null,
    [property: JsonPropertyName("kind")] string? Kind = null);
|
||||
|
||||
/// <summary>
/// Invocation information: whether the run succeeded and when it executed.
/// </summary>
public sealed record SarifInvocation(
    [property: JsonPropertyName("executionSuccessful")] bool ExecutionSuccessful,
    [property: JsonPropertyName("startTimeUtc")] DateTimeOffset? StartTimeUtc = null,
    [property: JsonPropertyName("endTimeUtc")] DateTimeOffset? EndTimeUtc = null,
    [property: JsonPropertyName("workingDirectory")] SarifArtifactLocation? WorkingDirectory = null,
    [property: JsonPropertyName("commandLine")] string? CommandLine = null);

/// <summary>
/// Artifact (file) information.
/// </summary>
/// <param name="Location">Where the artifact lives.</param>
/// <param name="MimeType">Optional MIME type of the artifact.</param>
/// <param name="Hashes">Optional content hashes keyed by algorithm name.</param>
public sealed record SarifArtifact(
    [property: JsonPropertyName("location")] SarifArtifactLocation Location,
    [property: JsonPropertyName("mimeType")] string? MimeType = null,
    [property: JsonPropertyName("hashes")] ImmutableDictionary<string, string>? Hashes = null);

/// <summary>
/// Version control information for the analyzed source.
/// </summary>
public sealed record SarifVersionControlDetails(
    [property: JsonPropertyName("repositoryUri")] string RepositoryUri,
    [property: JsonPropertyName("revisionId")] string? RevisionId = null,
    [property: JsonPropertyName("branch")] string? Branch = null);
|
||||
@@ -0,0 +1,393 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Scanner.SmartDiff.Output;
|
||||
|
||||
/// <summary>
/// Options for SARIF output generation.
/// </summary>
/// <remarks>
/// Init-only, so instances (including <see cref="Default"/>) are immutable and
/// safe to share. Note that material risk changes are always emitted; only the
/// secondary categories below are filterable.
/// </remarks>
public sealed class SarifOutputOptions
{
    /// <summary>
    /// Default options instance (all categories included, compact JSON).
    /// </summary>
    public static readonly SarifOutputOptions Default = new();

    /// <summary>
    /// Whether to include VEX candidates in output.
    /// </summary>
    public bool IncludeVexCandidates { get; init; } = true;

    /// <summary>
    /// Whether to include hardening regressions in output.
    /// </summary>
    public bool IncludeHardeningRegressions { get; init; } = true;

    /// <summary>
    /// Whether to include reachability changes in output.
    /// </summary>
    public bool IncludeReachabilityChanges { get; init; } = true;

    /// <summary>
    /// Whether to pretty-print JSON output. Defaults to compact output.
    /// </summary>
    public bool IndentedJson { get; init; } = false;
}
|
||||
|
||||
/// <summary>
/// Input for SARIF generation.
/// </summary>
/// <param name="ScannerVersion">Emitted as the SARIF tool driver version.</param>
/// <param name="ScanTime">Emitted as the invocation's startTimeUtc.</param>
/// <param name="BaseDigest">Digest of the base artifact. NOTE(review): not consumed by the generator visible in this file — confirm intended use.</param>
/// <param name="TargetDigest">Digest of the target artifact. NOTE(review): not consumed by the generator visible in this file — confirm intended use.</param>
/// <param name="MaterialChanges">Material risk changes; always included in the output.</param>
/// <param name="HardeningRegressions">Binary hardening regressions (optionally included).</param>
/// <param name="VexCandidates">Auto-generated VEX candidates (optionally included).</param>
/// <param name="ReachabilityChanges">Reachability flips (optionally included).</param>
/// <param name="VcsInfo">Optional VCS provenance; emitted as versionControlProvenance when present.</param>
public sealed record SmartDiffSarifInput(
    string ScannerVersion,
    DateTimeOffset ScanTime,
    string? BaseDigest,
    string? TargetDigest,
    IReadOnlyList<MaterialRiskChange> MaterialChanges,
    IReadOnlyList<HardeningRegression> HardeningRegressions,
    IReadOnlyList<VexCandidate> VexCandidates,
    IReadOnlyList<ReachabilityChange> ReachabilityChanges,
    VcsInfo? VcsInfo = null);

/// <summary>
/// VCS information for SARIF provenance.
/// </summary>
/// <param name="RepositoryUri">Repository the analyzed code came from.</param>
/// <param name="RevisionId">Optional commit/revision identifier.</param>
/// <param name="Branch">Optional branch name.</param>
public sealed record VcsInfo(
    string RepositoryUri,
    string? RevisionId,
    string? Branch);
|
||||
|
||||
/// <summary>
/// A material risk change finding.
/// </summary>
/// <param name="VulnId">Vulnerability identifier (e.g. CVE id).</param>
/// <param name="ComponentPurl">Package URL of the affected component.</param>
/// <param name="Direction">Whether the risk got worse, better, or just changed.</param>
/// <param name="Reason">Human-readable explanation included in the SARIF message.</param>
/// <param name="FilePath">Optional file path; when present it becomes the result's physical location.</param>
public sealed record MaterialRiskChange(
    string VulnId,
    string ComponentPurl,
    RiskDirection Direction,
    string Reason,
    string? FilePath = null);

/// <summary>
/// Direction of risk change.
/// </summary>
/// <remarks>
/// NOTE(review): a differently-shaped <c>RiskDirection</c> (with <c>Neutral</c>
/// instead of <c>Changed</c>) exists in StellaOps.Scanner.SmartDiff.Detection —
/// confirm the duplication is intentional to avoid confusion at call sites that
/// import both namespaces.
/// </remarks>
public enum RiskDirection
{
    /// <summary>Risk increased (worse).</summary>
    Increased,

    /// <summary>Risk decreased (better).</summary>
    Decreased,

    /// <summary>Risk status changed but severity unclear.</summary>
    Changed
}
|
||||
|
||||
/// <summary>
/// A hardening regression finding.
/// </summary>
/// <param name="BinaryPath">Path of the binary whose hardening changed; used as the result location.</param>
/// <param name="FlagName">Hardening flag name (e.g. RELRO/PIE-style flags — exact vocabulary defined by the analyzer).</param>
/// <param name="WasEnabled">Flag state in the previous scan.</param>
/// <param name="IsEnabled">Flag state in the current scan.</param>
/// <param name="ScoreImpact">Impact on the hardening score. NOTE(review): not consumed by the SARIF generator visible in this file — confirm intended use.</param>
public sealed record HardeningRegression(
    string BinaryPath,
    string FlagName,
    bool WasEnabled,
    bool IsEnabled,
    double ScoreImpact);

/// <summary>
/// A VEX candidate finding (auto-generated not_affected statement).
/// </summary>
/// <param name="VulnId">Vulnerability identifier.</param>
/// <param name="ComponentPurl">Package URL of the component.</param>
/// <param name="Justification">Why the component is believed not affected; included in the SARIF message.</param>
/// <param name="ImpactStatement">Optional impact statement. NOTE(review): not consumed by the SARIF generator visible in this file.</param>
public sealed record VexCandidate(
    string VulnId,
    string ComponentPurl,
    string Justification,
    string? ImpactStatement);

/// <summary>
/// A reachability status change.
/// </summary>
/// <param name="VulnId">Vulnerability identifier.</param>
/// <param name="ComponentPurl">Package URL of the component.</param>
/// <param name="WasReachable">Reachability in the previous scan.</param>
/// <param name="IsReachable">Reachability in the current scan.</param>
/// <param name="Evidence">Optional supporting evidence. NOTE(review): not consumed by the SARIF generator visible in this file.</param>
public sealed record ReachabilityChange(
    string VulnId,
    string ComponentPurl,
    bool WasReachable,
    bool IsReachable,
    string? Evidence);
|
||||
|
||||
/// <summary>
/// Generates SARIF 2.1.0 output for Smart-Diff findings.
/// Per Sprint 3500.4 - Smart-Diff Binary Analysis.
/// </summary>
public sealed class SarifOutputGenerator
{
    private const string SarifVersion = "2.1.0";
    private const string SchemaUri = "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json";
    private const string ToolName = "StellaOps.Scanner.SmartDiff";
    private const string ToolInfoUri = "https://stellaops.dev/docs/scanner/smart-diff";

    // System.Text.Json caches serialization metadata per JsonSerializerOptions
    // instance. Both variants are therefore created once and reused; previously
    // the indented variant was allocated fresh on every GenerateJson/WriteAsync
    // call, which defeated that cache.
    private static readonly JsonSerializerOptions SarifJsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false
    };

    private static readonly JsonSerializerOptions SarifIndentedJsonOptions = new(SarifJsonOptions)
    {
        WriteIndented = true
    };

    /// <summary>
    /// Generate a SARIF log from Smart-Diff input.
    /// </summary>
    /// <param name="input">Findings and provenance collected by the Smart-Diff run.</param>
    /// <param name="options">Output filters; <see cref="SarifOutputOptions.Default"/> when null.</param>
    /// <returns>A single-run SARIF 2.1.0 log.</returns>
    public SarifLog Generate(SmartDiffSarifInput input, SarifOutputOptions? options = null)
    {
        options ??= SarifOutputOptions.Default;

        var tool = CreateTool(input);
        var results = CreateResults(input, options);
        var invocation = CreateInvocation(input);
        var artifacts = CreateArtifacts(input);
        var vcsProvenance = CreateVcsProvenance(input);

        var run = new SarifRun(
            Tool: tool,
            Results: results,
            // Empty optional collections are emitted as null so WhenWritingNull
            // keeps them out of the JSON entirely.
            Invocations: [invocation],
            Artifacts: artifacts.Length > 0 ? artifacts : null,
            VersionControlProvenance: vcsProvenance);

        return new SarifLog(
            Version: SarifVersion,
            Schema: SchemaUri,
            Runs: [run]);
    }

    /// <summary>
    /// Generate SARIF JSON string.
    /// </summary>
    /// <param name="input">Findings and provenance collected by the Smart-Diff run.</param>
    /// <param name="options">Output filters and formatting; defaults when null.</param>
    public string GenerateJson(SmartDiffSarifInput input, SarifOutputOptions? options = null)
    {
        var log = Generate(input, options);
        return JsonSerializer.Serialize(log, SelectJsonOptions(options));
    }

    /// <summary>
    /// Write SARIF to a stream.
    /// </summary>
    /// <param name="input">Findings and provenance collected by the Smart-Diff run.</param>
    /// <param name="outputStream">Destination stream; left open after writing.</param>
    /// <param name="options">Output filters and formatting; defaults when null.</param>
    /// <param name="ct">Cancellation token for the serialization.</param>
    public async Task WriteAsync(
        SmartDiffSarifInput input,
        Stream outputStream,
        SarifOutputOptions? options = null,
        CancellationToken ct = default)
    {
        var log = Generate(input, options);
        // Library code: no need to resume on the caller's synchronization context.
        await JsonSerializer.SerializeAsync(outputStream, log, SelectJsonOptions(options), ct).ConfigureAwait(false);
    }

    /// <summary>
    /// Maps the IndentedJson flag onto one of the two cached options instances.
    /// </summary>
    private static JsonSerializerOptions SelectJsonOptions(SarifOutputOptions? options)
        => options?.IndentedJson == true ? SarifIndentedJsonOptions : SarifJsonOptions;

    /// <summary>
    /// Builds the tool section: Smart-Diff driver plus its rule set and the CWE taxonomy reference.
    /// </summary>
    private static SarifTool CreateTool(SmartDiffSarifInput input)
    {
        var rules = CreateRules();

        return new SarifTool(
            Driver: new SarifToolComponent(
                Name: ToolName,
                Version: input.ScannerVersion,
                InformationUri: ToolInfoUri,
                Rules: rules,
                SupportedTaxonomies: [
                    // Well-known GUID identifying the CWE taxonomy component.
                    new SarifToolComponentReference(
                        Name: "CWE",
                        Guid: "25F72D7E-8A92-459D-AD67-64853F788765")
                ]));
    }

    /// <summary>
    /// Declares the four Smart-Diff rules (SDIFF001-SDIFF004) with default severities.
    /// </summary>
    private static ImmutableArray<SarifReportingDescriptor> CreateRules()
    {
        return
        [
            new SarifReportingDescriptor(
                Id: "SDIFF001",
                Name: "MaterialRiskChange",
                ShortDescription: new SarifMessage("Material risk change detected"),
                FullDescription: new SarifMessage("A vulnerability finding has undergone a material risk state change between scans."),
                DefaultConfiguration: new SarifReportingConfiguration(Level: SarifLevel.Warning),
                HelpUri: $"{ToolInfoUri}/rules/SDIFF001"),

            new SarifReportingDescriptor(
                Id: "SDIFF002",
                Name: "HardeningRegression",
                ShortDescription: new SarifMessage("Binary hardening regression detected"),
                FullDescription: new SarifMessage("A binary has lost security hardening flags compared to the previous scan."),
                DefaultConfiguration: new SarifReportingConfiguration(Level: SarifLevel.Error),
                HelpUri: $"{ToolInfoUri}/rules/SDIFF002"),

            new SarifReportingDescriptor(
                Id: "SDIFF003",
                Name: "VexCandidateGenerated",
                ShortDescription: new SarifMessage("VEX candidate auto-generated"),
                FullDescription: new SarifMessage("A VEX 'not_affected' candidate was generated because vulnerable APIs are no longer present."),
                DefaultConfiguration: new SarifReportingConfiguration(Level: SarifLevel.Note),
                HelpUri: $"{ToolInfoUri}/rules/SDIFF003"),

            new SarifReportingDescriptor(
                Id: "SDIFF004",
                Name: "ReachabilityFlip",
                ShortDescription: new SarifMessage("Reachability status changed"),
                FullDescription: new SarifMessage("The reachability of a vulnerability has flipped between scans."),
                DefaultConfiguration: new SarifReportingConfiguration(Level: SarifLevel.Warning),
                HelpUri: $"{ToolInfoUri}/rules/SDIFF004")
        ];
    }

    /// <summary>
    /// Converts all finding categories into SARIF results. Material risk changes
    /// are always emitted; the other categories honor the options flags.
    /// </summary>
    private static ImmutableArray<SarifResult> CreateResults(SmartDiffSarifInput input, SarifOutputOptions options)
    {
        var results = new List<SarifResult>();

        // Material risk changes (not gated by an option)
        foreach (var change in input.MaterialChanges)
        {
            results.Add(CreateMaterialChangeResult(change));
        }

        // Hardening regressions
        if (options.IncludeHardeningRegressions)
        {
            foreach (var regression in input.HardeningRegressions)
            {
                results.Add(CreateHardeningRegressionResult(regression));
            }
        }

        // VEX candidates
        if (options.IncludeVexCandidates)
        {
            foreach (var candidate in input.VexCandidates)
            {
                results.Add(CreateVexCandidateResult(candidate));
            }
        }

        // Reachability changes
        if (options.IncludeReachabilityChanges)
        {
            foreach (var change in input.ReachabilityChanges)
            {
                results.Add(CreateReachabilityChangeResult(change));
            }
        }

        return [.. results];
    }

    /// <summary>
    /// SDIFF001: one result per material risk change, keyed by (vulnId, purl) fingerprints.
    /// </summary>
    private static SarifResult CreateMaterialChangeResult(MaterialRiskChange change)
    {
        // Only Increased maps to Warning; Decreased AND Changed both map to Note.
        // NOTE(review): confirm that 'Changed' (severity unclear) should not also
        // surface at Warning.
        var level = change.Direction == RiskDirection.Increased ? SarifLevel.Warning : SarifLevel.Note;
        var message = $"Material risk change for {change.VulnId} in {change.ComponentPurl}: {change.Reason}";

        var locations = change.FilePath is not null
            ? ImmutableArray.Create(new SarifLocation(
                PhysicalLocation: new SarifPhysicalLocation(
                    ArtifactLocation: new SarifArtifactLocation(Uri: change.FilePath))))
            : (ImmutableArray<SarifLocation>?)null;

        return new SarifResult(
            RuleId: "SDIFF001",
            Level: level,
            Message: new SarifMessage(message),
            Locations: locations,
            Fingerprints: ImmutableDictionary.CreateRange(new[]
            {
                KeyValuePair.Create("vulnId", change.VulnId),
                KeyValuePair.Create("purl", change.ComponentPurl)
            }));
    }

    /// <summary>
    /// SDIFF002: one Error-level result per hardening regression, located at the binary.
    /// </summary>
    private static SarifResult CreateHardeningRegressionResult(HardeningRegression regression)
    {
        var message = $"Hardening flag '{regression.FlagName}' was {(regression.WasEnabled ? "enabled" : "disabled")} " +
                      $"but is now {(regression.IsEnabled ? "enabled" : "disabled")} in {regression.BinaryPath}";

        return new SarifResult(
            RuleId: "SDIFF002",
            Level: SarifLevel.Error,
            Message: new SarifMessage(message),
            Locations: [new SarifLocation(
                PhysicalLocation: new SarifPhysicalLocation(
                    ArtifactLocation: new SarifArtifactLocation(Uri: regression.BinaryPath)))]);
    }

    /// <summary>
    /// SDIFF003: one Note-level result per auto-generated VEX not_affected candidate.
    /// </summary>
    private static SarifResult CreateVexCandidateResult(VexCandidate candidate)
    {
        var message = $"VEX not_affected candidate for {candidate.VulnId} in {candidate.ComponentPurl}: {candidate.Justification}";

        return new SarifResult(
            RuleId: "SDIFF003",
            Level: SarifLevel.Note,
            Message: new SarifMessage(message),
            Fingerprints: ImmutableDictionary.CreateRange(new[]
            {
                KeyValuePair.Create("vulnId", candidate.VulnId),
                KeyValuePair.Create("purl", candidate.ComponentPurl)
            }));
    }

    /// <summary>
    /// SDIFF004: one Warning-level result per reachability flip.
    /// </summary>
    private static SarifResult CreateReachabilityChangeResult(ReachabilityChange change)
    {
        var direction = change.IsReachable ? "became reachable" : "became unreachable";
        var message = $"Vulnerability {change.VulnId} in {change.ComponentPurl} {direction}";

        return new SarifResult(
            RuleId: "SDIFF004",
            Level: SarifLevel.Warning,
            Message: new SarifMessage(message),
            Fingerprints: ImmutableDictionary.CreateRange(new[]
            {
                KeyValuePair.Create("vulnId", change.VulnId),
                KeyValuePair.Create("purl", change.ComponentPurl)
            }));
    }

    /// <summary>
    /// Builds the invocation record: scan time in, generation time out.
    /// </summary>
    private static SarifInvocation CreateInvocation(SmartDiffSarifInput input)
    {
        // NOTE(review): the wall-clock read below makes output non-deterministic
        // across identical inputs; consider injecting TimeProvider if reproducible
        // SARIF output is a requirement.
        return new SarifInvocation(
            ExecutionSuccessful: true,
            StartTimeUtc: input.ScanTime,
            EndTimeUtc: DateTimeOffset.UtcNow);
    }

    /// <summary>
    /// Collects the unique file paths referenced by results into the run's artifacts list.
    /// </summary>
    private static ImmutableArray<SarifArtifact> CreateArtifacts(SmartDiffSarifInput input)
    {
        var artifacts = new List<SarifArtifact>();

        // Collect unique file paths from results
        var paths = new HashSet<string>();

        foreach (var change in input.MaterialChanges)
        {
            if (change.FilePath is not null)
            {
                paths.Add(change.FilePath);
            }
        }

        foreach (var regression in input.HardeningRegressions)
        {
            paths.Add(regression.BinaryPath);
        }

        foreach (var path in paths)
        {
            artifacts.Add(new SarifArtifact(
                Location: new SarifArtifactLocation(Uri: path)));
        }

        return [.. artifacts];
    }

    /// <summary>
    /// Emits VCS provenance when the input carries it; null keeps the property out of the JSON.
    /// </summary>
    private static ImmutableArray<SarifVersionControlDetails>? CreateVcsProvenance(SmartDiffSarifInput input)
    {
        if (input.VcsInfo is null)
        {
            return null;
        }

        return [new SarifVersionControlDetails(
            RepositoryUri: input.VcsInfo.RepositoryUri,
            RevisionId: input.VcsInfo.RevisionId,
            Branch: input.VcsInfo.Branch)];
    }
}
|
||||
@@ -202,6 +202,31 @@ public sealed class ClassificationHistoryRepository : RepositoryBase<ScannerData
|
||||
cancellationToken);
|
||||
}
|
||||
|
||||
    /// <summary>
    /// Gets all classification changes recorded for one scan execution of a tenant,
    /// ordered deterministically by (vuln_id, package_purl).
    /// </summary>
    /// <param name="tenantId">Tenant owning the rows; always constrained in the WHERE clause.</param>
    /// <param name="executionId">The scan execution whose changes are returned.</param>
    /// <param name="cancellationToken">Token to cancel the query.</param>
    public Task<IReadOnlyList<ClassificationChange>> GetByExecutionAsync(
        Guid tenantId,
        Guid executionId,
        CancellationToken cancellationToken = default)
    {
        // {Table} is interpolated into the SQL text; all user-supplied values go
        // through parameters below. NOTE(review): confirm Table is a repository
        // constant and never user-controlled, otherwise this is an injection risk.
        var sql = $"""
            SELECT id, artifact_digest, vuln_id, package_purl, tenant_id, manifest_id, execution_id,
                   previous_status, new_status, is_fn_transition, cause, cause_detail, changed_at
            FROM {Table}
            WHERE tenant_id = @tenant_id AND execution_id = @execution_id
            ORDER BY vuln_id, package_purl
            """;

        return QueryAsync(
            Tenant,
            sql,
            cmd =>
            {
                AddParameter(cmd, "tenant_id", tenantId);
                AddParameter(cmd, "execution_id", executionId);
            },
            MapChange,
            cancellationToken);
    }
|
||||
|
||||
private void AddChangeParameters(NpgsqlCommand cmd, ClassificationChange change)
|
||||
{
|
||||
AddParameter(cmd, "artifact_digest", change.ArtifactDigest);
|
||||
|
||||
@@ -56,6 +56,15 @@ public interface IClassificationHistoryRepository
|
||||
Guid tenantId,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
    /// <summary>
    /// Gets classification changes for a specific execution.
    /// SPRINT_3404_0001_0001 - Added for delta computation.
    /// </summary>
    /// <param name="tenantId">Tenant owning the changes.</param>
    /// <param name="executionId">The scan execution to fetch changes for.</param>
    /// <param name="cancellationToken">Token to cancel the query.</param>
    /// <returns>The execution's classification changes; empty when none were recorded.</returns>
    Task<IReadOnlyList<ClassificationChange>> GetByExecutionAsync(
        Guid tenantId,
        Guid executionId,
        CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Refreshes the FN-Drift statistics materialized view.
|
||||
/// </summary>
|
||||
|
||||
@@ -0,0 +1,238 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Storage.Models;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Services;
|
||||
|
||||
/// <summary>
/// Tracks classification changes for FN-Drift analysis.
/// SPRINT_3404_0001_0001 - Task #6
/// </summary>
public interface IClassificationChangeTracker
{
    /// <summary>
    /// Records a classification change for drift tracking.
    /// Implementations may skip no-op changes (previous status equals new status).
    /// </summary>
    Task TrackChangeAsync(ClassificationChange change, CancellationToken cancellationToken = default);

    /// <summary>
    /// Records multiple classification changes in batch.
    /// </summary>
    Task TrackChangesAsync(IEnumerable<ClassificationChange> changes, CancellationToken cancellationToken = default);

    /// <summary>
    /// Computes the classification delta between two scan executions.
    /// </summary>
    /// <param name="tenantId">Tenant owning both executions.</param>
    /// <param name="artifactDigest">Digest of the artifact the two scans analyzed.</param>
    /// <param name="previousExecutionId">Baseline execution.</param>
    /// <param name="currentExecutionId">Execution to compare against the baseline.</param>
    /// <param name="cancellationToken">Token to cancel the computation.</param>
    /// <returns>The changes that differ between the two executions.</returns>
    Task<IReadOnlyList<ClassificationChange>> ComputeDeltaAsync(
        Guid tenantId,
        string artifactDigest,
        Guid previousExecutionId,
        Guid currentExecutionId,
        CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Implementation of classification change tracking.
/// Persists real status transitions through <see cref="IClassificationHistoryRepository"/>
/// and logs false-negative (FN) transitions at warning level so drift can be monitored.
/// </summary>
public sealed class ClassificationChangeTracker : IClassificationChangeTracker
{
    private readonly IClassificationHistoryRepository _repository;
    private readonly ILogger<ClassificationChangeTracker> _logger;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates a tracker.
    /// </summary>
    /// <param name="repository">Destination store for classification changes.</param>
    /// <param name="logger">Logger for drift diagnostics.</param>
    /// <param name="timeProvider">Clock used to stamp computed deltas; defaults to the system clock.</param>
    /// <exception cref="ArgumentNullException">If <paramref name="repository"/> or <paramref name="logger"/> is null.</exception>
    public ClassificationChangeTracker(
        IClassificationHistoryRepository repository,
        ILogger<ClassificationChangeTracker> logger,
        TimeProvider? timeProvider = null)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public async Task TrackChangeAsync(ClassificationChange change, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(change);

        // Only track actual changes - a no-op transition carries no drift signal.
        if (change.PreviousStatus == change.NewStatus)
        {
            _logger.LogDebug(
                "Skipping no-op classification change for {VulnId} on {Artifact}",
                change.VulnId,
                TruncateDigest(change.ArtifactDigest));
            return;
        }

        await _repository.InsertAsync(change, cancellationToken).ConfigureAwait(false);

        if (change.IsFnTransition)
        {
            // FN transitions are the signal this subsystem exists for; surface them loudly.
            _logger.LogWarning(
                "FN-Drift detected: {VulnId} on {Artifact} changed from {Previous} to {New} (cause: {Cause})",
                change.VulnId,
                TruncateDigest(change.ArtifactDigest),
                change.PreviousStatus,
                change.NewStatus,
                change.Cause);
        }
        else
        {
            _logger.LogInformation(
                "Classification change: {VulnId} on {Artifact}: {Previous} -> {New}",
                change.VulnId,
                TruncateDigest(change.ArtifactDigest),
                change.PreviousStatus,
                change.NewStatus);
        }
    }

    /// <inheritdoc />
    public async Task TrackChangesAsync(
        IEnumerable<ClassificationChange> changes,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(changes);

        // Drop no-op transitions before hitting storage.
        var changeList = changes
            .Where(c => c.PreviousStatus != c.NewStatus)
            .ToList();

        if (changeList.Count == 0)
        {
            return;
        }

        await _repository.InsertBatchAsync(changeList, cancellationToken).ConfigureAwait(false);

        var fnCount = changeList.Count(c => c.IsFnTransition);
        if (fnCount > 0)
        {
            _logger.LogWarning(
                "FN-Drift batch: {FnCount} false-negative transitions out of {Total} changes",
                fnCount,
                changeList.Count);
        }
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<ClassificationChange>> ComputeDeltaAsync(
        Guid tenantId,
        string artifactDigest,
        Guid previousExecutionId,
        Guid currentExecutionId,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrEmpty(artifactDigest);

        // Get classifications from both executions.
        var previousClassifications = await _repository.GetByExecutionAsync(
            tenantId, previousExecutionId, cancellationToken).ConfigureAwait(false);
        var currentClassifications = await _repository.GetByExecutionAsync(
            tenantId, currentExecutionId, cancellationToken).ConfigureAwait(false);

        // Index by vuln+package. Group-then-last (instead of a bare ToDictionary) so a
        // duplicate (VulnId, PackagePurl) row within one execution cannot throw
        // ArgumentException; the last row wins.
        var previousByKey = previousClassifications
            .Where(c => c.ArtifactDigest == artifactDigest)
            .GroupBy(c => (c.VulnId, c.PackagePurl))
            .ToDictionary(g => g.Key, g => g.Last());

        var currentByKey = currentClassifications
            .Where(c => c.ArtifactDigest == artifactDigest)
            .GroupBy(c => (c.VulnId, c.PackagePurl))
            .ToDictionary(g => g.Key, g => g.Last());

        var changes = new List<ClassificationChange>();
        var now = _timeProvider.GetUtcNow();

        // Find status changes between the two executions.
        // NOTE(review): findings present in the previous execution but absent from the
        // current one are never emitted here; if a "disappeared finding" should count
        // toward FN-Drift, a dedicated removal status would be needed - confirm intent.
        foreach (var (key, current) in currentByKey)
        {
            if (previousByKey.TryGetValue(key, out var previous))
            {
                if (previous.NewStatus != current.NewStatus)
                {
                    changes.Add(new ClassificationChange
                    {
                        ArtifactDigest = artifactDigest,
                        VulnId = key.VulnId,
                        PackagePurl = key.PackagePurl,
                        TenantId = tenantId,
                        ManifestId = current.ManifestId,
                        ExecutionId = currentExecutionId,
                        PreviousStatus = previous.NewStatus,
                        NewStatus = current.NewStatus,
                        Cause = DetermineCause(previous, current),
                        ChangedAt = now,
                    });
                }
            }
            else
            {
                // New finding: no prior classification for this vuln+package pair.
                changes.Add(new ClassificationChange
                {
                    ArtifactDigest = artifactDigest,
                    VulnId = key.VulnId,
                    PackagePurl = key.PackagePurl,
                    TenantId = tenantId,
                    ManifestId = current.ManifestId,
                    ExecutionId = currentExecutionId,
                    PreviousStatus = ClassificationStatus.New,
                    NewStatus = current.NewStatus,
                    Cause = DriftCause.FeedDelta,
                    ChangedAt = now,
                });
            }
        }

        return changes;
    }

    /// <summary>
    /// Heuristically determine the cause of drift based on change metadata.
    /// Fingerprints are checked in priority order: feed, rule, VEX lattice, reachability.
    /// </summary>
    private static DriftCause DetermineCause(ClassificationChange previous, ClassificationChange current)
    {
        // Check cause detail for hints; a missing detail bag is treated as empty.
        var prevDetail = previous.CauseDetail ?? new Dictionary<string, string>();
        var currDetail = current.CauseDetail ?? new Dictionary<string, string>();

        // Feed version change
        if (prevDetail.TryGetValue("feedVersion", out var prevFeed) &&
            currDetail.TryGetValue("feedVersion", out var currFeed) &&
            prevFeed != currFeed)
        {
            return DriftCause.FeedDelta;
        }

        // Policy rule change
        if (prevDetail.TryGetValue("ruleHash", out var prevRule) &&
            currDetail.TryGetValue("ruleHash", out var currRule) &&
            prevRule != currRule)
        {
            return DriftCause.RuleDelta;
        }

        // VEX lattice change
        if (prevDetail.TryGetValue("vexHash", out var prevVex) &&
            currDetail.TryGetValue("vexHash", out var currVex) &&
            prevVex != currVex)
        {
            return DriftCause.LatticeDelta;
        }

        // Reachability change
        if (prevDetail.TryGetValue("reachable", out var prevReach) &&
            currDetail.TryGetValue("reachable", out var currReach) &&
            prevReach != currReach)
        {
            return DriftCause.ReachabilityDelta;
        }

        // Default to feed delta (most common cause in practice).
        return DriftCause.FeedDelta;
    }

    /// <summary>
    /// Shortens a digest to 16 characters for log readability.
    /// </summary>
    private static string TruncateDigest(string digest)
    {
        const int maxLen = 16;
        return digest.Length > maxLen ? digest[..maxLen] + "..." : digest;
    }
}
|
||||
@@ -0,0 +1,199 @@
|
||||
using System.Diagnostics.Metrics;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Storage.Repositories;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Services;
|
||||
|
||||
/// <summary>
/// Prometheus metrics exporter for FN-Drift tracking.
/// SPRINT_3404_0001_0001 - Task #9
/// Publishes observable gauges refreshed from the classification history store on a fixed
/// interval, plus per-event counters incremented via <see cref="RecordClassificationChange"/>.
/// </summary>
public sealed class FnDriftMetricsExporter : BackgroundService
{
    public const string MeterName = "StellaOps.Scanner.FnDrift";

    private readonly Meter _meter;
    private readonly IClassificationHistoryRepository _repository;
    private readonly ILogger<FnDriftMetricsExporter> _logger;
    private readonly TimeProvider _timeProvider;
    private readonly TimeSpan _refreshInterval;

    // Observable gauges (updated periodically). The fields keep the instrument handles
    // reachable for the exporter's lifetime.
    private readonly ObservableGauge<double> _fnDriftPercentGauge;
    private readonly ObservableGauge<long> _fnTransitionsGauge;
    private readonly ObservableGauge<long> _totalEvaluatedGauge;
    private readonly ObservableGauge<long> _feedDeltaCountGauge;
    private readonly ObservableGauge<long> _ruleDeltaCountGauge;
    private readonly ObservableGauge<long> _latticeDeltaCountGauge;
    private readonly ObservableGauge<long> _reachabilityDeltaCountGauge;
    private readonly ObservableGauge<long> _engineDeltaCountGauge;

    // Counters (incremented on each change)
    private readonly Counter<long> _classificationChangesCounter;
    private readonly Counter<long> _fnTransitionsCounter;

    // Current state for observable gauges; volatile so gauge callbacks running on metric
    // collection threads always observe the most recently published snapshot.
    private volatile FnDriftSnapshot _currentSnapshot = new();

    /// <summary>
    /// Creates the exporter and registers all instruments on a new meter.
    /// </summary>
    /// <param name="repository">Source of drift statistics.</param>
    /// <param name="logger">Logger for refresh diagnostics.</param>
    /// <param name="timeProvider">Clock used for the 30-day window and delay; defaults to the system clock.</param>
    /// <param name="refreshInterval">How often gauges are refreshed; defaults to one minute.</param>
    /// <exception cref="ArgumentNullException">If <paramref name="repository"/> or <paramref name="logger"/> is null.</exception>
    public FnDriftMetricsExporter(
        IClassificationHistoryRepository repository,
        ILogger<FnDriftMetricsExporter> logger,
        TimeProvider? timeProvider = null,
        TimeSpan? refreshInterval = null)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
        _refreshInterval = refreshInterval ?? TimeSpan.FromMinutes(1);

        _meter = new Meter(MeterName);

        // Observable gauges - read from the snapshot published by the refresh loop.
        _fnDriftPercentGauge = _meter.CreateObservableGauge(
            "scanner.fn_drift.percent",
            () => _currentSnapshot.FnDriftPercent,
            unit: "%",
            description: "30-day rolling FN-Drift percentage");

        _fnTransitionsGauge = _meter.CreateObservableGauge(
            "scanner.fn_drift.transitions_30d",
            () => _currentSnapshot.FnTransitions,
            description: "FN transitions in last 30 days");

        _totalEvaluatedGauge = _meter.CreateObservableGauge(
            "scanner.fn_drift.evaluated_30d",
            () => _currentSnapshot.TotalEvaluated,
            description: "Total findings evaluated in last 30 days");

        _feedDeltaCountGauge = _meter.CreateObservableGauge(
            "scanner.fn_drift.cause.feed_delta",
            () => _currentSnapshot.FeedDeltaCount,
            description: "FN transitions caused by feed updates");

        _ruleDeltaCountGauge = _meter.CreateObservableGauge(
            "scanner.fn_drift.cause.rule_delta",
            () => _currentSnapshot.RuleDeltaCount,
            description: "FN transitions caused by rule changes");

        _latticeDeltaCountGauge = _meter.CreateObservableGauge(
            "scanner.fn_drift.cause.lattice_delta",
            () => _currentSnapshot.LatticeDeltaCount,
            description: "FN transitions caused by VEX lattice changes");

        _reachabilityDeltaCountGauge = _meter.CreateObservableGauge(
            "scanner.fn_drift.cause.reachability_delta",
            () => _currentSnapshot.ReachabilityDeltaCount,
            description: "FN transitions caused by reachability changes");

        _engineDeltaCountGauge = _meter.CreateObservableGauge(
            "scanner.fn_drift.cause.engine",
            () => _currentSnapshot.EngineDeltaCount,
            description: "FN transitions caused by engine changes (should be ~0)");

        // Counters - incremented per event
        _classificationChangesCounter = _meter.CreateCounter<long>(
            "scanner.classification_changes_total",
            description: "Total classification status changes");

        _fnTransitionsCounter = _meter.CreateCounter<long>(
            "scanner.fn_transitions_total",
            description: "Total false-negative transitions");
    }

    /// <summary>
    /// Records a classification change for metrics.
    /// </summary>
    /// <param name="isFnTransition">True when the change is a false-negative transition.</param>
    /// <param name="cause">Drift cause attached to the counter samples as a tag.</param>
    public void RecordClassificationChange(bool isFnTransition, string cause)
    {
        _classificationChangesCounter.Add(1, new KeyValuePair<string, object?>("cause", cause));

        if (isFnTransition)
        {
            _fnTransitionsCounter.Add(1, new KeyValuePair<string, object?>("cause", cause));
        }
    }

    /// <inheritdoc />
    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        _logger.LogInformation("FN-Drift metrics exporter starting with {Interval} refresh interval",
            _refreshInterval);

        while (!stoppingToken.IsCancellationRequested)
        {
            try
            {
                await RefreshMetricsAsync(stoppingToken).ConfigureAwait(false);
            }
            catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested)
            {
                break;
            }
            catch (Exception ex)
            {
                // Transient storage failures must not kill the exporter; retry next tick.
                _logger.LogWarning(ex, "Failed to refresh FN-Drift metrics, will retry");
            }

            try
            {
                await Task.Delay(_refreshInterval, _timeProvider, stoppingToken).ConfigureAwait(false);
            }
            catch (OperationCanceledException)
            {
                // Shutdown requested during the delay; exit cleanly instead of letting the
                // cancellation exception escape ExecuteAsync and skip the stop log below.
                break;
            }
        }

        _logger.LogInformation("FN-Drift metrics exporter stopped");
    }

    /// <summary>
    /// Disposes the meter so its instruments are unregistered from metric listeners.
    /// </summary>
    public override void Dispose()
    {
        _meter.Dispose();
        base.Dispose();
    }

    /// <summary>
    /// Recomputes the 30-day snapshot from the repository and publishes it for the gauges.
    /// </summary>
    private async Task RefreshMetricsAsync(CancellationToken cancellationToken)
    {
        // Get 30-day summary for all tenants (aggregated).
        // In production, this would iterate over active tenants.
        var now = _timeProvider.GetUtcNow();
        var fromDate = DateOnly.FromDateTime(now.AddDays(-30).DateTime);
        var toDate = DateOnly.FromDateTime(now.DateTime);

        var stats = await _repository.GetDriftStatsAsync(
            Guid.Empty, // Aggregate across tenants
            fromDate,
            toDate,
            cancellationToken).ConfigureAwait(false);

        // Aggregate stats into a fresh snapshot; the live one is never mutated in place.
        var snapshot = new FnDriftSnapshot();

        foreach (var stat in stats)
        {
            snapshot.FnTransitions += stat.FnCount;
            snapshot.TotalEvaluated += stat.TotalReclassified;
            snapshot.FeedDeltaCount += stat.FeedDeltaCount;
            snapshot.RuleDeltaCount += stat.RuleDeltaCount;
            snapshot.LatticeDeltaCount += stat.LatticeDeltaCount;
            snapshot.ReachabilityDeltaCount += stat.ReachabilityDeltaCount;
            snapshot.EngineDeltaCount += stat.EngineCount;
        }

        // Guard against division by zero when nothing was evaluated in the window.
        if (snapshot.TotalEvaluated > 0)
        {
            snapshot.FnDriftPercent = (double)snapshot.FnTransitions / snapshot.TotalEvaluated * 100;
        }

        _currentSnapshot = snapshot;

        _logger.LogDebug(
            "FN-Drift metrics refreshed: {FnPercent:F2}% ({FnCount}/{Total})",
            snapshot.FnDriftPercent,
            snapshot.FnTransitions,
            snapshot.TotalEvaluated);
    }

    /// <summary>
    /// Snapshot of FN-Drift metrics for observable gauges.
    /// </summary>
    private sealed class FnDriftSnapshot
    {
        public double FnDriftPercent { get; set; }
        public long FnTransitions { get; set; }
        public long TotalEvaluated { get; set; }
        public long FeedDeltaCount { get; set; }
        public long RuleDeltaCount { get; set; }
        public long LatticeDeltaCount { get; set; }
        public long ReachabilityDeltaCount { get; set; }
        public long EngineDeltaCount { get; set; }
    }
}
|
||||
@@ -0,0 +1,237 @@
|
||||
using StellaOps.Scanner.Storage.Models;
|
||||
using StellaOps.Scanner.Storage.Services;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Moq;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Storage.Tests;
|
||||
|
||||
/// <summary>
/// Unit tests for ClassificationChangeTracker.
/// SPRINT_3404_0001_0001 - Task #11, #12
/// </summary>
public sealed class ClassificationChangeTrackerTests
{
    private readonly Mock<IClassificationHistoryRepository> _repository;
    private readonly ClassificationChangeTracker _sut;
    private readonly FakeTimeProvider _clock;

    public ClassificationChangeTrackerTests()
    {
        _repository = new Mock<IClassificationHistoryRepository>();
        _clock = new FakeTimeProvider(DateTimeOffset.UtcNow);
        _sut = new ClassificationChangeTracker(
            _repository.Object,
            NullLogger<ClassificationChangeTracker>.Instance,
            _clock);
    }

    [Fact]
    public async Task TrackChangeAsync_ActualChange_InsertsToRepository()
    {
        // Arrange: a genuine status transition.
        var transition = MakeChange(ClassificationStatus.Unknown, ClassificationStatus.Affected);

        // Act
        await _sut.TrackChangeAsync(transition);

        // Assert: the exact instance was written exactly once.
        _repository.Verify(r => r.InsertAsync(transition, It.IsAny<CancellationToken>()), Times.Once);
    }

    [Fact]
    public async Task TrackChangeAsync_NoOpChange_SkipsInsert()
    {
        // Arrange: previous and new status are identical.
        var noOp = MakeChange(ClassificationStatus.Affected, ClassificationStatus.Affected);

        // Act
        await _sut.TrackChangeAsync(noOp);

        // Assert: nothing reached storage.
        _repository.Verify(
            r => r.InsertAsync(It.IsAny<ClassificationChange>(), It.IsAny<CancellationToken>()),
            Times.Never);
    }

    [Fact]
    public async Task TrackChangesAsync_FiltersNoOpChanges()
    {
        // Arrange: two real transitions surrounding a single no-op.
        var batch = new List<ClassificationChange>
        {
            MakeChange(ClassificationStatus.Unknown, ClassificationStatus.Affected),
            MakeChange(ClassificationStatus.Affected, ClassificationStatus.Affected), // no-op
            MakeChange(ClassificationStatus.Affected, ClassificationStatus.Fixed),
        };

        // Act
        await _sut.TrackChangesAsync(batch);

        // Assert: only the two real transitions were persisted.
        _repository.Verify(
            r => r.InsertBatchAsync(
                It.Is<IEnumerable<ClassificationChange>>(c => c.Count() == 2),
                It.IsAny<CancellationToken>()),
            Times.Once);
    }

    [Fact]
    public async Task TrackChangesAsync_EmptyAfterFilter_DoesNotInsert()
    {
        // Arrange: every entry is a no-op.
        var batch = new List<ClassificationChange>
        {
            MakeChange(ClassificationStatus.Affected, ClassificationStatus.Affected),
            MakeChange(ClassificationStatus.Unknown, ClassificationStatus.Unknown),
        };

        // Act
        await _sut.TrackChangesAsync(batch);

        // Assert: the batch insert was never attempted.
        _repository.Verify(
            r => r.InsertBatchAsync(It.IsAny<IEnumerable<ClassificationChange>>(), It.IsAny<CancellationToken>()),
            Times.Never);
    }

    [Fact]
    public void IsFnTransition_UnknownToAffected_ReturnsTrue()
    {
        var transition = MakeChange(ClassificationStatus.Unknown, ClassificationStatus.Affected);

        Assert.True(transition.IsFnTransition);
    }

    [Fact]
    public void IsFnTransition_UnaffectedToAffected_ReturnsTrue()
    {
        var transition = MakeChange(ClassificationStatus.Unaffected, ClassificationStatus.Affected);

        Assert.True(transition.IsFnTransition);
    }

    [Fact]
    public void IsFnTransition_AffectedToFixed_ReturnsFalse()
    {
        var transition = MakeChange(ClassificationStatus.Affected, ClassificationStatus.Fixed);

        Assert.False(transition.IsFnTransition);
    }

    [Fact]
    public void IsFnTransition_NewToAffected_ReturnsFalse()
    {
        // A brand-new finding is not a reclassification.
        var transition = MakeChange(ClassificationStatus.New, ClassificationStatus.Affected);

        Assert.False(transition.IsFnTransition);
    }

    [Fact]
    public async Task ComputeDeltaAsync_NewFinding_RecordsAsNewStatus()
    {
        // Arrange: the previous run saw nothing, the current run has one finding.
        var tenant = Guid.NewGuid();
        var digest = "sha256:abc123";
        var previousRun = Guid.NewGuid();
        var currentRun = Guid.NewGuid();

        _repository
            .Setup(r => r.GetByExecutionAsync(tenant, previousRun, It.IsAny<CancellationToken>()))
            .ReturnsAsync(Array.Empty<ClassificationChange>());

        _repository
            .Setup(r => r.GetByExecutionAsync(tenant, currentRun, It.IsAny<CancellationToken>()))
            .ReturnsAsync(new[]
            {
                MakeChange(ClassificationStatus.New, ClassificationStatus.Affected, digest, "CVE-2024-0001"),
            });

        // Act
        var delta = await _sut.ComputeDeltaAsync(tenant, digest, previousRun, currentRun);

        // Assert: exactly one change reported as New -> Affected.
        var change = Assert.Single(delta);
        Assert.Equal(ClassificationStatus.New, change.PreviousStatus);
        Assert.Equal(ClassificationStatus.Affected, change.NewStatus);
    }

    [Fact]
    public async Task ComputeDeltaAsync_StatusChange_RecordsDelta()
    {
        // Arrange: the same finding moves Unknown -> Affected between runs.
        var tenant = Guid.NewGuid();
        var digest = "sha256:abc123";
        var previousRun = Guid.NewGuid();
        var currentRun = Guid.NewGuid();

        _repository
            .Setup(r => r.GetByExecutionAsync(tenant, previousRun, It.IsAny<CancellationToken>()))
            .ReturnsAsync(new[]
            {
                MakeChange(ClassificationStatus.New, ClassificationStatus.Unknown, digest, "CVE-2024-0001"),
            });

        _repository
            .Setup(r => r.GetByExecutionAsync(tenant, currentRun, It.IsAny<CancellationToken>()))
            .ReturnsAsync(new[]
            {
                MakeChange(ClassificationStatus.Unknown, ClassificationStatus.Affected, digest, "CVE-2024-0001"),
            });

        // Act
        var delta = await _sut.ComputeDeltaAsync(tenant, digest, previousRun, currentRun);

        // Assert: exactly one change reported as Unknown -> Affected.
        var change = Assert.Single(delta);
        Assert.Equal(ClassificationStatus.Unknown, change.PreviousStatus);
        Assert.Equal(ClassificationStatus.Affected, change.NewStatus);
    }

    /// <summary>
    /// Builds a change record with fixed defaults for the fields the tests do not vary.
    /// </summary>
    private static ClassificationChange MakeChange(
        ClassificationStatus from,
        ClassificationStatus to,
        string artifact = "sha256:test",
        string vulnId = "CVE-2024-0001")
    {
        return new ClassificationChange
        {
            ArtifactDigest = artifact,
            VulnId = vulnId,
            PackagePurl = "pkg:npm/test@1.0.0",
            TenantId = Guid.NewGuid(),
            ManifestId = Guid.NewGuid(),
            ExecutionId = Guid.NewGuid(),
            PreviousStatus = from,
            NewStatus = to,
            Cause = DriftCause.FeedDelta,
        };
    }
}
|
||||
|
||||
/// <summary>
/// Deterministic <see cref="TimeProvider"/> for tests: reports a fixed instant that only
/// moves forward when <see cref="Advance"/> is called.
/// </summary>
internal sealed class FakeTimeProvider : TimeProvider
{
    private DateTimeOffset _utcNow;

    public FakeTimeProvider(DateTimeOffset now)
    {
        _utcNow = now;
    }

    /// <summary>Returns the currently configured instant.</summary>
    public override DateTimeOffset GetUtcNow() => _utcNow;

    /// <summary>Moves the fake clock forward (or backward, for a negative span).</summary>
    public void Advance(TimeSpan duration)
    {
        _utcNow += duration;
    }
}
|
||||
|
||||
/// <summary>
/// Mock interface for testing.
/// NOTE(review): this test-local declaration shadows the repository contract the tracker
/// consumes from StellaOps.Scanner.Storage.Repositories. If both are visible here,
/// Mock&lt;IClassificationHistoryRepository&gt; in the tests would bind to this type while
/// ClassificationChangeTracker expects the production one - confirm the duplicate is
/// intentional and kept in sync (e.g. GetDriftStatsAsync, used by FnDriftMetricsExporter,
/// is not declared here).
/// </summary>
public interface IClassificationHistoryRepository
{
    // Persists a single classification change row.
    Task InsertAsync(ClassificationChange change, CancellationToken cancellationToken = default);

    // Persists several classification change rows in one round trip.
    Task InsertBatchAsync(IEnumerable<ClassificationChange> changes, CancellationToken cancellationToken = default);

    // Returns the classifications recorded for one execution of a tenant's scan.
    Task<IReadOnlyList<ClassificationChange>> GetByExecutionAsync(Guid tenantId, Guid executionId, CancellationToken cancellationToken = default);
}
|
||||
Reference in New Issue
Block a user