feat: add security sink detection patterns for JavaScript/TypeScript

- Introduced `sink-detect.js` with various security sink detection patterns categorized by type (e.g., command injection, SQL injection, file operations).
- Implemented functions to build a lookup map for fast sink detection and to match sink calls against known patterns.
- Added `package-lock.json` for dependency management.
This commit is contained in:
StellaOps Bot
2025-12-22 23:21:21 +02:00
parent 3ba7157b00
commit 5146204f1b
529 changed files with 73579 additions and 5985 deletions

View File

@@ -0,0 +1,258 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Benchmark.Claims;
/// <summary>
/// Index of verifiable competitive claims with evidence links.
/// Persisted as camelCase JSON with enum values written as names so the
/// file stays human-readable and hand-editable.
/// </summary>
public sealed record ClaimsIndex
{
    // Shared serializer settings for Load/Save. JsonStringEnumConverter writes
    // ClaimCategory/ClaimStatus as their names instead of opaque integers; the
    // converter still accepts integers on read, so files written by earlier
    // versions keep deserializing.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        Converters = { new JsonStringEnumConverter() }
    };

    /// <summary>
    /// Version of the claims index format.
    /// </summary>
    public required string Version { get; init; }

    /// <summary>
    /// When the claims were last verified.
    /// </summary>
    public required DateTimeOffset LastVerified { get; init; }

    /// <summary>
    /// The list of claims.
    /// </summary>
    public required IReadOnlyList<CompetitiveClaim> Claims { get; init; }

    /// <summary>
    /// Loads a claims index from a JSON file.
    /// </summary>
    /// <param name="path">Path of the JSON file to read.</param>
    /// <param name="ct">Token to cancel the read.</param>
    /// <returns>The deserialized index, or null when the file contains JSON null.</returns>
    public static async Task<ClaimsIndex?> LoadAsync(string path, CancellationToken ct = default)
    {
        await using var stream = File.OpenRead(path);
        return await JsonSerializer.DeserializeAsync<ClaimsIndex>(stream, JsonOptions, ct);
    }

    /// <summary>
    /// Saves the claims index to a JSON file, overwriting any existing file at <paramref name="path"/>.
    /// </summary>
    public async Task SaveAsync(string path, CancellationToken ct = default)
    {
        await using var stream = File.Create(path);
        await JsonSerializer.SerializeAsync(stream, this, JsonOptions, ct);
    }
}
/// <summary>A single competitive claim together with the evidence backing it.</summary>
public sealed record CompetitiveClaim
{
    /// <summary>Unique claim identifier, e.g. REACH-001.</summary>
    public required string ClaimId { get; init; }

    /// <summary>Which category this claim belongs to.</summary>
    public required ClaimCategory Category { get; init; }

    /// <summary>The claim statement itself.</summary>
    public required string Claim { get; init; }

    /// <summary>Filesystem path to the evidence file or data set.</summary>
    public required string EvidencePath { get; init; }

    /// <summary>Command that independently verifies the claim.</summary>
    public required string VerificationCommand { get; init; }

    /// <summary>Current verification status.</summary>
    public required ClaimStatus Status { get; init; }

    /// <summary>Specific metric value that supports the claim, if any.</summary>
    public string? MetricValue { get; init; }

    /// <summary>Comparison baseline, e.g. "vs Trivy 0.50.1".</summary>
    public string? Baseline { get; init; }

    /// <summary>Timestamp of the most recent verification, if any.</summary>
    public DateTimeOffset? LastVerified { get; init; }

    /// <summary>Free-form notes or caveats.</summary>
    public string? Notes { get; init; }
}
/// <summary>
/// Buckets that competitive claims fall into.
/// Member order is load-bearing: values may be persisted numerically.
/// </summary>
public enum ClaimCategory
{
    /// <summary>Claims about reachability analysis.</summary>
    Reachability,

    /// <summary>Claims about precision/accuracy.</summary>
    Precision,

    /// <summary>Claims about recall/coverage.</summary>
    Recall,

    /// <summary>Claims about false positive reduction.</summary>
    FalsePositiveReduction,

    /// <summary>Claims about performance/speed.</summary>
    Performance,

    /// <summary>Claims about SBOM completeness.</summary>
    SbomCompleteness,

    /// <summary>Claims about explainability.</summary>
    Explainability,

    /// <summary>Claims about reproducibility/determinism.</summary>
    Reproducibility,

    /// <summary>Anything that does not fit the categories above.</summary>
    Other
}
/// <summary>
/// Verification lifecycle of a claim.
/// Member order is load-bearing: values may be persisted numerically.
/// </summary>
public enum ClaimStatus
{
    /// <summary>Verified against current evidence.</summary>
    Verified,

    /// <summary>Requires re-verification.</summary>
    NeedsReview,

    /// <summary>Awaiting its first verification.</summary>
    Pending,

    /// <summary>Stale; may no longer hold.</summary>
    Outdated,

    /// <summary>Disproved by newer evidence.</summary>
    Invalidated
}
/// <summary>
/// Generates marketing battlecards from benchmark results.
/// </summary>
public sealed class BattlecardGenerator
{
    /// <summary>
    /// Generates a markdown battlecard from claims and metrics.
    /// Numeric and date formatting uses the invariant culture so the generated
    /// document is byte-identical regardless of the host machine's locale.
    /// </summary>
    /// <param name="claims">Claims index; only claims with <see cref="ClaimStatus.Verified"/> status are included.</param>
    /// <param name="metrics">Metric name/value pairs; values are rendered as percentages ("P2").</param>
    /// <returns>The battlecard as markdown text.</returns>
    public string Generate(ClaimsIndex claims, IReadOnlyDictionary<string, double> metrics)
    {
        var sb = new System.Text.StringBuilder();
        sb.AppendLine("# Stella Ops Scanner - Competitive Battlecard");
        sb.AppendLine();
        // FormattableString.Invariant pins culture-sensitive formatting (dates,
        // percentages) to the invariant culture (CA1305).
        sb.AppendLine(FormattableString.Invariant($"*Generated: {DateTimeOffset.UtcNow:yyyy-MM-dd HH:mm:ss} UTC*"));
        sb.AppendLine();

        // Key Differentiators: verified claims grouped by category, in enum order.
        sb.AppendLine("## Key Differentiators");
        sb.AppendLine();
        var verifiedClaims = claims.Claims.Where(c => c.Status == ClaimStatus.Verified).ToList();
        foreach (var category in Enum.GetValues<ClaimCategory>())
        {
            var categoryClaims = verifiedClaims.Where(c => c.Category == category).ToList();
            if (categoryClaims.Count == 0) continue;
            sb.AppendLine($"### {category}");
            sb.AppendLine();
            foreach (var claim in categoryClaims)
            {
                sb.AppendLine($"- **{claim.ClaimId}**: {claim.Claim}");
                if (claim.MetricValue != null)
                    sb.AppendLine($"  - Metric: {claim.MetricValue}");
                if (claim.Baseline != null)
                    sb.AppendLine($"  - Baseline: {claim.Baseline}");
            }
            sb.AppendLine();
        }

        // Metrics Summary: sorted by name for deterministic output.
        sb.AppendLine("## Metrics Summary");
        sb.AppendLine();
        sb.AppendLine("| Metric | Value |");
        sb.AppendLine("|--------|-------|");
        foreach (var (name, value) in metrics.OrderBy(kv => kv.Key))
        {
            sb.AppendLine(FormattableString.Invariant($"| {name} | {value:P2} |"));
        }
        sb.AppendLine();

        // Verification instructions.
        sb.AppendLine("## Verification");
        sb.AppendLine();
        sb.AppendLine("All claims can be independently verified using:");
        sb.AppendLine();
        sb.AppendLine("```bash");
        sb.AppendLine("stella bench verify <CLAIM-ID>");
        sb.AppendLine("```");
        sb.AppendLine();
        return sb.ToString();
    }
}

View File

@@ -0,0 +1,129 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Benchmark.Corpus;
/// <summary>Manifest describing the ground-truth corpus of container images.</summary>
public sealed record CorpusManifest
{
    // Serializer settings shared by LoadAsync/SaveAsync:
    // human-readable, camelCase, nulls omitted.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    /// <summary>Manifest format version.</summary>
    public required string Version { get; init; }

    /// <summary>Timestamp of the most recent corpus update.</summary>
    public required DateTimeOffset LastUpdated { get; init; }

    /// <summary>Images with their ground-truth annotations.</summary>
    public required IReadOnlyList<ImageGroundTruth> Images { get; init; }

    /// <summary>Optional aggregate statistics for the corpus.</summary>
    public CorpusStats? Stats { get; init; }

    /// <summary>Reads a corpus manifest from the JSON file at <paramref name="path"/>.</summary>
    public static async Task<CorpusManifest?> LoadAsync(string path, CancellationToken ct = default)
    {
        await using var stream = File.OpenRead(path);
        return await JsonSerializer.DeserializeAsync<CorpusManifest>(stream, JsonOptions, ct);
    }

    /// <summary>Writes this manifest as JSON to <paramref name="path"/>.</summary>
    public async Task SaveAsync(string path, CancellationToken ct = default)
    {
        await using var stream = File.Create(path);
        await JsonSerializer.SerializeAsync(stream, this, JsonOptions, ct);
    }
}
/// <summary>Ground truth annotations for a single container image.</summary>
public sealed record ImageGroundTruth
{
    /// <summary>Image digest (sha256:...).</summary>
    public required string Digest { get; init; }

    /// <summary>Human-friendly image reference, e.g. alpine:3.18.</summary>
    public required string ImageRef { get; init; }

    /// <summary>CVEs verified as true positives — a scanner SHOULD report these.</summary>
    public required IReadOnlyList<string> TruePositives { get; init; }

    /// <summary>
    /// CVEs verified as false positives — a scanner should NOT report these.
    /// Typical causes: backported fixes, unreachable code, etc.
    /// </summary>
    public required IReadOnlyList<string> FalsePositives { get; init; }

    /// <summary>
    /// Per-CVE explanations of why a CVE is classified as a false positive.
    /// Key: CVE ID; value: explanation.
    /// </summary>
    public IReadOnlyDictionary<string, string>? Notes { get; init; }

    /// <summary>Image categories such as alpine, debian, nodejs, python.</summary>
    public IReadOnlyList<string>? Categories { get; init; }

    /// <summary>When this ground truth was last verified, if known.</summary>
    public DateTimeOffset? VerifiedAt { get; init; }

    /// <summary>Who performed the verification, if known.</summary>
    public string? VerifiedBy { get; init; }
}
/// <summary>Aggregate statistics for the corpus.</summary>
public sealed record CorpusStats
{
    /// <summary>Total number of images in the corpus.</summary>
    public required int TotalImages { get; init; }

    /// <summary>Optional image count per category.</summary>
    public IReadOnlyDictionary<string, int>? ByCategory { get; init; }

    /// <summary>Verified true positives summed over all images.</summary>
    public required int TotalTruePositives { get; init; }

    /// <summary>Verified false positives summed over all images.</summary>
    public required int TotalFalsePositives { get; init; }
}

View File

@@ -0,0 +1,125 @@
namespace StellaOps.Scanner.Benchmark.Corpus;
/// <summary>
/// How a scanner finding compares against the ground truth.
/// Member order is load-bearing: values may be persisted numerically.
/// </summary>
public enum FindingClassification
{
    /// <summary>True Positive: the vulnerability was correctly identified.</summary>
    TruePositive,

    /// <summary>
    /// False Positive: the vulnerability was incorrectly reported.
    /// Typical causes: backported fixes, unreachable code, version mismatch.
    /// </summary>
    FalsePositive,

    /// <summary>True Negative: correctly absent from the report (implicit; rarely tracked).</summary>
    TrueNegative,

    /// <summary>False Negative: present in the image but missed by the scanner.</summary>
    FalseNegative,

    /// <summary>Not present in the ground truth; cannot be classified.</summary>
    Unknown
}
/// <summary>
/// Why a finding was classified as a false positive.
/// Member order is load-bearing: values may be persisted numerically.
/// </summary>
public enum FalsePositiveReason
{
    /// <summary>The distribution backported the fix.</summary>
    BackportedFix,

    /// <summary>The vulnerable code path cannot be reached.</summary>
    UnreachableCode,

    /// <summary>The version string was parsed incorrectly.</summary>
    VersionMismatch,

    /// <summary>The vulnerability does not apply to this platform.</summary>
    PlatformNotAffected,

    /// <summary>The vulnerable feature or component is disabled.</summary>
    FeatureDisabled,

    /// <summary>A different package that happens to share the name.</summary>
    PackageNameCollision,

    /// <summary>Any other reason.</summary>
    Other
}
/// <summary>Detailed classification record for one finding.</summary>
public sealed record ClassificationReport
{
    /// <summary>CVE identifier.</summary>
    public required string CveId { get; init; }

    /// <summary>Assigned classification.</summary>
    public required FindingClassification Classification { get; init; }

    /// <summary>Reason, when the classification is a false positive.</summary>
    public FalsePositiveReason? FpReason { get; init; }

    /// <summary>Optional human-readable explanation.</summary>
    public string? Explanation { get; init; }

    /// <summary>Affected package name.</summary>
    public required string PackageName { get; init; }

    /// <summary>Affected package version.</summary>
    public required string PackageVersion { get; init; }

    /// <summary>Vulnerability severity.</summary>
    public required string Severity { get; init; }

    /// <summary>Scanner that produced the finding.</summary>
    public required string Scanner { get; init; }

    /// <summary>Optional ecosystem (npm, pypi, alpine, ...).</summary>
    public string? Ecosystem { get; init; }
}

View File

@@ -0,0 +1,125 @@
using System.Diagnostics;
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.Benchmark.Harness;
/// <summary>
/// Adapter for Grype vulnerability scanner output.
/// </summary>
public sealed class GrypeAdapter : CompetitorAdapterBase
{
    private readonly ILogger<GrypeAdapter> _logger;
    private readonly string _grypePath;

    /// <summary>
    /// Creates a Grype adapter.
    /// </summary>
    /// <param name="logger">Logger for scan diagnostics.</param>
    /// <param name="grypePath">Path to the grype binary; defaults to "grype" on PATH.</param>
    public GrypeAdapter(ILogger<GrypeAdapter> logger, string? grypePath = null)
    {
        _logger = logger;
        _grypePath = grypePath ?? "grype";
    }

    public override string ToolName => "Grype";
    public override string ToolVersion => "latest";

    /// <summary>
    /// Runs grype against <paramref name="imageRef"/> and returns normalized findings.
    /// Returns an empty list when the process cannot start or exits non-zero.
    /// </summary>
    public override async Task<IReadOnlyList<NormalizedFinding>> ScanAsync(
        string imageRef,
        CancellationToken ct = default)
    {
        _logger.LogInformation("Scanning {Image} with Grype", imageRef);
        var startInfo = new ProcessStartInfo
        {
            FileName = _grypePath,
            Arguments = $"--output json {imageRef}",
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            UseShellExecute = false,
            CreateNoWindow = true
        };
        using var process = new Process { StartInfo = startInfo };
        if (!process.Start())
        {
            _logger.LogError("Failed to start Grype process at {Path}", _grypePath);
            return [];
        }
        // Drain stdout and stderr concurrently BEFORE waiting for exit;
        // reading only one stream can deadlock once the other pipe's buffer fills.
        var stdoutTask = process.StandardOutput.ReadToEndAsync(ct);
        var stderrTask = process.StandardError.ReadToEndAsync(ct);
        await process.WaitForExitAsync(ct);
        var output = await stdoutTask;
        var error = await stderrTask;
        if (process.ExitCode != 0)
        {
            _logger.LogError("Grype scan failed: {Error}", error);
            return [];
        }
        return await ParseOutputAsync(output, ct);
    }

    /// <summary>
    /// Parses Grype JSON output into normalized findings.
    /// Malformed JSON is logged and yields an empty list.
    /// </summary>
    public override Task<IReadOnlyList<NormalizedFinding>> ParseOutputAsync(
        string jsonOutput,
        CancellationToken ct = default)
    {
        var findings = new List<NormalizedFinding>();
        try
        {
            using var doc = JsonDocument.Parse(jsonOutput);
            var root = doc.RootElement;
            // Grype output structure: { "matches": [ { "vulnerability": {...}, "artifact": {...} } ] }
            if (root.TryGetProperty("matches", out var matches))
            {
                foreach (var match in matches.EnumerateArray())
                {
                    var finding = ParseMatch(match);
                    if (finding != null)
                        findings.Add(finding);
                }
            }
        }
        catch (JsonException ex)
        {
            _logger.LogError(ex, "Failed to parse Grype JSON output");
        }
        return Task.FromResult<IReadOnlyList<NormalizedFinding>>(findings);
    }

    // Converts one Grype "match" element into a finding; returns null when any
    // required field (CVE id, package name, version) is missing or empty.
    private NormalizedFinding? ParseMatch(JsonElement match)
    {
        if (!match.TryGetProperty("vulnerability", out var vuln))
            return null;
        if (!vuln.TryGetProperty("id", out var idElement))
            return null;
        var cveId = idElement.GetString();
        if (string.IsNullOrEmpty(cveId))
            return null;
        if (!match.TryGetProperty("artifact", out var artifact))
            return null;
        var pkgName = artifact.TryGetProperty("name", out var pkg) ? pkg.GetString() : null;
        var version = artifact.TryGetProperty("version", out var ver) ? ver.GetString() : null;
        var severity = vuln.TryGetProperty("severity", out var sev) ? sev.GetString() : null;
        string? fixedVer = null;
        // "fix.versions" may list several fixed versions; keep the first.
        if (vuln.TryGetProperty("fix", out var fix) && fix.TryGetProperty("versions", out var fixVersions))
        {
            var versions = fixVersions.EnumerateArray().Select(v => v.GetString()).ToList();
            fixedVer = versions.FirstOrDefault();
        }
        if (string.IsNullOrEmpty(pkgName) || string.IsNullOrEmpty(version))
            return null;
        return new NormalizedFinding
        {
            CveId = cveId,
            PackageName = pkgName,
            PackageVersion = version,
            Severity = NormalizeSeverity(severity),
            Source = ToolName,
            FixedVersion = fixedVer
        };
    }
}

View File

@@ -0,0 +1,67 @@
namespace StellaOps.Scanner.Benchmark.Harness;
/// <summary>
/// Adapts a competitor scanner's output into normalized findings.
/// </summary>
public interface ICompetitorAdapter
{
    /// <summary>Name of the competitor tool.</summary>
    string ToolName { get; }

    /// <summary>Version of the competitor tool.</summary>
    string ToolVersion { get; }

    /// <summary>Runs the tool against an image and returns normalized findings.</summary>
    Task<IReadOnlyList<NormalizedFinding>> ScanAsync(
        string imageRef,
        CancellationToken ct = default);

    /// <summary>Parses previously captured JSON output from the tool.</summary>
    Task<IReadOnlyList<NormalizedFinding>> ParseOutputAsync(
        string jsonOutput,
        CancellationToken ct = default);
}
/// <summary>
/// Base class for competitor adapters with common functionality.
/// </summary>
public abstract class CompetitorAdapterBase : ICompetitorAdapter
{
    public abstract string ToolName { get; }
    public abstract string ToolVersion { get; }

    public abstract Task<IReadOnlyList<NormalizedFinding>> ScanAsync(
        string imageRef,
        CancellationToken ct = default);

    public abstract Task<IReadOnlyList<NormalizedFinding>> ParseOutputAsync(
        string jsonOutput,
        CancellationToken ct = default);

    /// <summary>
    /// Normalizes a severity string to one of: CRITICAL, HIGH, MEDIUM, LOW,
    /// NEGLIGIBLE, UNKNOWN. Matching is case-insensitive. Besides short forms,
    /// also maps vendor levels used by Red Hat / GHSA feeds ("Important",
    /// "Moderate") that would otherwise fall through to UNKNOWN.
    /// </summary>
    /// <param name="severity">Raw severity from a scanner, or null.</param>
    /// <returns>The normalized severity label.</returns>
    protected static string NormalizeSeverity(string? severity)
    {
        if (string.IsNullOrWhiteSpace(severity))
            return "UNKNOWN";
        return severity.ToUpperInvariant() switch
        {
            "CRITICAL" or "CRIT" => "CRITICAL",
            // "IMPORTANT" is Red Hat's equivalent of HIGH.
            "HIGH" or "H" or "IMPORTANT" => "HIGH",
            // "MODERATE" is used by GHSA and Red Hat for MEDIUM.
            "MEDIUM" or "MED" or "M" or "MODERATE" => "MEDIUM",
            "LOW" or "L" => "LOW",
            "NEGLIGIBLE" or "NEG" or "INFO" => "NEGLIGIBLE",
            _ => "UNKNOWN"
        };
    }
}

View File

@@ -0,0 +1,52 @@
namespace StellaOps.Scanner.Benchmark.Harness;
/// <summary>
/// A scanner-agnostic finding, comparable across different tools.
/// </summary>
public sealed record NormalizedFinding
{
    /// <summary>CVE identifier, e.g. CVE-2024-1234.</summary>
    public required string CveId { get; init; }

    /// <summary>Name of the affected package.</summary>
    public required string PackageName { get; init; }

    /// <summary>Installed version of the package.</summary>
    public required string PackageVersion { get; init; }

    /// <summary>Severity level (CRITICAL, HIGH, MEDIUM, LOW, UNKNOWN).</summary>
    public required string Severity { get; init; }

    /// <summary>Scanner that produced this finding.</summary>
    public required string Source { get; init; }

    /// <summary>Package ecosystem (npm, pypi, maven, ...), if known.</summary>
    public string? Ecosystem { get; init; }

    /// <summary>Version containing the fix, if known.</summary>
    public string? FixedVersion { get; init; }

    /// <summary>Extra scanner-specific metadata.</summary>
    public IReadOnlyDictionary<string, string>? Metadata { get; init; }

    /// <summary>
    /// Lowercased "cve|package|version" key used to deduplicate and compare
    /// findings from different scanners.
    /// </summary>
    public string UniqueKey => string.Join("|", CveId, PackageName, PackageVersion).ToLowerInvariant();
}

View File

@@ -0,0 +1,111 @@
using System.Diagnostics;
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.Benchmark.Harness;
/// <summary>
/// Adapter for Syft SBOM generator output.
/// Note: Syft generates SBOMs, not vulnerabilities directly.
/// This adapter extracts package information for SBOM comparison.
/// </summary>
public sealed class SyftAdapter : CompetitorAdapterBase
{
    private readonly ILogger<SyftAdapter> _logger;
    private readonly string _syftPath;

    /// <summary>
    /// Creates a Syft adapter.
    /// </summary>
    /// <param name="logger">Logger for scan diagnostics.</param>
    /// <param name="syftPath">Path to the syft binary; defaults to "syft" on PATH.</param>
    public SyftAdapter(ILogger<SyftAdapter> logger, string? syftPath = null)
    {
        _logger = logger;
        _syftPath = syftPath ?? "syft";
    }

    public override string ToolName => "Syft";
    public override string ToolVersion => "latest";

    /// <summary>
    /// Runs syft against <paramref name="imageRef"/> and returns one pseudo-finding
    /// per SBOM component. Returns an empty list when the process cannot start or
    /// exits non-zero.
    /// </summary>
    public override async Task<IReadOnlyList<NormalizedFinding>> ScanAsync(
        string imageRef,
        CancellationToken ct = default)
    {
        _logger.LogInformation("Scanning {Image} with Syft", imageRef);
        var startInfo = new ProcessStartInfo
        {
            FileName = _syftPath,
            Arguments = $"--output json {imageRef}",
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            UseShellExecute = false,
            CreateNoWindow = true
        };
        using var process = new Process { StartInfo = startInfo };
        if (!process.Start())
        {
            _logger.LogError("Failed to start Syft process at {Path}", _syftPath);
            return [];
        }
        // Drain stdout and stderr concurrently BEFORE waiting for exit;
        // reading only one stream can deadlock once the other pipe's buffer fills.
        var stdoutTask = process.StandardOutput.ReadToEndAsync(ct);
        var stderrTask = process.StandardError.ReadToEndAsync(ct);
        await process.WaitForExitAsync(ct);
        var output = await stdoutTask;
        var error = await stderrTask;
        if (process.ExitCode != 0)
        {
            _logger.LogError("Syft scan failed: {Error}", error);
            return [];
        }
        return await ParseOutputAsync(output, ct);
    }

    /// <summary>
    /// Parses Syft JSON output into pseudo-findings (one per artifact).
    /// Malformed JSON is logged and yields an empty list.
    /// </summary>
    public override Task<IReadOnlyList<NormalizedFinding>> ParseOutputAsync(
        string jsonOutput,
        CancellationToken ct = default)
    {
        var findings = new List<NormalizedFinding>();
        try
        {
            using var doc = JsonDocument.Parse(jsonOutput);
            var root = doc.RootElement;
            // Syft output structure: { "artifacts": [ { "name": "...", "version": "..." } ] }
            // Note: Syft doesn't produce vulnerability findings, only SBOM components
            // For benchmark purposes, we create placeholder findings for package presence comparison
            if (root.TryGetProperty("artifacts", out var artifacts))
            {
                foreach (var artifact in artifacts.EnumerateArray())
                {
                    var component = ParseArtifact(artifact);
                    if (component != null)
                        findings.Add(component);
                }
            }
        }
        catch (JsonException ex)
        {
            _logger.LogError(ex, "Failed to parse Syft JSON output");
        }
        return Task.FromResult<IReadOnlyList<NormalizedFinding>>(findings);
    }

    // Converts one Syft artifact into a pseudo-finding; returns null when the
    // package name or version is missing or empty.
    private NormalizedFinding? ParseArtifact(JsonElement artifact)
    {
        var pkgName = artifact.TryGetProperty("name", out var pkg) ? pkg.GetString() : null;
        var version = artifact.TryGetProperty("version", out var ver) ? ver.GetString() : null;
        var pkgType = artifact.TryGetProperty("type", out var typeEl) ? typeEl.GetString() : null;
        if (string.IsNullOrEmpty(pkgName) || string.IsNullOrEmpty(version))
            return null;
        // For Syft, we create a pseudo-finding representing package presence
        // This is used for SBOM completeness comparison, not vulnerability comparison
        return new NormalizedFinding
        {
            CveId = $"SBOM-COMPONENT-{pkgName}",
            PackageName = pkgName,
            PackageVersion = version,
            Severity = "INFO",
            Source = ToolName,
            Ecosystem = pkgType
        };
    }
}

View File

@@ -0,0 +1,119 @@
using System.Diagnostics;
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.Benchmark.Harness;
/// <summary>
/// Adapter for Trivy vulnerability scanner output.
/// </summary>
public sealed class TrivyAdapter : CompetitorAdapterBase
{
    private readonly ILogger<TrivyAdapter> _logger;
    private readonly string _trivyPath;

    /// <summary>
    /// Creates a Trivy adapter.
    /// </summary>
    /// <param name="logger">Logger for scan diagnostics.</param>
    /// <param name="trivyPath">Path to the trivy binary; defaults to "trivy" on PATH.</param>
    public TrivyAdapter(ILogger<TrivyAdapter> logger, string? trivyPath = null)
    {
        _logger = logger;
        _trivyPath = trivyPath ?? "trivy";
    }

    public override string ToolName => "Trivy";
    public override string ToolVersion => "latest";

    /// <summary>
    /// Runs trivy against <paramref name="imageRef"/> and returns normalized findings.
    /// Returns an empty list when the process cannot start or exits non-zero.
    /// </summary>
    public override async Task<IReadOnlyList<NormalizedFinding>> ScanAsync(
        string imageRef,
        CancellationToken ct = default)
    {
        _logger.LogInformation("Scanning {Image} with Trivy", imageRef);
        var startInfo = new ProcessStartInfo
        {
            FileName = _trivyPath,
            Arguments = $"image --format json --quiet {imageRef}",
            RedirectStandardOutput = true,
            RedirectStandardError = true,
            UseShellExecute = false,
            CreateNoWindow = true
        };
        using var process = new Process { StartInfo = startInfo };
        if (!process.Start())
        {
            _logger.LogError("Failed to start Trivy process at {Path}", _trivyPath);
            return [];
        }
        // Drain stdout and stderr concurrently BEFORE waiting for exit;
        // reading only one stream can deadlock once the other pipe's buffer fills.
        var stdoutTask = process.StandardOutput.ReadToEndAsync(ct);
        var stderrTask = process.StandardError.ReadToEndAsync(ct);
        await process.WaitForExitAsync(ct);
        var output = await stdoutTask;
        var error = await stderrTask;
        if (process.ExitCode != 0)
        {
            _logger.LogError("Trivy scan failed: {Error}", error);
            return [];
        }
        return await ParseOutputAsync(output, ct);
    }

    /// <summary>
    /// Parses Trivy JSON output into normalized findings.
    /// Malformed JSON is logged and yields an empty list.
    /// </summary>
    public override Task<IReadOnlyList<NormalizedFinding>> ParseOutputAsync(
        string jsonOutput,
        CancellationToken ct = default)
    {
        var findings = new List<NormalizedFinding>();
        try
        {
            using var doc = JsonDocument.Parse(jsonOutput);
            var root = doc.RootElement;
            // Trivy output structure: { "Results": [ { "Vulnerabilities": [...] } ] }
            if (root.TryGetProperty("Results", out var results))
            {
                foreach (var result in results.EnumerateArray())
                {
                    // "Vulnerabilities" is absent for clean targets; skip those.
                    if (!result.TryGetProperty("Vulnerabilities", out var vulnerabilities))
                        continue;
                    foreach (var vuln in vulnerabilities.EnumerateArray())
                    {
                        var finding = ParseVulnerability(vuln);
                        if (finding != null)
                            findings.Add(finding);
                    }
                }
            }
        }
        catch (JsonException ex)
        {
            _logger.LogError(ex, "Failed to parse Trivy JSON output");
        }
        return Task.FromResult<IReadOnlyList<NormalizedFinding>>(findings);
    }

    // Converts one Trivy vulnerability element into a finding; returns null when
    // any required field (CVE id, package name, version) is missing or empty.
    private NormalizedFinding? ParseVulnerability(JsonElement vuln)
    {
        if (!vuln.TryGetProperty("VulnerabilityID", out var idElement))
            return null;
        var cveId = idElement.GetString();
        if (string.IsNullOrEmpty(cveId))
            return null;
        var pkgName = vuln.TryGetProperty("PkgName", out var pkg) ? pkg.GetString() : null;
        var version = vuln.TryGetProperty("InstalledVersion", out var ver) ? ver.GetString() : null;
        var severity = vuln.TryGetProperty("Severity", out var sev) ? sev.GetString() : null;
        var fixedVer = vuln.TryGetProperty("FixedVersion", out var fix) ? fix.GetString() : null;
        if (string.IsNullOrEmpty(pkgName) || string.IsNullOrEmpty(version))
            return null;
        return new NormalizedFinding
        {
            CveId = cveId,
            PackageName = pkgName,
            PackageVersion = version,
            Severity = NormalizeSeverity(severity),
            Source = ToolName,
            FixedVersion = fixedVer
        };
    }
}

View File

@@ -0,0 +1,152 @@
namespace StellaOps.Scanner.Benchmark.Metrics;
/// <summary>
/// Accuracy metrics for one scanner run against one image.
/// Derived metrics (precision, recall, F1, accuracy) return 0 when their
/// denominator is 0.
/// </summary>
public sealed record BenchmarkMetrics
{
    /// <summary>Count of true positive findings.</summary>
    public required int TruePositives { get; init; }

    /// <summary>Count of false positive findings.</summary>
    public required int FalsePositives { get; init; }

    /// <summary>Count of true negative findings.</summary>
    public required int TrueNegatives { get; init; }

    /// <summary>Count of false negatives (missed vulnerabilities).</summary>
    public required int FalseNegatives { get; init; }

    /// <summary>Precision = TP / (TP + FP); 0 when undefined.</summary>
    public double Precision
    {
        get
        {
            var denominator = TruePositives + FalsePositives;
            return denominator > 0 ? TruePositives / (double)denominator : 0;
        }
    }

    /// <summary>Recall = TP / (TP + FN); 0 when undefined.</summary>
    public double Recall
    {
        get
        {
            var denominator = TruePositives + FalseNegatives;
            return denominator > 0 ? TruePositives / (double)denominator : 0;
        }
    }

    /// <summary>F1 = harmonic mean of precision and recall; 0 when undefined.</summary>
    public double F1Score
    {
        get
        {
            var p = Precision;
            var r = Recall;
            return p + r > 0 ? 2 * (p * r) / (p + r) : 0;
        }
    }

    /// <summary>Accuracy = (TP + TN) / (TP + TN + FP + FN); 0 when undefined.</summary>
    public double Accuracy
    {
        get
        {
            var total = TruePositives + TrueNegatives + FalsePositives + FalseNegatives;
            return total > 0 ? (double)(TruePositives + TrueNegatives) / total : 0;
        }
    }

    /// <summary>Name of the scanner tool.</summary>
    public required string ToolName { get; init; }

    /// <summary>Image reference that was scanned, if applicable.</summary>
    public string? ImageRef { get; init; }

    /// <summary>When the benchmark ran.</summary>
    public required DateTimeOffset Timestamp { get; init; }
}
/// <summary>
/// Metrics summed across multiple images for one scanner.
/// Derived metrics return 0 when their denominator is 0.
/// </summary>
public sealed record AggregatedMetrics
{
    /// <summary>Name of the scanner tool.</summary>
    public required string ToolName { get; init; }

    /// <summary>Number of images scanned.</summary>
    public required int TotalImages { get; init; }

    /// <summary>True positives summed over all images.</summary>
    public required int TotalTruePositives { get; init; }

    /// <summary>False positives summed over all images.</summary>
    public required int TotalFalsePositives { get; init; }

    /// <summary>True negatives summed over all images.</summary>
    public required int TotalTrueNegatives { get; init; }

    /// <summary>False negatives summed over all images.</summary>
    public required int TotalFalseNegatives { get; init; }

    /// <summary>Aggregate precision = TP / (TP + FP); 0 when undefined.</summary>
    public double Precision
    {
        get
        {
            var denominator = TotalTruePositives + TotalFalsePositives;
            return denominator > 0 ? TotalTruePositives / (double)denominator : 0;
        }
    }

    /// <summary>Aggregate recall = TP / (TP + FN); 0 when undefined.</summary>
    public double Recall
    {
        get
        {
            var denominator = TotalTruePositives + TotalFalseNegatives;
            return denominator > 0 ? TotalTruePositives / (double)denominator : 0;
        }
    }

    /// <summary>Aggregate F1 = harmonic mean of precision and recall; 0 when undefined.</summary>
    public double F1Score
    {
        get
        {
            var p = Precision;
            var r = Recall;
            return p + r > 0 ? 2 * (p * r) / (p + r) : 0;
        }
    }

    /// <summary>Optional breakdown keyed by severity.</summary>
    public IReadOnlyDictionary<string, BenchmarkMetrics>? BySeverity { get; init; }

    /// <summary>Optional breakdown keyed by ecosystem.</summary>
    public IReadOnlyDictionary<string, BenchmarkMetrics>? ByEcosystem { get; init; }

    /// <summary>Optional per-image metrics that were aggregated.</summary>
    public IReadOnlyList<BenchmarkMetrics>? PerImageMetrics { get; init; }

    /// <summary>When the aggregation was computed.</summary>
    public required DateTimeOffset Timestamp { get; init; }
}

View File

@@ -0,0 +1,153 @@
using StellaOps.Scanner.Benchmark.Corpus;
using StellaOps.Scanner.Benchmark.Harness;
namespace StellaOps.Scanner.Benchmark.Metrics;
/// <summary>
/// Calculates benchmark metrics by comparing scanner findings against ground truth.
/// CVE IDs are compared case-insensitively (upper-cased) throughout.
/// </summary>
public sealed class MetricsCalculator
{
    /// <summary>
    /// Calculates metrics for a single image.
    /// Note: "true negatives" here only counts known-FP CVEs the scanner correctly
    /// omitted; the universe of all non-vulnerable CVEs is not enumerable.
    /// </summary>
    /// <param name="toolName">Scanner tool name recorded in the result.</param>
    /// <param name="imageRef">Image reference recorded in the result.</param>
    /// <param name="scannerFindings">Findings the scanner reported for the image.</param>
    /// <param name="groundTruth">Verified ground truth for the image.</param>
    public BenchmarkMetrics Calculate(
        string toolName,
        string imageRef,
        IReadOnlyList<NormalizedFinding> scannerFindings,
        ImageGroundTruth groundTruth)
    {
        var groundTruthPositives = groundTruth.TruePositives
            .Select(cve => cve.ToUpperInvariant())
            .ToHashSet();
        var groundTruthNegatives = groundTruth.FalsePositives
            .Select(cve => cve.ToUpperInvariant())
            .ToHashSet();
        var reportedCves = scannerFindings
            .Select(f => f.CveId.ToUpperInvariant())
            .ToHashSet();
        // True Positives: CVEs correctly identified
        var tp = reportedCves.Intersect(groundTruthPositives).Count();
        // False Positives: CVEs reported but should not have been
        var fp = reportedCves.Intersect(groundTruthNegatives).Count();
        // False Negatives: CVEs that should have been reported but weren't
        var fn = groundTruthPositives.Except(reportedCves).Count();
        // True Negatives: known-FP CVEs correctly not reported
        var tn = groundTruthNegatives.Except(reportedCves).Count();
        return new BenchmarkMetrics
        {
            ToolName = toolName,
            ImageRef = imageRef,
            TruePositives = tp,
            FalsePositives = fp,
            TrueNegatives = tn,
            FalseNegatives = fn,
            Timestamp = DateTimeOffset.UtcNow
        };
    }

    /// <summary>
    /// Aggregates per-image metrics into totals for one tool.
    /// </summary>
    public AggregatedMetrics Aggregate(
        string toolName,
        IReadOnlyList<BenchmarkMetrics> perImageMetrics)
    {
        var totalTp = perImageMetrics.Sum(m => m.TruePositives);
        var totalFp = perImageMetrics.Sum(m => m.FalsePositives);
        var totalTn = perImageMetrics.Sum(m => m.TrueNegatives);
        var totalFn = perImageMetrics.Sum(m => m.FalseNegatives);
        return new AggregatedMetrics
        {
            ToolName = toolName,
            TotalImages = perImageMetrics.Count,
            TotalTruePositives = totalTp,
            TotalFalsePositives = totalFp,
            TotalTrueNegatives = totalTn,
            TotalFalseNegatives = totalFn,
            PerImageMetrics = perImageMetrics,
            Timestamp = DateTimeOffset.UtcNow
        };
    }

    /// <summary>
    /// Classifies each reported finding as TP, FP, or Unknown, then appends a
    /// placeholder FN entry for every ground-truth CVE the scanner missed.
    /// </summary>
    public IReadOnlyList<ClassifiedFinding> ClassifyFindings(
        IReadOnlyList<NormalizedFinding> scannerFindings,
        ImageGroundTruth groundTruth)
    {
        var groundTruthPositives = groundTruth.TruePositives
            .Select(cve => cve.ToUpperInvariant())
            .ToHashSet();
        var groundTruthNegatives = groundTruth.FalsePositives
            .Select(cve => cve.ToUpperInvariant())
            .ToHashSet();
        var classified = new List<ClassifiedFinding>();
        // Classify reported findings
        foreach (var finding in scannerFindings)
        {
            var cveUpper = finding.CveId.ToUpperInvariant();
            FindingClassification classification;
            string? reason = null;
            if (groundTruthPositives.Contains(cveUpper))
            {
                classification = FindingClassification.TruePositive;
            }
            else if (groundTruthNegatives.Contains(cveUpper))
            {
                classification = FindingClassification.FalsePositive;
                // Notes is keyed by the curator's original-case CVE ID, which may
                // not be upper-case; try both keys so explanations aren't dropped.
                reason = groundTruth.Notes?.GetValueOrDefault(finding.CveId)
                    ?? groundTruth.Notes?.GetValueOrDefault(cveUpper);
            }
            else
            {
                // Not in ground truth - treat as unknown
                classification = FindingClassification.Unknown;
            }
            classified.Add(new ClassifiedFinding(finding, classification, reason));
        }
        // Add false negatives (missed CVEs)
        var reportedCves = scannerFindings.Select(f => f.CveId.ToUpperInvariant()).ToHashSet();
        foreach (var missedCve in groundTruthPositives.Except(reportedCves))
        {
            // Placeholder finding: the scanner produced nothing for this CVE, so
            // package details are unknown.
            var placeholder = new NormalizedFinding
            {
                CveId = missedCve,
                PackageName = "unknown",
                PackageVersion = "unknown",
                Severity = "UNKNOWN",
                Source = "GroundTruth"
            };
            classified.Add(new ClassifiedFinding(
                placeholder,
                FindingClassification.FalseNegative,
                "Vulnerability present but not reported by scanner"));
        }
        return classified;
    }
}
/// <summary>
/// A finding with its classification.
/// </summary>
/// <param name="Finding">The normalized scanner finding, or a synthetic placeholder for a missed CVE.</param>
/// <param name="Classification">How the finding compares against the ground truth.</param>
/// <param name="Reason">Optional human-readable explanation (e.g. a ground-truth note, or why a CVE was marked missed).</param>
public sealed record ClassifiedFinding(
    NormalizedFinding Finding,
    FindingClassification Classification,
    string? Reason);

View File

@@ -0,0 +1,13 @@
<!--
  Benchmark claims library project.
  NOTE(review): targets net10.0 while referencing 9.0.0 packages - confirm this
  version pairing is intended.
-->
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
    <PackageReference Include="System.Text.Json" Version="9.0.0" />
  </ItemGroup>
</Project>

View File

@@ -101,6 +101,11 @@ public sealed record JsCallGraphResult
/// Detected entrypoints.
/// </summary>
public IReadOnlyList<JsEntrypointInfo> Entrypoints { get; init; } = [];
/// <summary>
/// Detected security sinks.
/// </summary>
public IReadOnlyList<JsSinkInfo> Sinks { get; init; } = [];
}
/// <summary>
@@ -216,3 +221,29 @@ public sealed record JsEntrypointInfo
/// </summary>
public string? Method { get; init; }
}
/// <summary>
/// A security sink from the JavaScript call graph.
/// Produced by the stella-callgraph-node tool and consumed when building the snapshot.
/// </summary>
public sealed record JsSinkInfo
{
    /// <summary>
    /// Node ID of the caller function that invokes the sink.
    /// Matches the <c>Id</c> of an entry in the result's node list.
    /// </summary>
    public required string Caller { get; init; }

    /// <summary>
    /// Sink category (command_injection, sql_injection, ssrf, etc.).
    /// Free-form string from the tool; mapped onto an enum by the extractor.
    /// </summary>
    public required string Category { get; init; }

    /// <summary>
    /// Method being called (e.g. exec, query, fetch).
    /// </summary>
    public required string Method { get; init; }

    /// <summary>
    /// Call site position, when the tool could resolve it.
    /// </summary>
    public JsPositionInfo? Site { get; init; }
}

View File

@@ -1,21 +1,35 @@
using System.Collections.Immutable;
using System.Diagnostics;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability;
namespace StellaOps.Scanner.CallGraph.Node;
/// <summary>
/// Placeholder Node.js call graph extractor.
/// Babel integration is planned; this implementation is intentionally minimal.
/// Node.js call graph extractor using Babel AST analysis.
/// Invokes stella-callgraph-node tool for JavaScript/TypeScript projects.
/// </summary>
public sealed class NodeCallGraphExtractor : ICallGraphExtractor
{
private readonly TimeProvider _timeProvider;
private readonly ILogger<NodeCallGraphExtractor>? _logger;
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web);
public NodeCallGraphExtractor(TimeProvider? timeProvider = null)
/// <summary>
/// Path to the stella-callgraph-node tool (configurable).
/// </summary>
public string ToolPath { get; init; } = "stella-callgraph-node";
/// <summary>
/// Timeout for tool execution.
/// </summary>
public TimeSpan ToolTimeout { get; init; } = TimeSpan.FromMinutes(5);
/// <summary>
/// Creates the extractor.
/// </summary>
/// <param name="timeProvider">Clock used for snapshot timestamps; defaults to the system clock.</param>
/// <param name="logger">Optional diagnostic logger; when null, logging is silently skipped.</param>
public NodeCallGraphExtractor(TimeProvider? timeProvider = null, ILogger<NodeCallGraphExtractor>? logger = null)
{
    _timeProvider = timeProvider ?? TimeProvider.System;
    _logger = logger;
}
public string Language => "node";
@@ -28,6 +42,25 @@ public sealed class NodeCallGraphExtractor : ICallGraphExtractor
throw new ArgumentException($"Expected language '{Language}', got '{request.Language}'.", nameof(request));
}
// Try to extract using Babel tool first
var targetDir = ResolveProjectDirectory(request.TargetPath);
if (targetDir is not null)
{
try
{
var result = await InvokeToolAsync(targetDir, cancellationToken).ConfigureAwait(false);
if (result is not null)
{
return BuildFromBabelResult(request.ScanId, result);
}
}
catch (Exception ex) when (ex is not OperationCanceledException)
{
_logger?.LogWarning(ex, "Babel tool invocation failed for {Path}, falling back to trace file", targetDir);
}
}
// Fallback: try legacy trace file
var tracePath = ResolveTracePath(request.TargetPath);
if (tracePath is not null && File.Exists(tracePath))
{
@@ -42,10 +75,11 @@ public sealed class NodeCallGraphExtractor : ICallGraphExtractor
}
catch (Exception ex) when (ex is IOException or JsonException)
{
// fall through to empty snapshot
_logger?.LogDebug(ex, "Failed to read trace file at {Path}", tracePath);
}
}
// Return empty snapshot
var extractedAt = _timeProvider.GetUtcNow();
var provisional = new CallGraphSnapshot(
ScanId: request.ScanId,
@@ -61,6 +95,238 @@ public sealed class NodeCallGraphExtractor : ICallGraphExtractor
return provisional with { GraphDigest = digest };
}
/// <summary>
/// Runs the external stella-callgraph-node tool against <paramref name="projectPath"/>
/// and parses its JSON output.
/// </summary>
/// <param name="projectPath">Root directory of the Node.js project.</param>
/// <param name="cancellationToken">Caller cancellation token.</param>
/// <returns>
/// The parsed call graph, or null when the tool is missing, times out,
/// exits non-zero, or prints nothing (callers fall back to the trace file).
/// </returns>
private async Task<JsCallGraphResult?> InvokeToolAsync(string projectPath, CancellationToken cancellationToken)
{
    using var process = new Process();
    process.StartInfo = new ProcessStartInfo
    {
        FileName = ToolPath,
        Arguments = $"\"{projectPath}\" --json",
        UseShellExecute = false,
        RedirectStandardOutput = true,
        RedirectStandardError = true,
        CreateNoWindow = true
    };

    _logger?.LogDebug("Invoking stella-callgraph-node on {Path}", projectPath);
    try
    {
        process.Start();

        // Drain both pipes concurrently so the child cannot block on a full pipe buffer.
        var outputTask = process.StandardOutput.ReadToEndAsync(cancellationToken);
        var errorTask = process.StandardError.ReadToEndAsync(cancellationToken);

        using var timeoutCts = new CancellationTokenSource(ToolTimeout);
        using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, timeoutCts.Token);
        try
        {
            await process.WaitForExitAsync(linkedCts.Token).ConfigureAwait(false);
        }
        catch (OperationCanceledException)
        {
            // Bug fix: previously a timeout surfaced as OperationCanceledException,
            // which the caller's filter rethrows as if the user cancelled, and the
            // child process was left running. Kill it, then distinguish the cases.
            TryKill(process);
            if (cancellationToken.IsCancellationRequested)
            {
                throw; // genuine caller cancellation propagates
            }

            _logger?.LogWarning(
                "stella-callgraph-node timed out after {Timeout} for {Path}", ToolTimeout, projectPath);
            return null; // timeout: let the caller fall back to the legacy trace file
        }

        var output = await outputTask.ConfigureAwait(false);
        var errors = await errorTask.ConfigureAwait(false);

        if (process.ExitCode != 0)
        {
            _logger?.LogWarning("stella-callgraph-node exited with code {ExitCode}: {Errors}", process.ExitCode, errors);
            return null;
        }

        if (string.IsNullOrWhiteSpace(output))
        {
            _logger?.LogDebug("stella-callgraph-node produced no output");
            return null;
        }

        return BabelResultParser.Parse(output);
    }
    catch (Exception ex) when (ex is System.ComponentModel.Win32Exception)
    {
        // Tool binary not found / not executable on this host.
        _logger?.LogDebug(ex, "stella-callgraph-node not found at {Path}", ToolPath);
        return null;
    }
}

/// <summary>Best-effort kill of the tool process tree; failures are ignored.</summary>
private static void TryKill(Process process)
{
    try
    {
        if (!process.HasExited)
        {
            process.Kill(entireProcessTree: true);
        }
    }
    catch
    {
        // Best-effort cleanup only; the process may have exited between checks.
    }
}
/// <summary>
/// Converts a parsed stella-callgraph-node result into a deterministic call
/// graph snapshot: nodes/edges are de-duplicated and sorted by id before the
/// graph digest is computed.
/// </summary>
/// <param name="scanId">Scan identifier carried into the snapshot.</param>
/// <param name="result">Parsed tool output.</param>
private CallGraphSnapshot BuildFromBabelResult(string scanId, JsCallGraphResult result)
{
    var extractedAt = _timeProvider.GetUtcNow();

    // Entrypoint lookup by raw JS node id (avoids an O(n) scan per node);
    // on duplicate ids the first entry wins, matching FirstOrDefault semantics.
    var entrypointsById = new Dictionary<string, JsEntrypointInfo>(StringComparer.Ordinal);
    foreach (var ep in result.Entrypoints)
    {
        entrypointsById.TryAdd(ep.Id, ep);
    }

    // Sink category per caller node.
    // NOTE(review): when one caller reaches sinks of several categories only the
    // first category is recorded on the caller node - confirm this is acceptable.
    var sinksByNode = result.Sinks
        .GroupBy(s => s.Caller, StringComparer.Ordinal)
        .ToDictionary(
            g => g.Key,
            g => g.First().Category,
            StringComparer.Ordinal);

    // Convert nodes.
    var nodes = result.Nodes.Select(n =>
    {
        var isEntrypoint = entrypointsById.TryGetValue(n.Id, out var entrypoint);
        var isSink = sinksByNode.TryGetValue(n.Id, out var sinkCategory);

        EntrypointType? entrypointType = null;
        if (isEntrypoint)
        {
            entrypointType = MapEntrypointType(entrypoint?.Type);
        }

        return new CallGraphNode(
            NodeId: CallGraphNodeIds.Compute(n.Id),
            Symbol: n.Name,
            File: n.Position?.File ?? string.Empty,
            Line: n.Position?.Line ?? 0,
            Package: n.Package,
            Visibility: MapVisibility(n.Visibility),
            IsEntrypoint: isEntrypoint,
            EntrypointType: entrypointType,
            IsSink: isSink,
            SinkCategory: MapSinkCategory(isSink ? sinkCategory : null));
    }).ToList();

    // Convert edges.
    var edges = result.Edges.Select(e => new CallGraphEdge(
        CallGraphNodeIds.Compute(e.From),
        CallGraphNodeIds.Compute(e.To),
        MapCallKind(e.Kind))).ToList();

    // Synthesize nodes for external sinks (usually absent from the nodes list)
    // and connect every caller to them.
    // Bug fix: the caller->sink edge is now added for EVERY sink occurrence, not
    // only when the synthetic node is first created; previously the second caller
    // of the same sink lost its edge. Duplicates are removed by Distinct below.
    var knownNodeIds = nodes.Select(n => n.NodeId).ToHashSet(StringComparer.Ordinal);
    foreach (var sink in result.Sinks)
    {
        var sinkNodeId = CallGraphNodeIds.Compute($"js:sink:{sink.Category}:{sink.Method}");
        if (knownNodeIds.Add(sinkNodeId))
        {
            nodes.Add(new CallGraphNode(
                NodeId: sinkNodeId,
                Symbol: sink.Method,
                File: sink.Site?.File ?? string.Empty,
                Line: sink.Site?.Line ?? 0,
                Package: "external",
                Visibility: Visibility.Public,
                IsEntrypoint: false,
                EntrypointType: null,
                IsSink: true,
                // Bug fix: map the raw category string onto the enum, matching
                // the regular-node path above (the raw string was passed before).
                SinkCategory: MapSinkCategory(sink.Category)));
        }

        edges.Add(new CallGraphEdge(
            CallGraphNodeIds.Compute(sink.Caller),
            sinkNodeId,
            CallKind.Direct));
    }

    // Deterministic ordering keeps the graph digest stable across runs.
    var distinctNodes = nodes
        .GroupBy(n => n.NodeId, StringComparer.Ordinal)
        .Select(g => g.First())
        .OrderBy(n => n.NodeId, StringComparer.Ordinal)
        .ToImmutableArray();

    var distinctEdges = edges
        .Distinct(CallGraphEdgeStructuralComparer.Instance)
        .OrderBy(e => e.SourceId, StringComparer.Ordinal)
        .ThenBy(e => e.TargetId, StringComparer.Ordinal)
        .ToImmutableArray();

    var entrypointNodeIds = distinctNodes
        .Where(n => n.IsEntrypoint)
        .Select(n => n.NodeId)
        .OrderBy(id => id, StringComparer.Ordinal)
        .ToImmutableArray();

    var sinkNodeIds = distinctNodes
        .Where(n => n.IsSink)
        .Select(n => n.NodeId)
        .OrderBy(id => id, StringComparer.Ordinal)
        .ToImmutableArray();

    var provisional = new CallGraphSnapshot(
        ScanId: scanId,
        GraphDigest: string.Empty,
        Language: Language,
        ExtractedAt: extractedAt,
        Nodes: distinctNodes,
        Edges: distinctEdges,
        EntrypointIds: entrypointNodeIds,
        SinkIds: sinkNodeIds);

    return provisional with { GraphDigest = CallGraphDigests.ComputeGraphDigest(provisional) };
}
/// <summary>
/// Maps the tool's visibility string onto the snapshot enum.
/// Unknown or missing values default to Public.
/// </summary>
private static Visibility MapVisibility(string? visibility)
{
    switch (visibility?.ToLowerInvariant())
    {
        case "private":
            return Visibility.Private;
        case "protected":
            return Visibility.Protected;
        case "public":
        default:
            return Visibility.Public;
    }
}
/// <summary>
/// Maps the tool's entrypoint type string onto the snapshot enum.
/// Unknown or missing values default to HttpHandler.
/// </summary>
private static EntrypointType MapEntrypointType(string? type)
{
    switch (type?.ToLowerInvariant())
    {
        case "lambda":
            return EntrypointType.Lambda;
        case "websocket_handler":
            return EntrypointType.WebSocketHandler;
        case "grpc_handler":
        case "grpc_method":
            return EntrypointType.GrpcMethod;
        case "message_handler":
            return EntrypointType.MessageHandler;
        case "http_handler":
        default:
            return EntrypointType.HttpHandler;
    }
}
/// <summary>
/// Maps the tool's edge kind string onto the snapshot enum.
/// Unknown or missing values default to Direct.
/// </summary>
private static CallKind MapCallKind(string? kind)
{
    switch (kind?.ToLowerInvariant())
    {
        case "dynamic":
            return CallKind.Dynamic;
        case "virtual":
            return CallKind.Virtual;
        case "callback":
        case "delegate":
            return CallKind.Delegate;
        case "direct":
        default:
            return CallKind.Direct;
    }
}
/// <summary>
/// Maps the tool's free-form sink category string onto the snapshot enum.
/// Returns null for unknown or missing categories.
/// </summary>
private static SinkCategory? MapSinkCategory(string? category)
{
    switch (category?.ToLowerInvariant())
    {
        case "command_injection":
        case "cmd_exec":
            return SinkCategory.CmdExec;
        case "sql_injection":
        case "sql_raw":
            return SinkCategory.SqlRaw;
        case "deserialization":
        case "unsafe_deser":
            return SinkCategory.UnsafeDeser;
        case "ssrf":
            return SinkCategory.Ssrf;
        case "file_write":
            return SinkCategory.FileWrite;
        case "file_read":
        case "path_traversal":
            return SinkCategory.PathTraversal;
        case "weak_crypto":
        case "crypto_weak":
            return SinkCategory.CryptoWeak;
        case "ldap_injection":
            return SinkCategory.LdapInjection;
        case "nosql_injection":
        case "nosql":
            return SinkCategory.NoSqlInjection;
        case "xss":
        case "template_injection":
            return SinkCategory.TemplateInjection;
        case "log_injection":
        case "log_forging":
            return SinkCategory.LogForging;
        case "regex_dos":
        case "redos":
            return SinkCategory.ReDos;
        default:
            return null;
    }
}
/// <summary>
/// Returns the absolute project directory when <paramref name="targetPath"/>
/// points at an existing directory containing a package.json; otherwise null.
/// </summary>
private static string? ResolveProjectDirectory(string targetPath)
{
    if (string.IsNullOrWhiteSpace(targetPath))
    {
        return null;
    }

    var fullPath = Path.GetFullPath(targetPath);
    if (!Directory.Exists(fullPath))
    {
        return null;
    }

    // A package.json marks the root of a Node.js project.
    return File.Exists(Path.Combine(fullPath, "package.json")) ? fullPath : null;
}
private CallGraphSnapshot BuildFromTrace(string scanId, TraceDocument trace)
{
var extractedAt = _timeProvider.GetUtcNow();

View File

@@ -0,0 +1,101 @@
using System.Collections.Immutable;
namespace StellaOps.Scanner.Emit.Lineage;
/// <summary>
/// Interface for content-addressable SBOM storage with lineage tracking.
/// </summary>
public interface ISbomStore
{
    /// <summary>
    /// Stores an SBOM with optional parent reference.
    /// </summary>
    /// <param name="sbomContent">The canonical SBOM content.</param>
    /// <param name="imageDigest">The image digest.</param>
    /// <param name="parentId">Optional parent SBOM ID.</param>
    /// <param name="ct">Cancellation token.</param>
    /// <returns>The assigned SBOM ID.</returns>
    Task<SbomId> StoreAsync(
        string sbomContent,
        string imageDigest,
        SbomId? parentId = null,
        CancellationToken ct = default);

    /// <summary>
    /// Gets an SBOM by its content hash.
    /// </summary>
    /// <returns>The lineage record, or null when no SBOM has that hash.</returns>
    Task<SbomLineage?> GetByHashAsync(string contentHash, CancellationToken ct = default);

    /// <summary>
    /// Gets an SBOM by its ID.
    /// </summary>
    /// <returns>The lineage record, or null when the ID is unknown.</returns>
    Task<SbomLineage?> GetByIdAsync(SbomId id, CancellationToken ct = default);

    /// <summary>
    /// Gets the lineage chain for an SBOM.
    /// </summary>
    Task<ImmutableArray<SbomLineage>> GetLineageAsync(SbomId id, CancellationToken ct = default);

    /// <summary>
    /// Gets the diff between two SBOMs.
    /// </summary>
    /// <returns>The diff, or null when either SBOM is unknown.</returns>
    Task<SbomDiff?> GetDiffAsync(SbomId fromId, SbomId toId, CancellationToken ct = default);

    /// <summary>
    /// Gets all SBOM versions for an image.
    /// </summary>
    Task<ImmutableArray<SbomLineage>> GetByImageDigestAsync(string imageDigest, CancellationToken ct = default);
}
/// <summary>
/// Extension methods for SBOM lineage traversal.
/// </summary>
public static class SbomLineageExtensions
{
    /// <summary>
    /// Gets the full ancestor chain as a list, starting at <paramref name="id"/>
    /// and following parent links until the root (or a missing record) is reached.
    /// </summary>
    /// <returns>The chain from the given SBOM up to its root; empty if the id is unknown.</returns>
    public static async Task<IReadOnlyList<SbomLineage>> GetFullAncestryAsync(
        this ISbomStore store,
        SbomId id,
        CancellationToken ct = default)
    {
        var ancestry = new List<SbomLineage>();
        var visited = new HashSet<SbomId>();
        var current = await store.GetByIdAsync(id, ct).ConfigureAwait(false);
        while (current != null)
        {
            // Bug fix: guard against cyclic parent links in a corrupt store,
            // which previously made this loop run forever.
            if (!visited.Add(current.Id))
                break;
            ancestry.Add(current);
            if (current.ParentId is null)
                break;
            current = await store.GetByIdAsync(current.ParentId.Value, ct).ConfigureAwait(false);
        }
        return ancestry;
    }

    /// <summary>
    /// Finds the first common ancestor of two SBOM versions (walking
    /// <paramref name="id2"/>'s lineage in store order), or null when the
    /// lineages do not intersect.
    /// </summary>
    public static async Task<SbomId?> FindCommonAncestorAsync(
        this ISbomStore store,
        SbomId id1,
        SbomId id2,
        CancellationToken ct = default)
    {
        var lineage1 = await store.GetLineageAsync(id1, ct).ConfigureAwait(false);
        var lineage2 = await store.GetLineageAsync(id2, ct).ConfigureAwait(false);
        var ancestors1 = lineage1.Select(l => l.Id).ToHashSet();
        foreach (var ancestor in lineage2)
        {
            if (ancestors1.Contains(ancestor.Id))
                return ancestor.Id;
        }
        return null;
    }
}

View File

@@ -0,0 +1,162 @@
using System.Collections.Immutable;
namespace StellaOps.Scanner.Emit.Lineage;
/// <summary>
/// Proof manifest that enables reproducible SBOM generation.
/// Captures every scan input (feeds, analyzers, policy, engine version) so the
/// same SBOM can be regenerated and compared later.
/// </summary>
public sealed record RebuildProof
{
    /// <summary>
    /// The SBOM ID this proof applies to.
    /// </summary>
    public required SbomId SbomId { get; init; }

    /// <summary>
    /// The image digest that was scanned.
    /// </summary>
    public required string ImageDigest { get; init; }

    /// <summary>
    /// Version of Stella Ops used for the scan.
    /// </summary>
    public required string StellaOpsVersion { get; init; }

    /// <summary>
    /// Snapshots of all feeds used during the scan.
    /// </summary>
    public required ImmutableArray<FeedSnapshot> FeedSnapshots { get; init; }

    /// <summary>
    /// Versions of all analyzers used during the scan.
    /// </summary>
    public required ImmutableArray<AnalyzerVersion> AnalyzerVersions { get; init; }

    /// <summary>
    /// Hash of the policy configuration used.
    /// </summary>
    public required string PolicyHash { get; init; }

    /// <summary>
    /// When the proof was generated.
    /// </summary>
    public required DateTimeOffset GeneratedAt { get; init; }

    /// <summary>
    /// DSSE signature of the proof (optional).
    /// </summary>
    public string? DsseSignature { get; init; }

    /// <summary>
    /// Hash of the entire proof document.
    /// Null until the proof has been finalized and hashed.
    /// </summary>
    public string? ProofHash { get; init; }
}
/// <summary>
/// Snapshot of a vulnerability/advisory feed at a point in time.
/// </summary>
public sealed record FeedSnapshot
{
    /// <summary>
    /// Unique feed identifier.
    /// </summary>
    public required string FeedId { get; init; }

    /// <summary>
    /// Feed name/description.
    /// </summary>
    public required string FeedName { get; init; }

    /// <summary>
    /// Hash of the feed content at snapshot time.
    /// NOTE(review): hash algorithm/format is not fixed here - confirm producers agree.
    /// </summary>
    public required string SnapshotHash { get; init; }

    /// <summary>
    /// When the snapshot was taken.
    /// </summary>
    public required DateTimeOffset AsOf { get; init; }

    /// <summary>
    /// Number of entries in the feed, when known.
    /// </summary>
    public int? EntryCount { get; init; }

    /// <summary>
    /// Feed version/revision if available.
    /// </summary>
    public string? FeedVersion { get; init; }
}
/// <summary>
/// Version of an analyzer used during scanning.
/// </summary>
public sealed record AnalyzerVersion
{
    /// <summary>
    /// Analyzer identifier.
    /// </summary>
    public required string AnalyzerId { get; init; }

    /// <summary>
    /// Analyzer name.
    /// </summary>
    public required string AnalyzerName { get; init; }

    /// <summary>
    /// Version string.
    /// </summary>
    public required string Version { get; init; }

    /// <summary>
    /// Hash of analyzer code/rules if available.
    /// </summary>
    public string? CodeHash { get; init; }

    /// <summary>
    /// Configuration hash if applicable.
    /// </summary>
    public string? ConfigHash { get; init; }
}
/// <summary>
/// Result of a rebuild verification.
/// </summary>
public sealed record RebuildVerification
{
    /// <summary>
    /// The proof that was verified.
    /// </summary>
    public required RebuildProof Proof { get; init; }

    /// <summary>
    /// Whether the rebuild was successful.
    /// </summary>
    public required bool Success { get; init; }

    /// <summary>
    /// The SBOM produced by the rebuild; null when the rebuild failed.
    /// </summary>
    public SbomId? RebuiltSbomId { get; init; }

    /// <summary>
    /// Whether the rebuilt SBOM matches the original; null when no comparison was possible.
    /// </summary>
    public bool? HashMatches { get; init; }

    /// <summary>
    /// Differences if the rebuild didn't match.
    /// </summary>
    public SbomDiff? Differences { get; init; }

    /// <summary>
    /// Error message if rebuild failed.
    /// </summary>
    public string? ErrorMessage { get; init; }

    /// <summary>
    /// When the verification was performed.
    /// </summary>
    public required DateTimeOffset VerifiedAt { get; init; }
}

View File

@@ -0,0 +1,168 @@
using System.Collections.Immutable;
namespace StellaOps.Scanner.Emit.Lineage;
/// <summary>
/// Semantic diff between two SBOM versions.
/// </summary>
public sealed record SbomDiff
{
    /// <summary>
    /// Source (baseline) SBOM ID.
    /// </summary>
    public required SbomId FromId { get; init; }

    /// <summary>
    /// Target SBOM ID.
    /// </summary>
    public required SbomId ToId { get; init; }

    /// <summary>
    /// Individual component-level changes, sorted for determinism.
    /// </summary>
    public required ImmutableArray<ComponentDelta> Deltas { get; init; }

    /// <summary>
    /// Summary of the diff.
    /// </summary>
    public required DiffSummary Summary { get; init; }

    /// <summary>
    /// When the diff was computed.
    /// </summary>
    public required DateTimeOffset ComputedAt { get; init; }
}
/// <summary>
/// A single component-level change.
/// Invariants: <see cref="Before"/> is null only for added components;
/// <see cref="After"/> is null only for removed components.
/// </summary>
public sealed record ComponentDelta
{
    /// <summary>
    /// Type of change.
    /// </summary>
    public required ComponentDeltaType Type { get; init; }

    /// <summary>
    /// The component reference before the change (null if added).
    /// </summary>
    public ComponentRef? Before { get; init; }

    /// <summary>
    /// The component reference after the change (null if removed).
    /// </summary>
    public ComponentRef? After { get; init; }

    /// <summary>
    /// List of fields that changed (for modified components); empty for adds/removes.
    /// </summary>
    public ImmutableArray<string> ChangedFields { get; init; } = [];
}
/// <summary>
/// Type of component change.
/// </summary>
public enum ComponentDeltaType
{
    /// <summary>
    /// Component was added.
    /// </summary>
    Added,

    /// <summary>
    /// Component was removed.
    /// </summary>
    Removed,

    /// <summary>
    /// Component version changed (takes precedence over other field changes).
    /// </summary>
    VersionChanged,

    /// <summary>
    /// Component license changed (without a version change).
    /// </summary>
    LicenseChanged,

    /// <summary>
    /// Component dependencies changed.
    /// </summary>
    DependencyChanged,

    /// <summary>
    /// Other metadata changed.
    /// </summary>
    MetadataChanged
}
/// <summary>
/// Reference to a component. The PURL serves as the identity key when diffing.
/// </summary>
public sealed record ComponentRef
{
    /// <summary>
    /// Package URL (PURL); used as the unique key for matching components across SBOMs.
    /// </summary>
    public required string Purl { get; init; }

    /// <summary>
    /// Component name.
    /// </summary>
    public required string Name { get; init; }

    /// <summary>
    /// Component version.
    /// </summary>
    public required string Version { get; init; }

    /// <summary>
    /// Component type/ecosystem.
    /// </summary>
    public string? Type { get; init; }

    /// <summary>
    /// License expression (SPDX).
    /// </summary>
    public string? License { get; init; }
}
/// <summary>
/// Summary statistics for a diff.
/// </summary>
public sealed record DiffSummary
{
    /// <summary>
    /// Number of components added.
    /// </summary>
    public required int Added { get; init; }

    /// <summary>
    /// Number of components removed.
    /// </summary>
    public required int Removed { get; init; }

    /// <summary>
    /// Number of components with version changes.
    /// </summary>
    public required int VersionChanged { get; init; }

    /// <summary>
    /// Number of components with other modifications.
    /// </summary>
    public required int OtherModified { get; init; }

    /// <summary>
    /// Number of components unchanged.
    /// </summary>
    public required int Unchanged { get; init; }

    /// <summary>
    /// Total components in target SBOM.
    /// Removed components are intentionally excluded: they are absent from the target.
    /// </summary>
    public int TotalComponents => Added + VersionChanged + OtherModified + Unchanged;

    /// <summary>
    /// Is this a breaking change (any removals or version downgrades)?
    /// </summary>
    public bool IsBreaking { get; init; }
}

View File

@@ -0,0 +1,195 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
namespace StellaOps.Scanner.Emit.Lineage;
/// <summary>
/// Engine for computing semantic diffs between SBOM versions.
/// Components are matched by PURL; output ordering is deterministic.
/// </summary>
public sealed class SbomDiffEngine
{
    // Hoisted: the hash serializer options are immutable and reusable
    // (previously allocated on every ComputeDiffHash call).
    private static readonly JsonSerializerOptions HashJsonOptions = new() { WriteIndented = false };

    /// <summary>
    /// Computes the semantic diff between two SBOMs.
    /// </summary>
    /// <param name="fromId">Identifier of the baseline SBOM.</param>
    /// <param name="fromComponents">Components of the baseline SBOM.</param>
    /// <param name="toId">Identifier of the target SBOM.</param>
    /// <param name="toComponents">Components of the target SBOM.</param>
    /// <returns>A deterministic, sorted diff with summary statistics.</returns>
    public SbomDiff ComputeDiff(
        SbomId fromId,
        IReadOnlyList<ComponentRef> fromComponents,
        SbomId toId,
        IReadOnlyList<ComponentRef> toComponents)
    {
        // Bug fix: tolerate duplicate PURLs (first occurrence wins) instead of
        // the ArgumentException that ToDictionary would throw.
        var fromByPurl = BuildIndex(fromComponents);
        var toByPurl = BuildIndex(toComponents);

        var deltas = new List<ComponentDelta>();
        var added = 0;
        var removed = 0;
        var versionChanged = 0;
        var otherModified = 0;
        var unchanged = 0;
        var isBreaking = false;

        // Find added and modified components.
        foreach (var (purl, toComp) in toByPurl)
        {
            if (!fromByPurl.TryGetValue(purl, out var fromComp))
            {
                // Added
                deltas.Add(new ComponentDelta
                {
                    Type = ComponentDeltaType.Added,
                    After = toComp
                });
                added++;
            }
            else
            {
                // Possibly modified. Version change takes precedence over
                // license, which takes precedence over other metadata.
                var changedFields = CompareComponents(fromComp, toComp);
                if (changedFields.Length > 0)
                {
                    var deltaType = changedFields.Contains("Version")
                        ? ComponentDeltaType.VersionChanged
                        : changedFields.Contains("License")
                            ? ComponentDeltaType.LicenseChanged
                            : ComponentDeltaType.MetadataChanged;

                    deltas.Add(new ComponentDelta
                    {
                        Type = deltaType,
                        Before = fromComp,
                        After = toComp,
                        ChangedFields = changedFields
                    });

                    if (deltaType == ComponentDeltaType.VersionChanged)
                        versionChanged++;
                    else
                        otherModified++;

                    // A version downgrade is treated as breaking.
                    if (changedFields.Contains("Version") && IsVersionDowngrade(fromComp.Version, toComp.Version))
                        isBreaking = true;
                }
                else
                {
                    unchanged++;
                }
            }
        }

        // Find removed components; any removal is treated as breaking.
        foreach (var (purl, fromComp) in fromByPurl)
        {
            if (!toByPurl.ContainsKey(purl))
            {
                deltas.Add(new ComponentDelta
                {
                    Type = ComponentDeltaType.Removed,
                    Before = fromComp
                });
                removed++;
                isBreaking = true;
            }
        }

        // Sort deltas for determinism.
        // Bug fix: use ordinal string comparison - the default comparer is
        // culture-sensitive and would order purls differently across locales.
        var sortedDeltas = deltas
            .OrderBy(d => d.Type)
            .ThenBy(d => d.Before?.Purl ?? d.After?.Purl, StringComparer.Ordinal)
            .ToImmutableArray();

        return new SbomDiff
        {
            FromId = fromId,
            ToId = toId,
            Deltas = sortedDeltas,
            Summary = new DiffSummary
            {
                Added = added,
                Removed = removed,
                VersionChanged = versionChanged,
                OtherModified = otherModified,
                Unchanged = unchanged,
                IsBreaking = isBreaking
            },
            // NOTE(review): wall-clock timestamp; inject TimeProvider if full
            // output determinism is required.
            ComputedAt = DateTimeOffset.UtcNow
        };
    }

    /// <summary>
    /// Creates a diff pointer (summary counts plus content hash) from a diff.
    /// </summary>
    public SbomDiffPointer CreatePointer(SbomDiff diff)
    {
        var hash = ComputeDiffHash(diff);
        return new SbomDiffPointer
        {
            ComponentsAdded = diff.Summary.Added,
            ComponentsRemoved = diff.Summary.Removed,
            ComponentsModified = diff.Summary.VersionChanged + diff.Summary.OtherModified,
            DiffHash = hash
        };
    }

    /// <summary>Indexes components by PURL; on duplicates the first occurrence wins.</summary>
    private static Dictionary<string, ComponentRef> BuildIndex(IReadOnlyList<ComponentRef> components)
    {
        var index = new Dictionary<string, ComponentRef>(components.Count, StringComparer.Ordinal);
        foreach (var component in components)
        {
            index.TryAdd(component.Purl, component);
        }
        return index;
    }

    /// <summary>Returns the names of fields that differ between the two refs.</summary>
    private static ImmutableArray<string> CompareComponents(ComponentRef from, ComponentRef to)
    {
        var changes = new List<string>();
        if (from.Version != to.Version)
            changes.Add("Version");
        if (from.License != to.License)
            changes.Add("License");
        if (from.Type != to.Type)
            changes.Add("Type");
        return [.. changes];
    }

    /// <summary>
    /// Best-effort downgrade detection via numeric dot-segment comparison.
    /// NOTE(review): not ecosystem-aware (no epochs, pre-release tags, etc.);
    /// non-numeric versions fall back to ordinal string comparison.
    /// </summary>
    private static bool IsVersionDowngrade(string fromVersion, string toVersion)
    {
        try
        {
            var fromParts = fromVersion.Split('.').Select(int.Parse).ToArray();
            var toParts = toVersion.Split('.').Select(int.Parse).ToArray();
            for (var i = 0; i < Math.Min(fromParts.Length, toParts.Length); i++)
            {
                if (toParts[i] < fromParts[i]) return true;
                if (toParts[i] > fromParts[i]) return false;
            }
            // Fewer segments with an equal prefix counts as a downgrade.
            return toParts.Length < fromParts.Length;
        }
        catch
        {
            // Fall back to string comparison.
            return string.Compare(toVersion, fromVersion, StringComparison.Ordinal) < 0;
        }
    }

    /// <summary>
    /// Stable hash over the diff's identity (ids, delta types, purls, changed fields);
    /// timestamps and full component payloads are deliberately excluded.
    /// </summary>
    private static string ComputeDiffHash(SbomDiff diff)
    {
        var json = JsonSerializer.Serialize(new
        {
            diff.FromId,
            diff.ToId,
            Deltas = diff.Deltas.Select(d => new
            {
                d.Type,
                BeforePurl = d.Before?.Purl,
                AfterPurl = d.After?.Purl,
                d.ChangedFields
            })
        }, HashJsonOptions);
        var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(json));
        return Convert.ToHexStringLower(hashBytes);
    }
}

View File

@@ -0,0 +1,85 @@
using System.Collections.Immutable;
namespace StellaOps.Scanner.Emit.Lineage;
/// <summary>
/// Represents an SBOM with lineage tracking to its parent versions.
/// </summary>
public sealed record SbomLineage
{
    /// <summary>
    /// Unique identifier for this SBOM version.
    /// </summary>
    public required SbomId Id { get; init; }

    /// <summary>
    /// Parent SBOM ID (null if this is the first version).
    /// </summary>
    public SbomId? ParentId { get; init; }

    /// <summary>
    /// The image digest this SBOM describes.
    /// </summary>
    public required string ImageDigest { get; init; }

    /// <summary>
    /// Content-addressable hash (SHA-256 of canonical SBOM).
    /// </summary>
    public required string ContentHash { get; init; }

    /// <summary>
    /// When this SBOM version was created.
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>
    /// Ancestor chain (parent, grandparent, etc.); empty for a root version.
    /// </summary>
    public ImmutableArray<SbomId> Ancestors { get; init; } = [];

    /// <summary>
    /// Pointer to the diff from parent (null if no parent).
    /// </summary>
    public SbomDiffPointer? DiffFromParent { get; init; }
}
/// <summary>
/// Strongly-typed SBOM identifier wrapping a <see cref="Guid"/>.
/// </summary>
public readonly record struct SbomId(Guid Value)
{
    /// <summary>Creates a new random identifier.</summary>
    public static SbomId New() => new(Guid.NewGuid());

    /// <summary>Parses the string form produced by <see cref="ToString"/>; throws on invalid input.</summary>
    public static SbomId Parse(string value) => new(Guid.Parse(value));

    /// <summary>Canonical string form (the GUID's default "D" format).</summary>
    public override string ToString() => Value.ToString();
}
/// <summary>
/// Pointer to a diff document with summary statistics, allowing the full diff
/// to be fetched by hash on demand.
/// </summary>
public sealed record SbomDiffPointer
{
    /// <summary>
    /// Number of components added since parent.
    /// </summary>
    public required int ComponentsAdded { get; init; }

    /// <summary>
    /// Number of components removed since parent.
    /// </summary>
    public required int ComponentsRemoved { get; init; }

    /// <summary>
    /// Number of components modified since parent.
    /// </summary>
    public required int ComponentsModified { get; init; }

    /// <summary>
    /// Hash of the diff document for retrieval.
    /// </summary>
    public required string DiffHash { get; init; }

    /// <summary>
    /// Total number of changes (added + removed + modified).
    /// </summary>
    public int TotalChanges => ComponentsAdded + ComponentsRemoved + ComponentsModified;
}

View File

@@ -14,7 +14,7 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="CycloneDX.Core" Version="11.0.0" />
<PackageReference Include="CycloneDX.Core" Version="10.0.2" />
<PackageReference Include="RoaringBitmap" Version="0.0.9" />
</ItemGroup>

View File

@@ -33,6 +33,12 @@ public sealed record EvidenceBundle
/// EPSS evidence.
/// </summary>
public EpssEvidence? Epss { get; init; }
/// <summary>
/// Version comparison evidence for backport explainability.
/// Shows which comparator was used and why a package is considered fixed/vulnerable.
/// </summary>
public VersionComparisonEvidence? VersionComparison { get; init; }
}
/// <summary>

View File

@@ -0,0 +1,79 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Sprint: SPRINT_4000_0002_0001
// Task: T1 - Extend Findings API Response with version comparison metadata
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Evidence.Models;
/// <summary>
/// Evidence of version comparison used to determine vulnerability status.
/// Provides explainability for backport detection logic.
/// </summary>
public sealed record VersionComparisonEvidence
{
    /// <summary>
    /// Comparator algorithm used (rpm-evr, dpkg, apk, semver).
    /// </summary>
    [JsonPropertyName("comparator")]
    public required string Comparator { get; init; }

    /// <summary>
    /// Installed version in native format.
    /// </summary>
    [JsonPropertyName("installedVersion")]
    public required string InstalledVersion { get; init; }

    /// <summary>
    /// Fixed version threshold from advisory.
    /// </summary>
    [JsonPropertyName("fixedVersion")]
    public required string FixedVersion { get; init; }

    /// <summary>
    /// Whether the installed version is >= fixed.
    /// </summary>
    [JsonPropertyName("isFixed")]
    public required bool IsFixed { get; init; }

    /// <summary>
    /// Human-readable proof lines showing comparison steps.
    /// </summary>
    [JsonPropertyName("proofLines")]
    public ImmutableArray<string> ProofLines { get; init; } = [];

    /// <summary>
    /// Advisory source (DSA-1234, RHSA-2025:1234, USN-1234-1).
    /// </summary>
    [JsonPropertyName("advisorySource")]
    public string? AdvisorySource { get; init; }

    /// <summary>
    /// Builds an evidence record from the raw output of a version comparator.
    /// </summary>
    /// <param name="comparator">The comparator type identifier.</param>
    /// <param name="installedVersion">The installed version string.</param>
    /// <param name="fixedVersion">The fixed version threshold.</param>
    /// <param name="comparisonResult">The comparison result (negative if installed &lt; fixed).</param>
    /// <param name="proofLines">Human-readable comparison steps.</param>
    /// <param name="advisorySource">Optional advisory identifier.</param>
    public static VersionComparisonEvidence Create(
        string comparator,
        string installedVersion,
        string fixedVersion,
        int comparisonResult,
        ImmutableArray<string> proofLines,
        string? advisorySource = null)
    {
        // A non-negative comparison result means installed >= fixed, i.e. patched.
        var isFixed = comparisonResult >= 0;
        return new VersionComparisonEvidence
        {
            Comparator = comparator,
            InstalledVersion = installedVersion,
            FixedVersion = fixedVersion,
            IsFixed = isFixed,
            ProofLines = proofLines,
            AdvisorySource = advisorySource
        };
    }
}

View File

@@ -0,0 +1,108 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
namespace StellaOps.Scanner.Explainability.Assumptions;
/// <summary>
/// Represents a single assumption made during vulnerability analysis.
/// Assumptions capture the conditions under which a finding is considered valid.
/// </summary>
/// <param name="Category">The category of assumption (compiler flag, runtime config, etc.)</param>
/// <param name="Key">The specific assumption key (e.g., "-fstack-protector", "DEBUG_MODE")</param>
/// <param name="AssumedValue">The value assumed during analysis</param>
/// <param name="ObservedValue">The actual observed value, if available</param>
/// <param name="Source">How the assumption was derived</param>
/// <param name="Confidence">The confidence level in this assumption</param>
public sealed record Assumption(
    AssumptionCategory Category,
    string Key,
    string AssumedValue,
    string? ObservedValue,
    AssumptionSource Source,
    ConfidenceLevel Confidence
)
{
    /// <summary>
    /// True when an observation exists and agrees (case-insensitively) with the assumed value.
    /// </summary>
    public bool IsValidated =>
        ObservedValue is { } observed
        && string.Equals(AssumedValue, observed, StringComparison.OrdinalIgnoreCase);

    /// <summary>
    /// True when an observation exists and disagrees (case-insensitively) with the assumed value.
    /// Both this and <see cref="IsValidated"/> are false while no observation is available.
    /// </summary>
    public bool IsContradicted =>
        ObservedValue is { } observed
        && !string.Equals(AssumedValue, observed, StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Categories of assumptions that affect vulnerability exploitability.
/// Each member names a distinct class of build-time or runtime evidence.
/// </summary>
public enum AssumptionCategory
{
    /// <summary>Compiler hardening flags like -fstack-protector, -D_FORTIFY_SOURCE</summary>
    CompilerFlag,
    /// <summary>Environment variables, config files, runtime settings</summary>
    RuntimeConfig,
    /// <summary>Feature flags, build variants, conditional compilation</summary>
    FeatureGate,
    /// <summary>LD_PRELOAD, RPATH, symbol versioning, loader behavior</summary>
    LoaderBehavior,
    /// <summary>Port bindings, firewall rules, network exposure</summary>
    NetworkExposure,
    /// <summary>Capabilities, seccomp profiles, AppArmor/SELinux policies</summary>
    ProcessPrivilege,
    /// <summary>Memory layout protections such as ASLR and PIE</summary>
    MemoryProtection,
    /// <summary>System call availability and filtering</summary>
    SyscallAvailability
}
/// <summary>
/// How an assumption was derived (its provenance).
/// </summary>
public enum AssumptionSource
{
    /// <summary>Default assumption when no evidence is available</summary>
    Default,
    /// <summary>Inferred from static analysis of binaries/code</summary>
    StaticAnalysis,
    /// <summary>Observed from runtime telemetry</summary>
    RuntimeObservation,
    /// <summary>Derived from container/image manifest</summary>
    ImageManifest,
    /// <summary>Provided by user configuration</summary>
    UserProvided,
    /// <summary>Extracted from Dockerfile or build configuration</summary>
    BuildConfig
}
/// <summary>
/// Confidence level in an assumption. Numeric values are ordered (1..4) so levels
/// can be compared and aggregated (e.g. a minimum across a set).
/// </summary>
public enum ConfidenceLevel
{
    /// <summary>No evidence, using defaults</summary>
    Low = 1,
    /// <summary>Some indirect evidence</summary>
    Medium = 2,
    /// <summary>Strong evidence from static analysis</summary>
    High = 3,
    /// <summary>Direct runtime observation</summary>
    Verified = 4
}

View File

@@ -0,0 +1,119 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
using System.Collections.Immutable;
namespace StellaOps.Scanner.Explainability.Assumptions;
/// <summary>
/// A collection of assumptions associated with a finding or analysis context.
/// Immutable: mutation-style operations return a new set.
/// </summary>
public sealed record AssumptionSet
{
    /// <summary>
    /// The unique identifier for this assumption set.
    /// </summary>
    public required string Id { get; init; }

    /// <summary>
    /// The assumptions in this set, keyed by category and key.
    /// </summary>
    public ImmutableArray<Assumption> Assumptions { get; init; } = [];

    /// <summary>
    /// When this assumption set was created.
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>
    /// Optional context identifier (e.g., finding ID, image digest).
    /// </summary>
    public string? ContextId { get; init; }

    /// <summary>
    /// Gets all assumptions of a specific category (lazily enumerated).
    /// </summary>
    public IEnumerable<Assumption> GetByCategory(AssumptionCategory category)
    {
        foreach (var assumption in Assumptions)
        {
            if (assumption.Category == category)
            {
                yield return assumption;
            }
        }
    }

    /// <summary>
    /// Gets a specific assumption by category and key (key match is case-insensitive),
    /// or null when no assumption matches.
    /// </summary>
    public Assumption? Get(AssumptionCategory category, string key)
    {
        foreach (var assumption in Assumptions)
        {
            if (assumption.Category == category &&
                string.Equals(assumption.Key, key, StringComparison.OrdinalIgnoreCase))
            {
                return assumption;
            }
        }

        return null;
    }

    /// <summary>
    /// Returns the overall confidence level (minimum of all assumptions;
    /// Low when the set is empty).
    /// </summary>
    public ConfidenceLevel OverallConfidence
    {
        get
        {
            if (Assumptions.Length == 0)
            {
                return ConfidenceLevel.Low;
            }

            return Assumptions.Min(a => a.Confidence);
        }
    }

    /// <summary>
    /// Returns the count of validated assumptions.
    /// </summary>
    public int ValidatedCount => Assumptions.Count(a => a.IsValidated);

    /// <summary>
    /// Returns the count of contradicted assumptions.
    /// </summary>
    public int ContradictedCount => Assumptions.Count(a => a.IsContradicted);

    /// <summary>
    /// Returns true if any assumption is contradicted by observed evidence.
    /// </summary>
    public bool HasContradictions => Assumptions.Any(a => a.IsContradicted);

    /// <summary>
    /// Returns the validation ratio (validated / total with observations);
    /// 0.0 when nothing has been observed yet.
    /// </summary>
    public double ValidationRatio
    {
        get
        {
            var observed = Assumptions.Count(a => a.ObservedValue is not null);
            return observed == 0 ? 0.0 : (double)ValidatedCount / observed;
        }
    }

    /// <summary>
    /// Creates a new AssumptionSet with an additional assumption.
    /// </summary>
    public AssumptionSet WithAssumption(Assumption assumption) =>
        this with { Assumptions = Assumptions.Add(assumption) };

    /// <summary>
    /// Creates a new AssumptionSet with an updated observation for the first
    /// matching assumption; returns this set unchanged when none matches.
    /// </summary>
    public AssumptionSet WithObservation(AssumptionCategory category, string key, string observedValue)
    {
        for (var i = 0; i < Assumptions.Length; i++)
        {
            var candidate = Assumptions[i];
            if (candidate.Category == category &&
                string.Equals(candidate.Key, key, StringComparison.OrdinalIgnoreCase))
            {
                var updated = candidate with { ObservedValue = observedValue };
                return this with { Assumptions = Assumptions.SetItem(i, updated) };
            }
        }

        return this;
    }
}
/// <summary>
/// Extension methods for ImmutableArray to support FindIndex.
/// </summary>
internal static class ImmutableArrayExtensions
{
    /// <summary>
    /// Returns the index of the first element for which <paramref name="predicate"/>
    /// returns true, or -1 when no element matches (including the empty array).
    /// </summary>
    public static int FindIndex<T>(this ImmutableArray<T> array, Func<T, bool> predicate)
    {
        var index = 0;
        foreach (var item in array)
        {
            if (predicate(item))
            {
                return index;
            }

            index++;
        }

        return -1;
    }
}

View File

@@ -0,0 +1,117 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
namespace StellaOps.Scanner.Explainability.Assumptions;
/// <summary>
/// Collects assumptions from various sources during vulnerability analysis
/// and assembles them into an <see cref="AssumptionSet"/>.
/// </summary>
public interface IAssumptionCollector
{
    /// <summary>
    /// Records an assumption made during analysis.
    /// </summary>
    /// <param name="category">The category of assumption</param>
    /// <param name="key">The assumption key</param>
    /// <param name="assumedValue">The assumed value</param>
    /// <param name="source">How the assumption was derived</param>
    /// <param name="confidence">Confidence level (defaults to Low)</param>
    void Record(
        AssumptionCategory category,
        string key,
        string assumedValue,
        AssumptionSource source,
        ConfidenceLevel confidence = ConfidenceLevel.Low);
    /// <summary>
    /// Records an observation that validates or contradicts an assumption.
    /// </summary>
    /// <param name="category">The category of assumption</param>
    /// <param name="key">The assumption key</param>
    /// <param name="observedValue">The observed value</param>
    void RecordObservation(AssumptionCategory category, string key, string observedValue);
    /// <summary>
    /// Builds the final assumption set from collected assumptions.
    /// </summary>
    /// <param name="contextId">Optional context identifier</param>
    /// <returns>The completed assumption set</returns>
    AssumptionSet Build(string? contextId = null);
    /// <summary>
    /// Clears all collected assumptions.
    /// </summary>
    void Clear();
}
/// <summary>
/// Default implementation of <see cref="IAssumptionCollector"/>.
/// Assumptions are deduplicated per (category, lower-cased key); the
/// highest-confidence record wins, and observations raise confidence to Verified.
/// </summary>
public sealed class AssumptionCollector : IAssumptionCollector
{
    // Keyed by (category, lower-cased key) so lookups are case-insensitive
    // while the original key casing is preserved inside the Assumption itself.
    private readonly Dictionary<(AssumptionCategory, string), Assumption> _assumptions = new();

    /// <inheritdoc />
    public void Record(
        AssumptionCategory category,
        string key,
        string assumedValue,
        AssumptionSource source,
        ConfidenceLevel confidence = ConfidenceLevel.Low)
    {
        var mapKey = (category, key.ToLowerInvariant());

        if (_assumptions.TryGetValue(mapKey, out var existing) && confidence <= existing.Confidence)
        {
            // An equal-or-higher-confidence assumption is already recorded; keep it.
            return;
        }

        // Replace the assumption, but carry over any observation already attached.
        _assumptions[mapKey] = new Assumption(
            category,
            key,
            assumedValue,
            existing?.ObservedValue,
            source,
            confidence);
    }

    /// <inheritdoc />
    public void RecordObservation(AssumptionCategory category, string key, string observedValue)
    {
        var mapKey = (category, key.ToLowerInvariant());

        _assumptions[mapKey] = _assumptions.TryGetValue(mapKey, out var existing)
            // Attach the observation to the existing assumption and mark it verified.
            ? existing with { ObservedValue = observedValue, Confidence = ConfidenceLevel.Verified }
            // No prior assumption: treat the observation itself as the assumed value.
            : new Assumption(
                category,
                key,
                observedValue,
                observedValue,
                AssumptionSource.RuntimeObservation,
                ConfidenceLevel.Verified);
    }

    /// <inheritdoc />
    public AssumptionSet Build(string? contextId = null)
    {
        return new AssumptionSet
        {
            Id = Guid.NewGuid().ToString("N"),
            Assumptions = [.. _assumptions.Values],
            CreatedAt = DateTimeOffset.UtcNow,
            ContextId = contextId
        };
    }

    /// <inheritdoc />
    public void Clear() => _assumptions.Clear();
}

View File

@@ -0,0 +1,226 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
using StellaOps.Scanner.Explainability.Assumptions;
using StellaOps.Scanner.Explainability.Falsifiability;
namespace StellaOps.Scanner.Explainability.Confidence;
/// <summary>
/// Evidence factors that contribute to confidence scoring.
/// All factors are optional; absent evidence simply contributes no weight.
/// </summary>
public sealed record EvidenceFactors
{
    /// <summary>Assumption set for the finding, if tracked</summary>
    public AssumptionSet? Assumptions { get; init; }
    /// <summary>Falsifiability criteria for the finding, if generated</summary>
    public FalsifiabilityCriteria? Falsifiability { get; init; }
    /// <summary>Whether static reachability analysis was performed</summary>
    public bool HasStaticReachability { get; init; }
    /// <summary>Whether runtime observations are available</summary>
    public bool HasRuntimeObservations { get; init; }
    /// <summary>Whether SBOM lineage is tracked</summary>
    public bool HasSbomLineage { get; init; }
    /// <summary>Number of corroborating vulnerability sources (defaults to 1)</summary>
    public int SourceCount { get; init; } = 1;
    /// <summary>Whether VEX assessment is available</summary>
    public bool HasVexAssessment { get; init; }
    /// <summary>Whether exploit code is known to exist</summary>
    public bool HasKnownExploit { get; init; }
}
/// <summary>
/// Result of evidence density scoring.
/// </summary>
public sealed record EvidenceDensityScore
{
    /// <summary>Overall confidence score (0.0 to 1.0)</summary>
    public required double Score { get; init; }
    /// <summary>Confidence level derived from the score thresholds</summary>
    public required ConfidenceLevel Level { get; init; }
    /// <summary>Individual factor contributions, keyed by factor name</summary>
    public required IReadOnlyDictionary<string, double> FactorBreakdown { get; init; }
    /// <summary>Human-readable explanation of the score</summary>
    public required string Explanation { get; init; }
    /// <summary>Recommendations to improve confidence (may be empty)</summary>
    public required IReadOnlyList<string> ImprovementRecommendations { get; init; }
}
/// <summary>
/// Calculates confidence scores based on evidence density.
/// More evidence types and validation = higher confidence in the finding accuracy.
/// </summary>
public interface IEvidenceDensityScorer
{
    /// <summary>
    /// Calculates an evidence density score for a finding.
    /// </summary>
    /// <param name="factors">The evidence factors collected for the finding.</param>
    /// <returns>The weighted score, level, per-factor breakdown, and recommendations.</returns>
    EvidenceDensityScore Calculate(EvidenceFactors factors);
}
/// <summary>
/// Default implementation of <see cref="IEvidenceDensityScorer"/>.
/// Sums weighted contributions from eight evidence factors (weights total 1.0)
/// and maps the resulting score onto a <see cref="ConfidenceLevel"/>.
/// </summary>
public sealed class EvidenceDensityScorer : IEvidenceDensityScorer
{
    // Weights for different evidence factors; they sum to 1.0 so the total
    // score stays normalized to the [0, 1] range.
    private const double WeightAssumptionValidation = 0.20;
    private const double WeightFalsifiabilityEval = 0.15;
    private const double WeightStaticReachability = 0.15;
    private const double WeightRuntimeObservation = 0.20;
    private const double WeightSbomLineage = 0.05;
    private const double WeightMultipleSources = 0.10;
    private const double WeightVexAssessment = 0.10;
    private const double WeightKnownExploit = 0.05;

    /// <inheritdoc />
    public EvidenceDensityScore Calculate(EvidenceFactors factors)
    {
        var breakdown = new Dictionary<string, double>();
        var recommendations = new List<string>();

        // Factor 1: Assumption validation ratio
        double assumptionScore = 0.0;
        if (factors.Assumptions is not null && factors.Assumptions.Assumptions.Length > 0)
        {
            assumptionScore = factors.Assumptions.ValidationRatio * WeightAssumptionValidation;
            if (factors.Assumptions.ValidationRatio < 0.5)
            {
                recommendations.Add("Validate more assumptions with runtime observations or static analysis");
            }
        }
        else
        {
            recommendations.Add("Add assumption tracking to understand analysis context");
        }
        breakdown["assumption_validation"] = assumptionScore;

        // Factor 2: Falsifiability evaluation — reward criteria that have actually
        // been evaluated (Satisfied or NotSatisfied), not pending/inconclusive ones.
        double falsifiabilityScore = 0.0;
        if (factors.Falsifiability is not null)
        {
            var evaluatedCount = factors.Falsifiability.Criteria
                .Count(c => c.Status is CriterionStatus.Satisfied or CriterionStatus.NotSatisfied);
            var totalCount = factors.Falsifiability.Criteria.Length;
            if (totalCount > 0)
            {
                falsifiabilityScore = ((double)evaluatedCount / totalCount) * WeightFalsifiabilityEval;
            }
            if (factors.Falsifiability.Status == FalsifiabilityStatus.PartiallyEvaluated)
            {
                recommendations.Add("Complete evaluation of pending falsifiability criteria");
            }
        }
        else
        {
            recommendations.Add("Generate falsifiability criteria to understand what would disprove this finding");
        }
        breakdown["falsifiability_evaluation"] = falsifiabilityScore;

        // Factor 3: Static reachability
        double staticReachScore = factors.HasStaticReachability ? WeightStaticReachability : 0.0;
        if (!factors.HasStaticReachability)
        {
            recommendations.Add("Perform static reachability analysis to verify code paths");
        }
        breakdown["static_reachability"] = staticReachScore;

        // Factor 4: Runtime observations
        double runtimeScore = factors.HasRuntimeObservations ? WeightRuntimeObservation : 0.0;
        if (!factors.HasRuntimeObservations)
        {
            recommendations.Add("Collect runtime observations to verify actual behavior");
        }
        breakdown["runtime_observations"] = runtimeScore;

        // Factor 5: SBOM lineage
        double lineageScore = factors.HasSbomLineage ? WeightSbomLineage : 0.0;
        if (!factors.HasSbomLineage)
        {
            recommendations.Add("Track SBOM lineage for reproducibility");
        }
        breakdown["sbom_lineage"] = lineageScore;

        // Factor 6: Multiple sources — saturates at 3 sources. Clamp (not Min) so a
        // zero/negative SourceCount cannot push the factor (and total score) negative.
        double sourceScore = Math.Clamp(factors.SourceCount, 0, 3) / 3.0 * WeightMultipleSources;
        if (factors.SourceCount < 2)
        {
            recommendations.Add("Cross-reference with additional vulnerability databases");
        }
        breakdown["multiple_sources"] = sourceScore;

        // Factor 7: VEX assessment
        double vexScore = factors.HasVexAssessment ? WeightVexAssessment : 0.0;
        if (!factors.HasVexAssessment)
        {
            recommendations.Add("Obtain vendor VEX assessment for authoritative status");
        }
        breakdown["vex_assessment"] = vexScore;

        // Factor 8: Known exploit
        double exploitScore = factors.HasKnownExploit ? WeightKnownExploit : 0.0;
        // Not having a known exploit is not a negative - don't recommend
        breakdown["known_exploit"] = exploitScore;

        // Calculate total score
        double totalScore = breakdown.Values.Sum();
        var level = ScoreToLevel(totalScore);
        var explanation = GenerateExplanation(totalScore, level, breakdown);

        return new EvidenceDensityScore
        {
            Score = Math.Round(totalScore, 3),
            Level = level,
            FactorBreakdown = breakdown,
            Explanation = explanation,
            ImprovementRecommendations = recommendations
        };
    }

    /// <summary>Maps a normalized score onto a confidence level.</summary>
    private static ConfidenceLevel ScoreToLevel(double score) => score switch
    {
        >= 0.75 => ConfidenceLevel.Verified,
        >= 0.50 => ConfidenceLevel.High,
        >= 0.25 => ConfidenceLevel.Medium,
        _ => ConfidenceLevel.Low
    };

    /// <summary>
    /// Produces a one-sentence explanation naming the top three contributing factors.
    /// </summary>
    private static string GenerateExplanation(
        double score,
        ConfidenceLevel level,
        Dictionary<string, double> breakdown)
    {
        var topFactors = breakdown
            .Where(kv => kv.Value > 0)
            .OrderByDescending(kv => kv.Value)
            .Take(3)
            .Select(kv => kv.Key.Replace("_", " "));
        var factorList = string.Join(", ", topFactors);
        return level switch
        {
            ConfidenceLevel.Verified =>
                $"Very high confidence ({score:P0}). Strong evidence from: {factorList}.",
            ConfidenceLevel.High =>
                $"High confidence ({score:P0}). Good evidence from: {factorList}.",
            ConfidenceLevel.Medium =>
                $"Medium confidence ({score:P0}). Some evidence from: {factorList}.",
            _ =>
                $"Low confidence ({score:P0}). Limited evidence available. Consider gathering more data."
        };
    }
}

View File

@@ -0,0 +1,232 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Scanner.Explainability.Assumptions;
using StellaOps.Scanner.Explainability.Confidence;
using StellaOps.Scanner.Explainability.Falsifiability;
namespace StellaOps.Scanner.Explainability.Dsse;
/// <summary>
/// Serializes explainability data to DSSE predicate format.
/// </summary>
public interface IExplainabilityPredicateSerializer
{
    /// <summary>
    /// The predicate type URI for finding explainability predicates.
    /// </summary>
    const string PredicateType = "https://stella-ops.org/predicates/finding-explainability/v2";
    /// <summary>
    /// Converts a risk report to DSSE predicate format (UTF-8 JSON bytes).
    /// </summary>
    byte[] Serialize(RiskReport report);
    /// <summary>
    /// Converts a risk report to a predicate object that can be embedded in DSSE.
    /// </summary>
    FindingExplainabilityPredicate ToPredicate(RiskReport report);
}
/// <summary>
/// Default implementation of <see cref="IExplainabilityPredicateSerializer"/>.
/// Maps a <see cref="RiskReport"/> onto flat predicate DTOs (enums rendered as
/// strings) and serializes them as compact camelCase JSON.
/// </summary>
public sealed class ExplainabilityPredicateSerializer : IExplainabilityPredicateSerializer
{
    // camelCase properties + string enums keep the predicate JSON readable;
    // nulls are omitted and indentation disabled to keep payloads compact.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        WriteIndented = false,
        Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }
    };
    /// <inheritdoc />
    public byte[] Serialize(RiskReport report)
    {
        var predicate = ToPredicate(report);
        return JsonSerializer.SerializeToUtf8Bytes(predicate, SerializerOptions);
    }
    /// <inheritdoc />
    public FindingExplainabilityPredicate ToPredicate(RiskReport report)
    {
        // NOTE(review): Severity and FixedVersion exist on the predicate DTO but
        // are not populated here — confirm whether RiskReport carries them.
        return new FindingExplainabilityPredicate
        {
            FindingId = report.FindingId,
            VulnerabilityId = report.VulnerabilityId,
            PackageName = report.PackageName,
            PackageVersion = report.PackageVersion,
            GeneratedAt = report.GeneratedAt,
            EngineVersion = report.EngineVersion,
            Explanation = report.Explanation,
            DetailedNarrative = report.DetailedNarrative,
            Assumptions = report.Assumptions is not null ? ToPredicateAssumptions(report.Assumptions) : null,
            Falsifiability = report.Falsifiability is not null ? ToPredicateFalsifiability(report.Falsifiability) : null,
            ConfidenceScore = report.ConfidenceScore is not null ? ToPredicateConfidence(report.ConfidenceScore) : null,
            RecommendedActions = report.RecommendedActions
                .Select(a => new PredicateRecommendedAction
                {
                    Priority = a.Priority,
                    Action = a.Action,
                    Rationale = a.Rationale,
                    Effort = a.Effort.ToString()
                })
                .ToArray()
        };
    }
    /// <summary>Maps the assumption set to its predicate DTO, stringifying enums.</summary>
    private static PredicateAssumptionSet ToPredicateAssumptions(AssumptionSet assumptions)
    {
        return new PredicateAssumptionSet
        {
            Id = assumptions.Id,
            ContextId = assumptions.ContextId,
            CreatedAt = assumptions.CreatedAt,
            Assumptions = assumptions.Assumptions
                .Select(a => new PredicateAssumption
                {
                    Category = a.Category.ToString(),
                    Key = a.Key,
                    AssumedValue = a.AssumedValue,
                    ObservedValue = a.ObservedValue,
                    Source = a.Source.ToString(),
                    Confidence = a.Confidence.ToString()
                })
                .ToArray()
        };
    }
    /// <summary>Maps falsifiability criteria to their predicate DTO, stringifying enums.</summary>
    private static PredicateFalsifiabilityCriteria ToPredicateFalsifiability(FalsifiabilityCriteria falsifiability)
    {
        return new PredicateFalsifiabilityCriteria
        {
            Id = falsifiability.Id,
            FindingId = falsifiability.FindingId,
            GeneratedAt = falsifiability.GeneratedAt,
            Status = falsifiability.Status.ToString(),
            Summary = falsifiability.Summary,
            Criteria = falsifiability.Criteria
                .Select(c => new PredicateFalsificationCriterion
                {
                    Type = c.Type.ToString(),
                    Description = c.Description,
                    CheckExpression = c.CheckExpression,
                    Evidence = c.Evidence,
                    Status = c.Status.ToString()
                })
                .ToArray()
        };
    }
    /// <summary>Maps the evidence density score to its predicate DTO.</summary>
    private static PredicateEvidenceDensityScore ToPredicateConfidence(EvidenceDensityScore score)
    {
        return new PredicateEvidenceDensityScore
        {
            Score = score.Score,
            Level = score.Level.ToString(),
            FactorBreakdown = score.FactorBreakdown.ToDictionary(kv => kv.Key, kv => kv.Value),
            Explanation = score.Explanation,
            ImprovementRecommendations = score.ImprovementRecommendations.ToArray()
        };
    }
}
#region Predicate DTOs
/// <summary>
/// DSSE predicate DTO for finding explainability.
/// </summary>
public sealed class FindingExplainabilityPredicate
{
    /// <summary>Unique identifier of the finding.</summary>
    public required string FindingId { get; init; }
    /// <summary>The CVE or vulnerability identifier.</summary>
    public required string VulnerabilityId { get; init; }
    /// <summary>Affected package name.</summary>
    public required string PackageName { get; init; }
    /// <summary>Affected package version.</summary>
    public required string PackageVersion { get; init; }
    /// <summary>Severity label. NOTE(review): not populated by ExplainabilityPredicateSerializer.ToPredicate — confirm intended.</summary>
    public string? Severity { get; init; }
    /// <summary>Fixed version, if known. NOTE(review): not populated by ExplainabilityPredicateSerializer.ToPredicate — confirm intended.</summary>
    public string? FixedVersion { get; init; }
    /// <summary>When the report was generated.</summary>
    public required DateTimeOffset GeneratedAt { get; init; }
    /// <summary>Version of the analysis engine that produced the report.</summary>
    public required string EngineVersion { get; init; }
    /// <summary>Short human-readable explanation.</summary>
    public string? Explanation { get; init; }
    /// <summary>Longer narrative explanation.</summary>
    public string? DetailedNarrative { get; init; }
    /// <summary>Assumptions made during analysis, if tracked.</summary>
    public PredicateAssumptionSet? Assumptions { get; init; }
    /// <summary>Falsifiability criteria, if generated.</summary>
    public PredicateFalsifiabilityCriteria? Falsifiability { get; init; }
    /// <summary>Evidence density confidence score, if calculated.</summary>
    public PredicateEvidenceDensityScore? ConfidenceScore { get; init; }
    /// <summary>Recommended remediation actions.</summary>
    public PredicateRecommendedAction[]? RecommendedActions { get; init; }
}
/// <summary>
/// Predicate DTO for assumption set.
/// </summary>
public sealed class PredicateAssumptionSet
{
    /// <summary>Unique identifier of the assumption set.</summary>
    public required string Id { get; init; }
    /// <summary>Optional context identifier (e.g., finding ID, image digest).</summary>
    public string? ContextId { get; init; }
    /// <summary>When the assumption set was created.</summary>
    public required DateTimeOffset CreatedAt { get; init; }
    /// <summary>The serialized assumptions.</summary>
    public required PredicateAssumption[] Assumptions { get; init; }
}
/// <summary>
/// Predicate DTO for individual assumption. Enum-typed fields are carried
/// as their string names (see ExplainabilityPredicateSerializer).
/// </summary>
public sealed class PredicateAssumption
{
    /// <summary>Assumption category name.</summary>
    public required string Category { get; init; }
    /// <summary>The specific assumption key.</summary>
    public required string Key { get; init; }
    /// <summary>The value assumed during analysis.</summary>
    public required string AssumedValue { get; init; }
    /// <summary>The actual observed value, if available.</summary>
    public string? ObservedValue { get; init; }
    /// <summary>How the assumption was derived (source name).</summary>
    public required string Source { get; init; }
    /// <summary>Confidence level name.</summary>
    public required string Confidence { get; init; }
}
/// <summary>
/// Predicate DTO for falsifiability criteria.
/// </summary>
public sealed class PredicateFalsifiabilityCriteria
{
    /// <summary>Unique identifier of the criteria set.</summary>
    public required string Id { get; init; }
    /// <summary>The finding these criteria apply to.</summary>
    public required string FindingId { get; init; }
    /// <summary>When the criteria were generated.</summary>
    public required DateTimeOffset GeneratedAt { get; init; }
    /// <summary>Overall falsifiability status name.</summary>
    public required string Status { get; init; }
    /// <summary>Human-readable summary of what would disprove the finding.</summary>
    public string? Summary { get; init; }
    /// <summary>The individual falsification criteria.</summary>
    public required PredicateFalsificationCriterion[] Criteria { get; init; }
}
/// <summary>
/// Predicate DTO for individual falsification criterion.
/// </summary>
public sealed class PredicateFalsificationCriterion
{
    /// <summary>Falsification type name.</summary>
    public required string Type { get; init; }
    /// <summary>Human-readable description of the criterion.</summary>
    public required string Description { get; init; }
    /// <summary>Machine-evaluable check expression, if any.</summary>
    public string? CheckExpression { get; init; }
    /// <summary>Supporting evidence, if any.</summary>
    public string? Evidence { get; init; }
    /// <summary>Evaluation status name.</summary>
    public required string Status { get; init; }
}
/// <summary>
/// Predicate DTO for evidence density score.
/// </summary>
public sealed class PredicateEvidenceDensityScore
{
    /// <summary>Overall confidence score (0.0 to 1.0).</summary>
    public required double Score { get; init; }
    /// <summary>Confidence level name.</summary>
    public required string Level { get; init; }
    /// <summary>Per-factor score contributions, keyed by factor name.</summary>
    public Dictionary<string, double>? FactorBreakdown { get; init; }
    /// <summary>Human-readable explanation of the score.</summary>
    public string? Explanation { get; init; }
    /// <summary>Recommendations to improve confidence.</summary>
    public string[]? ImprovementRecommendations { get; init; }
}
/// <summary>
/// Predicate DTO for recommended action.
/// </summary>
public sealed class PredicateRecommendedAction
{
    /// <summary>Ordering priority of the action.</summary>
    public required int Priority { get; init; }
    /// <summary>The recommended action text.</summary>
    public required string Action { get; init; }
    /// <summary>Why the action is recommended.</summary>
    public required string Rationale { get; init; }
    /// <summary>Effort level name.</summary>
    public required string Effort { get; init; }
}
#endregion

View File

@@ -0,0 +1,131 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
using System.Collections.Immutable;
namespace StellaOps.Scanner.Explainability.Falsifiability;
/// <summary>
/// Represents criteria that would falsify (disprove) a vulnerability finding.
/// Answers the question: "What would prove this finding wrong?"
/// </summary>
public sealed record FalsifiabilityCriteria
{
    /// <summary>
    /// Unique identifier for this criteria set.
    /// </summary>
    public required string Id { get; init; }
    /// <summary>
    /// The finding ID these criteria apply to.
    /// </summary>
    public required string FindingId { get; init; }
    /// <summary>
    /// Individual criteria that would disprove the finding (empty by default).
    /// </summary>
    public ImmutableArray<FalsificationCriterion> Criteria { get; init; } = [];
    /// <summary>
    /// Overall falsifiability status (defaults to Unknown).
    /// </summary>
    public FalsifiabilityStatus Status { get; init; } = FalsifiabilityStatus.Unknown;
    /// <summary>
    /// Human-readable summary of what would disprove this finding.
    /// </summary>
    public string? Summary { get; init; }
    /// <summary>
    /// When these criteria were generated.
    /// </summary>
    public required DateTimeOffset GeneratedAt { get; init; }
}
/// <summary>
/// A single criterion that, if satisfied, would falsify a finding.
/// </summary>
/// <param name="Type">The type of falsification check</param>
/// <param name="Description">Human-readable description of the criterion</param>
/// <param name="CheckExpression">Machine-evaluable expression (e.g., CEL, Rego), if any</param>
/// <param name="Evidence">Evidence that supports or refutes this criterion, if any</param>
/// <param name="Status">Current evaluation status</param>
public sealed record FalsificationCriterion(
    FalsificationType Type,
    string Description,
    string? CheckExpression,
    string? Evidence,
    CriterionStatus Status
);
/// <summary>
/// Types of falsification criteria — ways in which a finding could be disproved.
/// </summary>
public enum FalsificationType
{
    /// <summary>Package is not actually installed</summary>
    PackageNotPresent,
    /// <summary>Vulnerable version is not the installed version</summary>
    VersionMismatch,
    /// <summary>Vulnerable code path is not reachable</summary>
    CodeUnreachable,
    /// <summary>Required feature/function is disabled</summary>
    FeatureDisabled,
    /// <summary>Mitigation is in place (ASLR, stack canaries, etc.)</summary>
    MitigationPresent,
    /// <summary>Network exposure required but not present</summary>
    NoNetworkExposure,
    /// <summary>Required privileges not available</summary>
    InsufficientPrivileges,
    /// <summary>Vulnerability is already patched</summary>
    PatchApplied,
    /// <summary>Configuration prevents exploitation</summary>
    ConfigurationPrevents,
    /// <summary>Runtime environment prevents exploitation</summary>
    RuntimePrevents
}
/// <summary>
/// Status of a falsification criterion evaluation.
/// </summary>
public enum CriterionStatus
{
    /// <summary>Not yet evaluated</summary>
    Pending,
    /// <summary>Criterion is satisfied (the finding is falsified)</summary>
    Satisfied,
    /// <summary>Criterion is not satisfied (the finding stands)</summary>
    NotSatisfied,
    /// <summary>Could not be evaluated (insufficient data)</summary>
    Inconclusive
}
/// <summary>
/// Overall falsifiability status aggregated across all criteria.
/// </summary>
public enum FalsifiabilityStatus
{
    /// <summary>Status not determined</summary>
    Unknown,
    /// <summary>Finding has been falsified (at least one criterion satisfied)</summary>
    Falsified,
    /// <summary>Finding stands (all criteria not satisfied)</summary>
    NotFalsified,
    /// <summary>Some criteria pending or inconclusive</summary>
    PartiallyEvaluated
}

View File

@@ -0,0 +1,215 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Explainability.Assumptions;
namespace StellaOps.Scanner.Explainability.Falsifiability;
/// <summary>
/// Input data for generating falsifiability criteria for a single finding.
/// </summary>
public sealed record FalsifiabilityInput
{
    /// <summary>The finding ID</summary>
    public required string FindingId { get; init; }
    /// <summary>The CVE or vulnerability ID</summary>
    public required string VulnerabilityId { get; init; }
    /// <summary>Package name</summary>
    public required string PackageName { get; init; }
    /// <summary>Installed version</summary>
    public required string InstalledVersion { get; init; }
    /// <summary>Vulnerable version range, if known</summary>
    public string? VulnerableRange { get; init; }
    /// <summary>Fixed version, if available</summary>
    public string? FixedVersion { get; init; }
    /// <summary>Assumptions made during analysis, if tracked</summary>
    public AssumptionSet? Assumptions { get; init; }
    /// <summary>Whether reachability analysis was performed</summary>
    public bool HasReachabilityData { get; init; }
    /// <summary>Whether code is reachable (null when analysis was inconclusive)</summary>
    public bool? IsReachable { get; init; }
    /// <summary>Known mitigations in place (empty by default)</summary>
    public ImmutableArray<string> Mitigations { get; init; } = [];
}
/// <summary>
/// Generates falsifiability criteria for vulnerability findings.
/// </summary>
public interface IFalsifiabilityGenerator
{
    /// <summary>
    /// Generates falsifiability criteria for a finding.
    /// </summary>
    /// <param name="input">The finding, version, reachability, and assumption evidence.</param>
    /// <returns>The generated criteria set with an overall status and summary.</returns>
    FalsifiabilityCriteria Generate(FalsifiabilityInput input);
}
/// <summary>
/// Default implementation of <see cref="IFalsifiabilityGenerator"/>.
/// </summary>
public sealed class FalsifiabilityGenerator : IFalsifiabilityGenerator
{
private readonly ILogger<FalsifiabilityGenerator> _logger;
public FalsifiabilityGenerator(ILogger<FalsifiabilityGenerator> logger)
{
_logger = logger;
}
/// <inheritdoc />
public FalsifiabilityCriteria Generate(FalsifiabilityInput input)
{
_logger.LogDebug("Generating falsifiability criteria for finding {FindingId}", input.FindingId);
var criteria = new List<FalsificationCriterion>();
// Criterion 1: Package presence
criteria.Add(new FalsificationCriterion(
FalsificationType.PackageNotPresent,
$"Package '{input.PackageName}' is not actually installed or is a false positive from manifest parsing",
$"package.exists(\"{input.PackageName}\") == false",
null,
CriterionStatus.Pending));
// Criterion 2: Version mismatch
if (input.VulnerableRange is not null)
{
criteria.Add(new FalsificationCriterion(
FalsificationType.VersionMismatch,
$"Installed version '{input.InstalledVersion}' is not within vulnerable range '{input.VulnerableRange}'",
$"version.inRange(\"{input.InstalledVersion}\", \"{input.VulnerableRange}\") == false",
null,
CriterionStatus.Pending));
}
// Criterion 3: Patch applied
if (input.FixedVersion is not null)
{
criteria.Add(new FalsificationCriterion(
FalsificationType.PatchApplied,
$"Version '{input.InstalledVersion}' is at or above fixed version '{input.FixedVersion}'",
$"version.gte(\"{input.InstalledVersion}\", \"{input.FixedVersion}\")",
null,
CriterionStatus.Pending));
}
// Criterion 4: Code unreachable
if (input.HasReachabilityData)
{
var reachabilityStatus = input.IsReachable switch
{
false => CriterionStatus.Satisfied,
true => CriterionStatus.NotSatisfied,
null => CriterionStatus.Inconclusive
};
criteria.Add(new FalsificationCriterion(
FalsificationType.CodeUnreachable,
"Vulnerable code path is not reachable from application entry points",
"reachability.isReachable() == false",
input.IsReachable.HasValue ? $"Reachability analysis: {(input.IsReachable.Value ? "reachable" : "unreachable")}" : null,
reachabilityStatus));
}
// Criterion 5: Mitigations
foreach (var mitigation in input.Mitigations)
{
criteria.Add(new FalsificationCriterion(
FalsificationType.MitigationPresent,
$"Mitigation '{mitigation}' prevents exploitation",
null,
$"Mitigation present: {mitigation}",
CriterionStatus.Satisfied));
}
// Criterion 6: Assumption-based criteria
if (input.Assumptions is not null)
{
foreach (var assumption in input.Assumptions.Assumptions.Where(a => a.IsContradicted))
{
var type = assumption.Category switch
{
AssumptionCategory.NetworkExposure => FalsificationType.NoNetworkExposure,
AssumptionCategory.ProcessPrivilege => FalsificationType.InsufficientPrivileges,
AssumptionCategory.FeatureGate => FalsificationType.FeatureDisabled,
AssumptionCategory.RuntimeConfig => FalsificationType.ConfigurationPrevents,
AssumptionCategory.CompilerFlag => FalsificationType.MitigationPresent,
_ => FalsificationType.RuntimePrevents
};
criteria.Add(new FalsificationCriterion(
type,
$"Assumption '{assumption.Key}' was contradicted: assumed '{assumption.AssumedValue}', observed '{assumption.ObservedValue}'",
null,
$"Observed: {assumption.ObservedValue}",
CriterionStatus.Satisfied));
}
}
// Determine overall status
var status = DetermineOverallStatus(criteria);
// Generate summary
var summary = GenerateSummary(input, criteria, status);
return new FalsifiabilityCriteria
{
Id = Guid.NewGuid().ToString("N"),
FindingId = input.FindingId,
Criteria = [.. criteria],
Status = status,
Summary = summary,
GeneratedAt = DateTimeOffset.UtcNow
};
}
/// <summary>
/// Collapses per-criterion results into an overall falsifiability status.
/// Precedence: any satisfied criterion falsifies the finding; a uniformly
/// negative set is "not falsified"; any pending/inconclusive item leaves the
/// evaluation partial; an empty set is unknown.
/// </summary>
private static FalsifiabilityStatus DetermineOverallStatus(List<FalsificationCriterion> criteria)
{
    if (criteria.Count == 0)
    {
        return FalsifiabilityStatus.Unknown;
    }

    var anySatisfied = false;
    var allNegative = true;
    var anyOpen = false;

    // Single pass instead of three LINQ scans; same precedence as before.
    foreach (var criterion in criteria)
    {
        switch (criterion.Status)
        {
            case CriterionStatus.Satisfied:
                anySatisfied = true;
                allNegative = false;
                break;
            case CriterionStatus.NotSatisfied:
                break;
            case CriterionStatus.Pending:
            case CriterionStatus.Inconclusive:
                allNegative = false;
                anyOpen = true;
                break;
            default:
                // Unrecognized status: breaks "all negative" but is not "open".
                allNegative = false;
                break;
        }
    }

    if (anySatisfied)
    {
        return FalsifiabilityStatus.Falsified;
    }

    if (allNegative)
    {
        return FalsifiabilityStatus.NotFalsified;
    }

    return anyOpen ? FalsifiabilityStatus.PartiallyEvaluated : FalsifiabilityStatus.Unknown;
}
/// <summary>
/// Produces a one-line human-readable summary for the computed overall status.
/// </summary>
private static string GenerateSummary(
    FalsifiabilityInput input,
    List<FalsificationCriterion> criteria,
    FalsifiabilityStatus status)
{
    if (status == FalsifiabilityStatus.Falsified)
    {
        var satisfiedTypes = criteria
            .Where(c => c.Status == CriterionStatus.Satisfied)
            .Select(c => c.Type);
        return $"Finding {input.FindingId} can be falsified. Criteria satisfied: {string.Join(", ", satisfiedTypes)}";
    }

    if (status == FalsifiabilityStatus.NotFalsified)
    {
        return $"Finding {input.FindingId} has not been falsified. All {criteria.Count} criteria evaluated negative.";
    }

    if (status == FalsifiabilityStatus.PartiallyEvaluated)
    {
        var pending = criteria.Count(c => c.Status == CriterionStatus.Pending);
        var inconclusive = criteria.Count(c => c.Status == CriterionStatus.Inconclusive);
        return $"Finding {input.FindingId} is partially evaluated. {pending} pending, {inconclusive} inconclusive.";
    }

    return $"Finding {input.FindingId} falsifiability status unknown.";
}
}

View File

@@ -0,0 +1,269 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright (c) StellaOps
using System.Collections.Immutable;
using StellaOps.Scanner.Explainability.Assumptions;
using StellaOps.Scanner.Explainability.Confidence;
using StellaOps.Scanner.Explainability.Falsifiability;
namespace StellaOps.Scanner.Explainability;
/// <summary>
/// A comprehensive risk report that includes all explainability data for a finding:
/// analysis assumptions, falsifiability evaluation, evidence-density confidence,
/// a short explanation, and prioritized remediation actions.
/// </summary>
public sealed record RiskReport
{
/// <summary>Unique report identifier (opaque, generated per report).</summary>
public required string Id { get; init; }
/// <summary>Identifier of the finding this report explains.</summary>
public required string FindingId { get; init; }
/// <summary>The vulnerability ID (CVE, GHSA, etc.)</summary>
public required string VulnerabilityId { get; init; }
/// <summary>Name of the affected package.</summary>
public required string PackageName { get; init; }
/// <summary>Version of the affected package.</summary>
public required string PackageVersion { get; init; }
/// <summary>Assumptions made during analysis; null when no assumption data was supplied.</summary>
public AssumptionSet? Assumptions { get; init; }
/// <summary>Falsifiability criteria and status; null when falsifiability was not evaluated.</summary>
public FalsifiabilityCriteria? Falsifiability { get; init; }
/// <summary>Evidence density confidence score; null when no evidence factors were supplied.</summary>
public EvidenceDensityScore? ConfidenceScore { get; init; }
/// <summary>Short human-readable explanation of the finding (one paragraph).</summary>
public required string Explanation { get; init; }
/// <summary>Detailed markdown narrative ("## ..." sections) explaining the risk; null when not generated.</summary>
public string? DetailedNarrative { get; init; }
/// <summary>Recommended actions, ordered by priority (1 = highest); empty when none apply.</summary>
public ImmutableArray<RecommendedAction> RecommendedActions { get; init; } = [];
/// <summary>UTC timestamp at which this report was generated.</summary>
public required DateTimeOffset GeneratedAt { get; init; }
/// <summary>Version of the explainability engine that produced the report.</summary>
public required string EngineVersion { get; init; }
}
/// <summary>
/// A single recommended action to address a finding. Actions are emitted in
/// priority order by the report generator.
/// </summary>
/// <param name="Priority">Action priority; 1 is the highest (most urgent).</param>
/// <param name="Action">The recommended action, phrased as an imperative sentence.</param>
/// <param name="Rationale">Why this action is recommended.</param>
/// <param name="Effort">Estimated effort level required to carry out the action.</param>
public sealed record RecommendedAction(
int Priority,
string Action,
string Rationale,
EffortLevel Effort
);
/// <summary>
/// Estimated effort level for carrying out a <see cref="RecommendedAction"/>.
/// </summary>
public enum EffortLevel
{
/// <summary>Quick configuration change or dependency update.</summary>
Low,
/// <summary>Moderate code changes or testing required.</summary>
Medium,
/// <summary>Significant refactoring or architectural changes.</summary>
High
}
/// <summary>
/// Generates comprehensive risk reports with explainability data for findings.
/// </summary>
public interface IRiskReportGenerator
{
/// <summary>
/// Generates a risk report for a finding.
/// </summary>
/// <param name="input">Finding data, plus optional assumptions, falsifiability and evidence factors.</param>
/// <returns>A fully populated <see cref="RiskReport"/> for the finding.</returns>
RiskReport Generate(RiskReportInput input);
}
/// <summary>
/// Input for generating a risk report. Only the identifying fields are
/// required; optional explainability data enriches the generated report.
/// </summary>
public sealed record RiskReportInput
{
/// <summary>Identifier of the finding to report on.</summary>
public required string FindingId { get; init; }
/// <summary>The vulnerability ID (CVE, GHSA, etc.).</summary>
public required string VulnerabilityId { get; init; }
/// <summary>Name of the affected package.</summary>
public required string PackageName { get; init; }
/// <summary>Version of the affected package.</summary>
public required string PackageVersion { get; init; }
/// <summary>Severity label, if known; included verbatim in the explanation text.</summary>
public string? Severity { get; init; }
/// <summary>Vulnerability description used for the narrative's overview section.</summary>
public string? Description { get; init; }
/// <summary>Fixed version, if one is available; drives the "update package" recommendation.</summary>
public string? FixedVersion { get; init; }
/// <summary>Assumptions collected during analysis, if any.</summary>
public AssumptionSet? Assumptions { get; init; }
/// <summary>Falsifiability evaluation for the finding, if performed.</summary>
public FalsifiabilityCriteria? Falsifiability { get; init; }
/// <summary>Evidence factors; when present, a confidence score is computed for the report.</summary>
public EvidenceFactors? EvidenceFactors { get; init; }
}
/// <summary>
/// Default implementation of <see cref="IRiskReportGenerator"/>.
/// Computes an optional confidence score from evidence factors, then builds
/// the short explanation, the markdown narrative, and prioritized actions.
/// </summary>
public sealed class RiskReportGenerator : IRiskReportGenerator
{
    private const string EngineVersionValue = "1.0.0";

    private readonly IEvidenceDensityScorer _scorer;

    /// <summary>
    /// Creates a generator that uses <paramref name="scorer"/> for confidence scoring.
    /// </summary>
    /// <param name="scorer">Scorer used to calculate evidence density scores.</param>
    /// <exception cref="ArgumentNullException"><paramref name="scorer"/> is null.</exception>
    public RiskReportGenerator(IEvidenceDensityScorer scorer)
    {
        // Guard at construction time so a null scorer fails fast rather than
        // surfacing as an NRE inside Generate.
        ArgumentNullException.ThrowIfNull(scorer);
        _scorer = scorer;
    }

    /// <inheritdoc />
    /// <exception cref="ArgumentNullException"><paramref name="input"/> is null.</exception>
    public RiskReport Generate(RiskReportInput input)
    {
        ArgumentNullException.ThrowIfNull(input);

        // Confidence score is optional: only computed when evidence factors were provided.
        EvidenceDensityScore? confidenceScore = null;
        if (input.EvidenceFactors is not null)
        {
            confidenceScore = _scorer.Calculate(input.EvidenceFactors);
        }

        var explanation = GenerateExplanation(input);
        var narrative = GenerateNarrative(input, confidenceScore);
        var actions = GenerateRecommendedActions(input);

        return new RiskReport
        {
            Id = Guid.NewGuid().ToString("N"),
            FindingId = input.FindingId,
            VulnerabilityId = input.VulnerabilityId,
            PackageName = input.PackageName,
            PackageVersion = input.PackageVersion,
            Assumptions = input.Assumptions,
            Falsifiability = input.Falsifiability,
            ConfidenceScore = confidenceScore,
            Explanation = explanation,
            DetailedNarrative = narrative,
            RecommendedActions = [.. actions],
            GeneratedAt = DateTimeOffset.UtcNow,
            EngineVersion = EngineVersionValue
        };
    }

    // Builds the short one-paragraph explanation: vulnerability/package line,
    // optional severity, and a falsification or contradiction note when present.
    private static string GenerateExplanation(RiskReportInput input)
    {
        var parts = new List<string>
        {
            $"Vulnerability {input.VulnerabilityId} affects {input.PackageName}@{input.PackageVersion}."
        };

        if (input.Severity is not null)
        {
            parts.Add($"Severity: {input.Severity}.");
        }

        if (input.Falsifiability?.Status == FalsifiabilityStatus.Falsified)
        {
            parts.Add("This finding has been falsified and may not be exploitable in your environment.");
        }
        else if (input.Assumptions?.HasContradictions == true)
        {
            parts.Add("Some analysis assumptions have been contradicted by observed evidence.");
        }

        return string.Join(" ", parts);
    }

    // Builds the markdown narrative: overview, then optional assumptions,
    // falsifiability, and confidence sections, joined by blank lines.
    private static string GenerateNarrative(RiskReportInput input, EvidenceDensityScore? score)
    {
        var sections = new List<string>();

        // Overview
        sections.Add($"## Overview\n{input.Description ?? "No description available."}");

        // Assumptions section (only when at least one assumption exists).
        if (input.Assumptions is not null && input.Assumptions.Assumptions.Length > 0)
        {
            var assumptionLines = input.Assumptions.Assumptions
                .Select(a => $"- **{a.Category}**: {a.Key} = {a.AssumedValue}" +
                    (a.ObservedValue is not null ? $" (observed: {a.ObservedValue})" : ""));
            sections.Add($"## Assumptions\n{string.Join("\n", assumptionLines)}");
        }

        // Falsifiability section
        if (input.Falsifiability is not null)
        {
            sections.Add($"## Falsifiability\n**Status**: {input.Falsifiability.Status}\n\n{input.Falsifiability.Summary}");
        }

        // Confidence section, with improvement recommendations when available.
        if (score is not null)
        {
            sections.Add($"## Confidence Assessment\n{score.Explanation}");

            if (score.ImprovementRecommendations.Count > 0)
            {
                var recs = score.ImprovementRecommendations.Select(r => $"- {r}");
                sections.Add($"### Recommendations to Improve Confidence\n{string.Join("\n", recs)}");
            }
        }

        return string.Join("\n\n", sections);
    }

    // Builds the prioritized action list; priority 1 is highest and priorities
    // increase in the order the actions are added.
    private static List<RecommendedAction> GenerateRecommendedActions(RiskReportInput input)
    {
        var actions = new List<RecommendedAction>();
        int priority = 1;

        // Action: update the package when a fix is available.
        if (input.FixedVersion is not null)
        {
            actions.Add(new RecommendedAction(
                priority++,
                $"Update {input.PackageName} to version {input.FixedVersion} or later",
                "A fixed version is available that addresses this vulnerability",
                EffortLevel.Low));
        }

        // Action: validate any unvalidated analysis assumptions.
        if (input.Assumptions is not null && input.Assumptions.ValidatedCount < input.Assumptions.Assumptions.Length)
        {
            actions.Add(new RecommendedAction(
                priority++,
                "Validate analysis assumptions with runtime observations",
                $"Only {input.Assumptions.ValidatedCount}/{input.Assumptions.Assumptions.Length} assumptions are validated",
                EffortLevel.Medium));
        }

        // Action: finish a partially evaluated falsifiability assessment.
        if (input.Falsifiability?.Status == FalsifiabilityStatus.PartiallyEvaluated)
        {
            var pendingCount = input.Falsifiability.Criteria.Count(c => c.Status == CriterionStatus.Pending);
            actions.Add(new RecommendedAction(
                priority++,
                "Complete falsifiability evaluation",
                $"{pendingCount} criteria are pending evaluation",
                EffortLevel.Medium));
        }

        // Fallback action when no fix exists yet.
        if (input.FixedVersion is null)
        {
            actions.Add(new RecommendedAction(
                priority,
                "Monitor for vendor patch or implement compensating controls",
                "No fixed version is currently available",
                EffortLevel.High));
        }

        return actions;
    }
}

View File

@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
</ItemGroup>
</Project>

View File

@@ -44,7 +44,7 @@ public sealed record RuntimeStaticMergeResult
/// <summary>
/// Merged graph with runtime annotations.
/// </summary>
public required CallGraph MergedGraph { get; init; }
public required RichGraph MergedGraph { get; init; }
/// <summary>
/// Statistics about the merge operation.
@@ -141,7 +141,7 @@ public sealed class RuntimeStaticMerger
/// Merge runtime events into a static call graph.
/// </summary>
public RuntimeStaticMergeResult Merge(
CallGraph staticGraph,
RichGraph staticGraph,
IEnumerable<RuntimeCallEvent> runtimeEvents)
{
ArgumentNullException.ThrowIfNull(staticGraph);
@@ -158,7 +158,7 @@ public sealed class RuntimeStaticMerger
var observedEdges = new List<ObservedEdge>();
var runtimeOnlyEdges = new List<RuntimeOnlyEdge>();
var modifiedEdges = new List<CallEdge>();
var modifiedEdges = new List<RichGraphEdge>();
var matchedEdgeKeys = new HashSet<string>(StringComparer.Ordinal);
foreach (var (edgeKey, aggregate) in runtimeEdgeAggregates)
@@ -177,21 +177,12 @@ public sealed class RuntimeStaticMerger
if (staticEdgeIndex.TryGetValue(edgeKey, out var staticEdge))
{
// Edge exists in static graph - mark as observed
// Edge exists in static graph - mark as observed with boosted confidence
matchedEdgeKeys.Add(edgeKey);
var observedMetadata = new ObservedEdgeMetadata
{
FirstObserved = aggregate.FirstObserved,
LastObserved = aggregate.LastObserved,
ObservationCount = aggregate.ObservationCount,
TraceDigest = aggregate.TraceDigest
};
var boostedEdge = staticEdge with
{
Confidence = _options.ObservedConfidenceBoost,
Observed = observedMetadata
Confidence = _options.ObservedConfidenceBoost
};
modifiedEdges.Add(boostedEdge);
@@ -207,22 +198,16 @@ public sealed class RuntimeStaticMerger
}
else if (_options.AddRuntimeOnlyEdges)
{
// Edge only exists in runtime - add it
var runtimeEdge = new CallEdge
{
From = aggregate.From,
To = aggregate.To,
Kind = CallEdgeKind.Dynamic,
Confidence = ComputeRuntimeOnlyConfidence(aggregate),
Evidence = "runtime_observation",
Observed = new ObservedEdgeMetadata
{
FirstObserved = aggregate.FirstObserved,
LastObserved = aggregate.LastObserved,
ObservationCount = aggregate.ObservationCount,
TraceDigest = aggregate.TraceDigest
}
};
// Edge only exists in runtime - add it as dynamic edge
var runtimeEdge = new RichGraphEdge(
From: aggregate.From,
To: aggregate.To,
Kind: "dynamic",
Purl: null,
SymbolDigest: null,
Evidence: new[] { "runtime_observation" },
Confidence: ComputeRuntimeOnlyConfidence(aggregate),
Candidates: null);
modifiedEdges.Add(runtimeEdge);
runtimeOnlyEdges.Add(new RuntimeOnlyEdge
@@ -239,7 +224,7 @@ public sealed class RuntimeStaticMerger
}
// Build merged edge list: unmatched static + modified
var mergedEdges = new List<CallEdge>();
var mergedEdges = new List<RichGraphEdge>();
foreach (var edge in staticGraph.Edges)
{
var key = BuildEdgeKey(edge.From, edge.To);
@@ -252,16 +237,16 @@ public sealed class RuntimeStaticMerger
var mergedGraph = staticGraph with
{
Edges = mergedEdges.ToImmutableArray()
Edges = mergedEdges
};
var statistics = new MergeStatistics
{
StaticEdgeCount = staticGraph.Edges.Length,
StaticEdgeCount = staticGraph.Edges.Count,
RuntimeEventCount = runtimeEdgeAggregates.Count,
MatchedEdgeCount = matchedEdgeKeys.Count,
RuntimeOnlyEdgeCount = runtimeOnlyEdges.Count,
UnmatchedStaticEdgeCount = staticGraph.Edges.Length - matchedEdgeKeys.Count
UnmatchedStaticEdgeCount = staticGraph.Edges.Count - matchedEdgeKeys.Count
};
_logger.LogInformation(
@@ -280,9 +265,9 @@ public sealed class RuntimeStaticMerger
};
}
private static Dictionary<string, CallEdge> BuildStaticEdgeIndex(CallGraph graph)
private static Dictionary<string, RichGraphEdge> BuildStaticEdgeIndex(RichGraph graph)
{
var index = new Dictionary<string, CallEdge>(StringComparer.Ordinal);
var index = new Dictionary<string, RichGraphEdge>(StringComparer.Ordinal);
foreach (var edge in graph.Edges)
{
var key = BuildEdgeKey(edge.From, edge.To);

View File

@@ -1,25 +1 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Npgsql" Version="9.0.3" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.Cache\StellaOps.Scanner.Cache.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.ProofSpine\StellaOps.Scanner.ProofSpine.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.Surface.Env\StellaOps.Scanner.Surface.Env.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.SmartDiff\StellaOps.Scanner.SmartDiff.csproj" />
<ProjectReference Include="..\..\StellaOps.Scanner.Analyzers.Native\StellaOps.Scanner.Analyzers.Native.csproj" />
<ProjectReference Include="..\..\..\Attestor\StellaOps.Attestor\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj" />
<ProjectReference Include="..\..\..\Attestor\StellaOps.Attestor.Envelope\StellaOps.Attestor.Envelope.csproj" />
<ProjectReference Include="..\..\..\Attestor\__Libraries\StellaOps.Attestor.ProofChain\StellaOps.Attestor.ProofChain.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Replay.Core\StellaOps.Replay.Core.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
</ItemGroup>
</Project>

View File

@@ -20,7 +20,8 @@ public sealed record SarifRun(
[property: JsonPropertyName("results")] ImmutableArray<SarifResult> Results,
[property: JsonPropertyName("invocations")] ImmutableArray<SarifInvocation>? Invocations = null,
[property: JsonPropertyName("artifacts")] ImmutableArray<SarifArtifact>? Artifacts = null,
[property: JsonPropertyName("versionControlProvenance")] ImmutableArray<SarifVersionControlDetails>? VersionControlProvenance = null);
[property: JsonPropertyName("versionControlProvenance")] ImmutableArray<SarifVersionControlDetails>? VersionControlProvenance = null,
[property: JsonPropertyName("properties")] ImmutableSortedDictionary<string, object>? Properties = null);
/// <summary>
/// Tool information for the SARIF run.

View File

@@ -47,7 +47,19 @@ public sealed record SmartDiffSarifInput(
IReadOnlyList<VexCandidate> VexCandidates,
IReadOnlyList<ReachabilityChange> ReachabilityChanges,
VcsInfo? VcsInfo = null,
string? DeltaVerdictReference = null);
string? DeltaVerdictReference = null,
AttestationReference? Attestation = null);
/// <summary>
/// Attestation reference for SARIF provenance linkage; serialized into the
/// run-level "stellaops.attestation" property bag.
/// Sprint: SPRINT_4400_0001_0001 - Signed Delta Verdict Attestation
/// </summary>
/// <param name="Digest">Digest of the attestation (emitted as "digest").</param>
/// <param name="PredicateType">In-toto predicate type of the attestation (emitted as "predicateType").</param>
/// <param name="OciReference">Optional OCI reference where the attestation is stored; omitted from output when null/blank.</param>
/// <param name="RekorLogId">Optional Rekor transparency-log entry ID; omitted from output when null/blank.</param>
/// <param name="SignatureKeyId">Optional ID of the signing key; omitted from output when null/blank.</param>
public sealed record AttestationReference(
string Digest,
string PredicateType,
string? OciReference = null,
string? RekorLogId = null,
string? SignatureKeyId = null);
/// <summary>
/// VCS information for SARIF provenance.
@@ -142,12 +154,15 @@ public sealed class SarifOutputGenerator
var artifacts = CreateArtifacts(input);
var vcsProvenance = CreateVcsProvenance(input);
var runProperties = CreateRunProperties(input);
var run = new SarifRun(
Tool: tool,
Results: results,
Invocations: [invocation],
Artifacts: artifacts.Length > 0 ? artifacts : null,
VersionControlProvenance: vcsProvenance);
VersionControlProvenance: vcsProvenance,
Properties: runProperties);
return new SarifLog(
Version: SarifVersion,
@@ -399,4 +414,70 @@ public sealed class SarifOutputGenerator
RevisionId: input.VcsInfo.RevisionId,
Branch: input.VcsInfo.Branch)];
}
/// <summary>
/// Builds the run-level SARIF property bag: diff digests, the legacy delta
/// verdict reference, and the full attestation reference.
/// Returns null when there is nothing to record.
/// Sprint: SPRINT_4400_0001_0001 - Signed Delta Verdict Attestation
/// </summary>
private static ImmutableSortedDictionary<string, object>? CreateRunProperties(SmartDiffSarifInput input)
{
    // Build directly into an immutable builder with ordinal key ordering;
    // an empty builder at the end means "emit no properties at all".
    var builder = ImmutableSortedDictionary.CreateBuilder<string, object>(StringComparer.Ordinal);

    // Digest references for diff tracking.
    if (!string.IsNullOrWhiteSpace(input.BaseDigest))
    {
        builder["stellaops.diff.base.digest"] = input.BaseDigest!;
    }

    if (!string.IsNullOrWhiteSpace(input.TargetDigest))
    {
        builder["stellaops.diff.target.digest"] = input.TargetDigest!;
    }

    // Legacy delta verdict reference, kept for backwards compatibility.
    if (!string.IsNullOrWhiteSpace(input.DeltaVerdictReference))
    {
        builder["stellaops.deltaVerdictRef"] = input.DeltaVerdictReference!;
    }

    // Full attestation reference per SPRINT_4400_0001_0001; optional fields
    // are only emitted when non-blank.
    if (input.Attestation is { } attestation)
    {
        var attestationObj = new SortedDictionary<string, object>(StringComparer.Ordinal)
        {
            ["digest"] = attestation.Digest,
            ["predicateType"] = attestation.PredicateType
        };

        if (!string.IsNullOrWhiteSpace(attestation.OciReference))
        {
            attestationObj["ociReference"] = attestation.OciReference!;
        }

        if (!string.IsNullOrWhiteSpace(attestation.RekorLogId))
        {
            attestationObj["rekorLogId"] = attestation.RekorLogId!;
        }

        if (!string.IsNullOrWhiteSpace(attestation.SignatureKeyId))
        {
            attestationObj["signatureKeyId"] = attestation.SignatureKeyId!;
        }

        builder["stellaops.attestation"] = attestationObj;
    }

    return builder.Count == 0 ? null : builder.ToImmutable();
}
}

View File

@@ -0,0 +1,202 @@
// -----------------------------------------------------------------------------
// VerdictPushDiagnostics.cs
// Sprint: SPRINT_4300_0001_0001_oci_verdict_attestation_push
// Task: VERDICT-009
// Description: OpenTelemetry instrumentation for verdict push operations.
// -----------------------------------------------------------------------------
using System;
using System.Diagnostics;
using System.Diagnostics.Metrics;
namespace StellaOps.Scanner.Storage.Oci.Diagnostics;
/// <summary>
/// OpenTelemetry instrumentation for verdict push operations: an
/// <see cref="System.Diagnostics.ActivitySource"/> for client-span tracing and a
/// <see cref="System.Diagnostics.Metrics.Meter"/> exposing attempt/outcome
/// counters plus latency and payload-size histograms.
/// </summary>
public static class VerdictPushDiagnostics
{
    /// <summary>Activity source name for verdict push operations.</summary>
    public const string ActivitySourceName = "StellaOps.Scanner.VerdictPush";

    /// <summary>Activity source version.</summary>
    public const string ActivityVersion = "1.0.0";

    /// <summary>Meter name for verdict push metrics.</summary>
    public const string MeterName = "stellaops.scanner.verdict_push";

    /// <summary>Meter version.</summary>
    public const string MeterVersion = "1.0.0";

    /// <summary>Activity source for verdict push tracing.</summary>
    public static ActivitySource ActivitySource { get; } = new(ActivitySourceName, ActivityVersion);

    /// <summary>Meter for verdict push metrics.</summary>
    public static Meter Meter { get; } = new(MeterName, MeterVersion);

    // Counters for attempt/outcome/retry totals.
    private static readonly Counter<long> s_attempts = Meter.CreateCounter<long>(
        "stellaops.verdict.push.attempts",
        unit: "{attempts}",
        description: "Total number of verdict push attempts");

    private static readonly Counter<long> s_successes = Meter.CreateCounter<long>(
        "stellaops.verdict.push.successes",
        unit: "{successes}",
        description: "Total number of successful verdict pushes");

    private static readonly Counter<long> s_failures = Meter.CreateCounter<long>(
        "stellaops.verdict.push.failures",
        unit: "{failures}",
        description: "Total number of failed verdict pushes");

    private static readonly Counter<long> s_retries = Meter.CreateCounter<long>(
        "stellaops.verdict.push.retries",
        unit: "{retries}",
        description: "Total number of verdict push retries");

    // Histograms for latency and payload size distributions.
    private static readonly Histogram<double> s_duration = Meter.CreateHistogram<double>(
        "stellaops.verdict.push.duration",
        unit: "ms",
        description: "Duration of verdict push operations in milliseconds");

    private static readonly Histogram<long> s_payloadSize = Meter.CreateHistogram<long>(
        "stellaops.verdict.push.payload_size",
        unit: "By",
        description: "Size of verdict payload in bytes");

    // Small helper keeping metric tag construction terse and uniform.
    private static KeyValuePair<string, object?> Tag(string key, object? value) => new(key, value);

    /// <summary>
    /// Start a client-kind activity for a verdict push operation.
    /// Returns null when no listener is sampling this source.
    /// </summary>
    public static Activity? StartPushActivity(
        string imageReference,
        string? imageDigest = null,
        string? registry = null)
    {
        var activity = ActivitySource.StartActivity("verdict.push", ActivityKind.Client);

        if (activity is not null)
        {
            activity.SetTag("stellaops.verdict.image_reference", imageReference);

            if (!string.IsNullOrWhiteSpace(imageDigest))
            {
                activity.SetTag("stellaops.verdict.image_digest", imageDigest);
            }

            if (!string.IsNullOrWhiteSpace(registry))
            {
                activity.SetTag("stellaops.verdict.registry", registry);
            }
        }

        return activity;
    }

    /// <summary>Record a push attempt.</summary>
    public static void RecordPushAttempt(string registry, string decision)
        => s_attempts.Add(1, Tag("registry", registry), Tag("decision", decision));

    /// <summary>Record a successful push along with its latency and payload size.</summary>
    public static void RecordPushSuccess(string registry, string decision, double durationMs, long payloadBytes)
    {
        s_successes.Add(1, Tag("registry", registry), Tag("decision", decision));

        s_duration.Record(durationMs,
            Tag("registry", registry),
            Tag("decision", decision),
            Tag("status", "success"));

        s_payloadSize.Record(payloadBytes,
            Tag("registry", registry),
            Tag("decision", decision));
    }

    /// <summary>Record a failed push along with its latency and an error classification.</summary>
    public static void RecordPushFailure(string registry, string decision, string errorType, double durationMs)
    {
        s_failures.Add(1,
            Tag("registry", registry),
            Tag("decision", decision),
            Tag("error_type", errorType));

        s_duration.Record(durationMs,
            Tag("registry", registry),
            Tag("decision", decision),
            Tag("status", "failure"));
    }

    /// <summary>Record a push retry.</summary>
    public static void RecordPushRetry(string registry, int attemptNumber, string reason)
        => s_retries.Add(1,
            Tag("registry", registry),
            Tag("attempt", attemptNumber),
            Tag("reason", reason));

    /// <summary>Mark the activity as failed and attach exception details; no-op for a null activity.</summary>
    public static void SetActivityError(Activity? activity, Exception exception)
    {
        if (activity is null)
        {
            return;
        }

        activity.SetStatus(ActivityStatusCode.Error, exception.Message);
        activity.SetTag("otel.status_code", "ERROR");
        activity.SetTag("otel.status_description", exception.Message);
        activity.SetTag("exception.type", exception.GetType().FullName);
        activity.SetTag("exception.message", exception.Message);
    }

    /// <summary>Mark the activity as succeeded, optionally recording the pushed manifest digest; no-op for a null activity.</summary>
    public static void SetActivitySuccess(Activity? activity, string? manifestDigest = null)
    {
        if (activity is null)
        {
            return;
        }

        activity.SetStatus(ActivityStatusCode.Ok);
        activity.SetTag("otel.status_code", "OK");

        if (!string.IsNullOrWhiteSpace(manifestDigest))
        {
            activity.SetTag("stellaops.verdict.manifest_digest", manifestDigest);
        }
    }
}

View File

@@ -14,4 +14,46 @@ public static class OciAnnotations
/// <summary>Annotation key for the "after" image digest of a delta. NOTE(review): exact producer semantics not visible here — confirm against the delta writer.</summary>
public const string StellaAfterDigest = "org.stellaops.delta.after.digest";
/// <summary>Annotation key for the digest of the associated SBOM artifact.</summary>
public const string StellaSbomDigest = "org.stellaops.sbom.digest";
/// <summary>Annotation key for the digest of the verdict document.</summary>
public const string StellaVerdictDigest = "org.stellaops.verdict.digest";
// Sprint: SPRINT_4300_0001_0001 - OCI Verdict Attestation Push
/// <summary>
/// The final decision (pass, warn, block) for the verdict.
/// </summary>
public const string StellaVerdictDecision = "org.stellaops.verdict.decision";
/// <summary>
/// Digest of the feeds snapshot used for vulnerability matching.
/// </summary>
public const string StellaFeedsDigest = "org.stellaops.feeds.digest";
/// <summary>
/// Digest of the policy bundle used for evaluation.
/// </summary>
public const string StellaPolicyDigest = "org.stellaops.policy.digest";
/// <summary>
/// Graph revision identifier for the scan.
/// </summary>
public const string StellaGraphRevisionId = "org.stellaops.graph.revision.id";
/// <summary>
/// Digest of the proof bundle containing the evidence chain.
/// </summary>
public const string StellaProofBundleDigest = "org.stellaops.proof.bundle.digest";
/// <summary>
/// Timestamp when the verdict was computed.
/// </summary>
public const string StellaVerdictTimestamp = "org.stellaops.verdict.timestamp";
// Sprint: SPRINT_4300_0002_0002 - Unknowns Attestation Predicates
/// <summary>
/// Digest of the uncertainty state attestation.
/// </summary>
public const string StellaUncertaintyDigest = "org.stellaops.uncertainty.digest";
/// <summary>
/// Digest of the uncertainty budget attestation.
/// </summary>
public const string StellaUncertaintyBudgetDigest = "org.stellaops.uncertainty.budget.digest";
}

View File

@@ -14,4 +14,16 @@ public static class OciMediaTypes
/// <summary>Media type for reachability slice artifacts.</summary>
public const string ReachabilitySlice = "application/vnd.stellaops.slice.v1+json";
/// <summary>Config media type for slice artifacts.</summary>
public const string SliceConfig = "application/vnd.stellaops.slice.config.v1+json";
/// <summary>Media type for slice artifacts. NOTE(review): identical value to <see cref="ReachabilitySlice"/> — confirm whether one alias should be deprecated.</summary>
public const string SliceArtifact = "application/vnd.stellaops.slice.v1+json";
// Sprint: SPRINT_4300_0001_0001 - OCI Verdict Attestation Push
/// <summary>
/// Media type for risk verdict attestation artifacts.
/// These are pushed as OCI referrers for container images.
/// </summary>
public const string VerdictAttestation = "application/vnd.stellaops.verdict.v1+json";
/// <summary>
/// Config media type for verdict attestation artifacts.
/// </summary>
public const string VerdictConfig = "application/vnd.stellaops.verdict.config.v1+json";
}

View File

@@ -73,4 +73,17 @@ public sealed record OciRegistryAuthorization
break;
}
}
/// <summary>
/// Asynchronously authorizes a request. This is a convenience method that wraps ApplyTo.
/// The OciImageReference parameter is for future token refresh support.
/// </summary>
/// <param name="request">Outgoing request to decorate with credentials; must not be null.</param>
/// <param name="reference">Image reference; currently unused, reserved for token refresh.</param>
/// <param name="cancellationToken">Currently unobserved; reserved for future asynchronous work.</param>
/// <returns>A completed task.</returns>
/// <exception cref="ArgumentNullException"><paramref name="request"/> is null.</exception>
public Task AuthorizeRequestAsync(
    HttpRequestMessage request,
    OciImageReference reference,
    CancellationToken cancellationToken = default)
{
    // Fail fast on a null request instead of letting ApplyTo throw an NRE deeper in.
    ArgumentNullException.ThrowIfNull(request);
    ApplyTo(request);
    return Task.CompletedTask;
}
}

View File

@@ -4,7 +4,6 @@ using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability.Slices;
namespace StellaOps.Scanner.Storage.Oci.Offline;
@@ -95,15 +94,48 @@ public sealed record BundleImportResult
public string? Error { get; init; }
}
/// <summary>
/// Data transfer object for slice data in offline bundles.
/// Decoupled from ReachabilitySlice to avoid circular dependencies: carries the
/// raw slice JSON plus the few fields needed for bundle annotations.
/// </summary>
public sealed record SliceDataDto
{
/// <summary>
/// Raw JSON bytes of the slice; written to the bundle blob store verbatim.
/// </summary>
public required byte[] JsonBytes { get; init; }
/// <summary>
/// CVE ID extracted from the slice query, used for bundle annotations; null when unknown.
/// </summary>
public string? CveId { get; init; }
/// <summary>
/// Verdict status string, used for bundle annotations; null when unknown.
/// </summary>
public string? VerdictStatus { get; init; }
/// <summary>
/// Digest of the referenced call graph; presumably "sha256:"-style — confirm with producer.
/// </summary>
public string? GraphDigest { get; init; }
/// <summary>
/// Digest of the referenced SBOM; presumably "sha256:"-style — confirm with producer.
/// </summary>
public string? SbomDigest { get; init; }
}
/// <summary>
/// Provider interface for slice storage operations.
/// Uses SliceDataDto to avoid circular dependencies with Reachability project.
/// </summary>
public interface ISliceStorageProvider
{
Task<IReadOnlyList<ReachabilitySlice>> GetSlicesForScanAsync(string scanId, CancellationToken cancellationToken = default);
Task<IReadOnlyList<SliceDataDto>> GetSlicesForScanAsync(string scanId, CancellationToken cancellationToken = default);
Task<byte[]?> GetGraphAsync(string digest, CancellationToken cancellationToken = default);
Task<byte[]?> GetSbomAsync(string digest, CancellationToken cancellationToken = default);
Task StoreSliceAsync(ReachabilitySlice slice, CancellationToken cancellationToken = default);
Task StoreSliceAsync(byte[] sliceJsonBytes, CancellationToken cancellationToken = default);
Task StoreGraphAsync(string digest, byte[] data, CancellationToken cancellationToken = default);
Task StoreSbomAsync(string digest, byte[] data, CancellationToken cancellationToken = default);
}
@@ -183,8 +215,7 @@ public sealed class OfflineBundleService
// Export slices
foreach (var slice in slices)
{
var sliceJson = JsonSerializer.Serialize(slice, JsonOptions);
var sliceBytes = Encoding.UTF8.GetBytes(sliceJson);
var sliceBytes = slice.JsonBytes;
var sliceDigest = ComputeDigest(sliceBytes);
var slicePath = Path.Combine(blobsDir, sliceDigest);
@@ -197,8 +228,8 @@ public sealed class OfflineBundleService
Size = sliceBytes.Length,
Path = $"{BlobsDirectory}/{sliceDigest}",
Annotations = ImmutableDictionary<string, string>.Empty
.Add("stellaops.slice.cveId", slice.Query?.CveId ?? "unknown")
.Add("stellaops.slice.verdict", slice.Verdict?.Status.ToString() ?? "unknown")
.Add("stellaops.slice.cveId", slice.CveId ?? "unknown")
.Add("stellaops.slice.verdict", slice.VerdictStatus ?? "unknown")
});
// Collect referenced graphs and SBOMs
@@ -435,12 +466,9 @@ public sealed class OfflineBundleService
if (artifact.MediaType == OciMediaTypes.ReachabilitySlice)
{
var slice = JsonSerializer.Deserialize<ReachabilitySlice>(data, JsonOptions);
if (slice != null)
{
await _storage.StoreSliceAsync(slice, cancellationToken).ConfigureAwait(false);
slicesImported++;
}
// Store raw JSON bytes - consumer deserializes to specific type
await _storage.StoreSliceAsync(data, cancellationToken).ConfigureAwait(false);
slicesImported++;
}
else if (artifact.MediaType == OciMediaTypes.ReachabilitySubgraph)
{

View File

@@ -3,7 +3,6 @@ using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Reachability.Slices;
namespace StellaOps.Scanner.Storage.Oci;
@@ -39,7 +38,11 @@ public sealed record SlicePullOptions
public sealed record SlicePullResult
{
public required bool Success { get; init; }
public ReachabilitySlice? Slice { get; init; }
/// <summary>
/// Raw slice data as JSON element (decoupled from ReachabilitySlice type).
/// Consumer should deserialize to appropriate type.
/// </summary>
public JsonElement? SliceData { get; init; }
public string? SliceDigest { get; init; }
public byte[]? DsseEnvelope { get; init; }
public string? Error { get; init; }
@@ -96,7 +99,7 @@ public sealed class SlicePullService : IDisposable
return new SlicePullResult
{
Success = true,
Slice = cached!.Slice,
SliceData = cached!.SliceData,
SliceDigest = digest,
DsseEnvelope = cached.DsseEnvelope,
FromCache = true,
@@ -185,9 +188,14 @@ public sealed class SlicePullService : IDisposable
};
}
// Parse slice
var slice = JsonSerializer.Deserialize<ReachabilitySlice>(sliceBytes, JsonOptions);
if (slice == null)
// Parse slice as raw JSON element (decoupled from ReachabilitySlice type)
JsonElement sliceData;
try
{
using var doc = JsonDocument.Parse(sliceBytes);
sliceData = doc.RootElement.Clone();
}
catch (JsonException)
{
return new SlicePullResult
{
@@ -216,7 +224,7 @@ public sealed class SlicePullService : IDisposable
{
AddToCache(cacheKey, new CachedSlice
{
Slice = slice,
SliceData = sliceData,
DsseEnvelope = dsseEnvelope,
SignatureVerified = signatureVerified,
ExpiresAt = DateTimeOffset.UtcNow.Add(_options.CacheTtl)
@@ -230,7 +238,7 @@ public sealed class SlicePullService : IDisposable
return new SlicePullResult
{
Success = true,
Slice = slice,
SliceData = sliceData,
SliceDigest = digest,
DsseEnvelope = dsseEnvelope,
FromCache = false,
@@ -346,7 +354,7 @@ public sealed class SlicePullService : IDisposable
var index = await response.Content.ReadFromJsonAsync<OciReferrersIndex>(JsonOptions, cancellationToken)
.ConfigureAwait(false);
return index?.Manifests ?? Array.Empty<OciReferrer>();
return (IReadOnlyList<OciReferrer>?)index?.Manifests ?? Array.Empty<OciReferrer>();
}
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException)
{
@@ -430,7 +438,7 @@ public sealed class SlicePullService : IDisposable
private sealed record CachedSlice
{
public required ReachabilitySlice Slice { get; init; }
public required JsonElement SliceData { get; init; }
public byte[]? DsseEnvelope { get; init; }
public bool SignatureVerified { get; init; }
public required DateTimeOffset ExpiresAt { get; init; }

View File

@@ -8,6 +8,9 @@
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
</ItemGroup>
<!-- NOTE: Reachability reference intentionally removed to break circular dependency:
Reachability -> SmartDiff -> Storage.Oci -> Reachability
Use SliceDataDto and JsonElement instead of ReachabilitySlice type. -->
<ItemGroup>
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
</ItemGroup>

View File

@@ -0,0 +1,287 @@
// -----------------------------------------------------------------------------
// VerdictOciPublisher.cs
// Sprint: SPRINT_4300_0001_0001_oci_verdict_attestation_push
// Task: VERDICT-009 - OpenTelemetry instrumentation integrated.
// Description: Pushes risk verdict attestations as OCI referrer artifacts.
// -----------------------------------------------------------------------------
using System.Diagnostics;
using StellaOps.Scanner.Storage.Oci.Diagnostics;
namespace StellaOps.Scanner.Storage.Oci;
/// <summary>
/// Input to <c>VerdictOciPublisher.PushAsync</c>: everything needed to attach a
/// signed risk-verdict attestation to a container image as an OCI referrer.
/// Required members pin the provenance chain (SBOM, feeds, policy); optional
/// members carry extra cross-references when available.
/// </summary>
public sealed record VerdictOciPublishRequest
{
    /// <summary>OCI image reference to attach the verdict to (registry/repository@sha256:digest).</summary>
    public required string Reference { get; init; }

    /// <summary>Digest of the container image this verdict applies to.</summary>
    public required string ImageDigest { get; init; }

    /// <summary>DSSE envelope bytes containing the signed verdict statement.</summary>
    public required byte[] DsseEnvelopeBytes { get; init; }

    /// <summary>Digest of the SBOM used for vulnerability matching.</summary>
    public required string SbomDigest { get; init; }

    /// <summary>Digest of the advisory feeds snapshot used.</summary>
    public required string FeedsDigest { get; init; }

    /// <summary>Digest of the policy bundle used for evaluation.</summary>
    public required string PolicyDigest { get; init; }

    /// <summary>Final verdict decision: pass, warn, or block.</summary>
    public required string Decision { get; init; }

    /// <summary>Graph revision ID for the scan, when known.</summary>
    public string? GraphRevisionId { get; init; }

    /// <summary>Digest of the proof bundle containing the evidence chain, when known.</summary>
    public string? ProofBundleDigest { get; init; }

    /// <summary>Digest of the attestation itself, for cross-referencing.</summary>
    public string? AttestationDigest { get; init; }

    /// <summary>When the verdict was computed.</summary>
    public DateTimeOffset? VerdictTimestamp { get; init; }

    /// <summary>
    /// Optional digest of the uncertainty state attestation.
    /// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
    /// </summary>
    public string? UncertaintyStatementDigest { get; init; }

    /// <summary>
    /// Optional digest of the uncertainty budget attestation.
    /// Sprint: SPRINT_4300_0002_0002_unknowns_attestation_predicates
    /// </summary>
    public string? UncertaintyBudgetDigest { get; init; }
}
/// <summary>
/// Service for pushing risk verdict attestations as OCI referrer artifacts.
/// This enables verdicts to be portable "ship tokens" attached to container images.
/// </summary>
public sealed class VerdictOciPublisher
{
    private readonly OciArtifactPusher _pusher;

    /// <summary>
    /// Create a publisher that delegates the actual registry push to <paramref name="pusher"/>.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="pusher"/> is null.</exception>
    public VerdictOciPublisher(OciArtifactPusher pusher)
    {
        _pusher = pusher ?? throw new ArgumentNullException(nameof(pusher));
    }

    /// <summary>
    /// Push a verdict attestation as an OCI referrer artifact. The DSSE envelope
    /// is attached as a single layer, with provenance digests recorded as manifest
    /// annotations; attempts, successes, and failures are reported via
    /// <see cref="VerdictPushDiagnostics"/>.
    /// </summary>
    /// <param name="request">The verdict push request.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The result of the push operation.</returns>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="request"/> is null.</exception>
    public async Task<OciArtifactPushResult> PushAsync(
        VerdictOciPublishRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);

        // Registry hostname is used as a low-cardinality telemetry dimension.
        var registry = ExtractRegistry(request.Reference);
        var payloadSize = request.DsseEnvelopeBytes.Length;

        // Start activity for distributed tracing.
        using var activity = VerdictPushDiagnostics.StartPushActivity(
            request.Reference,
            request.ImageDigest,
            registry);

        // Record the attempt up front so failed pushes are still counted.
        VerdictPushDiagnostics.RecordPushAttempt(registry, request.Decision);

        var stopwatch = Stopwatch.StartNew();
        try
        {
            var pushRequest = new OciArtifactPushRequest
            {
                Reference = request.Reference,
                ArtifactType = OciMediaTypes.VerdictAttestation,
                // The subject link is what makes this a referrer of the image.
                SubjectDigest = request.ImageDigest,
                Layers =
                [
                    new OciLayerContent
                    {
                        Content = request.DsseEnvelopeBytes,
                        MediaType = OciMediaTypes.DsseEnvelope
                    }
                ],
                Annotations = BuildAnnotations(request)
            };

            var result = await _pusher.PushAsync(pushRequest, cancellationToken).ConfigureAwait(false);
            stopwatch.Stop();

            if (result.Success)
            {
                VerdictPushDiagnostics.RecordPushSuccess(
                    registry,
                    request.Decision,
                    stopwatch.Elapsed.TotalMilliseconds,
                    payloadSize);
                VerdictPushDiagnostics.SetActivitySuccess(activity, result.ManifestDigest);
            }
            else
            {
                VerdictPushDiagnostics.RecordPushFailure(
                    registry,
                    request.Decision,
                    result.Error ?? "unknown",
                    stopwatch.Elapsed.TotalMilliseconds);
                activity?.SetStatus(ActivityStatusCode.Error, result.Error);
            }

            return result;
        }
        catch (Exception ex)
        {
            stopwatch.Stop();

            // Record the failure with the exception type as the reason, then
            // rethrow so callers observe the original error.
            VerdictPushDiagnostics.RecordPushFailure(
                registry,
                request.Decision,
                ex.GetType().Name,
                stopwatch.Elapsed.TotalMilliseconds);
            VerdictPushDiagnostics.SetActivityError(activity, ex);
            throw;
        }
    }

    /// <summary>
    /// Build the OCI manifest annotations for a verdict push. The required
    /// provenance digests and decision are always present; optional digests are
    /// added only when non-blank.
    /// </summary>
    private static Dictionary<string, string> BuildAnnotations(VerdictOciPublishRequest request)
    {
        var annotations = new Dictionary<string, string>(StringComparer.Ordinal)
        {
            [OciAnnotations.StellaPredicateType] = VerdictPredicateTypes.Verdict,
            [OciAnnotations.StellaSbomDigest] = request.SbomDigest,
            [OciAnnotations.StellaFeedsDigest] = request.FeedsDigest,
            [OciAnnotations.StellaPolicyDigest] = request.PolicyDigest,
            [OciAnnotations.StellaVerdictDecision] = request.Decision
        };

        if (!string.IsNullOrWhiteSpace(request.GraphRevisionId))
        {
            annotations[OciAnnotations.StellaGraphRevisionId] = request.GraphRevisionId!;
        }

        if (!string.IsNullOrWhiteSpace(request.ProofBundleDigest))
        {
            annotations[OciAnnotations.StellaProofBundleDigest] = request.ProofBundleDigest!;
        }

        if (!string.IsNullOrWhiteSpace(request.AttestationDigest))
        {
            annotations[OciAnnotations.StellaAttestationDigest] = request.AttestationDigest!;
        }

        if (request.VerdictTimestamp.HasValue)
        {
            // Round-trip ("O") format is culture-invariant and lossless.
            annotations[OciAnnotations.StellaVerdictTimestamp] = request.VerdictTimestamp.Value.ToString("O");
        }

        // Sprint: SPRINT_4300_0002_0002 - Unknowns Attestation Predicates
        if (!string.IsNullOrWhiteSpace(request.UncertaintyStatementDigest))
        {
            annotations[OciAnnotations.StellaUncertaintyDigest] = request.UncertaintyStatementDigest!;
        }

        if (!string.IsNullOrWhiteSpace(request.UncertaintyBudgetDigest))
        {
            annotations[OciAnnotations.StellaUncertaintyBudgetDigest] = request.UncertaintyBudgetDigest!;
        }

        return annotations;
    }

    /// <summary>
    /// Extract the registry hostname from an OCI reference, for telemetry labels.
    /// Falls back to "docker.io" when no explicit registry is present and
    /// "unknown" for blank input.
    /// </summary>
    private static string ExtractRegistry(string reference)
    {
        if (string.IsNullOrWhiteSpace(reference))
        {
            return "unknown";
        }

        // Strip a digest ("@sha256:...") or tag (":v1") suffix first so a later
        // colon test only ever sees a potential registry port separator.
        var refWithoutTag = reference;
        var atIndex = reference.IndexOf('@');
        if (atIndex > 0)
        {
            refWithoutTag = reference[..atIndex];
        }
        else
        {
            var colonIndex = reference.LastIndexOf(':');
            if (colonIndex > 0)
            {
                // A colon after the last '/' is a tag; a colon before it is a
                // registry port and must be kept.
                var slashIndex = reference.LastIndexOf('/');
                if (slashIndex < colonIndex)
                {
                    refWithoutTag = reference[..colonIndex];
                }
            }
        }

        // The first path component is a registry only if it looks like a host:
        // contains a dot or port colon, or is "localhost". Otherwise it is the
        // start of an implicit Docker Hub repository path.
        var firstSlash = refWithoutTag.IndexOf('/');
        if (firstSlash > 0)
        {
            var potentialRegistry = refWithoutTag[..firstSlash];
            if (potentialRegistry.Contains('.') ||
                potentialRegistry.Contains(':') ||
                potentialRegistry.Equals("localhost", StringComparison.OrdinalIgnoreCase))
            {
                return potentialRegistry;
            }
        }

        // Default to docker.io for an implicit registry.
        return "docker.io";
    }
}
/// <summary>
/// Predicate type URIs for verdict attestations.
/// </summary>
public static class VerdictPredicateTypes
{
    /// <summary>
    /// Predicate type for risk verdict attestations. Written into the
    /// predicate-type OCI annotation by <see cref="VerdictOciPublisher"/> so
    /// consumers can identify the attestation flavor without parsing the envelope.
    /// </summary>
    public const string Verdict = "verdict.stella/v1";
}