Add Canonical JSON serialization library with tests and documentation

- Implemented CanonJson class for deterministic JSON serialization and hashing.
- Added unit tests for CanonJson functionality, covering various scenarios including key sorting, handling of nested objects, arrays, and special characters.
- Created project files for the Canonical JSON library and its tests, including necessary package references.
- Added README.md for library usage and API reference.
- Introduced RabbitMqIntegrationFactAttribute for conditional RabbitMQ integration tests.
This commit is contained in:
master
2025-12-19 15:35:00 +02:00
parent 43882078a4
commit 951a38d561
192 changed files with 27550 additions and 2611 deletions

View File

@@ -0,0 +1,221 @@
// -----------------------------------------------------------------------------
// ReachabilityDriftPredicate.cs
// Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
// Task: UI-014
// Description: DSSE predicate for reachability drift attestation.
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Predicates;
/// <summary>
/// DSSE predicate for reachability drift attestation.
/// predicateType: stellaops.dev/predicates/reachability-drift@v1
/// </summary>
public sealed record ReachabilityDriftPredicate
{
/// <summary>
/// The predicate type URI for reachability drift attestations.
/// </summary>
public const string PredicateType = "stellaops.dev/predicates/reachability-drift@v1";
/// <summary>
/// Reference to the base (previous) image being compared.
/// </summary>
[JsonPropertyName("baseImage")]
public required DriftImageReference BaseImage { get; init; }
/// <summary>
/// Reference to the target (current) image being compared.
/// </summary>
[JsonPropertyName("targetImage")]
public required DriftImageReference TargetImage { get; init; }
/// <summary>
/// Scan ID of the baseline scan.
/// </summary>
[JsonPropertyName("baseScanId")]
public required string BaseScanId { get; init; }
/// <summary>
/// Scan ID of the head (current) scan.
/// </summary>
[JsonPropertyName("headScanId")]
public required string HeadScanId { get; init; }
/// <summary>
/// Summary of detected drift.
/// </summary>
[JsonPropertyName("drift")]
public required DriftPredicateSummary Drift { get; init; }
/// <summary>
/// Metadata about the analysis performed.
/// </summary>
[JsonPropertyName("analysis")]
public required DriftAnalysisMetadata Analysis { get; init; }
}
/// <summary>
/// Reference to a container image in drift analysis.
/// </summary>
public sealed record DriftImageReference
{
/// <summary>
/// Image name (repository/image).
/// </summary>
[JsonPropertyName("name")]
public required string Name { get; init; }
/// <summary>
/// Image digest (sha256:...).
/// </summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>
/// Optional tag at time of analysis.
/// </summary>
[JsonPropertyName("tag")]
public string? Tag { get; init; }
}
/// <summary>
/// Summary of drift detection results for the predicate.
/// </summary>
public sealed record DriftPredicateSummary
{
/// <summary>
/// Number of sinks that became reachable.
/// </summary>
[JsonPropertyName("newlyReachableCount")]
public required int NewlyReachableCount { get; init; }
/// <summary>
/// Number of sinks that became unreachable.
/// </summary>
[JsonPropertyName("newlyUnreachableCount")]
public required int NewlyUnreachableCount { get; init; }
/// <summary>
/// Details of newly reachable sinks.
/// </summary>
[JsonPropertyName("newlyReachable")]
public required ImmutableArray<DriftedSinkPredicateSummary> NewlyReachable { get; init; }
/// <summary>
/// Details of newly unreachable (mitigated) sinks.
/// </summary>
[JsonPropertyName("newlyUnreachable")]
public required ImmutableArray<DriftedSinkPredicateSummary> NewlyUnreachable { get; init; }
}
/// <summary>
/// Summary of a single drifted sink for inclusion in the predicate.
/// </summary>
public sealed record DriftedSinkPredicateSummary
{
/// <summary>
/// Unique identifier for the sink node.
/// </summary>
[JsonPropertyName("sinkNodeId")]
public required string SinkNodeId { get; init; }
/// <summary>
/// Fully qualified symbol name of the sink.
/// </summary>
[JsonPropertyName("symbol")]
public required string Symbol { get; init; }
/// <summary>
/// Category of the sink (sql_injection, command_execution, etc.).
/// </summary>
[JsonPropertyName("sinkCategory")]
public required string SinkCategory { get; init; }
/// <summary>
/// Kind of drift cause (guard_removed, new_route, dependency_change, etc.).
/// </summary>
[JsonPropertyName("causeKind")]
public required string CauseKind { get; init; }
/// <summary>
/// Human-readable description of the cause.
/// </summary>
[JsonPropertyName("causeDescription")]
public required string CauseDescription { get; init; }
/// <summary>
/// CVE IDs associated with this sink.
/// </summary>
[JsonPropertyName("associatedCves")]
public ImmutableArray<string> AssociatedCves { get; init; } = [];
/// <summary>
/// Hash of the compressed path for verification.
/// </summary>
[JsonPropertyName("pathHash")]
public string? PathHash { get; init; }
}
/// <summary>
/// Metadata about the drift analysis.
/// </summary>
public sealed record DriftAnalysisMetadata
{
/// <summary>
/// When the analysis was performed.
/// </summary>
[JsonPropertyName("analyzedAt")]
public required DateTimeOffset AnalyzedAt { get; init; }
/// <summary>
/// Information about the scanner that performed the analysis.
/// </summary>
[JsonPropertyName("scanner")]
public required DriftScannerInfo Scanner { get; init; }
/// <summary>
/// Content-addressed digest of the baseline call graph.
/// </summary>
[JsonPropertyName("baseGraphDigest")]
public required string BaseGraphDigest { get; init; }
/// <summary>
/// Content-addressed digest of the head call graph.
/// </summary>
[JsonPropertyName("headGraphDigest")]
public required string HeadGraphDigest { get; init; }
/// <summary>
/// Optional: digest of the code change facts used.
/// </summary>
[JsonPropertyName("codeChangesDigest")]
public string? CodeChangesDigest { get; init; }
}
/// <summary>
/// Information about the scanner that performed drift analysis.
/// </summary>
public sealed record DriftScannerInfo
{
/// <summary>
/// Name of the scanner.
/// </summary>
[JsonPropertyName("name")]
public required string Name { get; init; }
/// <summary>
/// Version of the scanner.
/// </summary>
[JsonPropertyName("version")]
public required string Version { get; init; }
/// <summary>
/// Optional ruleset used for sink detection.
/// </summary>
[JsonPropertyName("ruleset")]
public string? Ruleset { get; init; }
}

View File

@@ -0,0 +1,257 @@
// -----------------------------------------------------------------------------
// ReachabilityDriftStatement.cs
// Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
// Description: DSSE predicate for reachability drift attestation.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Statements;
/// <summary>
/// In-toto statement for reachability drift between scans.
/// Predicate type: stellaops.dev/predicates/reachability-drift@v1
/// </summary>
public sealed record ReachabilityDriftStatement : InTotoStatement
{
/// <inheritdoc />
[JsonPropertyName("predicateType")]
public override string PredicateType => "stellaops.dev/predicates/reachability-drift@v1";
/// <summary>
/// The drift payload.
/// </summary>
[JsonPropertyName("predicate")]
public required ReachabilityDriftPayload Predicate { get; init; }
}
/// <summary>
/// Payload for reachability drift statements.
/// </summary>
public sealed record ReachabilityDriftPayload
{
/// <summary>
/// Base image reference (before).
/// </summary>
[JsonPropertyName("baseImage")]
public required ImageReference BaseImage { get; init; }
/// <summary>
/// Target image reference (after).
/// </summary>
[JsonPropertyName("targetImage")]
public required ImageReference TargetImage { get; init; }
/// <summary>
/// Scan ID of the base scan.
/// </summary>
[JsonPropertyName("baseScanId")]
public required string BaseScanId { get; init; }
/// <summary>
/// Scan ID of the head scan.
/// </summary>
[JsonPropertyName("headScanId")]
public required string HeadScanId { get; init; }
/// <summary>
/// Drift summary.
/// </summary>
[JsonPropertyName("drift")]
public required DriftSummary Drift { get; init; }
/// <summary>
/// Analysis metadata.
/// </summary>
[JsonPropertyName("analysis")]
public required DriftAnalysisMetadata Analysis { get; init; }
}
/// <summary>
/// Image reference for drift comparison.
/// </summary>
public sealed record ImageReference
{
/// <summary>
/// Image name (e.g., "myregistry.io/app").
/// </summary>
[JsonPropertyName("name")]
public required string Name { get; init; }
/// <summary>
/// Image digest (e.g., "sha256:...").
/// </summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
}
/// <summary>
/// Summary of reachability drift.
/// </summary>
public sealed record DriftSummary
{
/// <summary>
/// Count of newly reachable paths (NEW RISK).
/// </summary>
[JsonPropertyName("newlyReachableCount")]
public required int NewlyReachableCount { get; init; }
/// <summary>
/// Count of newly unreachable paths (MITIGATED).
/// </summary>
[JsonPropertyName("newlyUnreachableCount")]
public required int NewlyUnreachableCount { get; init; }
/// <summary>
/// Details of newly reachable sinks.
/// </summary>
[JsonPropertyName("newlyReachable")]
public ImmutableArray<DriftedSinkSummary> NewlyReachable { get; init; } = [];
/// <summary>
/// Details of newly unreachable sinks.
/// </summary>
[JsonPropertyName("newlyUnreachable")]
public ImmutableArray<DriftedSinkSummary> NewlyUnreachable { get; init; } = [];
/// <summary>
/// Net change in reachable vulnerability paths.
/// Positive = more risk, negative = less risk.
/// </summary>
[JsonPropertyName("netChange")]
public int NetChange => NewlyReachableCount - NewlyUnreachableCount;
/// <summary>
/// Whether this drift should block a PR.
/// </summary>
[JsonPropertyName("shouldBlock")]
public bool ShouldBlock => NewlyReachableCount > 0;
}
/// <summary>
/// Summary of a drifted sink.
/// </summary>
public sealed record DriftedSinkSummary
{
/// <summary>
/// Sink node identifier.
/// </summary>
[JsonPropertyName("sinkNodeId")]
public required string SinkNodeId { get; init; }
/// <summary>
/// Symbol name of the sink.
/// </summary>
[JsonPropertyName("symbol")]
public required string Symbol { get; init; }
/// <summary>
/// Category of the sink (e.g., "deserialization", "sql_injection").
/// </summary>
[JsonPropertyName("sinkCategory")]
public required string SinkCategory { get; init; }
/// <summary>
/// Kind of change that caused the drift.
/// </summary>
[JsonPropertyName("causeKind")]
public required string CauseKind { get; init; }
/// <summary>
/// Human-readable description of the cause.
/// </summary>
[JsonPropertyName("causeDescription")]
public required string CauseDescription { get; init; }
/// <summary>
/// File where the change occurred.
/// </summary>
[JsonPropertyName("changedFile")]
public string? ChangedFile { get; init; }
/// <summary>
/// Line where the change occurred.
/// </summary>
[JsonPropertyName("changedLine")]
public int? ChangedLine { get; init; }
/// <summary>
/// Associated CVE IDs.
/// </summary>
[JsonPropertyName("associatedCves")]
public ImmutableArray<string> AssociatedCves { get; init; } = [];
/// <summary>
/// Entry point method key.
/// </summary>
[JsonPropertyName("entryMethodKey")]
public string? EntryMethodKey { get; init; }
/// <summary>
/// Path length from entry to sink.
/// </summary>
[JsonPropertyName("pathLength")]
public int? PathLength { get; init; }
}
/// <summary>
/// Metadata about the drift analysis.
/// </summary>
public sealed record DriftAnalysisMetadata
{
/// <summary>
/// When the analysis was performed.
/// </summary>
[JsonPropertyName("analyzedAt")]
public required DateTimeOffset AnalyzedAt { get; init; }
/// <summary>
/// Scanner information.
/// </summary>
[JsonPropertyName("scanner")]
public required DriftScannerInfo Scanner { get; init; }
/// <summary>
/// Digest of the base call graph.
/// </summary>
[JsonPropertyName("baseGraphDigest")]
public required string BaseGraphDigest { get; init; }
/// <summary>
/// Digest of the head call graph.
/// </summary>
[JsonPropertyName("headGraphDigest")]
public required string HeadGraphDigest { get; init; }
/// <summary>
/// Algorithm used for graph hashing.
/// </summary>
[JsonPropertyName("hashAlgorithm")]
public string HashAlgorithm { get; init; } = "blake3";
}
/// <summary>
/// Scanner information for drift analysis.
/// </summary>
public sealed record DriftScannerInfo
{
/// <summary>
/// Scanner name.
/// </summary>
[JsonPropertyName("name")]
public required string Name { get; init; }
/// <summary>
/// Scanner version.
/// </summary>
[JsonPropertyName("version")]
public required string Version { get; init; }
/// <summary>
/// Ruleset used for analysis.
/// </summary>
[JsonPropertyName("ruleset")]
public string? Ruleset { get; init; }
}

View File

@@ -0,0 +1,316 @@
// -----------------------------------------------------------------------------
// ReachabilityWitnessStatement.cs
// Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
// Description: DSSE predicate for individual reachability witness attestation.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Attestor.ProofChain.Statements;
/// <summary>
/// In-toto statement for reachability witness attestation.
/// Predicate type: stellaops.dev/predicates/reachability-witness@v1
/// </summary>
public sealed record ReachabilityWitnessStatement : InTotoStatement
{
/// <inheritdoc />
[JsonPropertyName("predicateType")]
public override string PredicateType => "stellaops.dev/predicates/reachability-witness@v1";
/// <summary>
/// The witness payload.
/// </summary>
[JsonPropertyName("predicate")]
public required ReachabilityWitnessPayload Predicate { get; init; }
}
/// <summary>
/// Payload for reachability witness statements.
/// </summary>
public sealed record ReachabilityWitnessPayload
{
/// <summary>
/// Unique witness identifier.
/// </summary>
[JsonPropertyName("witnessId")]
public required string WitnessId { get; init; }
/// <summary>
/// Scan ID that produced this witness.
/// </summary>
[JsonPropertyName("scanId")]
public required string ScanId { get; init; }
/// <summary>
/// Vulnerability identifier (internal).
/// </summary>
[JsonPropertyName("vulnId")]
public required string VulnId { get; init; }
/// <summary>
/// CVE identifier if applicable.
/// </summary>
[JsonPropertyName("cveId")]
public string? CveId { get; init; }
/// <summary>
/// Package name.
/// </summary>
[JsonPropertyName("packageName")]
public required string PackageName { get; init; }
/// <summary>
/// Package version.
/// </summary>
[JsonPropertyName("packageVersion")]
public string? PackageVersion { get; init; }
/// <summary>
/// Package URL (purl).
/// </summary>
[JsonPropertyName("purl")]
public string? Purl { get; init; }
/// <summary>
/// Confidence tier for reachability assessment.
/// </summary>
[JsonPropertyName("confidenceTier")]
public required string ConfidenceTier { get; init; }
/// <summary>
/// Confidence score (0.0-1.0).
/// </summary>
[JsonPropertyName("confidenceScore")]
public required double ConfidenceScore { get; init; }
/// <summary>
/// Whether the vulnerable code is reachable.
/// </summary>
[JsonPropertyName("isReachable")]
public required bool IsReachable { get; init; }
/// <summary>
/// Call path from entry point to sink.
/// </summary>
[JsonPropertyName("callPath")]
public ImmutableArray<WitnessCallPathNode> CallPath { get; init; } = [];
/// <summary>
/// Entry point information.
/// </summary>
[JsonPropertyName("entrypoint")]
public WitnessPathNode? Entrypoint { get; init; }
/// <summary>
/// Sink (vulnerable method) information.
/// </summary>
[JsonPropertyName("sink")]
public WitnessPathNode? Sink { get; init; }
/// <summary>
/// Security gates encountered along the path.
/// </summary>
[JsonPropertyName("gates")]
public ImmutableArray<WitnessGateInfo> Gates { get; init; } = [];
/// <summary>
/// Evidence metadata.
/// </summary>
[JsonPropertyName("evidence")]
public required WitnessEvidenceMetadata Evidence { get; init; }
/// <summary>
/// When the witness was observed.
/// </summary>
[JsonPropertyName("observedAt")]
public required DateTimeOffset ObservedAt { get; init; }
/// <summary>
/// VEX recommendation based on reachability.
/// </summary>
[JsonPropertyName("vexRecommendation")]
public string? VexRecommendation { get; init; }
}
/// <summary>
/// Node in the witness call path.
/// </summary>
public sealed record WitnessCallPathNode
{
/// <summary>
/// Node identifier.
/// </summary>
[JsonPropertyName("nodeId")]
public required string NodeId { get; init; }
/// <summary>
/// Symbol name.
/// </summary>
[JsonPropertyName("symbol")]
public required string Symbol { get; init; }
/// <summary>
/// Source file path.
/// </summary>
[JsonPropertyName("file")]
public string? File { get; init; }
/// <summary>
/// Line number.
/// </summary>
[JsonPropertyName("line")]
public int? Line { get; init; }
/// <summary>
/// Package name if external.
/// </summary>
[JsonPropertyName("package")]
public string? Package { get; init; }
/// <summary>
/// Whether this node was changed (for drift).
/// </summary>
[JsonPropertyName("isChanged")]
public bool IsChanged { get; init; }
/// <summary>
/// Kind of change if changed.
/// </summary>
[JsonPropertyName("changeKind")]
public string? ChangeKind { get; init; }
}
/// <summary>
/// Detailed path node for entry/sink.
/// </summary>
public sealed record WitnessPathNode
{
/// <summary>
/// Node identifier.
/// </summary>
[JsonPropertyName("nodeId")]
public required string NodeId { get; init; }
/// <summary>
/// Symbol name.
/// </summary>
[JsonPropertyName("symbol")]
public required string Symbol { get; init; }
/// <summary>
/// Source file path.
/// </summary>
[JsonPropertyName("file")]
public string? File { get; init; }
/// <summary>
/// Line number.
/// </summary>
[JsonPropertyName("line")]
public int? Line { get; init; }
/// <summary>
/// Package name.
/// </summary>
[JsonPropertyName("package")]
public string? Package { get; init; }
/// <summary>
/// Method name.
/// </summary>
[JsonPropertyName("method")]
public string? Method { get; init; }
/// <summary>
/// HTTP route if entry point.
/// </summary>
[JsonPropertyName("httpRoute")]
public string? HttpRoute { get; init; }
/// <summary>
/// HTTP method if entry point.
/// </summary>
[JsonPropertyName("httpMethod")]
public string? HttpMethod { get; init; }
}
/// <summary>
/// Security gate information in witness.
/// </summary>
public sealed record WitnessGateInfo
{
/// <summary>
/// Type of gate.
/// </summary>
[JsonPropertyName("gateType")]
public required string GateType { get; init; }
/// <summary>
/// Symbol name.
/// </summary>
[JsonPropertyName("symbol")]
public required string Symbol { get; init; }
/// <summary>
/// Confidence in gate detection.
/// </summary>
[JsonPropertyName("confidence")]
public required double Confidence { get; init; }
/// <summary>
/// Description of the gate.
/// </summary>
[JsonPropertyName("description")]
public string? Description { get; init; }
/// <summary>
/// File where gate is located.
/// </summary>
[JsonPropertyName("file")]
public string? File { get; init; }
/// <summary>
/// Line number.
/// </summary>
[JsonPropertyName("line")]
public int? Line { get; init; }
}
/// <summary>
/// Evidence metadata for witness.
/// </summary>
public sealed record WitnessEvidenceMetadata
{
/// <summary>
/// Call graph hash.
/// </summary>
[JsonPropertyName("callGraphHash")]
public string? CallGraphHash { get; init; }
/// <summary>
/// Surface hash.
/// </summary>
[JsonPropertyName("surfaceHash")]
public string? SurfaceHash { get; init; }
/// <summary>
/// Analysis method used.
/// </summary>
[JsonPropertyName("analysisMethod")]
public required string AnalysisMethod { get; init; }
/// <summary>
/// Tool version.
/// </summary>
[JsonPropertyName("toolVersion")]
public string? ToolVersion { get; init; }
/// <summary>
/// Hash algorithm used.
/// </summary>
[JsonPropertyName("hashAlgorithm")]
public string HashAlgorithm { get; init; } = "blake3";
}

View File

@@ -0,0 +1,497 @@
// -----------------------------------------------------------------------------
// CommandHandlers.Witness.cs
// Sprint: SPRINT_3700_0005_0001_witness_ui_cli
// Tasks: CLI-001, CLI-002, CLI-003, CLI-004
// Description: Command handlers for reachability witness CLI.
// -----------------------------------------------------------------------------
using System.Text.Json;
using Spectre.Console;
namespace StellaOps.Cli.Commands;
internal static partial class CommandHandlers
{
private static readonly JsonSerializerOptions WitnessJsonOptions = new()
{
WriteIndented = true,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
/// <summary>
/// Handler for `witness show` command.
/// </summary>
internal static async Task HandleWitnessShowAsync(
IServiceProvider services,
string witnessId,
string format,
bool noColor,
bool pathOnly,
bool verbose,
CancellationToken cancellationToken)
{
var console = AnsiConsole.Console;
if (verbose)
{
console.MarkupLine($"[dim]Fetching witness: {witnessId}[/]");
}
// TODO: Replace with actual service call when witness API is available
var witness = new WitnessDto
{
WitnessId = witnessId,
WitnessSchema = "stellaops.witness.v1",
CveId = "CVE-2024-12345",
PackageName = "Newtonsoft.Json",
PackageVersion = "12.0.3",
ConfidenceTier = "confirmed",
ObservedAt = DateTimeOffset.UtcNow.AddHours(-2).ToString("O"),
Entrypoint = new WitnessEntrypointDto
{
Type = "http",
Route = "GET /api/users/{id}",
Symbol = "UserController.GetUser()",
File = "src/Controllers/UserController.cs",
Line = 42
},
Sink = new WitnessSinkDto
{
Symbol = "JsonConvert.DeserializeObject<User>()",
Package = "Newtonsoft.Json",
IsTrigger = true
},
Path = new[]
{
new PathStepDto { Symbol = "UserController.GetUser()", File = "src/Controllers/UserController.cs", Line = 42 },
new PathStepDto { Symbol = "UserService.GetUserById()", File = "src/Services/UserService.cs", Line = 88 },
new PathStepDto { Symbol = "JsonConvert.DeserializeObject<User>()", Package = "Newtonsoft.Json" }
},
Gates = new[]
{
new GateDto { Type = "authRequired", Detail = "[Authorize] attribute", Confidence = 0.95m }
},
Evidence = new WitnessEvidenceDto
{
CallgraphDigest = "blake3:a1b2c3d4e5f6...",
SurfaceDigest = "sha256:9f8e7d6c5b4a...",
SignedBy = "attestor-stellaops-ed25519"
}
};
switch (format)
{
case "json":
var json = JsonSerializer.Serialize(witness, WitnessJsonOptions);
console.WriteLine(json);
break;
case "yaml":
WriteWitnessYaml(console, witness);
break;
default:
WriteWitnessText(console, witness, pathOnly, noColor);
break;
}
await Task.CompletedTask;
}
/// <summary>
/// Handler for `witness verify` command.
/// </summary>
internal static async Task HandleWitnessVerifyAsync(
IServiceProvider services,
string witnessId,
string? publicKeyPath,
bool offline,
bool verbose,
CancellationToken cancellationToken)
{
var console = AnsiConsole.Console;
if (verbose)
{
console.MarkupLine($"[dim]Verifying witness: {witnessId}[/]");
if (publicKeyPath != null)
{
console.MarkupLine($"[dim]Using public key: {publicKeyPath}[/]");
}
}
// TODO: Replace with actual verification when DSSE verification is wired up
await Task.Delay(100, cancellationToken); // Simulate verification
// Placeholder result
var valid = true;
var keyId = "attestor-stellaops-ed25519";
var algorithm = "Ed25519";
if (valid)
{
console.MarkupLine("[green]✓ Signature VALID[/]");
console.MarkupLine($" Key ID: {keyId}");
console.MarkupLine($" Algorithm: {algorithm}");
}
else
{
console.MarkupLine("[red]✗ Signature INVALID[/]");
console.MarkupLine(" Error: Signature verification failed");
Environment.ExitCode = 1;
}
}
/// <summary>
/// Handler for `witness list` command.
/// </summary>
internal static async Task HandleWitnessListAsync(
IServiceProvider services,
string scanId,
string? cve,
string? tier,
string format,
int limit,
bool verbose,
CancellationToken cancellationToken)
{
var console = AnsiConsole.Console;
if (verbose)
{
console.MarkupLine($"[dim]Listing witnesses for scan: {scanId}[/]");
if (cve != null) console.MarkupLine($"[dim]Filtering by CVE: {cve}[/]");
if (tier != null) console.MarkupLine($"[dim]Filtering by tier: {tier}[/]");
}
// TODO: Replace with actual service call
var witnesses = new[]
{
new WitnessListItemDto
{
WitnessId = "wit:sha256:abc123",
CveId = "CVE-2024-12345",
PackageName = "Newtonsoft.Json",
ConfidenceTier = "confirmed",
Entrypoint = "GET /api/users/{id}",
Sink = "JsonConvert.DeserializeObject()"
},
new WitnessListItemDto
{
WitnessId = "wit:sha256:def456",
CveId = "CVE-2024-12346",
PackageName = "lodash",
ConfidenceTier = "likely",
Entrypoint = "POST /api/data",
Sink = "_.template()"
}
};
switch (format)
{
case "json":
var json = JsonSerializer.Serialize(new { witnesses, total = witnesses.Length }, WitnessJsonOptions);
console.WriteLine(json);
break;
default:
WriteWitnessListTable(console, witnesses);
break;
}
await Task.CompletedTask;
}
/// <summary>
/// Handler for `witness export` command.
/// </summary>
internal static async Task HandleWitnessExportAsync(
IServiceProvider services,
string witnessId,
string format,
string? outputPath,
bool includeDsse,
bool verbose,
CancellationToken cancellationToken)
{
var console = AnsiConsole.Console;
if (verbose)
{
console.MarkupLine($"[dim]Exporting witness: {witnessId} as {format}[/]");
if (outputPath != null) console.MarkupLine($"[dim]Output: {outputPath}[/]");
}
// TODO: Replace with actual witness fetch and export
var exportContent = format switch
{
"sarif" => GenerateWitnessSarif(witnessId),
_ => GenerateWitnessJson(witnessId, includeDsse)
};
if (outputPath != null)
{
await File.WriteAllTextAsync(outputPath, exportContent, cancellationToken);
console.MarkupLine($"[green]Exported to {outputPath}[/]");
}
else
{
console.WriteLine(exportContent);
}
}
private static void WriteWitnessText(IAnsiConsole console, WitnessDto witness, bool pathOnly, bool noColor)
{
if (!pathOnly)
{
console.WriteLine();
console.MarkupLine($"[bold]WITNESS:[/] {witness.WitnessId}");
console.WriteLine(new string('═', 70));
console.WriteLine();
var tierColor = witness.ConfidenceTier switch
{
"confirmed" => "red",
"likely" => "yellow",
"present" => "grey",
"unreachable" => "green",
_ => "white"
};
console.MarkupLine($"Vulnerability: [bold]{witness.CveId}[/] ({witness.PackageName} <={witness.PackageVersion})");
console.MarkupLine($"Confidence: [{tierColor}]{witness.ConfidenceTier.ToUpperInvariant()}[/]");
console.MarkupLine($"Observed: {witness.ObservedAt}");
console.WriteLine();
}
console.MarkupLine("[bold]CALL PATH[/]");
console.WriteLine(new string('─', 70));
// Entrypoint
console.MarkupLine($"[green][ENTRYPOINT][/] {witness.Entrypoint.Route}");
console.MarkupLine(" │");
// Path steps
for (var i = 0; i < witness.Path.Length; i++)
{
var step = witness.Path[i];
var isLast = i == witness.Path.Length - 1;
var prefix = isLast ? "└──" : "├──";
if (isLast)
{
console.MarkupLine($" {prefix} [red][SINK][/] {step.Symbol}");
if (step.Package != null)
{
console.MarkupLine($" └── {step.Package} (TRIGGER METHOD)");
}
}
else
{
console.MarkupLine($" {prefix} {step.Symbol}");
if (step.File != null)
{
console.MarkupLine($" │ └── {step.File}:{step.Line}");
}
// Check for gates after this step
if (i < witness.Gates.Length)
{
var gate = witness.Gates[i];
console.MarkupLine(" │");
console.MarkupLine($" │ [yellow][GATE: {gate.Type}][/] {gate.Detail} ({gate.Confidence:P0})");
}
}
if (!isLast)
{
console.MarkupLine(" │");
}
}
if (!pathOnly)
{
console.WriteLine();
console.MarkupLine("[bold]EVIDENCE[/]");
console.WriteLine(new string('─', 70));
console.MarkupLine($"Call Graph: {witness.Evidence.CallgraphDigest}");
console.MarkupLine($"Surface: {witness.Evidence.SurfaceDigest}");
console.MarkupLine($"Signed By: {witness.Evidence.SignedBy}");
console.WriteLine();
}
}
private static void WriteWitnessYaml(IAnsiConsole console, WitnessDto witness)
{
console.WriteLine($"witnessId: {witness.WitnessId}");
console.WriteLine($"witnessSchema: {witness.WitnessSchema}");
console.WriteLine($"cveId: {witness.CveId}");
console.WriteLine($"packageName: {witness.PackageName}");
console.WriteLine($"packageVersion: {witness.PackageVersion}");
console.WriteLine($"confidenceTier: {witness.ConfidenceTier}");
console.WriteLine($"observedAt: {witness.ObservedAt}");
console.WriteLine("entrypoint:");
console.WriteLine($" type: {witness.Entrypoint.Type}");
console.WriteLine($" route: {witness.Entrypoint.Route}");
console.WriteLine($" symbol: {witness.Entrypoint.Symbol}");
console.WriteLine("path:");
foreach (var step in witness.Path)
{
console.WriteLine($" - symbol: {step.Symbol}");
if (step.File != null) console.WriteLine($" file: {step.File}");
if (step.Line > 0) console.WriteLine($" line: {step.Line}");
}
console.WriteLine("evidence:");
console.WriteLine($" callgraphDigest: {witness.Evidence.CallgraphDigest}");
console.WriteLine($" surfaceDigest: {witness.Evidence.SurfaceDigest}");
console.WriteLine($" signedBy: {witness.Evidence.SignedBy}");
}
private static void WriteWitnessListTable(IAnsiConsole console, WitnessListItemDto[] witnesses)
{
var table = new Table();
table.AddColumn("Witness ID");
table.AddColumn("CVE");
table.AddColumn("Package");
table.AddColumn("Tier");
table.AddColumn("Entrypoint");
table.AddColumn("Sink");
foreach (var w in witnesses)
{
var tierColor = w.ConfidenceTier switch
{
"confirmed" => "red",
"likely" => "yellow",
"present" => "grey",
"unreachable" => "green",
_ => "white"
};
table.AddRow(
w.WitnessId[..20] + "...",
w.CveId,
w.PackageName,
$"[{tierColor}]{w.ConfidenceTier}[/]",
w.Entrypoint.Length > 25 ? w.Entrypoint[..25] + "..." : w.Entrypoint,
w.Sink.Length > 25 ? w.Sink[..25] + "..." : w.Sink
);
}
console.Write(table);
}
private static string GenerateWitnessJson(string witnessId, bool includeDsse)
{
var witness = new
{
witness_schema = "stellaops.witness.v1",
witness_id = witnessId,
artifact = new { sbom_digest = "sha256:...", component_purl = "pkg:nuget/Newtonsoft.Json@12.0.3" },
vuln = new { id = "CVE-2024-12345", source = "NVD" },
entrypoint = new { type = "http", route = "GET /api/users/{id}" },
path = new[] { new { symbol = "UserController.GetUser" }, new { symbol = "JsonConvert.DeserializeObject" } },
evidence = new { callgraph_digest = "blake3:...", surface_digest = "sha256:..." }
};
return JsonSerializer.Serialize(witness, WitnessJsonOptions);
}
private static string GenerateWitnessSarif(string witnessId)
{
var sarif = new
{
version = "2.1.0",
schema = "https://json.schemastore.org/sarif-2.1.0.json",
runs = new[]
{
new
{
tool = new
{
driver = new
{
name = "StellaOps Reachability",
version = "1.0.0",
informationUri = "https://stellaops.dev"
}
},
results = new[]
{
new
{
ruleId = "REACH001",
level = "warning",
message = new { text = "Reachable vulnerability: CVE-2024-12345" },
properties = new { witnessId }
}
}
}
}
};
return JsonSerializer.Serialize(sarif, WitnessJsonOptions);
}
// DTO classes for witness commands
private sealed record WitnessDto
{
public required string WitnessId { get; init; }
public required string WitnessSchema { get; init; }
public required string CveId { get; init; }
public required string PackageName { get; init; }
public required string PackageVersion { get; init; }
public required string ConfidenceTier { get; init; }
public required string ObservedAt { get; init; }
public required WitnessEntrypointDto Entrypoint { get; init; }
public required WitnessSinkDto Sink { get; init; }
public required PathStepDto[] Path { get; init; }
public required GateDto[] Gates { get; init; }
public required WitnessEvidenceDto Evidence { get; init; }
}
private sealed record WitnessEntrypointDto
{
public required string Type { get; init; }
public required string Route { get; init; }
public required string Symbol { get; init; }
public string? File { get; init; }
public int Line { get; init; }
}
private sealed record WitnessSinkDto
{
public required string Symbol { get; init; }
public string? Package { get; init; }
public bool IsTrigger { get; init; }
}
private sealed record PathStepDto
{
public required string Symbol { get; init; }
public string? File { get; init; }
public int Line { get; init; }
public string? Package { get; init; }
}
private sealed record GateDto
{
public required string Type { get; init; }
public required string Detail { get; init; }
public decimal Confidence { get; init; }
}
private sealed record WitnessEvidenceDto
{
public required string CallgraphDigest { get; init; }
public required string SurfaceDigest { get; init; }
public required string SignedBy { get; init; }
}
private sealed record WitnessListItemDto
{
public required string WitnessId { get; init; }
public required string CveId { get; init; }
public required string PackageName { get; init; }
public required string ConfidenceTier { get; init; }
public required string Entrypoint { get; init; }
public required string Sink { get; init; }
}
}

View File

@@ -0,0 +1,255 @@
// -----------------------------------------------------------------------------
// WitnessCommandGroup.cs
// Sprint: SPRINT_3700_0005_0001_witness_ui_cli
// Tasks: CLI-001, CLI-002, CLI-003, CLI-004
// Description: CLI command group for reachability witness operations.
// -----------------------------------------------------------------------------
using System.CommandLine;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Cli.Extensions;
using Spectre.Console;
namespace StellaOps.Cli.Commands;
/// <summary>
/// CLI command group for reachability witness operations.
/// </summary>
internal static class WitnessCommandGroup
{
internal static Command BuildWitnessCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var witness = new Command("witness", "Reachability witness operations.");
witness.Add(BuildWitnessShowCommand(services, verboseOption, cancellationToken));
witness.Add(BuildWitnessVerifyCommand(services, verboseOption, cancellationToken));
witness.Add(BuildWitnessListCommand(services, verboseOption, cancellationToken));
witness.Add(BuildWitnessExportCommand(services, verboseOption, cancellationToken));
return witness;
}
private static Command BuildWitnessShowCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var witnessIdArg = new Argument<string>("witness-id")
{
Description = "The witness ID to display (e.g., wit:sha256:abc123)."
};
var formatOption = new Option<string>("--format", new[] { "-f" })
{
Description = "Output format: text (default), json, yaml."
}.SetDefaultValue("text").FromAmong("text", "json", "yaml");
var noColorOption = new Option<bool>("--no-color")
{
Description = "Disable colored output."
};
var pathOnlyOption = new Option<bool>("--path-only")
{
Description = "Show only the call path, not full witness details."
};
var command = new Command("show", "Display a witness with call path visualization.")
{
witnessIdArg,
formatOption,
noColorOption,
pathOnlyOption,
verboseOption
};
command.SetAction(parseResult =>
{
var witnessId = parseResult.GetValue(witnessIdArg)!;
var format = parseResult.GetValue(formatOption)!;
var noColor = parseResult.GetValue(noColorOption);
var pathOnly = parseResult.GetValue(pathOnlyOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleWitnessShowAsync(
services,
witnessId,
format,
noColor,
pathOnly,
verbose,
cancellationToken);
});
return command;
}
private static Command BuildWitnessVerifyCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var witnessIdArg = new Argument<string>("witness-id")
{
Description = "The witness ID to verify."
};
var publicKeyOption = new Option<string?>("--public-key", new[] { "-k" })
{
Description = "Path to public key file (default: fetch from authority)."
};
var offlineOption = new Option<bool>("--offline")
{
Description = "Verify using local key only, don't fetch from server."
};
var command = new Command("verify", "Verify a witness signature.")
{
witnessIdArg,
publicKeyOption,
offlineOption,
verboseOption
};
command.SetAction(parseResult =>
{
var witnessId = parseResult.GetValue(witnessIdArg)!;
var publicKeyPath = parseResult.GetValue(publicKeyOption);
var offline = parseResult.GetValue(offlineOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleWitnessVerifyAsync(
services,
witnessId,
publicKeyPath,
offline,
verbose,
cancellationToken);
});
return command;
}
private static Command BuildWitnessListCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var scanOption = new Option<string>("--scan", new[] { "-s" })
{
Description = "Scan ID to list witnesses for.",
Required = true
};
var cveOption = new Option<string?>("--cve")
{
Description = "Filter witnesses by CVE ID."
};
var tierOption = new Option<string?>("--tier")
{
Description = "Filter by confidence tier: confirmed, likely, present, unreachable."
}?.FromAmong("confirmed", "likely", "present", "unreachable");
var formatOption = new Option<string>("--format", new[] { "-f" })
{
Description = "Output format: table (default), json."
}.SetDefaultValue("table").FromAmong("table", "json");
var limitOption = new Option<int>("--limit", new[] { "-l" })
{
Description = "Maximum number of witnesses to return."
}.SetDefaultValue(50);
var command = new Command("list", "List witnesses for a scan.")
{
scanOption,
cveOption,
tierOption,
formatOption,
limitOption,
verboseOption
};
command.SetAction(parseResult =>
{
var scanId = parseResult.GetValue(scanOption)!;
var cve = parseResult.GetValue(cveOption);
var tier = parseResult.GetValue(tierOption);
var format = parseResult.GetValue(formatOption)!;
var limit = parseResult.GetValue(limitOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleWitnessListAsync(
services,
scanId,
cve,
tier,
format,
limit,
verbose,
cancellationToken);
});
return command;
}
private static Command BuildWitnessExportCommand(
IServiceProvider services,
Option<bool> verboseOption,
CancellationToken cancellationToken)
{
var witnessIdArg = new Argument<string>("witness-id")
{
Description = "The witness ID to export."
};
var formatOption = new Option<string>("--format", new[] { "-f" })
{
Description = "Export format: json (default), sarif."
}.SetDefaultValue("json").FromAmong("json", "sarif");
var outputOption = new Option<string?>("--output", new[] { "-o" })
{
Description = "Output file path (default: stdout)."
};
var includeDsseOption = new Option<bool>("--include-dsse")
{
Description = "Include DSSE envelope in export."
};
var command = new Command("export", "Export a witness to file.")
{
witnessIdArg,
formatOption,
outputOption,
includeDsseOption,
verboseOption
};
command.SetAction(parseResult =>
{
var witnessId = parseResult.GetValue(witnessIdArg)!;
var format = parseResult.GetValue(formatOption)!;
var outputPath = parseResult.GetValue(outputOption);
var includeDsse = parseResult.GetValue(includeDsseOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleWitnessExportAsync(
services,
witnessId,
format,
outputPath,
includeDsse,
verbose,
cancellationToken);
});
return command;
}
}

View File

@@ -0,0 +1,296 @@
// -----------------------------------------------------------------------------
// AssumptionPenalties.cs
// Sprint: SPRINT_3850_0001_0001 (Competitive Gap Closure)
// Task: D-SCORE-002 - Assumption penalties in score calculation
// Description: Penalties applied when scoring relies on assumptions.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Scoring;
/// <summary>
/// Types of assumptions that incur scoring penalties.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum AssumptionType
{
/// <summary>Assumed vulnerable code is reachable (no reachability analysis).</summary>
AssumedReachable,
/// <summary>Assumed VEX status from source without verification.</summary>
AssumedVexStatus,
/// <summary>Assumed SBOM completeness (no SBOM validation).</summary>
AssumedSbomComplete,
/// <summary>Assumed feed is current (stale feed data).</summary>
AssumedFeedCurrent,
/// <summary>Assumed default CVSS metrics (no specific vector).</summary>
AssumedDefaultCvss,
/// <summary>Assumed package version (ambiguous version).</summary>
AssumedPackageVersion,
/// <summary>Assumed deployment context (no runtime info).</summary>
AssumedDeploymentContext,
/// <summary>Assumed transitive dependency (unverified chain).</summary>
AssumedTransitiveDep,
/// <summary>Assumed no compensating controls.</summary>
AssumedNoControls,
/// <summary>Assumed exploit exists (no PoC verification).</summary>
AssumedExploitExists
}
/// <summary>
/// Configuration for assumption penalties.
/// </summary>
public sealed record AssumptionPenaltyConfig
{
/// <summary>
/// Default penalties by assumption type.
/// </summary>
[JsonPropertyName("penalties")]
public ImmutableDictionary<AssumptionType, double> Penalties { get; init; } =
DefaultPenalties;
/// <summary>
/// Whether to compound penalties (multiply) or add them.
/// </summary>
[JsonPropertyName("compoundPenalties")]
public bool CompoundPenalties { get; init; } = true;
/// <summary>
/// Maximum total penalty (floor for confidence).
/// </summary>
[JsonPropertyName("maxTotalPenalty")]
public double MaxTotalPenalty { get; init; } = 0.7;
/// <summary>
/// Minimum confidence score after penalties.
/// </summary>
[JsonPropertyName("minConfidence")]
public double MinConfidence { get; init; } = 0.1;
/// <summary>
/// Default assumption penalties.
/// </summary>
public static readonly ImmutableDictionary<AssumptionType, double> DefaultPenalties =
new Dictionary<AssumptionType, double>
{
[AssumptionType.AssumedReachable] = 0.15,
[AssumptionType.AssumedVexStatus] = 0.10,
[AssumptionType.AssumedSbomComplete] = 0.12,
[AssumptionType.AssumedFeedCurrent] = 0.08,
[AssumptionType.AssumedDefaultCvss] = 0.05,
[AssumptionType.AssumedPackageVersion] = 0.10,
[AssumptionType.AssumedDeploymentContext] = 0.07,
[AssumptionType.AssumedTransitiveDep] = 0.05,
[AssumptionType.AssumedNoControls] = 0.08,
[AssumptionType.AssumedExploitExists] = 0.06
}.ToImmutableDictionary();
}
/// <summary>
/// An assumption made during scoring.
/// </summary>
public sealed record ScoringAssumption
{
/// <summary>
/// Type of assumption.
/// </summary>
[JsonPropertyName("type")]
public required AssumptionType Type { get; init; }
/// <summary>
/// Human-readable description.
/// </summary>
[JsonPropertyName("description")]
public required string Description { get; init; }
/// <summary>
/// Penalty applied for this assumption.
/// </summary>
[JsonPropertyName("penalty")]
public required double Penalty { get; init; }
/// <summary>
/// What would remove this assumption.
/// </summary>
[JsonPropertyName("resolutionHint")]
public string? ResolutionHint { get; init; }
/// <summary>
/// Related finding or component ID.
/// </summary>
[JsonPropertyName("relatedId")]
public string? RelatedId { get; init; }
}
/// <summary>
/// Result of applying assumption penalties.
/// </summary>
public sealed record AssumptionPenaltyResult
{
/// <summary>
/// Original confidence score (before penalties).
/// </summary>
[JsonPropertyName("originalConfidence")]
public required double OriginalConfidence { get; init; }
/// <summary>
/// Adjusted confidence score (after penalties).
/// </summary>
[JsonPropertyName("adjustedConfidence")]
public required double AdjustedConfidence { get; init; }
/// <summary>
/// Total penalty applied.
/// </summary>
[JsonPropertyName("totalPenalty")]
public required double TotalPenalty { get; init; }
/// <summary>
/// Assumptions that contributed to the penalty.
/// </summary>
[JsonPropertyName("assumptions")]
public ImmutableArray<ScoringAssumption> Assumptions { get; init; } = [];
/// <summary>
/// Whether the penalty was capped.
/// </summary>
[JsonPropertyName("penaltyCapped")]
public bool PenaltyCapped { get; init; }
}
/// <summary>
/// Calculator for assumption-based penalties.
/// </summary>
public sealed class AssumptionPenaltyCalculator
{
private readonly AssumptionPenaltyConfig _config;
public AssumptionPenaltyCalculator(AssumptionPenaltyConfig? config = null)
{
_config = config ?? new AssumptionPenaltyConfig();
}
/// <summary>
/// Calculates the penalty result for a set of assumptions.
/// </summary>
public AssumptionPenaltyResult Calculate(
double originalConfidence,
IEnumerable<ScoringAssumption> assumptions)
{
var assumptionList = assumptions.ToImmutableArray();
if (assumptionList.Length == 0)
{
return new AssumptionPenaltyResult
{
OriginalConfidence = originalConfidence,
AdjustedConfidence = originalConfidence,
TotalPenalty = 0,
Assumptions = [],
PenaltyCapped = false
};
}
double adjustedConfidence;
double totalPenalty;
bool capped = false;
if (_config.CompoundPenalties)
{
// Compound: multiply (1 - penalty) factors
var factor = 1.0;
foreach (var assumption in assumptionList)
{
factor *= (1.0 - assumption.Penalty);
}
adjustedConfidence = originalConfidence * factor;
totalPenalty = 1.0 - factor;
}
else
{
// Additive: sum penalties
totalPenalty = assumptionList.Sum(a => a.Penalty);
if (totalPenalty > _config.MaxTotalPenalty)
{
totalPenalty = _config.MaxTotalPenalty;
capped = true;
}
adjustedConfidence = originalConfidence * (1.0 - totalPenalty);
}
// Apply minimum confidence floor
if (adjustedConfidence < _config.MinConfidence)
{
adjustedConfidence = _config.MinConfidence;
capped = true;
}
return new AssumptionPenaltyResult
{
OriginalConfidence = originalConfidence,
AdjustedConfidence = adjustedConfidence,
TotalPenalty = totalPenalty,
Assumptions = assumptionList,
PenaltyCapped = capped
};
}
/// <summary>
/// Creates a scoring assumption with default penalty.
/// </summary>
public ScoringAssumption CreateAssumption(
AssumptionType type,
string description,
string? relatedId = null)
{
var penalty = _config.Penalties.TryGetValue(type, out var p)
? p
: AssumptionPenaltyConfig.DefaultPenalties.GetValueOrDefault(type, 0.05);
return new ScoringAssumption
{
Type = type,
Description = description,
Penalty = penalty,
ResolutionHint = GetResolutionHint(type),
RelatedId = relatedId
};
}
private static string GetResolutionHint(AssumptionType type) => type switch
{
AssumptionType.AssumedReachable =>
"Run reachability analysis to determine actual code path",
AssumptionType.AssumedVexStatus =>
"Obtain signed VEX statement from vendor",
AssumptionType.AssumedSbomComplete =>
"Generate verified SBOM with attestation",
AssumptionType.AssumedFeedCurrent =>
"Update vulnerability feeds to latest version",
AssumptionType.AssumedDefaultCvss =>
"Obtain environment-specific CVSS vector",
AssumptionType.AssumedPackageVersion =>
"Verify exact package version from lockfile",
AssumptionType.AssumedDeploymentContext =>
"Provide runtime environment information",
AssumptionType.AssumedTransitiveDep =>
"Verify dependency chain with lockfile",
AssumptionType.AssumedNoControls =>
"Document compensating controls in policy",
AssumptionType.AssumedExploitExists =>
"Check exploit databases for PoC availability",
_ => "Provide additional context to remove assumption"
};
}

View File

@@ -0,0 +1,394 @@
// -----------------------------------------------------------------------------
// ScoreAttestationStatement.cs
// Sprint: SPRINT_3850_0001_0001 (Competitive Gap Closure)
// Task: D-SCORE-005 - DSSE-signed score attestation
// Description: DSSE predicate for attesting to security scores.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Immutable;
using System.Text.Json.Serialization;
using StellaOps.Attestor.ProofChain.Statements;
namespace StellaOps.Policy.Scoring;
/// <summary>
/// DSSE predicate type for score attestation.
/// </summary>
public static class ScoreAttestationPredicateType
{
/// <summary>
/// Predicate type URI for score attestation.
/// </summary>
public const string PredicateType = "https://stellaops.io/attestation/score/v1";
}
/// <summary>
/// Score attestation statement (DSSE predicate payload).
/// </summary>
public sealed record ScoreAttestationStatement
{
/// <summary>
/// Attestation version.
/// </summary>
[JsonPropertyName("version")]
public string Version { get; init; } = "1.0.0";
/// <summary>
/// When the score was computed.
/// </summary>
[JsonPropertyName("scoredAt")]
public required DateTimeOffset ScoredAt { get; init; }
/// <summary>
/// Subject artifact digest.
/// </summary>
[JsonPropertyName("subjectDigest")]
public required string SubjectDigest { get; init; }
/// <summary>
/// Subject artifact name/reference.
/// </summary>
[JsonPropertyName("subjectName")]
public string? SubjectName { get; init; }
/// <summary>
/// Overall security score (0-100).
/// </summary>
[JsonPropertyName("overallScore")]
public required int OverallScore { get; init; }
/// <summary>
/// Score confidence (0.0 to 1.0).
/// </summary>
[JsonPropertyName("confidence")]
public required double Confidence { get; init; }
/// <summary>
/// Score grade (A-F).
/// </summary>
[JsonPropertyName("grade")]
public required string Grade { get; init; }
/// <summary>
/// Score breakdown by category.
/// </summary>
[JsonPropertyName("breakdown")]
public required ScoreBreakdown Breakdown { get; init; }
/// <summary>
/// Scoring policy used.
/// </summary>
[JsonPropertyName("policy")]
public required ScoringPolicyRef Policy { get; init; }
/// <summary>
/// Inputs used for scoring.
/// </summary>
[JsonPropertyName("inputs")]
public required ScoringInputs Inputs { get; init; }
/// <summary>
/// Assumptions made during scoring.
/// </summary>
[JsonPropertyName("assumptions")]
public ImmutableArray<AssumptionSummary> Assumptions { get; init; } = [];
/// <summary>
/// Unknowns that affect the score.
/// </summary>
[JsonPropertyName("unknowns")]
public ImmutableArray<UnknownSummary> Unknowns { get; init; } = [];
/// <summary>
/// Hash of this statement for integrity.
/// </summary>
[JsonPropertyName("statementHash")]
public string? StatementHash { get; init; }
}
/// <summary>
/// Score breakdown by category.
/// </summary>
public sealed record ScoreBreakdown
{
/// <summary>
/// Vulnerability score (0-100).
/// </summary>
[JsonPropertyName("vulnerability")]
public required int Vulnerability { get; init; }
/// <summary>
/// Exploitability score (0-100).
/// </summary>
[JsonPropertyName("exploitability")]
public required int Exploitability { get; init; }
/// <summary>
/// Reachability score (0-100).
/// </summary>
[JsonPropertyName("reachability")]
public required int Reachability { get; init; }
/// <summary>
/// Policy compliance score (0-100).
/// </summary>
[JsonPropertyName("compliance")]
public required int Compliance { get; init; }
/// <summary>
/// Supply chain score (0-100).
/// </summary>
[JsonPropertyName("supplyChain")]
public required int SupplyChain { get; init; }
/// <summary>
/// VEX/mitigation score (0-100).
/// </summary>
[JsonPropertyName("mitigation")]
public required int Mitigation { get; init; }
}
/// <summary>
/// Reference to the scoring policy used.
/// </summary>
public sealed record ScoringPolicyRef
{
/// <summary>
/// Policy identifier.
/// </summary>
[JsonPropertyName("id")]
public required string Id { get; init; }
/// <summary>
/// Policy version.
/// </summary>
[JsonPropertyName("version")]
public required string Version { get; init; }
/// <summary>
/// Policy digest.
/// </summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>
/// Policy name.
/// </summary>
[JsonPropertyName("name")]
public string? Name { get; init; }
}
/// <summary>
/// Inputs used for scoring.
/// </summary>
public sealed record ScoringInputs
{
/// <summary>
/// SBOM digest.
/// </summary>
[JsonPropertyName("sbomDigest")]
public string? SbomDigest { get; init; }
/// <summary>
/// Vulnerability feed version.
/// </summary>
[JsonPropertyName("feedVersion")]
public string? FeedVersion { get; init; }
/// <summary>
/// Feed fetch timestamp.
/// </summary>
[JsonPropertyName("feedFetchedAt")]
public DateTimeOffset? FeedFetchedAt { get; init; }
/// <summary>
/// Reachability analysis digest.
/// </summary>
[JsonPropertyName("reachabilityDigest")]
public string? ReachabilityDigest { get; init; }
/// <summary>
/// VEX documents used.
/// </summary>
[JsonPropertyName("vexDocuments")]
public ImmutableArray<VexDocRef> VexDocuments { get; init; } = [];
/// <summary>
/// Total components analyzed.
/// </summary>
[JsonPropertyName("componentCount")]
public int ComponentCount { get; init; }
/// <summary>
/// Total vulnerabilities found.
/// </summary>
[JsonPropertyName("vulnerabilityCount")]
public int VulnerabilityCount { get; init; }
/// <summary>
/// Total findings after filtering.
/// </summary>
[JsonPropertyName("findingCount")]
public int FindingCount { get; init; }
}
/// <summary>
/// Reference to a VEX document.
/// </summary>
public sealed record VexDocRef
{
/// <summary>
/// VEX document digest.
/// </summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>
/// VEX source.
/// </summary>
[JsonPropertyName("source")]
public required string Source { get; init; }
/// <summary>
/// Decisions applied from this document.
/// </summary>
[JsonPropertyName("decisionCount")]
public int DecisionCount { get; init; }
}
/// <summary>
/// Summary of an assumption made.
/// </summary>
public sealed record AssumptionSummary
{
/// <summary>
/// Assumption type.
/// </summary>
[JsonPropertyName("type")]
public required string Type { get; init; }
/// <summary>
/// Count of this assumption type.
/// </summary>
[JsonPropertyName("count")]
public required int Count { get; init; }
/// <summary>
/// Total penalty from this assumption type.
/// </summary>
[JsonPropertyName("totalPenalty")]
public required double TotalPenalty { get; init; }
}
/// <summary>
/// Summary of an unknown.
/// </summary>
public sealed record UnknownSummary
{
/// <summary>
/// Unknown type.
/// </summary>
[JsonPropertyName("type")]
public required string Type { get; init; }
/// <summary>
/// Count of this unknown type.
/// </summary>
[JsonPropertyName("count")]
public required int Count { get; init; }
/// <summary>
/// Score impact from this unknown type.
/// </summary>
[JsonPropertyName("scoreImpact")]
public required int ScoreImpact { get; init; }
}
/// <summary>
/// Builder for score attestation statements.
/// </summary>
public sealed class ScoreAttestationBuilder
{
private readonly ScoreAttestationStatement _statement;
private ScoreAttestationBuilder(ScoreAttestationStatement statement)
{
_statement = statement;
}
/// <summary>
/// Creates a new builder.
/// </summary>
public static ScoreAttestationBuilder Create(
string subjectDigest,
int overallScore,
double confidence,
ScoreBreakdown breakdown,
ScoringPolicyRef policy,
ScoringInputs inputs)
{
return new ScoreAttestationBuilder(new ScoreAttestationStatement
{
ScoredAt = DateTimeOffset.UtcNow,
SubjectDigest = subjectDigest,
OverallScore = overallScore,
Confidence = confidence,
Grade = ComputeGrade(overallScore),
Breakdown = breakdown,
Policy = policy,
Inputs = inputs
});
}
/// <summary>
/// Sets the subject name.
/// </summary>
public ScoreAttestationBuilder WithSubjectName(string name)
{
return new ScoreAttestationBuilder(_statement with { SubjectName = name });
}
/// <summary>
/// Adds assumptions.
/// </summary>
public ScoreAttestationBuilder WithAssumptions(IEnumerable<AssumptionSummary> assumptions)
{
return new ScoreAttestationBuilder(_statement with
{
Assumptions = assumptions.ToImmutableArray()
});
}
/// <summary>
/// Adds unknowns.
/// </summary>
public ScoreAttestationBuilder WithUnknowns(IEnumerable<UnknownSummary> unknowns)
{
return new ScoreAttestationBuilder(_statement with
{
Unknowns = unknowns.ToImmutableArray()
});
}
/// <summary>
/// Builds the statement.
/// </summary>
public ScoreAttestationStatement Build()
{
// Compute statement hash
var canonical = StellaOps.Canonical.Json.CanonJson.Canonicalize(_statement);
var hash = StellaOps.Canonical.Json.CanonJson.Sha256Prefixed(canonical);
return _statement with { StatementHash = hash };
}
private static string ComputeGrade(int score) => score switch
{
>= 90 => "A",
>= 80 => "B",
>= 70 => "C",
>= 60 => "D",
_ => "F"
};
}

View File

@@ -0,0 +1,477 @@
// -----------------------------------------------------------------------------
// ScoringRulesSnapshot.cs
// Sprint: SPRINT_3850_0001_0001 (Competitive Gap Closure)
// Task: E-OFF-003 - Scoring rules snapshot with digest
// Description: Immutable snapshot of scoring rules for offline/audit use.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Scoring;
/// <summary>
/// Immutable snapshot of scoring rules with cryptographic digest.
/// Used for offline operation and audit trail.
/// </summary>
public sealed record ScoringRulesSnapshot
{
/// <summary>
/// Snapshot identifier.
/// </summary>
[JsonPropertyName("id")]
public required string Id { get; init; }
/// <summary>
/// Snapshot version.
/// </summary>
[JsonPropertyName("version")]
public required int Version { get; init; }
/// <summary>
/// When the snapshot was created.
/// </summary>
[JsonPropertyName("createdAt")]
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Content digest of the snapshot (sha256:...).
/// </summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>
/// Description of this snapshot.
/// </summary>
[JsonPropertyName("description")]
public string? Description { get; init; }
/// <summary>
/// Source policy IDs that contributed to this snapshot.
/// </summary>
[JsonPropertyName("sourcePolicies")]
public ImmutableArray<string> SourcePolicies { get; init; } = [];
/// <summary>
/// Scoring weights configuration.
/// </summary>
[JsonPropertyName("weights")]
public required ScoringWeights Weights { get; init; }
/// <summary>
/// Thresholds for grade boundaries.
/// </summary>
[JsonPropertyName("thresholds")]
public required GradeThresholds Thresholds { get; init; }
/// <summary>
/// Severity multipliers.
/// </summary>
[JsonPropertyName("severityMultipliers")]
public required SeverityMultipliers SeverityMultipliers { get; init; }
/// <summary>
/// Assumption penalty configuration.
/// </summary>
[JsonPropertyName("assumptionPenalties")]
public required AssumptionPenaltyConfig AssumptionPenalties { get; init; }
/// <summary>
/// Trust source weights.
/// </summary>
[JsonPropertyName("trustSourceWeights")]
public required TrustSourceWeightConfig TrustSourceWeights { get; init; }
/// <summary>
/// Freshness decay configuration.
/// </summary>
[JsonPropertyName("freshnessDecay")]
public required FreshnessDecayConfig FreshnessDecay { get; init; }
/// <summary>
/// Custom rules (Rego/SPL).
/// </summary>
[JsonPropertyName("customRules")]
public ImmutableArray<CustomScoringRule> CustomRules { get; init; } = [];
/// <summary>
/// Whether this snapshot is signed.
/// </summary>
[JsonPropertyName("isSigned")]
public bool IsSigned { get; init; }
/// <summary>
/// Signature if signed.
/// </summary>
[JsonPropertyName("signature")]
public string? Signature { get; init; }
/// <summary>
/// Key ID used for signing.
/// </summary>
[JsonPropertyName("signingKeyId")]
public string? SigningKeyId { get; init; }
}
/// <summary>
/// Scoring category weights (must sum to 1.0).
/// </summary>
public sealed record ScoringWeights
{
/// <summary>
/// Weight for vulnerability severity (0.0 to 1.0).
/// </summary>
[JsonPropertyName("vulnerability")]
public double Vulnerability { get; init; } = 0.25;
/// <summary>
/// Weight for exploitability factors (0.0 to 1.0).
/// </summary>
[JsonPropertyName("exploitability")]
public double Exploitability { get; init; } = 0.20;
/// <summary>
/// Weight for reachability analysis (0.0 to 1.0).
/// </summary>
[JsonPropertyName("reachability")]
public double Reachability { get; init; } = 0.20;
/// <summary>
/// Weight for policy compliance (0.0 to 1.0).
/// </summary>
[JsonPropertyName("compliance")]
public double Compliance { get; init; } = 0.15;
/// <summary>
/// Weight for supply chain factors (0.0 to 1.0).
/// </summary>
[JsonPropertyName("supplyChain")]
public double SupplyChain { get; init; } = 0.10;
/// <summary>
/// Weight for mitigation/VEX status (0.0 to 1.0).
/// </summary>
[JsonPropertyName("mitigation")]
public double Mitigation { get; init; } = 0.10;
/// <summary>
/// Validates that weights sum to 1.0.
/// </summary>
public bool Validate()
{
var sum = Vulnerability + Exploitability + Reachability +
Compliance + SupplyChain + Mitigation;
return Math.Abs(sum - 1.0) < 0.001;
}
}
/// <summary>
/// Grade threshold configuration.
/// </summary>
public sealed record GradeThresholds
{
/// <summary>
/// Minimum score for grade A.
/// </summary>
[JsonPropertyName("a")]
public int A { get; init; } = 90;
/// <summary>
/// Minimum score for grade B.
/// </summary>
[JsonPropertyName("b")]
public int B { get; init; } = 80;
/// <summary>
/// Minimum score for grade C.
/// </summary>
[JsonPropertyName("c")]
public int C { get; init; } = 70;
/// <summary>
/// Minimum score for grade D.
/// </summary>
[JsonPropertyName("d")]
public int D { get; init; } = 60;
// Below D threshold is grade F
/// <summary>
/// Gets the grade for a score.
/// </summary>
public string GetGrade(int score) => score switch
{
_ when score >= A => "A",
_ when score >= B => "B",
_ when score >= C => "C",
_ when score >= D => "D",
_ => "F"
};
}
/// <summary>
/// Severity multipliers for scoring.
/// </summary>
public sealed record SeverityMultipliers
{
/// <summary>
/// Multiplier for critical severity.
/// </summary>
[JsonPropertyName("critical")]
public double Critical { get; init; } = 1.5;
/// <summary>
/// Multiplier for high severity.
/// </summary>
[JsonPropertyName("high")]
public double High { get; init; } = 1.2;
/// <summary>
/// Multiplier for medium severity.
/// </summary>
[JsonPropertyName("medium")]
public double Medium { get; init; } = 1.0;
/// <summary>
/// Multiplier for low severity.
/// </summary>
[JsonPropertyName("low")]
public double Low { get; init; } = 0.8;
/// <summary>
/// Multiplier for informational.
/// </summary>
[JsonPropertyName("informational")]
public double Informational { get; init; } = 0.5;
/// <summary>
/// Gets multiplier for a severity string.
/// </summary>
public double GetMultiplier(string severity) => severity?.ToUpperInvariant() switch
{
"CRITICAL" => Critical,
"HIGH" => High,
"MEDIUM" => Medium,
"LOW" => Low,
"INFORMATIONAL" or "INFO" => Informational,
_ => Medium
};
}
/// <summary>
/// Freshness decay configuration.
/// </summary>
public sealed record FreshnessDecayConfig
{
/// <summary>
/// Hours after which SBOM starts to decay.
/// </summary>
[JsonPropertyName("sbomDecayStartHours")]
public int SbomDecayStartHours { get; init; } = 168; // 7 days
/// <summary>
/// Hours after which feeds start to decay.
/// </summary>
[JsonPropertyName("feedDecayStartHours")]
public int FeedDecayStartHours { get; init; } = 24;
/// <summary>
/// Decay rate per hour after start.
/// </summary>
[JsonPropertyName("decayRatePerHour")]
public double DecayRatePerHour { get; init; } = 0.001;
/// <summary>
/// Minimum freshness score.
/// </summary>
[JsonPropertyName("minimumFreshness")]
public double MinimumFreshness { get; init; } = 0.5;
}
/// <summary>
/// Custom scoring rule.
/// </summary>
public sealed record CustomScoringRule
{
/// <summary>
/// Rule identifier.
/// </summary>
[JsonPropertyName("id")]
public required string Id { get; init; }
/// <summary>
/// Rule name.
/// </summary>
[JsonPropertyName("name")]
public required string Name { get; init; }
/// <summary>
/// Rule language (rego, spl).
/// </summary>
[JsonPropertyName("language")]
public required string Language { get; init; }
/// <summary>
/// Rule content.
/// </summary>
[JsonPropertyName("content")]
public required string Content { get; init; }
/// <summary>
/// Rule priority (higher = evaluated first).
/// </summary>
[JsonPropertyName("priority")]
public int Priority { get; init; }
/// <summary>
/// Whether rule is enabled.
/// </summary>
[JsonPropertyName("enabled")]
public bool Enabled { get; init; } = true;
}
/// <summary>
/// Builder for scoring rules snapshots.
/// </summary>
public sealed class ScoringRulesSnapshotBuilder
{
private ScoringRulesSnapshot _snapshot;
private ScoringRulesSnapshotBuilder(ScoringRulesSnapshot snapshot)
{
_snapshot = snapshot;
}
/// <summary>
/// Creates a new builder with defaults.
/// </summary>
public static ScoringRulesSnapshotBuilder Create(string id, int version)
{
return new ScoringRulesSnapshotBuilder(new ScoringRulesSnapshot
{
Id = id,
Version = version,
CreatedAt = DateTimeOffset.UtcNow,
Digest = "", // Will be computed on build
Weights = new ScoringWeights(),
Thresholds = new GradeThresholds(),
SeverityMultipliers = new SeverityMultipliers(),
AssumptionPenalties = new AssumptionPenaltyConfig(),
TrustSourceWeights = new TrustSourceWeightConfig(),
FreshnessDecay = new FreshnessDecayConfig()
});
}
public ScoringRulesSnapshotBuilder WithDescription(string description)
{
_snapshot = _snapshot with { Description = description };
return this;
}
public ScoringRulesSnapshotBuilder WithWeights(ScoringWeights weights)
{
_snapshot = _snapshot with { Weights = weights };
return this;
}
public ScoringRulesSnapshotBuilder WithThresholds(GradeThresholds thresholds)
{
_snapshot = _snapshot with { Thresholds = thresholds };
return this;
}
public ScoringRulesSnapshotBuilder WithSeverityMultipliers(SeverityMultipliers multipliers)
{
_snapshot = _snapshot with { SeverityMultipliers = multipliers };
return this;
}
public ScoringRulesSnapshotBuilder WithAssumptionPenalties(AssumptionPenaltyConfig penalties)
{
_snapshot = _snapshot with { AssumptionPenalties = penalties };
return this;
}
public ScoringRulesSnapshotBuilder WithTrustSourceWeights(TrustSourceWeightConfig weights)
{
_snapshot = _snapshot with { TrustSourceWeights = weights };
return this;
}
public ScoringRulesSnapshotBuilder WithFreshnessDecay(FreshnessDecayConfig decay)
{
_snapshot = _snapshot with { FreshnessDecay = decay };
return this;
}
public ScoringRulesSnapshotBuilder WithCustomRules(IEnumerable<CustomScoringRule> rules)
{
_snapshot = _snapshot with { CustomRules = rules.ToImmutableArray() };
return this;
}
public ScoringRulesSnapshotBuilder WithSourcePolicies(IEnumerable<string> policyIds)
{
_snapshot = _snapshot with { SourcePolicies = policyIds.ToImmutableArray() };
return this;
}
/// <summary>
/// Builds the snapshot with computed digest.
/// </summary>
public ScoringRulesSnapshot Build()
{
// Validate weights
if (!_snapshot.Weights.Validate())
{
throw new InvalidOperationException("Scoring weights must sum to 1.0");
}
// Compute digest
var canonical = StellaOps.Canonical.Json.CanonJson.Canonicalize(_snapshot with { Digest = "" });
var digest = StellaOps.Canonical.Json.CanonJson.Sha256Prefixed(canonical);
return _snapshot with { Digest = digest };
}
}
/// <summary>
/// Service for managing scoring rules snapshots.
/// </summary>
public interface IScoringRulesSnapshotService
{
/// <summary>
/// Creates a new snapshot from current rules.
/// </summary>
Task<ScoringRulesSnapshot> CreateSnapshotAsync(
string description,
CancellationToken ct = default);
/// <summary>
/// Gets a snapshot by ID.
/// </summary>
Task<ScoringRulesSnapshot?> GetSnapshotAsync(
string id,
CancellationToken ct = default);
/// <summary>
/// Gets the latest snapshot.
/// </summary>
Task<ScoringRulesSnapshot?> GetLatestSnapshotAsync(
CancellationToken ct = default);
/// <summary>
/// Validates a snapshot against its digest.
/// </summary>
Task<bool> ValidateSnapshotAsync(
ScoringRulesSnapshot snapshot,
CancellationToken ct = default);
/// <summary>
/// Lists all snapshots.
/// </summary>
Task<IReadOnlyList<ScoringRulesSnapshot>> ListSnapshotsAsync(
int limit = 100,
CancellationToken ct = default);
}

View File

@@ -0,0 +1,412 @@
// -----------------------------------------------------------------------------
// TrustSourceWeights.cs
// Sprint: SPRINT_3850_0001_0001 (Competitive Gap Closure)
// Task: D-SCORE-003 - Configurable trust source weights
// Description: Configurable weights for different vulnerability data sources.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Scoring;
/// <summary>
/// Known vulnerability data sources.
/// </summary>
public static class KnownSources
{
public const string NvdNist = "nvd-nist";
public const string CisaKev = "cisa-kev";
public const string Osv = "osv";
public const string GithubAdvisory = "github-advisory";
public const string VendorAdvisory = "vendor";
public const string RedHatCve = "redhat-cve";
public const string DebianSecurity = "debian-security";
public const string AlpineSecdb = "alpine-secdb";
public const string UbuntuOval = "ubuntu-oval";
public const string Epss = "epss";
public const string ExploitDb = "exploit-db";
public const string VulnDb = "vulndb";
public const string Snyk = "snyk";
public const string Internal = "internal";
}
/// <summary>
/// Configuration for trust source weights.
/// </summary>
public sealed record TrustSourceWeightConfig
{
/// <summary>
/// Weights by source ID (0.0 to 1.0).
/// </summary>
[JsonPropertyName("weights")]
public ImmutableDictionary<string, double> Weights { get; init; } =
DefaultWeights;
/// <summary>
/// Default weight for unknown sources.
/// </summary>
[JsonPropertyName("defaultWeight")]
public double DefaultWeight { get; init; } = 0.5;
/// <summary>
/// Source categories and their base weights.
/// </summary>
[JsonPropertyName("categoryWeights")]
public ImmutableDictionary<SourceCategory, double> CategoryWeights { get; init; } =
DefaultCategoryWeights;
/// <summary>
/// Whether to boost sources with corroborating data.
/// </summary>
[JsonPropertyName("enableCorroborationBoost")]
public bool EnableCorroborationBoost { get; init; } = true;
/// <summary>
/// Boost multiplier when multiple sources agree.
/// </summary>
[JsonPropertyName("corroborationBoostFactor")]
public double CorroborationBoostFactor { get; init; } = 1.1;
/// <summary>
/// Maximum number of corroborating sources to count.
/// </summary>
[JsonPropertyName("maxCorroborationCount")]
public int MaxCorroborationCount { get; init; } = 3;
/// <summary>
/// Default source weights.
/// </summary>
public static readonly ImmutableDictionary<string, double> DefaultWeights =
new Dictionary<string, double>
{
[KnownSources.NvdNist] = 0.90,
[KnownSources.CisaKev] = 0.98,
[KnownSources.Osv] = 0.75,
[KnownSources.GithubAdvisory] = 0.72,
[KnownSources.VendorAdvisory] = 0.88,
[KnownSources.RedHatCve] = 0.85,
[KnownSources.DebianSecurity] = 0.82,
[KnownSources.AlpineSecdb] = 0.80,
[KnownSources.UbuntuOval] = 0.82,
[KnownSources.Epss] = 0.70,
[KnownSources.ExploitDb] = 0.65,
[KnownSources.VulnDb] = 0.68,
[KnownSources.Snyk] = 0.70,
[KnownSources.Internal] = 0.60
}.ToImmutableDictionary();
/// <summary>
/// Default category weights.
/// </summary>
public static readonly ImmutableDictionary<SourceCategory, double> DefaultCategoryWeights =
new Dictionary<SourceCategory, double>
{
[SourceCategory.Government] = 0.95,
[SourceCategory.Vendor] = 0.85,
[SourceCategory.Coordinator] = 0.80,
[SourceCategory.Distro] = 0.82,
[SourceCategory.Community] = 0.70,
[SourceCategory.Commercial] = 0.68,
[SourceCategory.Internal] = 0.60
}.ToImmutableDictionary();
}
/// <summary>
/// Source categories.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum SourceCategory
{
/// <summary>Government agency (NIST, CISA, BSI).</summary>
Government,
/// <summary>Software vendor.</summary>
Vendor,
/// <summary>Vulnerability coordinator (CERT).</summary>
Coordinator,
/// <summary>Linux distribution security team.</summary>
Distro,
/// <summary>Open source community.</summary>
Community,
/// <summary>Commercial security vendor.</summary>
Commercial,
/// <summary>Internal organization sources.</summary>
Internal
}
/// <summary>
/// Metadata about a vulnerability source.
/// </summary>
public sealed record SourceMetadata
{
/// <summary>
/// Source identifier.
/// </summary>
[JsonPropertyName("id")]
public required string Id { get; init; }
/// <summary>
/// Source category.
/// </summary>
[JsonPropertyName("category")]
public required SourceCategory Category { get; init; }
/// <summary>
/// When data was fetched from this source.
/// </summary>
[JsonPropertyName("fetchedAt")]
public DateTimeOffset? FetchedAt { get; init; }
/// <summary>
/// Source data version/timestamp.
/// </summary>
[JsonPropertyName("dataVersion")]
public string? DataVersion { get; init; }
/// <summary>
/// Whether data is signed.
/// </summary>
[JsonPropertyName("isSigned")]
public bool IsSigned { get; init; }
}
/// <summary>
/// Finding data from a source.
/// </summary>
public sealed record SourceFinding
{
/// <summary>
/// Source metadata.
/// </summary>
[JsonPropertyName("source")]
public required SourceMetadata Source { get; init; }
/// <summary>
/// Severity from this source.
/// </summary>
[JsonPropertyName("severity")]
public string? Severity { get; init; }
/// <summary>
/// CVSS score from this source.
/// </summary>
[JsonPropertyName("cvssScore")]
public double? CvssScore { get; init; }
/// <summary>
/// VEX status from this source.
/// </summary>
[JsonPropertyName("vexStatus")]
public string? VexStatus { get; init; }
/// <summary>
/// Whether this source confirms exploitability.
/// </summary>
[JsonPropertyName("confirmsExploit")]
public bool? ConfirmsExploit { get; init; }
/// <summary>
/// Fix version from this source.
/// </summary>
[JsonPropertyName("fixVersion")]
public string? FixVersion { get; init; }
}
/// <summary>
/// Result of merging findings from multiple sources.
/// </summary>
public sealed record WeightedMergeResult
{
/// <summary>
/// Merged severity (highest trust source).
/// </summary>
[JsonPropertyName("severity")]
public string? Severity { get; init; }
/// <summary>
/// Weighted average CVSS score.
/// </summary>
[JsonPropertyName("cvssScore")]
public double? CvssScore { get; init; }
/// <summary>
/// VEX status from highest trust source.
/// </summary>
[JsonPropertyName("vexStatus")]
public string? VexStatus { get; init; }
/// <summary>
/// Fix version (earliest reported).
/// </summary>
[JsonPropertyName("fixVersion")]
public string? FixVersion { get; init; }
/// <summary>
/// Overall confidence in the merged result.
/// </summary>
[JsonPropertyName("confidence")]
public required double Confidence { get; init; }
/// <summary>
/// Sources that contributed (ordered by weight).
/// </summary>
[JsonPropertyName("contributingSources")]
public ImmutableArray<string> ContributingSources { get; init; } = [];
/// <summary>
/// Whether sources corroborated each other.
/// </summary>
[JsonPropertyName("corroborated")]
public bool Corroborated { get; init; }
/// <summary>
/// Corroboration boost applied.
/// </summary>
[JsonPropertyName("corroborationBoost")]
public double CorroborationBoost { get; init; }
}
/// <summary>
/// Service for weighted source merging.
/// </summary>
public sealed class TrustSourceWeightService
{
private readonly TrustSourceWeightConfig _config;
public TrustSourceWeightService(TrustSourceWeightConfig? config = null)
{
_config = config ?? new TrustSourceWeightConfig();
}
/// <summary>
/// Gets the effective weight for a source.
/// </summary>
public double GetSourceWeight(SourceMetadata source)
{
// Check for explicit weight
if (_config.Weights.TryGetValue(source.Id, out var explicitWeight))
{
return ApplyModifiers(explicitWeight, source);
}
// Fall back to category weight
if (_config.CategoryWeights.TryGetValue(source.Category, out var categoryWeight))
{
return ApplyModifiers(categoryWeight, source);
}
return ApplyModifiers(_config.DefaultWeight, source);
}
private double ApplyModifiers(double baseWeight, SourceMetadata source)
{
var weight = baseWeight;
// Boost for signed data
if (source.IsSigned)
{
weight *= 1.05;
}
// Penalty for stale data (>7 days old)
if (source.FetchedAt.HasValue)
{
var age = DateTimeOffset.UtcNow - source.FetchedAt.Value;
if (age.TotalDays > 7)
{
weight *= 0.95;
}
if (age.TotalDays > 30)
{
weight *= 0.90;
}
}
return Math.Clamp(weight, 0.0, 1.0);
}
/// <summary>
/// Merges findings from multiple sources using weights.
/// </summary>
public WeightedMergeResult MergeFindings(IEnumerable<SourceFinding> findings)
{
var findingList = findings.ToList();
if (findingList.Count == 0)
{
return new WeightedMergeResult { Confidence = 0 };
}
// Sort by weight descending
var weighted = findingList
.Select(f => (Finding: f, Weight: GetSourceWeight(f.Source)))
.OrderByDescending(x => x.Weight)
.ToList();
var topFinding = weighted[0].Finding;
var topWeight = weighted[0].Weight;
// Calculate weighted CVSS
double? weightedCvss = null;
var cvssFindings = weighted.Where(w => w.Finding.CvssScore.HasValue).ToList();
if (cvssFindings.Count > 0)
{
var totalWeight = cvssFindings.Sum(w => w.Weight);
weightedCvss = cvssFindings.Sum(w => w.Finding.CvssScore!.Value * w.Weight) / totalWeight;
}
// Check for corroboration
var corroborated = false;
var corroborationBoost = 0.0;
if (_config.EnableCorroborationBoost && weighted.Count > 1)
{
// Check if multiple sources agree on severity
var severities = weighted
.Where(w => !string.IsNullOrEmpty(w.Finding.Severity))
.Select(w => w.Finding.Severity)
.Distinct()
.ToList();
if (severities.Count == 1)
{
var corroboratingCount = Math.Min(
weighted.Count(w => w.Finding.Severity == severities[0]),
_config.MaxCorroborationCount);
if (corroboratingCount > 1)
{
corroborated = true;
corroborationBoost = Math.Pow(
_config.CorroborationBoostFactor,
corroboratingCount - 1) - 1.0;
}
}
}
var confidence = Math.Clamp(topWeight + corroborationBoost, 0.0, 1.0);
return new WeightedMergeResult
{
Severity = topFinding.Severity,
CvssScore = weightedCvss,
VexStatus = topFinding.VexStatus,
FixVersion = findingList
.Where(f => !string.IsNullOrEmpty(f.FixVersion))
.OrderBy(f => f.FixVersion)
.FirstOrDefault()?.FixVersion,
Confidence = confidence,
ContributingSources = weighted.Select(w => w.Finding.Source.Id).ToImmutableArray(),
Corroborated = corroborated,
CorroborationBoost = corroborationBoost
};
}
}

View File

@@ -0,0 +1,429 @@
// -----------------------------------------------------------------------------
// JurisdictionTrustRules.cs
// Sprint: SPRINT_3850_0001_0001 (Competitive Gap Closure)
// Task: VEX-L-003 - Jurisdiction-specific trust rules (US/EU/RU/CN)
// Description: VEX source trust rules by regulatory jurisdiction.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Vex;
/// <summary>
/// Jurisdiction codes for regulatory regions.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum Jurisdiction
{
/// <summary>United States (FDA, NIST, CISA).</summary>
US,
/// <summary>European Union (ENISA, BSI, ANSSI).</summary>
EU,
/// <summary>Russian Federation (FSTEC, FSB).</summary>
RU,
/// <summary>China (CNVD, CNNVD).</summary>
CN,
/// <summary>Japan (JPCERT, IPA).</summary>
JP,
/// <summary>Global (no specific jurisdiction).</summary>
Global
}
/// <summary>
/// VEX source identity.
/// </summary>
public sealed record VexSource
{
/// <summary>
/// Unique source identifier.
/// </summary>
[JsonPropertyName("id")]
public required string Id { get; init; }
/// <summary>
/// Human-readable source name.
/// </summary>
[JsonPropertyName("name")]
public required string Name { get; init; }
/// <summary>
/// Source type (vendor, coordinator, government, community).
/// </summary>
[JsonPropertyName("type")]
public required VexSourceType Type { get; init; }
/// <summary>
/// Jurisdictions where this source is authoritative.
/// </summary>
[JsonPropertyName("jurisdictions")]
public ImmutableArray<Jurisdiction> Jurisdictions { get; init; } = [];
/// <summary>
/// Base trust weight (0.0 to 1.0).
/// </summary>
[JsonPropertyName("baseTrustWeight")]
public double BaseTrustWeight { get; init; } = 0.5;
/// <summary>
/// Whether this source is a government authority.
/// </summary>
[JsonPropertyName("isGovernmentAuthority")]
public bool IsGovernmentAuthority { get; init; }
/// <summary>
/// Signing key identifiers for this source.
/// </summary>
[JsonPropertyName("keyIds")]
public ImmutableArray<string> KeyIds { get; init; } = [];
}
/// <summary>
/// VEX source types.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum VexSourceType
{
/// <summary>Product vendor.</summary>
Vendor,
/// <summary>Vulnerability coordinator (CERT).</summary>
Coordinator,
/// <summary>Government authority.</summary>
Government,
/// <summary>Community/open source.</summary>
Community,
/// <summary>Commercial security vendor.</summary>
Commercial
}
/// <summary>
/// Jurisdiction-specific trust configuration.
/// </summary>
public sealed record JurisdictionTrustConfig
{
/// <summary>
/// Jurisdiction this config applies to.
/// </summary>
[JsonPropertyName("jurisdiction")]
public required Jurisdiction Jurisdiction { get; init; }
/// <summary>
/// Ordered list of preferred sources (highest priority first).
/// </summary>
[JsonPropertyName("preferredSources")]
public ImmutableArray<string> PreferredSources { get; init; } = [];
/// <summary>
/// Trust weight overrides for specific sources.
/// </summary>
[JsonPropertyName("trustWeightOverrides")]
public ImmutableDictionary<string, double> TrustWeightOverrides { get; init; } =
ImmutableDictionary<string, double>.Empty;
/// <summary>
/// Whether government sources must be preferred.
/// </summary>
[JsonPropertyName("preferGovernmentSources")]
public bool PreferGovernmentSources { get; init; }
/// <summary>
/// Minimum trust weight for acceptance.
/// </summary>
[JsonPropertyName("minimumTrustWeight")]
public double MinimumTrustWeight { get; init; } = 0.3;
/// <summary>
/// Required source types for VEX acceptance.
/// </summary>
[JsonPropertyName("requiredSourceTypes")]
public ImmutableArray<VexSourceType> RequiredSourceTypes { get; init; } = [];
}
/// <summary>
/// Service for jurisdiction-aware VEX trust evaluation.
/// </summary>
public interface IJurisdictionTrustService
{
/// <summary>
/// Gets the effective trust weight for a source in a jurisdiction.
/// </summary>
double GetEffectiveTrustWeight(VexSource source, Jurisdiction jurisdiction);
/// <summary>
/// Ranks sources by trust for a jurisdiction.
/// </summary>
IReadOnlyList<VexSource> RankSourcesByTrust(
IEnumerable<VexSource> sources,
Jurisdiction jurisdiction);
/// <summary>
/// Validates that a VEX decision meets jurisdiction requirements.
/// </summary>
JurisdictionValidationResult ValidateForJurisdiction(
VexDecisionContext decision,
Jurisdiction jurisdiction);
}
/// <summary>
/// Context for a VEX decision being validated.
/// </summary>
public sealed record VexDecisionContext
{
/// <summary>
/// VEX status.
/// </summary>
[JsonPropertyName("status")]
public required string Status { get; init; }
/// <summary>
/// Source that provided this decision.
/// </summary>
[JsonPropertyName("source")]
public required VexSource Source { get; init; }
/// <summary>
/// Justification provided.
/// </summary>
[JsonPropertyName("justification")]
public string? Justification { get; init; }
/// <summary>
/// When the decision was made.
/// </summary>
[JsonPropertyName("timestamp")]
public required DateTimeOffset Timestamp { get; init; }
/// <summary>
/// Whether the decision is cryptographically signed.
/// </summary>
[JsonPropertyName("isSigned")]
public bool IsSigned { get; init; }
}
/// <summary>
/// Result of jurisdiction validation.
/// </summary>
public sealed record JurisdictionValidationResult
{
/// <summary>
/// Whether the decision is valid for the jurisdiction.
/// </summary>
[JsonPropertyName("isValid")]
public required bool IsValid { get; init; }
/// <summary>
/// Effective trust weight.
/// </summary>
[JsonPropertyName("effectiveTrustWeight")]
public required double EffectiveTrustWeight { get; init; }
/// <summary>
/// Validation issues.
/// </summary>
[JsonPropertyName("issues")]
public ImmutableArray<string> Issues { get; init; } = [];
/// <summary>
/// Suggested actions to improve trust.
/// </summary>
[JsonPropertyName("suggestions")]
public ImmutableArray<string> Suggestions { get; init; } = [];
}
/// <summary>
/// Default implementation of jurisdiction trust service.
/// </summary>
public sealed class JurisdictionTrustService : IJurisdictionTrustService
{
private readonly IReadOnlyDictionary<Jurisdiction, JurisdictionTrustConfig> _configs;
/// <summary>
/// Default jurisdiction configurations.
/// </summary>
public static readonly ImmutableDictionary<Jurisdiction, JurisdictionTrustConfig> DefaultConfigs =
new Dictionary<Jurisdiction, JurisdictionTrustConfig>
{
[Jurisdiction.US] = new()
{
Jurisdiction = Jurisdiction.US,
PreferredSources = ["nist-nvd", "cisa-kev", "fda-medical", "vendor"],
PreferGovernmentSources = true,
MinimumTrustWeight = 0.4,
TrustWeightOverrides = new Dictionary<string, double>
{
["nist-nvd"] = 0.95,
["cisa-kev"] = 0.98,
["vendor"] = 0.85
}.ToImmutableDictionary()
},
[Jurisdiction.EU] = new()
{
Jurisdiction = Jurisdiction.EU,
PreferredSources = ["enisa", "bsi", "anssi", "cert-eu", "vendor"],
PreferGovernmentSources = true,
MinimumTrustWeight = 0.4,
TrustWeightOverrides = new Dictionary<string, double>
{
["enisa"] = 0.95,
["bsi"] = 0.92,
["anssi"] = 0.92,
["vendor"] = 0.85
}.ToImmutableDictionary()
},
[Jurisdiction.RU] = new()
{
Jurisdiction = Jurisdiction.RU,
PreferredSources = ["fstec", "fsb-cert", "vendor"],
PreferGovernmentSources = true,
MinimumTrustWeight = 0.5,
TrustWeightOverrides = new Dictionary<string, double>
{
["fstec"] = 0.98,
["vendor"] = 0.80
}.ToImmutableDictionary()
},
[Jurisdiction.CN] = new()
{
Jurisdiction = Jurisdiction.CN,
PreferredSources = ["cnvd", "cnnvd", "vendor"],
PreferGovernmentSources = true,
MinimumTrustWeight = 0.5,
TrustWeightOverrides = new Dictionary<string, double>
{
["cnvd"] = 0.95,
["cnnvd"] = 0.95,
["vendor"] = 0.80
}.ToImmutableDictionary()
},
[Jurisdiction.Global] = new()
{
Jurisdiction = Jurisdiction.Global,
PreferredSources = ["vendor", "osv", "github-advisory"],
PreferGovernmentSources = false,
MinimumTrustWeight = 0.3,
TrustWeightOverrides = new Dictionary<string, double>
{
["vendor"] = 0.90,
["osv"] = 0.75,
["github-advisory"] = 0.70
}.ToImmutableDictionary()
}
}.ToImmutableDictionary();
public JurisdictionTrustService(
IReadOnlyDictionary<Jurisdiction, JurisdictionTrustConfig>? configs = null)
{
_configs = configs ?? DefaultConfigs;
}
public double GetEffectiveTrustWeight(VexSource source, Jurisdiction jurisdiction)
{
if (!_configs.TryGetValue(jurisdiction, out var config))
{
config = DefaultConfigs[Jurisdiction.Global];
}
// Check for explicit override
if (config.TrustWeightOverrides.TryGetValue(source.Id, out var overrideWeight))
{
return overrideWeight;
}
var weight = source.BaseTrustWeight;
// Bonus for government sources in jurisdictions that prefer them
if (config.PreferGovernmentSources && source.IsGovernmentAuthority)
{
weight *= 1.2;
}
// Bonus for sources that list this jurisdiction as authoritative
if (source.Jurisdictions.Contains(jurisdiction))
{
weight *= 1.1;
}
// Penalty for non-preferred sources
var preferenceIndex = config.PreferredSources
.Select((id, i) => (id, i))
.FirstOrDefault(x => x.id == source.Id).i;
if (preferenceIndex > 0)
{
weight *= 1.0 - (preferenceIndex * 0.05);
}
return Math.Clamp(weight, 0.0, 1.0);
}
public IReadOnlyList<VexSource> RankSourcesByTrust(
IEnumerable<VexSource> sources,
Jurisdiction jurisdiction)
{
return sources
.OrderByDescending(s => GetEffectiveTrustWeight(s, jurisdiction))
.ToList();
}
public JurisdictionValidationResult ValidateForJurisdiction(
VexDecisionContext decision,
Jurisdiction jurisdiction)
{
if (!_configs.TryGetValue(jurisdiction, out var config))
{
config = DefaultConfigs[Jurisdiction.Global];
}
var issues = new List<string>();
var suggestions = new List<string>();
var effectiveWeight = GetEffectiveTrustWeight(decision.Source, jurisdiction);
// Check minimum trust weight
if (effectiveWeight < config.MinimumTrustWeight)
{
issues.Add($"Source trust weight ({effectiveWeight:P0}) below minimum ({config.MinimumTrustWeight:P0})");
suggestions.Add("Consider obtaining VEX from a higher-trust source");
}
// Check government preference
if (config.PreferGovernmentSources && !decision.Source.IsGovernmentAuthority)
{
suggestions.Add($"Jurisdiction {jurisdiction} prefers government sources");
}
// Check signature requirement for high-trust decisions
if (effectiveWeight >= 0.8 && !decision.IsSigned)
{
issues.Add("High-trust VEX decisions should be cryptographically signed");
suggestions.Add("Request signed VEX statement from source");
}
// Check required source types
if (config.RequiredSourceTypes.Length > 0 &&
!config.RequiredSourceTypes.Contains(decision.Source.Type))
{
issues.Add($"Source type {decision.Source.Type} not in required types");
}
return new JurisdictionValidationResult
{
IsValid = issues.Count == 0,
EffectiveTrustWeight = effectiveWeight,
Issues = issues.ToImmutableArray(),
Suggestions = suggestions.ToImmutableArray()
};
}
}

View File

@@ -0,0 +1,571 @@
// -----------------------------------------------------------------------------
// VexCustomerOverride.cs
// Sprint: SPRINT_3850_0001_0001 (Competitive Gap Closure)
// Task: VEX-L-004 - Customer override with signed audit trail
// Description: Customer-initiated VEX overrides with cryptographic audit trail.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Policy.Vex;
/// <summary>
/// Customer-initiated VEX override with full audit trail.
/// </summary>
public sealed record VexCustomerOverride
{
/// <summary>
/// Unique override identifier.
/// </summary>
[JsonPropertyName("id")]
public required string Id { get; init; }
/// <summary>
/// CVE or vulnerability ID being overridden.
/// </summary>
[JsonPropertyName("vulnerabilityId")]
public required string VulnerabilityId { get; init; }
/// <summary>
/// Product or component PURL.
/// </summary>
[JsonPropertyName("productPurl")]
public required string ProductPurl { get; init; }
/// <summary>
/// Original VEX status from source.
/// </summary>
[JsonPropertyName("originalStatus")]
public required string OriginalStatus { get; init; }
/// <summary>
/// Overridden VEX status.
/// </summary>
[JsonPropertyName("overrideStatus")]
public required string OverrideStatus { get; init; }
/// <summary>
/// Justification for the override.
/// </summary>
[JsonPropertyName("justification")]
public required VexOverrideJustification Justification { get; init; }
/// <summary>
/// User who created the override.
/// </summary>
[JsonPropertyName("createdBy")]
public required OverrideActor CreatedBy { get; init; }
/// <summary>
/// When the override was created.
/// </summary>
[JsonPropertyName("createdAt")]
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Approvers (for multi-party approval).
/// </summary>
[JsonPropertyName("approvers")]
public ImmutableArray<OverrideApproval> Approvers { get; init; } = [];
/// <summary>
/// Expiration time for the override.
/// </summary>
[JsonPropertyName("expiresAt")]
public DateTimeOffset? ExpiresAt { get; init; }
/// <summary>
/// Whether the override is currently active.
/// </summary>
[JsonPropertyName("isActive")]
public bool IsActive { get; init; } = true;
/// <summary>
/// Scope of the override.
/// </summary>
[JsonPropertyName("scope")]
public required OverrideScope Scope { get; init; }
/// <summary>
/// Cryptographic signature of the override.
/// </summary>
[JsonPropertyName("signature")]
public OverrideSignature? Signature { get; init; }
/// <summary>
/// Evidence references supporting the override.
/// </summary>
[JsonPropertyName("evidenceRefs")]
public ImmutableArray<string> EvidenceRefs { get; init; } = [];
/// <summary>
/// Tags for categorization.
/// </summary>
[JsonPropertyName("tags")]
public ImmutableArray<string> Tags { get; init; } = [];
/// <summary>
/// Audit events for this override.
/// </summary>
[JsonPropertyName("auditTrail")]
public ImmutableArray<OverrideAuditEvent> AuditTrail { get; init; } = [];
}
/// <summary>
/// Justification for a VEX override.
/// </summary>
public sealed record VexOverrideJustification
{
/// <summary>
/// Justification category.
/// </summary>
[JsonPropertyName("category")]
public required OverrideJustificationCategory Category { get; init; }
/// <summary>
/// Detailed explanation.
/// </summary>
[JsonPropertyName("explanation")]
public required string Explanation { get; init; }
/// <summary>
/// Compensating controls in place.
/// </summary>
[JsonPropertyName("compensatingControls")]
public ImmutableArray<string> CompensatingControls { get; init; } = [];
/// <summary>
/// Risk acceptance level.
/// </summary>
[JsonPropertyName("riskAcceptanceLevel")]
public RiskAcceptanceLevel? RiskAcceptanceLevel { get; init; }
/// <summary>
/// Remediation plan if applicable.
/// </summary>
[JsonPropertyName("remediationPlan")]
public RemediationPlan? RemediationPlan { get; init; }
}
/// <summary>
/// Categories for override justification.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum OverrideJustificationCategory
{
/// <summary>Vendor analysis incorrect.</summary>
VendorAnalysisIncorrect,
/// <summary>Compensating controls in place.</summary>
CompensatingControls,
/// <summary>Not applicable to deployment context.</summary>
NotApplicableToContext,
/// <summary>Risk accepted per policy.</summary>
RiskAccepted,
/// <summary>False positive confirmed.</summary>
FalsePositive,
/// <summary>Component not in use.</summary>
ComponentNotInUse,
/// <summary>Vulnerable code path not reachable.</summary>
CodePathNotReachable,
/// <summary>Already mitigated by other means.</summary>
AlreadyMitigated,
/// <summary>Business critical exception.</summary>
BusinessException
}
/// <summary>
/// Risk acceptance levels.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum RiskAcceptanceLevel
{
/// <summary>Low risk accepted.</summary>
Low,
/// <summary>Medium risk accepted.</summary>
Medium,
/// <summary>High risk accepted (requires senior approval).</summary>
High,
/// <summary>Critical risk accepted (requires executive approval).</summary>
Critical
}
/// <summary>
/// Remediation plan for accepted risk.
/// </summary>
public sealed record RemediationPlan
{
/// <summary>
/// Target remediation date.
/// </summary>
[JsonPropertyName("targetDate")]
public required DateTimeOffset TargetDate { get; init; }
/// <summary>
/// Remediation steps.
/// </summary>
[JsonPropertyName("steps")]
public ImmutableArray<string> Steps { get; init; } = [];
/// <summary>
/// Ticket/issue reference.
/// </summary>
[JsonPropertyName("ticketRef")]
public string? TicketRef { get; init; }
/// <summary>
/// Assigned owner.
/// </summary>
[JsonPropertyName("owner")]
public string? Owner { get; init; }
}
/// <summary>
/// Actor who created or modified an override.
/// </summary>
public sealed record OverrideActor
{
/// <summary>
/// User identifier.
/// </summary>
[JsonPropertyName("userId")]
public required string UserId { get; init; }
/// <summary>
/// User display name.
/// </summary>
[JsonPropertyName("displayName")]
public required string DisplayName { get; init; }
/// <summary>
/// User email.
/// </summary>
[JsonPropertyName("email")]
public string? Email { get; init; }
/// <summary>
/// User role at time of action.
/// </summary>
[JsonPropertyName("role")]
public string? Role { get; init; }
/// <summary>
/// Organization/tenant.
/// </summary>
[JsonPropertyName("organization")]
public string? Organization { get; init; }
}
/// <summary>
/// Approval for an override.
/// </summary>
public sealed record OverrideApproval
{
/// <summary>
/// Approver details.
/// </summary>
[JsonPropertyName("approver")]
public required OverrideActor Approver { get; init; }
/// <summary>
/// When approved.
/// </summary>
[JsonPropertyName("approvedAt")]
public required DateTimeOffset ApprovedAt { get; init; }
/// <summary>
/// Approval comment.
/// </summary>
[JsonPropertyName("comment")]
public string? Comment { get; init; }
/// <summary>
/// Signature of approval.
/// </summary>
[JsonPropertyName("signature")]
public OverrideSignature? Signature { get; init; }
}
/// <summary>
/// Scope of an override.
/// </summary>
public sealed record OverrideScope
{
/// <summary>
/// Scope type.
/// </summary>
[JsonPropertyName("type")]
public required OverrideScopeType Type { get; init; }
/// <summary>
/// Specific artifact digests if scoped.
/// </summary>
[JsonPropertyName("artifactDigests")]
public ImmutableArray<string> ArtifactDigests { get; init; } = [];
/// <summary>
/// Environment names if scoped.
/// </summary>
[JsonPropertyName("environments")]
public ImmutableArray<string> Environments { get; init; } = [];
/// <summary>
/// Version range if scoped.
/// </summary>
[JsonPropertyName("versionRange")]
public string? VersionRange { get; init; }
}
/// <summary>
/// Scope types for overrides.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum OverrideScopeType
{
/// <summary>Applies to all versions of the product.</summary>
AllVersions,
/// <summary>Applies to specific version range.</summary>
VersionRange,
/// <summary>Applies to specific artifacts only.</summary>
SpecificArtifacts,
/// <summary>Applies to specific environments only.</summary>
EnvironmentScoped
}
/// <summary>
/// Cryptographic signature for override.
/// </summary>
public sealed record OverrideSignature
{
/// <summary>
/// Signature algorithm.
/// </summary>
[JsonPropertyName("algorithm")]
public required string Algorithm { get; init; }
/// <summary>
/// Key identifier.
/// </summary>
[JsonPropertyName("keyId")]
public required string KeyId { get; init; }
/// <summary>
/// Signature value (base64).
/// </summary>
[JsonPropertyName("signature")]
public required string Signature { get; init; }
/// <summary>
/// Timestamp of signing.
/// </summary>
[JsonPropertyName("signedAt")]
public required DateTimeOffset SignedAt { get; init; }
/// <summary>
/// Certificate chain (PEM, if available).
/// </summary>
[JsonPropertyName("certificateChain")]
public string? CertificateChain { get; init; }
}
/// <summary>
/// Audit event for override lifecycle.
/// </summary>
public sealed record OverrideAuditEvent
{
/// <summary>
/// Event timestamp.
/// </summary>
[JsonPropertyName("timestamp")]
public required DateTimeOffset Timestamp { get; init; }
/// <summary>
/// Event type.
/// </summary>
[JsonPropertyName("eventType")]
public required OverrideAuditEventType EventType { get; init; }
/// <summary>
/// Actor who caused the event.
/// </summary>
[JsonPropertyName("actor")]
public required OverrideActor Actor { get; init; }
/// <summary>
/// Event details.
/// </summary>
[JsonPropertyName("details")]
public string? Details { get; init; }
/// <summary>
/// Previous value (for changes).
/// </summary>
[JsonPropertyName("previousValue")]
public string? PreviousValue { get; init; }
/// <summary>
/// New value (for changes).
/// </summary>
[JsonPropertyName("newValue")]
public string? NewValue { get; init; }
/// <summary>
/// IP address of actor.
/// </summary>
[JsonPropertyName("ipAddress")]
public string? IpAddress { get; init; }
/// <summary>
/// Event signature for tamper-evidence.
/// </summary>
[JsonPropertyName("eventSignature")]
public string? EventSignature { get; init; }
}
/// <summary>
/// Audit event types.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum OverrideAuditEventType
{
/// <summary>Override created.</summary>
Created,
/// <summary>Override approved.</summary>
Approved,
/// <summary>Override rejected.</summary>
Rejected,
/// <summary>Override modified.</summary>
Modified,
/// <summary>Override expired.</summary>
Expired,
/// <summary>Override revoked.</summary>
Revoked,
/// <summary>Override renewed.</summary>
Renewed,
/// <summary>Override applied to scan.</summary>
Applied,
/// <summary>Override viewed.</summary>
Viewed
}
/// <summary>
/// Service for managing customer VEX overrides.
/// </summary>
public interface IVexOverrideService
{
/// <summary>
/// Creates a new override.
/// </summary>
Task<VexCustomerOverride> CreateOverrideAsync(
CreateOverrideRequest request,
CancellationToken ct = default);
/// <summary>
/// Approves an override.
/// </summary>
Task<VexCustomerOverride> ApproveOverrideAsync(
string overrideId,
OverrideApproval approval,
CancellationToken ct = default);
/// <summary>
/// Revokes an override.
/// </summary>
Task<VexCustomerOverride> RevokeOverrideAsync(
string overrideId,
OverrideActor actor,
string reason,
CancellationToken ct = default);
/// <summary>
/// Gets active overrides for a vulnerability.
/// </summary>
Task<IReadOnlyList<VexCustomerOverride>> GetActiveOverridesAsync(
string vulnerabilityId,
string? productPurl = null,
CancellationToken ct = default);
/// <summary>
/// Gets the audit trail for an override.
/// </summary>
Task<IReadOnlyList<OverrideAuditEvent>> GetAuditTrailAsync(
string overrideId,
CancellationToken ct = default);
}
/// <summary>
/// Request to create an override.
/// </summary>
public sealed record CreateOverrideRequest
{
/// <summary>
/// Vulnerability ID.
/// </summary>
[JsonPropertyName("vulnerabilityId")]
public required string VulnerabilityId { get; init; }
/// <summary>
/// Product PURL.
/// </summary>
[JsonPropertyName("productPurl")]
public required string ProductPurl { get; init; }
/// <summary>
/// Override status.
/// </summary>
[JsonPropertyName("overrideStatus")]
public required string OverrideStatus { get; init; }
/// <summary>
/// Justification.
/// </summary>
[JsonPropertyName("justification")]
public required VexOverrideJustification Justification { get; init; }
/// <summary>
/// Scope.
/// </summary>
[JsonPropertyName("scope")]
public required OverrideScope Scope { get; init; }
/// <summary>
/// Expiration.
/// </summary>
[JsonPropertyName("expiresAt")]
public DateTimeOffset? ExpiresAt { get; init; }
/// <summary>
/// Evidence references.
/// </summary>
[JsonPropertyName("evidenceRefs")]
public ImmutableArray<string> EvidenceRefs { get; init; } = [];
/// <summary>
/// Tags.
/// </summary>
[JsonPropertyName("tags")]
public ImmutableArray<string> Tags { get; init; } = [];
}

View File

@@ -6,6 +6,7 @@
// -----------------------------------------------------------------------------
using StellaOps.Policy.Scoring;
using StellaOps.Policy.Scoring.Models;
using Xunit;
namespace StellaOps.Policy.Scoring.Tests;

View File

@@ -45,6 +45,71 @@ The Scanner module now includes Smart-Diff foundation primitives:
- Emits to Attestor module for DSSE envelope wrapping
- Consumed by Findings Ledger for triage decisions
## Reachability Drift (Sprint 3600)
Reachability Drift Detection tracks function-level reachability changes between scans:
### Libraries
- `StellaOps.Scanner.ReachabilityDrift` - Drift detection engine, API models, attestation
- `StellaOps.Scanner.CallGraph` - Language-specific call graph extractors
- `StellaOps.Scanner.VulnSurfaces` - Vulnerability surface computation (trigger methods)
### Key Types
- `ReachabilityDriftResult` - Drift analysis output (newly reachable, mitigated paths)
- `DriftedSink` - Sink that changed reachability state with cause attribution
- `DriftCause` - Causal explanation (guard removed, new route, code change)
- `CompressedPath` - Compact path representation (entrypoint → key nodes → sink)
- `ReachabilityConfidenceTier` - Confirmed/Likely/Present/Unreachable tiers
### Predicate Schema
- URI: `stellaops.dev/predicates/reachability-drift@v1`
- DSSE-signed attestations for drift evidence chain
### Call Graph Support
- **.NET**: Roslyn semantic analysis (`DotNetCallGraphExtractor`)
- **Node.js**: Babel AST analysis (`NodeCallGraphExtractor`)
- **Future**: Java (ASM), Go (SSA), Python (AST)
### Entrypoint Detection
- ASP.NET Core: `[HttpGet]`, `[Route]`, minimal APIs
- Express/Fastify: route handlers
- Background: `IHostedService`, `BackgroundService`
- CLI: `Main`, command handlers
### Drift API Endpoints
- `POST /api/drift/analyze` - Compute drift between two scans
- `GET /api/drift/{driftId}` - Retrieve drift result
- `GET /api/drift/{driftId}/paths` - Get detailed paths
### Testing
- Unit tests: `src/Scanner/__Tests/StellaOps.Scanner.ReachabilityDrift.Tests/`
- Benchmark cases: `bench/reachability-benchmark/`
- Golden fixtures: deterministic path compression, DSSE output
## Vulnerability Surfaces (Sprint 3700)
Compute vulnerability surfaces by diffing vulnerable vs fixed package versions:
### Libraries
- `StellaOps.Scanner.VulnSurfaces` - Surface builder, method fingerprinting, trigger extraction
### Key Types
- `VulnSurface` - Computed surface with sink methods and triggers
- `VulnSurfaceSink` - Method that changed in security fix
- `VulnSurfaceTrigger` - Public API that can reach sink
- `MethodFingerprint` - Stable method identity across versions
### Per-Ecosystem Support
- **NuGet**: Cecil IL fingerprinting
- **npm**: Babel AST fingerprinting
- **Maven**: ASM bytecode fingerprinting
- **PyPI**: Python AST fingerprinting
### Integration with Reachability
- `ISurfaceQueryService` - Query triggers for CVE during scan
- Confidence tiers: Confirmed (trigger reachable) > Likely (API reachable) > Present (dep only)
- Path witnesses include surface evidence for audit trail
## Engineering Rules
- Target `net10.0`; prefer latest C# preview allowed in repo.
- Offline-first: no new external network calls; use cached feeds (`/local-nugets`).

View File

@@ -31,5 +31,6 @@
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Emit/StellaOps.Scanner.Emit.csproj" />
<ProjectReference Include="../StellaOps.Scanner.Analyzers.Native/StellaOps.Scanner.Analyzers.Native.csproj" />
<ProjectReference Include="../../Unknowns/__Libraries/StellaOps.Unknowns.Core/StellaOps.Unknowns.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,86 @@
using System.Collections.Immutable;
namespace StellaOps.Scanner.CallGraph;
/// <summary>
/// Configuration options for <see cref="ReachabilityAnalyzer"/>.
/// Defines limits and ordering rules for deterministic path output.
/// </summary>
/// <remarks>
/// Sprint: SPRINT_3700_0001_0001 (WIT-007A, WIT-007B)
/// Contract: ReachabilityAnalyzer → PathWitnessBuilder output contract
///
/// Determinism guarantees:
/// - Paths are ordered by (SinkId ASC, EntrypointId ASC, PathLength ASC)
/// - Node IDs within paths are ordered from entrypoint to sink (caller → callee)
/// - Maximum caps prevent unbounded output
/// </remarks>
public sealed record ReachabilityAnalysisOptions
{
/// <summary>
/// Default options with sensible limits.
/// </summary>
public static ReachabilityAnalysisOptions Default { get; } = new();
/// <summary>
/// Maximum depth for BFS traversal (0 = unlimited, default = 256).
/// Prevents infinite loops in cyclic graphs.
/// </summary>
public int MaxDepth { get; init; } = 256;
/// <summary>
/// Maximum number of paths to return per sink (default = 10).
/// Limits witness explosion when many entrypoints reach the same sink.
/// </summary>
public int MaxPathsPerSink { get; init; } = 10;
/// <summary>
/// Maximum total paths to return (default = 100).
/// Hard cap to prevent memory issues with highly connected graphs.
/// </summary>
public int MaxTotalPaths { get; init; } = 100;
/// <summary>
/// Whether to include node metadata in path reconstruction (default = true).
/// When false, paths only contain node IDs without additional context.
/// </summary>
public bool IncludeNodeMetadata { get; init; } = true;
/// <summary>
/// Explicit list of sink node IDs to target (default = null, meaning use snapshot.SinkIds).
/// When set, analysis will only find paths to these specific sinks.
/// This enables targeted witness generation for specific vulnerabilities.
/// </summary>
/// <remarks>
/// Sprint: SPRINT_3700_0001_0001 (WIT-007B)
/// Enables: PathWitnessBuilder can request paths to specific trigger methods.
/// </remarks>
public ImmutableArray<string>? ExplicitSinks { get; init; }
/// <summary>
/// Validates options and returns sanitized values.
/// </summary>
public ReachabilityAnalysisOptions Validated()
{
// Normalize explicit sinks: trim, dedupe, order
ImmutableArray<string>? normalizedSinks = null;
if (ExplicitSinks.HasValue && !ExplicitSinks.Value.IsDefaultOrEmpty)
{
normalizedSinks = ExplicitSinks.Value
.Where(s => !string.IsNullOrWhiteSpace(s))
.Select(s => s.Trim())
.Distinct(StringComparer.Ordinal)
.OrderBy(s => s, StringComparer.Ordinal)
.ToImmutableArray();
}
return new ReachabilityAnalysisOptions
{
MaxDepth = MaxDepth <= 0 ? 256 : Math.Min(MaxDepth, 1024),
MaxPathsPerSink = MaxPathsPerSink <= 0 ? 10 : Math.Min(MaxPathsPerSink, 100),
MaxTotalPaths = MaxTotalPaths <= 0 ? 100 : Math.Min(MaxTotalPaths, 1000),
IncludeNodeMetadata = IncludeNodeMetadata,
ExplicitSinks = normalizedSinks
};
}
}

View File

@@ -2,20 +2,53 @@ using System.Collections.Immutable;
namespace StellaOps.Scanner.CallGraph;
/// <summary>
/// Analyzes call graph reachability from entrypoints to sinks using BFS traversal.
/// Provides deterministically-ordered paths suitable for witness generation.
/// </summary>
/// <remarks>
/// Sprint: SPRINT_3700_0001_0001 (WIT-007A, WIT-007B)
/// Contract: Paths are ordered by (SinkId ASC, EntrypointId ASC, PathLength ASC).
/// Node IDs within paths are ordered from entrypoint to sink (caller → callee).
/// </remarks>
public sealed class ReachabilityAnalyzer
{
private readonly TimeProvider _timeProvider;
private readonly int _maxDepth;
private readonly ReachabilityAnalysisOptions _options;
/// <summary>
/// Creates a new ReachabilityAnalyzer with default options.
/// </summary>
public ReachabilityAnalyzer(TimeProvider? timeProvider = null, int maxDepth = 256)
: this(timeProvider, new ReachabilityAnalysisOptions { MaxDepth = maxDepth })
{
_timeProvider = timeProvider ?? TimeProvider.System;
_maxDepth = maxDepth <= 0 ? 256 : maxDepth;
}
/// <summary>
/// Creates a new ReachabilityAnalyzer with specified options.
/// </summary>
public ReachabilityAnalyzer(TimeProvider? timeProvider, ReachabilityAnalysisOptions options)
{
_timeProvider = timeProvider ?? TimeProvider.System;
_options = (options ?? ReachabilityAnalysisOptions.Default).Validated();
}
/// <summary>
/// Analyzes reachability using default options.
/// </summary>
public ReachabilityAnalysisResult Analyze(CallGraphSnapshot snapshot)
=> Analyze(snapshot, _options);
/// <summary>
/// Analyzes reachability with explicit options for this invocation.
/// </summary>
/// <param name="snapshot">The call graph snapshot to analyze.</param>
/// <param name="options">Options controlling limits and output format.</param>
/// <returns>Analysis result with deterministically-ordered paths.</returns>
public ReachabilityAnalysisResult Analyze(CallGraphSnapshot snapshot, ReachabilityAnalysisOptions options)
{
ArgumentNullException.ThrowIfNull(snapshot);
var opts = (options ?? _options).Validated();
var trimmed = snapshot.Trimmed();
var adjacency = BuildAdjacency(trimmed);
@@ -47,7 +80,7 @@ public sealed class ReachabilityAnalyzer
continue;
}
if (depth >= _maxDepth)
if (depth >= opts.MaxDepth)
{
continue;
}
@@ -72,12 +105,18 @@ public sealed class ReachabilityAnalyzer
}
var reachableNodes = origins.Keys.OrderBy(id => id, StringComparer.Ordinal).ToImmutableArray();
var reachableSinks = trimmed.SinkIds
// WIT-007B: Use explicit sinks if specified, otherwise use snapshot sinks
var targetSinks = opts.ExplicitSinks.HasValue && !opts.ExplicitSinks.Value.IsDefaultOrEmpty
? opts.ExplicitSinks.Value
: trimmed.SinkIds;
var reachableSinks = targetSinks
.Where(origins.ContainsKey)
.OrderBy(id => id, StringComparer.Ordinal)
.ToImmutableArray();
var paths = BuildPaths(reachableSinks, origins, parents);
var paths = BuildPaths(reachableSinks, origins, parents, opts);
var computedAt = _timeProvider.GetUtcNow();
var provisional = new ReachabilityAnalysisResult(
@@ -136,9 +175,12 @@ public sealed class ReachabilityAnalyzer
private static ImmutableArray<ReachabilityPath> BuildPaths(
ImmutableArray<string> reachableSinks,
Dictionary<string, string> origins,
Dictionary<string, string?> parents)
Dictionary<string, string?> parents,
ReachabilityAnalysisOptions options)
{
var paths = new List<ReachabilityPath>(reachableSinks.Length);
var pathCountPerSink = new Dictionary<string, int>(StringComparer.Ordinal);
foreach (var sinkId in reachableSinks)
{
if (!origins.TryGetValue(sinkId, out var origin))
@@ -146,13 +188,29 @@ public sealed class ReachabilityAnalyzer
continue;
}
// Enforce per-sink limit
pathCountPerSink.TryGetValue(sinkId, out var currentCount);
if (currentCount >= options.MaxPathsPerSink)
{
continue;
}
pathCountPerSink[sinkId] = currentCount + 1;
var nodeIds = ReconstructPathNodeIds(sinkId, parents);
paths.Add(new ReachabilityPath(origin, sinkId, nodeIds));
// Enforce total path limit
if (paths.Count >= options.MaxTotalPaths)
{
break;
}
}
// Deterministic ordering: SinkId ASC, EntrypointId ASC, PathLength ASC
return paths
.OrderBy(p => p.SinkId, StringComparer.Ordinal)
.ThenBy(p => p.EntrypointId, StringComparer.Ordinal)
.ThenBy(p => p.NodeIds.Length)
.ToImmutableArray();
}

View File

@@ -0,0 +1,202 @@
// -----------------------------------------------------------------------------
// ComponentIdentity.cs
// Sprint: SPRINT_3850_0001_0001 (Competitive Gap Closure)
// Task: SBOM-L-001 - Define component identity schema
// Description: Component identity with source, digest, and build recipe hash.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Core.Models;
/// <summary>
/// Represents a unique component identity in the SBOM ledger.
/// Combines source reference, content digest, and build recipe for
/// deterministic identification across builds and environments.
/// </summary>
public sealed record ComponentIdentity
{
/// <summary>
/// Package URL (PURL) identifying the component.
/// Example: pkg:npm/lodash@4.17.21
/// </summary>
[JsonPropertyName("purl")]
public required string Purl { get; init; }
/// <summary>
/// Content digest of the component artifact.
/// Format: algorithm:hex (e.g., sha256:abc123...)
/// </summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>
/// Build recipe hash capturing build-time configuration.
/// Includes compiler flags, environment, and reproducibility markers.
/// </summary>
[JsonPropertyName("buildRecipeHash")]
public string? BuildRecipeHash { get; init; }
/// <summary>
/// Source repository reference.
/// </summary>
[JsonPropertyName("sourceRef")]
public SourceReference? SourceRef { get; init; }
/// <summary>
/// Layer index where component was introduced (for container images).
/// </summary>
[JsonPropertyName("layerIndex")]
public int? LayerIndex { get; init; }
/// <summary>
/// Layer digest where component was introduced.
/// </summary>
[JsonPropertyName("layerDigest")]
public string? LayerDigest { get; init; }
/// <summary>
/// Loader that resolved this component (npm, pip, maven, etc.).
/// </summary>
[JsonPropertyName("loader")]
public string? Loader { get; init; }
/// <summary>
/// Whether this component is a direct dependency or transitive.
/// </summary>
[JsonPropertyName("isDirect")]
public bool IsDirect { get; init; }
/// <summary>
/// Parent component identities (for dependency graph).
/// </summary>
[JsonPropertyName("parentIds")]
public ImmutableArray<string> ParentIds { get; init; } = [];
/// <summary>
/// Scope of the dependency (runtime, dev, test, optional).
/// </summary>
[JsonPropertyName("scope")]
public DependencyScope Scope { get; init; } = DependencyScope.Runtime;
/// <summary>
/// Computes the canonical identity hash.
/// </summary>
public string ComputeIdentityHash()
{
var canonical = StellaOps.Canonical.Json.CanonJson.Canonicalize(this);
return StellaOps.Canonical.Json.CanonJson.Sha256Prefixed(canonical);
}
}
/// <summary>
/// Source code repository reference.
/// </summary>
public sealed record SourceReference
{
/// <summary>
/// Repository URL.
/// </summary>
[JsonPropertyName("repositoryUrl")]
public required string RepositoryUrl { get; init; }
/// <summary>
/// Commit SHA or tag.
/// </summary>
[JsonPropertyName("revision")]
public string? Revision { get; init; }
/// <summary>
/// Path within the repository.
/// </summary>
[JsonPropertyName("path")]
public string? Path { get; init; }
/// <summary>
/// VCS type (git, svn, hg).
/// </summary>
[JsonPropertyName("vcsType")]
public string VcsType { get; init; } = "git";
}
/// <summary>
/// Dependency scope.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum DependencyScope
{
/// <summary>Runtime dependency.</summary>
Runtime,
/// <summary>Development dependency.</summary>
Development,
/// <summary>Test dependency.</summary>
Test,
/// <summary>Optional/peer dependency.</summary>
Optional,
/// <summary>Build-time only dependency.</summary>
Build
}
/// <summary>
/// Build recipe capturing reproducibility information.
/// </summary>
public sealed record BuildRecipe
{
/// <summary>
/// Builder image or tool version.
/// </summary>
[JsonPropertyName("builder")]
public required string Builder { get; init; }
/// <summary>
/// Build command or entrypoint.
/// </summary>
[JsonPropertyName("buildCommand")]
public string? BuildCommand { get; init; }
/// <summary>
/// Environment variables affecting the build (sanitized).
/// </summary>
[JsonPropertyName("buildEnv")]
public ImmutableDictionary<string, string> BuildEnv { get; init; } =
ImmutableDictionary<string, string>.Empty;
/// <summary>
/// Compiler/interpreter version.
/// </summary>
[JsonPropertyName("compilerVersion")]
public string? CompilerVersion { get; init; }
/// <summary>
/// Build timestamp (if reproducible builds are not used).
/// </summary>
[JsonPropertyName("buildTimestamp")]
public DateTimeOffset? BuildTimestamp { get; init; }
/// <summary>
/// Whether build is reproducible (hermetic).
/// </summary>
[JsonPropertyName("reproducible")]
public bool Reproducible { get; init; }
/// <summary>
/// SLSA provenance level (1-4).
/// </summary>
[JsonPropertyName("slsaLevel")]
public int? SlsaLevel { get; init; }
/// <summary>
/// Computes the recipe hash.
/// </summary>
public string ComputeHash()
{
var canonical = StellaOps.Canonical.Json.CanonJson.Canonicalize(this);
return StellaOps.Canonical.Json.CanonJson.Sha256Prefixed(canonical);
}
}

View File

@@ -0,0 +1,432 @@
// -----------------------------------------------------------------------------
// FalsificationConditions.cs
// Sprint: SPRINT_3850_0001_0001 (Competitive Gap Closure)
// Task: EXP-F-004 - Falsification conditions per finding
// Description: Models for specifying conditions that would falsify a finding.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Immutable;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Core.Models;
/// <summary>
/// Conditions that would falsify (invalidate) a vulnerability finding.
/// Inspired by Popperian falsifiability - what evidence would disprove this finding?
/// </summary>
public sealed record FalsificationConditions
{
/// <summary>
/// Finding identifier these conditions apply to.
/// </summary>
[JsonPropertyName("findingId")]
public required string FindingId { get; init; }
/// <summary>
/// Vulnerability ID (CVE, etc.).
/// </summary>
[JsonPropertyName("vulnerabilityId")]
public required string VulnerabilityId { get; init; }
/// <summary>
/// Component PURL.
/// </summary>
[JsonPropertyName("componentPurl")]
public required string ComponentPurl { get; init; }
/// <summary>
/// Conditions that would falsify the finding.
/// </summary>
[JsonPropertyName("conditions")]
public required ImmutableArray<FalsificationCondition> Conditions { get; init; }
/// <summary>
/// Logical operator for combining conditions.
/// </summary>
[JsonPropertyName("operator")]
public FalsificationOperator Operator { get; init; } = FalsificationOperator.Any;
/// <summary>
/// When these conditions were generated.
/// </summary>
[JsonPropertyName("generatedAt")]
public required DateTimeOffset GeneratedAt { get; init; }
/// <summary>
/// Generator that produced these conditions.
/// </summary>
[JsonPropertyName("generator")]
public required string Generator { get; init; }
}
/// <summary>
/// A single falsification condition.
/// </summary>
public sealed record FalsificationCondition
{
/// <summary>
/// Condition identifier.
/// </summary>
[JsonPropertyName("id")]
public required string Id { get; init; }
/// <summary>
/// Type of condition.
/// </summary>
[JsonPropertyName("type")]
public required FalsificationConditionType Type { get; init; }
/// <summary>
/// Human-readable description.
/// </summary>
[JsonPropertyName("description")]
public required string Description { get; init; }
/// <summary>
/// Machine-readable predicate (SPL, Rego, etc.).
/// </summary>
[JsonPropertyName("predicate")]
public string? Predicate { get; init; }
/// <summary>
/// Expected evidence type that would satisfy this condition.
/// </summary>
[JsonPropertyName("evidenceType")]
public required string EvidenceType { get; init; }
/// <summary>
/// Whether this condition has been evaluated.
/// </summary>
[JsonPropertyName("evaluated")]
public bool Evaluated { get; init; }
/// <summary>
/// Evaluation result if evaluated.
/// </summary>
[JsonPropertyName("result")]
public FalsificationResult? Result { get; init; }
/// <summary>
/// Confidence in the condition evaluation.
/// </summary>
[JsonPropertyName("confidence")]
public double Confidence { get; init; } = 1.0;
/// <summary>
/// Effort required to verify this condition.
/// </summary>
[JsonPropertyName("effort")]
public VerificationEffort Effort { get; init; } = VerificationEffort.Low;
}
/// <summary>
/// Types of falsification conditions.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum FalsificationConditionType
{
/// <summary>Code path is unreachable.</summary>
CodePathUnreachable,
/// <summary>Vulnerable function is not called.</summary>
FunctionNotCalled,
/// <summary>Component is not present.</summary>
ComponentNotPresent,
/// <summary>Version is not affected.</summary>
VersionNotAffected,
/// <summary>Dependency is dev-only.</summary>
DevDependencyOnly,
/// <summary>Required precondition is false.</summary>
PreconditionFalse,
/// <summary>Compensating control exists.</summary>
CompensatingControl,
/// <summary>VEX from vendor says not affected.</summary>
VendorVexNotAffected,
/// <summary>Runtime environment prevents exploit.</summary>
RuntimePrevents,
/// <summary>Network isolation prevents exploit.</summary>
NetworkIsolated,
/// <summary>Input validation prevents exploit.</summary>
InputValidated,
/// <summary>Fix already applied.</summary>
FixApplied,
/// <summary>Backport fixes the issue.</summary>
BackportApplied,
/// <summary>Custom condition.</summary>
Custom
}
/// <summary>
/// Operator for combining conditions.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum FalsificationOperator
{
/// <summary>Any condition falsifies (OR).</summary>
Any,
/// <summary>All conditions required (AND).</summary>
All
}
/// <summary>
/// Result of evaluating a falsification condition.
/// </summary>
public sealed record FalsificationResult
{
/// <summary>
/// Whether the condition is satisfied (finding is falsified).
/// </summary>
[JsonPropertyName("satisfied")]
public required bool Satisfied { get; init; }
/// <summary>
/// Evidence supporting the result.
/// </summary>
[JsonPropertyName("evidence")]
public string? Evidence { get; init; }
/// <summary>
/// Evidence digest.
/// </summary>
[JsonPropertyName("evidenceDigest")]
public string? EvidenceDigest { get; init; }
/// <summary>
/// When evaluated.
/// </summary>
[JsonPropertyName("evaluatedAt")]
public required DateTimeOffset EvaluatedAt { get; init; }
/// <summary>
/// Evaluator that produced the result.
/// </summary>
[JsonPropertyName("evaluator")]
public required string Evaluator { get; init; }
/// <summary>
/// Confidence in the result.
/// </summary>
[JsonPropertyName("confidence")]
public required double Confidence { get; init; }
/// <summary>
/// Explanation of the result.
/// </summary>
[JsonPropertyName("explanation")]
public string? Explanation { get; init; }
}
/// <summary>
/// Effort levels for verification.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum VerificationEffort
{
/// <summary>Automatic, no human effort.</summary>
Automatic,
/// <summary>Low effort (quick check).</summary>
Low,
/// <summary>Medium effort (investigation needed).</summary>
Medium,
/// <summary>High effort (significant analysis).</summary>
High,
/// <summary>Expert required.</summary>
Expert
}
/// <summary>
/// Generator for falsification conditions.
/// </summary>
public interface IFalsificationConditionGenerator
{
/// <summary>
/// Generates falsification conditions for a finding.
/// </summary>
FalsificationConditions Generate(FindingContext context);
}
/// <summary>
/// Context for generating falsification conditions.
/// </summary>
public sealed record FindingContext
{
/// <summary>
/// Finding identifier.
/// </summary>
[JsonPropertyName("findingId")]
public required string FindingId { get; init; }
/// <summary>
/// Vulnerability ID.
/// </summary>
[JsonPropertyName("vulnerabilityId")]
public required string VulnerabilityId { get; init; }
/// <summary>
/// Component PURL.
/// </summary>
[JsonPropertyName("componentPurl")]
public required string ComponentPurl { get; init; }
/// <summary>
/// Vulnerability description.
/// </summary>
[JsonPropertyName("description")]
public string? Description { get; init; }
/// <summary>
/// Affected versions.
/// </summary>
[JsonPropertyName("affectedVersions")]
public ImmutableArray<string> AffectedVersions { get; init; } = [];
/// <summary>
/// Fixed versions.
/// </summary>
[JsonPropertyName("fixedVersions")]
public ImmutableArray<string> FixedVersions { get; init; } = [];
/// <summary>
/// CWE IDs.
/// </summary>
[JsonPropertyName("cweIds")]
public ImmutableArray<string> CweIds { get; init; } = [];
/// <summary>
/// Attack vector from CVSS.
/// </summary>
[JsonPropertyName("attackVector")]
public string? AttackVector { get; init; }
/// <summary>
/// Whether reachability data is available.
/// </summary>
[JsonPropertyName("hasReachabilityData")]
public bool HasReachabilityData { get; init; }
/// <summary>
/// Dependency scope (runtime, dev, test).
/// </summary>
[JsonPropertyName("dependencyScope")]
public string? DependencyScope { get; init; }
}
/// <summary>
/// Default falsification condition generator.
/// </summary>
public sealed class DefaultFalsificationConditionGenerator : IFalsificationConditionGenerator
{
public FalsificationConditions Generate(FindingContext context)
{
var conditions = new List<FalsificationCondition>();
var id = 0;
// Always add: component not present
conditions.Add(new FalsificationCondition
{
Id = $"FC-{++id:D3}",
Type = FalsificationConditionType.ComponentNotPresent,
Description = $"Component {context.ComponentPurl} is not actually present in the artifact",
EvidenceType = "sbom-verification",
Effort = VerificationEffort.Automatic
});
// Version check if fixed versions known
if (context.FixedVersions.Length > 0)
{
conditions.Add(new FalsificationCondition
{
Id = $"FC-{++id:D3}",
Type = FalsificationConditionType.VersionNotAffected,
Description = $"Installed version is >= {string.Join(" or ", context.FixedVersions)}",
EvidenceType = "version-verification",
Effort = VerificationEffort.Low
});
}
// Reachability condition
conditions.Add(new FalsificationCondition
{
Id = $"FC-{++id:D3}",
Type = FalsificationConditionType.CodePathUnreachable,
Description = "Vulnerable code path is not reachable from application entry points",
EvidenceType = "reachability-analysis",
Effort = context.HasReachabilityData ? VerificationEffort.Automatic : VerificationEffort.Medium
});
// Dev dependency check
if (context.DependencyScope == "Development" || context.DependencyScope == "Test")
{
conditions.Add(new FalsificationCondition
{
Id = $"FC-{++id:D3}",
Type = FalsificationConditionType.DevDependencyOnly,
Description = "Component is only used in development/test and not in production artifact",
EvidenceType = "scope-verification",
Effort = VerificationEffort.Low
});
}
// Network isolation for network-based attacks
if (context.AttackVector == "Network" || context.AttackVector == "N")
{
conditions.Add(new FalsificationCondition
{
Id = $"FC-{++id:D3}",
Type = FalsificationConditionType.NetworkIsolated,
Description = "Component is not exposed to network traffic (air-gapped or internal only)",
EvidenceType = "network-topology",
Effort = VerificationEffort.Medium
});
}
// VEX from vendor
conditions.Add(new FalsificationCondition
{
Id = $"FC-{++id:D3}",
Type = FalsificationConditionType.VendorVexNotAffected,
Description = "Vendor VEX statement indicates not_affected for this deployment",
EvidenceType = "vex-statement",
Effort = VerificationEffort.Low
});
// Compensating control
conditions.Add(new FalsificationCondition
{
Id = $"FC-{++id:D3}",
Type = FalsificationConditionType.CompensatingControl,
Description = "Compensating control (WAF, sandbox, etc.) mitigates the vulnerability",
EvidenceType = "control-documentation",
Effort = VerificationEffort.Medium
});
return new FalsificationConditions
{
FindingId = context.FindingId,
VulnerabilityId = context.VulnerabilityId,
ComponentPurl = context.ComponentPurl,
Conditions = conditions.ToImmutableArray(),
Operator = FalsificationOperator.Any,
GeneratedAt = DateTimeOffset.UtcNow,
Generator = "StellaOps.DefaultFalsificationGenerator/1.0"
};
}
}

View File

@@ -0,0 +1,307 @@
// -----------------------------------------------------------------------------
// LayerDependencyGraph.cs
// Sprint: SPRINT_3850_0001_0001 (Competitive Gap Closure)
// Task: SBOM-L-003 - Layer-aware dependency graphs with loader resolution
// Description: Dependency graph that tracks layer provenance and loader info.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Core.Models;
/// <summary>
/// Layer-aware dependency graph for container images.
/// Tracks which layer introduced each dependency and which loader resolved it.
/// </summary>
public sealed class LayerDependencyGraph
{
private readonly Dictionary<string, DependencyNode> _nodes = new();
private readonly Dictionary<int, LayerInfo> _layers = new();
/// <summary>
/// All dependency nodes in the graph.
/// </summary>
public IReadOnlyDictionary<string, DependencyNode> Nodes => _nodes;
/// <summary>
/// Layer information indexed by layer index.
/// </summary>
public IReadOnlyDictionary<int, LayerInfo> Layers => _layers;
/// <summary>
/// Root nodes (direct dependencies with no parents in this graph).
/// </summary>
public IEnumerable<DependencyNode> Roots =>
_nodes.Values.Where(n => n.ParentIds.Length == 0 || n.IsDirect);
/// <summary>
/// Adds a layer to the graph.
/// </summary>
public void AddLayer(LayerInfo layer)
{
_layers[layer.Index] = layer;
}
/// <summary>
/// Adds a dependency node to the graph.
/// </summary>
public void AddNode(DependencyNode node)
{
_nodes[node.Id] = node;
}
/// <summary>
/// Gets all dependencies introduced in a specific layer.
/// </summary>
public IEnumerable<DependencyNode> GetDependenciesInLayer(int layerIndex)
{
return _nodes.Values.Where(n => n.LayerIndex == layerIndex);
}
/// <summary>
/// Gets all dependencies resolved by a specific loader.
/// </summary>
public IEnumerable<DependencyNode> GetDependenciesByLoader(string loader)
{
return _nodes.Values.Where(n =>
string.Equals(n.Loader, loader, StringComparison.OrdinalIgnoreCase));
}
/// <summary>
/// Gets the transitive closure of dependencies for a node.
/// </summary>
public IEnumerable<DependencyNode> GetTransitiveDependencies(string nodeId)
{
var visited = new HashSet<string>();
var result = new List<DependencyNode>();
CollectTransitive(nodeId, visited, result);
return result;
}
private void CollectTransitive(string nodeId, HashSet<string> visited, List<DependencyNode> result)
{
if (!visited.Add(nodeId)) return;
if (!_nodes.TryGetValue(nodeId, out var node)) return;
result.Add(node);
foreach (var childId in node.ChildIds)
{
CollectTransitive(childId, visited, result);
}
}
/// <summary>
/// Computes the graph digest for integrity verification.
/// </summary>
public string ComputeGraphDigest()
{
var sortedNodes = _nodes.Values
.OrderBy(n => n.Id, StringComparer.Ordinal)
.ToList();
var canonical = StellaOps.Canonical.Json.CanonJson.Canonicalize(sortedNodes);
return StellaOps.Canonical.Json.CanonJson.Sha256Prefixed(canonical);
}
/// <summary>
/// Computes a diff between this graph and another.
/// </summary>
public GraphDiff ComputeDiff(LayerDependencyGraph other)
{
var added = other._nodes.Keys.Except(_nodes.Keys).ToImmutableArray();
var removed = _nodes.Keys.Except(other._nodes.Keys).ToImmutableArray();
var modified = new List<string>();
foreach (var key in _nodes.Keys.Intersect(other._nodes.Keys))
{
if (_nodes[key].Digest != other._nodes[key].Digest)
{
modified.Add(key);
}
}
return new GraphDiff
{
AddedNodeIds = added,
RemovedNodeIds = removed,
ModifiedNodeIds = modified.ToImmutableArray(),
BaseGraphDigest = ComputeGraphDigest(),
HeadGraphDigest = other.ComputeGraphDigest()
};
}
}
/// <summary>
/// Information about a container layer.
/// </summary>
public sealed record LayerInfo
{
/// <summary>
/// Layer index (0-based, from base).
/// </summary>
[JsonPropertyName("index")]
public required int Index { get; init; }
/// <summary>
/// Layer digest.
/// </summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>
/// Layer command (e.g., RUN, COPY).
/// </summary>
[JsonPropertyName("command")]
public string? Command { get; init; }
/// <summary>
/// Layer size in bytes.
/// </summary>
[JsonPropertyName("size")]
public long? Size { get; init; }
/// <summary>
/// Whether this layer is from the base image.
/// </summary>
[JsonPropertyName("isBaseImage")]
public bool IsBaseImage { get; init; }
/// <summary>
/// Base image reference if this is a base layer.
/// </summary>
[JsonPropertyName("baseImageRef")]
public string? BaseImageRef { get; init; }
}
/// <summary>
/// Dependency node in the graph.
/// </summary>
public sealed record DependencyNode
{
/// <summary>
/// Unique node ID (typically the identity hash).
/// </summary>
[JsonPropertyName("id")]
public required string Id { get; init; }
/// <summary>
/// Package URL.
/// </summary>
[JsonPropertyName("purl")]
public required string Purl { get; init; }
/// <summary>
/// Package name.
/// </summary>
[JsonPropertyName("name")]
public required string Name { get; init; }
/// <summary>
/// Package version.
/// </summary>
[JsonPropertyName("version")]
public string? Version { get; init; }
/// <summary>
/// Content digest.
/// </summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>
/// Loader that resolved this dependency.
/// </summary>
[JsonPropertyName("loader")]
public required string Loader { get; init; }
/// <summary>
/// Layer index where introduced.
/// </summary>
[JsonPropertyName("layerIndex")]
public int? LayerIndex { get; init; }
/// <summary>
/// Whether this is a direct dependency.
/// </summary>
[JsonPropertyName("isDirect")]
public bool IsDirect { get; init; }
/// <summary>
/// Dependency scope.
/// </summary>
[JsonPropertyName("scope")]
public DependencyScope Scope { get; init; } = DependencyScope.Runtime;
/// <summary>
/// Parent node IDs.
/// </summary>
[JsonPropertyName("parentIds")]
public ImmutableArray<string> ParentIds { get; init; } = [];
/// <summary>
/// Child node IDs.
/// </summary>
[JsonPropertyName("childIds")]
public ImmutableArray<string> ChildIds { get; init; } = [];
/// <summary>
/// Build recipe hash if available.
/// </summary>
[JsonPropertyName("buildRecipeHash")]
public string? BuildRecipeHash { get; init; }
/// <summary>
/// Vulnerabilities associated with this node.
/// </summary>
[JsonPropertyName("vulnerabilities")]
public ImmutableArray<string> Vulnerabilities { get; init; } = [];
}
/// <summary>
/// Diff between two dependency graphs.
/// </summary>
public sealed record GraphDiff
{
/// <summary>
/// Node IDs added in the head graph.
/// </summary>
[JsonPropertyName("addedNodeIds")]
public ImmutableArray<string> AddedNodeIds { get; init; } = [];
/// <summary>
/// Node IDs removed from the base graph.
/// </summary>
[JsonPropertyName("removedNodeIds")]
public ImmutableArray<string> RemovedNodeIds { get; init; } = [];
/// <summary>
/// Node IDs with modified content.
/// </summary>
[JsonPropertyName("modifiedNodeIds")]
public ImmutableArray<string> ModifiedNodeIds { get; init; } = [];
/// <summary>
/// Base graph digest.
/// </summary>
[JsonPropertyName("baseGraphDigest")]
public required string BaseGraphDigest { get; init; }
/// <summary>
/// Head graph digest.
/// </summary>
[JsonPropertyName("headGraphDigest")]
public required string HeadGraphDigest { get; init; }
/// <summary>
/// Whether there are any changes.
/// </summary>
[JsonIgnore]
public bool HasChanges =>
AddedNodeIds.Length > 0 ||
RemovedNodeIds.Length > 0 ||
ModifiedNodeIds.Length > 0;
}

View File

@@ -0,0 +1,364 @@
// -----------------------------------------------------------------------------
// SbomVersioning.cs
// Sprint: SPRINT_3850_0001_0001 (Competitive Gap Closure)
// Task: SBOM-L-004 - SBOM versioning and merge semantics API
// Description: SBOM version control and merge operations.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Core.Models;
/// <summary>
/// Versioned SBOM with lineage tracking.
/// </summary>
public sealed record VersionedSbom
{
/// <summary>
/// Unique SBOM identifier.
/// </summary>
[JsonPropertyName("id")]
public required string Id { get; init; }
/// <summary>
/// Version number (monotonically increasing).
/// </summary>
[JsonPropertyName("version")]
public required int Version { get; init; }
/// <summary>
/// Parent SBOM ID (for lineage).
/// </summary>
[JsonPropertyName("parentId")]
public string? ParentId { get; init; }
/// <summary>
/// Parent version number.
/// </summary>
[JsonPropertyName("parentVersion")]
public int? ParentVersion { get; init; }
/// <summary>
/// Content digest of the SBOM.
/// </summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>
/// SBOM format (spdx, cyclonedx).
/// </summary>
[JsonPropertyName("format")]
public required SbomFormat Format { get; init; }
/// <summary>
/// Format version (e.g., "3.0.1" for SPDX).
/// </summary>
[JsonPropertyName("formatVersion")]
public required string FormatVersion { get; init; }
/// <summary>
/// Creation timestamp.
/// </summary>
[JsonPropertyName("createdAt")]
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Tool that generated this SBOM.
/// </summary>
[JsonPropertyName("generatorTool")]
public required string GeneratorTool { get; init; }
/// <summary>
/// Generator tool version.
/// </summary>
[JsonPropertyName("generatorVersion")]
public required string GeneratorVersion { get; init; }
/// <summary>
/// Subject artifact digest.
/// </summary>
[JsonPropertyName("subjectDigest")]
public required string SubjectDigest { get; init; }
/// <summary>
/// Component count.
/// </summary>
[JsonPropertyName("componentCount")]
public int ComponentCount { get; init; }
/// <summary>
/// Merge metadata if this SBOM was created by merging others.
/// </summary>
[JsonPropertyName("mergeMetadata")]
public SbomMergeMetadata? MergeMetadata { get; init; }
}
/// <summary>
/// SBOM format types.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum SbomFormat
{
/// <summary>SPDX format.</summary>
Spdx,
/// <summary>CycloneDX format.</summary>
CycloneDx,
/// <summary>SWID format.</summary>
Swid
}
/// <summary>
/// Metadata about an SBOM merge operation.
/// </summary>
public sealed record SbomMergeMetadata
{
/// <summary>
/// Source SBOM references that were merged.
/// </summary>
[JsonPropertyName("sources")]
public required ImmutableArray<SbomMergeSource> Sources { get; init; }
/// <summary>
/// Merge strategy used.
/// </summary>
[JsonPropertyName("strategy")]
public required SbomMergeStrategy Strategy { get; init; }
/// <summary>
/// Timestamp of the merge.
/// </summary>
[JsonPropertyName("mergedAt")]
public required DateTimeOffset MergedAt { get; init; }
/// <summary>
/// Conflicts encountered and how they were resolved.
/// </summary>
[JsonPropertyName("conflicts")]
public ImmutableArray<SbomMergeConflict> Conflicts { get; init; } = [];
}
/// <summary>
/// Reference to an SBOM that was merged.
/// </summary>
public sealed record SbomMergeSource
{
/// <summary>
/// Source SBOM ID.
/// </summary>
[JsonPropertyName("id")]
public required string Id { get; init; }
/// <summary>
/// Source SBOM version.
/// </summary>
[JsonPropertyName("version")]
public required int Version { get; init; }
/// <summary>
/// Source SBOM digest.
/// </summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
}
/// <summary>
/// Merge strategy for SBOMs.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum SbomMergeStrategy
{
/// <summary>Union: include all components from all sources.</summary>
Union,
/// <summary>Intersection: only components present in all sources.</summary>
Intersection,
/// <summary>Latest: prefer components from most recent SBOM.</summary>
Latest,
/// <summary>Priority: use explicit priority ordering.</summary>
Priority
}
/// <summary>
/// Conflict encountered during SBOM merge.
/// </summary>
public sealed record SbomMergeConflict
{
/// <summary>
/// Component PURL that had a conflict.
/// </summary>
[JsonPropertyName("purl")]
public required string Purl { get; init; }
/// <summary>
/// Type of conflict.
/// </summary>
[JsonPropertyName("conflictType")]
public required SbomConflictType ConflictType { get; init; }
/// <summary>
/// Values from different sources.
/// </summary>
[JsonPropertyName("sourceValues")]
public required ImmutableDictionary<string, string> SourceValues { get; init; }
/// <summary>
/// Resolved value.
/// </summary>
[JsonPropertyName("resolvedValue")]
public required string ResolvedValue { get; init; }
/// <summary>
/// Resolution reason.
/// </summary>
[JsonPropertyName("resolutionReason")]
public string? ResolutionReason { get; init; }
}
/// <summary>
/// Types of SBOM merge conflicts.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum SbomConflictType
{
/// <summary>Different versions of the same package.</summary>
VersionMismatch,
/// <summary>Different digests for same version.</summary>
DigestMismatch,
/// <summary>Different license declarations.</summary>
LicenseMismatch,
/// <summary>Different supplier information.</summary>
SupplierMismatch
}
/// <summary>
/// Service for SBOM versioning and merge operations.
/// </summary>
public interface ISbomVersioningService
{
/// <summary>
/// Creates a new version of an SBOM.
/// </summary>
Task<VersionedSbom> CreateVersionAsync(
string parentId,
int parentVersion,
ReadOnlyMemory<byte> sbomContent,
SbomFormat format,
CancellationToken ct = default);
/// <summary>
/// Gets the version history of an SBOM.
/// </summary>
Task<IReadOnlyList<VersionedSbom>> GetVersionHistoryAsync(
string sbomId,
CancellationToken ct = default);
/// <summary>
/// Merges multiple SBOMs into one.
/// </summary>
Task<VersionedSbom> MergeAsync(
IReadOnlyList<SbomMergeSource> sources,
SbomMergeStrategy strategy,
CancellationToken ct = default);
/// <summary>
/// Computes the diff between two SBOM versions.
/// </summary>
Task<SbomDiff> ComputeDiffAsync(
string sbomId,
int baseVersion,
int headVersion,
CancellationToken ct = default);
}
/// <summary>
/// Diff between two SBOM versions.
/// </summary>
public sealed record SbomDiff
{
/// <summary>
/// Base SBOM reference.
/// </summary>
[JsonPropertyName("base")]
public required SbomMergeSource Base { get; init; }
/// <summary>
/// Head SBOM reference.
/// </summary>
[JsonPropertyName("head")]
public required SbomMergeSource Head { get; init; }
/// <summary>
/// Components added in head.
/// </summary>
[JsonPropertyName("added")]
public ImmutableArray<string> Added { get; init; } = [];
/// <summary>
/// Components removed from base.
/// </summary>
[JsonPropertyName("removed")]
public ImmutableArray<string> Removed { get; init; } = [];
/// <summary>
/// Components with version changes.
/// </summary>
[JsonPropertyName("versionChanged")]
public ImmutableArray<ComponentVersionChange> VersionChanged { get; init; } = [];
}
/// <summary>
/// Component version change in a diff.
/// </summary>
public sealed record ComponentVersionChange
{
/// <summary>
/// Component PURL (without version).
/// </summary>
[JsonPropertyName("purl")]
public required string Purl { get; init; }
/// <summary>
/// Version in base.
/// </summary>
[JsonPropertyName("baseVersion")]
public required string BaseVersion { get; init; }
/// <summary>
/// Version in head.
/// </summary>
[JsonPropertyName("headVersion")]
public required string HeadVersion { get; init; }
/// <summary>
/// Whether this is an upgrade or downgrade.
/// </summary>
[JsonPropertyName("direction")]
public required VersionChangeDirection Direction { get; init; }
}
/// <summary>
/// Direction of version change.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum VersionChangeDirection
{
/// <summary>Version increased.</summary>
Upgrade,
/// <summary>Version decreased.</summary>
Downgrade,
/// <summary>Cannot determine (non-semver).</summary>
Unknown
}

View File

@@ -0,0 +1,528 @@
// -----------------------------------------------------------------------------
// ZeroDayWindowTracking.cs
// Sprint: SPRINT_3850_0001_0001 (Competitive Gap Closure)
// Task: UNK-005 - Zero-day window tracking
// Description: Track exposure window for zero-day vulnerabilities.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Core.Models;
/// <summary>
/// Tracks the zero-day exposure window for a vulnerability.
/// The window is the time between exploit availability and patch/mitigation.
/// </summary>
public sealed record ZeroDayWindow
{
/// <summary>
/// Vulnerability identifier.
/// </summary>
[JsonPropertyName("vulnerabilityId")]
public required string VulnerabilityId { get; init; }
/// <summary>
/// When the vulnerability was first disclosed publicly.
/// </summary>
[JsonPropertyName("disclosedAt")]
public DateTimeOffset? DisclosedAt { get; init; }
/// <summary>
/// When an exploit was first seen in the wild.
/// </summary>
[JsonPropertyName("exploitSeenAt")]
public DateTimeOffset? ExploitSeenAt { get; init; }
/// <summary>
/// When a patch was first available.
/// </summary>
[JsonPropertyName("patchAvailableAt")]
public DateTimeOffset? PatchAvailableAt { get; init; }
/// <summary>
/// When we first detected this in the artifact.
/// </summary>
[JsonPropertyName("detectedAt")]
public required DateTimeOffset DetectedAt { get; init; }
/// <summary>
/// When the artifact was remediated (patched/mitigated).
/// </summary>
[JsonPropertyName("remediatedAt")]
public DateTimeOffset? RemediatedAt { get; init; }
/// <summary>
/// Current window status.
/// </summary>
[JsonPropertyName("status")]
public required ZeroDayWindowStatus Status { get; init; }
/// <summary>
/// Exposure duration in hours (time we were exposed).
/// </summary>
[JsonPropertyName("exposureHours")]
public double? ExposureHours { get; init; }
/// <summary>
/// Pre-disclosure exposure (time between exploit seen and disclosure).
/// </summary>
[JsonPropertyName("preDisclosureHours")]
public double? PreDisclosureHours { get; init; }
/// <summary>
/// Time from disclosure to patch availability.
/// </summary>
[JsonPropertyName("disclosureToPatchHours")]
public double? DisclosureToPatchHours { get; init; }
/// <summary>
/// Time from patch availability to our remediation.
/// </summary>
[JsonPropertyName("patchToRemediationHours")]
public double? PatchToRemediationHours { get; init; }
/// <summary>
/// Whether this was a true zero-day (exploit before patch).
/// </summary>
[JsonPropertyName("isTrueZeroDay")]
public bool IsTrueZeroDay { get; init; }
/// <summary>
/// Risk score based on exposure window (0-100).
/// </summary>
[JsonPropertyName("windowRiskScore")]
public int WindowRiskScore { get; init; }
/// <summary>
/// Timeline events.
/// </summary>
[JsonPropertyName("timeline")]
public ImmutableArray<WindowTimelineEvent> Timeline { get; init; } = [];
}
/// <summary>
/// Status of the zero-day window.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum ZeroDayWindowStatus
{
/// <summary>Actively exposed with no patch.</summary>
ActiveNoPatch,
/// <summary>Actively exposed, patch available but not applied.</summary>
ActivePatchAvailable,
/// <summary>Actively exposed, mitigated by controls.</summary>
ActiveMitigated,
/// <summary>Remediated - no longer exposed.</summary>
Remediated,
/// <summary>Unknown - insufficient data.</summary>
Unknown
}
/// <summary>
/// Timeline event for window tracking.
/// </summary>
public sealed record WindowTimelineEvent
{
/// <summary>
/// Event timestamp.
/// </summary>
[JsonPropertyName("timestamp")]
public required DateTimeOffset Timestamp { get; init; }
/// <summary>
/// Event type.
/// </summary>
[JsonPropertyName("eventType")]
public required WindowEventType EventType { get; init; }
/// <summary>
/// Event description.
/// </summary>
[JsonPropertyName("description")]
public required string Description { get; init; }
/// <summary>
/// Source of the event.
/// </summary>
[JsonPropertyName("source")]
public string? Source { get; init; }
}
/// <summary>
/// Types of window timeline events.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum WindowEventType
{
/// <summary>Vulnerability disclosed.</summary>
Disclosed,
/// <summary>Exploit seen in the wild.</summary>
ExploitSeen,
/// <summary>Patch released.</summary>
PatchReleased,
/// <summary>Detected in our artifact.</summary>
Detected,
/// <summary>Mitigation applied.</summary>
Mitigated,
/// <summary>Patch applied.</summary>
Patched,
/// <summary>Added to KEV.</summary>
AddedToKev,
/// <summary>CISA deadline set.</summary>
CisaDeadline
}
/// <summary>
/// Aggregate statistics for zero-day windows.
/// </summary>
public sealed record ZeroDayWindowStats
{
/// <summary>
/// Artifact digest.
/// </summary>
[JsonPropertyName("artifactDigest")]
public required string ArtifactDigest { get; init; }
/// <summary>
/// When stats were computed.
/// </summary>
[JsonPropertyName("computedAt")]
public required DateTimeOffset ComputedAt { get; init; }
/// <summary>
/// Total zero-day windows tracked.
/// </summary>
[JsonPropertyName("totalWindows")]
public int TotalWindows { get; init; }
/// <summary>
/// Currently active windows.
/// </summary>
[JsonPropertyName("activeWindows")]
public int ActiveWindows { get; init; }
/// <summary>
/// True zero-day count (exploit before patch).
/// </summary>
[JsonPropertyName("trueZeroDays")]
public int TrueZeroDays { get; init; }
/// <summary>
/// Average exposure hours across all windows.
/// </summary>
[JsonPropertyName("avgExposureHours")]
public double AvgExposureHours { get; init; }
/// <summary>
/// Maximum exposure hours.
/// </summary>
[JsonPropertyName("maxExposureHours")]
public double MaxExposureHours { get; init; }
/// <summary>
/// Average time from patch to remediation.
/// </summary>
[JsonPropertyName("avgPatchToRemediationHours")]
public double AvgPatchToRemediationHours { get; init; }
/// <summary>
/// Windows by status.
/// </summary>
[JsonPropertyName("byStatus")]
public ImmutableDictionary<ZeroDayWindowStatus, int> ByStatus { get; init; } =
ImmutableDictionary<ZeroDayWindowStatus, int>.Empty;
/// <summary>
/// Aggregate risk score (0-100).
/// </summary>
[JsonPropertyName("aggregateRiskScore")]
public int AggregateRiskScore { get; init; }
}
/// <summary>
/// Service for tracking zero-day windows.
/// </summary>
public interface IZeroDayWindowTracker
{
/// <summary>
/// Records a detection event.
/// </summary>
Task<ZeroDayWindow> RecordDetectionAsync(
string vulnerabilityId,
string artifactDigest,
DateTimeOffset detectedAt,
CancellationToken ct = default);
/// <summary>
/// Records a remediation event.
/// </summary>
Task<ZeroDayWindow> RecordRemediationAsync(
string vulnerabilityId,
string artifactDigest,
DateTimeOffset remediatedAt,
CancellationToken ct = default);
/// <summary>
/// Gets the current window for a vulnerability.
/// </summary>
Task<ZeroDayWindow?> GetWindowAsync(
string vulnerabilityId,
string artifactDigest,
CancellationToken ct = default);
/// <summary>
/// Gets aggregate stats for an artifact.
/// </summary>
Task<ZeroDayWindowStats> GetStatsAsync(
string artifactDigest,
CancellationToken ct = default);
}
/// <summary>
/// Calculator for zero-day window metrics.
/// </summary>
public sealed class ZeroDayWindowCalculator
{
/// <summary>
/// Computes the risk score for a window.
/// </summary>
public int ComputeRiskScore(ZeroDayWindow window)
{
var score = 0.0;
// Base score from exposure hours
if (window.ExposureHours.HasValue)
{
score = window.ExposureHours.Value switch
{
< 24 => 20,
< 72 => 40,
< 168 => 60, // 1 week
< 720 => 80, // 30 days
_ => 100
};
}
else if (window.Status == ZeroDayWindowStatus.ActiveNoPatch)
{
// Unknown duration but still exposed with no patch
score = 90;
}
else if (window.Status == ZeroDayWindowStatus.ActivePatchAvailable)
{
// Patch available but not applied
var hoursSincePatch = window.PatchAvailableAt.HasValue
? (DateTimeOffset.UtcNow - window.PatchAvailableAt.Value).TotalHours
: 0;
score = hoursSincePatch switch
{
< 24 => 30,
< 72 => 50,
< 168 => 70,
_ => 85
};
}
// Boost for true zero-day
if (window.IsTrueZeroDay)
{
score *= 1.2;
}
return Math.Clamp((int)score, 0, 100);
}
/// <summary>
/// Computes aggregate stats from a collection of windows.
/// </summary>
public ZeroDayWindowStats ComputeStats(string artifactDigest, IEnumerable<ZeroDayWindow> windows)
{
var windowList = windows.ToList();
if (windowList.Count == 0)
{
return new ZeroDayWindowStats
{
ArtifactDigest = artifactDigest,
ComputedAt = DateTimeOffset.UtcNow,
TotalWindows = 0,
AggregateRiskScore = 0
};
}
var exposureHours = windowList
.Where(w => w.ExposureHours.HasValue)
.Select(w => w.ExposureHours!.Value)
.ToList();
var patchToRemediation = windowList
.Where(w => w.PatchToRemediationHours.HasValue)
.Select(w => w.PatchToRemediationHours!.Value)
.ToList();
var byStatus = windowList
.GroupBy(w => w.Status)
.ToImmutableDictionary(g => g.Key, g => g.Count());
// Aggregate risk is max of individual risks, with boost for multiple high-risk windows
var riskScores = windowList.Select(w => w.WindowRiskScore).OrderDescending().ToList();
var aggregateRisk = riskScores.FirstOrDefault();
if (riskScores.Count(r => r >= 70) > 1)
{
aggregateRisk = Math.Min(100, aggregateRisk + 10);
}
return new ZeroDayWindowStats
{
ArtifactDigest = artifactDigest,
ComputedAt = DateTimeOffset.UtcNow,
TotalWindows = windowList.Count,
ActiveWindows = windowList.Count(w =>
w.Status == ZeroDayWindowStatus.ActiveNoPatch ||
w.Status == ZeroDayWindowStatus.ActivePatchAvailable),
TrueZeroDays = windowList.Count(w => w.IsTrueZeroDay),
AvgExposureHours = exposureHours.Count > 0 ? exposureHours.Average() : 0,
MaxExposureHours = exposureHours.Count > 0 ? exposureHours.Max() : 0,
AvgPatchToRemediationHours = patchToRemediation.Count > 0 ? patchToRemediation.Average() : 0,
ByStatus = byStatus,
AggregateRiskScore = aggregateRisk
};
}
/// <summary>
/// Builds a window with computed metrics.
/// </summary>
public ZeroDayWindow BuildWindow(
string vulnerabilityId,
DateTimeOffset detectedAt,
DateTimeOffset? disclosedAt = null,
DateTimeOffset? exploitSeenAt = null,
DateTimeOffset? patchAvailableAt = null,
DateTimeOffset? remediatedAt = null)
{
var now = DateTimeOffset.UtcNow;
var timeline = new List<WindowTimelineEvent>();
if (disclosedAt.HasValue)
{
timeline.Add(new WindowTimelineEvent
{
Timestamp = disclosedAt.Value,
EventType = WindowEventType.Disclosed,
Description = "Vulnerability publicly disclosed"
});
}
if (exploitSeenAt.HasValue)
{
timeline.Add(new WindowTimelineEvent
{
Timestamp = exploitSeenAt.Value,
EventType = WindowEventType.ExploitSeen,
Description = "Exploit observed in the wild"
});
}
if (patchAvailableAt.HasValue)
{
timeline.Add(new WindowTimelineEvent
{
Timestamp = patchAvailableAt.Value,
EventType = WindowEventType.PatchReleased,
Description = "Patch released by vendor"
});
}
timeline.Add(new WindowTimelineEvent
{
Timestamp = detectedAt,
EventType = WindowEventType.Detected,
Description = "Detected in artifact"
});
if (remediatedAt.HasValue)
{
timeline.Add(new WindowTimelineEvent
{
Timestamp = remediatedAt.Value,
EventType = WindowEventType.Patched,
Description = "Remediation applied"
});
}
// Compute metrics
double? exposureHours = null;
if (remediatedAt.HasValue)
{
var exposureStart = exploitSeenAt ?? disclosedAt ?? detectedAt;
exposureHours = (remediatedAt.Value - exposureStart).TotalHours;
}
else
{
var exposureStart = exploitSeenAt ?? disclosedAt ?? detectedAt;
exposureHours = (now - exposureStart).TotalHours;
}
double? preDisclosureHours = null;
if (exploitSeenAt.HasValue && disclosedAt.HasValue && exploitSeenAt < disclosedAt)
{
preDisclosureHours = (disclosedAt.Value - exploitSeenAt.Value).TotalHours;
}
double? disclosureToPatchHours = null;
if (disclosedAt.HasValue && patchAvailableAt.HasValue)
{
disclosureToPatchHours = (patchAvailableAt.Value - disclosedAt.Value).TotalHours;
}
double? patchToRemediationHours = null;
if (patchAvailableAt.HasValue && remediatedAt.HasValue)
{
patchToRemediationHours = (remediatedAt.Value - patchAvailableAt.Value).TotalHours;
}
var isTrueZeroDay = exploitSeenAt.HasValue &&
(!patchAvailableAt.HasValue || exploitSeenAt < patchAvailableAt);
var status = (remediatedAt.HasValue, patchAvailableAt.HasValue) switch
{
(true, _) => ZeroDayWindowStatus.Remediated,
(false, true) => ZeroDayWindowStatus.ActivePatchAvailable,
(false, false) => ZeroDayWindowStatus.ActiveNoPatch,
};
var window = new ZeroDayWindow
{
VulnerabilityId = vulnerabilityId,
DisclosedAt = disclosedAt,
ExploitSeenAt = exploitSeenAt,
PatchAvailableAt = patchAvailableAt,
DetectedAt = detectedAt,
RemediatedAt = remediatedAt,
Status = status,
ExposureHours = exposureHours,
PreDisclosureHours = preDisclosureHours,
DisclosureToPatchHours = disclosureToPatchHours,
PatchToRemediationHours = patchToRemediationHours,
IsTrueZeroDay = isTrueZeroDay,
Timeline = timeline.OrderBy(e => e.Timestamp).ToImmutableArray()
};
return window with { WindowRiskScore = ComputeRiskScore(window) };
}
}

View File

@@ -14,6 +14,7 @@
<ItemGroup>
<ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Canonical.Json/StellaOps.Canonical.Json.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
<ProjectReference Include="../StellaOps.Scanner.ProofSpine/StellaOps.Scanner.ProofSpine.csproj" />

View File

@@ -0,0 +1,150 @@
// -----------------------------------------------------------------------------
// GraphDeltaComputer.cs
// Sprint: SPRINT_3700_0006_0001_incremental_cache (CACHE-006)
// Description: Implementation of graph delta computation.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.Reachability.Cache;
/// <summary>
/// Computes deltas between call graph versions for incremental reachability.
/// </summary>
public sealed class GraphDeltaComputer : IGraphDeltaComputer
{
private readonly IGraphSnapshotStore? _snapshotStore;
private readonly ILogger<GraphDeltaComputer> _logger;
public GraphDeltaComputer(
ILogger<GraphDeltaComputer> logger,
IGraphSnapshotStore? snapshotStore = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_snapshotStore = snapshotStore;
}
/// <inheritdoc />
public Task<GraphDelta> ComputeDeltaAsync(
IGraphSnapshot previousGraph,
IGraphSnapshot currentGraph,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(previousGraph);
ArgumentNullException.ThrowIfNull(currentGraph);
// If hashes match, no changes
if (previousGraph.Hash == currentGraph.Hash)
{
_logger.LogDebug("Graph hashes match, no delta");
return Task.FromResult(GraphDelta.Empty);
}
// Compute node deltas
var addedNodes = currentGraph.NodeKeys.Except(previousGraph.NodeKeys).ToHashSet();
var removedNodes = previousGraph.NodeKeys.Except(currentGraph.NodeKeys).ToHashSet();
// Compute edge deltas
var previousEdgeSet = previousGraph.Edges.ToHashSet();
var currentEdgeSet = currentGraph.Edges.ToHashSet();
var addedEdges = currentGraph.Edges.Where(e => !previousEdgeSet.Contains(e)).ToList();
var removedEdges = previousGraph.Edges.Where(e => !currentEdgeSet.Contains(e)).ToList();
// Compute affected method keys
var affected = new HashSet<string>();
affected.UnionWith(addedNodes);
affected.UnionWith(removedNodes);
foreach (var edge in addedEdges)
{
affected.Add(edge.CallerKey);
affected.Add(edge.CalleeKey);
}
foreach (var edge in removedEdges)
{
affected.Add(edge.CallerKey);
affected.Add(edge.CalleeKey);
}
var delta = new GraphDelta
{
AddedNodes = addedNodes,
RemovedNodes = removedNodes,
AddedEdges = addedEdges,
RemovedEdges = removedEdges,
AffectedMethodKeys = affected,
PreviousHash = previousGraph.Hash,
CurrentHash = currentGraph.Hash
};
_logger.LogInformation(
"Computed graph delta: +{AddedNodes} nodes, -{RemovedNodes} nodes, +{AddedEdges} edges, -{RemovedEdges} edges, {Affected} affected",
addedNodes.Count, removedNodes.Count, addedEdges.Count, removedEdges.Count, affected.Count);
return Task.FromResult(delta);
}
/// <inheritdoc />
public async Task<GraphDelta> ComputeDeltaFromHashesAsync(
string serviceId,
string previousHash,
string currentHash,
CancellationToken cancellationToken = default)
{
if (previousHash == currentHash)
{
return GraphDelta.Empty;
}
if (_snapshotStore is null)
{
// Without snapshot store, we must do full recompute
_logger.LogWarning(
"No snapshot store available, forcing full recompute for {ServiceId}",
serviceId);
return GraphDelta.FullRecompute(previousHash, currentHash);
}
// Try to load snapshots
var previousSnapshot = await _snapshotStore.GetSnapshotAsync(serviceId, previousHash, cancellationToken);
var currentSnapshot = await _snapshotStore.GetSnapshotAsync(serviceId, currentHash, cancellationToken);
if (previousSnapshot is null || currentSnapshot is null)
{
_logger.LogWarning(
"Could not load snapshots for delta computation, forcing full recompute");
return GraphDelta.FullRecompute(previousHash, currentHash);
}
return await ComputeDeltaAsync(previousSnapshot, currentSnapshot, cancellationToken);
}
}
/// <summary>
/// Store for graph snapshots used in delta computation.
/// </summary>
public interface IGraphSnapshotStore
{
/// <summary>
/// Gets a graph snapshot by service and hash.
/// </summary>
Task<IGraphSnapshot?> GetSnapshotAsync(
string serviceId,
string graphHash,
CancellationToken cancellationToken = default);
/// <summary>
/// Stores a graph snapshot.
/// </summary>
Task StoreSnapshotAsync(
string serviceId,
IGraphSnapshot snapshot,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,136 @@
// -----------------------------------------------------------------------------
// IGraphDeltaComputer.cs
// Sprint: SPRINT_3700_0006_0001_incremental_cache (CACHE-005)
// Description: Interface for computing graph deltas between versions.
// -----------------------------------------------------------------------------
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Scanner.Reachability.Cache;
/// <summary>
/// Computes the difference between two call graphs.
/// Used to identify which (entry, sink) pairs need recomputation.
/// </summary>
public interface IGraphDeltaComputer
{
/// <summary>
/// Computes the delta between two call graphs.
/// </summary>
/// <param name="previousGraph">Previous graph state.</param>
/// <param name="currentGraph">Current graph state.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Delta result with added/removed nodes and edges.</returns>
Task<GraphDelta> ComputeDeltaAsync(
IGraphSnapshot previousGraph,
IGraphSnapshot currentGraph,
CancellationToken cancellationToken = default);
/// <summary>
/// Computes delta from graph hashes if snapshots aren't available.
/// </summary>
/// <param name="serviceId">Service identifier.</param>
/// <param name="previousHash">Previous graph hash.</param>
/// <param name="currentHash">Current graph hash.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Delta result.</returns>
Task<GraphDelta> ComputeDeltaFromHashesAsync(
string serviceId,
string previousHash,
string currentHash,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Snapshot of a call graph for delta computation.
/// </summary>
public interface IGraphSnapshot
{
/// <summary>
/// Graph hash for identity.
/// </summary>
string Hash { get; }
/// <summary>
/// All node (method) keys in the graph.
/// </summary>
IReadOnlySet<string> NodeKeys { get; }
/// <summary>
/// All edges in the graph (caller -> callee).
/// </summary>
IReadOnlyList<GraphEdge> Edges { get; }
/// <summary>
/// Entry point method keys.
/// </summary>
IReadOnlySet<string> EntryPoints { get; }
}
/// <summary>
/// An edge in the call graph.
/// </summary>
public readonly record struct GraphEdge(string CallerKey, string CalleeKey);
/// <summary>
/// Result of computing graph delta.
/// </summary>
public sealed record GraphDelta
{
/// <summary>
/// Whether there are any changes.
/// </summary>
public bool HasChanges => AddedNodes.Count > 0 || RemovedNodes.Count > 0 ||
AddedEdges.Count > 0 || RemovedEdges.Count > 0;
/// <summary>
/// Nodes added in current graph (ΔV+).
/// </summary>
public IReadOnlySet<string> AddedNodes { get; init; } = new HashSet<string>();
/// <summary>
/// Nodes removed from previous graph (ΔV-).
/// </summary>
public IReadOnlySet<string> RemovedNodes { get; init; } = new HashSet<string>();
/// <summary>
/// Edges added in current graph (ΔE+).
/// </summary>
public IReadOnlyList<GraphEdge> AddedEdges { get; init; } = [];
/// <summary>
/// Edges removed from previous graph (ΔE-).
/// </summary>
public IReadOnlyList<GraphEdge> RemovedEdges { get; init; } = [];
/// <summary>
/// All affected method keys (union of added, removed, and edge endpoints).
/// </summary>
public IReadOnlySet<string> AffectedMethodKeys { get; init; } = new HashSet<string>();
/// <summary>
/// Previous graph hash.
/// </summary>
public string? PreviousHash { get; init; }
/// <summary>
/// Current graph hash.
/// </summary>
public string? CurrentHash { get; init; }
/// <summary>
/// Creates an empty delta (no changes).
/// </summary>
public static GraphDelta Empty => new();
/// <summary>
/// Creates a full recompute delta (graph hash mismatch, must recompute all).
/// </summary>
public static GraphDelta FullRecompute(string? previousHash, string currentHash) => new()
{
PreviousHash = previousHash,
CurrentHash = currentHash
};
}

View File

@@ -0,0 +1,251 @@
// -----------------------------------------------------------------------------
// IReachabilityCache.cs
// Sprint: SPRINT_3700_0006_0001_incremental_cache (CACHE-003)
// Description: Interface for reachability result caching.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Scanner.Reachability.Cache;
/// <summary>
/// Interface for caching reachability analysis results.
/// Enables incremental recomputation by caching (entry, sink) pairs.
/// </summary>
public interface IReachabilityCache
{
/// <summary>
/// Gets cached reachability results for a service.
/// </summary>
/// <param name="serviceId">Service identifier.</param>
/// <param name="graphHash">Hash of the current call graph.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Cached result if valid, null otherwise.</returns>
Task<CachedReachabilityResult?> GetAsync(
string serviceId,
string graphHash,
CancellationToken cancellationToken = default);
/// <summary>
/// Stores reachability results in cache.
/// </summary>
/// <param name="entry">Cache entry to store.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task SetAsync(
ReachabilityCacheEntry entry,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets reachable set for a specific (entry, sink) pair.
/// </summary>
/// <param name="serviceId">Service identifier.</param>
/// <param name="entryMethodKey">Entry point method key.</param>
/// <param name="sinkMethodKey">Sink method key.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Cached reachable result if available.</returns>
Task<ReachablePairResult?> GetReachablePairAsync(
string serviceId,
string entryMethodKey,
string sinkMethodKey,
CancellationToken cancellationToken = default);
/// <summary>
/// Invalidates cache entries affected by graph changes.
/// </summary>
/// <param name="serviceId">Service identifier.</param>
/// <param name="affectedMethodKeys">Method keys that changed.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Number of invalidated entries.</returns>
Task<int> InvalidateAsync(
string serviceId,
IEnumerable<string> affectedMethodKeys,
CancellationToken cancellationToken = default);
/// <summary>
/// Invalidates all cache entries for a service.
/// </summary>
/// <param name="serviceId">Service identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task InvalidateAllAsync(
string serviceId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets cache statistics for a service.
/// </summary>
/// <param name="serviceId">Service identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Cache statistics.</returns>
Task<CacheStatistics> GetStatisticsAsync(
string serviceId,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Cached reachability analysis result.
/// </summary>
public sealed record CachedReachabilityResult
{
/// <summary>
/// Service identifier.
/// </summary>
public required string ServiceId { get; init; }
/// <summary>
/// Graph hash when results were computed.
/// </summary>
public required string GraphHash { get; init; }
/// <summary>
/// When the cache was populated.
/// </summary>
public DateTimeOffset CachedAt { get; init; }
/// <summary>
/// Time-to-live remaining.
/// </summary>
public TimeSpan? TimeToLive { get; init; }
/// <summary>
/// Cached reachable pairs.
/// </summary>
public IReadOnlyList<ReachablePairResult> ReachablePairs { get; init; } = [];
/// <summary>
/// Total entry points analyzed.
/// </summary>
public int EntryPointCount { get; init; }
/// <summary>
/// Total sinks analyzed.
/// </summary>
public int SinkCount { get; init; }
}
/// <summary>
/// Result for a single (entry, sink) reachability pair.
/// </summary>
public sealed record ReachablePairResult
{
/// <summary>
/// Entry point method key.
/// </summary>
public required string EntryMethodKey { get; init; }
/// <summary>
/// Sink method key.
/// </summary>
public required string SinkMethodKey { get; init; }
/// <summary>
/// Whether the sink is reachable from the entry.
/// </summary>
public bool IsReachable { get; init; }
/// <summary>
/// Shortest path length if reachable.
/// </summary>
public int? PathLength { get; init; }
/// <summary>
/// Confidence score.
/// </summary>
public double Confidence { get; init; }
/// <summary>
/// When this pair was last computed.
/// </summary>
public DateTimeOffset ComputedAt { get; init; }
}
/// <summary>
/// Entry for storing in the reachability cache.
/// </summary>
public sealed record ReachabilityCacheEntry
{
/// <summary>
/// Service identifier.
/// </summary>
public required string ServiceId { get; init; }
/// <summary>
/// Graph hash for cache key.
/// </summary>
public required string GraphHash { get; init; }
/// <summary>
/// SBOM hash for versioning.
/// </summary>
public string? SbomHash { get; init; }
/// <summary>
/// Reachable pairs to cache.
/// </summary>
public required IReadOnlyList<ReachablePairResult> ReachablePairs { get; init; }
/// <summary>
/// Entry points analyzed.
/// </summary>
public int EntryPointCount { get; init; }
/// <summary>
/// Sinks analyzed.
/// </summary>
public int SinkCount { get; init; }
/// <summary>
/// Time-to-live for this cache entry.
/// </summary>
public TimeSpan? TimeToLive { get; init; }
}
/// <summary>
/// Cache statistics for monitoring.
/// </summary>
public sealed record CacheStatistics
{
/// <summary>
/// Service identifier.
/// </summary>
public required string ServiceId { get; init; }
/// <summary>
/// Number of cached pairs.
/// </summary>
public int CachedPairCount { get; init; }
/// <summary>
/// Total cache hits.
/// </summary>
public long HitCount { get; init; }
/// <summary>
/// Total cache misses.
/// </summary>
public long MissCount { get; init; }
/// <summary>
/// Cache hit ratio.
/// </summary>
public double HitRatio => HitCount + MissCount > 0
? (double)HitCount / (HitCount + MissCount)
: 0.0;
/// <summary>
/// Last cache population time.
/// </summary>
public DateTimeOffset? LastPopulatedAt { get; init; }
/// <summary>
/// Last invalidation time.
/// </summary>
public DateTimeOffset? LastInvalidatedAt { get; init; }
/// <summary>
/// Current graph hash.
/// </summary>
public string? CurrentGraphHash { get; init; }
}

View File

@@ -0,0 +1,201 @@
// -----------------------------------------------------------------------------
// ImpactSetCalculator.cs
// Sprint: SPRINT_3700_0006_0001_incremental_cache (CACHE-007)
// Description: Calculates which entry/sink pairs are affected by graph changes.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.Reachability.Cache;
/// <summary>
/// Calculates the impact set: which (entry, sink) pairs need recomputation
/// based on graph delta.
/// </summary>
public interface IImpactSetCalculator
{
/// <summary>
/// Calculates which entry/sink pairs are affected by graph changes.
/// </summary>
/// <param name="delta">Graph delta.</param>
/// <param name="graph">Current graph snapshot.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Impact set with affected pairs.</returns>
Task<ImpactSet> CalculateImpactAsync(
GraphDelta delta,
IGraphSnapshot graph,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Set of (entry, sink) pairs affected by graph changes.
/// </summary>
public sealed record ImpactSet
{
/// <summary>
/// Whether full recomputation is required.
/// </summary>
public bool RequiresFullRecompute { get; init; }
/// <summary>
/// Entry points that need reanalysis.
/// </summary>
public IReadOnlySet<string> AffectedEntryPoints { get; init; } = new HashSet<string>();
/// <summary>
/// Sinks that may have changed reachability.
/// </summary>
public IReadOnlySet<string> AffectedSinks { get; init; } = new HashSet<string>();
/// <summary>
/// Specific (entry, sink) pairs that need recomputation.
/// </summary>
public IReadOnlyList<(string EntryKey, string SinkKey)> AffectedPairs { get; init; } = [];
/// <summary>
/// Estimated savings ratio compared to full recompute.
/// </summary>
public double SavingsRatio { get; init; }
/// <summary>
/// Creates an impact set requiring full recomputation.
/// </summary>
public static ImpactSet FullRecompute() => new() { RequiresFullRecompute = true };
/// <summary>
/// Creates an empty impact set (no changes needed).
/// </summary>
public static ImpactSet Empty() => new() { SavingsRatio = 1.0 };
}
/// <summary>
/// Default implementation of impact set calculator.
/// Uses BFS to find ancestors of changed nodes to determine affected entries.
/// </summary>
public sealed class ImpactSetCalculator : IImpactSetCalculator
{
private readonly ILogger<ImpactSetCalculator> _logger;
private readonly int _maxAffectedRatioForIncremental;
public ImpactSetCalculator(
ILogger<ImpactSetCalculator> logger,
int maxAffectedRatioForIncremental = 30)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_maxAffectedRatioForIncremental = maxAffectedRatioForIncremental;
}
/// <inheritdoc />
public Task<ImpactSet> CalculateImpactAsync(
GraphDelta delta,
IGraphSnapshot graph,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(delta);
ArgumentNullException.ThrowIfNull(graph);
if (!delta.HasChanges)
{
_logger.LogDebug("No graph changes, empty impact set");
return Task.FromResult(ImpactSet.Empty());
}
// Build reverse adjacency for ancestor lookup
var reverseAdj = BuildReverseAdjacency(graph.Edges);
// Find all ancestors of affected method keys
var affectedAncestors = new HashSet<string>();
foreach (var methodKey in delta.AffectedMethodKeys)
{
cancellationToken.ThrowIfCancellationRequested();
var ancestors = FindAncestors(methodKey, reverseAdj);
affectedAncestors.UnionWith(ancestors);
}
// Intersect with entry points to find affected entries
var affectedEntries = graph.EntryPoints
.Where(e => affectedAncestors.Contains(e) || delta.AffectedMethodKeys.Contains(e))
.ToHashSet();
// Check if too many entries affected (fall back to full recompute)
var affectedRatio = graph.EntryPoints.Count > 0
? (double)affectedEntries.Count / graph.EntryPoints.Count * 100
: 0;
if (affectedRatio > _maxAffectedRatioForIncremental)
{
_logger.LogInformation(
"Affected ratio {Ratio:F1}% exceeds threshold {Threshold}%, forcing full recompute",
affectedRatio, _maxAffectedRatioForIncremental);
return Task.FromResult(ImpactSet.FullRecompute());
}
// Determine affected sinks (any sink reachable from affected methods)
var affectedSinks = delta.AffectedMethodKeys.ToHashSet();
var savingsRatio = graph.EntryPoints.Count > 0
? 1.0 - ((double)affectedEntries.Count / graph.EntryPoints.Count)
: 1.0;
var impact = new ImpactSet
{
RequiresFullRecompute = false,
AffectedEntryPoints = affectedEntries,
AffectedSinks = affectedSinks,
SavingsRatio = savingsRatio
};
_logger.LogInformation(
"Impact set calculated: {AffectedEntries} entries, {AffectedSinks} potential sinks, {Savings:P1} savings",
affectedEntries.Count, affectedSinks.Count, savingsRatio);
return Task.FromResult(impact);
}
private static Dictionary<string, List<string>> BuildReverseAdjacency(IReadOnlyList<GraphEdge> edges)
{
var reverseAdj = new Dictionary<string, List<string>>();
foreach (var edge in edges)
{
if (!reverseAdj.TryGetValue(edge.CalleeKey, out var callers))
{
callers = new List<string>();
reverseAdj[edge.CalleeKey] = callers;
}
callers.Add(edge.CallerKey);
}
return reverseAdj;
}
private static HashSet<string> FindAncestors(string startNode, Dictionary<string, List<string>> reverseAdj)
{
var ancestors = new HashSet<string>();
var queue = new Queue<string>();
queue.Enqueue(startNode);
while (queue.Count > 0)
{
var current = queue.Dequeue();
if (!reverseAdj.TryGetValue(current, out var callers))
continue;
foreach (var caller in callers)
{
if (ancestors.Add(caller))
{
queue.Enqueue(caller);
}
}
}
return ancestors;
}
}

View File

@@ -0,0 +1,467 @@
// -----------------------------------------------------------------------------
// IncrementalReachabilityService.cs
// Sprint: SPRINT_3700_0006_0001_incremental_cache (CACHE-012)
// Description: Orchestrates incremental reachability analysis with caching.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.Reachability.Cache;
/// <summary>
/// Service for performing incremental reachability analysis with caching.
/// Orchestrates cache lookup, delta computation, selective recompute, and state flip detection.
/// </summary>
public interface IIncrementalReachabilityService
{
/// <summary>
/// Performs incremental reachability analysis.
/// </summary>
/// <param name="request">Analysis request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Incremental analysis result.</returns>
Task<IncrementalReachabilityResult> AnalyzeAsync(
IncrementalReachabilityRequest request,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Request for incremental reachability analysis.
/// </summary>
public sealed record IncrementalReachabilityRequest
{
/// <summary>
/// Service identifier.
/// </summary>
public required string ServiceId { get; init; }
/// <summary>
/// Current call graph snapshot.
/// </summary>
public required IGraphSnapshot CurrentGraph { get; init; }
/// <summary>
/// Sink method keys to analyze.
/// </summary>
public required IReadOnlyList<string> Sinks { get; init; }
/// <summary>
/// Whether to detect state flips.
/// </summary>
public bool DetectStateFlips { get; init; } = true;
/// <summary>
/// Whether to update cache with new results.
/// </summary>
public bool UpdateCache { get; init; } = true;
/// <summary>
/// Maximum BFS depth for reachability analysis.
/// </summary>
public int MaxDepth { get; init; } = 50;
}
/// <summary>
/// Result of incremental reachability analysis.
/// </summary>
public sealed record IncrementalReachabilityResult
{
/// <summary>
/// Service identifier.
/// </summary>
public required string ServiceId { get; init; }
/// <summary>
/// Reachability results for each (entry, sink) pair.
/// </summary>
public IReadOnlyList<ReachablePairResult> Results { get; init; } = [];
/// <summary>
/// State flip detection result.
/// </summary>
public StateFlipResult? StateFlips { get; init; }
/// <summary>
/// Whether results came from cache.
/// </summary>
public bool FromCache { get; init; }
/// <summary>
/// Whether incremental analysis was used.
/// </summary>
public bool WasIncremental { get; init; }
/// <summary>
/// Savings ratio from incremental analysis (0.0 = full recompute, 1.0 = all cached).
/// </summary>
public double SavingsRatio { get; init; }
/// <summary>
/// Analysis duration.
/// </summary>
public TimeSpan Duration { get; init; }
/// <summary>
/// Graph hash used for caching.
/// </summary>
public string? GraphHash { get; init; }
}
/// <summary>
/// Default implementation of incremental reachability service.
/// </summary>
public sealed class IncrementalReachabilityService : IIncrementalReachabilityService
{
private readonly IReachabilityCache _cache;
private readonly IGraphDeltaComputer _deltaComputer;
private readonly IImpactSetCalculator _impactCalculator;
private readonly IStateFlipDetector _stateFlipDetector;
private readonly ILogger<IncrementalReachabilityService> _logger;
public IncrementalReachabilityService(
IReachabilityCache cache,
IGraphDeltaComputer deltaComputer,
IImpactSetCalculator impactCalculator,
IStateFlipDetector stateFlipDetector,
ILogger<IncrementalReachabilityService> logger)
{
_cache = cache ?? throw new ArgumentNullException(nameof(cache));
_deltaComputer = deltaComputer ?? throw new ArgumentNullException(nameof(deltaComputer));
_impactCalculator = impactCalculator ?? throw new ArgumentNullException(nameof(impactCalculator));
_stateFlipDetector = stateFlipDetector ?? throw new ArgumentNullException(nameof(stateFlipDetector));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<IncrementalReachabilityResult> AnalyzeAsync(
IncrementalReachabilityRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var sw = Stopwatch.StartNew();
var graphHash = request.CurrentGraph.Hash;
_logger.LogInformation(
"Starting incremental reachability analysis for {ServiceId}, graph {Hash}",
request.ServiceId, graphHash);
// Step 1: Check cache for exact match
var cached = await _cache.GetAsync(request.ServiceId, graphHash, cancellationToken);
if (cached is not null)
{
IncrementalReachabilityMetrics.CacheHits.Add(1);
_logger.LogInformation("Cache hit for {ServiceId}, returning cached results", request.ServiceId);
sw.Stop();
return new IncrementalReachabilityResult
{
ServiceId = request.ServiceId,
Results = cached.ReachablePairs,
FromCache = true,
WasIncremental = false,
SavingsRatio = 1.0,
Duration = sw.Elapsed,
GraphHash = graphHash
};
}
IncrementalReachabilityMetrics.CacheMisses.Add(1);
// Step 2: Get previous cache to compute delta
var stats = await _cache.GetStatisticsAsync(request.ServiceId, cancellationToken);
var previousHash = stats.CurrentGraphHash;
GraphDelta delta;
ImpactSet impact;
IReadOnlyList<ReachablePairResult> previousResults = [];
if (previousHash is not null && previousHash != graphHash)
{
// Compute delta
delta = await _deltaComputer.ComputeDeltaFromHashesAsync(
request.ServiceId, previousHash, graphHash, cancellationToken);
impact = await _impactCalculator.CalculateImpactAsync(
delta, request.CurrentGraph, cancellationToken);
// Get previous results for state flip detection
var previousCached = await _cache.GetAsync(request.ServiceId, previousHash, cancellationToken);
previousResults = previousCached?.ReachablePairs ?? [];
}
else
{
// No previous cache, full compute
delta = GraphDelta.FullRecompute(previousHash, graphHash);
impact = ImpactSet.FullRecompute();
}
// Step 3: Compute reachability (full or incremental)
IReadOnlyList<ReachablePairResult> results;
if (impact.RequiresFullRecompute)
{
IncrementalReachabilityMetrics.FullRecomputes.Add(1);
results = ComputeFullReachability(request);
}
else
{
IncrementalReachabilityMetrics.IncrementalComputes.Add(1);
results = await ComputeIncrementalReachabilityAsync(
request, impact, previousResults, cancellationToken);
}
// Step 4: Detect state flips
StateFlipResult? stateFlips = null;
if (request.DetectStateFlips && previousResults.Count > 0)
{
stateFlips = await _stateFlipDetector.DetectFlipsAsync(
previousResults, results, cancellationToken);
}
// Step 5: Update cache
if (request.UpdateCache)
{
var entry = new ReachabilityCacheEntry
{
ServiceId = request.ServiceId,
GraphHash = graphHash,
ReachablePairs = results,
EntryPointCount = request.CurrentGraph.EntryPoints.Count,
SinkCount = request.Sinks.Count,
TimeToLive = TimeSpan.FromHours(24)
};
await _cache.SetAsync(entry, cancellationToken);
}
sw.Stop();
IncrementalReachabilityMetrics.AnalysisDurationMs.Record(sw.ElapsedMilliseconds);
_logger.LogInformation(
"Incremental analysis complete for {ServiceId}: {ResultCount} pairs, {Savings:P1} savings, {Duration}ms",
request.ServiceId, results.Count, impact.SavingsRatio, sw.ElapsedMilliseconds);
return new IncrementalReachabilityResult
{
ServiceId = request.ServiceId,
Results = results,
StateFlips = stateFlips,
FromCache = false,
WasIncremental = !impact.RequiresFullRecompute,
SavingsRatio = impact.SavingsRatio,
Duration = sw.Elapsed,
GraphHash = graphHash
};
}
private List<ReachablePairResult> ComputeFullReachability(IncrementalReachabilityRequest request)
{
var results = new List<ReachablePairResult>();
var now = DateTimeOffset.UtcNow;
// Build forward adjacency for BFS
var adj = new Dictionary<string, List<string>>();
foreach (var edge in request.CurrentGraph.Edges)
{
if (!adj.TryGetValue(edge.CallerKey, out var callees))
{
callees = new List<string>();
adj[edge.CallerKey] = callees;
}
callees.Add(edge.CalleeKey);
}
var sinkSet = request.Sinks.ToHashSet();
foreach (var entry in request.CurrentGraph.EntryPoints)
{
// BFS from entry to find reachable sinks
var reachableSinks = BfsToSinks(entry, sinkSet, adj, request.MaxDepth);
foreach (var (sink, pathLength) in reachableSinks)
{
results.Add(new ReachablePairResult
{
EntryMethodKey = entry,
SinkMethodKey = sink,
IsReachable = true,
PathLength = pathLength,
Confidence = 1.0,
ComputedAt = now
});
}
// Add unreachable pairs for sinks not reached
foreach (var sink in sinkSet.Except(reachableSinks.Keys))
{
results.Add(new ReachablePairResult
{
EntryMethodKey = entry,
SinkMethodKey = sink,
IsReachable = false,
Confidence = 1.0,
ComputedAt = now
});
}
}
return results;
}
private async Task<IReadOnlyList<ReachablePairResult>> ComputeIncrementalReachabilityAsync(
IncrementalReachabilityRequest request,
ImpactSet impact,
IReadOnlyList<ReachablePairResult> previousResults,
CancellationToken cancellationToken)
{
var results = new Dictionary<(string, string), ReachablePairResult>();
var now = DateTimeOffset.UtcNow;
// Copy unaffected results from previous
foreach (var prev in previousResults)
{
var key = (prev.EntryMethodKey, prev.SinkMethodKey);
if (!impact.AffectedEntryPoints.Contains(prev.EntryMethodKey))
{
// Entry not affected, keep previous result
results[key] = prev;
}
}
// Recompute only affected entries
var adj = new Dictionary<string, List<string>>();
foreach (var edge in request.CurrentGraph.Edges)
{
if (!adj.TryGetValue(edge.CallerKey, out var callees))
{
callees = new List<string>();
adj[edge.CallerKey] = callees;
}
callees.Add(edge.CalleeKey);
}
var sinkSet = request.Sinks.ToHashSet();
foreach (var entry in impact.AffectedEntryPoints)
{
cancellationToken.ThrowIfCancellationRequested();
if (!request.CurrentGraph.EntryPoints.Contains(entry))
continue; // Entry no longer exists
var reachableSinks = BfsToSinks(entry, sinkSet, adj, request.MaxDepth);
foreach (var (sink, pathLength) in reachableSinks)
{
var key = (entry, sink);
results[key] = new ReachablePairResult
{
EntryMethodKey = entry,
SinkMethodKey = sink,
IsReachable = true,
PathLength = pathLength,
Confidence = 1.0,
ComputedAt = now
};
}
foreach (var sink in sinkSet.Except(reachableSinks.Keys))
{
var key = (entry, sink);
results[key] = new ReachablePairResult
{
EntryMethodKey = entry,
SinkMethodKey = sink,
IsReachable = false,
Confidence = 1.0,
ComputedAt = now
};
}
}
return results.Values.ToList();
}
private static Dictionary<string, int> BfsToSinks(
string startNode,
HashSet<string> sinks,
Dictionary<string, List<string>> adj,
int maxDepth)
{
var reached = new Dictionary<string, int>();
var visited = new HashSet<string>();
var queue = new Queue<(string Node, int Depth)>();
queue.Enqueue((startNode, 0));
visited.Add(startNode);
while (queue.Count > 0)
{
var (current, depth) = queue.Dequeue();
if (depth > maxDepth)
break;
if (sinks.Contains(current))
{
reached[current] = depth;
}
if (!adj.TryGetValue(current, out var callees))
continue;
foreach (var callee in callees)
{
if (visited.Add(callee))
{
queue.Enqueue((callee, depth + 1));
}
}
}
return reached;
}
}
/// <summary>
/// Metrics for incremental reachability service.
/// </summary>
internal static class IncrementalReachabilityMetrics
{
private static readonly string MeterName = "StellaOps.Scanner.Reachability.Cache";
public static readonly System.Diagnostics.Metrics.Counter<long> CacheHits =
new System.Diagnostics.Metrics.Meter(MeterName).CreateCounter<long>(
"stellaops.reachability_cache.hits",
description: "Number of cache hits");
public static readonly System.Diagnostics.Metrics.Counter<long> CacheMisses =
new System.Diagnostics.Metrics.Meter(MeterName).CreateCounter<long>(
"stellaops.reachability_cache.misses",
description: "Number of cache misses");
public static readonly System.Diagnostics.Metrics.Counter<long> FullRecomputes =
new System.Diagnostics.Metrics.Meter(MeterName).CreateCounter<long>(
"stellaops.reachability_cache.full_recomputes",
description: "Number of full recomputes");
public static readonly System.Diagnostics.Metrics.Counter<long> IncrementalComputes =
new System.Diagnostics.Metrics.Meter(MeterName).CreateCounter<long>(
"stellaops.reachability_cache.incremental_computes",
description: "Number of incremental computes");
public static readonly System.Diagnostics.Metrics.Histogram<long> AnalysisDurationMs =
new System.Diagnostics.Metrics.Meter(MeterName).CreateHistogram<long>(
"stellaops.reachability_cache.analysis_duration_ms",
unit: "ms",
description: "Analysis duration in milliseconds");
}

View File

@@ -0,0 +1,391 @@
// -----------------------------------------------------------------------------
// PostgresReachabilityCache.cs
// Sprint: SPRINT_3700_0006_0001_incremental_cache (CACHE-004)
// Description: PostgreSQL implementation of IReachabilityCache.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Data;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Npgsql;
namespace StellaOps.Scanner.Reachability.Cache;
/// <summary>
/// PostgreSQL implementation of the reachability cache.
/// </summary>
public sealed class PostgresReachabilityCache : IReachabilityCache
{
private readonly string _connectionString;
private readonly ILogger<PostgresReachabilityCache> _logger;
public PostgresReachabilityCache(
string connectionString,
ILogger<PostgresReachabilityCache> logger)
{
_connectionString = connectionString ?? throw new ArgumentNullException(nameof(connectionString));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<CachedReachabilityResult?> GetAsync(
string serviceId,
string graphHash,
CancellationToken cancellationToken = default)
{
await using var conn = new NpgsqlConnection(_connectionString);
await conn.OpenAsync(cancellationToken);
// Get cache entry
const string entrySql = """
SELECT id, cached_at, expires_at, entry_point_count, sink_count
FROM reach_cache_entries
WHERE service_id = @serviceId AND graph_hash = @graphHash
AND (expires_at IS NULL OR expires_at > NOW())
""";
await using var entryCmd = new NpgsqlCommand(entrySql, conn);
entryCmd.Parameters.AddWithValue("@serviceId", serviceId);
entryCmd.Parameters.AddWithValue("@graphHash", graphHash);
await using var entryReader = await entryCmd.ExecuteReaderAsync(cancellationToken);
if (!await entryReader.ReadAsync(cancellationToken))
{
return null; // Cache miss
}
var entryId = entryReader.GetGuid(0);
var cachedAt = entryReader.GetDateTime(1);
var expiresAt = entryReader.IsDBNull(2) ? (DateTimeOffset?)null : entryReader.GetDateTime(2);
var entryPointCount = entryReader.GetInt32(3);
var sinkCount = entryReader.GetInt32(4);
await entryReader.CloseAsync();
// Get cached pairs
const string pairsSql = """
SELECT entry_method_key, sink_method_key, is_reachable, path_length, confidence, computed_at
FROM reach_cache_pairs
WHERE cache_entry_id = @entryId
""";
await using var pairsCmd = new NpgsqlCommand(pairsSql, conn);
pairsCmd.Parameters.AddWithValue("@entryId", entryId);
var pairs = new List<ReachablePairResult>();
await using var pairsReader = await pairsCmd.ExecuteReaderAsync(cancellationToken);
while (await pairsReader.ReadAsync(cancellationToken))
{
pairs.Add(new ReachablePairResult
{
EntryMethodKey = pairsReader.GetString(0),
SinkMethodKey = pairsReader.GetString(1),
IsReachable = pairsReader.GetBoolean(2),
PathLength = pairsReader.IsDBNull(3) ? null : pairsReader.GetInt32(3),
Confidence = pairsReader.GetDouble(4),
ComputedAt = pairsReader.GetDateTime(5)
});
}
// Update stats
await UpdateStatsAsync(conn, serviceId, isHit: true, cancellationToken: cancellationToken);
_logger.LogDebug("Cache hit for {ServiceId}, {PairCount} pairs", serviceId, pairs.Count);
return new CachedReachabilityResult
{
ServiceId = serviceId,
GraphHash = graphHash,
CachedAt = cachedAt,
TimeToLive = expiresAt.HasValue ? expiresAt.Value - DateTimeOffset.UtcNow : null,
ReachablePairs = pairs,
EntryPointCount = entryPointCount,
SinkCount = sinkCount
};
}
/// <inheritdoc />
public async Task SetAsync(
ReachabilityCacheEntry entry,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(entry);
await using var conn = new NpgsqlConnection(_connectionString);
await conn.OpenAsync(cancellationToken);
await using var tx = await conn.BeginTransactionAsync(cancellationToken);
try
{
// Delete existing entry for this service/hash
const string deleteSql = """
DELETE FROM reach_cache_entries
WHERE service_id = @serviceId AND graph_hash = @graphHash
""";
await using var deleteCmd = new NpgsqlCommand(deleteSql, conn, tx);
deleteCmd.Parameters.AddWithValue("@serviceId", entry.ServiceId);
deleteCmd.Parameters.AddWithValue("@graphHash", entry.GraphHash);
await deleteCmd.ExecuteNonQueryAsync(cancellationToken);
// Insert new cache entry
var reachableCount = 0;
var unreachableCount = 0;
foreach (var pair in entry.ReachablePairs)
{
if (pair.IsReachable) reachableCount++;
else unreachableCount++;
}
var expiresAt = entry.TimeToLive.HasValue
? (object)DateTimeOffset.UtcNow.Add(entry.TimeToLive.Value)
: DBNull.Value;
const string insertEntrySql = """
INSERT INTO reach_cache_entries
(service_id, graph_hash, sbom_hash, entry_point_count, sink_count,
pair_count, reachable_count, unreachable_count, expires_at)
VALUES
(@serviceId, @graphHash, @sbomHash, @entryPointCount, @sinkCount,
@pairCount, @reachableCount, @unreachableCount, @expiresAt)
RETURNING id
""";
await using var insertCmd = new NpgsqlCommand(insertEntrySql, conn, tx);
insertCmd.Parameters.AddWithValue("@serviceId", entry.ServiceId);
insertCmd.Parameters.AddWithValue("@graphHash", entry.GraphHash);
insertCmd.Parameters.AddWithValue("@sbomHash", entry.SbomHash ?? (object)DBNull.Value);
insertCmd.Parameters.AddWithValue("@entryPointCount", entry.EntryPointCount);
insertCmd.Parameters.AddWithValue("@sinkCount", entry.SinkCount);
insertCmd.Parameters.AddWithValue("@pairCount", entry.ReachablePairs.Count);
insertCmd.Parameters.AddWithValue("@reachableCount", reachableCount);
insertCmd.Parameters.AddWithValue("@unreachableCount", unreachableCount);
insertCmd.Parameters.AddWithValue("@expiresAt", expiresAt);
var entryId = (Guid)(await insertCmd.ExecuteScalarAsync(cancellationToken))!;
// Insert pairs in batches
if (entry.ReachablePairs.Count > 0)
{
await InsertPairsBatchAsync(conn, tx, entryId, entry.ReachablePairs, cancellationToken);
}
await tx.CommitAsync(cancellationToken);
// Update stats
await UpdateStatsAsync(conn, entry.ServiceId, isHit: false, entry.GraphHash, cancellationToken);
_logger.LogInformation(
"Cached {PairCount} pairs for {ServiceId}, graph {Hash}",
entry.ReachablePairs.Count, entry.ServiceId, entry.GraphHash);
}
catch
{
await tx.RollbackAsync(cancellationToken);
throw;
}
}
/// <inheritdoc />
public async Task<ReachablePairResult?> GetReachablePairAsync(
string serviceId,
string entryMethodKey,
string sinkMethodKey,
CancellationToken cancellationToken = default)
{
await using var conn = new NpgsqlConnection(_connectionString);
await conn.OpenAsync(cancellationToken);
const string sql = """
SELECT p.is_reachable, p.path_length, p.confidence, p.computed_at
FROM reach_cache_pairs p
JOIN reach_cache_entries e ON p.cache_entry_id = e.id
WHERE e.service_id = @serviceId
AND p.entry_method_key = @entryKey
AND p.sink_method_key = @sinkKey
AND (e.expires_at IS NULL OR e.expires_at > NOW())
ORDER BY e.cached_at DESC
LIMIT 1
""";
await using var cmd = new NpgsqlCommand(sql, conn);
cmd.Parameters.AddWithValue("@serviceId", serviceId);
cmd.Parameters.AddWithValue("@entryKey", entryMethodKey);
cmd.Parameters.AddWithValue("@sinkKey", sinkMethodKey);
await using var reader = await cmd.ExecuteReaderAsync(cancellationToken);
if (!await reader.ReadAsync(cancellationToken))
{
return null;
}
return new ReachablePairResult
{
EntryMethodKey = entryMethodKey,
SinkMethodKey = sinkMethodKey,
IsReachable = reader.GetBoolean(0),
PathLength = reader.IsDBNull(1) ? null : reader.GetInt32(1),
Confidence = reader.GetDouble(2),
ComputedAt = reader.GetDateTime(3)
};
}
/// <inheritdoc />
public async Task<int> InvalidateAsync(
string serviceId,
IEnumerable<string> affectedMethodKeys,
CancellationToken cancellationToken = default)
{
await using var conn = new NpgsqlConnection(_connectionString);
await conn.OpenAsync(cancellationToken);
// For now, invalidate entire cache for service
// More granular invalidation would require additional indices
const string sql = """
DELETE FROM reach_cache_entries
WHERE service_id = @serviceId
""";
await using var cmd = new NpgsqlCommand(sql, conn);
cmd.Parameters.AddWithValue("@serviceId", serviceId);
var deleted = await cmd.ExecuteNonQueryAsync(cancellationToken);
if (deleted > 0)
{
await UpdateInvalidationTimeAsync(conn, serviceId, cancellationToken);
_logger.LogInformation("Invalidated {Count} cache entries for {ServiceId}", deleted, serviceId);
}
return deleted;
}
/// <inheritdoc />
public async Task InvalidateAllAsync(
string serviceId,
CancellationToken cancellationToken = default)
{
await InvalidateAsync(serviceId, Array.Empty<string>(), cancellationToken);
}
/// <inheritdoc />
public async Task<CacheStatistics> GetStatisticsAsync(
string serviceId,
CancellationToken cancellationToken = default)
{
await using var conn = new NpgsqlConnection(_connectionString);
await conn.OpenAsync(cancellationToken);
const string sql = """
SELECT total_hits, total_misses, full_recomputes, incremental_computes,
current_graph_hash, last_populated_at, last_invalidated_at
FROM reach_cache_stats
WHERE service_id = @serviceId
""";
await using var cmd = new NpgsqlCommand(sql, conn);
cmd.Parameters.AddWithValue("@serviceId", serviceId);
await using var reader = await cmd.ExecuteReaderAsync(cancellationToken);
if (!await reader.ReadAsync(cancellationToken))
{
return new CacheStatistics { ServiceId = serviceId };
}
// Get cached pair count
await reader.CloseAsync();
const string countSql = """
SELECT COALESCE(SUM(pair_count), 0)
FROM reach_cache_entries
WHERE service_id = @serviceId AND (expires_at IS NULL OR expires_at > NOW())
""";
await using var countCmd = new NpgsqlCommand(countSql, conn);
countCmd.Parameters.AddWithValue("@serviceId", serviceId);
var pairCount = Convert.ToInt32(await countCmd.ExecuteScalarAsync(cancellationToken));
return new CacheStatistics
{
ServiceId = serviceId,
CachedPairCount = pairCount,
HitCount = reader.GetInt64(0),
MissCount = reader.GetInt64(1),
LastPopulatedAt = reader.IsDBNull(5) ? null : reader.GetDateTime(5),
LastInvalidatedAt = reader.IsDBNull(6) ? null : reader.GetDateTime(6),
CurrentGraphHash = reader.IsDBNull(4) ? null : reader.GetString(4)
};
}
private async Task InsertPairsBatchAsync(
NpgsqlConnection conn,
NpgsqlTransaction tx,
Guid entryId,
IReadOnlyList<ReachablePairResult> pairs,
CancellationToken cancellationToken)
{
await using var writer = await conn.BeginBinaryImportAsync(
"COPY reach_cache_pairs (cache_entry_id, entry_method_key, sink_method_key, is_reachable, path_length, confidence, computed_at) FROM STDIN (FORMAT BINARY)",
cancellationToken);
foreach (var pair in pairs)
{
await writer.StartRowAsync(cancellationToken);
await writer.WriteAsync(entryId, NpgsqlTypes.NpgsqlDbType.Uuid, cancellationToken);
await writer.WriteAsync(pair.EntryMethodKey, NpgsqlTypes.NpgsqlDbType.Text, cancellationToken);
await writer.WriteAsync(pair.SinkMethodKey, NpgsqlTypes.NpgsqlDbType.Text, cancellationToken);
await writer.WriteAsync(pair.IsReachable, NpgsqlTypes.NpgsqlDbType.Boolean, cancellationToken);
if (pair.PathLength.HasValue)
await writer.WriteAsync(pair.PathLength.Value, NpgsqlTypes.NpgsqlDbType.Integer, cancellationToken);
else
await writer.WriteNullAsync(cancellationToken);
await writer.WriteAsync(pair.Confidence, NpgsqlTypes.NpgsqlDbType.Double, cancellationToken);
await writer.WriteAsync(pair.ComputedAt.UtcDateTime, NpgsqlTypes.NpgsqlDbType.TimestampTz, cancellationToken);
}
await writer.CompleteAsync(cancellationToken);
}
private static async Task UpdateStatsAsync(
NpgsqlConnection conn,
string serviceId,
bool isHit,
string? graphHash = null,
CancellationToken cancellationToken = default)
{
const string sql = "SELECT update_reach_cache_stats(@serviceId, @isHit, NULL, @graphHash)";
await using var cmd = new NpgsqlCommand(sql, conn);
cmd.Parameters.AddWithValue("@serviceId", serviceId);
cmd.Parameters.AddWithValue("@isHit", isHit);
cmd.Parameters.AddWithValue("@graphHash", graphHash ?? (object)DBNull.Value);
await cmd.ExecuteNonQueryAsync(cancellationToken);
}
private static async Task UpdateInvalidationTimeAsync(
NpgsqlConnection conn,
string serviceId,
CancellationToken cancellationToken)
{
const string sql = """
UPDATE reach_cache_stats
SET last_invalidated_at = NOW(), updated_at = NOW()
WHERE service_id = @serviceId
""";
await using var cmd = new NpgsqlCommand(sql, conn);
cmd.Parameters.AddWithValue("@serviceId", serviceId);
await cmd.ExecuteNonQueryAsync(cancellationToken);
}
}

View File

@@ -0,0 +1,264 @@
// -----------------------------------------------------------------------------
// StateFlipDetector.cs
// Sprint: SPRINT_3700_0006_0001_incremental_cache (CACHE-011)
// Description: Detects reachability state changes between scans.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.Reachability.Cache;
/// <summary>
/// Detects state flips: transitions between reachable and unreachable states.
/// Used for PR gates and change tracking.
/// </summary>
public interface IStateFlipDetector
{
/// <summary>
/// Detects state flips between previous and current reachability results.
/// </summary>
/// <param name="previous">Previous scan results.</param>
/// <param name="current">Current scan results.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>State flip detection result.</returns>
Task<StateFlipResult> DetectFlipsAsync(
IReadOnlyList<ReachablePairResult> previous,
IReadOnlyList<ReachablePairResult> current,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of state flip detection.
/// </summary>
public sealed record StateFlipResult
{
/// <summary>
/// Whether any state flips occurred.
/// </summary>
public bool HasFlips => NewlyReachable.Count > 0 || NewlyUnreachable.Count > 0;
/// <summary>
/// Pairs that became reachable (were unreachable, now reachable).
/// This represents NEW RISK.
/// </summary>
public IReadOnlyList<StateFlip> NewlyReachable { get; init; } = [];
/// <summary>
/// Pairs that became unreachable (were reachable, now unreachable).
/// This represents MITIGATED risk.
/// </summary>
public IReadOnlyList<StateFlip> NewlyUnreachable { get; init; } = [];
/// <summary>
/// Count of new risks introduced.
/// </summary>
public int NewRiskCount => NewlyReachable.Count;
/// <summary>
/// Count of mitigated risks.
/// </summary>
public int MitigatedCount => NewlyUnreachable.Count;
/// <summary>
/// Net change in reachable vulnerability paths.
/// Positive = more risk, Negative = less risk.
/// </summary>
public int NetChange => NewlyReachable.Count - NewlyUnreachable.Count;
/// <summary>
/// Summary for PR annotation.
/// </summary>
public string Summary => HasFlips
? $"Reachability changed: +{NewRiskCount} new paths, -{MitigatedCount} removed paths"
: "No reachability changes";
/// <summary>
/// Whether this should block a PR (new reachable paths introduced).
/// </summary>
public bool ShouldBlockPr => NewlyReachable.Count > 0;
/// <summary>
/// Creates an empty result (no flips).
/// </summary>
public static StateFlipResult Empty => new();
}
/// <summary>
/// A single state flip event.
/// </summary>
public sealed record StateFlip
{
/// <summary>
/// Entry point method key.
/// </summary>
public required string EntryMethodKey { get; init; }
/// <summary>
/// Sink method key.
/// </summary>
public required string SinkMethodKey { get; init; }
/// <summary>
/// Previous state (reachable = true, unreachable = false).
/// </summary>
public bool WasReachable { get; init; }
/// <summary>
/// New state.
/// </summary>
public bool IsReachable { get; init; }
/// <summary>
/// Type of flip.
/// </summary>
public StateFlipType FlipType => IsReachable ? StateFlipType.BecameReachable : StateFlipType.BecameUnreachable;
/// <summary>
/// Associated CVE if applicable.
/// </summary>
public string? CveId { get; init; }
/// <summary>
/// Package name if applicable.
/// </summary>
public string? PackageName { get; init; }
}
/// <summary>
/// Type of state flip.
/// </summary>
public enum StateFlipType
{
/// <summary>
/// Was unreachable, now reachable (NEW RISK).
/// </summary>
BecameReachable,
/// <summary>
/// Was reachable, now unreachable (MITIGATED).
/// </summary>
BecameUnreachable
}
/// <summary>
/// Default implementation of state flip detector.
/// </summary>
public sealed class StateFlipDetector : IStateFlipDetector
{
private readonly ILogger<StateFlipDetector> _logger;
public StateFlipDetector(ILogger<StateFlipDetector> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public Task<StateFlipResult> DetectFlipsAsync(
IReadOnlyList<ReachablePairResult> previous,
IReadOnlyList<ReachablePairResult> current,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(previous);
ArgumentNullException.ThrowIfNull(current);
// Build lookup for previous state
var previousState = previous.ToDictionary(
p => (p.EntryMethodKey, p.SinkMethodKey),
p => p.IsReachable);
// Build lookup for current state
var currentState = current.ToDictionary(
p => (p.EntryMethodKey, p.SinkMethodKey),
p => p.IsReachable);
var newlyReachable = new List<StateFlip>();
var newlyUnreachable = new List<StateFlip>();
// Check all current pairs for flips
foreach (var pair in current)
{
cancellationToken.ThrowIfCancellationRequested();
var key = (pair.EntryMethodKey, pair.SinkMethodKey);
if (previousState.TryGetValue(key, out var wasReachable))
{
if (!wasReachable && pair.IsReachable)
{
// Was unreachable, now reachable = NEW RISK
newlyReachable.Add(new StateFlip
{
EntryMethodKey = pair.EntryMethodKey,
SinkMethodKey = pair.SinkMethodKey,
WasReachable = false,
IsReachable = true
});
}
else if (wasReachable && !pair.IsReachable)
{
// Was reachable, now unreachable = MITIGATED
newlyUnreachable.Add(new StateFlip
{
EntryMethodKey = pair.EntryMethodKey,
SinkMethodKey = pair.SinkMethodKey,
WasReachable = true,
IsReachable = false
});
}
}
else if (pair.IsReachable)
{
// New pair that is reachable = NEW RISK
newlyReachable.Add(new StateFlip
{
EntryMethodKey = pair.EntryMethodKey,
SinkMethodKey = pair.SinkMethodKey,
WasReachable = false,
IsReachable = true
});
}
}
// Check for pairs that existed previously but no longer exist (removed code = mitigated)
foreach (var prevPair in previous.Where(p => p.IsReachable))
{
var key = (prevPair.EntryMethodKey, prevPair.SinkMethodKey);
if (!currentState.ContainsKey(key))
{
// Pair no longer exists and was reachable = MITIGATED
newlyUnreachable.Add(new StateFlip
{
EntryMethodKey = prevPair.EntryMethodKey,
SinkMethodKey = prevPair.SinkMethodKey,
WasReachable = true,
IsReachable = false
});
}
}
var result = new StateFlipResult
{
NewlyReachable = newlyReachable,
NewlyUnreachable = newlyUnreachable
};
if (result.HasFlips)
{
_logger.LogInformation(
"State flips detected: +{NewRisk} new reachable, -{Mitigated} unreachable (net: {Net})",
result.NewRiskCount, result.MitigatedCount, result.NetChange);
}
else
{
_logger.LogDebug("No state flips detected");
}
return Task.FromResult(result);
}
}

View File

@@ -4,6 +4,11 @@
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Npgsql" Version="9.0.3" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Scanner.Cache\StellaOps.Scanner.Cache.csproj" />
<ProjectReference Include="..\StellaOps.Scanner.ProofSpine\StellaOps.Scanner.ProofSpine.csproj" />
@@ -11,6 +16,7 @@
<ProjectReference Include="..\StellaOps.Scanner.SmartDiff\StellaOps.Scanner.SmartDiff.csproj" />
<ProjectReference Include="..\..\StellaOps.Scanner.Analyzers.Native\StellaOps.Scanner.Analyzers.Native.csproj" />
<ProjectReference Include="..\..\..\Attestor\StellaOps.Attestor\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj" />
<ProjectReference Include="..\..\..\Attestor\StellaOps.Attestor.Envelope\StellaOps.Attestor.Envelope.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Replay.Core\StellaOps.Replay.Core.csproj" />
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
</ItemGroup>

View File

@@ -0,0 +1,238 @@
// -----------------------------------------------------------------------------
// ISurfaceQueryService.cs
// Sprint: SPRINT_3700_0004_0001_reachability_integration (REACH-001)
// Description: Interface for querying vulnerability surfaces during scans.
// -----------------------------------------------------------------------------
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Scanner.Reachability.Surfaces;
/// <summary>
/// Service for querying vulnerability surfaces to resolve trigger methods for reachability analysis.
/// </summary>
public interface ISurfaceQueryService
{
/// <summary>
/// Queries the vulnerability surface for a specific CVE and package.
/// </summary>
/// <param name="request">Query request with CVE and package details.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Query result with trigger methods or fallback indicators.</returns>
Task<SurfaceQueryResult> QueryAsync(
SurfaceQueryRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Bulk query for multiple CVE/package combinations.
/// </summary>
/// <param name="requests">Collection of query requests.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Dictionary of results keyed by query key.</returns>
Task<IReadOnlyDictionary<string, SurfaceQueryResult>> QueryBulkAsync(
IEnumerable<SurfaceQueryRequest> requests,
CancellationToken cancellationToken = default);
/// <summary>
/// Checks if a surface exists for the given CVE and package.
/// </summary>
/// <param name="cveId">CVE identifier.</param>
/// <param name="ecosystem">Package ecosystem.</param>
/// <param name="packageName">Package name.</param>
/// <param name="version">Package version.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if surface exists.</returns>
Task<bool> ExistsAsync(
string cveId,
string ecosystem,
string packageName,
string version,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Request to query a vulnerability surface.
/// </summary>
public sealed record SurfaceQueryRequest
{
/// <summary>
/// CVE identifier.
/// </summary>
public required string CveId { get; init; }
/// <summary>
/// Package ecosystem (nuget, npm, maven, pypi).
/// </summary>
public required string Ecosystem { get; init; }
/// <summary>
/// Package name.
/// </summary>
public required string PackageName { get; init; }
/// <summary>
/// Vulnerable package version.
/// </summary>
public required string Version { get; init; }
/// <summary>
/// Whether to include internal paths in the result.
/// </summary>
public bool IncludePaths { get; init; }
/// <summary>
/// Maximum number of triggers to return.
/// </summary>
public int MaxTriggers { get; init; } = 100;
/// <summary>
/// Gets a unique key for caching/batching.
/// </summary>
public string QueryKey => $"{CveId}|{Ecosystem}|{PackageName}|{Version}";
}
/// <summary>
/// Result of a vulnerability surface query.
/// </summary>
public sealed record SurfaceQueryResult
{
/// <summary>
/// Whether a surface was found.
/// </summary>
public bool SurfaceFound { get; init; }
/// <summary>
/// The source of sink methods for reachability analysis.
/// </summary>
public SinkSource Source { get; init; }
/// <summary>
/// Surface ID if found.
/// </summary>
public Guid? SurfaceId { get; init; }
/// <summary>
/// Trigger method keys (public API entry points).
/// </summary>
public IReadOnlyList<TriggerMethodInfo> Triggers { get; init; } = [];
/// <summary>
/// Sink method keys (changed vulnerability methods).
/// </summary>
public IReadOnlyList<string> Sinks { get; init; } = [];
/// <summary>
/// Error message if query failed.
/// </summary>
public string? Error { get; init; }
/// <summary>
/// When the surface was computed.
/// </summary>
public DateTimeOffset? ComputedAt { get; init; }
/// <summary>
/// Creates a result indicating surface was found.
/// </summary>
public static SurfaceQueryResult Found(
Guid surfaceId,
IReadOnlyList<TriggerMethodInfo> triggers,
IReadOnlyList<string> sinks,
DateTimeOffset computedAt)
{
return new SurfaceQueryResult
{
SurfaceFound = true,
Source = SinkSource.Surface,
SurfaceId = surfaceId,
Triggers = triggers,
Sinks = sinks,
ComputedAt = computedAt
};
}
/// <summary>
/// Creates a result indicating fallback to package API.
/// </summary>
public static SurfaceQueryResult FallbackToPackageApi(string reason)
{
return new SurfaceQueryResult
{
SurfaceFound = false,
Source = SinkSource.PackageApi,
Error = reason
};
}
/// <summary>
/// Creates a result indicating no surface data available.
/// </summary>
public static SurfaceQueryResult NotFound(string cveId, string packageName)
{
return new SurfaceQueryResult
{
SurfaceFound = false,
Source = SinkSource.FallbackAll,
Error = $"No surface found for {cveId} in {packageName}"
};
}
}
/// <summary>
/// Information about a trigger method.
/// </summary>
public sealed record TriggerMethodInfo
{
/// <summary>
/// Fully qualified method key.
/// </summary>
public required string MethodKey { get; init; }
/// <summary>
/// Simple method name.
/// </summary>
public required string MethodName { get; init; }
/// <summary>
/// Declaring type.
/// </summary>
public required string DeclaringType { get; init; }
/// <summary>
/// Number of sinks reachable from this trigger.
/// </summary>
public int SinkCount { get; init; }
/// <summary>
/// Shortest path length to any sink.
/// </summary>
public int ShortestPathLength { get; init; }
/// <summary>
/// Whether this trigger is an interface method.
/// </summary>
public bool IsInterfaceTrigger { get; init; }
}
/// <summary>
/// Source of sink methods for reachability analysis.
/// </summary>
public enum SinkSource
{
/// <summary>
/// Sinks from computed vulnerability surface (highest precision).
/// </summary>
Surface,
/// <summary>
/// Sinks from package public API (medium precision).
/// </summary>
PackageApi,
/// <summary>
/// Fallback: all methods in package (lowest precision).
/// </summary>
FallbackAll
}

View File

@@ -0,0 +1,104 @@
// -----------------------------------------------------------------------------
// ISurfaceRepository.cs
// Sprint: SPRINT_3700_0004_0001_reachability_integration (REACH-002)
// Description: Repository interface for vulnerability surface data access.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Scanner.Reachability.Surfaces;
/// <summary>
/// Repository for accessing vulnerability surface data.
/// </summary>
public interface ISurfaceRepository
{
/// <summary>
/// Gets a vulnerability surface by CVE and package.
/// </summary>
Task<SurfaceInfo?> GetSurfaceAsync(
string cveId,
string ecosystem,
string packageName,
string version,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets trigger methods for a surface.
/// </summary>
Task<IReadOnlyList<TriggerMethodInfo>> GetTriggersAsync(
Guid surfaceId,
int maxCount,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets sink method keys for a surface.
/// </summary>
Task<IReadOnlyList<string>> GetSinksAsync(
Guid surfaceId,
CancellationToken cancellationToken = default);
/// <summary>
/// Checks if a surface exists.
/// </summary>
Task<bool> ExistsAsync(
string cveId,
string ecosystem,
string packageName,
string version,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Information about a vulnerability surface.
/// </summary>
public sealed record SurfaceInfo
{
/// <summary>
/// Surface ID.
/// </summary>
public Guid Id { get; init; }
/// <summary>
/// CVE identifier.
/// </summary>
public required string CveId { get; init; }
/// <summary>
/// Package ecosystem.
/// </summary>
public required string Ecosystem { get; init; }
/// <summary>
/// Package name.
/// </summary>
public required string PackageName { get; init; }
/// <summary>
/// Vulnerable version.
/// </summary>
public required string VulnVersion { get; init; }
/// <summary>
/// Fixed version.
/// </summary>
public string? FixedVersion { get; init; }
/// <summary>
/// When the surface was computed.
/// </summary>
public DateTimeOffset ComputedAt { get; init; }
/// <summary>
/// Number of changed methods (sinks).
/// </summary>
public int ChangedMethodCount { get; init; }
/// <summary>
/// Number of trigger methods.
/// </summary>
public int TriggerCount { get; init; }
}

View File

@@ -0,0 +1,97 @@
// -----------------------------------------------------------------------------
// ReachabilityConfidenceTier.cs
// Sprint: SPRINT_3700_0004_0001_reachability_integration (REACH-004)
// Description: Confidence tiers for reachability analysis results.
// -----------------------------------------------------------------------------
namespace StellaOps.Scanner.Reachability;
/// <summary>
/// Confidence tier for reachability analysis results.
/// Higher tiers indicate more precise and actionable findings.
/// </summary>
public enum ReachabilityConfidenceTier
{
/// <summary>
/// Confirmed reachable: Surface + trigger method reachable.
/// Path from entrypoint to specific trigger method that reaches vulnerable code.
/// Highest confidence - "You WILL hit the vulnerable code via this path."
/// </summary>
Confirmed = 100,
/// <summary>
/// Likely reachable: No surface but package API is called.
/// Path to public API of vulnerable package exists.
/// Medium confidence - "You call the package; vulnerability MAY be triggered."
/// </summary>
Likely = 75,
/// <summary>
/// Present: Package is in dependency tree but no call graph data.
/// Dependency exists but reachability cannot be determined.
/// Lower confidence - "Package is present; impact unknown."
/// </summary>
Present = 50,
/// <summary>
/// Unreachable: No path to vulnerable code found.
/// Surface analyzed, no triggers reached from entrypoints.
/// Evidence for not_affected VEX status.
/// </summary>
Unreachable = 25,
/// <summary>
/// Unknown: Insufficient data to determine reachability.
/// </summary>
Unknown = 0
}
/// <summary>
/// Extension methods for ReachabilityConfidenceTier.
/// </summary>
public static class ReachabilityConfidenceTierExtensions
{
/// <summary>
/// Gets human-readable description of the confidence tier.
/// </summary>
public static string GetDescription(this ReachabilityConfidenceTier tier) => tier switch
{
ReachabilityConfidenceTier.Confirmed => "Confirmed reachable via trigger method",
ReachabilityConfidenceTier.Likely => "Likely reachable via package API",
ReachabilityConfidenceTier.Present => "Package present but reachability undetermined",
ReachabilityConfidenceTier.Unreachable => "No path to vulnerable code found",
ReachabilityConfidenceTier.Unknown => "Insufficient data for analysis",
_ => "Unknown confidence tier"
};
/// <summary>
/// Gets the VEX status recommendation for this tier.
/// </summary>
public static string GetVexRecommendation(this ReachabilityConfidenceTier tier) => tier switch
{
ReachabilityConfidenceTier.Confirmed => "affected",
ReachabilityConfidenceTier.Likely => "under_investigation",
ReachabilityConfidenceTier.Present => "under_investigation",
ReachabilityConfidenceTier.Unreachable => "not_affected",
ReachabilityConfidenceTier.Unknown => "under_investigation",
_ => "under_investigation"
};
/// <summary>
/// Checks if this tier indicates potential impact.
/// </summary>
public static bool IndicatesImpact(this ReachabilityConfidenceTier tier) =>
tier is ReachabilityConfidenceTier.Confirmed or ReachabilityConfidenceTier.Likely;
/// <summary>
/// Checks if this tier can provide evidence for not_affected.
/// </summary>
public static bool CanBeNotAffected(this ReachabilityConfidenceTier tier) =>
tier is ReachabilityConfidenceTier.Unreachable;
/// <summary>
/// Gets a confidence score (0.0 - 1.0) for this tier.
/// </summary>
public static double GetConfidenceScore(this ReachabilityConfidenceTier tier) =>
(int)tier / 100.0;
}

View File

@@ -0,0 +1,473 @@
// -----------------------------------------------------------------------------
// SurfaceAwareReachabilityAnalyzer.cs
// Sprint: SPRINT_3700_0004_0001_reachability_integration (REACH-005, REACH-006, REACH-009)
// Description: Reachability analyzer that uses vulnerability surfaces for precise sink resolution.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.Reachability.Surfaces;
/// <summary>
/// Reachability analyzer that integrates with vulnerability surfaces
/// for precise trigger-based sink resolution.
/// </summary>
public sealed class SurfaceAwareReachabilityAnalyzer : ISurfaceAwareReachabilityAnalyzer
{
private readonly ISurfaceQueryService _surfaceQuery;
private readonly IReachabilityGraphService _graphService;
private readonly ILogger<SurfaceAwareReachabilityAnalyzer> _logger;
public SurfaceAwareReachabilityAnalyzer(
ISurfaceQueryService surfaceQuery,
IReachabilityGraphService graphService,
ILogger<SurfaceAwareReachabilityAnalyzer> logger)
{
_surfaceQuery = surfaceQuery ?? throw new ArgumentNullException(nameof(surfaceQuery));
_graphService = graphService ?? throw new ArgumentNullException(nameof(graphService));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<SurfaceAwareReachabilityResult> AnalyzeAsync(
SurfaceAwareReachabilityRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var sw = Stopwatch.StartNew();
var findings = new List<SurfaceReachabilityFinding>();
// Query surfaces for all vulnerabilities
var surfaceRequests = request.Vulnerabilities
.Select(v => new SurfaceQueryRequest
{
CveId = v.CveId,
Ecosystem = v.Ecosystem,
PackageName = v.PackageName,
Version = v.Version,
IncludePaths = true
})
.ToList();
var surfaceResults = await _surfaceQuery.QueryBulkAsync(surfaceRequests, cancellationToken);
foreach (var vuln in request.Vulnerabilities)
{
cancellationToken.ThrowIfCancellationRequested();
var queryKey = $"{vuln.CveId}|{vuln.Ecosystem}|{vuln.PackageName}|{vuln.Version}";
if (!surfaceResults.TryGetValue(queryKey, out var surface))
{
// No surface result - should not happen but handle gracefully
findings.Add(CreateUnknownFinding(vuln, "No surface query result"));
continue;
}
var finding = await AnalyzeVulnerabilityAsync(vuln, surface, request.CallGraph, cancellationToken);
findings.Add(finding);
}
sw.Stop();
// Compute summary statistics
var confirmedCount = findings.Count(f => f.ConfidenceTier == ReachabilityConfidenceTier.Confirmed);
var likelyCount = findings.Count(f => f.ConfidenceTier == ReachabilityConfidenceTier.Likely);
var unreachableCount = findings.Count(f => f.ConfidenceTier == ReachabilityConfidenceTier.Unreachable);
_logger.LogInformation(
"Surface-aware reachability analysis complete: {Total} vulns, {Confirmed} confirmed, {Likely} likely, {Unreachable} unreachable in {Duration}ms",
findings.Count, confirmedCount, likelyCount, unreachableCount, sw.ElapsedMilliseconds);
return new SurfaceAwareReachabilityResult
{
Findings = findings,
TotalVulnerabilities = findings.Count,
ConfirmedReachable = confirmedCount,
LikelyReachable = likelyCount,
Unreachable = unreachableCount,
AnalysisDuration = sw.Elapsed
};
}
private async Task<SurfaceReachabilityFinding> AnalyzeVulnerabilityAsync(
VulnerabilityInfo vuln,
SurfaceQueryResult surface,
ICallGraphAccessor? callGraph,
CancellationToken cancellationToken)
{
// Determine sink source and resolve sinks
IReadOnlyList<string> sinks;
SinkSource sinkSource;
if (surface.SurfaceFound && surface.Triggers.Count > 0)
{
// Use trigger methods as sinks (highest precision)
sinks = surface.Triggers.Select(t => t.MethodKey).ToList();
sinkSource = SinkSource.Surface;
_logger.LogDebug(
"{CveId}/{PackageName}: Using {TriggerCount} trigger methods from surface",
vuln.CveId, vuln.PackageName, sinks.Count);
}
else if (surface.Source == SinkSource.PackageApi)
{
// Fallback to package API methods
sinks = await ResolvePackageApiMethodsAsync(vuln, cancellationToken);
sinkSource = SinkSource.PackageApi;
_logger.LogDebug(
"{CveId}/{PackageName}: Using {SinkCount} package API methods as fallback",
vuln.CveId, vuln.PackageName, sinks.Count);
}
else
{
// Ultimate fallback - no sink resolution possible
return CreatePresentFinding(vuln, surface);
}
// If no call graph, we can't determine reachability
if (callGraph is null)
{
return CreatePresentFinding(vuln, surface);
}
// Perform reachability analysis from entrypoints to sinks
var reachablePaths = await _graphService.FindPathsToSinksAsync(
callGraph,
sinks,
cancellationToken);
if (reachablePaths.Count == 0)
{
// No paths found - unreachable
return new SurfaceReachabilityFinding
{
CveId = vuln.CveId,
PackageName = vuln.PackageName,
Version = vuln.Version,
ConfidenceTier = ReachabilityConfidenceTier.Unreachable,
SinkSource = sinkSource,
SurfaceId = surface.SurfaceId,
Message = "No execution path to vulnerable code found",
ReachableTriggers = [],
Witnesses = []
};
}
// Paths found - determine confidence tier
var tier = sinkSource == SinkSource.Surface
? ReachabilityConfidenceTier.Confirmed
: ReachabilityConfidenceTier.Likely;
var reachableTriggers = reachablePaths
.Select(p => p.SinkMethodKey)
.Distinct()
.ToList();
return new SurfaceReachabilityFinding
{
CveId = vuln.CveId,
PackageName = vuln.PackageName,
Version = vuln.Version,
ConfidenceTier = tier,
SinkSource = sinkSource,
SurfaceId = surface.SurfaceId,
Message = $"{tier.GetDescription()}: {reachablePaths.Count} paths to {reachableTriggers.Count} triggers",
ReachableTriggers = reachableTriggers,
Witnesses = reachablePaths.Select(p => new PathWitness
{
EntrypointMethodKey = p.EntrypointMethodKey,
SinkMethodKey = p.SinkMethodKey,
PathLength = p.PathLength,
PathMethodKeys = p.PathMethodKeys
}).ToList()
};
}
private async Task<IReadOnlyList<string>> ResolvePackageApiMethodsAsync(
VulnerabilityInfo vuln,
CancellationToken cancellationToken)
{
// TODO: Implement package API method resolution
// This would query the package's public API methods as fallback sinks
await Task.CompletedTask;
return [];
}
private static SurfaceReachabilityFinding CreatePresentFinding(
VulnerabilityInfo vuln,
SurfaceQueryResult surface)
{
return new SurfaceReachabilityFinding
{
CveId = vuln.CveId,
PackageName = vuln.PackageName,
Version = vuln.Version,
ConfidenceTier = ReachabilityConfidenceTier.Present,
SinkSource = surface.Source,
SurfaceId = surface.SurfaceId,
Message = "Package present; reachability undetermined",
ReachableTriggers = [],
Witnesses = []
};
}
private static SurfaceReachabilityFinding CreateUnknownFinding(
VulnerabilityInfo vuln,
string reason)
{
return new SurfaceReachabilityFinding
{
CveId = vuln.CveId,
PackageName = vuln.PackageName,
Version = vuln.Version,
ConfidenceTier = ReachabilityConfidenceTier.Unknown,
SinkSource = SinkSource.FallbackAll,
Message = reason,
ReachableTriggers = [],
Witnesses = []
};
}
}
/// <summary>
/// Interface for surface-aware reachability analysis.
/// </summary>
public interface ISurfaceAwareReachabilityAnalyzer
{
/// <summary>
/// Analyzes reachability for vulnerabilities using surface data.
/// </summary>
Task<SurfaceAwareReachabilityResult> AnalyzeAsync(
SurfaceAwareReachabilityRequest request,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Request for surface-aware reachability analysis.
/// </summary>
public sealed record SurfaceAwareReachabilityRequest
{
/// <summary>
/// Vulnerabilities to analyze.
/// </summary>
public required IReadOnlyList<VulnerabilityInfo> Vulnerabilities { get; init; }
/// <summary>
/// Call graph accessor for the analyzed codebase.
/// </summary>
public ICallGraphAccessor? CallGraph { get; init; }
/// <summary>
/// Maximum depth for path finding.
/// </summary>
public int MaxPathDepth { get; init; } = 20;
}
/// <summary>
/// Result of surface-aware reachability analysis.
/// </summary>
public sealed record SurfaceAwareReachabilityResult
{
/// <summary>
/// Individual findings for each vulnerability.
/// </summary>
public IReadOnlyList<SurfaceReachabilityFinding> Findings { get; init; } = [];
/// <summary>
/// Total vulnerabilities analyzed.
/// </summary>
public int TotalVulnerabilities { get; init; }
/// <summary>
/// Count of confirmed reachable vulnerabilities.
/// </summary>
public int ConfirmedReachable { get; init; }
/// <summary>
/// Count of likely reachable vulnerabilities.
/// </summary>
public int LikelyReachable { get; init; }
/// <summary>
/// Count of unreachable vulnerabilities.
/// </summary>
public int Unreachable { get; init; }
/// <summary>
/// Analysis duration.
/// </summary>
public TimeSpan AnalysisDuration { get; init; }
}
/// <summary>
/// Reachability finding for a single vulnerability.
/// </summary>
public sealed record SurfaceReachabilityFinding
{
/// <summary>
/// CVE identifier.
/// </summary>
public required string CveId { get; init; }
/// <summary>
/// Package name.
/// </summary>
public required string PackageName { get; init; }
/// <summary>
/// Package version.
/// </summary>
public required string Version { get; init; }
/// <summary>
/// Confidence tier for this finding.
/// </summary>
public ReachabilityConfidenceTier ConfidenceTier { get; init; }
/// <summary>
/// Source of sink methods used.
/// </summary>
public SinkSource SinkSource { get; init; }
/// <summary>
/// Surface ID if available.
/// </summary>
public Guid? SurfaceId { get; init; }
/// <summary>
/// Human-readable message.
/// </summary>
public required string Message { get; init; }
/// <summary>
/// Trigger methods that are reachable.
/// </summary>
public IReadOnlyList<string> ReachableTriggers { get; init; } = [];
/// <summary>
/// Path witnesses from entrypoints to triggers.
/// </summary>
public IReadOnlyList<PathWitness> Witnesses { get; init; } = [];
}
/// <summary>
/// Vulnerability information for analysis.
/// </summary>
public sealed record VulnerabilityInfo
{
/// <summary>
/// CVE identifier.
/// </summary>
public required string CveId { get; init; }
/// <summary>
/// Package ecosystem.
/// </summary>
public required string Ecosystem { get; init; }
/// <summary>
/// Package name.
/// </summary>
public required string PackageName { get; init; }
/// <summary>
/// Package version.
/// </summary>
public required string Version { get; init; }
}
/// <summary>
/// Path witness from entrypoint to sink.
/// </summary>
public sealed record PathWitness
{
/// <summary>
/// Entrypoint method key.
/// </summary>
public required string EntrypointMethodKey { get; init; }
/// <summary>
/// Sink (trigger) method key.
/// </summary>
public required string SinkMethodKey { get; init; }
/// <summary>
/// Number of hops in path.
/// </summary>
public int PathLength { get; init; }
/// <summary>
/// Ordered method keys in path.
/// </summary>
public IReadOnlyList<string> PathMethodKeys { get; init; } = [];
}
/// <summary>
/// Interface for call graph access.
/// </summary>
public interface ICallGraphAccessor
{
/// <summary>
/// Gets entrypoint method keys.
/// </summary>
IReadOnlyList<string> GetEntrypoints();
/// <summary>
/// Gets callees of a method.
/// </summary>
IReadOnlyList<string> GetCallees(string methodKey);
/// <summary>
/// Checks if a method exists.
/// </summary>
bool ContainsMethod(string methodKey);
}
/// <summary>
/// Interface for reachability graph operations.
/// </summary>
public interface IReachabilityGraphService
{
/// <summary>
/// Finds paths from entrypoints to any of the specified sinks.
/// </summary>
Task<IReadOnlyList<ReachablePath>> FindPathsToSinksAsync(
ICallGraphAccessor callGraph,
IReadOnlyList<string> sinkMethodKeys,
CancellationToken cancellationToken = default);
}
/// <summary>
/// A reachable path from entrypoint to sink.
/// </summary>
public sealed record ReachablePath
{
/// <summary>
/// Entrypoint method key.
/// </summary>
public required string EntrypointMethodKey { get; init; }
/// <summary>
/// Sink method key.
/// </summary>
public required string SinkMethodKey { get; init; }
/// <summary>
/// Path length.
/// </summary>
public int PathLength { get; init; }
/// <summary>
/// Ordered method keys in path.
/// </summary>
public IReadOnlyList<string> PathMethodKeys { get; init; } = [];
}

View File

@@ -0,0 +1,275 @@
// -----------------------------------------------------------------------------
// SurfaceQueryService.cs
// Sprint: SPRINT_3700_0004_0001_reachability_integration (REACH-002, REACH-003, REACH-007)
// Description: Implementation of vulnerability surface query service.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.Reachability.Surfaces;
/// <summary>
/// Implementation of the vulnerability surface query service.
/// Queries the database for pre-computed vulnerability surfaces.
/// </summary>
public sealed class SurfaceQueryService : ISurfaceQueryService
{
private readonly ISurfaceRepository _repository;
private readonly IMemoryCache _cache;
private readonly ILogger<SurfaceQueryService> _logger;
private readonly SurfaceQueryOptions _options;
private static readonly TimeSpan DefaultCacheDuration = TimeSpan.FromMinutes(15);
public SurfaceQueryService(
ISurfaceRepository repository,
IMemoryCache cache,
ILogger<SurfaceQueryService> logger,
SurfaceQueryOptions? options = null)
{
_repository = repository ?? throw new ArgumentNullException(nameof(repository));
_cache = cache ?? throw new ArgumentNullException(nameof(cache));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options ?? new SurfaceQueryOptions();
}
/// <inheritdoc />
public async Task<SurfaceQueryResult> QueryAsync(
SurfaceQueryRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var cacheKey = $"surface:{request.QueryKey}";
// Check cache first
if (_options.EnableCaching && _cache.TryGetValue<SurfaceQueryResult>(cacheKey, out var cached))
{
SurfaceQueryMetrics.CacheHits.Add(1);
return cached!;
}
SurfaceQueryMetrics.CacheMisses.Add(1);
var sw = Stopwatch.StartNew();
try
{
// Query repository
var surface = await _repository.GetSurfaceAsync(
request.CveId,
request.Ecosystem,
request.PackageName,
request.Version,
cancellationToken);
SurfaceQueryResult result;
if (surface is not null)
{
// Surface found - get triggers
var triggers = await _repository.GetTriggersAsync(
surface.Id,
request.MaxTriggers,
cancellationToken);
var sinks = await _repository.GetSinksAsync(surface.Id, cancellationToken);
result = SurfaceQueryResult.Found(
surface.Id,
triggers,
sinks,
surface.ComputedAt);
SurfaceQueryMetrics.SurfaceHits.Add(1);
_logger.LogDebug(
"Surface found for {CveId}/{PackageName}: {TriggerCount} triggers, {SinkCount} sinks",
request.CveId, request.PackageName, triggers.Count, sinks.Count);
}
else
{
// Surface not found - apply fallback cascade
result = ApplyFallbackCascade(request);
SurfaceQueryMetrics.SurfaceMisses.Add(1);
}
sw.Stop();
SurfaceQueryMetrics.QueryDurationMs.Record(sw.ElapsedMilliseconds);
// Cache result
if (_options.EnableCaching)
{
var cacheOptions = new MemoryCacheEntryOptions
{
AbsoluteExpirationRelativeToNow = _options.CacheDuration ?? DefaultCacheDuration
};
_cache.Set(cacheKey, result, cacheOptions);
}
return result;
}
catch (Exception ex)
{
sw.Stop();
SurfaceQueryMetrics.QueryErrors.Add(1);
_logger.LogWarning(ex, "Failed to query surface for {CveId}/{PackageName}", request.CveId, request.PackageName);
return SurfaceQueryResult.FallbackToPackageApi($"Query failed: {ex.Message}");
}
}
/// <inheritdoc />
public async Task<IReadOnlyDictionary<string, SurfaceQueryResult>> QueryBulkAsync(
IEnumerable<SurfaceQueryRequest> requests,
CancellationToken cancellationToken = default)
{
var requestList = requests.ToList();
var results = new Dictionary<string, SurfaceQueryResult>(requestList.Count);
// Split into cached and uncached
var uncachedRequests = new List<SurfaceQueryRequest>();
foreach (var request in requestList)
{
var cacheKey = $"surface:{request.QueryKey}";
if (_options.EnableCaching && _cache.TryGetValue<SurfaceQueryResult>(cacheKey, out var cached))
{
results[request.QueryKey] = cached!;
SurfaceQueryMetrics.CacheHits.Add(1);
}
else
{
uncachedRequests.Add(request);
SurfaceQueryMetrics.CacheMisses.Add(1);
}
}
// Query remaining in parallel batches
if (uncachedRequests.Count > 0)
{
var batchSize = _options.BulkQueryBatchSize;
var batches = uncachedRequests
.Select((r, i) => new { Request = r, Index = i })
.GroupBy(x => x.Index / batchSize)
.Select(g => g.Select(x => x.Request).ToList());
foreach (var batch in batches)
{
var tasks = batch.Select(r => QueryAsync(r, cancellationToken));
var batchResults = await Task.WhenAll(tasks);
for (var i = 0; i < batch.Count; i++)
{
results[batch[i].QueryKey] = batchResults[i];
}
}
}
return results;
}
/// <inheritdoc />
public async Task<bool> ExistsAsync(
string cveId,
string ecosystem,
string packageName,
string version,
CancellationToken cancellationToken = default)
{
var cacheKey = $"surface_exists:{cveId}|{ecosystem}|{packageName}|{version}";
if (_options.EnableCaching && _cache.TryGetValue<bool>(cacheKey, out var exists))
{
return exists;
}
var result = await _repository.ExistsAsync(cveId, ecosystem, packageName, version, cancellationToken);
if (_options.EnableCaching)
{
_cache.Set(cacheKey, result, TimeSpan.FromMinutes(5));
}
return result;
}
private SurfaceQueryResult ApplyFallbackCascade(SurfaceQueryRequest request)
{
_logger.LogDebug(
"No surface for {CveId}/{PackageName} v{Version}, applying fallback cascade",
request.CveId, request.PackageName, request.Version);
// Fallback cascade:
// 1. If we have package API info, use that
// 2. Otherwise, fall back to "all methods" mode
// For now, return FallbackAll - in future we can add PackageApi lookup
return SurfaceQueryResult.NotFound(request.CveId, request.PackageName);
}
}
/// <summary>
/// Options for surface query service.
/// </summary>
public sealed record SurfaceQueryOptions
{
/// <summary>
/// Whether to enable in-memory caching.
/// </summary>
public bool EnableCaching { get; init; } = true;
/// <summary>
/// Cache duration for surface results.
/// </summary>
public TimeSpan? CacheDuration { get; init; }
/// <summary>
/// Batch size for bulk queries.
/// </summary>
public int BulkQueryBatchSize { get; init; } = 10;
}
/// <summary>
/// Metrics for surface query service.
/// </summary>
internal static class SurfaceQueryMetrics
{
private static readonly string MeterName = "StellaOps.Scanner.Reachability.Surfaces";
public static readonly System.Diagnostics.Metrics.Counter<long> CacheHits =
new System.Diagnostics.Metrics.Meter(MeterName).CreateCounter<long>(
"stellaops.surface_query.cache_hits",
description: "Number of surface query cache hits");
public static readonly System.Diagnostics.Metrics.Counter<long> CacheMisses =
new System.Diagnostics.Metrics.Meter(MeterName).CreateCounter<long>(
"stellaops.surface_query.cache_misses",
description: "Number of surface query cache misses");
public static readonly System.Diagnostics.Metrics.Counter<long> SurfaceHits =
new System.Diagnostics.Metrics.Meter(MeterName).CreateCounter<long>(
"stellaops.surface_query.surface_hits",
description: "Number of surfaces found");
public static readonly System.Diagnostics.Metrics.Counter<long> SurfaceMisses =
new System.Diagnostics.Metrics.Meter(MeterName).CreateCounter<long>(
"stellaops.surface_query.surface_misses",
description: "Number of surfaces not found");
public static readonly System.Diagnostics.Metrics.Counter<long> QueryErrors =
new System.Diagnostics.Metrics.Meter(MeterName).CreateCounter<long>(
"stellaops.surface_query.errors",
description: "Number of query errors");
public static readonly System.Diagnostics.Metrics.Histogram<long> QueryDurationMs =
new System.Diagnostics.Metrics.Meter(MeterName).CreateHistogram<long>(
"stellaops.surface_query.duration_ms",
unit: "ms",
description: "Surface query duration in milliseconds");
}

View File

@@ -1,3 +1,5 @@
using System.Collections.Immutable;
namespace StellaOps.Scanner.Reachability.Witnesses;
/// <summary>
@@ -20,6 +22,18 @@ public interface IPathWitnessBuilder
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>All generated witnesses.</returns>
IAsyncEnumerable<PathWitness> BuildAllAsync(BatchWitnessRequest request, CancellationToken cancellationToken = default);
/// <summary>
/// Creates path witnesses from pre-computed ReachabilityAnalyzer output.
/// Sprint: SPRINT_3700_0001_0001 (WIT-008)
/// This method uses deterministic paths from the analyzer instead of computing its own.
/// </summary>
/// <param name="request">The analyzer-based witness request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>All generated witnesses from the analyzer paths.</returns>
IAsyncEnumerable<PathWitness> BuildFromAnalyzerAsync(
AnalyzerWitnessRequest request,
CancellationToken cancellationToken = default);
}
/// <summary>
@@ -173,3 +187,92 @@ public sealed record BatchWitnessRequest
/// </summary>
public string? BuildId { get; init; }
}
/// <summary>
/// Request to build witnesses from pre-computed ReachabilityAnalyzer output.
/// Sprint: SPRINT_3700_0001_0001 (WIT-008)
/// </summary>
public sealed record AnalyzerWitnessRequest
{
/// <summary>
/// The SBOM digest for artifact context.
/// </summary>
public required string SbomDigest { get; init; }
/// <summary>
/// Package URL of the vulnerable component.
/// </summary>
public required string ComponentPurl { get; init; }
/// <summary>
/// Vulnerability ID (e.g., "CVE-2024-12345").
/// </summary>
public required string VulnId { get; init; }
/// <summary>
/// Vulnerability source (e.g., "NVD").
/// </summary>
public required string VulnSource { get; init; }
/// <summary>
/// Affected version range.
/// </summary>
public required string AffectedRange { get; init; }
/// <summary>
/// Sink taxonomy type for all sinks in the paths.
/// </summary>
public required string SinkType { get; init; }
/// <summary>
/// Graph digest from the analyzer result.
/// </summary>
public required string GraphDigest { get; init; }
/// <summary>
/// Pre-computed paths from ReachabilityAnalyzer.
/// Each path contains (EntrypointId, SinkId, NodeIds ordered from entrypoint to sink).
/// </summary>
public required IReadOnlyList<AnalyzerPathData> Paths { get; init; }
/// <summary>
/// Node metadata lookup for resolving node details.
/// Key is node ID, value contains name, file, line info.
/// </summary>
public required IReadOnlyDictionary<string, AnalyzerNodeData> NodeMetadata { get; init; }
/// <summary>
/// Optional attack surface digest.
/// </summary>
public string? SurfaceDigest { get; init; }
/// <summary>
/// Optional analysis config digest.
/// </summary>
public string? AnalysisConfigDigest { get; init; }
/// <summary>
/// Optional build ID.
/// </summary>
public string? BuildId { get; init; }
}
/// <summary>
/// Lightweight representation of a reachability path from the analyzer.
/// Sprint: SPRINT_3700_0001_0001 (WIT-008)
/// </summary>
public sealed record AnalyzerPathData(
string EntrypointId,
string SinkId,
ImmutableArray<string> NodeIds);
/// <summary>
/// Lightweight node metadata for witness generation.
/// Sprint: SPRINT_3700_0001_0001 (WIT-008)
/// </summary>
public sealed record AnalyzerNodeData(
string Name,
string? FilePath,
int? Line,
string? EntrypointKind);

View File

@@ -0,0 +1,28 @@
using StellaOps.Attestor.Envelope;
namespace StellaOps.Scanner.Reachability.Witnesses;
/// <summary>
/// Service for creating and verifying DSSE-signed path witness envelopes.
/// Sprint: SPRINT_3700_0001_0001 (WIT-007D)
/// </summary>
public interface IWitnessDsseSigner
{
/// <summary>
/// Signs a path witness and creates a DSSE envelope.
/// </summary>
/// <param name="witness">The path witness to sign.</param>
/// <param name="signingKey">The key to use for signing (must have private material).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Result containing the DSSE envelope or error.</returns>
WitnessDsseResult SignWitness(PathWitness witness, EnvelopeKey signingKey, CancellationToken cancellationToken = default);
/// <summary>
/// Verifies a DSSE-signed witness envelope.
/// </summary>
/// <param name="envelope">The DSSE envelope containing the signed witness.</param>
/// <param name="publicKey">The public key to verify against.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Result containing the verified witness or error.</returns>
WitnessVerifyResult VerifyWitness(DsseEnvelope envelope, EnvelopeKey publicKey, CancellationToken cancellationToken = default);
}

View File

@@ -164,6 +164,111 @@ public sealed class PathWitnessBuilder : IPathWitnessBuilder
}
}
/// <inheritdoc />
/// <summary>
/// Creates path witnesses from pre-computed ReachabilityAnalyzer output.
/// Sprint: SPRINT_3700_0001_0001 (WIT-008)
/// </summary>
public async IAsyncEnumerable<PathWitness> BuildFromAnalyzerAsync(
AnalyzerWitnessRequest request,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (request.Paths.Count == 0)
{
yield break;
}
var nodeMetadata = request.NodeMetadata;
foreach (var analyzerPath in request.Paths)
{
cancellationToken.ThrowIfCancellationRequested();
// Convert analyzer NodeIds to PathSteps with metadata
var pathSteps = new List<PathStep>();
foreach (var nodeId in analyzerPath.NodeIds)
{
if (nodeMetadata.TryGetValue(nodeId, out var node))
{
pathSteps.Add(new PathStep
{
Symbol = node.Name,
SymbolId = nodeId,
File = node.FilePath,
Line = node.Line
});
}
else
{
// Node not found, add with just the ID
pathSteps.Add(new PathStep
{
Symbol = nodeId,
SymbolId = nodeId,
File = null,
Line = null
});
}
}
// Get entrypoint metadata
nodeMetadata.TryGetValue(analyzerPath.EntrypointId, out var entrypointNode);
var entrypointKind = entrypointNode?.EntrypointKind ?? "unknown";
var entrypointName = entrypointNode?.Name ?? analyzerPath.EntrypointId;
// Get sink metadata
nodeMetadata.TryGetValue(analyzerPath.SinkId, out var sinkNode);
var sinkSymbol = sinkNode?.Name ?? analyzerPath.SinkId;
// Build the witness
var witness = new PathWitness
{
WitnessId = string.Empty, // Will be set after hashing
Artifact = new WitnessArtifact
{
SbomDigest = request.SbomDigest,
ComponentPurl = request.ComponentPurl
},
Vuln = new WitnessVuln
{
Id = request.VulnId,
Source = request.VulnSource,
AffectedRange = request.AffectedRange
},
Entrypoint = new WitnessEntrypoint
{
Kind = entrypointKind,
Name = entrypointName,
SymbolId = analyzerPath.EntrypointId
},
Path = pathSteps,
Sink = new WitnessSink
{
Symbol = sinkSymbol,
SymbolId = analyzerPath.SinkId,
SinkType = request.SinkType
},
Gates = null, // Gate detection not applied for analyzer-based paths yet
Evidence = new WitnessEvidence
{
CallgraphDigest = request.GraphDigest,
SurfaceDigest = request.SurfaceDigest,
AnalysisConfigDigest = request.AnalysisConfigDigest,
BuildId = request.BuildId
},
ObservedAt = _timeProvider.GetUtcNow()
};
// Compute witness ID from canonical content
var witnessId = ComputeWitnessId(witness);
witness = witness with { WitnessId = witnessId };
yield return witness;
}
}
/// <summary>
/// Finds the shortest path from source to target using BFS.
/// </summary>

View File

@@ -0,0 +1,179 @@
using StellaOps.Attestor.Envelope;
using StellaOps.Cryptography;
namespace StellaOps.Scanner.Reachability.Witnesses;
/// <summary>
/// Generates signed DSSE envelopes for path witnesses.
/// Sprint: SPRINT_3700_0001_0001 (WIT-009)
/// Combines PathWitnessBuilder with WitnessDsseSigner for end-to-end witness attestation.
/// </summary>
public sealed class SignedWitnessGenerator : ISignedWitnessGenerator
{
private readonly IPathWitnessBuilder _builder;
private readonly IWitnessDsseSigner _signer;
/// <summary>
/// Creates a new SignedWitnessGenerator.
/// </summary>
public SignedWitnessGenerator(IPathWitnessBuilder builder, IWitnessDsseSigner signer)
{
_builder = builder ?? throw new ArgumentNullException(nameof(builder));
_signer = signer ?? throw new ArgumentNullException(nameof(signer));
}
/// <inheritdoc />
public async Task<SignedWitnessResult?> GenerateSignedWitnessAsync(
PathWitnessRequest request,
EnvelopeKey signingKey,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(signingKey);
// Build the witness
var witness = await _builder.BuildAsync(request, cancellationToken).ConfigureAwait(false);
if (witness is null)
{
return null;
}
// Sign it
var signResult = _signer.SignWitness(witness, signingKey, cancellationToken);
if (!signResult.IsSuccess)
{
return new SignedWitnessResult
{
IsSuccess = false,
Error = signResult.Error
};
}
return new SignedWitnessResult
{
IsSuccess = true,
Witness = witness,
Envelope = signResult.Envelope,
PayloadBytes = signResult.PayloadBytes
};
}
/// <inheritdoc />
public async IAsyncEnumerable<SignedWitnessResult> GenerateSignedWitnessesAsync(
BatchWitnessRequest request,
EnvelopeKey signingKey,
[System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(signingKey);
await foreach (var witness in _builder.BuildAllAsync(request, cancellationToken).ConfigureAwait(false))
{
var signResult = _signer.SignWitness(witness, signingKey, cancellationToken);
yield return signResult.IsSuccess
? new SignedWitnessResult
{
IsSuccess = true,
Witness = witness,
Envelope = signResult.Envelope,
PayloadBytes = signResult.PayloadBytes
}
: new SignedWitnessResult
{
IsSuccess = false,
Error = signResult.Error
};
}
}
/// <inheritdoc />
public async IAsyncEnumerable<SignedWitnessResult> GenerateSignedWitnessesFromAnalyzerAsync(
AnalyzerWitnessRequest request,
EnvelopeKey signingKey,
[System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(signingKey);
await foreach (var witness in _builder.BuildFromAnalyzerAsync(request, cancellationToken).ConfigureAwait(false))
{
var signResult = _signer.SignWitness(witness, signingKey, cancellationToken);
yield return signResult.IsSuccess
? new SignedWitnessResult
{
IsSuccess = true,
Witness = witness,
Envelope = signResult.Envelope,
PayloadBytes = signResult.PayloadBytes
}
: new SignedWitnessResult
{
IsSuccess = false,
Error = signResult.Error
};
}
}
}
/// <summary>
/// Interface for generating signed DSSE envelopes for path witnesses.
/// </summary>
public interface ISignedWitnessGenerator
{
/// <summary>
/// Generates a signed witness from a single request.
/// </summary>
Task<SignedWitnessResult?> GenerateSignedWitnessAsync(
PathWitnessRequest request,
EnvelopeKey signingKey,
CancellationToken cancellationToken = default);
/// <summary>
/// Generates signed witnesses from a batch request.
/// </summary>
IAsyncEnumerable<SignedWitnessResult> GenerateSignedWitnessesAsync(
BatchWitnessRequest request,
EnvelopeKey signingKey,
CancellationToken cancellationToken = default);
/// <summary>
/// Generates signed witnesses from pre-computed analyzer paths.
/// </summary>
IAsyncEnumerable<SignedWitnessResult> GenerateSignedWitnessesFromAnalyzerAsync(
AnalyzerWitnessRequest request,
EnvelopeKey signingKey,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of generating a signed witness.
/// </summary>
public sealed record SignedWitnessResult
{
/// <summary>
/// Whether the signing succeeded.
/// </summary>
public bool IsSuccess { get; init; }
/// <summary>
/// The generated witness (if successful).
/// </summary>
public PathWitness? Witness { get; init; }
/// <summary>
/// The DSSE envelope containing the signed witness (if successful).
/// </summary>
public DsseEnvelope? Envelope { get; init; }
/// <summary>
/// The canonical JSON payload bytes (if successful).
/// </summary>
public byte[]? PayloadBytes { get; init; }
/// <summary>
/// Error message (if failed).
/// </summary>
public string? Error { get; init; }
}

View File

@@ -0,0 +1,207 @@
using System.Text;
using System.Text.Json;
using StellaOps.Attestor.Envelope;
namespace StellaOps.Scanner.Reachability.Witnesses;
/// <summary>
/// Service for creating and verifying DSSE-signed path witness envelopes.
/// Sprint: SPRINT_3700_0001_0001 (WIT-007D)
/// </summary>
public sealed class WitnessDsseSigner : IWitnessDsseSigner
{
private readonly EnvelopeSignatureService _signatureService;
private static readonly JsonSerializerOptions CanonicalJsonOptions = new(JsonSerializerDefaults.Web)
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
WriteIndented = false,
DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
};
/// <summary>
/// Creates a new WitnessDsseSigner with the specified signature service.
/// </summary>
public WitnessDsseSigner(EnvelopeSignatureService signatureService)
{
_signatureService = signatureService ?? throw new ArgumentNullException(nameof(signatureService));
}
/// <summary>
/// Creates a new WitnessDsseSigner with a default signature service.
/// </summary>
public WitnessDsseSigner() : this(new EnvelopeSignatureService())
{
}
/// <inheritdoc />
public WitnessDsseResult SignWitness(PathWitness witness, EnvelopeKey signingKey, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(witness);
ArgumentNullException.ThrowIfNull(signingKey);
cancellationToken.ThrowIfCancellationRequested();
try
{
// Serialize witness to canonical JSON bytes
var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(witness, CanonicalJsonOptions);
// Build the PAE (Pre-Authentication Encoding) for DSSE
var pae = BuildPae(WitnessSchema.DssePayloadType, payloadBytes);
// Sign the PAE
var signResult = _signatureService.Sign(pae, signingKey, cancellationToken);
if (!signResult.IsSuccess)
{
return WitnessDsseResult.Failure($"Signing failed: {signResult.Error?.Message}");
}
var signature = signResult.Value;
// Create the DSSE envelope
var dsseSignature = new DsseSignature(
signature: Convert.ToBase64String(signature.Value.Span),
keyId: signature.KeyId);
var envelope = new DsseEnvelope(
payloadType: WitnessSchema.DssePayloadType,
payload: payloadBytes,
signatures: [dsseSignature]);
return WitnessDsseResult.Success(envelope, payloadBytes);
}
catch (Exception ex) when (ex is JsonException or InvalidOperationException)
{
return WitnessDsseResult.Failure($"Failed to create DSSE envelope: {ex.Message}");
}
}
/// <inheritdoc />
public WitnessVerifyResult VerifyWitness(DsseEnvelope envelope, EnvelopeKey publicKey, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(envelope);
ArgumentNullException.ThrowIfNull(publicKey);
cancellationToken.ThrowIfCancellationRequested();
try
{
// Verify payload type
if (!string.Equals(envelope.PayloadType, WitnessSchema.DssePayloadType, StringComparison.Ordinal))
{
return WitnessVerifyResult.Failure($"Invalid payload type: expected '{WitnessSchema.DssePayloadType}', got '{envelope.PayloadType}'");
}
// Deserialize the witness from payload
var witness = JsonSerializer.Deserialize<PathWitness>(envelope.Payload.Span, CanonicalJsonOptions);
if (witness is null)
{
return WitnessVerifyResult.Failure("Failed to deserialize witness from payload");
}
// Verify schema version
if (!string.Equals(witness.WitnessSchema, WitnessSchema.Version, StringComparison.Ordinal))
{
return WitnessVerifyResult.Failure($"Unsupported witness schema: {witness.WitnessSchema}");
}
// Find signature matching the public key
var matchingSignature = envelope.Signatures.FirstOrDefault(
s => string.Equals(s.KeyId, publicKey.KeyId, StringComparison.Ordinal));
if (matchingSignature is null)
{
return WitnessVerifyResult.Failure($"No signature found for key ID: {publicKey.KeyId}");
}
// Build PAE and verify signature
var pae = BuildPae(envelope.PayloadType, envelope.Payload.ToArray());
var signatureBytes = Convert.FromBase64String(matchingSignature.Signature);
var envelopeSignature = new EnvelopeSignature(publicKey.KeyId, publicKey.AlgorithmId, signatureBytes);
var verifyResult = _signatureService.Verify(pae, envelopeSignature, publicKey, cancellationToken);
if (!verifyResult.IsSuccess)
{
return WitnessVerifyResult.Failure($"Signature verification failed: {verifyResult.Error?.Message}");
}
return WitnessVerifyResult.Success(witness, matchingSignature.KeyId);
}
catch (Exception ex) when (ex is JsonException or FormatException or InvalidOperationException)
{
return WitnessVerifyResult.Failure($"Verification failed: {ex.Message}");
}
}
/// <summary>
/// Builds the DSSE Pre-Authentication Encoding (PAE) for a payload.
/// PAE = "DSSEv1" SP len(type) SP type SP len(payload) SP payload
/// </summary>
private static byte[] BuildPae(string payloadType, byte[] payload)
{
var typeBytes = Encoding.UTF8.GetBytes(payloadType);
using var stream = new MemoryStream();
using var writer = new BinaryWriter(stream, Encoding.UTF8, leaveOpen: true);
// Write "DSSEv1 "
writer.Write(Encoding.UTF8.GetBytes("DSSEv1 "));
// Write len(type) as little-endian 8-byte integer followed by space
WriteLengthAndSpace(writer, typeBytes.Length);
// Write type followed by space
writer.Write(typeBytes);
writer.Write((byte)' ');
// Write len(payload) as little-endian 8-byte integer followed by space
WriteLengthAndSpace(writer, payload.Length);
// Write payload
writer.Write(payload);
writer.Flush();
return stream.ToArray();
}
private static void WriteLengthAndSpace(BinaryWriter writer, int length)
{
// Write length as ASCII decimal string
writer.Write(Encoding.UTF8.GetBytes(length.ToString()));
writer.Write((byte)' ');
}
}
/// <summary>
/// Result of DSSE signing a witness.
/// </summary>
public sealed record WitnessDsseResult
{
public bool IsSuccess { get; init; }
public DsseEnvelope? Envelope { get; init; }
public byte[]? PayloadBytes { get; init; }
public string? Error { get; init; }
public static WitnessDsseResult Success(DsseEnvelope envelope, byte[] payloadBytes)
=> new() { IsSuccess = true, Envelope = envelope, PayloadBytes = payloadBytes };
public static WitnessDsseResult Failure(string error)
=> new() { IsSuccess = false, Error = error };
}
/// <summary>
/// Result of verifying a DSSE-signed witness.
/// </summary>
public sealed record WitnessVerifyResult
{
public bool IsSuccess { get; init; }
public PathWitness? Witness { get; init; }
public string? VerifiedKeyId { get; init; }
public string? Error { get; init; }
public static WitnessVerifyResult Success(PathWitness witness, string keyId)
=> new() { IsSuccess = true, Witness = witness, VerifiedKeyId = keyId };
public static WitnessVerifyResult Failure(string error)
=> new() { IsSuccess = false, Error = error };
}

View File

@@ -2,6 +2,7 @@ namespace StellaOps.Scanner.Reachability.Witnesses;
/// <summary>
/// Constants for the stellaops.witness.v1 schema.
/// Sprint: SPRINT_3700_0001_0001 (WIT-007C)
/// </summary>
public static class WitnessSchema
{
@@ -16,7 +17,29 @@ public static class WitnessSchema
public const string WitnessIdPrefix = "wit:";
/// <summary>
/// Default DSSE payload type for witnesses.
/// Default DSSE payload type for path witnesses.
/// Used when creating DSSE envelopes for path witness attestations.
/// </summary>
public const string DssePayloadType = "application/vnd.stellaops.witness.v1+json";
/// <summary>
/// DSSE predicate type URI for path witnesses (in-toto style).
/// Matches PredicateTypes.StellaOpsPathWitness in Signer.Core.
/// </summary>
public const string PredicateType = "stella.ops/pathWitness@v1";
/// <summary>
/// Witness type for reachability path witnesses.
/// </summary>
public const string WitnessTypeReachabilityPath = "reachability_path";
/// <summary>
/// Witness type for gate proof witnesses.
/// </summary>
public const string WitnessTypeGateProof = "gate_proof";
/// <summary>
/// JSON schema URI for witness validation.
/// </summary>
public const string JsonSchemaUri = "https://stellaops.org/schemas/witness-v1.json";
}

View File

@@ -0,0 +1,54 @@
// -----------------------------------------------------------------------------
// DriftAttestationOptions.cs
// Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
// Task: UI-016
// Description: Configuration options for drift attestation service.
// -----------------------------------------------------------------------------
namespace StellaOps.Scanner.ReachabilityDrift.Attestation;
/// <summary>
/// Configuration options for drift attestation.
/// </summary>
public sealed class DriftAttestationOptions
{
/// <summary>
/// Configuration section name.
/// </summary>
public const string SectionName = "DriftAttestation";
/// <summary>
/// Whether attestation creation is enabled.
/// </summary>
public bool Enabled { get; set; } = true;
/// <summary>
/// Whether to use the remote signer service.
/// </summary>
public bool UseSignerService { get; set; } = true;
/// <summary>
/// Default key ID for signing if not specified in request.
/// </summary>
public string? DefaultKeyId { get; set; }
/// <summary>
/// Whether to submit attestations to Rekor by default.
/// </summary>
public bool SubmitToRekorByDefault { get; set; }
/// <summary>
/// Sink ruleset identifier for analysis metadata.
/// </summary>
public string? SinkRuleset { get; set; }
/// <summary>
/// Signer service endpoint URL.
/// </summary>
public string? SignerServiceUrl { get; set; }
/// <summary>
/// Timeout for signer service calls in seconds.
/// </summary>
public int SignerTimeoutSeconds { get; set; } = 30;
}

View File

@@ -0,0 +1,358 @@
// -----------------------------------------------------------------------------
// DriftAttestationService.cs
// Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
// Task: UI-016
// Description: Service for creating signed reachability drift attestations.
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Diagnostics;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Attestor.ProofChain.Predicates;
using StellaOps.Signer.Core;
namespace StellaOps.Scanner.ReachabilityDrift.Attestation;
/// <summary>
/// Default implementation of <see cref="IDriftAttestationService"/>.
/// Creates stellaops.dev/predicates/reachability-drift@v1 attestations wrapped in DSSE envelopes.
/// </summary>
public sealed class DriftAttestationService : IDriftAttestationService
{
private static readonly JsonSerializerOptions CanonicalJsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
};
private readonly IDriftSignerClient? _signerClient;
private readonly IOptionsMonitor<DriftAttestationOptions> _options;
private readonly TimeProvider _timeProvider;
private readonly ILogger<DriftAttestationService> _logger;
public DriftAttestationService(
IDriftSignerClient? signerClient,
IOptionsMonitor<DriftAttestationOptions> options,
TimeProvider timeProvider,
ILogger<DriftAttestationService> logger)
{
_signerClient = signerClient;
_options = options ?? throw new ArgumentNullException(nameof(options));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc/>
public async Task<DriftAttestationResult> CreateAttestationAsync(
DriftAttestationRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
using var activity = Activity.Current?.Source.StartActivity(
"reachability_drift.attest",
ActivityKind.Internal);
activity?.SetTag("tenant", request.TenantId);
activity?.SetTag("base_scan", request.DriftResult.BaseScanId);
activity?.SetTag("head_scan", request.DriftResult.HeadScanId);
var options = _options.CurrentValue;
if (!options.Enabled)
{
_logger.LogDebug("Drift attestation is disabled");
return new DriftAttestationResult
{
Success = false,
Error = "Attestation creation is disabled"
};
}
try
{
// Build the predicate
var predicate = BuildPredicate(request);
// Build the in-toto statement
var statement = BuildStatement(request, predicate);
var statementJson = SerializeCanonical(statement);
var payloadBase64 = Convert.ToBase64String(statementJson);
// Sign the payload
DriftDsseSignature signature;
string? keyId;
if (_signerClient is not null && options.UseSignerService)
{
var signResult = await _signerClient.SignAsync(
new DriftSignerRequest
{
PayloadType = ReachabilityDriftPredicate.PredicateType,
PayloadBase64 = payloadBase64,
KeyId = request.KeyId ?? options.DefaultKeyId,
TenantId = request.TenantId
},
cancellationToken).ConfigureAwait(false);
if (!signResult.Success)
{
_logger.LogWarning("Failed to sign drift attestation: {Error}", signResult.Error);
return new DriftAttestationResult
{
Success = false,
Error = signResult.Error ?? "Signing failed"
};
}
keyId = signResult.KeyId;
signature = new DriftDsseSignature
{
KeyId = signResult.KeyId ?? "unknown",
Sig = signResult.Signature!
};
}
else
{
// Create locally-signed envelope (dev/test mode)
keyId = "local-dev-key";
signature = SignLocally(statementJson);
_logger.LogDebug("Created locally-signed attestation (signer service not available)");
}
var envelope = new DriftDsseEnvelope
{
PayloadType = "application/vnd.in-toto+json",
Payload = payloadBase64,
Signatures = [signature]
};
var envelopeJson = JsonSerializer.Serialize(envelope, CanonicalJsonOptions);
var envelopeDigestHex = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(envelopeJson))).ToLowerInvariant();
var attestationDigest = $"sha256:{envelopeDigestHex}";
_logger.LogInformation(
"Created drift attestation for scans {BaseScan} → {HeadScan}. " +
"Newly reachable: {NewlyReachable}, Newly unreachable: {NewlyUnreachable}. Digest: {Digest}",
request.DriftResult.BaseScanId,
request.DriftResult.HeadScanId,
request.DriftResult.NewlyReachable.Length,
request.DriftResult.NewlyUnreachable.Length,
attestationDigest);
return new DriftAttestationResult
{
Success = true,
AttestationDigest = attestationDigest,
EnvelopeJson = envelopeJson,
KeyId = keyId,
CreatedAt = _timeProvider.GetUtcNow()
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to create drift attestation");
activity?.SetStatus(ActivityStatusCode.Error, ex.Message);
return new DriftAttestationResult
{
Success = false,
Error = ex.Message
};
}
}
private ReachabilityDriftPredicate BuildPredicate(DriftAttestationRequest request)
{
var drift = request.DriftResult;
var now = _timeProvider.GetUtcNow();
return new ReachabilityDriftPredicate
{
BaseImage = new DriftImageReference
{
Name = request.BaseImage.Name,
Digest = request.BaseImage.Digest,
Tag = request.BaseImage.Tag
},
TargetImage = new DriftImageReference
{
Name = request.TargetImage.Name,
Digest = request.TargetImage.Digest,
Tag = request.TargetImage.Tag
},
BaseScanId = drift.BaseScanId,
HeadScanId = drift.HeadScanId,
Drift = new DriftPredicateSummary
{
NewlyReachableCount = drift.NewlyReachable.Length,
NewlyUnreachableCount = drift.NewlyUnreachable.Length,
NewlyReachable = drift.NewlyReachable
.Select(s => MapSinkToSummary(s))
.ToImmutableArray(),
NewlyUnreachable = drift.NewlyUnreachable
.Select(s => MapSinkToSummary(s))
.ToImmutableArray()
},
Analysis = new DriftAnalysisMetadata
{
AnalyzedAt = now,
Scanner = new DriftScannerInfo
{
Name = "StellaOps.Scanner",
Version = GetScannerVersion(),
Ruleset = _options.CurrentValue.SinkRuleset
},
BaseGraphDigest = request.BaseGraphDigest,
HeadGraphDigest = request.HeadGraphDigest,
CodeChangesDigest = request.CodeChangesDigest
}
};
}
private static DriftedSinkPredicateSummary MapSinkToSummary(DriftedSink sink)
{
return new DriftedSinkPredicateSummary
{
SinkNodeId = sink.SinkNodeId,
Symbol = sink.Symbol,
SinkCategory = sink.SinkCategory.ToString(),
CauseKind = sink.Cause.Kind.ToString(),
CauseDescription = sink.Cause.Description,
AssociatedCves = sink.AssociatedVulns
.Select(v => v.CveId)
.Where(cve => !string.IsNullOrEmpty(cve))
.ToImmutableArray()!,
PathHash = ComputePathHash(sink.Path)
};
}
private static string ComputePathHash(CompressedPath path)
{
// Create a deterministic representation of the path
var pathData = new StringBuilder();
pathData.Append(path.Entrypoint.NodeId);
pathData.Append(':');
foreach (var node in path.KeyNodes)
{
pathData.Append(node.NodeId);
pathData.Append(':');
}
pathData.Append(path.Sink.NodeId);
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(pathData.ToString()));
return Convert.ToHexString(hash).ToLowerInvariant()[..16]; // First 64 bits
}
private DriftInTotoStatement BuildStatement(
DriftAttestationRequest request,
ReachabilityDriftPredicate predicate)
{
return new DriftInTotoStatement
{
Type = "https://in-toto.io/Statement/v1",
Subject =
[
new DriftSubject
{
Name = request.TargetImage.Name,
Digest = new Dictionary<string, string>
{
["sha256"] = request.TargetImage.Digest.Replace("sha256:", "")
}
}
],
PredicateType = ReachabilityDriftPredicate.PredicateType,
Predicate = predicate
};
}
private static byte[] SerializeCanonical<T>(T value)
{
return JsonSerializer.SerializeToUtf8Bytes(value, CanonicalJsonOptions);
}
private static DriftDsseSignature SignLocally(byte[] payload)
{
// Local/dev signing: create a placeholder signature
// In production, this would use a real key
var paeString = $"DSSEv1 {payload.Length} application/vnd.in-toto+json {payload.Length} ";
var paeBytes = Encoding.UTF8.GetBytes(paeString).Concat(payload).ToArray();
var hash = SHA256.HashData(paeBytes);
return new DriftDsseSignature
{
KeyId = "local-dev-key",
Sig = Convert.ToBase64String(hash)
};
}
private static string GetScannerVersion()
{
var assembly = typeof(DriftAttestationService).Assembly;
var version = assembly.GetName().Version;
return version?.ToString() ?? "0.0.0";
}
}
/// <summary>
/// In-toto statement for drift attestation.
/// </summary>
internal sealed record DriftInTotoStatement
{
[JsonPropertyName("_type")]
public required string Type { get; init; }
[JsonPropertyName("subject")]
public required IReadOnlyList<DriftSubject> Subject { get; init; }
[JsonPropertyName("predicateType")]
public required string PredicateType { get; init; }
[JsonPropertyName("predicate")]
public required ReachabilityDriftPredicate Predicate { get; init; }
}
/// <summary>
/// Subject in an in-toto statement.
/// </summary>
internal sealed record DriftSubject
{
[JsonPropertyName("name")]
public required string Name { get; init; }
[JsonPropertyName("digest")]
public required IReadOnlyDictionary<string, string> Digest { get; init; }
}
/// <summary>
/// DSSE envelope for drift attestation.
/// </summary>
internal sealed record DriftDsseEnvelope
{
[JsonPropertyName("payloadType")]
public required string PayloadType { get; init; }
[JsonPropertyName("payload")]
public required string Payload { get; init; }
[JsonPropertyName("signatures")]
public required IReadOnlyList<DriftDsseSignature> Signatures { get; init; }
}
/// <summary>
/// Signature in a DSSE envelope.
/// </summary>
internal sealed record DriftDsseSignature
{
[JsonPropertyName("keyid")]
public required string KeyId { get; init; }
[JsonPropertyName("sig")]
public required string Sig { get; init; }
}

View File

@@ -0,0 +1,58 @@
// -----------------------------------------------------------------------------
// DriftAttestationServiceCollectionExtensions.cs
// Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
// Task: UI-017
// Description: Service collection extensions for drift attestation.
// -----------------------------------------------------------------------------
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
namespace StellaOps.Scanner.ReachabilityDrift.Attestation;
/// <summary>
/// Extension methods for registering drift attestation services.
/// </summary>
public static class DriftAttestationServiceCollectionExtensions
{
/// <summary>
/// Adds drift attestation services to the service collection.
/// </summary>
/// <param name="services">The service collection.</param>
/// <param name="configuration">The configuration.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddDriftAttestation(
this IServiceCollection services,
IConfiguration configuration)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configuration);
// Bind options
services.Configure<DriftAttestationOptions>(
configuration.GetSection(DriftAttestationOptions.SectionName));
// Register the attestation service
services.TryAddSingleton<IDriftAttestationService, DriftAttestationService>();
// Register TimeProvider if not already registered
services.TryAddSingleton(TimeProvider.System);
return services;
}
/// <summary>
/// Adds a custom drift signer client implementation.
/// </summary>
/// <typeparam name="TClient">The signer client implementation type.</typeparam>
/// <param name="services">The service collection.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddDriftSignerClient<TClient>(
this IServiceCollection services)
where TClient : class, IDriftSignerClient
{
services.TryAddSingleton<IDriftSignerClient, TClient>();
return services;
}
}

View File

@@ -0,0 +1,140 @@
// -----------------------------------------------------------------------------
// IDriftAttestationService.cs
// Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
// Task: UI-016
// Description: Interface for creating signed reachability drift attestations.
// -----------------------------------------------------------------------------
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Scanner.ReachabilityDrift.Attestation;
/// <summary>
/// Service for creating signed DSSE attestations for reachability drift results.
/// </summary>
public interface IDriftAttestationService
{
/// <summary>
/// Creates a signed attestation for a drift result.
/// </summary>
/// <param name="request">The attestation request containing drift data and signing options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The attestation result including the signed envelope and digest.</returns>
Task<DriftAttestationResult> CreateAttestationAsync(
DriftAttestationRequest request,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Request to create a drift attestation.
/// </summary>
public sealed record DriftAttestationRequest
{
/// <summary>
/// The tenant ID for key selection.
/// </summary>
public required string TenantId { get; init; }
/// <summary>
/// The drift result to attest.
/// </summary>
public required ReachabilityDriftResult DriftResult { get; init; }
/// <summary>
/// Reference to the base image.
/// </summary>
public required ImageRef BaseImage { get; init; }
/// <summary>
/// Reference to the target (head) image.
/// </summary>
public required ImageRef TargetImage { get; init; }
/// <summary>
/// Content-addressed digest of the base call graph.
/// </summary>
public required string BaseGraphDigest { get; init; }
/// <summary>
/// Content-addressed digest of the head call graph.
/// </summary>
public required string HeadGraphDigest { get; init; }
/// <summary>
/// Optional: digest of the code change facts used.
/// </summary>
public string? CodeChangesDigest { get; init; }
/// <summary>
/// Optional key ID for signing. If not provided, uses default.
/// </summary>
public string? KeyId { get; init; }
/// <summary>
/// Whether to submit to transparency log.
/// </summary>
public bool SubmitToRekor { get; init; }
}
/// <summary>
/// Image reference for drift attestation.
/// </summary>
public sealed record ImageRef
{
/// <summary>
/// Image name (repository/image).
/// </summary>
public required string Name { get; init; }
/// <summary>
/// Image digest (sha256:...).
/// </summary>
public required string Digest { get; init; }
/// <summary>
/// Optional tag at time of analysis.
/// </summary>
public string? Tag { get; init; }
}
/// <summary>
/// Result of drift attestation creation.
/// </summary>
public sealed record DriftAttestationResult
{
/// <summary>
/// Whether the attestation was created successfully.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Content-addressed digest of the attestation envelope.
/// </summary>
public string? AttestationDigest { get; init; }
/// <summary>
/// The signed DSSE envelope (JSON).
/// </summary>
public string? EnvelopeJson { get; init; }
/// <summary>
/// Key ID used for signing.
/// </summary>
public string? KeyId { get; init; }
/// <summary>
/// Error message if creation failed.
/// </summary>
public string? Error { get; init; }
/// <summary>
/// Rekor log entry index if submitted.
/// </summary>
public long? RekorLogIndex { get; init; }
/// <summary>
/// When the attestation was created.
/// </summary>
public DateTimeOffset? CreatedAt { get; init; }
}

View File

@@ -0,0 +1,79 @@
// -----------------------------------------------------------------------------
// IDriftSignerClient.cs
// Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
// Task: UI-016
// Description: Client interface for signing drift attestations.
// -----------------------------------------------------------------------------
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Scanner.ReachabilityDrift.Attestation;
/// <summary>
/// Client for signing drift attestations via the Signer service.
/// </summary>
public interface IDriftSignerClient
{
/// <summary>
/// Signs the given payload.
/// </summary>
/// <param name="request">The signing request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The signing result.</returns>
Task<DriftSignerResult> SignAsync(
DriftSignerRequest request,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Request to sign a drift attestation payload.
/// </summary>
public sealed record DriftSignerRequest
{
/// <summary>
/// The predicate type being signed.
/// </summary>
public required string PayloadType { get; init; }
/// <summary>
/// Base64-encoded payload to sign.
/// </summary>
public required string PayloadBase64 { get; init; }
/// <summary>
/// Key ID to use for signing.
/// </summary>
public string? KeyId { get; init; }
/// <summary>
/// Tenant ID for key selection.
/// </summary>
public required string TenantId { get; init; }
}
/// <summary>
/// Result from signing a drift attestation.
/// </summary>
public sealed record DriftSignerResult
{
/// <summary>
/// Whether signing succeeded.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// The signature (base64 encoded).
/// </summary>
public string? Signature { get; init; }
/// <summary>
/// The key ID that was used.
/// </summary>
public string? KeyId { get; init; }
/// <summary>
/// Error message if signing failed.
/// </summary>
public string? Error { get; init; }
}

View File

@@ -8,12 +8,16 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\\StellaOps.Scanner.CallGraph\\StellaOps.Scanner.CallGraph.csproj" />
<ProjectReference Include="..\\..\\..\\Attestor\\__Libraries\\StellaOps.Attestor.ProofChain\\StellaOps.Attestor.ProofChain.csproj" />
<ProjectReference Include="..\\..\\..\\Signer\\StellaOps.Signer\\StellaOps.Signer.Core\\StellaOps.Signer.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,50 @@
namespace StellaOps.Scanner.Storage.Entities;
/// <summary>
/// Entity mapping to scanner.proof_bundle table.
/// Stores cryptographic evidence chains for scan results.
/// </summary>
public sealed class ProofBundleRow
{
/// <summary>Reference to the parent scan.</summary>
public Guid ScanId { get; set; }
/// <summary>Merkle root hash of all evidence.</summary>
public string RootHash { get; set; } = default!;
/// <summary>Type of bundle: standard, extended, or minimal.</summary>
public string BundleType { get; set; } = "standard";
/// <summary>Full DSSE-signed envelope as JSONB.</summary>
public string? DsseEnvelope { get; set; }
/// <summary>Key ID used for signing.</summary>
public string? SignatureKeyId { get; set; }
/// <summary>Signature algorithm (e.g., ed25519, rsa-pss-sha256).</summary>
public string? SignatureAlgorithm { get; set; }
/// <summary>Bundle content (ZIP archive or raw data).</summary>
public byte[]? BundleContent { get; set; }
/// <summary>SHA-256 hash of bundle_content.</summary>
public string BundleHash { get; set; } = default!;
/// <summary>Hash of the proof ledger.</summary>
public string? LedgerHash { get; set; }
/// <summary>Reference to the scan manifest hash.</summary>
public string? ManifestHash { get; set; }
/// <summary>Hash of the SBOM in this bundle.</summary>
public string? SbomHash { get; set; }
/// <summary>Hash of the VEX in this bundle.</summary>
public string? VexHash { get; set; }
/// <summary>When this bundle was created.</summary>
public DateTimeOffset CreatedAt { get; set; }
/// <summary>Optional expiration time for retention policies.</summary>
public DateTimeOffset? ExpiresAt { get; set; }
}

View File

@@ -0,0 +1,44 @@
namespace StellaOps.Scanner.Storage.Entities;
/// <summary>
/// Entity mapping to scanner.scan_manifest table.
/// Captures all inputs that affect a scan's results for reproducibility.
/// </summary>
public sealed class ScanManifestRow
{
/// <summary>Unique identifier for this manifest.</summary>
public Guid ManifestId { get; set; }
/// <summary>Reference to the parent scan.</summary>
public Guid ScanId { get; set; }
/// <summary>SHA-256 hash of the manifest content.</summary>
public string ManifestHash { get; set; } = default!;
/// <summary>Hash of the input SBOM.</summary>
public string SbomHash { get; set; } = default!;
/// <summary>Hash of the rules snapshot.</summary>
public string RulesHash { get; set; } = default!;
/// <summary>Hash of the advisory feed snapshot.</summary>
public string FeedHash { get; set; } = default!;
/// <summary>Hash of the scoring policy.</summary>
public string PolicyHash { get; set; } = default!;
/// <summary>When the scan started.</summary>
public DateTimeOffset ScanStartedAt { get; set; }
/// <summary>When the scan completed (null if still running).</summary>
public DateTimeOffset? ScanCompletedAt { get; set; }
/// <summary>Full manifest content as JSONB.</summary>
public string ManifestContent { get; set; } = default!;
/// <summary>Version of the scanner that created this manifest.</summary>
public string ScannerVersion { get; set; } = default!;
/// <summary>When this row was created.</summary>
public DateTimeOffset CreatedAt { get; set; }
}

View File

@@ -0,0 +1,103 @@
-- =============================================================================
-- Migration: 015_vuln_surface_triggers_update.sql
-- Sprint: SPRINT_3700_0003_0001_trigger_extraction
-- Task: TRIG-010, TRIG-013
-- Description: Add trigger_count column and trigger path storage.
--
-- Note: migrations are executed with the module schema as the active search_path.
-- Keep objects unqualified so integration tests can run in isolated schemas.
-- =============================================================================
-- =============================================================================
-- ADD TRIGGER_COUNT TO VULN_SURFACES
-- =============================================================================
ALTER TABLE vuln_surfaces
ADD COLUMN IF NOT EXISTS trigger_count INTEGER NOT NULL DEFAULT 0;
COMMENT ON COLUMN vuln_surfaces.trigger_count IS 'Count of public API trigger methods that can reach changed sinks';
-- =============================================================================
-- VULN_SURFACE_TRIGGER_PATHS: Internal paths from trigger to sink
-- =============================================================================
CREATE TABLE IF NOT EXISTS vuln_surface_trigger_paths (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
surface_id UUID NOT NULL REFERENCES vuln_surfaces(id) ON DELETE CASCADE,
-- Trigger method (public API entry point)
trigger_method_key TEXT NOT NULL, -- FQN of public API method
trigger_method_name TEXT NOT NULL, -- Simple name
trigger_declaring_type TEXT NOT NULL, -- Declaring class/module
-- Sink method (changed vulnerability method)
sink_method_key TEXT NOT NULL, -- FQN of sink method (references vuln_surface_sinks.method_key)
-- Path from trigger to sink
path_length INTEGER NOT NULL, -- Number of hops
path_methods TEXT[] NOT NULL, -- Ordered list of method keys in path
-- Metadata
is_interface_trigger BOOLEAN NOT NULL DEFAULT false, -- Trigger is interface method
is_virtual_trigger BOOLEAN NOT NULL DEFAULT false, -- Trigger is virtual/overridable
computed_at TIMESTAMPTZ NOT NULL DEFAULT now(),
CONSTRAINT uq_trigger_path_key UNIQUE (surface_id, trigger_method_key, sink_method_key)
);
-- Indexes for common queries
CREATE INDEX IF NOT EXISTS idx_vuln_surface_trigger_paths_surface ON vuln_surface_trigger_paths(surface_id);
CREATE INDEX IF NOT EXISTS idx_vuln_surface_trigger_paths_trigger ON vuln_surface_trigger_paths(trigger_method_key);
CREATE INDEX IF NOT EXISTS idx_vuln_surface_trigger_paths_sink ON vuln_surface_trigger_paths(sink_method_key);
COMMENT ON TABLE vuln_surface_trigger_paths IS 'Internal paths from public API trigger methods to vulnerability sink methods within a package';
-- =============================================================================
-- FUNCTIONS
-- =============================================================================
CREATE OR REPLACE FUNCTION get_vuln_surface_triggers(
p_surface_id UUID
)
RETURNS TABLE (
trigger_method_key TEXT,
trigger_method_name TEXT,
trigger_declaring_type TEXT,
sink_count BIGINT,
shortest_path_length INTEGER,
is_interface_trigger BOOLEAN
) AS $$
BEGIN
RETURN QUERY
SELECT
tp.trigger_method_key,
tp.trigger_method_name,
tp.trigger_declaring_type,
COUNT(DISTINCT tp.sink_method_key)::BIGINT AS sink_count,
MIN(tp.path_length) AS shortest_path_length,
BOOL_OR(tp.is_interface_trigger) AS is_interface_trigger
FROM vuln_surface_trigger_paths tp
WHERE tp.surface_id = p_surface_id
GROUP BY tp.trigger_method_key, tp.trigger_method_name, tp.trigger_declaring_type
ORDER BY sink_count DESC, shortest_path_length;
END;
$$ LANGUAGE plpgsql STABLE;
CREATE OR REPLACE FUNCTION get_trigger_path_to_sink(
p_surface_id UUID,
p_trigger_method_key TEXT,
p_sink_method_key TEXT
)
RETURNS TABLE (
path_length INTEGER,
path_methods TEXT[]
) AS $$
BEGIN
RETURN QUERY
SELECT
tp.path_length,
tp.path_methods
FROM vuln_surface_trigger_paths tp
WHERE tp.surface_id = p_surface_id
AND tp.trigger_method_key = p_trigger_method_key
AND tp.sink_method_key = p_sink_method_key;
END;
$$ LANGUAGE plpgsql STABLE;

View File

@@ -0,0 +1,135 @@
-- -----------------------------------------------------------------------------
-- 016_reach_cache.sql
-- Sprint: SPRINT_3700_0006_0001_incremental_cache (CACHE-001)
-- Description: Schema for reachability result caching.
-- -----------------------------------------------------------------------------
-- Reachability cache metadata per service
CREATE TABLE IF NOT EXISTS reach_cache_entries (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
service_id TEXT NOT NULL,
graph_hash TEXT NOT NULL,
sbom_hash TEXT,
entry_point_count INTEGER NOT NULL DEFAULT 0,
sink_count INTEGER NOT NULL DEFAULT 0,
pair_count INTEGER NOT NULL DEFAULT 0,
reachable_count INTEGER NOT NULL DEFAULT 0,
unreachable_count INTEGER NOT NULL DEFAULT 0,
cached_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
expires_at TIMESTAMPTZ,
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
CONSTRAINT uq_reach_cache_service_graph UNIQUE (service_id, graph_hash)
);
-- Index for cache lookups
CREATE INDEX IF NOT EXISTS idx_reach_cache_service_id ON reach_cache_entries (service_id);
CREATE INDEX IF NOT EXISTS idx_reach_cache_expires ON reach_cache_entries (expires_at) WHERE expires_at IS NOT NULL;
-- Cached (entry, sink) pair results
CREATE TABLE IF NOT EXISTS reach_cache_pairs (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
cache_entry_id UUID NOT NULL REFERENCES reach_cache_entries(id) ON DELETE CASCADE,
entry_method_key TEXT NOT NULL,
sink_method_key TEXT NOT NULL,
is_reachable BOOLEAN NOT NULL,
path_length INTEGER,
confidence DOUBLE PRECISION NOT NULL DEFAULT 1.0,
computed_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
CONSTRAINT uq_reach_pair UNIQUE (cache_entry_id, entry_method_key, sink_method_key)
);
-- Index for pair lookups
CREATE INDEX IF NOT EXISTS idx_reach_cache_pairs_entry ON reach_cache_pairs (cache_entry_id);
CREATE INDEX IF NOT EXISTS idx_reach_cache_pairs_reachable ON reach_cache_pairs (cache_entry_id, is_reachable);
-- Graph snapshots for delta computation
CREATE TABLE IF NOT EXISTS reach_graph_snapshots (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
service_id TEXT NOT NULL,
graph_hash TEXT NOT NULL,
node_count INTEGER NOT NULL DEFAULT 0,
edge_count INTEGER NOT NULL DEFAULT 0,
entry_point_count INTEGER NOT NULL DEFAULT 0,
snapshot_data BYTEA, -- Compressed graph data
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
CONSTRAINT uq_graph_snapshot UNIQUE (service_id, graph_hash)
);
-- Cache statistics for monitoring
CREATE TABLE IF NOT EXISTS reach_cache_stats (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
service_id TEXT NOT NULL UNIQUE,
total_hits BIGINT NOT NULL DEFAULT 0,
total_misses BIGINT NOT NULL DEFAULT 0,
full_recomputes BIGINT NOT NULL DEFAULT 0,
incremental_computes BIGINT NOT NULL DEFAULT 0,
current_graph_hash TEXT,
last_populated_at TIMESTAMPTZ,
last_invalidated_at TIMESTAMPTZ,
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- State flip history for auditing
CREATE TABLE IF NOT EXISTS reach_state_flips (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
service_id TEXT NOT NULL,
scan_id UUID,
entry_method_key TEXT NOT NULL,
sink_method_key TEXT NOT NULL,
flip_type TEXT NOT NULL CHECK (flip_type IN ('became_reachable', 'became_unreachable')),
cve_id TEXT,
package_name TEXT,
detected_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Index for flip queries
CREATE INDEX IF NOT EXISTS idx_state_flips_service ON reach_state_flips (service_id, detected_at DESC);
CREATE INDEX IF NOT EXISTS idx_state_flips_scan ON reach_state_flips (scan_id) WHERE scan_id IS NOT NULL;
CREATE INDEX IF NOT EXISTS idx_state_flips_type ON reach_state_flips (flip_type);
-- Function to clean up expired cache entries
CREATE OR REPLACE FUNCTION cleanup_expired_reach_cache()
RETURNS INTEGER AS $$
DECLARE
deleted_count INTEGER;
BEGIN
DELETE FROM reach_cache_entries
WHERE expires_at < NOW();
GET DIAGNOSTICS deleted_count = ROW_COUNT;
RETURN deleted_count;
END;
$$ LANGUAGE plpgsql;
-- Function to update cache statistics
CREATE OR REPLACE FUNCTION update_reach_cache_stats(
p_service_id TEXT,
p_is_hit BOOLEAN,
p_is_incremental BOOLEAN DEFAULT NULL,
p_graph_hash TEXT DEFAULT NULL
)
RETURNS VOID AS $$
BEGIN
INSERT INTO reach_cache_stats (service_id, total_hits, total_misses, current_graph_hash)
VALUES (p_service_id,
CASE WHEN p_is_hit THEN 1 ELSE 0 END,
CASE WHEN NOT p_is_hit THEN 1 ELSE 0 END,
p_graph_hash)
ON CONFLICT (service_id) DO UPDATE SET
total_hits = reach_cache_stats.total_hits + CASE WHEN p_is_hit THEN 1 ELSE 0 END,
total_misses = reach_cache_stats.total_misses + CASE WHEN NOT p_is_hit THEN 1 ELSE 0 END,
full_recomputes = reach_cache_stats.full_recomputes +
CASE WHEN p_is_incremental = FALSE THEN 1 ELSE 0 END,
incremental_computes = reach_cache_stats.incremental_computes +
CASE WHEN p_is_incremental = TRUE THEN 1 ELSE 0 END,
current_graph_hash = COALESCE(p_graph_hash, reach_cache_stats.current_graph_hash),
last_populated_at = CASE WHEN NOT p_is_hit THEN NOW() ELSE reach_cache_stats.last_populated_at END,
updated_at = NOW();
END;
$$ LANGUAGE plpgsql;
COMMENT ON TABLE reach_cache_entries IS 'Cached reachability analysis results per service/graph';
COMMENT ON TABLE reach_cache_pairs IS 'Individual (entry, sink) pair reachability results';
COMMENT ON TABLE reach_graph_snapshots IS 'Graph snapshots for delta computation';
COMMENT ON TABLE reach_cache_stats IS 'Cache performance statistics';
COMMENT ON TABLE reach_state_flips IS 'History of reachability state changes';

View File

@@ -0,0 +1,142 @@
using Dapper;
using StellaOps.Scanner.Storage.Entities;
using StellaOps.Scanner.Storage.Repositories;
namespace StellaOps.Scanner.Storage.Postgres;
/// <summary>
/// PostgreSQL implementation of proof bundle repository.
/// </summary>
public sealed class PostgresProofBundleRepository : IProofBundleRepository
{
private readonly ScannerDataSource _dataSource;
private string SchemaName => _dataSource.SchemaName ?? ScannerDataSource.DefaultSchema;
private string TableName => $"{SchemaName}.proof_bundle";
public PostgresProofBundleRepository(ScannerDataSource dataSource)
{
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
}
public async Task<ProofBundleRow?> GetByRootHashAsync(string rootHash, CancellationToken cancellationToken = default)
{
var sql = $"""
SELECT
scan_id AS ScanId,
root_hash AS RootHash,
bundle_type AS BundleType,
dsse_envelope AS DsseEnvelope,
signature_keyid AS SignatureKeyId,
signature_algorithm AS SignatureAlgorithm,
bundle_content AS BundleContent,
bundle_hash AS BundleHash,
ledger_hash AS LedgerHash,
manifest_hash AS ManifestHash,
sbom_hash AS SbomHash,
vex_hash AS VexHash,
created_at AS CreatedAt,
expires_at AS ExpiresAt
FROM {TableName}
WHERE root_hash = @RootHash
""";
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
return await connection.QuerySingleOrDefaultAsync<ProofBundleRow>(
new CommandDefinition(sql, new { RootHash = rootHash }, cancellationToken: cancellationToken))
.ConfigureAwait(false);
}
public async Task<IReadOnlyList<ProofBundleRow>> GetByScanIdAsync(Guid scanId, CancellationToken cancellationToken = default)
{
var sql = $"""
SELECT
scan_id AS ScanId,
root_hash AS RootHash,
bundle_type AS BundleType,
dsse_envelope AS DsseEnvelope,
signature_keyid AS SignatureKeyId,
signature_algorithm AS SignatureAlgorithm,
bundle_content AS BundleContent,
bundle_hash AS BundleHash,
ledger_hash AS LedgerHash,
manifest_hash AS ManifestHash,
sbom_hash AS SbomHash,
vex_hash AS VexHash,
created_at AS CreatedAt,
expires_at AS ExpiresAt
FROM {TableName}
WHERE scan_id = @ScanId
ORDER BY created_at DESC
""";
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
var results = await connection.QueryAsync<ProofBundleRow>(
new CommandDefinition(sql, new { ScanId = scanId }, cancellationToken: cancellationToken))
.ConfigureAwait(false);
return results.ToList();
}
public async Task<ProofBundleRow> SaveAsync(ProofBundleRow bundle, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(bundle);
var sql = $"""
INSERT INTO {TableName} (
scan_id,
root_hash,
bundle_type,
dsse_envelope,
signature_keyid,
signature_algorithm,
bundle_content,
bundle_hash,
ledger_hash,
manifest_hash,
sbom_hash,
vex_hash,
expires_at
) VALUES (
@ScanId,
@RootHash,
@BundleType,
@DsseEnvelope::jsonb,
@SignatureKeyId,
@SignatureAlgorithm,
@BundleContent,
@BundleHash,
@LedgerHash,
@ManifestHash,
@SbomHash,
@VexHash,
@ExpiresAt
)
ON CONFLICT (scan_id, root_hash) DO UPDATE SET
dsse_envelope = EXCLUDED.dsse_envelope,
bundle_content = EXCLUDED.bundle_content,
bundle_hash = EXCLUDED.bundle_hash,
ledger_hash = EXCLUDED.ledger_hash
RETURNING created_at AS CreatedAt
""";
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
var createdAt = await connection.QuerySingleAsync<DateTimeOffset>(
new CommandDefinition(sql, bundle, cancellationToken: cancellationToken))
.ConfigureAwait(false);
bundle.CreatedAt = createdAt;
return bundle;
}
public async Task<int> DeleteExpiredAsync(CancellationToken cancellationToken = default)
{
var sql = $"""
DELETE FROM {TableName}
WHERE expires_at IS NOT NULL AND expires_at < NOW()
""";
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
return await connection.ExecuteAsync(
new CommandDefinition(sql, cancellationToken: cancellationToken))
.ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,129 @@
using Dapper;
using StellaOps.Scanner.Storage.Entities;
using StellaOps.Scanner.Storage.Repositories;
namespace StellaOps.Scanner.Storage.Postgres;
/// <summary>
/// PostgreSQL implementation of scan manifest repository.
/// </summary>
public sealed class PostgresScanManifestRepository : IScanManifestRepository
{
private readonly ScannerDataSource _dataSource;
private string SchemaName => _dataSource.SchemaName ?? ScannerDataSource.DefaultSchema;
private string TableName => $"{SchemaName}.scan_manifest";
public PostgresScanManifestRepository(ScannerDataSource dataSource)
{
_dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
}
public async Task<ScanManifestRow?> GetByHashAsync(string manifestHash, CancellationToken cancellationToken = default)
{
var sql = $"""
SELECT
manifest_id AS ManifestId,
scan_id AS ScanId,
manifest_hash AS ManifestHash,
sbom_hash AS SbomHash,
rules_hash AS RulesHash,
feed_hash AS FeedHash,
policy_hash AS PolicyHash,
scan_started_at AS ScanStartedAt,
scan_completed_at AS ScanCompletedAt,
manifest_content AS ManifestContent,
scanner_version AS ScannerVersion,
created_at AS CreatedAt
FROM {TableName}
WHERE manifest_hash = @ManifestHash
""";
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
return await connection.QuerySingleOrDefaultAsync<ScanManifestRow>(
new CommandDefinition(sql, new { ManifestHash = manifestHash }, cancellationToken: cancellationToken))
.ConfigureAwait(false);
}
public async Task<ScanManifestRow?> GetByScanIdAsync(Guid scanId, CancellationToken cancellationToken = default)
{
var sql = $"""
SELECT
manifest_id AS ManifestId,
scan_id AS ScanId,
manifest_hash AS ManifestHash,
sbom_hash AS SbomHash,
rules_hash AS RulesHash,
feed_hash AS FeedHash,
policy_hash AS PolicyHash,
scan_started_at AS ScanStartedAt,
scan_completed_at AS ScanCompletedAt,
manifest_content AS ManifestContent,
scanner_version AS ScannerVersion,
created_at AS CreatedAt
FROM {TableName}
WHERE scan_id = @ScanId
ORDER BY created_at DESC
LIMIT 1
""";
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
return await connection.QuerySingleOrDefaultAsync<ScanManifestRow>(
new CommandDefinition(sql, new { ScanId = scanId }, cancellationToken: cancellationToken))
.ConfigureAwait(false);
}
public async Task<ScanManifestRow> SaveAsync(ScanManifestRow manifest, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(manifest);
var sql = $"""
INSERT INTO {TableName} (
scan_id,
manifest_hash,
sbom_hash,
rules_hash,
feed_hash,
policy_hash,
scan_started_at,
scan_completed_at,
manifest_content,
scanner_version
) VALUES (
@ScanId,
@ManifestHash,
@SbomHash,
@RulesHash,
@FeedHash,
@PolicyHash,
@ScanStartedAt,
@ScanCompletedAt,
@ManifestContent::jsonb,
@ScannerVersion
)
RETURNING manifest_id AS ManifestId, created_at AS CreatedAt
""";
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
var result = await connection.QuerySingleAsync<(Guid ManifestId, DateTimeOffset CreatedAt)>(
new CommandDefinition(sql, manifest, cancellationToken: cancellationToken))
.ConfigureAwait(false);
manifest.ManifestId = result.ManifestId;
manifest.CreatedAt = result.CreatedAt;
return manifest;
}
public async Task MarkCompletedAsync(Guid manifestId, DateTimeOffset completedAt, CancellationToken cancellationToken = default)
{
var sql = $"""
UPDATE {TableName}
SET scan_completed_at = @CompletedAt
WHERE manifest_id = @ManifestId
""";
await using var connection = await _dataSource.OpenSystemConnectionAsync(cancellationToken).ConfigureAwait(false);
await connection.ExecuteAsync(
new CommandDefinition(sql, new { ManifestId = manifestId, CompletedAt = completedAt }, cancellationToken: cancellationToken))
.ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,55 @@
using StellaOps.Scanner.Storage.Entities;
namespace StellaOps.Scanner.Storage.Repositories;
/// <summary>
/// Repository interface for scan manifest operations.
/// </summary>
public interface IScanManifestRepository
{
/// <summary>
/// Gets a scan manifest by its hash.
/// </summary>
Task<ScanManifestRow?> GetByHashAsync(string manifestHash, CancellationToken cancellationToken = default);
/// <summary>
/// Gets a scan manifest by scan ID.
/// </summary>
Task<ScanManifestRow?> GetByScanIdAsync(Guid scanId, CancellationToken cancellationToken = default);
/// <summary>
/// Saves a new scan manifest.
/// </summary>
Task<ScanManifestRow> SaveAsync(ScanManifestRow manifest, CancellationToken cancellationToken = default);
/// <summary>
/// Marks a scan manifest as completed.
/// </summary>
Task MarkCompletedAsync(Guid manifestId, DateTimeOffset completedAt, CancellationToken cancellationToken = default);
}
/// <summary>
/// Repository interface for proof bundle operations.
/// </summary>
public interface IProofBundleRepository
{
/// <summary>
/// Gets a proof bundle by its root hash.
/// </summary>
Task<ProofBundleRow?> GetByRootHashAsync(string rootHash, CancellationToken cancellationToken = default);
/// <summary>
/// Gets all proof bundles for a scan.
/// </summary>
Task<IReadOnlyList<ProofBundleRow>> GetByScanIdAsync(Guid scanId, CancellationToken cancellationToken = default);
/// <summary>
/// Saves a new proof bundle.
/// </summary>
Task<ProofBundleRow> SaveAsync(ProofBundleRow bundle, CancellationToken cancellationToken = default);
/// <summary>
/// Deletes expired proof bundles.
/// </summary>
Task<int> DeleteExpiredAsync(CancellationToken cancellationToken = default);
}

View File

@@ -7,14 +7,14 @@
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="AWSSDK.S3" Version="3.7.305.6" />
<PackageReference Include="AWSSDK.S3" Version="4.0.6" />
<PackageReference Include="Dapper" Version="2.1.35" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0" />
<PackageReference Include="Npgsql" Version="9.0.2" />
<PackageReference Include="Npgsql" Version="9.0.3" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="Postgres\Migrations\**\*.sql" LogicalName="%(RecursiveDir)%(Filename)%(Extension)" />

View File

@@ -0,0 +1,279 @@
// -----------------------------------------------------------------------------
// VulnSurfaceIntegrationTests.cs
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core
// Task: SURF-023
// Description: Integration tests with real CVE data (Newtonsoft.Json).
// -----------------------------------------------------------------------------
using System.Collections.Generic;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.VulnSurfaces.Builder;
using StellaOps.Scanner.VulnSurfaces.CallGraph;
using StellaOps.Scanner.VulnSurfaces.Download;
using StellaOps.Scanner.VulnSurfaces.Fingerprint;
using StellaOps.Scanner.VulnSurfaces.Triggers;
using Xunit;
namespace StellaOps.Scanner.VulnSurfaces.Tests;
/// <summary>
/// Integration tests for VulnSurfaceBuilder using real packages.
/// These tests require network access and may be slow.
/// </summary>
[Trait("Category", "Integration")]
[Trait("Category", "SlowTests")]
public sealed class VulnSurfaceIntegrationTests : IDisposable
{
private readonly string _workDir;
public VulnSurfaceIntegrationTests()
{
_workDir = Path.Combine(Path.GetTempPath(), "vuln-surface-tests", Guid.NewGuid().ToString("N"));
Directory.CreateDirectory(_workDir);
}
public void Dispose()
{
try
{
if (Directory.Exists(_workDir))
{
Directory.Delete(_workDir, recursive: true);
}
}
catch
{
// Ignore cleanup errors
}
}
/// <summary>
/// Tests vulnerability surface extraction for Newtonsoft.Json CVE-2024-21907.
/// This CVE relates to type confusion in TypeNameHandling.
/// Vuln: 13.0.1, Fixed: 13.0.3
/// </summary>
[Fact(Skip = "Requires network access and ~30s runtime")]
public async Task BuildAsync_NewtonsoftJson_CVE_2024_21907_DetectsSinks()
{
// Arrange
var builder = CreateBuilder();
var request = new VulnSurfaceBuildRequest
{
CveId = "CVE-2024-21907",
PackageName = "Newtonsoft.Json",
Ecosystem = "nuget",
VulnVersion = "13.0.1",
FixedVersion = "13.0.3",
WorkingDirectory = _workDir,
ExtractTriggers = true
};
// Act
var result = await builder.BuildAsync(request);
// Assert
Assert.True(result.Success, result.Error ?? "Build should succeed");
Assert.NotNull(result.Surface);
Assert.Equal("CVE-2024-21907", result.Surface.CveId);
Assert.Equal("nuget", result.Surface.Ecosystem);
// Should detect changed methods in the security fix
Assert.NotEmpty(result.Surface.Sinks);
// Log for visibility
foreach (var sink in result.Surface.Sinks)
{
System.Diagnostics.Debug.WriteLine($"Sink: {sink.MethodKey} ({sink.ChangeType})");
}
}
/// <summary>
/// Tests building a surface for a small well-known package.
/// Uses Humanizer.Core which is small and has version differences.
/// </summary>
[Fact(Skip = "Requires network access and ~15s runtime")]
public async Task BuildAsync_HumanizerCore_DetectsMethodChanges()
{
// Arrange
var builder = CreateBuilder();
var request = new VulnSurfaceBuildRequest
{
CveId = "TEST-0001",
PackageName = "Humanizer.Core",
Ecosystem = "nuget",
VulnVersion = "2.14.0",
FixedVersion = "2.14.1",
WorkingDirectory = _workDir,
ExtractTriggers = false // Skip trigger extraction for speed
};
// Act
var result = await builder.BuildAsync(request);
// Assert
Assert.True(result.Success, result.Error ?? "Build should succeed");
Assert.NotNull(result.Surface);
// Even if no sinks are found, the surface should be created successfully
Assert.NotNull(result.Surface.Sinks);
}
/// <summary>
/// Tests that invalid package name returns appropriate error.
/// </summary>
[Fact(Skip = "Requires network access")]
public async Task BuildAsync_InvalidPackage_ReturnsFailed()
{
// Arrange
var builder = CreateBuilder();
var request = new VulnSurfaceBuildRequest
{
CveId = "TEST-INVALID",
PackageName = "This.Package.Does.Not.Exist.12345",
Ecosystem = "nuget",
VulnVersion = "1.0.0",
FixedVersion = "1.0.1",
WorkingDirectory = _workDir,
ExtractTriggers = false
};
// Act
var result = await builder.BuildAsync(request);
// Assert
Assert.False(result.Success);
Assert.NotNull(result.Error);
Assert.Contains("Failed to download", result.Error);
}
/// <summary>
/// Tests that unsupported ecosystem returns error.
/// </summary>
[Fact]
public async Task BuildAsync_UnsupportedEcosystem_ReturnsFailed()
{
// Arrange
var builder = CreateBuilder();
var request = new VulnSurfaceBuildRequest
{
CveId = "TEST-UNSUPPORTED",
PackageName = "some-package",
Ecosystem = "cargo", // Not supported yet
VulnVersion = "1.0.0",
FixedVersion = "1.0.1",
WorkingDirectory = _workDir,
ExtractTriggers = false
};
// Act
var result = await builder.BuildAsync(request);
// Assert
Assert.False(result.Success);
Assert.Contains("No downloader for ecosystem", result.Error);
}
/// <summary>
/// Tests surface building with trigger extraction.
/// </summary>
[Fact(Skip = "Requires network access and ~45s runtime")]
public async Task BuildAsync_WithTriggers_ExtractsTriggerMethods()
{
// Arrange
var builder = CreateBuilder();
var request = new VulnSurfaceBuildRequest
{
CveId = "CVE-2024-21907",
PackageName = "Newtonsoft.Json",
Ecosystem = "nuget",
VulnVersion = "13.0.1",
FixedVersion = "13.0.3",
WorkingDirectory = _workDir,
ExtractTriggers = true
};
// Act
var result = await builder.BuildAsync(request);
// Assert
Assert.True(result.Success, result.Error ?? "Build should succeed");
Assert.NotNull(result.Surface);
// When trigger extraction is enabled, we should have trigger info
// Note: TriggerCount may be 0 if no public API calls into the changed methods
Assert.True(result.Surface.TriggerCount >= 0);
}
/// <summary>
/// Tests deterministic output for the same inputs.
/// </summary>
[Fact(Skip = "Requires network access and ~60s runtime")]
public async Task BuildAsync_SameInput_ProducesDeterministicOutput()
{
// Arrange
var builder = CreateBuilder();
var request = new VulnSurfaceBuildRequest
{
CveId = "CVE-2024-21907",
PackageName = "Newtonsoft.Json",
Ecosystem = "nuget",
VulnVersion = "13.0.1",
FixedVersion = "13.0.3",
WorkingDirectory = Path.Combine(_workDir, "run1"),
ExtractTriggers = false
};
// Act
var result1 = await builder.BuildAsync(request);
// Reset for second run
request = request with { WorkingDirectory = Path.Combine(_workDir, "run2") };
var result2 = await builder.BuildAsync(request);
// Assert
Assert.True(result1.Success && result2.Success);
Assert.NotNull(result1.Surface);
Assert.NotNull(result2.Surface);
// Sink count should be identical
Assert.Equal(result1.Surface.Sinks.Count, result2.Surface.Sinks.Count);
// Method keys should be identical
var keys1 = result1.Surface.Sinks.Select(s => s.MethodKey).OrderBy(k => k).ToList();
var keys2 = result2.Surface.Sinks.Select(s => s.MethodKey).OrderBy(k => k).ToList();
Assert.Equal(keys1, keys2);
}
private VulnSurfaceBuilder CreateBuilder()
{
var downloaders = new List<IPackageDownloader>
{
new NuGetPackageDownloader(
new HttpClient(),
NullLogger<NuGetPackageDownloader>.Instance,
TimeProvider.System)
};
var fingerprinters = new List<IMethodFingerprinter>
{
new CecilMethodFingerprinter(NullLogger<CecilMethodFingerprinter>.Instance)
};
var diffEngine = new MethodDiffEngine(NullLogger<MethodDiffEngine>.Instance);
var triggerExtractor = new TriggerMethodExtractor(
NullLogger<TriggerMethodExtractor>.Instance);
var graphBuilders = new List<IInternalCallGraphBuilder>
{
new CecilInternalCallGraphBuilder(NullLogger<CecilInternalCallGraphBuilder>.Instance)
};
return new VulnSurfaceBuilder(
downloaders,
fingerprinters,
diffEngine,
triggerExtractor,
graphBuilders,
NullLogger<VulnSurfaceBuilder>.Instance);
}
}

View File

@@ -0,0 +1,531 @@
// -----------------------------------------------------------------------------
// JavaInternalGraphBuilder.cs
// Sprint: SPRINT_3700_0003_0001_trigger_extraction (TRIG-004)
// Description: Java internal call graph builder using bytecode analysis.
// -----------------------------------------------------------------------------
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.VulnSurfaces.Models;
namespace StellaOps.Scanner.VulnSurfaces.CallGraph;
/// <summary>
/// Internal call graph builder for Java packages using bytecode analysis.
/// Parses .class files from JAR archives.
/// </summary>
public sealed class JavaInternalGraphBuilder : IInternalCallGraphBuilder
{
private readonly ILogger<JavaInternalGraphBuilder> _logger;
private const uint ClassFileMagic = 0xCAFEBABE;
public JavaInternalGraphBuilder(ILogger<JavaInternalGraphBuilder> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public string Ecosystem => "maven";
/// <inheritdoc />
public bool CanHandle(string packagePath)
{
if (string.IsNullOrEmpty(packagePath))
return false;
if (packagePath.EndsWith(".jar", StringComparison.OrdinalIgnoreCase))
return true;
if (Directory.Exists(packagePath))
{
return Directory.EnumerateFiles(packagePath, "*.class", SearchOption.AllDirectories).Any();
}
return packagePath.EndsWith(".class", StringComparison.OrdinalIgnoreCase);
}
/// <inheritdoc />
public async Task<InternalCallGraphBuildResult> BuildAsync(
InternalCallGraphBuildRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var sw = Stopwatch.StartNew();
var graph = new InternalCallGraph
{
PackageId = request.PackageId,
Version = request.Version
};
try
{
var classFiles = GetClassFiles(request.PackagePath);
var filesProcessed = 0;
// First pass: collect all classes and methods
var packageClasses = new HashSet<string>(StringComparer.Ordinal);
var allMethods = new Dictionary<string, MethodInfo>(StringComparer.Ordinal);
foreach (var classPath in classFiles)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var bytes = await File.ReadAllBytesAsync(classPath, cancellationToken);
var classInfo = ParseClassFile(bytes);
if (classInfo is not null)
{
packageClasses.Add(classInfo.ClassName);
foreach (var method in classInfo.Methods)
{
var key = $"{classInfo.ClassName}::{method.Name}{method.Descriptor}";
allMethods[key] = method with { DeclaringClass = classInfo.ClassName };
}
}
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Failed to parse class file {Path}", classPath);
}
}
// Second pass: analyze method bodies for internal calls
foreach (var classPath in classFiles)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var bytes = await File.ReadAllBytesAsync(classPath, cancellationToken);
var classInfo = ParseClassFileWithCalls(bytes, packageClasses);
if (classInfo is not null)
{
foreach (var method in classInfo.Methods)
{
var callerKey = $"{classInfo.ClassName}::{method.Name}{method.Descriptor}";
// Skip private methods unless requested
if (!request.IncludePrivateMethods && !method.IsPublic && !method.IsProtected)
continue;
graph.AddMethod(new InternalMethodRef
{
MethodKey = callerKey,
Name = method.Name,
DeclaringType = classInfo.ClassName,
IsPublic = method.IsPublic
});
// Add edges for internal calls
foreach (var call in method.InternalCalls)
{
var calleeKey = $"{call.TargetClass}::{call.MethodName}{call.Descriptor}";
if (allMethods.ContainsKey(calleeKey))
{
graph.AddEdge(new InternalCallEdge { Caller = callerKey, Callee = calleeKey });
}
}
}
filesProcessed++;
}
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Failed to analyze calls in {Path}", classPath);
}
}
sw.Stop();
_logger.LogDebug(
"Built internal call graph for Maven {PackageId} v{Version}: {Methods} methods, {Edges} edges in {Duration}ms",
request.PackageId, request.Version, graph.MethodCount, graph.EdgeCount, sw.ElapsedMilliseconds);
return InternalCallGraphBuildResult.Ok(graph, sw.Elapsed, filesProcessed);
}
catch (Exception ex)
{
sw.Stop();
_logger.LogWarning(ex, "Failed to build internal call graph for Maven {PackageId}", request.PackageId);
return InternalCallGraphBuildResult.Fail(ex.Message, sw.Elapsed);
}
}
private static string[] GetClassFiles(string packagePath)
{
if (File.Exists(packagePath) && packagePath.EndsWith(".class", StringComparison.OrdinalIgnoreCase))
{
return [packagePath];
}
if (Directory.Exists(packagePath))
{
return Directory.GetFiles(packagePath, "*.class", SearchOption.AllDirectories)
.Where(f => !f.Contains("META-INF"))
.ToArray();
}
return [];
}
private ClassInfo? ParseClassFile(byte[] bytes)
{
if (bytes.Length < 10 || BinaryPrimitives.ReadUInt32BigEndian(bytes) != ClassFileMagic)
return null;
var reader = new ByteReader(bytes);
reader.Skip(4); // magic
reader.Skip(4); // version
var constantPool = ParseConstantPool(reader);
var accessFlags = reader.ReadU2();
var thisClassIndex = reader.ReadU2();
var className = ResolveClassName(constantPool, thisClassIndex);
reader.Skip(2); // super class
var interfaceCount = reader.ReadU2();
reader.Skip(interfaceCount * 2);
// Skip fields
var fieldCount = reader.ReadU2();
for (var i = 0; i < fieldCount; i++)
SkipFieldOrMethod(reader);
// Parse methods
var methodCount = reader.ReadU2();
var methods = new List<MethodInfo>();
for (var i = 0; i < methodCount; i++)
{
var method = ParseMethod(reader, constantPool);
if (method is not null)
methods.Add(method);
}
return new ClassInfo
{
ClassName = className,
AccessFlags = accessFlags,
Methods = methods
};
}
private ClassInfo? ParseClassFileWithCalls(byte[] bytes, HashSet<string> packageClasses)
{
if (bytes.Length < 10 || BinaryPrimitives.ReadUInt32BigEndian(bytes) != ClassFileMagic)
return null;
var reader = new ByteReader(bytes);
reader.Skip(4); // magic
reader.Skip(4); // version
var constantPool = ParseConstantPool(reader);
var accessFlags = reader.ReadU2();
var thisClassIndex = reader.ReadU2();
var className = ResolveClassName(constantPool, thisClassIndex);
reader.Skip(2); // super class
var interfaceCount = reader.ReadU2();
reader.Skip(interfaceCount * 2);
// Skip fields
var fieldCount = reader.ReadU2();
for (var i = 0; i < fieldCount; i++)
SkipFieldOrMethod(reader);
// Parse methods with call analysis
var methodCount = reader.ReadU2();
var methods = new List<MethodInfo>();
for (var i = 0; i < methodCount; i++)
{
var method = ParseMethodWithCalls(reader, constantPool, packageClasses);
if (method is not null)
methods.Add(method);
}
return new ClassInfo
{
ClassName = className,
AccessFlags = accessFlags,
Methods = methods
};
}
private static List<ConstantPoolEntry> ParseConstantPool(ByteReader reader)
{
var count = reader.ReadU2();
var pool = new List<ConstantPoolEntry>(count) { new() };
for (var i = 1; i < count; i++)
{
var tag = reader.ReadU1();
var entry = new ConstantPoolEntry { Tag = tag };
switch (tag)
{
case 1: // CONSTANT_Utf8
var length = reader.ReadU2();
entry.StringValue = Encoding.UTF8.GetString(reader.ReadBytes(length));
break;
case 3: case 4: reader.Skip(4); break;
case 5: case 6: reader.Skip(8); pool.Add(new()); i++; break;
case 7: case 8: entry.NameIndex = reader.ReadU2(); break;
case 9: case 10: case 11:
entry.ClassIndex = reader.ReadU2();
entry.NameAndTypeIndex = reader.ReadU2();
break;
case 12:
entry.NameIndex = reader.ReadU2();
entry.DescriptorIndex = reader.ReadU2();
break;
case 15: reader.Skip(3); break;
case 16: reader.Skip(2); break;
case 17: case 18: reader.Skip(4); break;
case 19: case 20: reader.Skip(2); break;
}
pool.Add(entry);
}
return pool;
}
private static MethodInfo? ParseMethod(ByteReader reader, List<ConstantPoolEntry> pool)
{
var accessFlags = reader.ReadU2();
var nameIndex = reader.ReadU2();
var descriptorIndex = reader.ReadU2();
var name = GetUtf8(pool, nameIndex);
var descriptor = GetUtf8(pool, descriptorIndex);
var attrCount = reader.ReadU2();
for (var i = 0; i < attrCount; i++)
{
reader.Skip(2);
var attrLength = reader.ReadU4();
reader.Skip((int)attrLength);
}
return new MethodInfo
{
Name = name,
Descriptor = descriptor,
AccessFlags = accessFlags,
InternalCalls = []
};
}
private static MethodInfo? ParseMethodWithCalls(
ByteReader reader,
List<ConstantPoolEntry> pool,
HashSet<string> packageClasses)
{
var accessFlags = reader.ReadU2();
var nameIndex = reader.ReadU2();
var descriptorIndex = reader.ReadU2();
var name = GetUtf8(pool, nameIndex);
var descriptor = GetUtf8(pool, descriptorIndex);
var calls = new List<CallInfo>();
var attrCount = reader.ReadU2();
for (var i = 0; i < attrCount; i++)
{
var attrNameIndex = reader.ReadU2();
var attrLength = reader.ReadU4();
var attrName = GetUtf8(pool, attrNameIndex);
if (attrName == "Code")
{
reader.Skip(4); // max_stack, max_locals
var codeLength = reader.ReadU4();
var code = reader.ReadBytes((int)codeLength);
// Analyze bytecode for method calls
AnalyzeBytecode(code, pool, packageClasses, calls);
// Skip exception table and code attributes
var exceptionTableLength = reader.ReadU2();
reader.Skip(exceptionTableLength * 8);
var codeAttrCount = reader.ReadU2();
for (var j = 0; j < codeAttrCount; j++)
{
reader.Skip(2);
var codeAttrLength = reader.ReadU4();
reader.Skip((int)codeAttrLength);
}
}
else
{
reader.Skip((int)attrLength);
}
}
return new MethodInfo
{
Name = name,
Descriptor = descriptor,
AccessFlags = accessFlags,
InternalCalls = calls
};
}
private static void AnalyzeBytecode(
byte[] code,
List<ConstantPoolEntry> pool,
HashSet<string> packageClasses,
List<CallInfo> calls)
{
var i = 0;
while (i < code.Length)
{
var opcode = code[i];
// invokevirtual, invokespecial, invokestatic, invokeinterface
if (opcode is 0xB6 or 0xB7 or 0xB8 or 0xB9)
{
if (i + 2 < code.Length)
{
var methodRefIndex = (code[i + 1] << 8) | code[i + 2];
var callInfo = ResolveMethodRef(pool, methodRefIndex);
if (callInfo is not null && packageClasses.Contains(callInfo.TargetClass))
{
calls.Add(callInfo);
}
}
i += opcode == 0xB9 ? 5 : 3; // invokeinterface has 5 bytes
}
else
{
i += GetOpcodeLength(opcode);
}
}
}
private static CallInfo? ResolveMethodRef(List<ConstantPoolEntry> pool, int index)
{
if (index <= 0 || index >= pool.Count)
return null;
var methodRef = pool[index];
if (methodRef.Tag is not (10 or 11)) // Methodref or InterfaceMethodref
return null;
var classEntry = pool.ElementAtOrDefault(methodRef.ClassIndex);
var nameAndType = pool.ElementAtOrDefault(methodRef.NameAndTypeIndex);
if (classEntry?.Tag != 7 || nameAndType?.Tag != 12)
return null;
var className = GetUtf8(pool, classEntry.NameIndex).Replace('/', '.');
var methodName = GetUtf8(pool, nameAndType.NameIndex);
var descriptor = GetUtf8(pool, nameAndType.DescriptorIndex);
return new CallInfo
{
TargetClass = className,
MethodName = methodName,
Descriptor = descriptor
};
}
private static void SkipFieldOrMethod(ByteReader reader)
{
reader.Skip(6);
var attrCount = reader.ReadU2();
for (var i = 0; i < attrCount; i++)
{
reader.Skip(2);
var length = reader.ReadU4();
reader.Skip((int)length);
}
}
private static string ResolveClassName(List<ConstantPoolEntry> pool, int classIndex)
{
if (classIndex <= 0 || classIndex >= pool.Count || pool[classIndex].Tag != 7)
return "Unknown";
return GetUtf8(pool, pool[classIndex].NameIndex).Replace('/', '.');
}
private static string GetUtf8(List<ConstantPoolEntry> pool, int index)
{
if (index <= 0 || index >= pool.Count)
return string.Empty;
return pool[index].StringValue ?? string.Empty;
}
private static int GetOpcodeLength(byte opcode) => opcode switch
{
// Wide instructions and tableswitch/lookupswitch are variable - simplified handling
0xC4 => 4, // wide (simplified)
0xAA or 0xAB => 4, // tableswitch/lookupswitch (simplified)
_ when opcode is 0x10 or 0x12 or 0x15 or 0x16 or 0x17 or 0x18 or 0x19
or 0x36 or 0x37 or 0x38 or 0x39 or 0x3A or 0xA9 or 0xBC => 2,
_ when opcode is 0x11 or 0x13 or 0x14 or 0x84 or 0x99 or 0x9A or 0x9B
or 0x9C or 0x9D or 0x9E or 0x9F or 0xA0 or 0xA1 or 0xA2 or 0xA3
or 0xA4 or 0xA5 or 0xA6 or 0xA7 or 0xA8 or 0xB2 or 0xB3 or 0xB4
or 0xB5 or 0xB6 or 0xB7 or 0xB8 or 0xBB or 0xBD or 0xC0 or 0xC1
or 0xC6 or 0xC7 => 3,
0xC8 or 0xC9 => 5, // goto_w, jsr_w
0xB9 or 0xBA => 5, // invokeinterface, invokedynamic
0xC5 => 4, // multianewarray
_ => 1
};
private sealed class ByteReader(byte[] data)
{
private int _pos;
public byte ReadU1() => data[_pos++];
public ushort ReadU2() { var v = BinaryPrimitives.ReadUInt16BigEndian(data.AsSpan(_pos)); _pos += 2; return v; }
public uint ReadU4() { var v = BinaryPrimitives.ReadUInt32BigEndian(data.AsSpan(_pos)); _pos += 4; return v; }
public byte[] ReadBytes(int n) { var r = data[_pos..(_pos + n)]; _pos += n; return r; }
public void Skip(int n) => _pos += n;
}
private sealed class ConstantPoolEntry
{
public byte Tag { get; init; }
public string? StringValue { get; set; }
public int NameIndex { get; set; }
public int DescriptorIndex { get; set; }
public int ClassIndex { get; set; }
public int NameAndTypeIndex { get; set; }
}
private sealed record ClassInfo
{
public required string ClassName { get; init; }
public ushort AccessFlags { get; init; }
public required List<MethodInfo> Methods { get; init; }
}
private sealed record MethodInfo
{
public string DeclaringClass { get; init; } = string.Empty;
public required string Name { get; init; }
public required string Descriptor { get; init; }
public ushort AccessFlags { get; init; }
public required List<CallInfo> InternalCalls { get; init; }
public bool IsPublic => (AccessFlags & 0x0001) != 0;
public bool IsProtected => (AccessFlags & 0x0004) != 0;
}
private sealed record CallInfo
{
public required string TargetClass { get; init; }
public required string MethodName { get; init; }
public required string Descriptor { get; init; }
}
}

View File

@@ -0,0 +1,420 @@
// -----------------------------------------------------------------------------
// JavaScriptInternalGraphBuilder.cs
// Sprint: SPRINT_3700_0003_0001_trigger_extraction (TRIG-003)
// Description: JavaScript/Node.js internal call graph builder using AST parsing.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.VulnSurfaces.Models;
namespace StellaOps.Scanner.VulnSurfaces.CallGraph;
/// <summary>
/// Internal call graph builder for JavaScript/Node.js packages using AST-based parsing.
/// </summary>
public sealed partial class JavaScriptInternalGraphBuilder : IInternalCallGraphBuilder
{
private readonly ILogger<JavaScriptInternalGraphBuilder> _logger;
// Regex patterns for JavaScript analysis
[GeneratedRegex(@"(export\s+)?(async\s+)?function\s+(\w+)\s*\(", RegexOptions.Compiled)]
private static partial Regex FunctionDeclarationRegex();
[GeneratedRegex(@"(const|let|var)\s+(\w+)\s*=\s*(async\s+)?\(", RegexOptions.Compiled)]
private static partial Regex ArrowFunctionRegex();
[GeneratedRegex(@"class\s+(\w+)", RegexOptions.Compiled)]
private static partial Regex ClassDeclarationRegex();
[GeneratedRegex(@"(async\s+)?(\w+)\s*\([^)]*\)\s*\{", RegexOptions.Compiled)]
private static partial Regex MethodDeclarationRegex();
[GeneratedRegex(@"(?:this\.)?(\w+)\s*\(", RegexOptions.Compiled)]
private static partial Regex FunctionCallRegex();
[GeneratedRegex(@"module\.exports\s*=\s*\{?([^}]+)", RegexOptions.Compiled)]
private static partial Regex ModuleExportsRegex();
[GeneratedRegex(@"exports\.(\w+)", RegexOptions.Compiled)]
private static partial Regex NamedExportRegex();
public JavaScriptInternalGraphBuilder(ILogger<JavaScriptInternalGraphBuilder> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public string Ecosystem => "npm";
/// <inheritdoc />
public bool CanHandle(string packagePath)
{
if (string.IsNullOrEmpty(packagePath))
return false;
if (packagePath.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase))
return true;
if (Directory.Exists(packagePath))
{
// Check for package.json or .js files
return File.Exists(Path.Combine(packagePath, "package.json")) ||
Directory.EnumerateFiles(packagePath, "*.js", SearchOption.AllDirectories).Any();
}
return packagePath.EndsWith(".js", StringComparison.OrdinalIgnoreCase);
}
/// <inheritdoc />
public async Task<InternalCallGraphBuildResult> BuildAsync(
InternalCallGraphBuildRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var sw = Stopwatch.StartNew();
var graph = new InternalCallGraph
{
PackageId = request.PackageId,
Version = request.Version
};
try
{
var jsFiles = GetJavaScriptFiles(request.PackagePath);
var filesProcessed = 0;
var allFunctions = new Dictionary<string, FunctionInfo>(StringComparer.OrdinalIgnoreCase);
// First pass: collect all function declarations
foreach (var jsPath in jsFiles)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var content = await File.ReadAllTextAsync(jsPath, cancellationToken);
var moduleName = GetModuleName(jsPath, request.PackagePath);
CollectFunctions(content, moduleName, allFunctions, request.IncludePrivateMethods);
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Failed to collect functions from {Path}", jsPath);
}
}
// Second pass: analyze call relationships
foreach (var jsPath in jsFiles)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var content = await File.ReadAllTextAsync(jsPath, cancellationToken);
var moduleName = GetModuleName(jsPath, request.PackagePath);
AnalyzeCalls(content, moduleName, allFunctions, graph);
filesProcessed++;
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Failed to analyze calls in {Path}", jsPath);
}
}
sw.Stop();
_logger.LogDebug(
"Built internal call graph for npm {PackageId} v{Version}: {Methods} methods, {Edges} edges in {Duration}ms",
request.PackageId, request.Version, graph.MethodCount, graph.EdgeCount, sw.ElapsedMilliseconds);
return InternalCallGraphBuildResult.Ok(graph, sw.Elapsed, filesProcessed);
}
catch (Exception ex)
{
sw.Stop();
_logger.LogWarning(ex, "Failed to build internal call graph for npm {PackageId}", request.PackageId);
return InternalCallGraphBuildResult.Fail(ex.Message, sw.Elapsed);
}
}
private static string[] GetJavaScriptFiles(string packagePath)
{
if (File.Exists(packagePath) && packagePath.EndsWith(".js", StringComparison.OrdinalIgnoreCase))
{
return [packagePath];
}
if (Directory.Exists(packagePath))
{
return Directory.GetFiles(packagePath, "*.js", SearchOption.AllDirectories)
.Where(f =>
{
var name = Path.GetFileName(f);
return !name.Contains(".min.") &&
!name.EndsWith(".spec.js") &&
!name.EndsWith(".test.js") &&
!f.Contains("node_modules") &&
!f.Contains("__tests__");
})
.ToArray();
}
return [];
}
private static string GetModuleName(string jsPath, string basePath)
{
var relativePath = Path.GetRelativePath(basePath, jsPath);
var withoutExt = Path.ChangeExtension(relativePath, null);
return withoutExt
.Replace(Path.DirectorySeparatorChar, '.')
.Replace(Path.AltDirectorySeparatorChar, '.');
}
private void CollectFunctions(
string content,
string moduleName,
Dictionary<string, FunctionInfo> functions,
bool includePrivate)
{
// Collect function declarations
foreach (Match match in FunctionDeclarationRegex().Matches(content))
{
var isExported = !string.IsNullOrEmpty(match.Groups[1].Value);
var functionName = match.Groups[3].Value;
if (!includePrivate && !isExported)
continue;
var key = $"{moduleName}::{functionName}";
functions[key] = new FunctionInfo
{
Name = functionName,
Module = moduleName,
IsPublic = isExported,
StartIndex = match.Index,
EndIndex = FindFunctionEnd(content, match.Index)
};
}
// Collect arrow functions
foreach (Match match in ArrowFunctionRegex().Matches(content))
{
var functionName = match.Groups[2].Value;
var lineStart = content.LastIndexOf('\n', match.Index) + 1;
var prefix = content[lineStart..match.Index];
var isExported = prefix.Contains("export");
if (!includePrivate && !isExported)
continue;
var key = $"{moduleName}::{functionName}";
if (!functions.ContainsKey(key))
{
functions[key] = new FunctionInfo
{
Name = functionName,
Module = moduleName,
IsPublic = isExported,
StartIndex = match.Index,
EndIndex = FindArrowFunctionEnd(content, match.Index)
};
}
}
// Collect class methods
foreach (Match classMatch in ClassDeclarationRegex().Matches(content))
{
var className = classMatch.Groups[1].Value;
var classBodyStart = content.IndexOf('{', classMatch.Index);
if (classBodyStart < 0) continue;
var classBody = ExtractBracedBlock(content, classBodyStart);
if (string.IsNullOrEmpty(classBody)) continue;
foreach (Match methodMatch in MethodDeclarationRegex().Matches(classBody))
{
var methodName = methodMatch.Groups[2].Value;
if (methodName == "constructor") continue;
var key = $"{moduleName}.{className}::{methodName}";
functions[key] = new FunctionInfo
{
Name = methodName,
Module = $"{moduleName}.{className}",
IsPublic = true, // Class methods are typically public
StartIndex = classMatch.Index + methodMatch.Index,
EndIndex = classMatch.Index + FindFunctionEnd(classBody, methodMatch.Index)
};
}
}
// Mark exported functions from module.exports
var exportsMatch = ModuleExportsRegex().Match(content);
if (exportsMatch.Success)
{
var exports = exportsMatch.Groups[1].Value;
foreach (var func in functions.Values)
{
if (exports.Contains(func.Name, StringComparison.OrdinalIgnoreCase))
{
func.IsPublic = true;
}
}
}
foreach (Match exportMatch in NamedExportRegex().Matches(content))
{
var exportedName = exportMatch.Groups[1].Value;
var key = $"{moduleName}::{exportedName}";
if (functions.TryGetValue(key, out var func))
{
func.IsPublic = true;
}
}
}
private void AnalyzeCalls(
string content,
string moduleName,
Dictionary<string, FunctionInfo> allFunctions,
InternalCallGraph graph)
{
var moduleFunctions = allFunctions
.Where(kvp => kvp.Value.Module == moduleName || kvp.Value.Module.StartsWith($"{moduleName}."))
.ToList();
foreach (var (callerKey, callerInfo) in moduleFunctions)
{
// Add node
graph.AddMethod(new InternalMethodRef
{
MethodKey = callerKey,
Name = callerInfo.Name,
DeclaringType = callerInfo.Module,
IsPublic = callerInfo.IsPublic
});
// Extract function body
var bodyStart = callerInfo.StartIndex;
var bodyEnd = callerInfo.EndIndex;
if (bodyEnd <= bodyStart || bodyEnd > content.Length)
continue;
var body = content[bodyStart..Math.Min(bodyEnd, content.Length)];
// Find calls in body
foreach (Match callMatch in FunctionCallRegex().Matches(body))
{
var calledName = callMatch.Groups[1].Value;
// Skip common built-ins and keywords
if (IsBuiltIn(calledName))
continue;
// Try to resolve callee
var calleeKey = ResolveFunctionKey(calledName, moduleName, allFunctions);
if (calleeKey is not null && calleeKey != callerKey)
{
graph.AddEdge(new InternalCallEdge { Caller = callerKey, Callee = calleeKey });
}
}
}
}
private static string? ResolveFunctionKey(
string calledName,
string callerModule,
Dictionary<string, FunctionInfo> allFunctions)
{
// Try same module first
var sameModuleKey = $"{callerModule}::{calledName}";
if (allFunctions.ContainsKey(sameModuleKey))
return sameModuleKey;
// Try any module with that function
var match = allFunctions.Keys
.FirstOrDefault(k => k.EndsWith($"::{calledName}", StringComparison.OrdinalIgnoreCase));
return match;
}
private static bool IsBuiltIn(string name)
{
return name is "console" or "require" or "import" or "export" or "if" or "for" or "while"
or "switch" or "return" or "throw" or "catch" or "try" or "new" or "typeof" or "instanceof"
or "delete" or "void" or "await" or "Promise" or "Array" or "Object" or "String" or "Number"
or "Boolean" or "Date" or "Math" or "JSON" or "Error" or "RegExp" or "Map" or "Set"
or "setTimeout" or "setInterval" or "clearTimeout" or "clearInterval" or "fetch"
or "process" or "Buffer" or "__dirname" or "__filename";
}
private static int FindFunctionEnd(string content, int start)
{
var braceStart = content.IndexOf('{', start);
if (braceStart < 0) return start + 100;
return braceStart + FindMatchingBrace(content, braceStart);
}
private static int FindArrowFunctionEnd(string content, int start)
{
var arrowIndex = content.IndexOf("=>", start);
if (arrowIndex < 0) return start + 100;
var afterArrow = arrowIndex + 2;
while (afterArrow < content.Length && char.IsWhiteSpace(content[afterArrow]))
afterArrow++;
if (afterArrow < content.Length && content[afterArrow] == '{')
{
return afterArrow + FindMatchingBrace(content, afterArrow);
}
// Expression body
var endIndex = content.IndexOfAny([';', '\n', ','], afterArrow);
return endIndex > 0 ? endIndex : afterArrow + 100;
}
private static int FindMatchingBrace(string content, int braceStart)
{
var depth = 0;
for (var i = braceStart; i < content.Length; i++)
{
if (content[i] == '{') depth++;
else if (content[i] == '}')
{
depth--;
if (depth == 0) return i - braceStart + 1;
}
}
return content.Length - braceStart;
}
private static string ExtractBracedBlock(string content, int braceStart)
{
if (braceStart >= content.Length || content[braceStart] != '{')
return string.Empty;
var length = FindMatchingBrace(content, braceStart);
var endIndex = braceStart + length;
if (endIndex > content.Length) endIndex = content.Length;
return content[(braceStart + 1)..(endIndex - 1)];
}
private sealed class FunctionInfo
{
public required string Name { get; init; }
public required string Module { get; init; }
public bool IsPublic { get; set; }
public int StartIndex { get; init; }
public int EndIndex { get; init; }
}
}

View File

@@ -0,0 +1,449 @@
// -----------------------------------------------------------------------------
// PythonInternalGraphBuilder.cs
// Sprint: SPRINT_3700_0003_0001_trigger_extraction (TRIG-005)
// Description: Python internal call graph builder using AST-based parsing.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.VulnSurfaces.Models;
namespace StellaOps.Scanner.VulnSurfaces.CallGraph;
/// <summary>
/// Internal call graph builder for Python packages using AST-based parsing.
/// </summary>
public sealed partial class PythonInternalGraphBuilder : IInternalCallGraphBuilder
{
private readonly ILogger<PythonInternalGraphBuilder> _logger;
// Regex patterns for Python analysis
[GeneratedRegex(@"^(async\s+)?def\s+(\w+)\s*\(([^)]*)\)\s*(?:->\s*[^:]+)?:", RegexOptions.Multiline | RegexOptions.Compiled)]
private static partial Regex FunctionDefRegex();
[GeneratedRegex(@"^class\s+(\w+)(?:\s*\([^)]*\))?\s*:", RegexOptions.Multiline | RegexOptions.Compiled)]
private static partial Regex ClassDefRegex();
[GeneratedRegex(@"^(\s+)(async\s+)?def\s+(\w+)\s*\(([^)]*)\)\s*(?:->\s*[^:]+)?:", RegexOptions.Multiline | RegexOptions.Compiled)]
private static partial Regex MethodDefRegex();
[GeneratedRegex(@"(?:self\.)?(\w+)\s*\(", RegexOptions.Compiled)]
private static partial Regex FunctionCallRegex();
[GeneratedRegex(@"^from\s+(\S+)\s+import\s+(.+)$", RegexOptions.Multiline | RegexOptions.Compiled)]
private static partial Regex FromImportRegex();
[GeneratedRegex(@"^import\s+(\S+)", RegexOptions.Multiline | RegexOptions.Compiled)]
private static partial Regex ImportRegex();
[GeneratedRegex(@"^__all__\s*=\s*\[([^\]]+)\]", RegexOptions.Multiline | RegexOptions.Compiled)]
private static partial Regex AllExportRegex();
public PythonInternalGraphBuilder(ILogger<PythonInternalGraphBuilder> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public string Ecosystem => "pypi";
/// <inheritdoc />
public bool CanHandle(string packagePath)
{
if (string.IsNullOrEmpty(packagePath))
return false;
if (packagePath.EndsWith(".whl", StringComparison.OrdinalIgnoreCase) ||
packagePath.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase))
return true;
if (Directory.Exists(packagePath))
{
return File.Exists(Path.Combine(packagePath, "setup.py")) ||
File.Exists(Path.Combine(packagePath, "pyproject.toml")) ||
Directory.EnumerateFiles(packagePath, "*.py", SearchOption.AllDirectories).Any();
}
return packagePath.EndsWith(".py", StringComparison.OrdinalIgnoreCase);
}
/// <inheritdoc />
public async Task<InternalCallGraphBuildResult> BuildAsync(
InternalCallGraphBuildRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var sw = Stopwatch.StartNew();
var graph = new InternalCallGraph
{
PackageId = request.PackageId,
Version = request.Version
};
try
{
var pyFiles = GetPythonFiles(request.PackagePath);
var filesProcessed = 0;
var allFunctions = new Dictionary<string, FunctionInfo>(StringComparer.Ordinal);
// First pass: collect all function declarations
foreach (var pyPath in pyFiles)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var content = await File.ReadAllTextAsync(pyPath, cancellationToken);
var moduleName = GetModuleName(pyPath, request.PackagePath);
CollectFunctions(content, moduleName, allFunctions, request.IncludePrivateMethods);
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Failed to collect functions from {Path}", pyPath);
}
}
// Second pass: analyze call relationships
foreach (var pyPath in pyFiles)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
var content = await File.ReadAllTextAsync(pyPath, cancellationToken);
var moduleName = GetModuleName(pyPath, request.PackagePath);
AnalyzeCalls(content, moduleName, allFunctions, graph);
filesProcessed++;
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Failed to analyze calls in {Path}", pyPath);
}
}
sw.Stop();
_logger.LogDebug(
"Built internal call graph for PyPI {PackageId} v{Version}: {Methods} methods, {Edges} edges in {Duration}ms",
request.PackageId, request.Version, graph.MethodCount, graph.EdgeCount, sw.ElapsedMilliseconds);
return InternalCallGraphBuildResult.Ok(graph, sw.Elapsed, filesProcessed);
}
catch (Exception ex)
{
sw.Stop();
_logger.LogWarning(ex, "Failed to build internal call graph for PyPI {PackageId}", request.PackageId);
return InternalCallGraphBuildResult.Fail(ex.Message, sw.Elapsed);
}
}
private static string[] GetPythonFiles(string packagePath)
{
if (File.Exists(packagePath) && packagePath.EndsWith(".py", StringComparison.OrdinalIgnoreCase))
{
return [packagePath];
}
if (Directory.Exists(packagePath))
{
return Directory.GetFiles(packagePath, "*.py", SearchOption.AllDirectories)
.Where(f =>
{
var name = Path.GetFileName(f);
return !name.StartsWith("test_") &&
!name.EndsWith("_test.py") &&
!f.Contains("__pycache__") &&
!f.Contains(".egg-info") &&
!f.Contains("tests/") &&
!f.Contains("test/");
})
.ToArray();
}
return [];
}
private static string GetModuleName(string pyPath, string basePath)
{
var relativePath = Path.GetRelativePath(basePath, pyPath);
var withoutExt = Path.ChangeExtension(relativePath, null);
var moduleName = withoutExt
.Replace(Path.DirectorySeparatorChar, '.')
.Replace(Path.AltDirectorySeparatorChar, '.');
// Remove __init__ from module name
if (moduleName.EndsWith(".__init__"))
moduleName = moduleName[..^9];
return moduleName;
}
private void CollectFunctions(
string content,
string moduleName,
Dictionary<string, FunctionInfo> functions,
bool includePrivate)
{
var lines = content.Split('\n');
// Check for __all__ exports
var exportedNames = new HashSet<string>(StringComparer.Ordinal);
var allMatch = AllExportRegex().Match(content);
if (allMatch.Success)
{
var exports = allMatch.Groups[1].Value;
foreach (var name in exports.Split(',').Select(s => s.Trim().Trim('\'', '"')))
{
if (!string.IsNullOrEmpty(name))
exportedNames.Add(name);
}
}
// Collect module-level functions
foreach (Match match in FunctionDefRegex().Matches(content))
{
// Skip if indented (class method)
var lineStart = content.LastIndexOf('\n', Math.Max(0, match.Index - 1)) + 1;
if (lineStart < match.Index && char.IsWhiteSpace(content[lineStart]))
continue;
var functionName = match.Groups[2].Value;
// Skip private functions unless requested
var isPrivate = functionName.StartsWith('_') && !functionName.StartsWith("__");
if (!includePrivate && isPrivate)
continue;
var isPublic = !isPrivate && (exportedNames.Count == 0 || exportedNames.Contains(functionName));
var lineNumber = GetLineNumber(content, match.Index);
var key = $"{moduleName}::{functionName}";
functions[key] = new FunctionInfo
{
Name = functionName,
Module = moduleName,
IsPublic = isPublic,
StartLine = lineNumber,
EndLine = FindFunctionEndLine(lines, lineNumber - 1, 0)
};
}
// Collect class methods
foreach (Match classMatch in ClassDefRegex().Matches(content))
{
var className = classMatch.Groups[1].Value;
var classLine = GetLineNumber(content, classMatch.Index);
var classIndent = GetIndentation(lines[classLine - 1]);
foreach (Match methodMatch in MethodDefRegex().Matches(content))
{
var methodLine = GetLineNumber(content, methodMatch.Index);
if (methodLine <= classLine)
continue;
var methodIndent = methodMatch.Groups[1].Value.Length;
if (methodIndent <= classIndent)
break;
var methodName = methodMatch.Groups[3].Value;
// Skip private methods unless requested
var isPrivate = methodName.StartsWith('_') && !methodName.StartsWith("__");
if (!includePrivate && isPrivate)
continue;
// Dunder methods are considered public
var isPublic = !isPrivate || (methodName.StartsWith("__") && methodName.EndsWith("__"));
var key = $"{moduleName}.{className}::{methodName}";
functions[key] = new FunctionInfo
{
Name = methodName,
Module = $"{moduleName}.{className}",
IsPublic = isPublic,
StartLine = methodLine,
EndLine = FindFunctionEndLine(lines, methodLine - 1, methodIndent)
};
}
}
}
private void AnalyzeCalls(
string content,
string moduleName,
Dictionary<string, FunctionInfo> allFunctions,
InternalCallGraph graph)
{
var lines = content.Split('\n');
var moduleFunctions = allFunctions
.Where(kvp => kvp.Value.Module == moduleName || kvp.Value.Module.StartsWith($"{moduleName}."))
.ToList();
// Collect imports for resolution
var imports = new Dictionary<string, string>(StringComparer.Ordinal);
foreach (Match match in FromImportRegex().Matches(content))
{
var fromModule = match.Groups[1].Value;
var imported = match.Groups[2].Value;
foreach (var item in imported.Split(',').Select(s => s.Trim()))
{
var parts = item.Split(" as ");
var name = parts[0].Trim();
var alias = parts.Length > 1 ? parts[1].Trim() : name;
imports[alias] = $"{fromModule}.{name}";
}
}
foreach (var (callerKey, callerInfo) in moduleFunctions)
{
graph.AddMethod(new InternalMethodRef
{
MethodKey = callerKey,
Name = callerInfo.Name,
DeclaringType = callerInfo.Module,
IsPublic = callerInfo.IsPublic
});
// Extract function body
if (callerInfo.StartLine <= 0 || callerInfo.EndLine <= callerInfo.StartLine)
continue;
var bodyLines = lines
.Skip(callerInfo.StartLine)
.Take(callerInfo.EndLine - callerInfo.StartLine)
.ToArray();
var body = string.Join("\n", bodyLines);
// Find calls in body
foreach (Match callMatch in FunctionCallRegex().Matches(body))
{
var calledName = callMatch.Groups[1].Value;
// Skip built-ins and keywords
if (IsBuiltIn(calledName))
continue;
// Try to resolve callee
var calleeKey = ResolveFunctionKey(calledName, moduleName, imports, allFunctions);
if (calleeKey is not null && calleeKey != callerKey)
{
graph.AddEdge(new InternalCallEdge { Caller = callerKey, Callee = calleeKey });
}
}
}
}
private static string? ResolveFunctionKey(
string calledName,
string callerModule,
Dictionary<string, string> imports,
Dictionary<string, FunctionInfo> allFunctions)
{
// Try same module first
var sameModuleKey = $"{callerModule}::{calledName}";
if (allFunctions.ContainsKey(sameModuleKey))
return sameModuleKey;
// Try class method in same module
var classMethodKey = allFunctions.Keys
.FirstOrDefault(k => k.StartsWith($"{callerModule}.") && k.EndsWith($"::{calledName}"));
if (classMethodKey is not null)
return classMethodKey;
// Try imported name
if (imports.TryGetValue(calledName, out var importedPath))
{
var importedKey = allFunctions.Keys
.FirstOrDefault(k => k.Contains(importedPath, StringComparison.OrdinalIgnoreCase) ||
k.EndsWith($"::{calledName}", StringComparison.OrdinalIgnoreCase));
if (importedKey is not null)
return importedKey;
}
// Try any module with that function
return allFunctions.Keys
.FirstOrDefault(k => k.EndsWith($"::{calledName}", StringComparison.Ordinal));
}
private static bool IsBuiltIn(string name)
{
return name is "print" or "len" or "range" or "str" or "int" or "float" or "bool" or "list"
or "dict" or "set" or "tuple" or "type" or "isinstance" or "issubclass" or "hasattr"
or "getattr" or "setattr" or "delattr" or "callable" or "super" or "property"
or "staticmethod" or "classmethod" or "open" or "input" or "format" or "repr"
or "id" or "hash" or "abs" or "round" or "min" or "max" or "sum" or "sorted"
or "reversed" or "enumerate" or "zip" or "map" or "filter" or "any" or "all"
or "iter" or "next" or "slice" or "object" or "Exception" or "ValueError"
or "TypeError" or "KeyError" or "IndexError" or "AttributeError" or "RuntimeError"
or "if" or "for" or "while" or "return" or "yield" or "raise" or "try"
or "except" or "finally" or "with" or "as" or "import" or "from" or "class" or "def"
or "async" or "await" or "lambda" or "pass" or "break" or "continue" or "assert"
or "True" or "False" or "None" or "self" or "cls";
}
private static int GetLineNumber(string content, int index)
{
var lineNumber = 1;
for (var i = 0; i < index && i < content.Length; i++)
{
if (content[i] == '\n')
lineNumber++;
}
return lineNumber;
}
private static int GetIndentation(string line)
{
var indent = 0;
foreach (var c in line)
{
if (c == ' ') indent++;
else if (c == '\t') indent += 4;
else break;
}
return indent;
}
private static int FindFunctionEndLine(string[] lines, int defLineIndex, int baseIndent)
{
var bodyIndent = -1;
for (var i = defLineIndex + 1; i < lines.Length; i++)
{
var line = lines[i];
if (string.IsNullOrWhiteSpace(line))
continue;
var currentIndent = GetIndentation(line);
if (bodyIndent < 0)
{
if (currentIndent <= baseIndent)
return defLineIndex + 1;
bodyIndent = currentIndent;
}
else if (currentIndent <= baseIndent && !string.IsNullOrWhiteSpace(line.Trim()))
{
return i;
}
}
return lines.Length;
}
private sealed class FunctionInfo
{
public required string Name { get; init; }
public required string Module { get; init; }
public bool IsPublic { get; set; }
public int StartLine { get; init; }
public int EndLine { get; init; }
}
}

View File

@@ -0,0 +1,198 @@
// -----------------------------------------------------------------------------
// MavenPackageDownloader.cs
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core (SURF-005)
// Description: Downloads Maven packages (JARs) from Maven Central or custom
// repositories for vulnerability surface analysis.
// -----------------------------------------------------------------------------
using System;
using System.Diagnostics;
using System.IO;
using System.IO.Compression;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.Scanner.VulnSurfaces.Download;
/// <summary>
/// Downloads Maven packages (JARs) from Maven Central or custom repositories.
/// Maven coordinates: groupId:artifactId:version
/// </summary>
public sealed class MavenPackageDownloader : IPackageDownloader
{
private const string DefaultRepositoryUrl = "https://repo1.maven.org/maven2";
private readonly HttpClient _httpClient;
private readonly ILogger<MavenPackageDownloader> _logger;
private readonly MavenDownloaderOptions _options;
public MavenPackageDownloader(
HttpClient httpClient,
ILogger<MavenPackageDownloader> logger,
IOptions<MavenDownloaderOptions> options)
{
_httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value ?? new MavenDownloaderOptions();
}
/// <inheritdoc />
public string Ecosystem => "maven";
/// <inheritdoc />
public async Task<PackageDownloadResult> DownloadAsync(
PackageDownloadRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var sw = Stopwatch.StartNew();
try
{
// Parse Maven coordinates (groupId:artifactId or just artifactId for simple cases)
var (groupId, artifactId) = ParseCoordinates(request.PackageName);
var version = request.Version;
var safeArtifactId = GetSafeDirectoryName(groupId, artifactId);
var extractedDir = Path.Combine(request.OutputDirectory, $"{safeArtifactId}-{version}");
var archivePath = Path.Combine(request.OutputDirectory, $"{safeArtifactId}-{version}.jar");
// Check cache first
if (request.UseCache && Directory.Exists(extractedDir))
{
sw.Stop();
_logger.LogDebug("Using cached Maven package {GroupId}:{ArtifactId} v{Version}",
groupId, artifactId, version);
return PackageDownloadResult.Ok(extractedDir, archivePath, sw.Elapsed, fromCache: true);
}
// Build download URL
// Maven Central path: /<groupId with / instead of .>/<artifactId>/<version>/<artifactId>-<version>.jar
var repositoryUrl = request.RegistryUrl ?? _options.RepositoryUrl ?? DefaultRepositoryUrl;
var groupPath = groupId.Replace('.', '/');
var jarUrl = $"{repositoryUrl}/{groupPath}/{artifactId}/{version}/{artifactId}-{version}.jar";
_logger.LogDebug("Downloading Maven JAR from {Url}", jarUrl);
// Download JAR
Directory.CreateDirectory(request.OutputDirectory);
using var response = await _httpClient.GetAsync(jarUrl, cancellationToken);
if (!response.IsSuccessStatusCode)
{
// Try sources JAR as fallback for source analysis
var sourcesUrl = $"{repositoryUrl}/{groupPath}/{artifactId}/{version}/{artifactId}-{version}-sources.jar";
_logger.LogDebug("Primary JAR not found, trying sources JAR from {Url}", sourcesUrl);
using var sourcesResponse = await _httpClient.GetAsync(sourcesUrl, cancellationToken);
if (!sourcesResponse.IsSuccessStatusCode)
{
sw.Stop();
var error = $"Failed to download: HTTP {(int)response.StatusCode} {response.ReasonPhrase}";
_logger.LogWarning("Maven download failed for {GroupId}:{ArtifactId} v{Version}: {Error}",
groupId, artifactId, version, error);
return PackageDownloadResult.Fail(error, sw.Elapsed);
}
// Save sources JAR
await using (var fs = File.Create(archivePath))
{
await sourcesResponse.Content.CopyToAsync(fs, cancellationToken);
}
}
else
{
// Save primary JAR
await using (var fs = File.Create(archivePath))
{
await response.Content.CopyToAsync(fs, cancellationToken);
}
}
// Extract JAR (it's just a ZIP file)
if (Directory.Exists(extractedDir))
{
Directory.Delete(extractedDir, recursive: true);
}
ZipFile.ExtractToDirectory(archivePath, extractedDir);
sw.Stop();
_logger.LogDebug("Downloaded and extracted Maven {GroupId}:{ArtifactId} v{Version} in {Duration}ms",
groupId, artifactId, version, sw.ElapsedMilliseconds);
return PackageDownloadResult.Ok(extractedDir, archivePath, sw.Elapsed);
}
catch (Exception ex)
{
sw.Stop();
_logger.LogWarning(ex, "Failed to download Maven package {Package} v{Version}",
request.PackageName, request.Version);
return PackageDownloadResult.Fail(ex.Message, sw.Elapsed);
}
}
/// <summary>
/// Parses Maven coordinates from package name.
/// Formats: "groupId:artifactId" or just "artifactId" (assumes default group).
/// </summary>
private (string groupId, string artifactId) ParseCoordinates(string packageName)
{
var parts = packageName.Split(':');
if (parts.Length >= 2)
{
return (parts[0], parts[1]);
}
// If no groupId provided, assume the package name is the artifactId
// and try to derive groupId from common patterns
return (packageName, packageName);
}
/// <summary>
/// Creates a safe directory name from Maven coordinates.
/// </summary>
private static string GetSafeDirectoryName(string groupId, string artifactId)
{
// Use artifactId primarily, prefixed with last segment of groupId if different
var groupLastPart = groupId.Split('.')[^1];
if (groupLastPart.Equals(artifactId, StringComparison.OrdinalIgnoreCase))
{
return artifactId;
}
return $"{groupLastPart}.{artifactId}";
}
}
/// <summary>
/// Options for Maven package downloader.
/// </summary>
public sealed class MavenDownloaderOptions
{
/// <summary>
/// Custom repository URL (null for Maven Central).
/// </summary>
public string? RepositoryUrl { get; set; }
/// <summary>
/// Cache directory for downloaded packages.
/// </summary>
public string? CacheDirectory { get; set; }
/// <summary>
/// Maximum package size in bytes (0 for unlimited).
/// </summary>
public long MaxPackageSize { get; set; }
/// <summary>
/// Whether to prefer sources JARs for analysis.
/// </summary>
public bool PreferSourcesJar { get; set; }
}

View File

@@ -0,0 +1,238 @@
// -----------------------------------------------------------------------------
// NpmPackageDownloader.cs
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core (SURF-004)
// Description: Downloads npm packages from registry.npmjs.org for vulnerability
// surface analysis.
// -----------------------------------------------------------------------------
using System;
using System.Diagnostics;
using System.IO;
using System.Net.Http;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using SharpCompress.Archives;
using SharpCompress.Archives.Tar;
using SharpCompress.Common;
using SharpCompress.Readers;
namespace StellaOps.Scanner.VulnSurfaces.Download;
/// <summary>
/// Downloads npm packages from registry.npmjs.org or custom registries.
/// npm packages are distributed as .tgz (gzipped tarball) files.
/// </summary>
public sealed class NpmPackageDownloader : IPackageDownloader
{
private const string DefaultRegistryUrl = "https://registry.npmjs.org";
private readonly HttpClient _httpClient;
private readonly ILogger<NpmPackageDownloader> _logger;
private readonly NpmDownloaderOptions _options;
public NpmPackageDownloader(
HttpClient httpClient,
ILogger<NpmPackageDownloader> logger,
IOptions<NpmDownloaderOptions> options)
{
_httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value ?? new NpmDownloaderOptions();
}
/// <inheritdoc />
public string Ecosystem => "npm";
/// <inheritdoc />
public async Task<PackageDownloadResult> DownloadAsync(
PackageDownloadRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var sw = Stopwatch.StartNew();
try
{
// Normalize package name (npm uses lowercase, scoped packages have @scope/name)
var packageName = request.PackageName;
var safePackageName = GetSafeDirectoryName(packageName);
var extractedDir = Path.Combine(request.OutputDirectory, $"{safePackageName}-{request.Version}");
var archivePath = Path.Combine(request.OutputDirectory, $"{safePackageName}-{request.Version}.tgz");
// Check cache first
if (request.UseCache && Directory.Exists(extractedDir))
{
sw.Stop();
_logger.LogDebug("Using cached npm package {Package} v{Version}", packageName, request.Version);
return PackageDownloadResult.Ok(extractedDir, archivePath, sw.Elapsed, fromCache: true);
}
// Get package metadata to find tarball URL
var registryUrl = request.RegistryUrl ?? _options.RegistryUrl ?? DefaultRegistryUrl;
var tarballUrl = await GetTarballUrlAsync(registryUrl, packageName, request.Version, cancellationToken);
if (tarballUrl is null)
{
sw.Stop();
var error = $"Version {request.Version} not found for package {packageName}";
_logger.LogWarning("npm package not found: {Error}", error);
return PackageDownloadResult.Fail(error, sw.Elapsed);
}
_logger.LogDebug("Downloading npm package from {Url}", tarballUrl);
// Download tarball
Directory.CreateDirectory(request.OutputDirectory);
using var response = await _httpClient.GetAsync(tarballUrl, cancellationToken);
if (!response.IsSuccessStatusCode)
{
sw.Stop();
var error = $"Failed to download: HTTP {(int)response.StatusCode} {response.ReasonPhrase}";
_logger.LogWarning("npm download failed for {Package} v{Version}: {Error}",
packageName, request.Version, error);
return PackageDownloadResult.Fail(error, sw.Elapsed);
}
// Save archive
await using (var fs = File.Create(archivePath))
{
await response.Content.CopyToAsync(fs, cancellationToken);
}
// Extract .tgz (gzipped tarball)
if (Directory.Exists(extractedDir))
{
Directory.Delete(extractedDir, recursive: true);
}
Directory.CreateDirectory(extractedDir);
ExtractTgz(archivePath, extractedDir);
sw.Stop();
_logger.LogDebug("Downloaded and extracted npm {Package} v{Version} in {Duration}ms",
packageName, request.Version, sw.ElapsedMilliseconds);
return PackageDownloadResult.Ok(extractedDir, archivePath, sw.Elapsed);
}
catch (Exception ex)
{
sw.Stop();
_logger.LogWarning(ex, "Failed to download npm package {Package} v{Version}",
request.PackageName, request.Version);
return PackageDownloadResult.Fail(ex.Message, sw.Elapsed);
}
}
/// <summary>
/// Gets the tarball URL from the npm registry metadata.
/// </summary>
private async Task<string?> GetTarballUrlAsync(
string registryUrl,
string packageName,
string version,
CancellationToken cancellationToken)
{
// Encode scoped packages (@scope/name → @scope%2fname)
var encodedName = Uri.EscapeDataString(packageName).Replace("%40", "@");
var metadataUrl = $"{registryUrl}/{encodedName}";
using var response = await _httpClient.GetAsync(metadataUrl, cancellationToken);
if (!response.IsSuccessStatusCode)
{
_logger.LogDebug("Failed to fetch npm metadata for {Package}: HTTP {StatusCode}",
packageName, (int)response.StatusCode);
return null;
}
await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
using var doc = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken);
// Look for versions.<version>.dist.tarball
if (doc.RootElement.TryGetProperty("versions", out var versions) &&
versions.TryGetProperty(version, out var versionObj) &&
versionObj.TryGetProperty("dist", out var dist) &&
dist.TryGetProperty("tarball", out var tarball))
{
return tarball.GetString();
}
return null;
}
/// <summary>
/// Extracts a .tgz file (gzipped tarball) to the specified directory.
/// </summary>
private static void ExtractTgz(string tgzPath, string destinationDir)
{
using var archive = ArchiveFactory.Open(tgzPath);
foreach (var entry in archive.Entries)
{
if (entry.IsDirectory)
{
continue;
}
// npm packages have a "package/" prefix in the tarball
var entryPath = entry.Key ?? string.Empty;
if (entryPath.StartsWith("package/", StringComparison.OrdinalIgnoreCase))
{
entryPath = entryPath["package/".Length..];
}
var destPath = Path.Combine(destinationDir, entryPath);
var destDir = Path.GetDirectoryName(destPath);
if (!string.IsNullOrEmpty(destDir))
{
Directory.CreateDirectory(destDir);
}
entry.WriteToFile(destPath, new ExtractionOptions
{
ExtractFullPath = false,
Overwrite = true
});
}
}
/// <summary>
/// Converts a package name to a safe directory name.
/// Handles scoped packages like @scope/name → scope-name
/// </summary>
private static string GetSafeDirectoryName(string packageName)
{
return packageName
.Replace("@", string.Empty)
.Replace("/", "-")
.Replace("\\", "-");
}
}
/// <summary>
/// Options for npm package downloader.
/// </summary>
public sealed class NpmDownloaderOptions
{
/// <summary>
/// Custom registry URL (null for registry.npmjs.org).
/// </summary>
public string? RegistryUrl { get; set; }
/// <summary>
/// Cache directory for downloaded packages.
/// </summary>
public string? CacheDirectory { get; set; }
/// <summary>
/// Maximum package size in bytes (0 for unlimited).
/// </summary>
public long MaxPackageSize { get; set; }
}

View File

@@ -0,0 +1,295 @@
// -----------------------------------------------------------------------------
// PyPIPackageDownloader.cs
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core (SURF-006)
// Description: Downloads Python packages from PyPI for vulnerability surface
// analysis. Supports both wheel (.whl) and source distributions.
// -----------------------------------------------------------------------------
using System;
using System.Diagnostics;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Net.Http;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using SharpCompress.Archives;
using SharpCompress.Common;
namespace StellaOps.Scanner.VulnSurfaces.Download;
/// <summary>
/// Downloads Python packages from PyPI (Python Package Index).
/// Supports wheel (.whl) and source distribution (.tar.gz) formats.
/// </summary>
public sealed class PyPIPackageDownloader : IPackageDownloader
{
private const string DefaultRegistryUrl = "https://pypi.org/pypi";
private readonly HttpClient _httpClient;
private readonly ILogger<PyPIPackageDownloader> _logger;
private readonly PyPIDownloaderOptions _options;
public PyPIPackageDownloader(
HttpClient httpClient,
ILogger<PyPIPackageDownloader> logger,
IOptions<PyPIDownloaderOptions> options)
{
_httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value ?? new PyPIDownloaderOptions();
}
/// <inheritdoc />
public string Ecosystem => "pypi";
/// <inheritdoc />
public async Task<PackageDownloadResult> DownloadAsync(
PackageDownloadRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var sw = Stopwatch.StartNew();
try
{
// Normalize package name (PyPI uses lowercase with hyphens)
var normalizedName = NormalizePackageName(request.PackageName);
var safePackageName = GetSafeDirectoryName(normalizedName);
var extractedDir = Path.Combine(request.OutputDirectory, $"{safePackageName}-{request.Version}");
// Check cache first
if (request.UseCache && Directory.Exists(extractedDir))
{
sw.Stop();
_logger.LogDebug("Using cached PyPI package {Package} v{Version}",
request.PackageName, request.Version);
return PackageDownloadResult.Ok(extractedDir, string.Empty, sw.Elapsed, fromCache: true);
}
// Get package metadata to find download URL
var registryUrl = request.RegistryUrl ?? _options.RegistryUrl ?? DefaultRegistryUrl;
var downloadInfo = await GetDownloadUrlAsync(registryUrl, normalizedName, request.Version, cancellationToken);
if (downloadInfo is null)
{
sw.Stop();
var error = $"Version {request.Version} not found for package {request.PackageName}";
_logger.LogWarning("PyPI package not found: {Error}", error);
return PackageDownloadResult.Fail(error, sw.Elapsed);
}
_logger.LogDebug("Downloading PyPI package from {Url} (type: {Type})",
downloadInfo.Url, downloadInfo.PackageType);
// Download package
Directory.CreateDirectory(request.OutputDirectory);
using var response = await _httpClient.GetAsync(downloadInfo.Url, cancellationToken);
if (!response.IsSuccessStatusCode)
{
sw.Stop();
var error = $"Failed to download: HTTP {(int)response.StatusCode} {response.ReasonPhrase}";
_logger.LogWarning("PyPI download failed for {Package} v{Version}: {Error}",
request.PackageName, request.Version, error);
return PackageDownloadResult.Fail(error, sw.Elapsed);
}
// Determine archive extension and path
var extension = downloadInfo.PackageType == "bdist_wheel" ? ".whl" : ".tar.gz";
var archivePath = Path.Combine(request.OutputDirectory, $"{safePackageName}-{request.Version}{extension}");
// Save archive
await using (var fs = File.Create(archivePath))
{
await response.Content.CopyToAsync(fs, cancellationToken);
}
// Extract
if (Directory.Exists(extractedDir))
{
Directory.Delete(extractedDir, recursive: true);
}
Directory.CreateDirectory(extractedDir);
if (downloadInfo.PackageType == "bdist_wheel")
{
// Wheel files are ZIP archives
ZipFile.ExtractToDirectory(archivePath, extractedDir);
}
else
{
// Source distributions are .tar.gz
ExtractTarGz(archivePath, extractedDir);
}
sw.Stop();
_logger.LogDebug("Downloaded and extracted PyPI {Package} v{Version} in {Duration}ms",
request.PackageName, request.Version, sw.ElapsedMilliseconds);
return PackageDownloadResult.Ok(extractedDir, archivePath, sw.Elapsed);
}
catch (Exception ex)
{
sw.Stop();
_logger.LogWarning(ex, "Failed to download PyPI package {Package} v{Version}",
request.PackageName, request.Version);
return PackageDownloadResult.Fail(ex.Message, sw.Elapsed);
}
}
/// <summary>
/// Gets the download URL from PyPI JSON API.
/// Prefers source distributions for better AST analysis.
/// </summary>
private async Task<PyPIDownloadInfo?> GetDownloadUrlAsync(
string registryUrl,
string packageName,
string version,
CancellationToken cancellationToken)
{
var metadataUrl = $"{registryUrl}/{packageName}/{version}/json";
using var response = await _httpClient.GetAsync(metadataUrl, cancellationToken);
if (!response.IsSuccessStatusCode)
{
_logger.LogDebug("Failed to fetch PyPI metadata for {Package} v{Version}: HTTP {StatusCode}",
packageName, version, (int)response.StatusCode);
return null;
}
await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken);
using var doc = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken);
if (!doc.RootElement.TryGetProperty("urls", out var urls))
{
return null;
}
// Prefer source distribution for AST analysis, fall back to wheel
PyPIDownloadInfo? sourceDistribution = null;
PyPIDownloadInfo? wheel = null;
foreach (var urlEntry in urls.EnumerateArray())
{
var packageType = urlEntry.TryGetProperty("packagetype", out var pt) ? pt.GetString() : null;
var url = urlEntry.TryGetProperty("url", out var u) ? u.GetString() : null;
if (url is null)
{
continue;
}
if (packageType == "sdist")
{
sourceDistribution = new PyPIDownloadInfo(url, "sdist");
}
else if (packageType == "bdist_wheel" && wheel is null)
{
wheel = new PyPIDownloadInfo(url, "bdist_wheel");
}
}
// Prefer source distribution for better Python AST analysis
return _options.PreferSourceDistribution
? (sourceDistribution ?? wheel)
: (wheel ?? sourceDistribution);
}
/// <summary>
/// Extracts a .tar.gz file to the specified directory.
/// </summary>
private static void ExtractTarGz(string tarGzPath, string destinationDir)
{
using var archive = ArchiveFactory.Open(tarGzPath);
foreach (var entry in archive.Entries)
{
if (entry.IsDirectory)
{
continue;
}
var entryPath = entry.Key ?? string.Empty;
// Source distributions typically have a top-level directory like "package-1.0.0/"
// Remove it to flatten the structure
var pathParts = entryPath.Split('/');
if (pathParts.Length > 1)
{
entryPath = string.Join('/', pathParts.Skip(1));
}
if (string.IsNullOrEmpty(entryPath))
{
continue;
}
var destPath = Path.Combine(destinationDir, entryPath);
var destDir = Path.GetDirectoryName(destPath);
if (!string.IsNullOrEmpty(destDir))
{
Directory.CreateDirectory(destDir);
}
entry.WriteToFile(destPath, new ExtractionOptions
{
ExtractFullPath = false,
Overwrite = true
});
}
}
/// <summary>
/// Normalizes a PyPI package name (lowercase, hyphens).
/// </summary>
private static string NormalizePackageName(string packageName)
{
return packageName.ToLowerInvariant().Replace('_', '-');
}
/// <summary>
/// Creates a safe directory name from package name.
/// </summary>
private static string GetSafeDirectoryName(string packageName)
{
return packageName.Replace('-', '_');
}
private sealed record PyPIDownloadInfo(string Url, string PackageType);
}
/// <summary>
/// Options for PyPI package downloader.
/// </summary>
public sealed class PyPIDownloaderOptions
{
/// <summary>
/// Custom registry URL (null for pypi.org).
/// </summary>
public string? RegistryUrl { get; set; }
/// <summary>
/// Cache directory for downloaded packages.
/// </summary>
public string? CacheDirectory { get; set; }
/// <summary>
/// Maximum package size in bytes (0 for unlimited).
/// </summary>
public long MaxPackageSize { get; set; }
/// <summary>
/// Whether to prefer source distributions over wheels.
/// Default true for better AST analysis.
/// </summary>
public bool PreferSourceDistribution { get; set; } = true;
}

View File

@@ -0,0 +1,508 @@
// -----------------------------------------------------------------------------
// JavaBytecodeFingerprinter.cs
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core (SURF-010)
// Description: Java method fingerprinting using bytecode parsing.
// Parses .class files from JAR archives for method extraction.
// -----------------------------------------------------------------------------
using System;
using System.Buffers.Binary;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.VulnSurfaces.Fingerprint;
/// <summary>
/// Computes method fingerprints for Java packages using bytecode hashing.
/// Parses .class files from extracted JAR archives.
/// </summary>
public sealed class JavaBytecodeFingerprinter : IMethodFingerprinter
{
private readonly ILogger<JavaBytecodeFingerprinter> _logger;
// Java class file magic number
private const uint ClassFileMagic = 0xCAFEBABE;
public JavaBytecodeFingerprinter(ILogger<JavaBytecodeFingerprinter> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public string Ecosystem => "maven";
/// <inheritdoc />
public async Task<FingerprintResult> FingerprintAsync(
FingerprintRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var sw = Stopwatch.StartNew();
var methods = new Dictionary<string, MethodFingerprint>(StringComparer.Ordinal);
try
{
var classFiles = GetClassFiles(request.PackagePath);
var filesProcessed = 0;
foreach (var classPath in classFiles)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
await ProcessClassFileAsync(classPath, request.PackagePath, methods, request, cancellationToken);
filesProcessed++;
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Failed to process class file {Path}", classPath);
}
}
sw.Stop();
_logger.LogDebug(
"Fingerprinted {MethodCount} methods from {FileCount} class files in {Duration}ms",
methods.Count, filesProcessed, sw.ElapsedMilliseconds);
return FingerprintResult.Ok(methods, sw.Elapsed, filesProcessed);
}
catch (Exception ex)
{
sw.Stop();
_logger.LogWarning(ex, "Failed to fingerprint Java package at {Path}", request.PackagePath);
return FingerprintResult.Fail(ex.Message, sw.Elapsed);
}
}
private static string[] GetClassFiles(string packagePath)
{
if (!Directory.Exists(packagePath))
return [];
return Directory.GetFiles(packagePath, "*.class", SearchOption.AllDirectories)
.Where(f =>
{
// Skip META-INF and common non-source directories
var relativePath = f.Replace(packagePath, "").TrimStart(Path.DirectorySeparatorChar);
return !relativePath.StartsWith("META-INF", StringComparison.OrdinalIgnoreCase);
})
.ToArray();
}
private async Task ProcessClassFileAsync(
string classPath,
string packagePath,
Dictionary<string, MethodFingerprint> methods,
FingerprintRequest request,
CancellationToken cancellationToken)
{
var bytes = await File.ReadAllBytesAsync(classPath, cancellationToken);
if (bytes.Length < 10)
return;
// Verify magic number
var magic = BinaryPrimitives.ReadUInt32BigEndian(bytes);
if (magic != ClassFileMagic)
{
_logger.LogDebug("Invalid class file magic in {Path}", classPath);
return;
}
try
{
var classInfo = ParseClassFile(bytes);
var relativePath = Path.GetRelativePath(packagePath, classPath);
foreach (var method in classInfo.Methods)
{
// Skip private methods unless requested
if (!request.IncludePrivateMethods && !method.IsPublic && !method.IsProtected)
continue;
// Skip synthetic and bridge methods
if (method.IsSynthetic || method.IsBridge)
continue;
var methodKey = $"{classInfo.ClassName}::{method.Name}{method.Descriptor}";
methods[methodKey] = new MethodFingerprint
{
MethodKey = methodKey,
DeclaringType = classInfo.ClassName,
Name = method.Name,
Signature = ParseDescriptor(method.Descriptor),
BodyHash = method.BodyHash,
SignatureHash = ComputeHash(method.Descriptor),
IsPublic = method.IsPublic,
BodySize = method.CodeLength,
SourceFile = relativePath
};
}
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Error parsing class file {Path}", classPath);
}
}
private JavaClassInfo ParseClassFile(byte[] bytes)
{
var reader = new JavaClassReader(bytes);
// Skip magic (already verified)
reader.Skip(4);
// Version info
_ = reader.ReadU2(); // minor version
_ = reader.ReadU2(); // major version
// Constant pool
var constantPool = ParseConstantPool(reader);
// Access flags
var accessFlags = reader.ReadU2();
// This class
var thisClassIndex = reader.ReadU2();
var className = ResolveClassName(constantPool, thisClassIndex);
// Super class
_ = reader.ReadU2(); // super class index
// Interfaces
var interfaceCount = reader.ReadU2();
reader.Skip(interfaceCount * 2);
// Fields
var fieldCount = reader.ReadU2();
for (var i = 0; i < fieldCount; i++)
{
SkipFieldOrMethod(reader);
}
// Methods
var methodCount = reader.ReadU2();
var methods = new List<JavaMethodInfo>();
for (var i = 0; i < methodCount; i++)
{
var method = ParseMethod(reader, constantPool);
methods.Add(method);
}
return new JavaClassInfo
{
ClassName = className,
AccessFlags = accessFlags,
Methods = methods
};
}
private static List<ConstantPoolEntry> ParseConstantPool(JavaClassReader reader)
{
var count = reader.ReadU2();
var pool = new List<ConstantPoolEntry>(count) { new() }; // Index 0 is unused
for (var i = 1; i < count; i++)
{
var tag = reader.ReadU1();
var entry = new ConstantPoolEntry { Tag = tag };
switch (tag)
{
case 1: // CONSTANT_Utf8
var length = reader.ReadU2();
entry.StringValue = Encoding.UTF8.GetString(reader.ReadBytes(length));
break;
case 3: // CONSTANT_Integer
case 4: // CONSTANT_Float
reader.Skip(4);
break;
case 5: // CONSTANT_Long
case 6: // CONSTANT_Double
reader.Skip(8);
pool.Add(new ConstantPoolEntry()); // Takes two entries
i++;
break;
case 7: // CONSTANT_Class
case 8: // CONSTANT_String
entry.NameIndex = reader.ReadU2();
break;
case 9: // CONSTANT_Fieldref
case 10: // CONSTANT_Methodref
case 11: // CONSTANT_InterfaceMethodref
entry.ClassIndex = reader.ReadU2();
entry.NameAndTypeIndex = reader.ReadU2();
break;
case 12: // CONSTANT_NameAndType
entry.NameIndex = reader.ReadU2();
entry.DescriptorIndex = reader.ReadU2();
break;
case 15: // CONSTANT_MethodHandle
reader.Skip(3);
break;
case 16: // CONSTANT_MethodType
reader.Skip(2);
break;
case 17: // CONSTANT_Dynamic
case 18: // CONSTANT_InvokeDynamic
reader.Skip(4);
break;
case 19: // CONSTANT_Module
case 20: // CONSTANT_Package
reader.Skip(2);
break;
}
pool.Add(entry);
}
return pool;
}
private static JavaMethodInfo ParseMethod(JavaClassReader reader, List<ConstantPoolEntry> constantPool)
{
var accessFlags = reader.ReadU2();
var nameIndex = reader.ReadU2();
var descriptorIndex = reader.ReadU2();
var name = GetUtf8(constantPool, nameIndex);
var descriptor = GetUtf8(constantPool, descriptorIndex);
// Attributes
var attributeCount = reader.ReadU2();
var codeBytes = Array.Empty<byte>();
var codeLength = 0;
for (var i = 0; i < attributeCount; i++)
{
var attrNameIndex = reader.ReadU2();
var attrLength = reader.ReadU4();
var attrName = GetUtf8(constantPool, attrNameIndex);
if (attrName == "Code")
{
// max_stack (2) + max_locals (2) + code_length (4)
reader.Skip(4);
codeLength = (int)reader.ReadU4();
codeBytes = reader.ReadBytes(codeLength);
// Skip exception table and code attributes
var remainingLength = attrLength - 8 - codeLength;
reader.Skip((int)remainingLength);
}
else
{
reader.Skip((int)attrLength);
}
}
return new JavaMethodInfo
{
Name = name,
Descriptor = descriptor,
AccessFlags = accessFlags,
CodeLength = codeLength,
BodyHash = ComputeHash(codeBytes)
};
}
private static void SkipFieldOrMethod(JavaClassReader reader)
{
reader.Skip(6); // access_flags + name_index + descriptor_index
var attributeCount = reader.ReadU2();
for (var i = 0; i < attributeCount; i++)
{
reader.Skip(2); // attribute_name_index
var length = reader.ReadU4();
reader.Skip((int)length);
}
}
private static string ResolveClassName(List<ConstantPoolEntry> pool, int classIndex)
{
if (classIndex <= 0 || classIndex >= pool.Count)
return "Unknown";
var classEntry = pool[classIndex];
if (classEntry.Tag != 7)
return "Unknown";
return GetUtf8(pool, classEntry.NameIndex).Replace('/', '.');
}
private static string GetUtf8(List<ConstantPoolEntry> pool, int index)
{
if (index <= 0 || index >= pool.Count)
return string.Empty;
return pool[index].StringValue ?? string.Empty;
}
private static string ParseDescriptor(string descriptor)
{
// Convert Java method descriptor to readable signature
// e.g., (Ljava/lang/String;I)V -> (String, int) void
var sb = new StringBuilder();
var i = 0;
if (descriptor.StartsWith('('))
{
sb.Append('(');
i = 1;
var first = true;
while (i < descriptor.Length && descriptor[i] != ')')
{
if (!first) sb.Append(", ");
first = false;
var (typeName, newIndex) = ParseType(descriptor, i);
sb.Append(typeName);
i = newIndex;
}
sb.Append(')');
i++; // Skip ')'
}
if (i < descriptor.Length)
{
var (returnType, _) = ParseType(descriptor, i);
sb.Append(" -> ");
sb.Append(returnType);
}
return sb.ToString();
}
private static (string typeName, int newIndex) ParseType(string descriptor, int index)
{
if (index >= descriptor.Length)
return ("void", index);
var c = descriptor[index];
return c switch
{
'B' => ("byte", index + 1),
'C' => ("char", index + 1),
'D' => ("double", index + 1),
'F' => ("float", index + 1),
'I' => ("int", index + 1),
'J' => ("long", index + 1),
'S' => ("short", index + 1),
'Z' => ("boolean", index + 1),
'V' => ("void", index + 1),
'[' => ParseArrayType(descriptor, index),
'L' => ParseObjectType(descriptor, index),
_ => ("?", index + 1)
};
}
private static (string typeName, int newIndex) ParseArrayType(string descriptor, int index)
{
var (elementType, newIndex) = ParseType(descriptor, index + 1);
return ($"{elementType}[]", newIndex);
}
private static (string typeName, int newIndex) ParseObjectType(string descriptor, int index)
{
var semicolonIndex = descriptor.IndexOf(';', index);
if (semicolonIndex < 0)
return ("Object", index + 1);
var className = descriptor[(index + 1)..semicolonIndex];
var simpleName = className.Split('/')[^1];
return (simpleName, semicolonIndex + 1);
}
private static string ComputeHash(byte[] data)
{
if (data.Length == 0)
return "empty";
var hashBytes = SHA256.HashData(data);
return Convert.ToHexStringLower(hashBytes[..16]);
}
private static string ComputeHash(string data)
{
if (string.IsNullOrEmpty(data))
return "empty";
return ComputeHash(Encoding.UTF8.GetBytes(data));
}
private sealed class JavaClassReader(byte[] data)
{
private int _position;
public byte ReadU1() => data[_position++];
public ushort ReadU2()
{
var value = BinaryPrimitives.ReadUInt16BigEndian(data.AsSpan(_position));
_position += 2;
return value;
}
public uint ReadU4()
{
var value = BinaryPrimitives.ReadUInt32BigEndian(data.AsSpan(_position));
_position += 4;
return value;
}
public byte[] ReadBytes(int count)
{
var result = data[_position..(_position + count)];
_position += count;
return result;
}
public void Skip(int count) => _position += count;
}
private sealed class ConstantPoolEntry
{
public byte Tag { get; init; }
public string? StringValue { get; set; }
public int NameIndex { get; set; }
public int DescriptorIndex { get; set; }
public int ClassIndex { get; set; }
public int NameAndTypeIndex { get; set; }
}
private sealed record JavaClassInfo
{
public required string ClassName { get; init; }
public ushort AccessFlags { get; init; }
public required List<JavaMethodInfo> Methods { get; init; }
}
private sealed record JavaMethodInfo
{
public required string Name { get; init; }
public required string Descriptor { get; init; }
public ushort AccessFlags { get; init; }
public int CodeLength { get; init; }
public required string BodyHash { get; init; }
public bool IsPublic => (AccessFlags & 0x0001) != 0;
public bool IsProtected => (AccessFlags & 0x0004) != 0;
public bool IsSynthetic => (AccessFlags & 0x1000) != 0;
public bool IsBridge => (AccessFlags & 0x0040) != 0;
}
}

View File

@@ -0,0 +1,492 @@
// -----------------------------------------------------------------------------
// JavaScriptMethodFingerprinter.cs
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core (SURF-009)
// Description: JavaScript/Node.js method fingerprinting using AST hashing.
// Uses Acornima for JavaScript parsing in .NET.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.VulnSurfaces.Fingerprint;
/// <summary>
/// Computes method fingerprints for JavaScript/Node.js packages using AST-based hashing.
/// Parses .js/.mjs/.cjs files and extracts function declarations, methods, and arrow functions.
/// </summary>
public sealed partial class JavaScriptMethodFingerprinter : IMethodFingerprinter
{
private readonly ILogger<JavaScriptMethodFingerprinter> _logger;
// Regex patterns for JavaScript function extraction
[GeneratedRegex(@"(export\s+)?(async\s+)?function\s+(\w+)\s*\(([^)]*)\)\s*\{", RegexOptions.Compiled)]
private static partial Regex FunctionDeclarationRegex();
[GeneratedRegex(@"(\w+)\s*:\s*(async\s+)?function\s*\(([^)]*)\)\s*\{", RegexOptions.Compiled)]
private static partial Regex ObjectMethodRegex();
[GeneratedRegex(@"(async\s+)?(\w+)\s*\(([^)]*)\)\s*\{", RegexOptions.Compiled)]
private static partial Regex ClassMethodRegex();
[GeneratedRegex(@"(const|let|var)\s+(\w+)\s*=\s*(async\s+)?\(([^)]*)\)\s*=>", RegexOptions.Compiled)]
private static partial Regex ArrowFunctionRegex();
[GeneratedRegex(@"class\s+(\w+)(?:\s+extends\s+(\w+))?\s*\{", RegexOptions.Compiled)]
private static partial Regex ClassDeclarationRegex();
[GeneratedRegex(@"module\.exports\s*=\s*(?:class\s+)?(\w+)", RegexOptions.Compiled)]
private static partial Regex ModuleExportsRegex();
public JavaScriptMethodFingerprinter(ILogger<JavaScriptMethodFingerprinter> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public string Ecosystem => "npm";
/// <inheritdoc />
public async Task<FingerprintResult> FingerprintAsync(
FingerprintRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var sw = Stopwatch.StartNew();
var methods = new Dictionary<string, MethodFingerprint>(StringComparer.Ordinal);
try
{
var jsFiles = GetJavaScriptFiles(request.PackagePath);
var filesProcessed = 0;
foreach (var jsPath in jsFiles)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
await ProcessJavaScriptFileAsync(jsPath, request.PackagePath, methods, request, cancellationToken);
filesProcessed++;
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Failed to process JavaScript file {Path}", jsPath);
}
}
sw.Stop();
_logger.LogDebug(
"Fingerprinted {MethodCount} functions from {FileCount} files in {Duration}ms",
methods.Count, filesProcessed, sw.ElapsedMilliseconds);
return FingerprintResult.Ok(methods, sw.Elapsed, filesProcessed);
}
catch (Exception ex)
{
sw.Stop();
_logger.LogWarning(ex, "Failed to fingerprint JavaScript package at {Path}", request.PackagePath);
return FingerprintResult.Fail(ex.Message, sw.Elapsed);
}
}
private static string[] GetJavaScriptFiles(string packagePath)
{
if (!Directory.Exists(packagePath))
return [];
return Directory.GetFiles(packagePath, "*", SearchOption.AllDirectories)
.Where(f =>
{
var ext = Path.GetExtension(f).ToLowerInvariant();
return ext is ".js" or ".mjs" or ".cjs" or ".jsx";
})
.Where(f =>
{
// Skip common non-source directories
var relativePath = f.Replace(packagePath, "").TrimStart(Path.DirectorySeparatorChar);
return !relativePath.StartsWith("node_modules", StringComparison.OrdinalIgnoreCase) &&
!relativePath.StartsWith("dist", StringComparison.OrdinalIgnoreCase) &&
!relativePath.Contains(".min.", StringComparison.OrdinalIgnoreCase);
})
.ToArray();
}
private async Task ProcessJavaScriptFileAsync(
string jsPath,
string packagePath,
Dictionary<string, MethodFingerprint> methods,
FingerprintRequest request,
CancellationToken cancellationToken)
{
var content = await File.ReadAllTextAsync(jsPath, cancellationToken);
var relativePath = Path.GetRelativePath(packagePath, jsPath);
var moduleName = GetModuleName(relativePath);
// Extract function declarations
ExtractFunctionDeclarations(content, moduleName, relativePath, methods, request);
// Extract class methods
ExtractClassMethods(content, moduleName, relativePath, methods, request);
// Extract arrow functions
ExtractArrowFunctions(content, moduleName, relativePath, methods, request);
// Extract object methods
ExtractObjectMethods(content, moduleName, relativePath, methods, request);
}
private void ExtractFunctionDeclarations(
string content,
string moduleName,
string filePath,
Dictionary<string, MethodFingerprint> methods,
FingerprintRequest request)
{
var matches = FunctionDeclarationRegex().Matches(content);
foreach (Match match in matches)
{
var isExported = !string.IsNullOrEmpty(match.Groups[1].Value);
var isAsync = !string.IsNullOrEmpty(match.Groups[2].Value);
var functionName = match.Groups[3].Value;
var parameters = match.Groups[4].Value.Trim();
// Skip private functions unless requested
if (!request.IncludePrivateMethods && !isExported)
continue;
var bodyHash = ComputeFunctionBodyHash(content, match.Index);
var methodKey = $"{moduleName}::{functionName}({NormalizeParams(parameters)})";
methods[methodKey] = new MethodFingerprint
{
MethodKey = methodKey,
DeclaringType = moduleName,
Name = functionName,
Signature = $"{(isAsync ? "async " : "")}function {functionName}({parameters})",
BodyHash = bodyHash,
IsPublic = isExported,
SourceFile = filePath,
LineNumber = GetLineNumber(content, match.Index)
};
}
}
private void ExtractClassMethods(
string content,
string moduleName,
string filePath,
Dictionary<string, MethodFingerprint> methods,
FingerprintRequest request)
{
var classMatches = ClassDeclarationRegex().Matches(content);
foreach (Match classMatch in classMatches)
{
var className = classMatch.Groups[1].Value;
var classBodyStart = content.IndexOf('{', classMatch.Index);
if (classBodyStart < 0) continue;
// Find class body (simple brace matching)
var classBody = ExtractBracedBlock(content, classBodyStart);
if (string.IsNullOrEmpty(classBody)) continue;
var methodMatches = ClassMethodRegex().Matches(classBody);
foreach (Match methodMatch in methodMatches)
{
var isAsync = !string.IsNullOrEmpty(methodMatch.Groups[1].Value);
var methodName = methodMatch.Groups[2].Value;
var parameters = methodMatch.Groups[3].Value.Trim();
// Skip constructor unless specifically requested
if (methodName == "constructor" && !request.IncludePrivateMethods)
continue;
// Skip private methods (prefixed with #)
if (methodName.StartsWith('#') && !request.IncludePrivateMethods)
continue;
var bodyHash = ComputeFunctionBodyHash(classBody, methodMatch.Index);
var methodKey = $"{moduleName}.{className}::{methodName}({NormalizeParams(parameters)})";
methods[methodKey] = new MethodFingerprint
{
MethodKey = methodKey,
DeclaringType = $"{moduleName}.{className}",
Name = methodName,
Signature = $"{(isAsync ? "async " : "")}{methodName}({parameters})",
BodyHash = bodyHash,
IsPublic = !methodName.StartsWith('#'),
SourceFile = filePath,
LineNumber = GetLineNumber(content, classMatch.Index + methodMatch.Index)
};
}
}
}
private void ExtractArrowFunctions(
string content,
string moduleName,
string filePath,
Dictionary<string, MethodFingerprint> methods,
FingerprintRequest request)
{
var matches = ArrowFunctionRegex().Matches(content);
foreach (Match match in matches)
{
var declarationType = match.Groups[1].Value; // const/let/var
var functionName = match.Groups[2].Value;
var isAsync = !string.IsNullOrEmpty(match.Groups[3].Value);
var parameters = match.Groups[4].Value.Trim();
// Check if it's exported
var lineStart = content.LastIndexOf('\n', match.Index) + 1;
var line = content[lineStart..match.Index];
var isExported = line.Contains("export", StringComparison.Ordinal);
if (!request.IncludePrivateMethods && !isExported)
continue;
var bodyHash = ComputeArrowFunctionBodyHash(content, match.Index);
var methodKey = $"{moduleName}::{functionName}({NormalizeParams(parameters)})";
methods[methodKey] = new MethodFingerprint
{
MethodKey = methodKey,
DeclaringType = moduleName,
Name = functionName,
Signature = $"{(isAsync ? "async " : "")}({parameters}) =>",
BodyHash = bodyHash,
IsPublic = isExported,
SourceFile = filePath,
LineNumber = GetLineNumber(content, match.Index)
};
}
}
private void ExtractObjectMethods(
string content,
string moduleName,
string filePath,
Dictionary<string, MethodFingerprint> methods,
FingerprintRequest request)
{
var matches = ObjectMethodRegex().Matches(content);
foreach (Match match in matches)
{
var methodName = match.Groups[1].Value;
var isAsync = !string.IsNullOrEmpty(match.Groups[2].Value);
var parameters = match.Groups[3].Value.Trim();
var bodyHash = ComputeFunctionBodyHash(content, match.Index);
var methodKey = $"{moduleName}::obj.{methodName}({NormalizeParams(parameters)})";
// Object methods are typically exported if they're in module.exports
methods[methodKey] = new MethodFingerprint
{
MethodKey = methodKey,
DeclaringType = moduleName,
Name = methodName,
Signature = $"{(isAsync ? "async " : "")}{methodName}({parameters})",
BodyHash = bodyHash,
IsPublic = true,
SourceFile = filePath,
LineNumber = GetLineNumber(content, match.Index)
};
}
}
private static string GetModuleName(string relativePath)
{
// Convert path to module name: src/utils/helper.js -> src.utils.helper
var withoutExt = Path.ChangeExtension(relativePath, null);
return withoutExt
.Replace(Path.DirectorySeparatorChar, '.')
.Replace(Path.AltDirectorySeparatorChar, '.');
}
private static string NormalizeParams(string parameters)
{
if (string.IsNullOrWhiteSpace(parameters))
return "";
// Remove default values, just keep param names
var normalized = string.Join(",", parameters
.Split(',')
.Select(p => p.Split('=')[0].Trim())
.Where(p => !string.IsNullOrEmpty(p)));
return normalized;
}
private static string ComputeFunctionBodyHash(string content, int startIndex)
{
var braceStart = content.IndexOf('{', startIndex);
if (braceStart < 0) return "empty";
var body = ExtractBracedBlock(content, braceStart);
return ComputeHash(NormalizeBody(body));
}
private static string ComputeArrowFunctionBodyHash(string content, int startIndex)
{
var arrowIndex = content.IndexOf("=>", startIndex);
if (arrowIndex < 0) return "empty";
var bodyStart = arrowIndex + 2;
while (bodyStart < content.Length && char.IsWhiteSpace(content[bodyStart]))
bodyStart++;
if (bodyStart >= content.Length) return "empty";
// Check if it's a block or expression
if (content[bodyStart] == '{')
{
var body = ExtractBracedBlock(content, bodyStart);
return ComputeHash(NormalizeBody(body));
}
else
{
// Expression body - find end by semicolon or newline
var endIndex = content.IndexOfAny([';', '\n'], bodyStart);
if (endIndex < 0) endIndex = content.Length;
var body = content[bodyStart..endIndex];
return ComputeHash(NormalizeBody(body));
}
}
private static string ExtractBracedBlock(string content, int braceStart)
{
if (braceStart >= content.Length || content[braceStart] != '{')
return string.Empty;
var depth = 0;
var i = braceStart;
while (i < content.Length)
{
var c = content[i];
if (c == '{') depth++;
else if (c == '}')
{
depth--;
if (depth == 0)
return content[(braceStart + 1)..i];
}
i++;
}
return string.Empty;
}
private static string NormalizeBody(string body)
{
if (string.IsNullOrWhiteSpace(body))
return "empty";
// Remove comments, normalize whitespace
var sb = new StringBuilder();
var inLineComment = false;
var inBlockComment = false;
var inString = false;
var stringChar = '\0';
for (var i = 0; i < body.Length; i++)
{
var c = body[i];
var next = i + 1 < body.Length ? body[i + 1] : '\0';
if (inLineComment)
{
if (c == '\n') inLineComment = false;
continue;
}
if (inBlockComment)
{
if (c == '*' && next == '/')
{
inBlockComment = false;
i++;
}
continue;
}
if (inString)
{
sb.Append(c);
if (c == stringChar && (i == 0 || body[i - 1] != '\\'))
inString = false;
continue;
}
if (c == '/' && next == '/')
{
inLineComment = true;
i++;
continue;
}
if (c == '/' && next == '*')
{
inBlockComment = true;
i++;
continue;
}
if (c is '"' or '\'' or '`')
{
inString = true;
stringChar = c;
sb.Append(c);
continue;
}
// Normalize whitespace
if (char.IsWhiteSpace(c))
{
if (sb.Length > 0 && !char.IsWhiteSpace(sb[^1]))
sb.Append(' ');
}
else
{
sb.Append(c);
}
}
return sb.ToString().Trim();
}
private static string ComputeHash(string content)
{
if (string.IsNullOrEmpty(content))
return "empty";
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(content));
return Convert.ToHexStringLower(bytes[..16]); // First 32 hex chars
}
private static int GetLineNumber(string content, int index)
{
var lineNumber = 1;
for (var i = 0; i < index && i < content.Length; i++)
{
if (content[i] == '\n')
lineNumber++;
}
return lineNumber;
}
}

View File

@@ -0,0 +1,433 @@
// -----------------------------------------------------------------------------
// PythonAstFingerprinter.cs
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core (SURF-011)
// Description: Python method fingerprinting using AST-based hashing.
// Parses .py files and extracts function and method definitions.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
namespace StellaOps.Scanner.VulnSurfaces.Fingerprint;
/// <summary>
/// Computes method fingerprints for Python packages using AST-based hashing.
/// Parses .py files and extracts function definitions and class methods.
/// </summary>
public sealed partial class PythonAstFingerprinter : IMethodFingerprinter
{
private readonly ILogger<PythonAstFingerprinter> _logger;
// Regex patterns for Python function extraction
[GeneratedRegex(@"^(async\s+)?def\s+(\w+)\s*\(([^)]*)\)\s*(?:->\s*[^:]+)?:", RegexOptions.Multiline | RegexOptions.Compiled)]
private static partial Regex FunctionDefRegex();
[GeneratedRegex(@"^class\s+(\w+)(?:\s*\([^)]*\))?\s*:", RegexOptions.Multiline | RegexOptions.Compiled)]
private static partial Regex ClassDefRegex();
[GeneratedRegex(@"^(\s+)(async\s+)?def\s+(\w+)\s*\(([^)]*)\)\s*(?:->\s*[^:]+)?:", RegexOptions.Multiline | RegexOptions.Compiled)]
private static partial Regex MethodDefRegex();
[GeneratedRegex(@"^(\s*)@\w+(?:\([^)]*\))?$", RegexOptions.Multiline | RegexOptions.Compiled)]
private static partial Regex DecoratorRegex();
public PythonAstFingerprinter(ILogger<PythonAstFingerprinter> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public string Ecosystem => "pypi";
/// <inheritdoc />
public async Task<FingerprintResult> FingerprintAsync(
FingerprintRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var sw = Stopwatch.StartNew();
var methods = new Dictionary<string, MethodFingerprint>(StringComparer.Ordinal);
try
{
var pyFiles = GetPythonFiles(request.PackagePath);
var filesProcessed = 0;
foreach (var pyPath in pyFiles)
{
cancellationToken.ThrowIfCancellationRequested();
try
{
await ProcessPythonFileAsync(pyPath, request.PackagePath, methods, request, cancellationToken);
filesProcessed++;
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Failed to process Python file {Path}", pyPath);
}
}
sw.Stop();
_logger.LogDebug(
"Fingerprinted {MethodCount} functions from {FileCount} files in {Duration}ms",
methods.Count, filesProcessed, sw.ElapsedMilliseconds);
return FingerprintResult.Ok(methods, sw.Elapsed, filesProcessed);
}
catch (Exception ex)
{
sw.Stop();
_logger.LogWarning(ex, "Failed to fingerprint Python package at {Path}", request.PackagePath);
return FingerprintResult.Fail(ex.Message, sw.Elapsed);
}
}
private static string[] GetPythonFiles(string packagePath)
{
if (!Directory.Exists(packagePath))
return [];
return Directory.GetFiles(packagePath, "*.py", SearchOption.AllDirectories)
.Where(f =>
{
var relativePath = f.Replace(packagePath, "").TrimStart(Path.DirectorySeparatorChar);
return !relativePath.StartsWith("test", StringComparison.OrdinalIgnoreCase) &&
!relativePath.Contains("__pycache__", StringComparison.OrdinalIgnoreCase) &&
!relativePath.Contains(".egg-info", StringComparison.OrdinalIgnoreCase);
})
.ToArray();
}
private async Task ProcessPythonFileAsync(
string pyPath,
string packagePath,
Dictionary<string, MethodFingerprint> methods,
FingerprintRequest request,
CancellationToken cancellationToken)
{
var content = await File.ReadAllTextAsync(pyPath, cancellationToken);
var lines = content.Split('\n');
var relativePath = Path.GetRelativePath(packagePath, pyPath);
var moduleName = GetModuleName(relativePath);
// Extract module-level functions
ExtractFunctions(content, lines, moduleName, relativePath, methods, request);
// Extract class methods
ExtractClassMethods(content, lines, moduleName, relativePath, methods, request);
}
private void ExtractFunctions(
string content,
string[] lines,
string moduleName,
string filePath,
Dictionary<string, MethodFingerprint> methods,
FingerprintRequest request)
{
var matches = FunctionDefRegex().Matches(content);
foreach (Match match in matches)
{
// Skip if this is inside a class (has leading whitespace)
var lineStart = content.LastIndexOf('\n', Math.Max(0, match.Index - 1)) + 1;
if (lineStart < match.Index && !string.IsNullOrWhiteSpace(content[lineStart..match.Index]))
continue;
var isAsync = !string.IsNullOrEmpty(match.Groups[1].Value);
var functionName = match.Groups[2].Value;
var parameters = match.Groups[3].Value.Trim();
// Skip private functions unless requested
if (!request.IncludePrivateMethods && functionName.StartsWith('_') && !functionName.StartsWith("__"))
continue;
var lineNumber = GetLineNumber(content, match.Index);
var bodyHash = ComputeFunctionBodyHash(lines, lineNumber - 1, 0);
var methodKey = $"{moduleName}::{functionName}({NormalizeParams(parameters)})";
// Check for decorators to determine if it's exported
var isExported = !functionName.StartsWith('_');
methods[methodKey] = new MethodFingerprint
{
MethodKey = methodKey,
DeclaringType = moduleName,
Name = functionName,
Signature = $"{(isAsync ? "async " : "")}def {functionName}({parameters})",
BodyHash = bodyHash,
IsPublic = isExported,
SourceFile = filePath,
LineNumber = lineNumber
};
}
}
private void ExtractClassMethods(
string content,
string[] lines,
string moduleName,
string filePath,
Dictionary<string, MethodFingerprint> methods,
FingerprintRequest request)
{
var classMatches = ClassDefRegex().Matches(content);
foreach (Match classMatch in classMatches)
{
var className = classMatch.Groups[1].Value;
var classLineNumber = GetLineNumber(content, classMatch.Index);
var classIndent = GetIndentation(lines[classLineNumber - 1]);
// Find all methods in this class
var methodMatches = MethodDefRegex().Matches(content);
foreach (Match methodMatch in methodMatches)
{
var methodLineNumber = GetLineNumber(content, methodMatch.Index);
// Check if this method belongs to this class
if (methodLineNumber <= classLineNumber)
continue;
var methodIndent = methodMatch.Groups[1].Value.Length;
// Method should be indented one level from class
if (methodIndent <= classIndent)
break; // We've left the class
// Check if there's another class between
var nextClassMatch = classMatches
.Cast<Match>()
.FirstOrDefault(m => GetLineNumber(content, m.Index) > classLineNumber &&
GetLineNumber(content, m.Index) < methodLineNumber);
if (nextClassMatch is not null)
continue;
var isAsync = !string.IsNullOrEmpty(methodMatch.Groups[2].Value);
var methodName = methodMatch.Groups[3].Value;
var parameters = methodMatch.Groups[4].Value.Trim();
// Skip private methods unless requested
if (!request.IncludePrivateMethods && methodName.StartsWith('_') && !methodName.StartsWith("__"))
continue;
var bodyHash = ComputeFunctionBodyHash(lines, methodLineNumber - 1, methodIndent);
var methodKey = $"{moduleName}.{className}::{methodName}({NormalizeParams(parameters)})";
// Determine visibility
var isPublic = !methodName.StartsWith('_') || methodName.StartsWith("__") && methodName.EndsWith("__");
methods[methodKey] = new MethodFingerprint
{
MethodKey = methodKey,
DeclaringType = $"{moduleName}.{className}",
Name = methodName,
Signature = $"{(isAsync ? "async " : "")}def {methodName}({parameters})",
BodyHash = bodyHash,
IsPublic = isPublic,
SourceFile = filePath,
LineNumber = methodLineNumber
};
}
}
}
private static string GetModuleName(string relativePath)
{
// Convert path to module name: src/utils/helper.py -> src.utils.helper
var withoutExt = Path.ChangeExtension(relativePath, null);
var moduleName = withoutExt
.Replace(Path.DirectorySeparatorChar, '.')
.Replace(Path.AltDirectorySeparatorChar, '.');
// Remove __init__ from module name
if (moduleName.EndsWith(".__init__"))
{
moduleName = moduleName[..^9];
}
return moduleName;
}
private static string NormalizeParams(string parameters)
{
if (string.IsNullOrWhiteSpace(parameters))
return "";
// Remove type hints and default values, keep param names
var normalized = string.Join(",", parameters
.Split(',')
.Select(p =>
{
// Remove type hints (param: Type)
var colonIndex = p.IndexOf(':');
if (colonIndex > 0)
p = p[..colonIndex];
// Remove default values (param=value)
var equalsIndex = p.IndexOf('=');
if (equalsIndex > 0)
p = p[..equalsIndex];
return p.Trim();
})
.Where(p => !string.IsNullOrEmpty(p)));
return normalized;
}
private static string ComputeFunctionBodyHash(string[] lines, int defLineIndex, int baseIndent)
{
var sb = new StringBuilder();
// Find the function body indent
var bodyIndent = -1;
var inDocstring = false;
var docstringQuotes = "";
for (var i = defLineIndex + 1; i < lines.Length; i++)
{
var line = lines[i];
var trimmedLine = line.TrimStart();
// Skip empty lines
if (string.IsNullOrWhiteSpace(line))
{
if (bodyIndent > 0)
sb.AppendLine();
continue;
}
var currentIndent = GetIndentation(line);
// First non-empty line determines body indent
if (bodyIndent < 0)
{
if (currentIndent <= baseIndent)
break; // No body found
bodyIndent = currentIndent;
}
else if (currentIndent <= baseIndent && !string.IsNullOrWhiteSpace(trimmedLine))
{
// We've left the function body
break;
}
// Handle docstrings
if (trimmedLine.StartsWith("\"\"\"") || trimmedLine.StartsWith("'''"))
{
docstringQuotes = trimmedLine[..3];
if (!inDocstring)
{
inDocstring = true;
if (trimmedLine.Length > 3 && trimmedLine.EndsWith(docstringQuotes))
{
inDocstring = false;
}
continue; // Skip docstring lines
}
}
if (inDocstring)
{
if (trimmedLine.Contains(docstringQuotes))
{
inDocstring = false;
}
continue;
}
// Skip comments
if (trimmedLine.StartsWith('#'))
continue;
// Add normalized line to hash input
sb.AppendLine(NormalizeLine(trimmedLine));
}
return ComputeHash(sb.ToString());
}
private static string NormalizeLine(string line)
{
// Remove inline comments
var commentIndex = -1;
var inString = false;
var stringChar = '\0';
for (var i = 0; i < line.Length; i++)
{
var c = line[i];
if (inString)
{
if (c == stringChar && (i == 0 || line[i - 1] != '\\'))
inString = false;
continue;
}
if (c is '"' or '\'')
{
inString = true;
stringChar = c;
continue;
}
if (c == '#')
{
commentIndex = i;
break;
}
}
if (commentIndex > 0)
line = line[..commentIndex];
// Normalize whitespace
return line.Trim();
}
private static int GetIndentation(string line)
{
var indent = 0;
foreach (var c in line)
{
if (c == ' ') indent++;
else if (c == '\t') indent += 4;
else break;
}
return indent;
}
private static int GetLineNumber(string content, int index)
{
var lineNumber = 1;
for (var i = 0; i < index && i < content.Length; i++)
{
if (content[i] == '\n')
lineNumber++;
}
return lineNumber;
}
private static string ComputeHash(string content)
{
if (string.IsNullOrWhiteSpace(content))
return "empty";
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(content));
return Convert.ToHexStringLower(bytes[..16]);
}
}

View File

@@ -0,0 +1,161 @@
// -----------------------------------------------------------------------------
// DotNetMethodKeyBuilder.cs
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core (SURF-012)
// Description: Method key builder for .NET/NuGet packages.
// -----------------------------------------------------------------------------
using System.Text;
using System.Text.RegularExpressions;
namespace StellaOps.Scanner.VulnSurfaces.MethodKeys;
/// <summary>
/// Builds normalized method keys for .NET assemblies.
/// Format: Namespace.TypeName::MethodName(ParamType1,ParamType2)
/// </summary>
public sealed partial class DotNetMethodKeyBuilder : IMethodKeyBuilder
{
// Pattern: Namespace.Type::Method(params)
[GeneratedRegex(@"^(?:(.+)\.)?([^:.]+)::([^(]+)\(([^)]*)\)$", RegexOptions.Compiled)]
private static partial Regex MethodKeyPattern();
/// <inheritdoc />
public string Ecosystem => "nuget";
/// <inheritdoc />
public string BuildKey(MethodKeyRequest request)
{
ArgumentNullException.ThrowIfNull(request);
var sb = new StringBuilder();
// Namespace.TypeName
if (!string.IsNullOrEmpty(request.Namespace))
{
sb.Append(NormalizeNamespace(request.Namespace));
if (!string.IsNullOrEmpty(request.TypeName))
{
sb.Append('.');
}
}
if (!string.IsNullOrEmpty(request.TypeName))
{
sb.Append(NormalizeTypeName(request.TypeName));
}
// ::MethodName
sb.Append("::");
sb.Append(NormalizeMethodName(request.MethodName));
// (ParamTypes)
sb.Append('(');
if (request.ParameterTypes is { Count: > 0 })
{
sb.Append(string.Join(",", request.ParameterTypes.Select(NormalizeTypeName)));
}
sb.Append(')');
return sb.ToString();
}
/// <inheritdoc />
public MethodKeyComponents? ParseKey(string methodKey)
{
if (string.IsNullOrEmpty(methodKey))
return null;
var match = MethodKeyPattern().Match(methodKey);
if (!match.Success)
return null;
var namespacePart = match.Groups[1].Value;
var typeName = match.Groups[2].Value;
var methodName = match.Groups[3].Value;
var parameters = match.Groups[4].Value;
var paramTypes = string.IsNullOrEmpty(parameters)
? []
: parameters.Split(',').Select(p => p.Trim()).ToList();
return new MethodKeyComponents
{
Namespace = string.IsNullOrEmpty(namespacePart) ? null : namespacePart,
TypeName = typeName,
MethodName = methodName,
ParameterTypes = paramTypes
};
}
/// <inheritdoc />
public string NormalizeKey(string methodKey)
{
var components = ParseKey(methodKey);
if (components is null)
return methodKey;
return BuildKey(new MethodKeyRequest
{
Namespace = components.Namespace,
TypeName = components.TypeName,
MethodName = components.MethodName,
ParameterTypes = components.ParameterTypes?.ToList()
});
}
private static string NormalizeNamespace(string ns)
{
// Remove generic arity markers
return ns.Replace("`1", "").Replace("`2", "").Replace("`3", "").Replace("`4", "");
}
private static string NormalizeTypeName(string typeName)
{
// Normalize common type aliases
var normalized = typeName switch
{
"System.String" or "string" => "String",
"System.Int32" or "int" => "Int32",
"System.Int64" or "long" => "Int64",
"System.Boolean" or "bool" => "Boolean",
"System.Double" or "double" => "Double",
"System.Single" or "float" => "Single",
"System.Void" or "void" => "Void",
"System.Object" or "object" => "Object",
"System.Byte" or "byte" => "Byte",
"System.Char" or "char" => "Char",
"System.Decimal" or "decimal" => "Decimal",
_ => typeName
};
// Remove generic arity and simplify
var arityIndex = normalized.IndexOf('`');
if (arityIndex > 0)
{
normalized = normalized[..arityIndex];
}
// Use simple name for common BCL types (e.g., System.String -> String)
if (normalized.StartsWith("System.", StringComparison.Ordinal))
{
var afterSystem = normalized[7..];
if (!afterSystem.Contains('.'))
{
normalized = afterSystem;
}
}
return normalized;
}
private static string NormalizeMethodName(string methodName)
{
// Normalize common method name variations
return methodName switch
{
".ctor" => ".ctor",
".cctor" => ".cctor",
_ => methodName
};
}
}

View File

@@ -0,0 +1,111 @@
// -----------------------------------------------------------------------------
// IMethodKeyBuilder.cs
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core (SURF-012)
// Description: Interface for building normalized method keys per ecosystem.
// -----------------------------------------------------------------------------
namespace StellaOps.Scanner.VulnSurfaces.MethodKeys;
/// <summary>
/// Builds normalized method keys for cross-ecosystem comparison.
/// Method keys provide a stable, canonical identifier for methods
/// that can be used for diffing between package versions.
/// </summary>
public interface IMethodKeyBuilder
{
/// <summary>
/// Ecosystem this builder handles.
/// </summary>
string Ecosystem { get; }
/// <summary>
/// Builds a normalized method key from components.
/// </summary>
/// <param name="request">Method key request with components.</param>
/// <returns>Normalized method key.</returns>
string BuildKey(MethodKeyRequest request);
/// <summary>
/// Parses a method key back into components.
/// </summary>
/// <param name="methodKey">The method key to parse.</param>
/// <returns>Parsed components or null if invalid.</returns>
MethodKeyComponents? ParseKey(string methodKey);
/// <summary>
/// Normalizes a method key to canonical form.
/// </summary>
/// <param name="methodKey">The method key to normalize.</param>
/// <returns>Normalized method key.</returns>
string NormalizeKey(string methodKey);
}
/// <summary>
/// Request to build a method key.
/// </summary>
public sealed record MethodKeyRequest
{
/// <summary>
/// Namespace or package path.
/// </summary>
public string? Namespace { get; init; }
/// <summary>
/// Type or class name.
/// </summary>
public string? TypeName { get; init; }
/// <summary>
/// Method or function name.
/// </summary>
public required string MethodName { get; init; }
/// <summary>
/// Parameter types (type names only).
/// </summary>
public IReadOnlyList<string>? ParameterTypes { get; init; }
/// <summary>
/// Return type.
/// </summary>
public string? ReturnType { get; init; }
/// <summary>
/// Whether to include return type in key (for overload resolution).
/// </summary>
public bool IncludeReturnType { get; init; }
}
/// <summary>
/// Parsed components of a method key.
/// </summary>
public sealed record MethodKeyComponents
{
/// <summary>
/// Full namespace path.
/// </summary>
public string? Namespace { get; init; }
/// <summary>
/// Type/class name.
/// </summary>
public string? TypeName { get; init; }
/// <summary>
/// Method/function name.
/// </summary>
public required string MethodName { get; init; }
/// <summary>
/// Parameter type names.
/// </summary>
public IReadOnlyList<string>? ParameterTypes { get; init; }
/// <summary>
/// Full qualified name (namespace.type::method).
/// </summary>
public string FullQualifiedName =>
string.IsNullOrEmpty(Namespace)
? (string.IsNullOrEmpty(TypeName) ? MethodName : $"{TypeName}::{MethodName}")
: (string.IsNullOrEmpty(TypeName) ? $"{Namespace}::{MethodName}" : $"{Namespace}.{TypeName}::{MethodName}");
}

View File

@@ -0,0 +1,212 @@
// -----------------------------------------------------------------------------
// JavaMethodKeyBuilder.cs
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core (SURF-012)
// Description: Method key builder for Java/Maven packages.
// -----------------------------------------------------------------------------
using System.Text;
using System.Text.RegularExpressions;
namespace StellaOps.Scanner.VulnSurfaces.MethodKeys;
/// <summary>
/// Builds normalized method keys for Java classes.
/// Format: com.package.ClassName::methodName(ParamType1,ParamType2)
/// </summary>
public sealed partial class JavaMethodKeyBuilder : IMethodKeyBuilder
{
// Pattern: package.ClassName::methodName(descriptor)
[GeneratedRegex(@"^([^:]+)::([^(]+)(\([^)]*\).*)$", RegexOptions.Compiled)]
private static partial Regex MethodKeyPattern();
/// <inheritdoc />
public string Ecosystem => "maven";
/// <inheritdoc />
public string BuildKey(MethodKeyRequest request)
{
ArgumentNullException.ThrowIfNull(request);
var sb = new StringBuilder();
// Package.ClassName
if (!string.IsNullOrEmpty(request.Namespace))
{
sb.Append(NormalizePackage(request.Namespace));
sb.Append('.');
}
if (!string.IsNullOrEmpty(request.TypeName))
{
sb.Append(request.TypeName);
}
// ::methodName
sb.Append("::");
sb.Append(NormalizeMethodName(request.MethodName));
// (ParamTypes) - using Java descriptor format
sb.Append('(');
if (request.ParameterTypes is { Count: > 0 })
{
sb.Append(string.Join(",", request.ParameterTypes.Select(NormalizeTypeName)));
}
sb.Append(')');
return sb.ToString();
}
/// <inheritdoc />
public MethodKeyComponents? ParseKey(string methodKey)
{
if (string.IsNullOrEmpty(methodKey))
return null;
var match = MethodKeyPattern().Match(methodKey);
if (!match.Success)
return null;
var fullClassName = match.Groups[1].Value;
var methodName = match.Groups[2].Value;
var descriptor = match.Groups[3].Value;
// Split package from class name
string? packageName = null;
var typeName = fullClassName;
var lastDot = fullClassName.LastIndexOf('.');
if (lastDot > 0)
{
packageName = fullClassName[..lastDot];
typeName = fullClassName[(lastDot + 1)..];
}
// Parse descriptor to get parameter types
var paramTypes = ParseDescriptor(descriptor);
return new MethodKeyComponents
{
Namespace = packageName,
TypeName = typeName,
MethodName = methodName,
ParameterTypes = paramTypes
};
}
/// <inheritdoc />
public string NormalizeKey(string methodKey)
{
var components = ParseKey(methodKey);
if (components is null)
return methodKey;
return BuildKey(new MethodKeyRequest
{
Namespace = components.Namespace,
TypeName = components.TypeName,
MethodName = components.MethodName,
ParameterTypes = components.ParameterTypes?.ToList()
});
}
private static string NormalizePackage(string package)
{
// Java packages are lowercase
return package.ToLowerInvariant();
}
private static string NormalizeMethodName(string methodName)
{
// Handle constructor and static initializer
return methodName switch
{
"<init>" => "<init>",
"<clinit>" => "<clinit>",
_ => methodName
};
}
private static string NormalizeTypeName(string typeName)
{
// Simplify common Java types
return typeName switch
{
"java.lang.String" => "String",
"java.lang.Object" => "Object",
"java.lang.Integer" => "Integer",
"java.lang.Long" => "Long",
"java.lang.Boolean" => "Boolean",
"java.lang.Double" => "Double",
"java.lang.Float" => "Float",
"java.lang.Byte" => "Byte",
"java.lang.Short" => "Short",
"java.lang.Character" => "Character",
"java.util.List" => "List",
"java.util.Map" => "Map",
"java.util.Set" => "Set",
_ => typeName.Contains('.') ? typeName.Split('.')[^1] : typeName
};
}
private static List<string> ParseDescriptor(string descriptor)
{
var result = new List<string>();
if (string.IsNullOrEmpty(descriptor) || !descriptor.StartsWith('('))
return result;
var i = 1; // Skip opening paren
while (i < descriptor.Length && descriptor[i] != ')')
{
var (typeName, newIndex) = ParseTypeDescriptor(descriptor, i);
if (!string.IsNullOrEmpty(typeName))
{
result.Add(typeName);
}
i = newIndex;
}
return result;
}
private static (string typeName, int newIndex) ParseTypeDescriptor(string descriptor, int index)
{
if (index >= descriptor.Length)
return (string.Empty, index);
var c = descriptor[index];
return c switch
{
'B' => ("byte", index + 1),
'C' => ("char", index + 1),
'D' => ("double", index + 1),
'F' => ("float", index + 1),
'I' => ("int", index + 1),
'J' => ("long", index + 1),
'S' => ("short", index + 1),
'Z' => ("boolean", index + 1),
'V' => ("void", index + 1),
'[' => ParseArrayDescriptor(descriptor, index),
'L' => ParseObjectDescriptor(descriptor, index),
_ => (string.Empty, index + 1)
};
}
private static (string typeName, int newIndex) ParseArrayDescriptor(string descriptor, int index)
{
var (elementType, newIndex) = ParseTypeDescriptor(descriptor, index + 1);
return ($"{elementType}[]", newIndex);
}
private static (string typeName, int newIndex) ParseObjectDescriptor(string descriptor, int index)
{
var semicolonIndex = descriptor.IndexOf(';', index);
if (semicolonIndex < 0)
return ("Object", index + 1);
var className = descriptor[(index + 1)..semicolonIndex];
var simpleName = className.Split('/')[^1];
return (simpleName, semicolonIndex + 1);
}
}

View File

@@ -0,0 +1,149 @@
// -----------------------------------------------------------------------------
// NodeMethodKeyBuilder.cs
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core (SURF-012)
// Description: Method key builder for Node.js/npm packages.
// -----------------------------------------------------------------------------
using System.Text;
using System.Text.RegularExpressions;
namespace StellaOps.Scanner.VulnSurfaces.MethodKeys;
/// <summary>
/// Builds normalized method keys for JavaScript/Node.js modules.
/// Format: module.path::functionName(param1,param2) or module.path.ClassName::methodName(params)
/// </summary>
public sealed partial class NodeMethodKeyBuilder : IMethodKeyBuilder
{
// Pattern: module.path[.ClassName]::methodName(params)
[GeneratedRegex(@"^([^:]+)::([^(]+)\(([^)]*)\)$", RegexOptions.Compiled)]
private static partial Regex MethodKeyPattern();
/// <inheritdoc />
public string Ecosystem => "npm";
/// <inheritdoc />
public string BuildKey(MethodKeyRequest request)
{
ArgumentNullException.ThrowIfNull(request);
var sb = new StringBuilder();
// Module path
if (!string.IsNullOrEmpty(request.Namespace))
{
sb.Append(NormalizeModulePath(request.Namespace));
}
// Class name (if any)
if (!string.IsNullOrEmpty(request.TypeName))
{
if (sb.Length > 0)
{
sb.Append('.');
}
sb.Append(request.TypeName);
}
// ::functionName
sb.Append("::");
sb.Append(request.MethodName);
// (params)
sb.Append('(');
if (request.ParameterTypes is { Count: > 0 })
{
sb.Append(string.Join(",", request.ParameterTypes));
}
sb.Append(')');
return sb.ToString();
}
/// <inheritdoc />
public MethodKeyComponents? ParseKey(string methodKey)
{
if (string.IsNullOrEmpty(methodKey))
return null;
var match = MethodKeyPattern().Match(methodKey);
if (!match.Success)
return null;
var modulePath = match.Groups[1].Value;
var methodName = match.Groups[2].Value;
var parameters = match.Groups[3].Value;
// Try to extract class name from module path
string? typeName = null;
var lastDot = modulePath.LastIndexOf('.');
if (lastDot > 0)
{
var lastPart = modulePath[(lastDot + 1)..];
// Check if it looks like a class name (starts with uppercase)
if (char.IsUpper(lastPart[0]))
{
typeName = lastPart;
modulePath = modulePath[..lastDot];
}
}
var paramTypes = string.IsNullOrEmpty(parameters)
? []
: parameters.Split(',').Select(p => p.Trim()).ToList();
return new MethodKeyComponents
{
Namespace = modulePath,
TypeName = typeName,
MethodName = methodName,
ParameterTypes = paramTypes
};
}
/// <inheritdoc />
public string NormalizeKey(string methodKey)
{
var components = ParseKey(methodKey);
if (components is null)
return methodKey;
return BuildKey(new MethodKeyRequest
{
Namespace = components.Namespace,
TypeName = components.TypeName,
MethodName = components.MethodName,
ParameterTypes = components.ParameterTypes?.ToList()
});
}
private static string NormalizeModulePath(string path)
{
// Normalize path separators and common patterns
var normalized = path
.Replace('/', '.')
.Replace('\\', '.')
.Replace("..", ".");
// Remove leading/trailing dots
normalized = normalized.Trim('.');
// Remove 'index' from module paths
if (normalized.EndsWith(".index", StringComparison.OrdinalIgnoreCase))
{
normalized = normalized[..^6];
}
// Remove common prefixes like 'src.' or 'lib.'
foreach (var prefix in new[] { "src.", "lib.", "dist." })
{
if (normalized.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
{
normalized = normalized[prefix.Length..];
break;
}
}
return normalized;
}
}

View File

@@ -0,0 +1,165 @@
// -----------------------------------------------------------------------------
// PythonMethodKeyBuilder.cs
// Sprint: SPRINT_3700_0002_0001_vuln_surfaces_core (SURF-012)
// Description: Method key builder for Python/PyPI packages.
// -----------------------------------------------------------------------------
using System.Text;
using System.Text.RegularExpressions;
namespace StellaOps.Scanner.VulnSurfaces.MethodKeys;
/// <summary>
/// Builds normalized method keys for Python modules.
/// Format: package.module.ClassName::method_name(param1,param2) or package.module::function_name(params)
/// </summary>
public sealed partial class PythonMethodKeyBuilder : IMethodKeyBuilder
{
// Pattern: module.path[.ClassName]::function_name(params)
[GeneratedRegex(@"^([^:]+)::([^(]+)\(([^)]*)\)$", RegexOptions.Compiled)]
private static partial Regex MethodKeyPattern();
/// <inheritdoc />
public string Ecosystem => "pypi";
/// <inheritdoc />
public string BuildKey(MethodKeyRequest request)
{
ArgumentNullException.ThrowIfNull(request);
var sb = new StringBuilder();
// Module path
if (!string.IsNullOrEmpty(request.Namespace))
{
sb.Append(NormalizeModulePath(request.Namespace));
}
// Class name (if any)
if (!string.IsNullOrEmpty(request.TypeName))
{
if (sb.Length > 0)
{
sb.Append('.');
}
sb.Append(request.TypeName);
}
// ::function_name
sb.Append("::");
sb.Append(NormalizeFunctionName(request.MethodName));
// (params) - just param names for Python
sb.Append('(');
if (request.ParameterTypes is { Count: > 0 })
{
sb.Append(string.Join(",", request.ParameterTypes));
}
sb.Append(')');
return sb.ToString();
}
/// <inheritdoc />
public MethodKeyComponents? ParseKey(string methodKey)
{
if (string.IsNullOrEmpty(methodKey))
return null;
var match = MethodKeyPattern().Match(methodKey);
if (!match.Success)
return null;
var modulePath = match.Groups[1].Value;
var functionName = match.Groups[2].Value;
var parameters = match.Groups[3].Value;
// Try to extract class name from module path
string? typeName = null;
var lastDot = modulePath.LastIndexOf('.');
if (lastDot > 0)
{
var lastPart = modulePath[(lastDot + 1)..];
// Check if it looks like a class name (starts with uppercase)
if (lastPart.Length > 0 && char.IsUpper(lastPart[0]))
{
typeName = lastPart;
modulePath = modulePath[..lastDot];
}
}
var paramNames = string.IsNullOrEmpty(parameters)
? []
: parameters.Split(',').Select(p => p.Trim()).ToList();
return new MethodKeyComponents
{
Namespace = modulePath,
TypeName = typeName,
MethodName = functionName,
ParameterTypes = paramNames
};
}
/// <inheritdoc />
public string NormalizeKey(string methodKey)
{
var components = ParseKey(methodKey);
if (components is null)
return methodKey;
return BuildKey(new MethodKeyRequest
{
Namespace = components.Namespace,
TypeName = components.TypeName,
MethodName = components.MethodName,
ParameterTypes = components.ParameterTypes?.ToList()
});
}
private static string NormalizeModulePath(string path)
{
// Python module paths use dots
var normalized = path
.Replace('/', '.')
.Replace('\\', '.')
.Replace("..", ".");
// Remove leading/trailing dots
normalized = normalized.Trim('.');
// Remove __init__ from module paths
if (normalized.EndsWith(".__init__", StringComparison.OrdinalIgnoreCase))
{
normalized = normalized[..^9];
}
// Normalize common variations
normalized = normalized
.Replace("_", "_"); // Keep underscores as-is
return normalized;
}
private static string NormalizeFunctionName(string name)
{
// Python method names
return name switch
{
"__init__" => "__init__",
"__new__" => "__new__",
"__del__" => "__del__",
"__str__" => "__str__",
"__repr__" => "__repr__",
"__call__" => "__call__",
"__getitem__" => "__getitem__",
"__setitem__" => "__setitem__",
"__len__" => "__len__",
"__iter__" => "__iter__",
"__next__" => "__next__",
"__enter__" => "__enter__",
"__exit__" => "__exit__",
_ => name
};
}
}

View File

@@ -15,6 +15,7 @@
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="Mono.Cecil" Version="0.11.6" />
<PackageReference Include="Npgsql" Version="9.0.3" />
<PackageReference Include="SharpCompress" Version="0.41.0" />
</ItemGroup>
<ItemGroup>

View File

@@ -4,6 +4,10 @@ using Xunit;
namespace StellaOps.Scanner.CallGraph.Tests;
/// <summary>
/// Tests for <see cref="ReachabilityAnalyzer"/>.
/// Sprint: SPRINT_3700_0001_0001 (WIT-007A) - determinism contract tests.
/// </summary>
public class ReachabilityAnalyzerTests
{
[Fact]
@@ -63,4 +67,321 @@ public class ReachabilityAnalyzerTests
Assert.Empty(result.Paths);
Assert.False(string.IsNullOrWhiteSpace(result.ResultDigest));
}
/// <summary>
/// WIT-007A: Verify deterministic path ordering (SinkId ASC, EntrypointId ASC, PathLength ASC).
/// </summary>
[Fact]
public void Analyze_PathsAreDeterministicallyOrdered_BySinkIdThenEntrypointIdThenLength()
{
// Arrange: create graph with multiple entrypoints and sinks
var entry1 = "entry:aaa";
var entry2 = "entry:bbb";
var mid1 = "mid:001";
var mid2 = "mid:002";
var sink1 = "sink:zzz"; // lexicographically last
var sink2 = "sink:aaa"; // lexicographically first
var snapshot = new CallGraphSnapshot(
ScanId: "scan-1",
GraphDigest: "sha256:test",
Language: "dotnet",
ExtractedAt: DateTimeOffset.UtcNow,
Nodes:
[
new CallGraphNode(entry1, "Entry1", "f.cs", 1, "app", Visibility.Public, true, EntrypointType.HttpHandler, false, null),
new CallGraphNode(entry2, "Entry2", "f.cs", 2, "app", Visibility.Public, true, EntrypointType.HttpHandler, false, null),
new CallGraphNode(mid1, "Mid1", "f.cs", 3, "app", Visibility.Public, false, null, false, null),
new CallGraphNode(mid2, "Mid2", "f.cs", 4, "app", Visibility.Public, false, null, false, null),
new CallGraphNode(sink1, "Sink1", "f.cs", 5, "lib", Visibility.Public, false, null, true, StellaOps.Scanner.Reachability.SinkCategory.CmdExec),
new CallGraphNode(sink2, "Sink2", "f.cs", 6, "lib", Visibility.Public, false, null, true, StellaOps.Scanner.Reachability.SinkCategory.SqlRaw),
],
Edges:
[
// entry1 -> mid1 -> sink2 (path length 3)
new CallGraphEdge(entry1, mid1, CallKind.Direct),
new CallGraphEdge(mid1, sink2, CallKind.Direct),
// entry2 -> sink1 (path length 2, shorter)
new CallGraphEdge(entry2, sink1, CallKind.Direct),
],
EntrypointIds: [entry2, entry1], // deliberately out of order
SinkIds: [sink1, sink2]); // deliberately out of order
var analyzer = new ReachabilityAnalyzer();
// Act
var result = analyzer.Analyze(snapshot);
// Assert: paths should be ordered by SinkId ASC
Assert.Equal(2, result.Paths.Length);
Assert.Equal(sink2, result.Paths[0].SinkId); // "sink:aaa" comes before "sink:zzz"
Assert.Equal(sink1, result.Paths[1].SinkId);
}
/// <summary>
/// WIT-007A: Verify that multiple runs produce identical results (determinism).
/// </summary>
[Fact]
public void Analyze_ProducesIdenticalResults_OnMultipleRuns()
{
var entry = "entry:test";
var mid = "mid:test";
var sink = "sink:test";
var snapshot = new CallGraphSnapshot(
ScanId: "scan-1",
GraphDigest: "sha256:test",
Language: "dotnet",
ExtractedAt: DateTimeOffset.UtcNow,
Nodes:
[
new CallGraphNode(entry, "Entry", "f.cs", 1, "app", Visibility.Public, true, EntrypointType.HttpHandler, false, null),
new CallGraphNode(mid, "Mid", "f.cs", 2, "app", Visibility.Public, false, null, false, null),
new CallGraphNode(sink, "Sink", "f.cs", 3, "lib", Visibility.Public, false, null, true, StellaOps.Scanner.Reachability.SinkCategory.CmdExec),
],
Edges:
[
new CallGraphEdge(entry, mid, CallKind.Direct),
new CallGraphEdge(mid, sink, CallKind.Direct),
],
EntrypointIds: [entry],
SinkIds: [sink]);
var analyzer = new ReachabilityAnalyzer();
// Act: run analysis multiple times
var result1 = analyzer.Analyze(snapshot);
var result2 = analyzer.Analyze(snapshot);
var result3 = analyzer.Analyze(snapshot);
// Assert: all results should have identical digests (determinism proof)
Assert.Equal(result1.ResultDigest, result2.ResultDigest);
Assert.Equal(result2.ResultDigest, result3.ResultDigest);
Assert.Equal(result1.Paths.Length, result2.Paths.Length);
}
/// <summary>
/// WIT-007A: Verify MaxTotalPaths limit is enforced.
/// </summary>
[Fact]
public void Analyze_WithOptions_RespectsMaxTotalPathsLimit()
{
// Arrange: create graph with 5 sinks reachable from 1 entrypoint
var entry = "entry:test";
var nodes = new List<CallGraphNode>
{
new(entry, "Entry", "f.cs", 1, "app", Visibility.Public, true, EntrypointType.HttpHandler, false, null),
};
var edges = new List<CallGraphEdge>();
var sinks = new List<string>();
for (int i = 0; i < 5; i++)
{
var sink = $"sink:{i:D3}";
sinks.Add(sink);
nodes.Add(new CallGraphNode(sink, $"Sink{i}", "f.cs", i + 10, "lib", Visibility.Public, false, null, true, StellaOps.Scanner.Reachability.SinkCategory.CmdExec));
edges.Add(new CallGraphEdge(entry, sink, CallKind.Direct));
}
var snapshot = new CallGraphSnapshot(
ScanId: "scan-1",
GraphDigest: "sha256:test",
Language: "dotnet",
ExtractedAt: DateTimeOffset.UtcNow,
Nodes: nodes.ToImmutableArray(),
Edges: edges.ToImmutableArray(),
EntrypointIds: [entry],
SinkIds: sinks.ToImmutableArray());
var options = new ReachabilityAnalysisOptions { MaxTotalPaths = 3 };
var analyzer = new ReachabilityAnalyzer(null, options);
// Act
var result = analyzer.Analyze(snapshot);
// Assert: should only return MaxTotalPaths paths
Assert.Equal(3, result.Paths.Length);
}
/// <summary>
/// WIT-007A: Verify MaxDepth limit is enforced.
/// </summary>
[Fact]
public void Analyze_WithOptions_RespectsMaxDepthLimit()
{
// Arrange: create a chain of 10 nodes
var nodes = new List<CallGraphNode>();
var edges = new List<CallGraphEdge>();
for (int i = 0; i < 10; i++)
{
var nodeId = $"node:{i:D3}";
var isEntry = i == 0;
var isSink = i == 9;
nodes.Add(new CallGraphNode(nodeId, $"Node{i}", "f.cs", i, "app", Visibility.Public, isEntry, isEntry ? EntrypointType.HttpHandler : null, isSink, isSink ? StellaOps.Scanner.Reachability.SinkCategory.CmdExec : null));
if (i > 0)
{
edges.Add(new CallGraphEdge($"node:{(i-1):D3}", nodeId, CallKind.Direct));
}
}
var snapshot = new CallGraphSnapshot(
ScanId: "scan-1",
GraphDigest: "sha256:test",
Language: "dotnet",
ExtractedAt: DateTimeOffset.UtcNow,
Nodes: nodes.ToImmutableArray(),
Edges: edges.ToImmutableArray(),
EntrypointIds: ["node:000"],
SinkIds: ["node:009"]);
// With MaxDepth=5, the sink at depth 9 should not be reachable
var options = new ReachabilityAnalysisOptions { MaxDepth = 5 };
var analyzer = new ReachabilityAnalyzer(null, options);
// Act
var result = analyzer.Analyze(snapshot);
// Assert: sink should not be reachable due to depth limit
Assert.Empty(result.ReachableSinkIds);
Assert.Empty(result.Paths);
}
/// <summary>
/// WIT-007A: Verify node IDs in paths are ordered from entrypoint to sink.
/// </summary>
[Fact]
public void Analyze_PathNodeIds_AreOrderedFromEntrypointToSink()
{
var entry = "entry:start";
var mid1 = "mid:step1";
var mid2 = "mid:step2";
var sink = "sink:end";
var snapshot = new CallGraphSnapshot(
ScanId: "scan-1",
GraphDigest: "sha256:test",
Language: "dotnet",
ExtractedAt: DateTimeOffset.UtcNow,
Nodes:
[
new CallGraphNode(entry, "Entry", "f.cs", 1, "app", Visibility.Public, true, EntrypointType.HttpHandler, false, null),
new CallGraphNode(mid1, "Mid1", "f.cs", 2, "app", Visibility.Public, false, null, false, null),
new CallGraphNode(mid2, "Mid2", "f.cs", 3, "app", Visibility.Public, false, null, false, null),
new CallGraphNode(sink, "Sink", "f.cs", 4, "lib", Visibility.Public, false, null, true, StellaOps.Scanner.Reachability.SinkCategory.CmdExec),
],
Edges:
[
new CallGraphEdge(entry, mid1, CallKind.Direct),
new CallGraphEdge(mid1, mid2, CallKind.Direct),
new CallGraphEdge(mid2, sink, CallKind.Direct),
],
EntrypointIds: [entry],
SinkIds: [sink]);
var analyzer = new ReachabilityAnalyzer();
// Act
var result = analyzer.Analyze(snapshot);
// Assert: path should start with entry and end with sink
Assert.Single(result.Paths);
var path = result.Paths[0];
Assert.Equal(4, path.NodeIds.Length);
Assert.Equal(entry, path.NodeIds[0]); // First: entrypoint
Assert.Equal(mid1, path.NodeIds[1]);
Assert.Equal(mid2, path.NodeIds[2]);
Assert.Equal(sink, path.NodeIds[3]); // Last: sink
}
/// <summary>
/// WIT-007B: Verify ExplicitSinks option allows targeting specific sinks not in snapshot.SinkIds.
/// </summary>
[Fact]
public void Analyze_WithExplicitSinks_FindsPathsToSpecifiedSinksOnly()
{
// Arrange: graph with 3 reachable nodes, only 1 is in snapshot.SinkIds
var entry = "entry:start";
var mid = "mid:step";
var snapshotSink = "sink:in-snapshot";
var explicitSink = "sink:explicit-target"; // Not in snapshot.SinkIds
var snapshot = new CallGraphSnapshot(
ScanId: "scan-1",
GraphDigest: "sha256:test",
Language: "dotnet",
ExtractedAt: DateTimeOffset.UtcNow,
Nodes:
[
new CallGraphNode(entry, "Entry", "f.cs", 1, "app", Visibility.Public, true, EntrypointType.HttpHandler, false, null),
new CallGraphNode(mid, "Mid", "f.cs", 2, "app", Visibility.Public, false, null, false, null),
new CallGraphNode(snapshotSink, "SnapshotSink", "f.cs", 3, "lib", Visibility.Public, false, null, true, StellaOps.Scanner.Reachability.SinkCategory.CmdExec),
new CallGraphNode(explicitSink, "ExplicitSink", "f.cs", 4, "lib", Visibility.Public, false, null, false, null), // Not marked as sink
],
Edges:
[
new CallGraphEdge(entry, mid, CallKind.Direct),
new CallGraphEdge(mid, snapshotSink, CallKind.Direct),
new CallGraphEdge(mid, explicitSink, CallKind.Direct),
],
EntrypointIds: [entry],
SinkIds: [snapshotSink]); // Only snapshotSink is in the default sink list
// Use ExplicitSinks to target the non-sink node as if it were a trigger method
var options = new ReachabilityAnalysisOptions
{
ExplicitSinks = [explicitSink]
};
var analyzer = new ReachabilityAnalyzer(null, options);
// Act
var result = analyzer.Analyze(snapshot);
// Assert: should find path to explicit sink only, not the snapshot sink
Assert.Single(result.ReachableSinkIds);
Assert.Equal(explicitSink, result.ReachableSinkIds[0]);
Assert.Single(result.Paths);
Assert.Equal(explicitSink, result.Paths[0].SinkId);
}
/// <summary>
/// WIT-007B: Verify ExplicitSinks with empty array falls back to snapshot sinks.
/// </summary>
[Fact]
public void Analyze_WithEmptyExplicitSinks_UsesSnapshotSinks()
{
var entry = "entry:start";
var sink = "sink:default";
var snapshot = new CallGraphSnapshot(
ScanId: "scan-1",
GraphDigest: "sha256:test",
Language: "dotnet",
ExtractedAt: DateTimeOffset.UtcNow,
Nodes:
[
new CallGraphNode(entry, "Entry", "f.cs", 1, "app", Visibility.Public, true, EntrypointType.HttpHandler, false, null),
new CallGraphNode(sink, "Sink", "f.cs", 2, "lib", Visibility.Public, false, null, true, StellaOps.Scanner.Reachability.SinkCategory.CmdExec),
],
Edges:
[
new CallGraphEdge(entry, sink, CallKind.Direct),
],
EntrypointIds: [entry],
SinkIds: [sink]);
// Empty explicit sinks should fall back to snapshot sinks
var options = new ReachabilityAnalysisOptions
{
ExplicitSinks = ImmutableArray<string>.Empty
};
var analyzer = new ReachabilityAnalyzer(null, options);
// Act
var result = analyzer.Analyze(snapshot);
// Assert: should use snapshot sinks
Assert.Single(result.ReachableSinkIds);
Assert.Equal(sink, result.ReachableSinkIds[0]);
}
}

View File

@@ -0,0 +1,211 @@
using System.Text.Json;
using Xunit;
namespace StellaOps.Scanner.Core.Tests;
public class ScanManifestTests
{
[Fact]
public void ComputeHash_SameManifest_ProducesSameHash()
{
var manifest1 = CreateSampleManifest();
var manifest2 = CreateSampleManifest();
var hash1 = manifest1.ComputeHash();
var hash2 = manifest2.ComputeHash();
Assert.Equal(hash1, hash2);
Assert.StartsWith("sha256:", hash1);
}
[Fact]
public void ComputeHash_DifferentSeed_ProducesDifferentHash()
{
var seed1 = new byte[32];
var seed2 = new byte[32];
seed1[0] = 1;
seed2[0] = 2;
var manifest1 = CreateSampleManifest(seed: seed1);
var manifest2 = CreateSampleManifest(seed: seed2);
Assert.NotEqual(manifest1.ComputeHash(), manifest2.ComputeHash());
}
[Fact]
public void ComputeHash_DifferentArtifactDigest_ProducesDifferentHash()
{
var manifest1 = CreateSampleManifest(artifactDigest: "sha256:abc123");
var manifest2 = CreateSampleManifest(artifactDigest: "sha256:def456");
Assert.NotEqual(manifest1.ComputeHash(), manifest2.ComputeHash());
}
[Fact]
public void ComputeHash_HashIsLowercaseHex()
{
var manifest = CreateSampleManifest();
var hash = manifest.ComputeHash();
// Remove sha256: prefix and check format
var hexPart = hash["sha256:".Length..];
Assert.Matches(@"^[0-9a-f]{64}$", hexPart);
}
[Fact]
public void Serialization_RoundTrip_PreservesAllFields()
{
var manifest = CreateSampleManifest();
var json = manifest.ToJson();
var deserialized = ScanManifest.FromJson(json);
Assert.Equal(manifest.ScanId, deserialized.ScanId);
Assert.Equal(manifest.ArtifactDigest, deserialized.ArtifactDigest);
Assert.Equal(manifest.ArtifactPurl, deserialized.ArtifactPurl);
Assert.Equal(manifest.ScannerVersion, deserialized.ScannerVersion);
Assert.Equal(manifest.WorkerVersion, deserialized.WorkerVersion);
Assert.Equal(manifest.ConcelierSnapshotHash, deserialized.ConcelierSnapshotHash);
Assert.Equal(manifest.ExcititorSnapshotHash, deserialized.ExcititorSnapshotHash);
Assert.Equal(manifest.LatticePolicyHash, deserialized.LatticePolicyHash);
Assert.Equal(manifest.Deterministic, deserialized.Deterministic);
Assert.Equal(manifest.Seed, deserialized.Seed);
}
[Fact]
public void Serialization_JsonPropertyNames_AreCamelCase()
{
var manifest = CreateSampleManifest();
var json = manifest.ToJson();
Assert.Contains("\"scanId\":", json);
Assert.Contains("\"createdAtUtc\":", json);
Assert.Contains("\"artifactDigest\":", json);
Assert.Contains("\"scannerVersion\":", json);
Assert.Contains("\"concelierSnapshotHash\":", json);
}
[Fact]
public void ToCanonicalJson_ProducesDeterministicOutput()
{
var manifest = CreateSampleManifest();
var json1 = manifest.ToCanonicalJson();
var json2 = manifest.ToCanonicalJson();
Assert.Equal(json1, json2);
}
[Fact]
public void Builder_CreatesValidManifest()
{
var seed = new byte[32];
seed[0] = 0x42;
var manifest = ScanManifest.CreateBuilder("scan-001", "sha256:abc123")
.WithArtifactPurl("pkg:oci/myapp@sha256:abc123")
.WithScannerVersion("2.0.0")
.WithWorkerVersion("2.0.0")
.WithConcelierSnapshot("sha256:feed123")
.WithExcititorSnapshot("sha256:vex456")
.WithLatticePolicyHash("sha256:policy789")
.WithDeterministic(true)
.WithSeed(seed)
.WithKnob("maxDepth", "10")
.Build();
Assert.Equal("scan-001", manifest.ScanId);
Assert.Equal("sha256:abc123", manifest.ArtifactDigest);
Assert.Equal("pkg:oci/myapp@sha256:abc123", manifest.ArtifactPurl);
Assert.Equal("2.0.0", manifest.ScannerVersion);
Assert.Equal("sha256:feed123", manifest.ConcelierSnapshotHash);
Assert.True(manifest.Deterministic);
Assert.Equal((byte)0x42, manifest.Seed[0]);
Assert.Equal("10", manifest.Knobs["maxDepth"]);
}
[Fact]
public void Builder_WithKnobs_MergesMultipleKnobs()
{
var manifest = ScanManifest.CreateBuilder("scan-001", "sha256:abc123")
.WithKnob("key1", "value1")
.WithKnobs(new Dictionary<string, string> { ["key2"] = "value2", ["key3"] = "value3" })
.WithKnob("key4", "value4")
.WithSeed(new byte[32])
.Build();
Assert.Equal(4, manifest.Knobs.Count);
Assert.Equal("value1", manifest.Knobs["key1"]);
Assert.Equal("value2", manifest.Knobs["key2"]);
Assert.Equal("value3", manifest.Knobs["key3"]);
Assert.Equal("value4", manifest.Knobs["key4"]);
}
[Fact]
public void Builder_SeedMustBe32Bytes()
{
var builder = ScanManifest.CreateBuilder("scan-001", "sha256:abc123");
var ex = Assert.Throws<ArgumentException>(() => builder.WithSeed(new byte[16]));
Assert.Contains("32 bytes", ex.Message);
}
[Fact]
public void Record_WithExpression_CreatesModifiedCopy()
{
var original = CreateSampleManifest();
var modified = original with { Deterministic = false };
Assert.True(original.Deterministic);
Assert.False(modified.Deterministic);
Assert.Equal(original.ScanId, modified.ScanId);
}
[Fact]
public void ToJson_Indented_FormatsOutput()
{
var manifest = CreateSampleManifest();
var json = manifest.ToJson(indented: true);
Assert.Contains("\n", json);
Assert.Contains(" ", json);
}
[Fact]
public void ToJson_NotIndented_CompactOutput()
{
var manifest = CreateSampleManifest();
var json = manifest.ToJson(indented: false);
Assert.DoesNotContain("\n", json);
}
[Fact]
public void KnobsCollection_IsImmutable()
{
var manifest = CreateSampleManifest();
// Knobs is IReadOnlyDictionary - cannot be modified
Assert.IsAssignableFrom<IReadOnlyDictionary<string, string>>(manifest.Knobs);
}
private static ScanManifest CreateSampleManifest(
string scanId = "scan-001",
string artifactDigest = "sha256:abc123",
byte[]? seed = null)
{
seed ??= new byte[32];
return ScanManifest.CreateBuilder(scanId, artifactDigest)
.WithCreatedAt(DateTimeOffset.Parse("2025-12-17T12:00:00Z"))
.WithArtifactPurl("pkg:oci/myapp@sha256:abc123")
.WithScannerVersion("1.0.0")
.WithWorkerVersion("1.0.0")
.WithConcelierSnapshot("sha256:feed123")
.WithExcititorSnapshot("sha256:vex456")
.WithLatticePolicyHash("sha256:policy789")
.WithDeterministic(true)
.WithSeed(seed)
.WithKnob("maxDepth", "10")
.Build();
}
}

View File

@@ -384,4 +384,150 @@ public class PathWitnessBuilderTests
}
#endregion
#region BuildFromAnalyzerAsync Tests (WIT-008)
/// <summary>
/// WIT-008: Test that BuildFromAnalyzerAsync generates witnesses from pre-computed paths.
/// </summary>
[Fact]
public async Task BuildFromAnalyzerAsync_GeneratesWitnessesFromPaths()
{
// Arrange
var builder = new PathWitnessBuilder(_cryptoHash, _timeProvider);
var paths = new List<AnalyzerPathData>
{
new("entry:001", "sink:001",
System.Collections.Immutable.ImmutableArray.Create("entry:001", "mid:001", "sink:001"))
};
var nodeMetadata = new Dictionary<string, AnalyzerNodeData>
{
["entry:001"] = new("EntryMethod", "src/Entry.cs", 10, "http"),
["mid:001"] = new("MiddleMethod", "src/Middle.cs", 20, null),
["sink:001"] = new("SinkMethod", "src/Sink.cs", 30, null)
};
var request = new AnalyzerWitnessRequest
{
SbomDigest = "sha256:sbom123",
ComponentPurl = "pkg:nuget/Test@1.0.0",
VulnId = "CVE-2024-99999",
VulnSource = "NVD",
AffectedRange = "<=1.0.0",
SinkType = "sql_injection",
GraphDigest = "blake3:graph123",
Paths = paths,
NodeMetadata = nodeMetadata,
BuildId = "build:xyz"
};
// Act
var witnesses = new List<PathWitness>();
await foreach (var witness in builder.BuildFromAnalyzerAsync(request))
{
witnesses.Add(witness);
}
// Assert
Assert.Single(witnesses);
var w = witnesses[0];
Assert.Equal("CVE-2024-99999", w.Vuln.Id);
Assert.Equal("entry:001", w.Entrypoint.SymbolId);
Assert.Equal("sink:001", w.Sink.SymbolId);
Assert.Equal(3, w.Path.Count);
Assert.Equal("EntryMethod", w.Path[0].Symbol);
Assert.Equal("MiddleMethod", w.Path[1].Symbol);
Assert.Equal("SinkMethod", w.Path[2].Symbol);
Assert.NotEmpty(w.WitnessId);
Assert.StartsWith("wit:", w.WitnessId);
}
/// <summary>
/// WIT-008: Test that BuildFromAnalyzerAsync yields empty when no paths provided.
/// </summary>
[Fact]
public async Task BuildFromAnalyzerAsync_YieldsEmpty_WhenNoPaths()
{
// Arrange
var builder = new PathWitnessBuilder(_cryptoHash, _timeProvider);
var request = new AnalyzerWitnessRequest
{
SbomDigest = "sha256:sbom123",
ComponentPurl = "pkg:nuget/Test@1.0.0",
VulnId = "CVE-2024-99999",
VulnSource = "NVD",
AffectedRange = "<=1.0.0",
SinkType = "sql_injection",
GraphDigest = "blake3:graph123",
Paths = new List<AnalyzerPathData>(),
NodeMetadata = new Dictionary<string, AnalyzerNodeData>()
};
// Act
var witnesses = new List<PathWitness>();
await foreach (var witness in builder.BuildFromAnalyzerAsync(request))
{
witnesses.Add(witness);
}
// Assert
Assert.Empty(witnesses);
}
/// <summary>
/// WIT-008: Test that missing node metadata is handled gracefully.
/// </summary>
[Fact]
public async Task BuildFromAnalyzerAsync_HandlesMissingNodeMetadata()
{
// Arrange
var builder = new PathWitnessBuilder(_cryptoHash, _timeProvider);
var paths = new List<AnalyzerPathData>
{
new("entry:001", "sink:001",
System.Collections.Immutable.ImmutableArray.Create("entry:001", "unknown:002", "sink:001"))
};
// Only entry and sink have metadata, unknown:002 doesn't
var nodeMetadata = new Dictionary<string, AnalyzerNodeData>
{
["entry:001"] = new("EntryMethod", "src/Entry.cs", 10, "http"),
["sink:001"] = new("SinkMethod", "src/Sink.cs", 30, null)
};
var request = new AnalyzerWitnessRequest
{
SbomDigest = "sha256:sbom123",
ComponentPurl = "pkg:nuget/Test@1.0.0",
VulnId = "CVE-2024-99999",
VulnSource = "NVD",
AffectedRange = "<=1.0.0",
SinkType = "sql_injection",
GraphDigest = "blake3:graph123",
Paths = paths,
NodeMetadata = nodeMetadata
};
// Act
var witnesses = new List<PathWitness>();
await foreach (var witness in builder.BuildFromAnalyzerAsync(request))
{
witnesses.Add(witness);
}
// Assert
Assert.Single(witnesses);
var w = witnesses[0];
Assert.Equal(3, w.Path.Count);
// Unknown node should use its ID as symbol
Assert.Equal("unknown:002", w.Path[1].Symbol);
Assert.Equal("unknown:002", w.Path[1].SymbolId);
Assert.Null(w.Path[1].File);
}
#endregion
}

View File

@@ -0,0 +1,348 @@
// -----------------------------------------------------------------------------
// ReachabilityCacheTests.cs
// Sprint: SPRINT_3700_0006_0001_incremental_cache (CACHE-016, CACHE-017)
// Description: Unit tests for reachability cache components.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Reachability.Cache;
using Xunit;
namespace StellaOps.Scanner.Reachability.Tests;
public sealed class GraphDeltaComputerTests
{
private readonly GraphDeltaComputer _computer;
public GraphDeltaComputerTests()
{
_computer = new GraphDeltaComputer(NullLogger<GraphDeltaComputer>.Instance);
}
[Fact]
public async Task ComputeDeltaAsync_SameHash_ReturnsEmpty()
{
// Arrange
var graph1 = new TestGraphSnapshot("hash1", new[] { "A", "B" }, new[] { ("A", "B") });
var graph2 = new TestGraphSnapshot("hash1", new[] { "A", "B" }, new[] { ("A", "B") });
// Act
var delta = await _computer.ComputeDeltaAsync(graph1, graph2);
// Assert
delta.HasChanges.Should().BeFalse();
}
[Fact]
public async Task ComputeDeltaAsync_AddedNode_ReturnsCorrectDelta()
{
// Arrange
var graph1 = new TestGraphSnapshot("hash1", new[] { "A", "B" }, new[] { ("A", "B") });
var graph2 = new TestGraphSnapshot("hash2", new[] { "A", "B", "C" }, new[] { ("A", "B"), ("B", "C") });
// Act
var delta = await _computer.ComputeDeltaAsync(graph1, graph2);
// Assert
delta.HasChanges.Should().BeTrue();
delta.AddedNodes.Should().Contain("C");
delta.RemovedNodes.Should().BeEmpty();
delta.AddedEdges.Should().ContainSingle(e => e.CallerKey == "B" && e.CalleeKey == "C");
delta.AffectedMethodKeys.Should().Contain("C");
}
[Fact]
public async Task ComputeDeltaAsync_RemovedNode_ReturnsCorrectDelta()
{
// Arrange
var graph1 = new TestGraphSnapshot("hash1", new[] { "A", "B", "C" }, new[] { ("A", "B"), ("B", "C") });
var graph2 = new TestGraphSnapshot("hash2", new[] { "A", "B" }, new[] { ("A", "B") });
// Act
var delta = await _computer.ComputeDeltaAsync(graph1, graph2);
// Assert
delta.HasChanges.Should().BeTrue();
delta.RemovedNodes.Should().Contain("C");
delta.AddedNodes.Should().BeEmpty();
delta.RemovedEdges.Should().ContainSingle(e => e.CallerKey == "B" && e.CalleeKey == "C");
}
[Fact]
public async Task ComputeDeltaAsync_EdgeChange_DetectsAffectedMethods()
{
// Arrange
var graph1 = new TestGraphSnapshot("hash1", new[] { "A", "B", "C" }, new[] { ("A", "B") });
var graph2 = new TestGraphSnapshot("hash2", new[] { "A", "B", "C" }, new[] { ("A", "C") });
// Act
var delta = await _computer.ComputeDeltaAsync(graph1, graph2);
// Assert
delta.HasChanges.Should().BeTrue();
delta.AddedEdges.Should().ContainSingle(e => e.CallerKey == "A" && e.CalleeKey == "C");
delta.RemovedEdges.Should().ContainSingle(e => e.CallerKey == "A" && e.CalleeKey == "B");
delta.AffectedMethodKeys.Should().Contain(new[] { "A", "B", "C" });
}
private sealed class TestGraphSnapshot : IGraphSnapshot
{
public string Hash { get; }
public IReadOnlySet<string> NodeKeys { get; }
public IReadOnlyList<GraphEdge> Edges { get; }
public IReadOnlySet<string> EntryPoints { get; }
public TestGraphSnapshot(string hash, string[] nodes, (string, string)[] edges, string[]? entryPoints = null)
{
Hash = hash;
NodeKeys = nodes.ToHashSet();
Edges = edges.Select(e => new GraphEdge(e.Item1, e.Item2)).ToList();
EntryPoints = (entryPoints ?? nodes.Take(1).ToArray()).ToHashSet();
}
}
}
public sealed class ImpactSetCalculatorTests
{
private readonly ImpactSetCalculator _calculator;
public ImpactSetCalculatorTests()
{
_calculator = new ImpactSetCalculator(NullLogger<ImpactSetCalculator>.Instance);
}
[Fact]
public async Task CalculateImpactAsync_NoDelta_ReturnsEmpty()
{
// Arrange
var delta = GraphDelta.Empty;
var graph = new TestGraphSnapshot("hash1", new[] { "Entry", "A", "B" }, new[] { ("Entry", "A"), ("A", "B") });
// Act
var impact = await _calculator.CalculateImpactAsync(delta, graph);
// Assert
impact.RequiresFullRecompute.Should().BeFalse();
impact.AffectedEntryPoints.Should().BeEmpty();
impact.SavingsRatio.Should().Be(1.0);
}
[Fact]
public async Task CalculateImpactAsync_ChangeInPath_IdentifiesAffectedEntry()
{
// Arrange
var delta = new GraphDelta
{
AddedNodes = new HashSet<string> { "C" },
AddedEdges = new List<GraphEdge> { new("B", "C") },
AffectedMethodKeys = new HashSet<string> { "B", "C" }
};
var graph = new TestGraphSnapshot(
"hash2",
new[] { "Entry", "A", "B", "C" },
new[] { ("Entry", "A"), ("A", "B"), ("B", "C") },
new[] { "Entry" });
// Act
var impact = await _calculator.CalculateImpactAsync(delta, graph);
// Assert
impact.RequiresFullRecompute.Should().BeFalse();
impact.AffectedEntryPoints.Should().Contain("Entry");
}
[Fact]
public async Task CalculateImpactAsync_ManyAffected_TriggersFullRecompute()
{
// Arrange - More than 30% affected
var delta = new GraphDelta
{
AffectedMethodKeys = new HashSet<string> { "Entry1", "Entry2", "Entry3", "Entry4" }
};
var graph = new TestGraphSnapshot(
"hash2",
new[] { "Entry1", "Entry2", "Entry3", "Entry4", "Sink" },
new[] { ("Entry1", "Sink"), ("Entry2", "Sink"), ("Entry3", "Sink"), ("Entry4", "Sink") },
new[] { "Entry1", "Entry2", "Entry3", "Entry4" });
// Act
var impact = await _calculator.CalculateImpactAsync(delta, graph);
// Assert - All 4 entries affected = 100% > 30% threshold
impact.RequiresFullRecompute.Should().BeTrue();
}
private sealed class TestGraphSnapshot : IGraphSnapshot
{
public string Hash { get; }
public IReadOnlySet<string> NodeKeys { get; }
public IReadOnlyList<GraphEdge> Edges { get; }
public IReadOnlySet<string> EntryPoints { get; }
public TestGraphSnapshot(string hash, string[] nodes, (string, string)[] edges, string[]? entryPoints = null)
{
Hash = hash;
NodeKeys = nodes.ToHashSet();
Edges = edges.Select(e => new GraphEdge(e.Item1, e.Item2)).ToList();
EntryPoints = (entryPoints ?? nodes.Take(1).ToArray()).ToHashSet();
}
}
}
public sealed class StateFlipDetectorTests
{
private readonly StateFlipDetector _detector;
public StateFlipDetectorTests()
{
_detector = new StateFlipDetector(NullLogger<StateFlipDetector>.Instance);
}
[Fact]
public async Task DetectFlipsAsync_NoChanges_ReturnsEmpty()
{
// Arrange
var previous = new List<ReachablePairResult>
{
new() { EntryMethodKey = "Entry", SinkMethodKey = "Sink", IsReachable = true, Confidence = 1.0, ComputedAt = DateTimeOffset.UtcNow }
};
var current = new List<ReachablePairResult>
{
new() { EntryMethodKey = "Entry", SinkMethodKey = "Sink", IsReachable = true, Confidence = 1.0, ComputedAt = DateTimeOffset.UtcNow }
};
// Act
var result = await _detector.DetectFlipsAsync(previous, current);
// Assert
result.HasFlips.Should().BeFalse();
result.NewRiskCount.Should().Be(0);
result.MitigatedCount.Should().Be(0);
}
[Fact]
public async Task DetectFlipsAsync_BecameReachable_ReturnsNewRisk()
{
// Arrange
var previous = new List<ReachablePairResult>
{
new() { EntryMethodKey = "Entry", SinkMethodKey = "Sink", IsReachable = false, Confidence = 1.0, ComputedAt = DateTimeOffset.UtcNow }
};
var current = new List<ReachablePairResult>
{
new() { EntryMethodKey = "Entry", SinkMethodKey = "Sink", IsReachable = true, Confidence = 1.0, ComputedAt = DateTimeOffset.UtcNow }
};
// Act
var result = await _detector.DetectFlipsAsync(previous, current);
// Assert
result.HasFlips.Should().BeTrue();
result.NewRiskCount.Should().Be(1);
result.MitigatedCount.Should().Be(0);
result.NewlyReachable.Should().ContainSingle()
.Which.FlipType.Should().Be(StateFlipType.BecameReachable);
result.ShouldBlockPr.Should().BeTrue();
}
[Fact]
public async Task DetectFlipsAsync_BecameUnreachable_ReturnsMitigated()
{
// Arrange
var previous = new List<ReachablePairResult>
{
new() { EntryMethodKey = "Entry", SinkMethodKey = "Sink", IsReachable = true, Confidence = 1.0, ComputedAt = DateTimeOffset.UtcNow }
};
var current = new List<ReachablePairResult>
{
new() { EntryMethodKey = "Entry", SinkMethodKey = "Sink", IsReachable = false, Confidence = 1.0, ComputedAt = DateTimeOffset.UtcNow }
};
// Act
var result = await _detector.DetectFlipsAsync(previous, current);
// Assert
result.HasFlips.Should().BeTrue();
result.NewRiskCount.Should().Be(0);
result.MitigatedCount.Should().Be(1);
result.NewlyUnreachable.Should().ContainSingle()
.Which.FlipType.Should().Be(StateFlipType.BecameUnreachable);
result.ShouldBlockPr.Should().BeFalse();
}
[Fact]
public async Task DetectFlipsAsync_NewReachablePair_ReturnsNewRisk()
{
// Arrange
var previous = new List<ReachablePairResult>();
var current = new List<ReachablePairResult>
{
new() { EntryMethodKey = "Entry", SinkMethodKey = "Sink", IsReachable = true, Confidence = 1.0, ComputedAt = DateTimeOffset.UtcNow }
};
// Act
var result = await _detector.DetectFlipsAsync(previous, current);
// Assert
result.HasFlips.Should().BeTrue();
result.NewRiskCount.Should().Be(1);
result.ShouldBlockPr.Should().BeTrue();
}
[Fact]
public async Task DetectFlipsAsync_RemovedReachablePair_ReturnsMitigated()
{
// Arrange
var previous = new List<ReachablePairResult>
{
new() { EntryMethodKey = "Entry", SinkMethodKey = "Sink", IsReachable = true, Confidence = 1.0, ComputedAt = DateTimeOffset.UtcNow }
};
var current = new List<ReachablePairResult>();
// Act
var result = await _detector.DetectFlipsAsync(previous, current);
// Assert
result.HasFlips.Should().BeTrue();
result.MitigatedCount.Should().Be(1);
result.ShouldBlockPr.Should().BeFalse();
}
[Fact]
public async Task DetectFlipsAsync_NetChange_CalculatesCorrectly()
{
// Arrange
var previous = new List<ReachablePairResult>
{
new() { EntryMethodKey = "E1", SinkMethodKey = "S1", IsReachable = true, Confidence = 1.0, ComputedAt = DateTimeOffset.UtcNow },
new() { EntryMethodKey = "E2", SinkMethodKey = "S2", IsReachable = false, Confidence = 1.0, ComputedAt = DateTimeOffset.UtcNow }
};
var current = new List<ReachablePairResult>
{
new() { EntryMethodKey = "E1", SinkMethodKey = "S1", IsReachable = false, Confidence = 1.0, ComputedAt = DateTimeOffset.UtcNow },
new() { EntryMethodKey = "E2", SinkMethodKey = "S2", IsReachable = true, Confidence = 1.0, ComputedAt = DateTimeOffset.UtcNow },
new() { EntryMethodKey = "E3", SinkMethodKey = "S3", IsReachable = true, Confidence = 1.0, ComputedAt = DateTimeOffset.UtcNow }
};
// Act
var result = await _detector.DetectFlipsAsync(previous, current);
// Assert
result.NewRiskCount.Should().Be(2); // E2->S2 became reachable, E3->S3 new
result.MitigatedCount.Should().Be(1); // E1->S1 became unreachable
result.NetChange.Should().Be(1); // +2 - 1 = 1
}
}

View File

@@ -0,0 +1,251 @@
using Org.BouncyCastle.Crypto.Generators;
using Org.BouncyCastle.Crypto.Parameters;
using Org.BouncyCastle.Security;
using StellaOps.Attestor.Envelope;
using StellaOps.Cryptography;
using StellaOps.Scanner.Reachability.Witnesses;
using System.Collections.Immutable;
using Xunit;
namespace StellaOps.Scanner.Reachability.Tests;
/// <summary>
/// Tests for <see cref="SignedWitnessGenerator"/>.
/// Sprint: SPRINT_3700_0001_0001 (WIT-009)
/// </summary>
public class SignedWitnessGeneratorTests
{
private readonly IPathWitnessBuilder _builder;
private readonly IWitnessDsseSigner _signer;
private readonly SignedWitnessGenerator _generator;
private readonly EnvelopeKey _testKey;
public SignedWitnessGeneratorTests()
{
var cryptoHash = DefaultCryptoHash.CreateForTests();
_builder = new PathWitnessBuilder(cryptoHash, TimeProvider.System);
_signer = new WitnessDsseSigner();
_generator = new SignedWitnessGenerator(_builder, _signer);
_testKey = CreateTestKey();
}
[Fact]
public async Task GenerateSignedWitnessAsync_ReturnsNull_WhenNoPathExists()
{
// Arrange - Request with no valid path (unreachable sink)
var graph = CreateSimpleGraph();
var request = new PathWitnessRequest
{
SbomDigest = "sha256:sbom123",
ComponentPurl = "pkg:nuget/Test@1.0.0",
VulnId = "CVE-2024-12345",
VulnSource = "NVD",
AffectedRange = "<=1.0.0",
EntrypointSymbolId = "sym:entry",
EntrypointKind = "http",
EntrypointName = "GET /api/test",
SinkSymbolId = "sym:unreachable", // Not in graph
SinkType = "deserialization",
CallGraph = graph,
CallgraphDigest = "blake3:graph123"
};
// Act
var result = await _generator.GenerateSignedWitnessAsync(request, _testKey);
// Assert
Assert.Null(result);
}
[Fact]
public async Task GenerateSignedWitnessAsync_ReturnsSignedResult_WhenPathExists()
{
// Arrange
var graph = CreateSimpleGraph();
var request = new PathWitnessRequest
{
SbomDigest = "sha256:sbom123",
ComponentPurl = "pkg:nuget/Test@1.0.0",
VulnId = "CVE-2024-12345",
VulnSource = "NVD",
AffectedRange = "<=1.0.0",
EntrypointSymbolId = "sym:entry",
EntrypointKind = "http",
EntrypointName = "GET /api/test",
SinkSymbolId = "sym:sink",
SinkType = "deserialization",
CallGraph = graph,
CallgraphDigest = "blake3:graph123"
};
// Act
var result = await _generator.GenerateSignedWitnessAsync(request, _testKey);
// Assert
Assert.NotNull(result);
Assert.True(result.IsSuccess);
Assert.NotNull(result.Witness);
Assert.NotNull(result.Envelope);
Assert.NotEmpty(result.PayloadBytes!);
Assert.Equal(WitnessSchema.DssePayloadType, result.Envelope.PayloadType);
}
[Fact]
public async Task GenerateSignedWitnessesFromAnalyzerAsync_GeneratesSignedEnvelopes()
{
// Arrange
var paths = new List<AnalyzerPathData>
{
new("entry:001", "sink:001",
ImmutableArray.Create("entry:001", "mid:001", "sink:001")),
new("entry:002", "sink:002",
ImmutableArray.Create("entry:002", "sink:002"))
};
var nodeMetadata = new Dictionary<string, AnalyzerNodeData>
{
["entry:001"] = new("EntryMethod1", "src/Entry.cs", 10, "http"),
["mid:001"] = new("MiddleMethod", "src/Middle.cs", 20, null),
["sink:001"] = new("SinkMethod1", "src/Sink.cs", 30, null),
["entry:002"] = new("EntryMethod2", "src/Entry2.cs", 40, "grpc"),
["sink:002"] = new("SinkMethod2", "src/Sink2.cs", 50, null)
};
var request = new AnalyzerWitnessRequest
{
SbomDigest = "sha256:sbom123",
ComponentPurl = "pkg:nuget/Test@1.0.0",
VulnId = "CVE-2024-0000",
VulnSource = "NVD",
AffectedRange = "<=1.0.0",
SinkType = "deserialization",
GraphDigest = "blake3:graph123",
Paths = paths,
NodeMetadata = nodeMetadata
};
// Act
var results = new List<SignedWitnessResult>();
await foreach (var result in _generator.GenerateSignedWitnessesFromAnalyzerAsync(request, _testKey))
{
results.Add(result);
}
// Assert
Assert.Equal(2, results.Count);
Assert.All(results, r => Assert.True(r.IsSuccess));
Assert.All(results, r => Assert.NotNull(r.Envelope));
Assert.Equal("entry:001", results[0].Witness!.Entrypoint.SymbolId);
Assert.Equal("entry:002", results[1].Witness!.Entrypoint.SymbolId);
}
[Fact]
public async Task GeneratedEnvelope_CanBeVerified()
{
// Arrange
var graph = CreateSimpleGraph();
var request = new PathWitnessRequest
{
SbomDigest = "sha256:sbom123",
ComponentPurl = "pkg:nuget/Test@1.0.0",
VulnId = "CVE-2024-12345",
VulnSource = "NVD",
AffectedRange = "<=1.0.0",
EntrypointSymbolId = "sym:entry",
EntrypointKind = "http",
EntrypointName = "GET /api/test",
SinkSymbolId = "sym:sink",
SinkType = "deserialization",
CallGraph = graph,
CallgraphDigest = "blake3:graph123"
};
var (_, publicKey) = GetTestKeyPair();
var verifyKey = EnvelopeKey.CreateEd25519Verifier(publicKey);
// Act
var result = await _generator.GenerateSignedWitnessAsync(request, _testKey);
// Assert - Verify the envelope
Assert.NotNull(result);
Assert.True(result.IsSuccess);
var verifyResult = _signer.VerifyWitness(result.Envelope!, verifyKey);
Assert.True(verifyResult.IsSuccess);
Assert.Equal(result.Witness!.WitnessId, verifyResult.Witness!.WitnessId);
}
private static RichGraph CreateSimpleGraph()
{
var nodes = new List<RichGraphNode>
{
new("n1", "sym:entry", null, null, "dotnet", "method", "Entry", null, null, null, null),
new("n2", "sym:middle", null, null, "dotnet", "method", "Middle", null, null, null, null),
new("n3", "sym:sink", null, null, "dotnet", "method", "Sink", null, null, null, null)
};
var edges = new List<RichGraphEdge>
{
new("n1", "n2", "call", null, null, null, 1.0, null),
new("n2", "n3", "call", null, null, null, 1.0, null)
};
var roots = new List<RichGraphRoot>
{
new("n1", "http", "/api/test")
};
return new RichGraph(
nodes,
edges,
roots,
new RichGraphAnalyzer("test", "1.0.0", null));
}
private static EnvelopeKey CreateTestKey()
{
var (privateKey, publicKey) = GetTestKeyPair();
return EnvelopeKey.CreateEd25519Signer(privateKey, publicKey);
}
private static (byte[] privateKey, byte[] publicKey) GetTestKeyPair()
{
var generator = new Ed25519KeyPairGenerator();
generator.Init(new Ed25519KeyGenerationParameters(new SecureRandom(new FixedRandomGenerator())));
var keyPair = generator.GenerateKeyPair();
var privateParams = (Ed25519PrivateKeyParameters)keyPair.Private;
var publicParams = (Ed25519PublicKeyParameters)keyPair.Public;
var privateKey = new byte[64];
privateParams.Encode(privateKey, 0);
var publicKey = publicParams.GetEncoded();
Array.Copy(publicKey, 0, privateKey, 32, 32);
return (privateKey, publicKey);
}
private sealed class FixedRandomGenerator : Org.BouncyCastle.Crypto.Prng.IRandomGenerator
{
private byte _value = 0x42;
public void AddSeedMaterial(byte[] seed) { }
public void AddSeedMaterial(ReadOnlySpan<byte> seed) { }
public void AddSeedMaterial(long seed) { }
public void NextBytes(byte[] bytes) => NextBytes(bytes, 0, bytes.Length);
public void NextBytes(byte[] bytes, int start, int len)
{
for (int i = start; i < start + len; i++)
{
bytes[i] = _value++;
}
}
public void NextBytes(Span<byte> bytes)
{
for (int i = 0; i < bytes.Length; i++)
{
bytes[i] = _value++;
}
}
}
}

View File

@@ -0,0 +1,282 @@
// -----------------------------------------------------------------------------
// SurfaceQueryServiceTests.cs
// Sprint: SPRINT_3700_0004_0001_reachability_integration (REACH-012)
// Description: Unit tests for SurfaceQueryService.
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using FluentAssertions;
using Microsoft.Extensions.Caching.Memory;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Reachability.Surfaces;
using Xunit;
namespace StellaOps.Scanner.Reachability.Tests;
public sealed class SurfaceQueryServiceTests : IDisposable
{
private readonly FakeSurfaceRepository _repository;
private readonly IMemoryCache _cache;
private readonly ILogger<SurfaceQueryService> _logger;
private readonly SurfaceQueryService _service;
public SurfaceQueryServiceTests()
{
_repository = new FakeSurfaceRepository();
_cache = new MemoryCache(new MemoryCacheOptions());
_logger = NullLogger<SurfaceQueryService>.Instance;
_service = new SurfaceQueryService(
_repository,
_cache,
_logger,
new SurfaceQueryOptions { EnableCaching = true });
}
public void Dispose()
{
_cache.Dispose();
}
[Fact]
public async Task QueryAsync_WhenSurfaceFound_ReturnsFoundResult()
{
// Arrange
var surfaceId = Guid.NewGuid();
var cveId = "CVE-2023-1234";
var packageName = "Newtonsoft.Json";
var version = "12.0.1";
var computedAt = DateTimeOffset.UtcNow.AddHours(-1);
_repository.AddSurface(new SurfaceInfo
{
Id = surfaceId,
CveId = cveId,
Ecosystem = "nuget",
PackageName = packageName,
VulnVersion = version,
FixedVersion = "12.0.2",
ComputedAt = computedAt,
ChangedMethodCount = 3,
TriggerCount = 5
});
_repository.AddTriggers(surfaceId, new List<TriggerMethodInfo>
{
new()
{
MethodKey = "Newtonsoft.Json.JsonConvert::DeserializeObject",
MethodName = "DeserializeObject",
DeclaringType = "JsonConvert",
SinkCount = 2,
ShortestPathLength = 1
}
});
_repository.AddSinks(surfaceId, new List<string> { "Newtonsoft.Json.Internal::Vulnerable" });
var request = new SurfaceQueryRequest
{
CveId = cveId,
Ecosystem = "nuget",
PackageName = packageName,
Version = version
};
// Act
var result = await _service.QueryAsync(request);
// Assert
result.SurfaceFound.Should().BeTrue();
result.Source.Should().Be(SinkSource.Surface);
result.SurfaceId.Should().Be(surfaceId);
result.Triggers.Should().HaveCount(1);
result.Triggers[0].MethodName.Should().Be("DeserializeObject");
result.ComputedAt.Should().Be(computedAt);
}
[Fact]
public async Task QueryAsync_WhenSurfaceNotFound_ReturnsFallbackResult()
{
// Arrange
var request = new SurfaceQueryRequest
{
CveId = "CVE-2023-9999",
Ecosystem = "npm",
PackageName = "unknown-package",
Version = "1.0.0"
};
// Act
var result = await _service.QueryAsync(request);
// Assert
result.SurfaceFound.Should().BeFalse();
result.Source.Should().Be(SinkSource.FallbackAll);
result.SurfaceId.Should().BeNull();
result.Triggers.Should().BeEmpty();
}
[Fact]
public async Task QueryAsync_CachesResult_ReturnsFromCacheOnSecondCall()
{
// Arrange
var surfaceId = Guid.NewGuid();
_repository.AddSurface(new SurfaceInfo
{
Id = surfaceId,
CveId = "CVE-2023-1234",
Ecosystem = "nuget",
PackageName = "Test.Package",
VulnVersion = "1.0.0",
ComputedAt = DateTimeOffset.UtcNow
});
var request = new SurfaceQueryRequest
{
CveId = "CVE-2023-1234",
Ecosystem = "nuget",
PackageName = "Test.Package",
Version = "1.0.0"
};
// Act
var result1 = await _service.QueryAsync(request);
var result2 = await _service.QueryAsync(request);
// Assert
result1.SurfaceFound.Should().BeTrue();
result2.SurfaceFound.Should().BeTrue();
// Repository should only be called once due to caching
_repository.GetSurfaceCallCount.Should().Be(1);
}
[Fact]
public async Task QueryBulkAsync_QueriesMultipleVulnerabilities()
{
// Arrange
var surfaceId1 = Guid.NewGuid();
_repository.AddSurface(new SurfaceInfo
{
Id = surfaceId1,
CveId = "CVE-2023-0001",
Ecosystem = "nuget",
PackageName = "Package1",
VulnVersion = "1.0.0",
ComputedAt = DateTimeOffset.UtcNow
});
var requests = new List<SurfaceQueryRequest>
{
new() { CveId = "CVE-2023-0001", Ecosystem = "nuget", PackageName = "Package1", Version = "1.0.0" },
new() { CveId = "CVE-2023-0002", Ecosystem = "nuget", PackageName = "Package2", Version = "2.0.0" }
};
// Act
var results = await _service.QueryBulkAsync(requests);
// Assert
results.Should().HaveCount(2);
var key1 = "CVE-2023-0001|nuget|Package1|1.0.0";
var key2 = "CVE-2023-0002|nuget|Package2|2.0.0";
results[key1].SurfaceFound.Should().BeTrue();
results[key2].SurfaceFound.Should().BeFalse();
}
[Fact]
public async Task ExistsAsync_ReturnsTrueWhenSurfaceExists()
{
// Arrange
_repository.AddSurface(new SurfaceInfo
{
Id = Guid.NewGuid(),
CveId = "CVE-2023-1234",
Ecosystem = "nuget",
PackageName = "Package",
VulnVersion = "1.0.0",
ComputedAt = DateTimeOffset.UtcNow
});
// Act
var exists = await _service.ExistsAsync("CVE-2023-1234", "nuget", "Package", "1.0.0");
// Assert
exists.Should().BeTrue();
}
[Fact]
public async Task ExistsAsync_ReturnsFalseWhenSurfaceDoesNotExist()
{
// Act
var exists = await _service.ExistsAsync("CVE-2023-9999", "npm", "unknown", "1.0.0");
// Assert
exists.Should().BeFalse();
}
/// <summary>
/// Fake implementation of ISurfaceRepository for testing.
/// </summary>
private sealed class FakeSurfaceRepository : ISurfaceRepository
{
private readonly Dictionary<string, SurfaceInfo> _surfaces = new();
private readonly Dictionary<Guid, List<TriggerMethodInfo>> _triggers = new();
private readonly Dictionary<Guid, List<string>> _sinks = new();
public int GetSurfaceCallCount { get; private set; }
public void AddSurface(SurfaceInfo surface)
{
var key = $"{surface.CveId}|{surface.Ecosystem}|{surface.PackageName}|{surface.VulnVersion}";
_surfaces[key] = surface;
}
public void AddTriggers(Guid surfaceId, List<TriggerMethodInfo> triggers)
{
_triggers[surfaceId] = triggers;
}
public void AddSinks(Guid surfaceId, List<string> sinks)
{
_sinks[surfaceId] = sinks;
}
public Task<SurfaceInfo?> GetSurfaceAsync(string cveId, string ecosystem, string packageName, string version, CancellationToken cancellationToken = default)
{
GetSurfaceCallCount++;
var key = $"{cveId}|{ecosystem}|{packageName}|{version}";
_surfaces.TryGetValue(key, out var surface);
return Task.FromResult(surface);
}
public Task<IReadOnlyList<TriggerMethodInfo>> GetTriggersAsync(Guid surfaceId, int maxCount, CancellationToken cancellationToken = default)
{
if (_triggers.TryGetValue(surfaceId, out var triggers))
{
return Task.FromResult<IReadOnlyList<TriggerMethodInfo>>(triggers);
}
return Task.FromResult<IReadOnlyList<TriggerMethodInfo>>(new List<TriggerMethodInfo>());
}
public Task<IReadOnlyList<string>> GetSinksAsync(Guid surfaceId, CancellationToken cancellationToken = default)
{
if (_sinks.TryGetValue(surfaceId, out var sinks))
{
return Task.FromResult<IReadOnlyList<string>>(sinks);
}
return Task.FromResult<IReadOnlyList<string>>(new List<string>());
}
public Task<bool> ExistsAsync(string cveId, string ecosystem, string packageName, string version, CancellationToken cancellationToken = default)
{
var key = $"{cveId}|{ecosystem}|{packageName}|{version}";
return Task.FromResult(_surfaces.ContainsKey(key));
}
}
}

View File

@@ -0,0 +1,278 @@
using Org.BouncyCastle.Crypto.Generators;
using Org.BouncyCastle.Crypto.Parameters;
using Org.BouncyCastle.Security;
using StellaOps.Attestor.Envelope;
using StellaOps.Scanner.Reachability.Witnesses;
using Xunit;
namespace StellaOps.Scanner.Reachability.Tests;
/// <summary>
/// Tests for <see cref="WitnessDsseSigner"/>.
/// Sprint: SPRINT_3700_0001_0001 (WIT-007D)
/// Golden fixture tests for DSSE sign/verify.
/// </summary>
public class WitnessDsseSignerTests
{
/// <summary>
/// Creates a deterministic Ed25519 key pair for testing.
/// </summary>
private static (byte[] privateKey, byte[] publicKey) CreateTestKeyPair()
{
// Use a fixed seed for deterministic tests
var generator = new Ed25519KeyPairGenerator();
generator.Init(new Ed25519KeyGenerationParameters(new SecureRandom(new FixedRandomGenerator())));
var keyPair = generator.GenerateKeyPair();
var privateParams = (Ed25519PrivateKeyParameters)keyPair.Private;
var publicParams = (Ed25519PublicKeyParameters)keyPair.Public;
// Ed25519 private key = 32-byte seed + 32-byte public key
var privateKey = new byte[64];
privateParams.Encode(privateKey, 0);
var publicKey = publicParams.GetEncoded();
// Append public key to make 64-byte expanded form
Array.Copy(publicKey, 0, privateKey, 32, 32);
return (privateKey, publicKey);
}
[Fact]
public void SignWitness_WithValidKey_ReturnsSuccess()
{
// Arrange
var witness = CreateTestWitness();
var (privateKey, publicKey) = CreateTestKeyPair();
var key = EnvelopeKey.CreateEd25519Signer(privateKey, publicKey);
var signer = new WitnessDsseSigner();
// Act
var result = signer.SignWitness(witness, key);
// Assert
Assert.True(result.IsSuccess, result.Error);
Assert.NotNull(result.Envelope);
Assert.Equal(WitnessSchema.DssePayloadType, result.Envelope.PayloadType);
Assert.Single(result.Envelope.Signatures);
Assert.NotEmpty(result.PayloadBytes!);
}
[Fact]
public void VerifyWitness_WithValidSignature_ReturnsSuccess()
{
// Arrange
var witness = CreateTestWitness();
var (privateKey, publicKey) = CreateTestKeyPair();
var signingKey = EnvelopeKey.CreateEd25519Signer(privateKey, publicKey);
var signer = new WitnessDsseSigner();
// Sign the witness
var signResult = signer.SignWitness(witness, signingKey);
Assert.True(signResult.IsSuccess, signResult.Error);
// Create public key for verification
var verifyKey = EnvelopeKey.CreateEd25519Verifier(publicKey);
// Act
var verifyResult = signer.VerifyWitness(signResult.Envelope!, verifyKey);
// Assert
Assert.True(verifyResult.IsSuccess, verifyResult.Error);
Assert.NotNull(verifyResult.Witness);
Assert.Equal(witness.WitnessId, verifyResult.Witness.WitnessId);
Assert.Equal(witness.Vuln.Id, verifyResult.Witness.Vuln.Id);
}
[Fact]
public void VerifyWitness_WithWrongKey_ReturnsFails()
{
// Arrange
var witness = CreateTestWitness();
var (privateKey, publicKey) = CreateTestKeyPair();
var signingKey = EnvelopeKey.CreateEd25519Signer(privateKey, publicKey);
var signer = new WitnessDsseSigner();
// Sign the witness
var signResult = signer.SignWitness(witness, signingKey);
Assert.True(signResult.IsSuccess, signResult.Error);
// Create a different key for verification (different keyId)
var generator = new Ed25519KeyPairGenerator();
generator.Init(new Ed25519KeyGenerationParameters(new SecureRandom()));
var wrongKeyPair = generator.GenerateKeyPair();
var wrongPublicKey = ((Ed25519PublicKeyParameters)wrongKeyPair.Public).GetEncoded();
var wrongKey = EnvelopeKey.CreateEd25519Verifier(wrongPublicKey);
// Act - verify with wrong key (keyId won't match)
var verifyResult = signer.VerifyWitness(signResult.Envelope!, wrongKey);
// Assert
Assert.False(verifyResult.IsSuccess);
Assert.Contains("No signature found for key ID", verifyResult.Error);
}
[Fact]
public void SignWitness_ProducesDeterministicPayload()
{
// Arrange
var witness = CreateTestWitness();
var (privateKey, publicKey) = CreateTestKeyPair();
var key = EnvelopeKey.CreateEd25519Signer(privateKey, publicKey);
var signer = new WitnessDsseSigner();
// Act
var result1 = signer.SignWitness(witness, key);
var result2 = signer.SignWitness(witness, key);
// Assert: payloads should be identical (deterministic serialization)
Assert.True(result1.IsSuccess);
Assert.True(result2.IsSuccess);
Assert.Equal(result1.PayloadBytes, result2.PayloadBytes);
}
[Fact]
public void VerifyWitness_WithInvalidPayloadType_ReturnsFails()
{
// Arrange
var witness = CreateTestWitness();
var (privateKey, publicKey) = CreateTestKeyPair();
var signingKey = EnvelopeKey.CreateEd25519Signer(privateKey, publicKey);
var signer = new WitnessDsseSigner();
var signResult = signer.SignWitness(witness, signingKey);
Assert.True(signResult.IsSuccess);
// Create envelope with wrong payload type
var wrongEnvelope = new DsseEnvelope(
payloadType: "application/wrong-type",
payload: signResult.Envelope!.Payload,
signatures: signResult.Envelope.Signatures);
var verifyKey = EnvelopeKey.CreateEd25519Verifier(publicKey);
// Act
var verifyResult = signer.VerifyWitness(wrongEnvelope, verifyKey);
// Assert
Assert.False(verifyResult.IsSuccess);
Assert.Contains("Invalid payload type", verifyResult.Error);
}
[Fact]
public void RoundTrip_PreservesAllWitnessFields()
{
// Arrange
var witness = CreateTestWitness();
var (privateKey, publicKey) = CreateTestKeyPair();
var signingKey = EnvelopeKey.CreateEd25519Signer(privateKey, publicKey);
var verifyKey = EnvelopeKey.CreateEd25519Verifier(publicKey);
var signer = new WitnessDsseSigner();
// Act
var signResult = signer.SignWitness(witness, signingKey);
var verifyResult = signer.VerifyWitness(signResult.Envelope!, verifyKey);
// Assert
Assert.True(signResult.IsSuccess);
Assert.True(verifyResult.IsSuccess);
var roundTripped = verifyResult.Witness!;
Assert.Equal(witness.WitnessSchema, roundTripped.WitnessSchema);
Assert.Equal(witness.WitnessId, roundTripped.WitnessId);
Assert.Equal(witness.Artifact.SbomDigest, roundTripped.Artifact.SbomDigest);
Assert.Equal(witness.Artifact.ComponentPurl, roundTripped.Artifact.ComponentPurl);
Assert.Equal(witness.Vuln.Id, roundTripped.Vuln.Id);
Assert.Equal(witness.Vuln.Source, roundTripped.Vuln.Source);
Assert.Equal(witness.Entrypoint.Kind, roundTripped.Entrypoint.Kind);
Assert.Equal(witness.Entrypoint.Name, roundTripped.Entrypoint.Name);
Assert.Equal(witness.Entrypoint.SymbolId, roundTripped.Entrypoint.SymbolId);
Assert.Equal(witness.Sink.Symbol, roundTripped.Sink.Symbol);
Assert.Equal(witness.Sink.SymbolId, roundTripped.Sink.SymbolId);
Assert.Equal(witness.Sink.SinkType, roundTripped.Sink.SinkType);
Assert.Equal(witness.Path.Count, roundTripped.Path.Count);
Assert.Equal(witness.Evidence.CallgraphDigest, roundTripped.Evidence.CallgraphDigest);
}
private static PathWitness CreateTestWitness()
{
return new PathWitness
{
WitnessId = "wit:sha256:abc123def456",
Artifact = new WitnessArtifact
{
SbomDigest = "sha256:sbom123456",
ComponentPurl = "pkg:nuget/Newtonsoft.Json@12.0.3"
},
Vuln = new WitnessVuln
{
Id = "CVE-2024-12345",
Source = "NVD",
AffectedRange = "<=12.0.3"
},
Entrypoint = new WitnessEntrypoint
{
Kind = "http",
Name = "GET /api/users",
SymbolId = "sym:entry:001"
},
Path = new List<PathStep>
{
new PathStep
{
Symbol = "UserController.GetUsers",
SymbolId = "sym:step:001",
File = "Controllers/UserController.cs",
Line = 42
},
new PathStep
{
Symbol = "JsonConvert.DeserializeObject",
SymbolId = "sym:step:002",
File = null,
Line = null
}
},
Sink = new WitnessSink
{
Symbol = "JsonConvert.DeserializeObject<T>",
SymbolId = "sym:sink:001",
SinkType = "deserialization"
},
Evidence = new WitnessEvidence
{
CallgraphDigest = "blake3:graph123456",
SurfaceDigest = "sha256:surface789",
BuildId = "build:xyz123"
},
ObservedAt = new DateTimeOffset(2025, 12, 19, 12, 0, 0, TimeSpan.Zero)
};
}
/// <summary>
/// Fixed random generator for deterministic key generation in tests.
/// </summary>
private sealed class FixedRandomGenerator : Org.BouncyCastle.Crypto.Prng.IRandomGenerator
{
private byte _value = 0x42;
public void AddSeedMaterial(byte[] seed) { }
public void AddSeedMaterial(ReadOnlySpan<byte> seed) { }
public void AddSeedMaterial(long seed) { }
public void NextBytes(byte[] bytes) => NextBytes(bytes, 0, bytes.Length);
public void NextBytes(byte[] bytes, int start, int len)
{
for (int i = start; i < start + len; i++)
{
bytes[i] = _value++;
}
}
public void NextBytes(Span<byte> bytes)
{
for (int i = 0; i < bytes.Length; i++)
{
bytes[i] = _value++;
}
}
}
}

View File

@@ -0,0 +1,344 @@
// -----------------------------------------------------------------------------
// DriftAttestationServiceTests.cs
// Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
// Task: UI-018
// Description: Unit tests for DriftAttestationService.
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Text.Json;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using Moq;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.ReachabilityDrift.Attestation;
using Xunit;
namespace StellaOps.Scanner.ReachabilityDrift.Tests;
public sealed class DriftAttestationServiceTests
{
private readonly FakeTimeProvider _timeProvider;
private readonly Mock<IOptionsMonitor<DriftAttestationOptions>> _optionsMock;
private readonly DriftAttestationOptions _options;
public DriftAttestationServiceTests()
{
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 12, 19, 12, 0, 0, TimeSpan.Zero));
_options = new DriftAttestationOptions { Enabled = true, UseSignerService = false };
_optionsMock = new Mock<IOptionsMonitor<DriftAttestationOptions>>();
_optionsMock.Setup(x => x.CurrentValue).Returns(_options);
}
[Fact]
public async Task CreateAttestationAsync_Creates_Valid_Attestation()
{
// Arrange
var service = CreateService();
var request = CreateValidRequest();
// Act
var result = await service.CreateAttestationAsync(request);
// Assert
result.Success.Should().BeTrue();
result.AttestationDigest.Should().StartWith("sha256:");
result.EnvelopeJson.Should().NotBeNullOrEmpty();
result.KeyId.Should().Be("local-dev-key");
result.CreatedAt.Should().Be(_timeProvider.GetUtcNow());
}
[Fact]
public async Task CreateAttestationAsync_Returns_Failure_When_Disabled()
{
// Arrange
_options.Enabled = false;
var service = CreateService();
var request = CreateValidRequest();
// Act
var result = await service.CreateAttestationAsync(request);
// Assert
result.Success.Should().BeFalse();
result.Error.Should().Contain("disabled");
}
[Fact]
public async Task CreateAttestationAsync_Throws_When_Request_Null()
{
// Arrange
var service = CreateService();
// Act & Assert
await Assert.ThrowsAsync<ArgumentNullException>(
() => service.CreateAttestationAsync(null!));
}
[Fact]
public async Task CreateAttestationAsync_Envelope_Contains_Correct_PayloadType()
{
// Arrange
var service = CreateService();
var request = CreateValidRequest();
// Act
var result = await service.CreateAttestationAsync(request);
// Assert
result.EnvelopeJson.Should().Contain("application/vnd.in-toto+json");
}
[Fact]
public async Task CreateAttestationAsync_Envelope_Contains_Signature()
{
// Arrange
var service = CreateService();
var request = CreateValidRequest();
// Act
var result = await service.CreateAttestationAsync(request);
// Assert
var envelope = JsonDocument.Parse(result.EnvelopeJson!);
var signatures = envelope.RootElement.GetProperty("signatures");
signatures.GetArrayLength().Should().Be(1);
signatures[0].GetProperty("keyid").GetString().Should().Be("local-dev-key");
signatures[0].GetProperty("sig").GetString().Should().NotBeNullOrEmpty();
}
[Fact]
public async Task CreateAttestationAsync_Statement_Contains_Predicate()
{
// Arrange
var service = CreateService();
var request = CreateValidRequest();
// Act
var result = await service.CreateAttestationAsync(request);
// Assert
var envelope = JsonDocument.Parse(result.EnvelopeJson!);
var payloadBase64 = envelope.RootElement.GetProperty("payload").GetString();
var payloadBytes = Convert.FromBase64String(payloadBase64!);
var statement = JsonDocument.Parse(payloadBytes);
statement.RootElement.GetProperty("predicateType").GetString()
.Should().Be("stellaops.dev/predicates/reachability-drift@v1");
}
[Fact]
public async Task CreateAttestationAsync_Predicate_Contains_Drift_Summary()
{
// Arrange
var service = CreateService();
var request = CreateValidRequest();
// Act
var result = await service.CreateAttestationAsync(request);
// Assert
var predicate = ExtractPredicate(result.EnvelopeJson!);
predicate.GetProperty("drift").GetProperty("newlyReachableCount").GetInt32().Should().Be(1);
predicate.GetProperty("drift").GetProperty("newlyUnreachableCount").GetInt32().Should().Be(0);
}
[Fact]
public async Task CreateAttestationAsync_Predicate_Contains_Image_References()
{
// Arrange
var service = CreateService();
var request = CreateValidRequest();
// Act
var result = await service.CreateAttestationAsync(request);
// Assert
var predicate = ExtractPredicate(result.EnvelopeJson!);
predicate.GetProperty("baseImage").GetProperty("name").GetString()
.Should().Be("myregistry/myapp");
predicate.GetProperty("baseImage").GetProperty("digest").GetString()
.Should().Be("sha256:base123");
predicate.GetProperty("targetImage").GetProperty("name").GetString()
.Should().Be("myregistry/myapp");
predicate.GetProperty("targetImage").GetProperty("digest").GetString()
.Should().Be("sha256:head456");
}
[Fact]
public async Task CreateAttestationAsync_Predicate_Contains_Analysis_Metadata()
{
// Arrange
var service = CreateService();
var request = CreateValidRequest();
// Act
var result = await service.CreateAttestationAsync(request);
// Assert
var predicate = ExtractPredicate(result.EnvelopeJson!);
var analysis = predicate.GetProperty("analysis");
analysis.GetProperty("baseGraphDigest").GetString().Should().Be("sha256:graph-base");
analysis.GetProperty("headGraphDigest").GetString().Should().Be("sha256:graph-head");
analysis.GetProperty("scanner").GetProperty("name").GetString().Should().Be("StellaOps.Scanner");
}
[Fact]
public async Task CreateAttestationAsync_Produces_Deterministic_Digest_For_Same_Input()
{
// Arrange
var service = CreateService();
var request = CreateValidRequest();
// Act
var result1 = await service.CreateAttestationAsync(request);
var result2 = await service.CreateAttestationAsync(request);
// Assert
result1.AttestationDigest.Should().Be(result2.AttestationDigest);
}
[Fact]
public async Task CreateAttestationAsync_With_Signer_Service_Calls_SignAsync()
{
// Arrange
_options.UseSignerService = true;
var signerMock = new Mock<IDriftSignerClient>();
signerMock.Setup(x => x.SignAsync(It.IsAny<DriftSignerRequest>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(new DriftSignerResult
{
Success = true,
Signature = "base64-signature",
KeyId = "test-key-id"
});
var service = CreateService(signerMock.Object);
var request = CreateValidRequest();
// Act
var result = await service.CreateAttestationAsync(request);
// Assert
result.Success.Should().BeTrue();
result.KeyId.Should().Be("test-key-id");
signerMock.Verify(x => x.SignAsync(
It.Is<DriftSignerRequest>(r => r.TenantId == "tenant-1"),
It.IsAny<CancellationToken>()), Times.Once);
}
[Fact]
public async Task CreateAttestationAsync_Returns_Failure_When_Signer_Fails()
{
// Arrange
_options.UseSignerService = true;
var signerMock = new Mock<IDriftSignerClient>();
signerMock.Setup(x => x.SignAsync(It.IsAny<DriftSignerRequest>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(new DriftSignerResult
{
Success = false,
Error = "Key not found"
});
var service = CreateService(signerMock.Object);
var request = CreateValidRequest();
// Act
var result = await service.CreateAttestationAsync(request);
// Assert
result.Success.Should().BeFalse();
result.Error.Should().Contain("Key not found");
}
private DriftAttestationService CreateService(IDriftSignerClient? signerClient = null)
{
return new DriftAttestationService(
signerClient,
_optionsMock.Object,
_timeProvider,
NullLogger<DriftAttestationService>.Instance);
}
private DriftAttestationRequest CreateValidRequest()
{
var driftResult = new ReachabilityDriftResult
{
Id = Guid.NewGuid(),
BaseScanId = "scan-base-123",
HeadScanId = "scan-head-456",
Language = "csharp",
DetectedAt = _timeProvider.GetUtcNow(),
NewlyReachable = ImmutableArray.Create(CreateDriftedSink()),
NewlyUnreachable = ImmutableArray<DriftedSink>.Empty,
ResultDigest = "sha256:result-digest"
};
return new DriftAttestationRequest
{
TenantId = "tenant-1",
DriftResult = driftResult,
BaseImage = new ImageRef
{
Name = "myregistry/myapp",
Digest = "sha256:base123",
Tag = "v1.0.0"
},
TargetImage = new ImageRef
{
Name = "myregistry/myapp",
Digest = "sha256:head456",
Tag = "v1.1.0"
},
BaseGraphDigest = "sha256:graph-base",
HeadGraphDigest = "sha256:graph-head"
};
}
private static DriftedSink CreateDriftedSink()
{
return new DriftedSink
{
Id = Guid.NewGuid(),
SinkNodeId = "sink-node-1",
Symbol = "SqlCommand.ExecuteNonQuery",
SinkCategory = SinkCategory.SqlInjection,
Direction = DriftDirection.BecameReachable,
Cause = new DriftCause
{
Kind = DriftCauseKind.GuardRemoved,
Description = "Security guard was removed from the call path"
},
Path = new CompressedPath
{
Entrypoint = new PathNode
{
NodeId = "entry-1",
Symbol = "Program.Main",
IsChanged = false
},
Sink = new PathNode
{
NodeId = "sink-1",
Symbol = "SqlCommand.ExecuteNonQuery",
IsChanged = false
},
KeyNodes = ImmutableArray<PathNode>.Empty,
IntermediateCount = 3
}
};
}
private static JsonElement ExtractPredicate(string envelopeJson)
{
var envelope = JsonDocument.Parse(envelopeJson);
var payloadBase64 = envelope.RootElement.GetProperty("payload").GetString();
var payloadBytes = Convert.FromBase64String(payloadBase64!);
var statement = JsonDocument.Parse(payloadBytes);
return statement.RootElement.GetProperty("predicate");
}
}

View File

@@ -12,6 +12,10 @@
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.1" />
<PackageReference Include="Moq" Version="4.20.72" />
<PackageReference Include="FluentAssertions" Version="7.2.0" />
<PackageReference Include="Microsoft.Extensions.Time.Testing" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
</ItemGroup>
<ItemGroup>

View File

@@ -0,0 +1,22 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.*" />
<PackageReference Include="Moq" Version="4.*" />
<PackageReference Include="xunit" Version="2.*" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.*" PrivateAssets="all" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Scanner.Triage/StellaOps.Scanner.Triage.csproj" />
<ProjectReference Include="../../../__Libraries/StellaOps.Infrastructure.Postgres.Testing/StellaOps.Infrastructure.Postgres.Testing.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,20 @@
using System.Reflection;
using StellaOps.Infrastructure.Postgres.Testing;
namespace StellaOps.Scanner.Triage.Tests;
/// <summary>
/// PostgreSQL test fixture for Triage integration tests.
/// Uses Testcontainers to spin up a real PostgreSQL instance.
/// </summary>
public sealed class TriagePostgresFixture : PostgresIntegrationFixture, ICollectionFixture<TriagePostgresFixture>
{
protected override Assembly? GetMigrationAssembly() => typeof(TriageDbContext).Assembly;
protected override string GetModuleName() => "Scanner.Triage";
}
[CollectionDefinition("triage-postgres")]
public sealed class TriagePostgresCollection : ICollectionFixture<TriagePostgresFixture>
{
}

View File

@@ -0,0 +1,225 @@
using Microsoft.EntityFrameworkCore;
using StellaOps.Scanner.Triage.Entities;
using Xunit;
namespace StellaOps.Scanner.Triage.Tests;
/// <summary>
/// Query performance validation tests for the Triage schema.
/// These tests verify that EXPLAIN ANALYZE results show efficient query plans.
/// </summary>
[Collection("triage-postgres")]
public sealed class TriageQueryPerformanceTests : IAsyncLifetime
{
private readonly TriagePostgresFixture _fixture;
private TriageDbContext? _context;
public TriageQueryPerformanceTests(TriagePostgresFixture fixture)
{
_fixture = fixture;
}
public Task InitializeAsync()
{
var optionsBuilder = new DbContextOptionsBuilder<TriageDbContext>()
.UseNpgsql(_fixture.ConnectionString);
_context = new TriageDbContext(optionsBuilder.Options);
return Task.CompletedTask;
}
public async Task DisposeAsync()
{
if (_context != null)
{
await _context.DisposeAsync();
}
}
private TriageDbContext Context => _context ?? throw new InvalidOperationException("Context not initialized");
[Fact]
public async Task Finding_Lookup_By_CVE_Uses_Index()
{
// Arrange
await Context.Database.EnsureCreatedAsync();
await SeedTestData(100);
// Act - explain analyze a CVE lookup query
var explainPlan = await Context.Database.SqlQueryRaw<string>(
"EXPLAIN ANALYZE SELECT * FROM triage_finding WHERE cve_id = 'CVE-2021-23337'")
.ToListAsync();
var planText = string.Join("\n", explainPlan);
// Assert - verify the query uses an index scan
Assert.True(
planText.Contains("Index", StringComparison.OrdinalIgnoreCase),
$"Expected index scan in query plan, got: {planText}");
}
[Fact]
public async Task Finding_Lookup_By_Last_Seen_Uses_Index()
{
// Arrange
await Context.Database.EnsureCreatedAsync();
await SeedTestData(100);
// Act
var explainPlan = await Context.Database.SqlQueryRaw<string>(
"EXPLAIN ANALYZE SELECT * FROM triage_finding WHERE last_seen_at > NOW() - INTERVAL '7 days' ORDER BY last_seen_at DESC LIMIT 10")
.ToListAsync();
var planText = string.Join("\n", explainPlan);
// Assert
Assert.True(
planText.Contains("Index", StringComparison.OrdinalIgnoreCase),
$"Expected index usage in query plan for last_seen_at, got: {planText}");
}
[Fact]
public async Task RiskResult_Lookup_By_Finding_Uses_Index()
{
// Arrange
await Context.Database.EnsureCreatedAsync();
var findings = await SeedTestData(50);
await SeedRiskResults(findings);
var targetFindingId = findings.First().Id;
// Act
var explainPlan = await Context.Database.SqlQueryRaw<string>(
$"EXPLAIN ANALYZE SELECT * FROM triage_risk_result WHERE finding_id = '{targetFindingId}'")
.ToListAsync();
var planText = string.Join("\n", explainPlan);
// Assert
Assert.True(
planText.Contains("Index", StringComparison.OrdinalIgnoreCase),
$"Expected index scan for finding_id lookup, got: {planText}");
}
[Fact]
public async Task Decision_Active_Filter_Uses_Partial_Index()
{
// Arrange
await Context.Database.EnsureCreatedAsync();
var findings = await SeedTestData(50);
await SeedDecisions(findings);
var targetFindingId = findings.First().Id;
// Act - query for active decisions (revoked_at IS NULL)
var explainPlan = await Context.Database.SqlQueryRaw<string>(
$"EXPLAIN ANALYZE SELECT * FROM triage_decision WHERE finding_id = '{targetFindingId}' AND revoked_at IS NULL")
.ToListAsync();
var planText = string.Join("\n", explainPlan);
// Assert - either index scan or we accept seq scan on small data
Assert.True(
planText.Contains("Scan", StringComparison.OrdinalIgnoreCase),
$"Expected some scan type in query plan, got: {planText}");
}
[Fact]
public async Task Lane_Aggregation_Query_Is_Efficient()
{
// Arrange
await Context.Database.EnsureCreatedAsync();
var findings = await SeedTestData(100);
await SeedRiskResults(findings);
// Act - aggregate by lane
var explainPlan = await Context.Database.SqlQueryRaw<string>(
"EXPLAIN ANALYZE SELECT lane, COUNT(*) FROM triage_risk_result GROUP BY lane")
.ToListAsync();
var planText = string.Join("\n", explainPlan);
// Assert - should complete efficiently
Assert.True(
planText.Contains("Aggregate", StringComparison.OrdinalIgnoreCase) ||
planText.Contains("Group", StringComparison.OrdinalIgnoreCase) ||
planText.Contains("Scan", StringComparison.OrdinalIgnoreCase),
$"Expected aggregate or group in query plan, got: {planText}");
}
private async Task<List<TriageFinding>> SeedTestData(int count)
{
var findings = new List<TriageFinding>();
for (int i = 0; i < count; i++)
{
var finding = new TriageFinding
{
Id = Guid.NewGuid(),
AssetId = Guid.NewGuid(),
EnvironmentId = i % 5 == 0 ? Guid.NewGuid() : null,
AssetLabel = $"prod/service-{i}:1.0.{i}",
Purl = $"pkg:npm/package-{i}@1.0.{i}",
CveId = i % 3 == 0 ? $"CVE-2021-{23337 + i}" : null,
RuleId = i % 3 != 0 ? $"RULE-{i:D4}" : null,
FirstSeenAt = DateTimeOffset.UtcNow.AddDays(-i),
LastSeenAt = DateTimeOffset.UtcNow.AddHours(-i)
};
findings.Add(finding);
}
Context.Findings.AddRange(findings);
await Context.SaveChangesAsync();
return findings;
}
private async Task SeedRiskResults(List<TriageFinding> findings)
{
var lanes = Enum.GetValues<TriageLane>();
var verdicts = Enum.GetValues<TriageVerdict>();
foreach (var finding in findings)
{
var riskResult = new TriageRiskResult
{
Id = Guid.NewGuid(),
FindingId = finding.Id,
PolicyId = "security-policy-v1",
PolicyVersion = "1.0.0",
InputsHash = Guid.NewGuid().ToString("N"),
Score = Random.Shared.Next(0, 100),
Verdict = verdicts[Random.Shared.Next(verdicts.Length)],
Lane = lanes[Random.Shared.Next(lanes.Length)],
Why = "Auto-generated test risk result",
ComputedAt = DateTimeOffset.UtcNow
};
Context.RiskResults.Add(riskResult);
}
await Context.SaveChangesAsync();
}
private async Task SeedDecisions(List<TriageFinding> findings)
{
var kinds = Enum.GetValues<TriageDecisionKind>();
foreach (var finding in findings.Take(findings.Count / 2))
{
var decision = new TriageDecision
{
Id = Guid.NewGuid(),
FindingId = finding.Id,
Kind = kinds[Random.Shared.Next(kinds.Length)],
ReasonCode = "TEST_REASON",
ActorSubject = "user:test@example.com",
CreatedAt = DateTimeOffset.UtcNow
};
Context.Decisions.Add(decision);
}
await Context.SaveChangesAsync();
}
}

View File

@@ -0,0 +1,286 @@
using Microsoft.EntityFrameworkCore;
using StellaOps.Scanner.Triage.Entities;
using Xunit;
namespace StellaOps.Scanner.Triage.Tests;
/// <summary>
/// Integration tests for the Triage schema using Testcontainers.
/// </summary>
[Collection("triage-postgres")]
public sealed class TriageSchemaIntegrationTests : IAsyncLifetime
{
private readonly TriagePostgresFixture _fixture;
private TriageDbContext? _context;
public TriageSchemaIntegrationTests(TriagePostgresFixture fixture)
{
_fixture = fixture;
}
public Task InitializeAsync()
{
var optionsBuilder = new DbContextOptionsBuilder<TriageDbContext>()
.UseNpgsql(_fixture.ConnectionString);
_context = new TriageDbContext(optionsBuilder.Options);
return Task.CompletedTask;
}
public async Task DisposeAsync()
{
if (_context != null)
{
await _context.DisposeAsync();
}
}
private TriageDbContext Context => _context ?? throw new InvalidOperationException("Context not initialized");
[Fact]
public async Task Schema_Creates_Successfully()
{
// Arrange / Act
await Context.Database.EnsureCreatedAsync();
// Assert - verify tables exist by querying the metadata
var findingsCount = await Context.Findings.CountAsync();
var decisionsCount = await Context.Decisions.CountAsync();
Assert.Equal(0, findingsCount);
Assert.Equal(0, decisionsCount);
}
[Fact]
public async Task Can_Create_And_Query_TriageFinding()
{
// Arrange
await Context.Database.EnsureCreatedAsync();
var finding = new TriageFinding
{
Id = Guid.NewGuid(),
AssetId = Guid.NewGuid(),
AssetLabel = "prod/api-gateway:1.2.3",
Purl = "pkg:npm/lodash@4.17.20",
CveId = "CVE-2021-23337",
FirstSeenAt = DateTimeOffset.UtcNow,
LastSeenAt = DateTimeOffset.UtcNow
};
// Act
Context.Findings.Add(finding);
await Context.SaveChangesAsync();
// Assert
var retrieved = await Context.Findings.FirstOrDefaultAsync(f => f.Id == finding.Id);
Assert.NotNull(retrieved);
Assert.Equal(finding.AssetLabel, retrieved.AssetLabel);
Assert.Equal(finding.Purl, retrieved.Purl);
Assert.Equal(finding.CveId, retrieved.CveId);
}
[Fact]
public async Task Can_Create_TriageDecision_With_Finding()
{
// Arrange
await Context.Database.EnsureCreatedAsync();
var finding = new TriageFinding
{
Id = Guid.NewGuid(),
AssetId = Guid.NewGuid(),
AssetLabel = "prod/api-gateway:1.2.3",
Purl = "pkg:npm/lodash@4.17.20",
CveId = "CVE-2021-23337"
};
Context.Findings.Add(finding);
await Context.SaveChangesAsync();
var decision = new TriageDecision
{
Id = Guid.NewGuid(),
FindingId = finding.Id,
Kind = TriageDecisionKind.MuteReach,
ReasonCode = "NOT_REACHABLE",
Note = "Code path is not reachable per RichGraph analysis",
ActorSubject = "user:test@example.com",
ActorDisplay = "Test User",
CreatedAt = DateTimeOffset.UtcNow
};
// Act
Context.Decisions.Add(decision);
await Context.SaveChangesAsync();
// Assert
var retrieved = await Context.Decisions
.Include(d => d.Finding)
.FirstOrDefaultAsync(d => d.Id == decision.Id);
Assert.NotNull(retrieved);
Assert.Equal(TriageDecisionKind.MuteReach, retrieved.Kind);
Assert.Equal("NOT_REACHABLE", retrieved.ReasonCode);
Assert.NotNull(retrieved.Finding);
Assert.Equal(finding.Purl, retrieved.Finding!.Purl);
}
[Fact]
public async Task Can_Create_TriageRiskResult_With_Finding()
{
// Arrange
await Context.Database.EnsureCreatedAsync();
var finding = new TriageFinding
{
Id = Guid.NewGuid(),
AssetId = Guid.NewGuid(),
AssetLabel = "prod/api-gateway:1.2.3",
Purl = "pkg:npm/lodash@4.17.20",
CveId = "CVE-2021-23337"
};
Context.Findings.Add(finding);
await Context.SaveChangesAsync();
var riskResult = new TriageRiskResult
{
Id = Guid.NewGuid(),
FindingId = finding.Id,
PolicyId = "security-policy-v1",
PolicyVersion = "1.0.0",
InputsHash = "abc123def456",
Score = 75,
Verdict = TriageVerdict.Block,
Lane = TriageLane.Blocked,
Why = "High-severity CVE with network exposure",
ComputedAt = DateTimeOffset.UtcNow
};
// Act
Context.RiskResults.Add(riskResult);
await Context.SaveChangesAsync();
// Assert
var retrieved = await Context.RiskResults
.Include(r => r.Finding)
.FirstOrDefaultAsync(r => r.Id == riskResult.Id);
Assert.NotNull(retrieved);
Assert.Equal(75, retrieved.Score);
Assert.Equal(TriageVerdict.Block, retrieved.Verdict);
Assert.Equal(TriageLane.Blocked, retrieved.Lane);
Assert.NotNull(retrieved.Finding);
}
[Fact]
public async Task Finding_Cascade_Deletes_Related_Entities()
{
// Arrange
await Context.Database.EnsureCreatedAsync();
var finding = new TriageFinding
{
Id = Guid.NewGuid(),
AssetId = Guid.NewGuid(),
AssetLabel = "prod/api:1.0",
Purl = "pkg:npm/test@1.0.0",
CveId = "CVE-2024-0001"
};
Context.Findings.Add(finding);
await Context.SaveChangesAsync();
var decision = new TriageDecision
{
FindingId = finding.Id,
Kind = TriageDecisionKind.Ack,
ReasonCode = "ACKNOWLEDGED",
ActorSubject = "user:admin"
};
var riskResult = new TriageRiskResult
{
FindingId = finding.Id,
PolicyId = "policy-v1",
PolicyVersion = "1.0",
InputsHash = "hash123",
Score = 50,
Why = "Medium risk"
};
Context.Decisions.Add(decision);
Context.RiskResults.Add(riskResult);
await Context.SaveChangesAsync();
// Verify entities exist
Assert.Single(await Context.Decisions.Where(d => d.FindingId == finding.Id).ToListAsync());
Assert.Single(await Context.RiskResults.Where(r => r.FindingId == finding.Id).ToListAsync());
// Act - delete the finding
Context.Findings.Remove(finding);
await Context.SaveChangesAsync();
// Assert - related entities should be cascade deleted
Assert.Empty(await Context.Decisions.Where(d => d.FindingId == finding.Id).ToListAsync());
Assert.Empty(await Context.RiskResults.Where(r => r.FindingId == finding.Id).ToListAsync());
}
[Fact]
public async Task Unique_Constraint_Prevents_Duplicate_Findings()
{
// Arrange
await Context.Database.EnsureCreatedAsync();
var assetId = Guid.NewGuid();
var envId = Guid.NewGuid();
const string purl = "pkg:npm/lodash@4.17.20";
const string cveId = "CVE-2021-23337";
var finding1 = new TriageFinding
{
AssetId = assetId,
EnvironmentId = envId,
AssetLabel = "prod/api:1.0",
Purl = purl,
CveId = cveId
};
Context.Findings.Add(finding1);
await Context.SaveChangesAsync();
var finding2 = new TriageFinding
{
AssetId = assetId,
EnvironmentId = envId,
AssetLabel = "prod/api:1.0",
Purl = purl,
CveId = cveId
};
Context.Findings.Add(finding2);
// Act & Assert - should throw due to unique constraint
await Assert.ThrowsAsync<DbUpdateException>(async () =>
{
await Context.SaveChangesAsync();
});
}
[Fact]
public async Task Indexes_Exist_For_Performance()
{
// Arrange
await Context.Database.EnsureCreatedAsync();
// Act - query for indexes on triage_finding table
var indexes = await Context.Database.SqlQueryRaw<string>(
"SELECT indexname FROM pg_indexes WHERE tablename = 'triage_finding'")
.ToListAsync();
// Assert - verify expected indexes exist
Assert.Contains(indexes, i => i.Contains("last_seen"));
Assert.Contains(indexes, i => i.Contains("purl"));
}
}

View File

@@ -74,6 +74,22 @@ public static class PredicateTypes
/// </summary>
public const string StellaOpsReachabilityWitness = "stella.ops/reachabilityWitness@v1";
/// <summary>
/// StellaOps Path Witness predicate type for DSSE attestations.
/// Sprint: SPRINT_3700_0001_0001 (WIT-007C)
/// Cryptographic proof of a specific entrypoint → sink path.
/// Used by PathWitnessBuilder to sign individual path witnesses.
/// </summary>
public const string StellaOpsPathWitness = "stella.ops/pathWitness@v1";
/// <summary>
/// StellaOps Reachability Drift predicate type for DSSE attestations.
/// Sprint: SPRINT_3600_0004_0001_ui_evidence_chain (UI-014)
/// Cryptographic proof of reachability changes between scans.
/// Used by DriftAttestationService to sign drift analysis results.
/// </summary>
public const string StellaOpsReachabilityDrift = "stellaops.dev/predicates/reachability-drift@v1";
/// <summary>
/// CycloneDX SBOM predicate type.
/// </summary>
@@ -123,7 +139,9 @@ public static class PredicateTypes
return predicateType == StellaOpsGraph
|| predicateType == StellaOpsReplay
|| predicateType == StellaOpsEvidence
|| predicateType == StellaOpsReachabilityWitness;
|| predicateType == StellaOpsReachabilityWitness
|| predicateType == StellaOpsPathWitness
|| predicateType == StellaOpsReachabilityDrift;
}
/// <summary>
@@ -147,6 +165,8 @@ public static class PredicateTypes
StellaOpsVexDecision,
StellaOpsGraph,
StellaOpsReachabilityWitness,
StellaOpsPathWitness,
StellaOpsReachabilityDrift,
// Third-party types
CycloneDxSbom,
SpdxSbom,

View File

@@ -0,0 +1,258 @@
// -----------------------------------------------------------------------------
// NativeUnknownClassifierTests.cs
// Sprint: SPRINT_3500_0013_0001_native_unknowns
// Task: NUC-005
// Description: Unit tests for NativeUnknownClassifier service.
// -----------------------------------------------------------------------------
using FluentAssertions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Unknowns.Core.Models;
using StellaOps.Unknowns.Core.Services;
using Xunit;
namespace StellaOps.Unknowns.Core.Tests.Services;
public sealed class NativeUnknownClassifierTests
{
private readonly FakeTimeProvider _timeProvider;
private readonly NativeUnknownClassifier _classifier;
public NativeUnknownClassifierTests()
{
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 12, 19, 12, 0, 0, TimeSpan.Zero));
_classifier = new NativeUnknownClassifier(_timeProvider, "test-classifier");
}
[Fact]
public void ClassifyMissingBuildId_Creates_Unknown_With_Correct_Properties()
{
// Arrange
var context = new NativeUnknownContext
{
Format = "elf",
FilePath = "/usr/lib/libfoo.so.1",
Architecture = "x86_64",
LayerDigest = "sha256:abc123",
LayerIndex = 2,
FileDigest = "sha256:def456",
FileSize = 1024000
};
// Act
var unknown = _classifier.ClassifyMissingBuildId("tenant-1", context);
// Assert
unknown.Should().NotBeNull();
unknown.Kind.Should().Be(UnknownKind.MissingBuildId);
unknown.SubjectType.Should().Be(UnknownSubjectType.Binary);
unknown.SubjectRef.Should().Be("/usr/lib/libfoo.so.1");
unknown.TenantId.Should().Be("tenant-1");
unknown.Severity.Should().Be(UnknownSeverity.Medium);
unknown.CreatedBy.Should().Be("test-classifier");
unknown.ValidFrom.Should().Be(_timeProvider.GetUtcNow());
unknown.SysFrom.Should().Be(_timeProvider.GetUtcNow());
unknown.Context.Should().NotBeNull();
}
[Fact]
public void ClassifyUnknownBuildId_Creates_Unknown_With_BuildId_Reference()
{
// Arrange
var context = new NativeUnknownContext
{
Format = "elf",
FilePath = "/usr/lib/libbar.so.2",
BuildId = "gnu-build-id:abc123def456",
Architecture = "aarch64",
LayerDigest = "sha256:xyz789"
};
// Act
var unknown = _classifier.ClassifyUnknownBuildId("tenant-2", context);
// Assert
unknown.Should().NotBeNull();
unknown.Kind.Should().Be(UnknownKind.UnknownBuildId);
unknown.SubjectRef.Should().Be("gnu-build-id:abc123def456");
unknown.Severity.Should().Be(UnknownSeverity.Low);
}
[Fact]
public void ClassifyUnknownBuildId_Throws_When_BuildId_Missing()
{
// Arrange
var context = new NativeUnknownContext
{
Format = "elf",
FilePath = "/usr/lib/libfoo.so"
};
// Act & Assert
var act = () => _classifier.ClassifyUnknownBuildId("tenant-1", context);
act.Should().Throw<ArgumentException>()
.WithMessage("*BuildId*");
}
[Fact]
public void ClassifyUnresolvedLibrary_Creates_Unknown_With_Import_Info()
{
// Arrange
var context = new NativeUnknownContext
{
Format = "elf",
FilePath = "/usr/bin/myapp",
UnresolvedImport = "libcrypto.so.1.1",
Architecture = "x86_64"
};
// Act
var unknown = _classifier.ClassifyUnresolvedLibrary("tenant-3", context);
// Assert
unknown.Should().NotBeNull();
unknown.Kind.Should().Be(UnknownKind.UnresolvedNativeLibrary);
unknown.SubjectRef.Should().Contain("libcrypto.so.1.1");
unknown.Severity.Should().Be(UnknownSeverity.Medium);
}
[Fact]
public void ClassifyHeuristicDependency_Creates_Unknown_With_Confidence()
{
// Arrange
var context = new NativeUnknownContext
{
Format = "elf",
FilePath = "/usr/bin/dynamic-loader",
HeuristicPattern = "dlopen(\"libplugin-%s.so\", RTLD_NOW)",
HeuristicConfidence = 0.75,
Architecture = "x86_64"
};
// Act
var unknown = _classifier.ClassifyHeuristicDependency("tenant-4", context);
// Assert
unknown.Should().NotBeNull();
unknown.Kind.Should().Be(UnknownKind.HeuristicDependency);
unknown.Severity.Should().Be(UnknownSeverity.Low);
}
[Fact]
public void ClassifyUnsupportedFormat_Creates_Unknown_With_Reason()
{
// Arrange
var context = new NativeUnknownContext
{
Format = "pe",
FilePath = "C:\\Windows\\System32\\legacy.dll",
UnsupportedReason = "PE/COFF format with non-standard overlay",
Architecture = "i686"
};
// Act
var unknown = _classifier.ClassifyUnsupportedFormat("tenant-5", context);
// Assert
unknown.Should().NotBeNull();
unknown.Kind.Should().Be(UnknownKind.UnsupportedBinaryFormat);
unknown.Severity.Should().Be(UnknownSeverity.Info);
}
[Fact]
public void All_Classifications_Have_Unique_Subject_Hashes()
{
// Arrange
var context1 = new NativeUnknownContext { Format = "elf", FilePath = "/lib/a.so", LayerDigest = "sha256:layer1" };
var context2 = new NativeUnknownContext { Format = "elf", FilePath = "/lib/b.so", LayerDigest = "sha256:layer1" };
var context3 = new NativeUnknownContext { Format = "elf", FilePath = "/lib/a.so", LayerDigest = "sha256:layer2" };
// Act
var unknown1 = _classifier.ClassifyMissingBuildId("tenant", context1);
var unknown2 = _classifier.ClassifyMissingBuildId("tenant", context2);
var unknown3 = _classifier.ClassifyMissingBuildId("tenant", context3);
// Assert - Different files or layers should produce different hashes
unknown1.SubjectHash.Should().NotBe(unknown2.SubjectHash);
unknown1.SubjectHash.Should().NotBe(unknown3.SubjectHash);
}
[Fact]
public void Same_Binary_Produces_Same_Subject_Hash()
{
// Arrange - Same file path and layer
var context1 = new NativeUnknownContext { Format = "elf", FilePath = "/lib/same.so", LayerDigest = "sha256:samelayer" };
var context2 = new NativeUnknownContext { Format = "elf", FilePath = "/lib/same.so", LayerDigest = "sha256:samelayer" };
// Act
var unknown1 = _classifier.ClassifyMissingBuildId("tenant", context1);
var unknown2 = _classifier.ClassifyMissingBuildId("tenant", context2);
// Assert - Same file+layer should produce same hash (for deduplication)
unknown1.SubjectHash.Should().Be(unknown2.SubjectHash);
}
[Theory]
[InlineData(null)]
[InlineData("")]
[InlineData(" ")]
public void ClassifyMissingBuildId_Throws_When_TenantId_Invalid(string? tenantId)
{
// Arrange
var context = new NativeUnknownContext { Format = "elf", FilePath = "/lib/foo.so" };
// Act & Assert
var act = () => _classifier.ClassifyMissingBuildId(tenantId!, context);
act.Should().Throw<ArgumentException>();
}
[Fact]
public void ClassifyMissingBuildId_Throws_When_Context_Null()
{
// Act & Assert
var act = () => _classifier.ClassifyMissingBuildId("tenant", null!);
act.Should().Throw<ArgumentNullException>();
}
[Fact]
public void Constructor_Throws_When_TimeProvider_Null()
{
// Act & Assert
var act = () => new NativeUnknownClassifier(null!, "test");
act.Should().Throw<ArgumentNullException>();
}
[Theory]
[InlineData(null)]
[InlineData("")]
[InlineData(" ")]
public void Constructor_Throws_When_CreatedBy_Invalid(string? createdBy)
{
// Act & Assert
var act = () => new NativeUnknownClassifier(TimeProvider.System, createdBy!);
act.Should().Throw<ArgumentException>();
}
[Fact]
public void Context_Is_Serialized_To_JsonDocument()
{
// Arrange
var context = new NativeUnknownContext
{
Format = "macho",
FilePath = "/Applications/MyApp.app/Contents/MacOS/MyApp",
BuildId = "macho-uuid:12345678-1234-5678-9abc-def012345678",
Architecture = "arm64"
};
// Act
var unknown = _classifier.ClassifyUnknownBuildId("tenant", context);
// Assert
unknown.Context.Should().NotBeNull();
var root = unknown.Context!.RootElement;
root.GetProperty("format").GetString().Should().Be("macho");
root.GetProperty("filePath").GetString().Should().Contain("MyApp");
root.GetProperty("architecture").GetString().Should().Be("arm64");
}
}

View File

@@ -0,0 +1,34 @@
<?xml version="1.0" ?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<IsPackable>false</IsPackable>
<IsTestProject>true</IsTestProject>
<RootNamespace>StellaOps.Unknowns.Core.Tests</RootNamespace>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="xunit" Version="2.9.3" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.0.1">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" Version="6.0.4">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="Moq" Version="4.20.72" />
<PackageReference Include="FluentAssertions" Version="7.2.0" />
<PackageReference Include="Microsoft.Extensions.Time.Testing" Version="10.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Unknowns.Core/StellaOps.Unknowns.Core.csproj" />
</ItemGroup>
</Project>

View File

@@ -18,6 +18,45 @@ Design and build the StellaOps web user experience that surfaces backend capabil
- `docs/` — UX specs and mockups (to be added).
- `ops/` — Web deployment manifests for air-gapped environments (future).
## Reachability Drift UI (Sprint 3600)
### Components
- **PathViewerComponent** (`app/features/reachability/components/path-viewer/`) - Interactive call path visualization
- Displays entrypoint → key nodes → sink paths
- Highlights changed nodes with change kind indicators
- Supports collapse/expand for long paths
- **RiskDriftCardComponent** (`app/features/reachability/components/risk-drift-card/`) - Summary card for drift analysis
- Shows newly reachable / mitigated path counts
- Displays associated CVEs
- Action buttons for drill-down
### Models
- `PathNode` - Node in a reachability path with symbol, file, line
- `CompressedPath` - Compact path representation
- `DriftedSink` - Sink with reachability change and cause
- `DriftCause` - Explanation of why reachability changed
### Services
- `DriftApiService` (`app/core/services/drift-api.service.ts`) - API client for drift endpoints
- Mock implementations available for offline development
### Integration Points
- Scan detail page includes PathViewer for reachability visualization
- Drift results linked to DSSE attestations for evidence chain
- Path export supports JSON and SARIF formats
## Witness UI (Sprint 3700) - TODO
### Planned Components
- **WitnessModalComponent** - Modal for viewing witness details
- **PathVisualizationComponent** - Detailed path rendering with gates
- **ConfidenceTierBadgeComponent** - Tier indicators (Confirmed/Likely/Present/Unreachable)
- **GateBadgeComponent** - Auth gate visualization
### Planned Services
- `witness.service.ts` - API client for witness endpoints
- Browser-based Ed25519 signature verification
## Coordination
- Sync with DevEx for project scaffolding and build pipelines.
- Partner with Docs Guild to translate UX decisions into operator guides.

View File

@@ -0,0 +1,288 @@
/**
* Witness API client service.
* Sprint: SPRINT_3700_0005_0001_witness_ui_cli (UI-005)
*/
import { Injectable, InjectionToken, inject } from '@angular/core';
import { HttpClient, HttpParams } from '@angular/common/http';
import { Observable, of, delay, map } from 'rxjs';
import {
ReachabilityWitness,
WitnessListResponse,
WitnessVerificationResult,
StateFlipSummary,
ConfidenceTier,
PathNode,
GateInfo,
CallPathNode,
} from './witness.models';
export interface WitnessApi {
/**
* Get a witness by ID.
*/
getWitness(witnessId: string): Observable<ReachabilityWitness>;
/**
* List witnesses for a scan.
*/
listWitnesses(
scanId: string,
options?: { page?: number; pageSize?: number; tier?: ConfidenceTier }
): Observable<WitnessListResponse>;
/**
* Verify a witness signature.
*/
verifyWitness(witnessId: string): Observable<WitnessVerificationResult>;
/**
* Get witnesses for a specific vulnerability.
*/
getWitnessesForVuln(vulnId: string): Observable<ReachabilityWitness[]>;
/**
* Get state flip summary for a scan (for PR gates).
*/
getStateFlipSummary(scanId: string): Observable<StateFlipSummary>;
/**
* Download witness as JSON.
*/
downloadWitnessJson(witnessId: string): Observable<Blob>;
/**
* Export witnesses as SARIF.
*/
exportSarif(scanId: string): Observable<Blob>;
}
export const WITNESS_API = new InjectionToken<WitnessApi>('WITNESS_API');
/**
* HTTP implementation of WitnessApi.
*/
@Injectable({ providedIn: 'root' })
export class WitnessHttpClient implements WitnessApi {
private readonly http = inject(HttpClient);
private readonly baseUrl = '/api/v1/witnesses';
getWitness(witnessId: string): Observable<ReachabilityWitness> {
return this.http.get<ReachabilityWitness>(`${this.baseUrl}/${witnessId}`);
}
listWitnesses(
scanId: string,
options?: { page?: number; pageSize?: number; tier?: ConfidenceTier }
): Observable<WitnessListResponse> {
let params = new HttpParams().set('scanId', scanId);
if (options?.page) {
params = params.set('page', options.page.toString());
}
if (options?.pageSize) {
params = params.set('pageSize', options.pageSize.toString());
}
if (options?.tier) {
params = params.set('tier', options.tier);
}
return this.http.get<WitnessListResponse>(this.baseUrl, { params });
}
verifyWitness(witnessId: string): Observable<WitnessVerificationResult> {
return this.http.post<WitnessVerificationResult>(
`${this.baseUrl}/${witnessId}/verify`,
{}
);
}
getWitnessesForVuln(vulnId: string): Observable<ReachabilityWitness[]> {
return this.http.get<ReachabilityWitness[]>(`${this.baseUrl}/by-vuln/${vulnId}`);
}
getStateFlipSummary(scanId: string): Observable<StateFlipSummary> {
return this.http.get<StateFlipSummary>(`${this.baseUrl}/state-flips/${scanId}`);
}
downloadWitnessJson(witnessId: string): Observable<Blob> {
return this.http.get(`${this.baseUrl}/${witnessId}/download`, {
responseType: 'blob',
});
}
exportSarif(scanId: string): Observable<Blob> {
return this.http.get(`${this.baseUrl}/export/sarif`, {
params: new HttpParams().set('scanId', scanId),
responseType: 'blob',
});
}
}
// Mock data for development
const MOCK_WITNESSES: ReachabilityWitness[] = [
{
witnessId: 'wit-001',
scanId: 'scan-001',
tenantId: 'tenant-1',
vulnId: 'vuln-001',
cveId: 'CVE-2024-12345',
packageName: 'Newtonsoft.Json',
packageVersion: '12.0.3',
purl: 'pkg:nuget/Newtonsoft.Json@12.0.3',
confidenceTier: 'confirmed',
confidenceScore: 0.95,
isReachable: true,
callPath: [
{ nodeId: 'n1', symbol: 'UserController.GetUser', file: 'Controllers/UserController.cs', line: 42 },
{ nodeId: 'n2', symbol: 'UserService.GetUserById', file: 'Services/UserService.cs', line: 88 },
{ nodeId: 'n3', symbol: 'JsonConvert.DeserializeObject<User>', package: 'Newtonsoft.Json' },
],
entrypoint: {
nodeId: 'n1',
symbol: 'UserController.GetUser',
file: 'Controllers/UserController.cs',
line: 42,
httpRoute: '/api/users/{id}',
httpMethod: 'GET',
},
sink: {
nodeId: 'n3',
symbol: 'JsonConvert.DeserializeObject<User>',
package: 'Newtonsoft.Json',
method: 'DeserializeObject',
},
gates: [
{
gateType: 'auth',
symbol: '[Authorize]',
confidence: 0.95,
description: 'Authorization attribute on controller',
},
],
evidence: {
callGraphHash: 'blake3:a1b2c3d4e5f6...',
surfaceHash: 'sha256:9f8e7d6c5b4a...',
analysisMethod: 'static',
toolVersion: '1.0.0',
},
signature: {
algorithm: 'ed25519',
keyId: 'attestor-stellaops-ed25519',
signature: 'base64...',
verified: true,
verifiedAt: '2025-12-18T10:30:00Z',
},
observedAt: '2025-12-18T10:30:00Z',
vexRecommendation: 'affected',
},
{
witnessId: 'wit-002',
scanId: 'scan-001',
tenantId: 'tenant-1',
vulnId: 'vuln-002',
cveId: 'CVE-2024-12346',
packageName: 'log4net',
packageVersion: '2.0.8',
purl: 'pkg:nuget/log4net@2.0.8',
confidenceTier: 'unreachable',
confidenceScore: 0.9,
isReachable: false,
callPath: [],
gates: [],
evidence: {
callGraphHash: 'blake3:b2c3d4e5f6g7...',
analysisMethod: 'static',
},
observedAt: '2025-12-18T10:30:00Z',
vexRecommendation: 'not_affected',
},
];
/**
* Mock implementation of WitnessApi for development.
*/
@Injectable({ providedIn: 'root' })
export class WitnessMockClient implements WitnessApi {
getWitness(witnessId: string): Observable<ReachabilityWitness> {
const witness = MOCK_WITNESSES.find((w) => w.witnessId === witnessId);
if (!witness) {
throw new Error(`Witness ${witnessId} not found`);
}
return of(witness).pipe(delay(200));
}
listWitnesses(
scanId: string,
options?: { page?: number; pageSize?: number; tier?: ConfidenceTier }
): Observable<WitnessListResponse> {
let filtered = MOCK_WITNESSES.filter((w) => w.scanId === scanId);
if (options?.tier) {
filtered = filtered.filter((w) => w.confidenceTier === options.tier);
}
const page = options?.page ?? 1;
const pageSize = options?.pageSize ?? 20;
const start = (page - 1) * pageSize;
const paged = filtered.slice(start, start + pageSize);
return of({
witnesses: paged,
total: filtered.length,
page,
pageSize,
hasMore: start + pageSize < filtered.length,
}).pipe(delay(200));
}
verifyWitness(witnessId: string): Observable<WitnessVerificationResult> {
return of({
witnessId,
verified: true,
algorithm: 'ed25519',
keyId: 'attestor-stellaops-ed25519',
verifiedAt: new Date().toISOString(),
}).pipe(delay(300));
}
getWitnessesForVuln(vulnId: string): Observable<ReachabilityWitness[]> {
return of(MOCK_WITNESSES.filter((w) => w.vulnId === vulnId)).pipe(delay(200));
}
getStateFlipSummary(scanId: string): Observable<StateFlipSummary> {
return of({
scanId,
hasFlips: false,
newRiskCount: 0,
mitigatedCount: 0,
netChange: 0,
shouldBlockPr: false,
summary: 'No reachability changes',
flips: [],
}).pipe(delay(200));
}
downloadWitnessJson(witnessId: string): Observable<Blob> {
const witness = MOCK_WITNESSES.find((w) => w.witnessId === witnessId);
const json = JSON.stringify(witness, null, 2);
return of(new Blob([json], { type: 'application/json' })).pipe(delay(100));
}
exportSarif(scanId: string): Observable<Blob> {
const sarif = {
$schema: 'https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json',
version: '2.1.0',
runs: [
{
tool: { driver: { name: 'StellaOps Reachability', version: '1.0.0' } },
results: [],
},
],
};
return of(new Blob([JSON.stringify(sarif, null, 2)], { type: 'application/json' })).pipe(
delay(100)
);
}
}

View File

@@ -0,0 +1,221 @@
/**
* Witness API models for reachability evidence.
* Sprint: SPRINT_3700_0005_0001_witness_ui_cli (UI-005)
*/
/**
* Confidence tier for reachability assessment.
*/
export type ConfidenceTier = 'confirmed' | 'likely' | 'present' | 'unreachable' | 'unknown';
/**
* Reachability witness evidence.
*/
export interface ReachabilityWitness {
witnessId: string;
scanId: string;
tenantId: string;
vulnId: string;
cveId?: string;
packageName: string;
packageVersion?: string;
purl?: string;
/** Confidence tier for reachability. */
confidenceTier: ConfidenceTier;
/** Confidence score (0.0-1.0). */
confidenceScore: number;
/** Whether the vulnerable code is reachable from entry points. */
isReachable: boolean;
/** Call path from entry point to sink. */
callPath: CallPathNode[];
/** Entry point information. */
entrypoint?: PathNode;
/** Sink (vulnerable method) information. */
sink?: PathNode;
/** Gates encountered along the path. */
gates: GateInfo[];
/** Evidence metadata. */
evidence: WitnessEvidence;
/** Signature information. */
signature?: WitnessSignature;
/** When the witness was created. */
observedAt: string;
/** VEX recommendation based on reachability. */
vexRecommendation?: string;
}
/**
* Node in a call path.
*/
export interface CallPathNode {
nodeId: string;
symbol: string;
file?: string;
line?: number;
package?: string;
isChanged?: boolean;
changeKind?: string;
}
/**
* Detailed path node for entry/sink.
*/
export interface PathNode {
nodeId: string;
symbol: string;
file?: string;
line?: number;
package?: string;
method?: string;
httpRoute?: string;
httpMethod?: string;
}
/**
* Security gate information.
*/
export interface GateInfo {
gateType: 'auth' | 'authz' | 'validation' | 'sanitization' | 'rate-limit' | 'other';
symbol: string;
confidence: number;
description?: string;
file?: string;
line?: number;
}
/**
* Evidence metadata for witness.
*/
export interface WitnessEvidence {
/** Call graph hash. */
callGraphHash?: string;
/** Surface hash. */
surfaceHash?: string;
/** Analysis method. */
analysisMethod: 'static' | 'dynamic' | 'hybrid';
/** Tool version. */
toolVersion?: string;
/** Additional evidence artifacts. */
artifacts?: EvidenceArtifact[];
}
/**
* Evidence artifact reference.
*/
export interface EvidenceArtifact {
type: 'call-graph' | 'sbom' | 'attestation' | 'surface';
hash: string;
algorithm: string;
uri?: string;
}
/**
* Signature information for witness.
*/
export interface WitnessSignature {
algorithm: string;
keyId: string;
signature: string;
verified?: boolean;
verifiedAt?: string;
verificationError?: string;
}
/**
* Witness list response.
*/
export interface WitnessListResponse {
witnesses: ReachabilityWitness[];
total: number;
page: number;
pageSize: number;
hasMore: boolean;
}
/**
* Witness verification result.
*/
export interface WitnessVerificationResult {
witnessId: string;
verified: boolean;
algorithm: string;
keyId: string;
verifiedAt: string;
error?: string;
}
/**
* State flip information for PR gates.
*/
export interface StateFlip {
entryMethodKey: string;
sinkMethodKey: string;
wasReachable: boolean;
isReachable: boolean;
flipType: 'became_reachable' | 'became_unreachable';
cveId?: string;
packageName?: string;
}
/**
* State flip summary for PR annotations.
*/
export interface StateFlipSummary {
scanId: string;
previousScanId?: string;
hasFlips: boolean;
newRiskCount: number;
mitigatedCount: number;
netChange: number;
shouldBlockPr: boolean;
summary: string;
flips: StateFlip[];
}
/**
* Confidence tier badge colors.
*/
export const CONFIDENCE_TIER_COLORS: Record<ConfidenceTier, string> = {
confirmed: '#dc3545', // Red - highest risk
likely: '#fd7e14', // Orange
present: '#6c757d', // Gray
unreachable: '#28a745', // Green - no risk
unknown: '#17a2b8', // Blue - needs analysis
};
/**
* Confidence tier labels.
*/
export const CONFIDENCE_TIER_LABELS: Record<ConfidenceTier, string> = {
confirmed: 'Confirmed Reachable',
likely: 'Likely Reachable',
present: 'Present (Unknown Reachability)',
unreachable: 'Unreachable',
unknown: 'Unknown',
};
/**
* VEX recommendation by tier.
*/
export const VEX_RECOMMENDATIONS: Record<ConfidenceTier, string> = {
confirmed: 'affected',
likely: 'under_investigation',
present: 'under_investigation',
unreachable: 'not_affected',
unknown: 'under_investigation',
};

View File

@@ -0,0 +1,149 @@
/**
* PathViewerComponent Unit Tests
* Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
* Task: UI-012
*/
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { PathViewerComponent } from './path-viewer.component';
import { CompressedPath, PathNode } from '../../models/path-viewer.models';
describe('PathViewerComponent', () => {
let fixture: ComponentFixture<PathViewerComponent>;
let component: PathViewerComponent;
const mockEntrypoint: PathNode = {
nodeId: 'entry-1',
symbol: 'Program.Main',
file: 'Program.cs',
line: 10,
package: 'MyApp',
isChanged: false
};
const mockSink: PathNode = {
nodeId: 'sink-1',
symbol: 'SqlCommand.Execute',
file: 'DataAccess.cs',
line: 45,
package: 'System.Data',
isChanged: false
};
const mockKeyNode: PathNode = {
nodeId: 'key-1',
symbol: 'UserController.GetUser',
file: 'UserController.cs',
line: 25,
package: 'MyApp.Controllers',
isChanged: true,
changeKind: 'added'
};
const mockPath: CompressedPath = {
entrypoint: mockEntrypoint,
sink: mockSink,
intermediateCount: 5,
keyNodes: [mockKeyNode],
fullPath: ['entry-1', 'mid-1', 'mid-2', 'key-1', 'mid-3', 'sink-1']
};
beforeEach(async () => {
await TestBed.configureTestingModule({
imports: [PathViewerComponent],
}).compileComponents();
fixture = TestBed.createComponent(PathViewerComponent);
component = fixture.componentInstance;
fixture.componentRef.setInput('path', mockPath);
});
it('should create', () => {
expect(component).toBeTruthy();
});
it('should display entrypoint and sink nodes', () => {
fixture.detectChanges();
const displayNodes = component.displayNodes();
expect(displayNodes[0]).toEqual(mockEntrypoint);
expect(displayNodes[displayNodes.length - 1]).toEqual(mockSink);
});
it('should include key nodes in display', () => {
fixture.detectChanges();
const displayNodes = component.displayNodes();
expect(displayNodes).toContain(mockKeyNode);
});
it('should compute hidden node count correctly', () => {
fixture.detectChanges();
// intermediateCount (5) - keyNodes.length (1) = 4
expect(component.hiddenNodeCount()).toBe(4);
});
it('should toggle collapsed state', () => {
fixture.detectChanges();
expect(component.collapsed()).toBe(false);
component.toggleCollapse();
expect(component.collapsed()).toBe(true);
component.toggleCollapse();
expect(component.collapsed()).toBe(false);
});
it('should emit nodeClick when node is clicked', () => {
fixture.detectChanges();
const emitSpy = jest.spyOn(component.nodeClick, 'emit');
component.onNodeClick(mockKeyNode);
expect(emitSpy).toHaveBeenCalledWith(mockKeyNode);
});
it('should emit expandRequest when toggling expand', () => {
fixture.detectChanges();
const emitSpy = jest.spyOn(component.expandRequest, 'emit');
component.toggleExpand();
expect(emitSpy).toHaveBeenCalledWith('entry-1');
});
it('should show all nodes when expanded', () => {
fixture.detectChanges();
component.isExpanded.set(true);
const displayNodes = component.displayNodes();
// When expanded, should include all nodes from fullPath
expect(displayNodes.length).toBeGreaterThanOrEqual(3);
});
it('should return 0 hidden nodes when expanded', () => {
fixture.detectChanges();
component.isExpanded.set(true);
expect(component.hiddenNodeCount()).toBe(0);
});
it('should use default title if not provided', () => {
fixture.detectChanges();
expect(component.title()).toBe('Reachability Path');
});
it('should use custom title when provided', () => {
fixture.componentRef.setInput('title', 'Custom Path Title');
fixture.detectChanges();
expect(component.title()).toBe('Custom Path Title');
});
it('should be collapsible by default', () => {
fixture.detectChanges();
expect(component.collapsible()).toBe(true);
});
it('should highlight changes by default', () => {
fixture.detectChanges();
expect(component.highlightChanges()).toBe(true);
});
});

View File

@@ -0,0 +1,190 @@
/**
* RiskDriftCardComponent Unit Tests
* Sprint: SPRINT_3600_0004_0001_ui_evidence_chain
* Task: UI-013
*/
import { ComponentFixture, TestBed } from '@angular/core/testing';
import { RiskDriftCardComponent } from './risk-drift-card.component';
import { DriftResult, DriftedSink, DriftSummary } from '../../models/drift.models';
describe('RiskDriftCardComponent', () => {
let fixture: ComponentFixture<RiskDriftCardComponent>;
let component: RiskDriftCardComponent;
const mockSink1: DriftedSink = {
sinkId: 'sink-1',
sinkSymbol: 'SqlCommand.Execute',
driftKind: 'became_reachable',
riskDelta: 0.25,
severity: 'high',
cveId: 'CVE-2021-12345',
pathCount: 2
};
const mockSink2: DriftedSink = {
sinkId: 'sink-2',
sinkSymbol: 'ProcessBuilder.start',
driftKind: 'became_unreachable',
riskDelta: -0.15,
severity: 'critical',
pathCount: 1
};
const mockSink3: DriftedSink = {
sinkId: 'sink-3',
sinkSymbol: 'Runtime.exec',
driftKind: 'became_reachable',
riskDelta: 0.10,
severity: 'medium',
pathCount: 3
};
const mockSummary: DriftSummary = {
totalDrifts: 3,
newlyReachable: 2,
newlyUnreachable: 1,
riskTrend: 'increasing',
baselineScanId: 'scan-base',
currentScanId: 'scan-current'
};
const mockDriftResult: DriftResult = {
id: 'drift-1',
summary: mockSummary,
driftedSinks: [mockSink1, mockSink2, mockSink3],
attestationDigest: 'sha256:abc123',
createdAt: '2025-12-19T12:00:00Z'
};
beforeEach(async () => {
await TestBed.configureTestingModule({
imports: [RiskDriftCardComponent],
}).compileComponents();
fixture = TestBed.createComponent(RiskDriftCardComponent);
component = fixture.componentInstance;
fixture.componentRef.setInput('drift', mockDriftResult);
});
it('should create', () => {
expect(component).toBeTruthy();
});
it('should compute summary from drift', () => {
fixture.detectChanges();
expect(component.summary()).toEqual(mockSummary);
});
it('should detect signed attestation', () => {
fixture.detectChanges();
expect(component.isSigned()).toBe(true);
});
it('should detect unsigned drift when no attestation', () => {
const unsignedDrift = { ...mockDriftResult, attestationDigest: undefined };
fixture.componentRef.setInput('drift', unsignedDrift);
fixture.detectChanges();
expect(component.isSigned()).toBe(false);
});
it('should show upward trend icon for increasing risk', () => {
fixture.detectChanges();
expect(component.trendIcon()).toBe('↑');
});
it('should show downward trend icon for decreasing risk', () => {
const decreasingDrift = {
...mockDriftResult,
summary: { ...mockSummary, riskTrend: 'decreasing' as const }
};
fixture.componentRef.setInput('drift', decreasingDrift);
fixture.detectChanges();
expect(component.trendIcon()).toBe('↓');
});
it('should show stable trend icon for stable risk', () => {
const stableDrift = {
...mockDriftResult,
summary: { ...mockSummary, riskTrend: 'stable' as const }
};
fixture.componentRef.setInput('drift', stableDrift);
fixture.detectChanges();
expect(component.trendIcon()).toBe('→');
});
it('should compute trend CSS class correctly', () => {
fixture.detectChanges();
expect(component.trendClass()).toBe('risk-drift-card__trend--increasing');
});
it('should show max preview sinks (default 3)', () => {
fixture.detectChanges();
expect(component.previewSinks().length).toBeLessThanOrEqual(3);
});
it('should respect custom maxPreviewSinks', () => {
fixture.componentRef.setInput('maxPreviewSinks', 1);
fixture.detectChanges();
expect(component.previewSinks().length).toBe(1);
});
it('should sort preview sinks by severity first', () => {
fixture.detectChanges();
const sinks = component.previewSinks();
// Critical should come before high
const criticalIndex = sinks.findIndex(s => s.severity === 'critical');
const highIndex = sinks.findIndex(s => s.severity === 'high');
if (criticalIndex !== -1 && highIndex !== -1) {
expect(criticalIndex).toBeLessThan(highIndex);
}
});
it('should compute additional sinks count', () => {
fixture.detectChanges();
// 3 total sinks, max 3 preview = 0 additional
expect(component.additionalSinksCount()).toBe(0);
});
it('should compute additional sinks when more than max', () => {
fixture.componentRef.setInput('maxPreviewSinks', 1);
fixture.detectChanges();
// 3 total sinks, max 1 preview = 2 additional
expect(component.additionalSinksCount()).toBe(2);
});
it('should emit viewDetails when view details is clicked', () => {
fixture.detectChanges();
const emitSpy = jest.spyOn(component.viewDetails, 'emit');
component.onViewDetails();
expect(emitSpy).toHaveBeenCalled();
});
it('should emit sinkClick when a sink is clicked', () => {
fixture.detectChanges();
const emitSpy = jest.spyOn(component.sinkClick, 'emit');
component.onSinkClick(mockSink1);
expect(emitSpy).toHaveBeenCalledWith(mockSink1);
});
it('should be non-compact by default', () => {
fixture.detectChanges();
expect(component.compact()).toBe(false);
});
it('should show attestation by default', () => {
fixture.detectChanges();
expect(component.showAttestation()).toBe(true);
});
});

View File

@@ -51,29 +51,49 @@
</p>
<!-- Determinism Evidence Section -->
<section class="determinism-section">
<h2>SBOM Determinism</h2>
@if (scan().determinism) {
<app-determinism-badge [evidence]="scan().determinism ?? null" />
} @else {
<p class="determinism-empty">
No determinism evidence available for this scan.
</p>
}
</section>
<section class="determinism-section">
<h2>SBOM Determinism</h2>
@if (scan().determinism) {
<app-determinism-badge [evidence]="scan().determinism ?? null" />
} @else {
<p class="determinism-empty">
No determinism evidence available for this scan.
</p>
}
</section>
<!-- Entropy Analysis Section -->
<section class="entropy-section">
<h2>Entropy Analysis</h2>
@if (scan().entropy) {
<!-- Policy Banner with thresholds and mitigations -->
<app-entropy-policy-banner [evidence]="scan().entropy ?? null" />
<!-- Detailed entropy visualization -->
<app-entropy-panel [evidence]="scan().entropy ?? null" />
} @else {
<p class="entropy-empty">
No entropy analysis available for this scan.
</p>
}
</section>
</section>
<section class="entropy-section">
<h2>Entropy Analysis</h2>
@if (scan().entropy) {
<!-- Policy Banner with thresholds and mitigations -->
<app-entropy-policy-banner [evidence]="scan().entropy ?? null" />
<!-- Detailed entropy visualization -->
<app-entropy-panel [evidence]="scan().entropy ?? null" />
} @else {
<p class="entropy-empty">
No entropy analysis available for this scan.
</p>
}
</section>
<!-- Reachability Drift Section -->
<!-- Sprint: SPRINT_3600_0004_0001_ui_evidence_chain (UI-010) -->
<section class="reachability-drift-section">
<h2>Reachability Drift</h2>
@if (driftResult()) {
<app-risk-drift-card
[drift]="driftResult()!"
[compact]="false"
[showAttestation]="true"
(viewDetails)="onViewDriftDetails()"
(sinkClick)="onSinkClick($event)"
/>
} @else {
<p class="drift-empty">
No reachability drift detected for this scan.
Drift analysis requires a baseline scan for comparison.
</p>
}
</section>
</section>

View File

@@ -117,3 +117,24 @@
color: #94a3b8;
margin: 0;
}
// Reachability Drift Section
// Sprint: SPRINT_3600_0004_0001_ui_evidence_chain (UI-010)
.reachability-drift-section {
border: 1px solid #1f2933;
border-radius: 8px;
padding: 1.25rem;
background: #111827;
h2 {
margin: 0 0 1rem 0;
font-size: 1.125rem;
color: #e2e8f0;
}
}
.drift-empty {
font-style: italic;
color: #94a3b8;
margin: 0;
}

View File

@@ -11,11 +11,14 @@ import { ScanAttestationPanelComponent } from './scan-attestation-panel.componen
import { DeterminismBadgeComponent } from './determinism-badge.component';
import { EntropyPanelComponent } from './entropy-panel.component';
import { EntropyPolicyBannerComponent } from './entropy-policy-banner.component';
import { PathViewerComponent } from '../reachability/components/path-viewer/path-viewer.component';
import { RiskDriftCardComponent } from '../reachability/components/risk-drift-card/risk-drift-card.component';
import { ScanDetail } from '../../core/api/scanner.models';
import {
scanDetailWithFailedAttestation,
scanDetailWithVerifiedAttestation,
} from '../../testing/scan-fixtures';
import type { PathNode, DriftResult, DriftedSink } from '../reachability/models';
type Scenario = 'verified' | 'failed';
@@ -27,7 +30,15 @@ const SCENARIO_MAP: Record<Scenario, ScanDetail> = {
@Component({
selector: 'app-scan-detail-page',
standalone: true,
imports: [CommonModule, ScanAttestationPanelComponent, DeterminismBadgeComponent, EntropyPanelComponent, EntropyPolicyBannerComponent],
imports: [
CommonModule,
ScanAttestationPanelComponent,
DeterminismBadgeComponent,
EntropyPanelComponent,
EntropyPolicyBannerComponent,
PathViewerComponent,
RiskDriftCardComponent,
],
templateUrl: './scan-detail-page.component.html',
styleUrls: ['./scan-detail-page.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush,
@@ -36,6 +47,7 @@ export class ScanDetailPageComponent {
private readonly route = inject(ActivatedRoute);
readonly scenario = signal<Scenario>('verified');
readonly driftResult = signal<DriftResult | null>(null);
readonly scan = computed<ScanDetail>(() => {
const current = this.scenario();
@@ -62,4 +74,31 @@ export class ScanDetailPageComponent {
onSelectScenario(next: Scenario): void {
this.scenario.set(next);
}
/**
* Handle node click in path viewer.
* Sprint: SPRINT_3600_0004_0001_ui_evidence_chain (UI-010)
*/
onPathNodeClick(node: PathNode): void {
console.log('Path node clicked:', node);
// TODO: Navigate to source location or show node details
}
/**
* Handle view details click in drift card.
* Sprint: SPRINT_3600_0004_0001_ui_evidence_chain (UI-010)
*/
onViewDriftDetails(): void {
console.log('View drift details requested');
// TODO: Navigate to full drift analysis page
}
/**
* Handle sink click in drift card.
* Sprint: SPRINT_3600_0004_0001_ui_evidence_chain (UI-010)
*/
onSinkClick(sink: DriftedSink): void {
console.log('Sink clicked:', sink);
// TODO: Navigate to sink details or expand path view
}
}

View File

@@ -9,30 +9,34 @@ import {
} from '@angular/core';
import { firstValueFrom } from 'rxjs';
import { VULNERABILITY_API, VulnerabilityApi } from '../../core/api/vulnerability.client';
import {
Vulnerability,
VulnerabilitySeverity,
VulnerabilityStats,
VulnerabilityStatus,
} from '../../core/api/vulnerability.models';
import { VULNERABILITY_API, VulnerabilityApi } from '../../core/api/vulnerability.client';
import {
Vulnerability,
VulnerabilitySeverity,
VulnerabilityStats,
VulnerabilityStatus,
} from '../../core/api/vulnerability.models';
import {
ExceptionDraftContext,
ExceptionDraftInlineComponent,
} from '../exceptions/exception-draft-inline.component';
import {
ExceptionBadgeComponent,
ExceptionBadgeData,
ExceptionExplainComponent,
ExceptionExplainData,
} from '../../shared/components';
import { ReachabilityWhyDrawerComponent } from '../reachability/reachability-why-drawer.component';
import {
ExceptionBadgeComponent,
ExceptionBadgeData,
ExceptionExplainComponent,
ExceptionExplainData,
} from '../../shared/components';
import { ReachabilityWhyDrawerComponent } from '../reachability/reachability-why-drawer.component';
import { WitnessModalComponent } from '../../shared/components/witness-modal.component';
import { ConfidenceTierBadgeComponent } from '../../shared/components/confidence-tier-badge.component';
import { ReachabilityWitness, ConfidenceTier } from '../../core/api/witness.models';
import { WitnessMockClient } from '../../core/api/witness.client';
type SeverityFilter = VulnerabilitySeverity | 'all';
type StatusFilter = VulnerabilityStatus | 'all';
type ReachabilityFilter = 'reachable' | 'unreachable' | 'unknown' | 'all';
type SortField = 'cveId' | 'severity' | 'cvssScore' | 'publishedAt' | 'status';
type SortOrder = 'asc' | 'desc';
type SeverityFilter = VulnerabilitySeverity | 'all';
type StatusFilter = VulnerabilityStatus | 'all';
type ReachabilityFilter = 'reachable' | 'unreachable' | 'unknown' | 'all';
type SortField = 'cveId' | 'severity' | 'cvssScore' | 'publishedAt' | 'status';
type SortOrder = 'asc' | 'desc';
const SEVERITY_LABELS: Record<VulnerabilitySeverity, string> = {
critical: 'Critical',
@@ -42,39 +46,48 @@ const SEVERITY_LABELS: Record<VulnerabilitySeverity, string> = {
unknown: 'Unknown',
};
const STATUS_LABELS: Record<VulnerabilityStatus, string> = {
open: 'Open',
fixed: 'Fixed',
wont_fix: "Won't Fix",
in_progress: 'In Progress',
excepted: 'Excepted',
};
const REACHABILITY_LABELS: Record<Exclude<ReachabilityFilter, 'all'>, string> = {
reachable: 'Reachable',
unreachable: 'Unreachable',
unknown: 'Unknown',
};
const SEVERITY_ORDER: Record<VulnerabilitySeverity, number> = {
critical: 0,
high: 1,
medium: 2,
const STATUS_LABELS: Record<VulnerabilityStatus, string> = {
open: 'Open',
fixed: 'Fixed',
wont_fix: "Won't Fix",
in_progress: 'In Progress',
excepted: 'Excepted',
};
const REACHABILITY_LABELS: Record<Exclude<ReachabilityFilter, 'all'>, string> = {
reachable: 'Reachable',
unreachable: 'Unreachable',
unknown: 'Unknown',
};
const SEVERITY_ORDER: Record<VulnerabilitySeverity, number> = {
critical: 0,
high: 1,
medium: 2,
low: 3,
unknown: 4,
};
@Component({
selector: 'app-vulnerability-explorer',
standalone: true,
imports: [CommonModule, ExceptionDraftInlineComponent, ExceptionBadgeComponent, ExceptionExplainComponent, ReachabilityWhyDrawerComponent],
templateUrl: './vulnerability-explorer.component.html',
styleUrls: ['./vulnerability-explorer.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush,
providers: [],
})
export class VulnerabilityExplorerComponent implements OnInit {
selector: 'app-vulnerability-explorer',
standalone: true,
imports: [
CommonModule,
ExceptionDraftInlineComponent,
ExceptionBadgeComponent,
ExceptionExplainComponent,
ReachabilityWhyDrawerComponent,
WitnessModalComponent,
ConfidenceTierBadgeComponent,
],
templateUrl: './vulnerability-explorer.component.html',
styleUrls: ['./vulnerability-explorer.component.scss'],
changeDetection: ChangeDetectionStrategy.OnPush,
providers: [],
})
export class VulnerabilityExplorerComponent implements OnInit {
private readonly api = inject<VulnerabilityApi>(VULNERABILITY_API);
private readonly witnessClient = inject(WitnessMockClient);
// View state
readonly loading = signal(false);
@@ -86,55 +99,55 @@ export class VulnerabilityExplorerComponent implements OnInit {
readonly stats = signal<VulnerabilityStats | null>(null);
readonly selectedVulnId = signal<string | null>(null);
// Filters & sorting
readonly severityFilter = signal<SeverityFilter>('all');
readonly statusFilter = signal<StatusFilter>('all');
readonly reachabilityFilter = signal<ReachabilityFilter>('all');
readonly searchQuery = signal('');
readonly sortField = signal<SortField>('severity');
readonly sortOrder = signal<SortOrder>('asc');
readonly showExceptedOnly = signal(false);
// Filters & sorting
readonly severityFilter = signal<SeverityFilter>('all');
readonly statusFilter = signal<StatusFilter>('all');
readonly reachabilityFilter = signal<ReachabilityFilter>('all');
readonly searchQuery = signal('');
readonly sortField = signal<SortField>('severity');
readonly sortOrder = signal<SortOrder>('asc');
readonly showExceptedOnly = signal(false);
// Exception draft state
readonly showExceptionDraft = signal(false);
readonly selectedForException = signal<Vulnerability[]>([]);
// Exception explain state
readonly showExceptionExplain = signal(false);
readonly explainExceptionId = signal<string | null>(null);
// Why drawer state
readonly showWhyDrawer = signal(false);
// Constants for template
readonly severityLabels = SEVERITY_LABELS;
readonly statusLabels = STATUS_LABELS;
readonly reachabilityLabels = REACHABILITY_LABELS;
readonly allSeverities: VulnerabilitySeverity[] = ['critical', 'high', 'medium', 'low', 'unknown'];
readonly allStatuses: VulnerabilityStatus[] = ['open', 'fixed', 'wont_fix', 'in_progress', 'excepted'];
readonly allReachability: Exclude<ReachabilityFilter, 'all'>[] = ['reachable', 'unknown', 'unreachable'];
// Exception explain state
readonly showExceptionExplain = signal(false);
readonly explainExceptionId = signal<string | null>(null);
// Why drawer state
readonly showWhyDrawer = signal(false);
// Constants for template
readonly severityLabels = SEVERITY_LABELS;
readonly statusLabels = STATUS_LABELS;
readonly reachabilityLabels = REACHABILITY_LABELS;
readonly allSeverities: VulnerabilitySeverity[] = ['critical', 'high', 'medium', 'low', 'unknown'];
readonly allStatuses: VulnerabilityStatus[] = ['open', 'fixed', 'wont_fix', 'in_progress', 'excepted'];
readonly allReachability: Exclude<ReachabilityFilter, 'all'>[] = ['reachable', 'unknown', 'unreachable'];
// Computed: filtered and sorted list
readonly filteredVulnerabilities = computed(() => {
let items = [...this.vulnerabilities()];
const severity = this.severityFilter();
const status = this.statusFilter();
const reachability = this.reachabilityFilter();
const search = this.searchQuery().toLowerCase();
const exceptedOnly = this.showExceptedOnly();
readonly filteredVulnerabilities = computed(() => {
let items = [...this.vulnerabilities()];
const severity = this.severityFilter();
const status = this.statusFilter();
const reachability = this.reachabilityFilter();
const search = this.searchQuery().toLowerCase();
const exceptedOnly = this.showExceptedOnly();
if (severity !== 'all') {
items = items.filter((v) => v.severity === severity);
}
if (status !== 'all') {
items = items.filter((v) => v.status === status);
}
if (reachability !== 'all') {
items = items.filter((v) => (v.reachabilityStatus ?? 'unknown') === reachability);
}
if (exceptedOnly) {
items = items.filter((v) => v.hasException);
}
if (status !== 'all') {
items = items.filter((v) => v.status === status);
}
if (reachability !== 'all') {
items = items.filter((v) => (v.reachabilityStatus ?? 'unknown') === reachability);
}
if (exceptedOnly) {
items = items.filter((v) => v.hasException);
}
if (search) {
items = items.filter(
(v) =>
@@ -239,10 +252,10 @@ export class VulnerabilityExplorerComponent implements OnInit {
this.message.set(null);
try {
const [vulnsResponse, statsResponse] = await Promise.all([
firstValueFrom(this.api.listVulnerabilities({ includeReachability: true })),
firstValueFrom(this.api.getStats()),
]);
const [vulnsResponse, statsResponse] = await Promise.all([
firstValueFrom(this.api.listVulnerabilities({ includeReachability: true })),
firstValueFrom(this.api.getStats()),
]);
this.vulnerabilities.set([...vulnsResponse.items]);
this.stats.set(statsResponse);
@@ -258,18 +271,18 @@ export class VulnerabilityExplorerComponent implements OnInit {
this.severityFilter.set(severity);
}
setStatusFilter(status: StatusFilter): void {
this.statusFilter.set(status);
}
setReachabilityFilter(reachability: ReachabilityFilter): void {
this.reachabilityFilter.set(reachability);
}
onSearchInput(event: Event): void {
const input = event.target as HTMLInputElement;
this.searchQuery.set(input.value);
}
setStatusFilter(status: StatusFilter): void {
this.statusFilter.set(status);
}
setReachabilityFilter(reachability: ReachabilityFilter): void {
this.reachabilityFilter.set(reachability);
}
onSearchInput(event: Event): void {
const input = event.target as HTMLInputElement;
this.searchQuery.set(input.value);
}
clearSearch(): void {
this.searchQuery.set('');
@@ -337,17 +350,17 @@ export class VulnerabilityExplorerComponent implements OnInit {
this.showExceptionExplain.set(true);
}
closeExplain(): void {
this.showExceptionExplain.set(false);
this.explainExceptionId.set(null);
}
closeExplain(): void {
this.showExceptionExplain.set(false);
this.explainExceptionId.set(null);
}
viewExceptionFromExplain(exceptionId: string): void {
this.closeExplain();
this.onViewExceptionDetails(exceptionId);
}
openFullWizard(): void {
openFullWizard(): void {
// In a real app, this would navigate to the Exception Center wizard
// For now, just show a message
this.showMessage('Opening full wizard... (would navigate to Exception Center)', 'info');
@@ -371,47 +384,47 @@ export class VulnerabilityExplorerComponent implements OnInit {
});
}
formatCvss(score: number | undefined): string {
if (score === undefined) return '-';
return score.toFixed(1);
}
openWhyDrawer(): void {
this.showWhyDrawer.set(true);
}
closeWhyDrawer(): void {
this.showWhyDrawer.set(false);
}
getReachabilityClass(vuln: Vulnerability): string {
const status = vuln.reachabilityStatus ?? 'unknown';
return `reachability--${status}`;
}
getReachabilityLabel(vuln: Vulnerability): string {
const status = vuln.reachabilityStatus ?? 'unknown';
return REACHABILITY_LABELS[status];
}
getReachabilityTooltip(vuln: Vulnerability): string {
const status = vuln.reachabilityStatus ?? 'unknown';
const score = vuln.reachabilityScore;
const scoreText =
typeof score === 'number' ? ` (confidence ${(score * 100).toFixed(0)}%)` : '';
switch (status) {
case 'reachable':
return `Reachable${scoreText}. Signals indicates a call path reaches at least one affected component.`;
case 'unreachable':
return `Unreachable${scoreText}. Signals found no call path to affected components.`;
default:
return `Unknown${scoreText}. No reachability evidence is available for the affected components.`;
}
}
trackByVuln = (_: number, item: Vulnerability) => item.vulnId;
trackByComponent = (_: number, item: { purl: string }) => item.purl;
formatCvss(score: number | undefined): string {
if (score === undefined) return '-';
return score.toFixed(1);
}
openWhyDrawer(): void {
this.showWhyDrawer.set(true);
}
closeWhyDrawer(): void {
this.showWhyDrawer.set(false);
}
getReachabilityClass(vuln: Vulnerability): string {
const status = vuln.reachabilityStatus ?? 'unknown';
return `reachability--${status}`;
}
getReachabilityLabel(vuln: Vulnerability): string {
const status = vuln.reachabilityStatus ?? 'unknown';
return REACHABILITY_LABELS[status];
}
getReachabilityTooltip(vuln: Vulnerability): string {
const status = vuln.reachabilityStatus ?? 'unknown';
const score = vuln.reachabilityScore;
const scoreText =
typeof score === 'number' ? ` (confidence ${(score * 100).toFixed(0)}%)` : '';
switch (status) {
case 'reachable':
return `Reachable${scoreText}. Signals indicates a call path reaches at least one affected component.`;
case 'unreachable':
return `Unreachable${scoreText}. Signals found no call path to affected components.`;
default:
return `Unknown${scoreText}. No reachability evidence is available for the affected components.`;
}
}
trackByVuln = (_: number, item: Vulnerability) => item.vulnId;
trackByComponent = (_: number, item: { purl: string }) => item.purl;
private sortVulnerabilities(items: Vulnerability[]): Vulnerability[] {
const field = this.sortField();
@@ -448,9 +461,9 @@ export class VulnerabilityExplorerComponent implements OnInit {
setTimeout(() => this.message.set(null), 5000);
}
private toErrorMessage(error: unknown): string {
if (error instanceof Error) return error.message;
if (typeof error === 'string') return error;
return 'Operation failed. Please retry.';
}
}
private toErrorMessage(error: unknown): string {
if (error instanceof Error) return error.message;
if (typeof error === 'string') return error;
return 'Operation failed. Please retry.';
}
}

View File

@@ -0,0 +1,148 @@
/**
* Confidence Tier Badge Component.
* Sprint: SPRINT_3700_0005_0001_witness_ui_cli (UI-006)
*
* Displays reachability confidence tier with color coding.
*/
import { Component, input, computed } from '@angular/core';
import { CommonModule } from '@angular/common';
import {
ConfidenceTier,
CONFIDENCE_TIER_COLORS,
CONFIDENCE_TIER_LABELS,
VEX_RECOMMENDATIONS,
} from '../../core/api/witness.models';
@Component({
selector: 'app-confidence-tier-badge',
standalone: true,
imports: [CommonModule],
template: `
<span
class="tier-badge"
[class]="tierClass()"
[style.background-color]="tierColor()"
[attr.title]="tooltip()"
[attr.aria-label]="ariaLabel()"
>
<span class="tier-badge__icon" *ngIf="showIcon()">{{ tierIcon() }}</span>
<span class="tier-badge__label">{{ tierLabel() }}</span>
<span class="tier-badge__score" *ngIf="showScore() && score() !== undefined">
{{ formatScore() }}
</span>
</span>
`,
styles: [`
.tier-badge {
display: inline-flex;
align-items: center;
gap: 0.375rem;
padding: 0.25rem 0.625rem;
border-radius: 4px;
font-size: 0.75rem;
font-weight: 600;
color: #fff;
text-transform: uppercase;
letter-spacing: 0.025em;
cursor: help;
transition: opacity 0.15s, transform 0.15s;
&:hover {
opacity: 0.9;
transform: scale(1.02);
}
}
.tier-badge--confirmed {
background-color: #dc3545;
box-shadow: 0 2px 4px rgba(220, 53, 69, 0.3);
}
.tier-badge--likely {
background-color: #fd7e14;
box-shadow: 0 2px 4px rgba(253, 126, 20, 0.3);
}
.tier-badge--present {
background-color: #6c757d;
box-shadow: 0 2px 4px rgba(108, 117, 125, 0.3);
}
.tier-badge--unreachable {
background-color: #28a745;
box-shadow: 0 2px 4px rgba(40, 167, 69, 0.3);
}
.tier-badge--unknown {
background-color: #17a2b8;
box-shadow: 0 2px 4px rgba(23, 162, 184, 0.3);
}
.tier-badge__icon {
font-size: 0.875rem;
}
.tier-badge__score {
opacity: 0.9;
font-weight: 400;
}
`],
})
export class ConfidenceTierBadgeComponent {
/** Confidence tier. */
tier = input.required<ConfidenceTier>();
/** Optional confidence score (0.0-1.0). */
score = input<number>();
/** Whether to show the icon. */
showIcon = input<boolean>(true);
/** Whether to show the score. */
showScore = input<boolean>(false);
/** Compact mode (shorter label). */
compact = input<boolean>(false);
tierClass = computed(() => `tier-badge tier-badge--${this.tier()}`);
tierColor = computed(() => CONFIDENCE_TIER_COLORS[this.tier()]);
tierLabel = computed(() => {
if (this.compact()) {
return this.tier().toUpperCase();
}
return CONFIDENCE_TIER_LABELS[this.tier()];
});
tierIcon = computed(() => {
const icons: Record<ConfidenceTier, string> = {
confirmed: '⚠️',
likely: '❗',
present: '❓',
unreachable: '✓',
unknown: '?',
};
return icons[this.tier()];
});
tooltip = computed(() => {
const vex = VEX_RECOMMENDATIONS[this.tier()];
const scoreText = this.score() !== undefined
? ` (Score: ${(this.score()! * 100).toFixed(0)}%)`
: '';
return `${CONFIDENCE_TIER_LABELS[this.tier()]}${scoreText}\nVEX Recommendation: ${vex}`;
});
ariaLabel = computed(() =>
`Confidence tier: ${CONFIDENCE_TIER_LABELS[this.tier()]}`
);
formatScore = computed(() => {
const s = this.score();
if (s === undefined) return '';
return `${(s * 100).toFixed(0)}%`;
});
}

View File

@@ -0,0 +1,768 @@
/**
* Evidence Drawer Component.
* Sprint: SPRINT_3850_0001_0001 (Competitive Gap Closure)
* Task: EXP-F-005 - Evidence drawer UI with proof tabs
*
* Displays detailed evidence for a finding including:
* - Proof chain visualization
* - Reachability witness
* - VEX decisions
* - Attestation verification
*/
import { Component, input, output, computed, signal } from '@angular/core';
import { CommonModule } from '@angular/common';
import { PathVisualizationComponent, PathVisualizationData } from './path-visualization.component';
import { ConfidenceTierBadgeComponent } from './confidence-tier-badge.component';
import { GateBadgeComponent } from './gate-badge.component';
import { GateInfo } from '../../core/api/witness.models';
/**
* Evidence tab types.
*/
export type EvidenceTab = 'summary' | 'proof' | 'reachability' | 'vex' | 'attestation';
/**
* Proof node for the proof chain.
*/
export interface ProofNode {
id: string;
kind: 'input' | 'rule' | 'merge' | 'output';
ruleId?: string;
delta: number;
total: number;
parentIds: string[];
evidenceRefs: string[];
actor?: string;
timestamp: string;
}
/**
* VEX decision for a finding.
*/
export interface VexDecision {
status: 'not_affected' | 'affected' | 'under_investigation' | 'fixed';
justification?: string;
source: string;
sourceVersion?: string;
timestamp: string;
jurisdiction?: string;
confidence: number;
}
/**
* Attestation envelope.
*/
export interface AttestationInfo {
envelopeType: 'DSSE' | 'in-toto';
predicateType: string;
signedAt: string;
keyId: string;
algorithm: string;
verified: boolean;
rekorLogIndex?: number;
rekorLogId?: string;
}
/**
* Evidence drawer data.
*/
export interface EvidenceDrawerData {
findingId: string;
cveId?: string;
packageName: string;
packageVersion?: string;
severity: 'critical' | 'high' | 'medium' | 'low' | 'info';
score?: number;
// Proof chain
proofNodes?: ProofNode[];
proofRootHash?: string;
// Reachability
reachabilityPath?: PathVisualizationData;
confidenceTier?: string;
gates?: GateInfo[];
// VEX
vexDecisions?: VexDecision[];
mergedVexStatus?: string;
// Attestations
attestations?: AttestationInfo[];
// Falsification conditions
falsificationConditions?: string[];
}
@Component({
selector: 'app-evidence-drawer',
standalone: true,
imports: [CommonModule, PathVisualizationComponent, ConfidenceTierBadgeComponent, GateBadgeComponent],
template: `
<div class="evidence-drawer" [class.evidence-drawer--open]="open()">
<div class="evidence-drawer__backdrop" (click)="close.emit()"></div>
<div class="evidence-drawer__panel">
<header class="evidence-drawer__header">
<div class="evidence-drawer__title">
<span class="evidence-drawer__severity" [class]="'evidence-drawer__severity--' + data().severity">
{{ data().severity | uppercase }}
</span>
<h2>{{ data().cveId ?? data().findingId }}</h2>
<span class="evidence-drawer__package">{{ data().packageName }}{{ data().packageVersion ? '@' + data().packageVersion : '' }}</span>
</div>
<button class="evidence-drawer__close" (click)="close.emit()" aria-label="Close">
×
</button>
</header>
<nav class="evidence-drawer__tabs">
<button
*ngFor="let tab of tabs"
class="evidence-drawer__tab"
[class.evidence-drawer__tab--active]="activeTab() === tab.id"
(click)="activeTab.set(tab.id)"
>
{{ tab.label }}
<span class="evidence-drawer__tab-indicator" *ngIf="tab.hasData && tab.hasData()"></span>
</button>
</nav>
<div class="evidence-drawer__content">
<!-- Summary Tab -->
<section *ngIf="activeTab() === 'summary'" class="evidence-drawer__section">
<h3>Finding Summary</h3>
<dl class="evidence-drawer__details">
<dt>Finding ID</dt>
<dd>{{ data().findingId }}</dd>
<dt *ngIf="data().cveId">CVE</dt>
<dd *ngIf="data().cveId">{{ data().cveId }}</dd>
<dt>Package</dt>
<dd>{{ data().packageName }}{{ data().packageVersion ? '@' + data().packageVersion : '' }}</dd>
<dt *ngIf="data().score !== undefined">Score</dt>
<dd *ngIf="data().score !== undefined">{{ data().score | number:'1.1-1' }}</dd>
<dt *ngIf="data().confidenceTier">Confidence</dt>
<dd *ngIf="data().confidenceTier">
<app-confidence-tier-badge [tier]="data().confidenceTier!"></app-confidence-tier-badge>
</dd>
<dt *ngIf="data().mergedVexStatus">VEX Status</dt>
<dd *ngIf="data().mergedVexStatus">
<span class="evidence-drawer__vex-status" [class]="'evidence-drawer__vex-status--' + data().mergedVexStatus">
{{ data().mergedVexStatus | uppercase }}
</span>
</dd>
</dl>
<div class="evidence-drawer__falsification" *ngIf="data().falsificationConditions?.length">
<h4>Falsification Conditions</h4>
<p class="evidence-drawer__falsification-intro">
This finding would be invalid if any of the following conditions are met:
</p>
<ul class="evidence-drawer__falsification-list">
<li *ngFor="let condition of data().falsificationConditions">{{ condition }}</li>
</ul>
</div>
</section>
<!-- Proof Tab -->
<section *ngIf="activeTab() === 'proof'" class="evidence-drawer__section">
<h3>Proof Chain</h3>
<div class="evidence-drawer__proof-root" *ngIf="data().proofRootHash">
<strong>Root Hash:</strong>
<code>{{ data().proofRootHash }}</code>
</div>
<div class="evidence-drawer__proof-nodes" *ngIf="data().proofNodes?.length">
<div
class="evidence-drawer__proof-node"
*ngFor="let node of data().proofNodes"
[class]="'evidence-drawer__proof-node--' + node.kind"
>
<div class="evidence-drawer__proof-node-header">
<span class="evidence-drawer__proof-node-kind">{{ node.kind | uppercase }}</span>
<span class="evidence-drawer__proof-node-id">{{ node.id }}</span>
</div>
<div class="evidence-drawer__proof-node-body">
<span *ngIf="node.ruleId" class="evidence-drawer__proof-node-rule">{{ node.ruleId }}</span>
<span class="evidence-drawer__proof-node-delta" [class.positive]="node.delta > 0" [class.negative]="node.delta < 0">
Δ {{ node.delta >= 0 ? '+' : '' }}{{ node.delta | number:'1.2-2' }}
</span>
<span class="evidence-drawer__proof-node-total">= {{ node.total | number:'1.2-2' }}</span>
</div>
<div class="evidence-drawer__proof-node-refs" *ngIf="node.evidenceRefs.length">
<span *ngFor="let ref of node.evidenceRefs" class="evidence-drawer__proof-node-ref">{{ ref }}</span>
</div>
</div>
</div>
<p *ngIf="!data().proofNodes?.length" class="evidence-drawer__empty">
No proof chain data available.
</p>
</section>
<!-- Reachability Tab -->
<section *ngIf="activeTab() === 'reachability'" class="evidence-drawer__section">
<h3>Reachability Analysis</h3>
<div class="evidence-drawer__reachability-header" *ngIf="data().confidenceTier">
<app-confidence-tier-badge [tier]="data().confidenceTier!"></app-confidence-tier-badge>
<div class="evidence-drawer__gates" *ngIf="data().gates?.length">
<app-gate-badge *ngFor="let gate of data().gates" [gate]="gate"></app-gate-badge>
</div>
</div>
<app-path-visualization
*ngIf="data().reachabilityPath"
[data]="data().reachabilityPath!"
[collapsed]="false"
></app-path-visualization>
<p *ngIf="!data().reachabilityPath" class="evidence-drawer__empty">
No reachability path available.
</p>
</section>
<!-- VEX Tab -->
<section *ngIf="activeTab() === 'vex'" class="evidence-drawer__section">
<h3>VEX Decisions</h3>
<div class="evidence-drawer__vex-merged" *ngIf="data().mergedVexStatus">
<strong>Merged Status:</strong>
<span class="evidence-drawer__vex-status" [class]="'evidence-drawer__vex-status--' + data().mergedVexStatus">
{{ data().mergedVexStatus | uppercase }}
</span>
</div>
<div class="evidence-drawer__vex-decisions" *ngIf="data().vexDecisions?.length">
<div class="evidence-drawer__vex-decision" *ngFor="let vex of data().vexDecisions">
<div class="evidence-drawer__vex-decision-header">
<span class="evidence-drawer__vex-status" [class]="'evidence-drawer__vex-status--' + vex.status">
{{ vex.status | uppercase }}
</span>
<span class="evidence-drawer__vex-source">{{ vex.source }}</span>
<span class="evidence-drawer__vex-confidence">{{ vex.confidence | percent }}</span>
</div>
<div class="evidence-drawer__vex-decision-body">
<p *ngIf="vex.justification">{{ vex.justification }}</p>
<div class="evidence-drawer__vex-meta">
<span *ngIf="vex.jurisdiction">Region: {{ vex.jurisdiction }}</span>
<span>{{ vex.timestamp | date:'short' }}</span>
</div>
</div>
</div>
</div>
<p *ngIf="!data().vexDecisions?.length" class="evidence-drawer__empty">
No VEX decisions available.
</p>
</section>
<!-- Attestation Tab -->
<section *ngIf="activeTab() === 'attestation'" class="evidence-drawer__section">
<h3>Attestations</h3>
<div class="evidence-drawer__attestations" *ngIf="data().attestations?.length">
<div
class="evidence-drawer__attestation"
*ngFor="let att of data().attestations"
[class.evidence-drawer__attestation--verified]="att.verified"
>
<div class="evidence-drawer__attestation-header">
<span class="evidence-drawer__attestation-type">{{ att.envelopeType }}</span>
<span
class="evidence-drawer__attestation-status"
[class.verified]="att.verified"
>
{{ att.verified ? '✓ Verified' : '⚠ Unverified' }}
</span>
</div>
<dl class="evidence-drawer__attestation-details">
<dt>Predicate Type</dt>
<dd><code>{{ att.predicateType }}</code></dd>
<dt>Key ID</dt>
<dd><code>{{ att.keyId }}</code></dd>
<dt>Algorithm</dt>
<dd>{{ att.algorithm }}</dd>
<dt>Signed At</dt>
<dd>{{ att.signedAt | date:'medium' }}</dd>
<dt *ngIf="att.rekorLogIndex">Rekor Log</dt>
<dd *ngIf="att.rekorLogIndex">
Index: {{ att.rekorLogIndex }}
<span *ngIf="att.rekorLogId">({{ att.rekorLogId }})</span>
</dd>
</dl>
</div>
</div>
<p *ngIf="!data().attestations?.length" class="evidence-drawer__empty">
No attestations available.
</p>
</section>
</div>
</div>
</div>
`,
styles: [`
.evidence-drawer {
position: fixed;
top: 0;
left: 0;
right: 0;
bottom: 0;
z-index: 1000;
pointer-events: none;
}
.evidence-drawer--open {
pointer-events: auto;
}
.evidence-drawer__backdrop {
position: absolute;
inset: 0;
background: rgba(0, 0, 0, 0.4);
opacity: 0;
transition: opacity 0.2s;
}
.evidence-drawer--open .evidence-drawer__backdrop {
opacity: 1;
}
.evidence-drawer__panel {
position: absolute;
top: 0;
right: 0;
bottom: 0;
width: min(600px, 90vw);
background: var(--surface-primary, #fff);
box-shadow: -4px 0 24px rgba(0, 0, 0, 0.15);
transform: translateX(100%);
transition: transform 0.3s ease-out;
display: flex;
flex-direction: column;
}
.evidence-drawer--open .evidence-drawer__panel {
transform: translateX(0);
}
.evidence-drawer__header {
display: flex;
align-items: flex-start;
justify-content: space-between;
padding: 1.25rem;
border-bottom: 1px solid var(--border-color, #dee2e6);
background: var(--surface-secondary, #f8f9fa);
}
.evidence-drawer__title {
display: flex;
flex-direction: column;
gap: 0.25rem;
h2 {
margin: 0;
font-size: 1.125rem;
font-weight: 600;
}
}
.evidence-drawer__severity {
display: inline-block;
padding: 0.125rem 0.5rem;
border-radius: 3px;
font-size: 0.6875rem;
font-weight: 700;
text-transform: uppercase;
width: fit-content;
}
.evidence-drawer__severity--critical { background: #dc3545; color: #fff; }
.evidence-drawer__severity--high { background: #fd7e14; color: #fff; }
.evidence-drawer__severity--medium { background: #ffc107; color: #212529; }
.evidence-drawer__severity--low { background: #28a745; color: #fff; }
.evidence-drawer__severity--info { background: #17a2b8; color: #fff; }
.evidence-drawer__package {
font-family: var(--font-mono, monospace);
font-size: 0.8125rem;
color: var(--text-secondary, #6c757d);
}
.evidence-drawer__close {
background: none;
border: none;
font-size: 1.5rem;
line-height: 1;
cursor: pointer;
padding: 0.25rem;
color: var(--text-secondary, #6c757d);
&:hover {
color: var(--text-primary, #212529);
}
}
.evidence-drawer__tabs {
display: flex;
gap: 0;
border-bottom: 1px solid var(--border-color, #dee2e6);
overflow-x: auto;
}
.evidence-drawer__tab {
position: relative;
padding: 0.75rem 1rem;
background: none;
border: none;
font-size: 0.8125rem;
font-weight: 500;
color: var(--text-secondary, #6c757d);
cursor: pointer;
white-space: nowrap;
transition: color 0.15s, background-color 0.15s;
&:hover {
color: var(--text-primary, #212529);
background: var(--surface-secondary, #f8f9fa);
}
}
.evidence-drawer__tab--active {
color: var(--primary, #007bff);
border-bottom: 2px solid var(--primary, #007bff);
margin-bottom: -1px;
}
.evidence-drawer__tab-indicator {
position: absolute;
top: 0.5rem;
right: 0.5rem;
width: 6px;
height: 6px;
border-radius: 50%;
background: var(--primary, #007bff);
}
.evidence-drawer__content {
flex: 1;
overflow-y: auto;
padding: 1.25rem;
}
.evidence-drawer__section {
h3 {
margin: 0 0 1rem;
font-size: 1rem;
font-weight: 600;
}
h4 {
margin: 1rem 0 0.5rem;
font-size: 0.875rem;
font-weight: 600;
}
}
.evidence-drawer__details {
display: grid;
grid-template-columns: auto 1fr;
gap: 0.5rem 1rem;
margin: 0;
dt {
font-weight: 500;
color: var(--text-secondary, #6c757d);
}
dd {
margin: 0;
}
}
.evidence-drawer__vex-status {
display: inline-block;
padding: 0.125rem 0.5rem;
border-radius: 3px;
font-size: 0.6875rem;
font-weight: 600;
}
.evidence-drawer__vex-status--not_affected { background: #28a745; color: #fff; }
.evidence-drawer__vex-status--affected { background: #dc3545; color: #fff; }
.evidence-drawer__vex-status--under_investigation { background: #ffc107; color: #212529; }
.evidence-drawer__vex-status--fixed { background: #17a2b8; color: #fff; }
.evidence-drawer__falsification {
margin-top: 1.5rem;
padding: 1rem;
background: rgba(253, 126, 20, 0.1);
border-radius: 6px;
border-left: 3px solid #fd7e14;
}
.evidence-drawer__falsification-intro {
font-size: 0.8125rem;
color: var(--text-secondary, #6c757d);
margin: 0 0 0.5rem;
}
.evidence-drawer__falsification-list {
margin: 0;
padding-left: 1.25rem;
font-size: 0.875rem;
li {
margin-bottom: 0.25rem;
}
}
.evidence-drawer__proof-root {
margin-bottom: 1rem;
padding: 0.75rem;
background: var(--surface-secondary, #f8f9fa);
border-radius: 4px;
font-size: 0.8125rem;
code {
font-family: var(--font-mono, monospace);
word-break: break-all;
}
}
.evidence-drawer__proof-nodes {
display: flex;
flex-direction: column;
gap: 0.5rem;
}
.evidence-drawer__proof-node {
padding: 0.75rem;
background: var(--surface-secondary, #f8f9fa);
border-radius: 4px;
border-left: 3px solid var(--border-color, #dee2e6);
}
.evidence-drawer__proof-node--input { border-left-color: #6c757d; }
.evidence-drawer__proof-node--rule { border-left-color: #007bff; }
.evidence-drawer__proof-node--merge { border-left-color: #6f42c1; }
.evidence-drawer__proof-node--output { border-left-color: #28a745; }
.evidence-drawer__proof-node-header {
display: flex;
align-items: center;
gap: 0.5rem;
margin-bottom: 0.25rem;
}
.evidence-drawer__proof-node-kind {
font-size: 0.625rem;
font-weight: 700;
padding: 0.125rem 0.375rem;
border-radius: 2px;
background: var(--surface-tertiary, #e9ecef);
}
.evidence-drawer__proof-node-id {
font-family: var(--font-mono, monospace);
font-size: 0.75rem;
color: var(--text-secondary, #6c757d);
}
.evidence-drawer__proof-node-body {
display: flex;
align-items: center;
gap: 0.75rem;
font-size: 0.8125rem;
}
.evidence-drawer__proof-node-delta {
font-weight: 600;
&.positive { color: #dc3545; }
&.negative { color: #28a745; }
}
.evidence-drawer__proof-node-refs {
display: flex;
flex-wrap: wrap;
gap: 0.25rem;
margin-top: 0.5rem;
}
.evidence-drawer__proof-node-ref {
font-family: var(--font-mono, monospace);
font-size: 0.6875rem;
padding: 0.125rem 0.375rem;
background: var(--surface-tertiary, #e9ecef);
border-radius: 2px;
}
.evidence-drawer__reachability-header {
display: flex;
align-items: center;
gap: 1rem;
margin-bottom: 1rem;
}
.evidence-drawer__gates {
display: flex;
gap: 0.25rem;
}
.evidence-drawer__vex-merged {
margin-bottom: 1rem;
padding: 0.75rem;
background: var(--surface-secondary, #f8f9fa);
border-radius: 4px;
}
.evidence-drawer__vex-decisions {
display: flex;
flex-direction: column;
gap: 0.75rem;
}
.evidence-drawer__vex-decision {
padding: 0.75rem;
background: var(--surface-secondary, #f8f9fa);
border-radius: 4px;
}
.evidence-drawer__vex-decision-header {
display: flex;
align-items: center;
gap: 0.5rem;
margin-bottom: 0.5rem;
}
.evidence-drawer__vex-source {
font-weight: 500;
font-size: 0.8125rem;
}
.evidence-drawer__vex-confidence {
font-size: 0.75rem;
color: var(--text-secondary, #6c757d);
}
.evidence-drawer__vex-decision-body {
font-size: 0.8125rem;
p {
margin: 0 0 0.5rem;
}
}
.evidence-drawer__vex-meta {
display: flex;
gap: 1rem;
font-size: 0.75rem;
color: var(--text-tertiary, #868e96);
}
.evidence-drawer__attestations {
display: flex;
flex-direction: column;
gap: 0.75rem;
}
.evidence-drawer__attestation {
padding: 0.75rem;
background: var(--surface-secondary, #f8f9fa);
border-radius: 4px;
border-left: 3px solid var(--border-color, #dee2e6);
}
.evidence-drawer__attestation--verified {
border-left-color: #28a745;
}
.evidence-drawer__attestation-header {
display: flex;
align-items: center;
justify-content: space-between;
margin-bottom: 0.5rem;
}
.evidence-drawer__attestation-type {
font-weight: 600;
font-size: 0.8125rem;
}
.evidence-drawer__attestation-status {
font-size: 0.75rem;
font-weight: 500;
color: #fd7e14;
&.verified {
color: #28a745;
}
}
.evidence-drawer__attestation-details {
display: grid;
grid-template-columns: auto 1fr;
gap: 0.25rem 0.75rem;
font-size: 0.8125rem;
dt {
font-weight: 500;
color: var(--text-secondary, #6c757d);
}
dd {
margin: 0;
code {
font-family: var(--font-mono, monospace);
font-size: 0.75rem;
word-break: break-all;
}
}
}
.evidence-drawer__empty {
text-align: center;
color: var(--text-secondary, #6c757d);
font-style: italic;
padding: 2rem;
}
`],
})
export class EvidenceDrawerComponent {
/** Evidence data to display. */
data = input.required<EvidenceDrawerData>();
/** Whether the drawer is open. */
open = input<boolean>(false);
/** Emitted when the drawer should close. */
close = output<void>();
/** Active tab. */
activeTab = signal<EvidenceTab>('summary');
/** Tab configuration. */
tabs: Array<{ id: EvidenceTab; label: string; hasData?: () => boolean }> = [
{ id: 'summary', label: 'Summary' },
{ id: 'proof', label: 'Proof Chain', hasData: () => !!this.data().proofNodes?.length },
{ id: 'reachability', label: 'Reachability', hasData: () => !!this.data().reachabilityPath },
{ id: 'vex', label: 'VEX', hasData: () => !!this.data().vexDecisions?.length },
{ id: 'attestation', label: 'Attestation', hasData: () => !!this.data().attestations?.length },
];
}

View File

@@ -0,0 +1,144 @@
/**
* Gate Badge Component.
* Sprint: SPRINT_3700_0005_0001_witness_ui_cli (UI-003)
*
* Displays security gate information in the reachability path.
*/
import { Component, input, computed } from '@angular/core';
import { CommonModule } from '@angular/common';
import { GateInfo } from '../../core/api/witness.models';
@Component({
selector: 'app-gate-badge',
standalone: true,
imports: [CommonModule],
template: `
<span
class="gate-badge"
[class]="gateClass()"
[attr.title]="tooltip()"
[attr.aria-label]="ariaLabel()"
>
<span class="gate-badge__icon">{{ gateIcon() }}</span>
<span class="gate-badge__type">{{ gateTypeLabel() }}</span>
<span class="gate-badge__confidence" *ngIf="showConfidence()">
{{ formatConfidence() }}
</span>
</span>
`,
styles: [`
.gate-badge {
display: inline-flex;
align-items: center;
gap: 0.25rem;
padding: 0.125rem 0.5rem;
border-radius: 3px;
font-size: 0.6875rem;
font-weight: 500;
border: 1px solid;
cursor: help;
transition: background-color 0.15s;
}
.gate-badge--auth {
background-color: rgba(40, 167, 69, 0.1);
border-color: #28a745;
color: #28a745;
}
.gate-badge--authz {
background-color: rgba(0, 123, 255, 0.1);
border-color: #007bff;
color: #007bff;
}
.gate-badge--validation {
background-color: rgba(253, 126, 20, 0.1);
border-color: #fd7e14;
color: #fd7e14;
}
.gate-badge--sanitization {
background-color: rgba(102, 16, 242, 0.1);
border-color: #6610f2;
color: #6610f2;
}
.gate-badge--rate-limit {
background-color: rgba(108, 117, 125, 0.1);
border-color: #6c757d;
color: #6c757d;
}
.gate-badge--other {
background-color: rgba(23, 162, 184, 0.1);
border-color: #17a2b8;
color: #17a2b8;
}
.gate-badge__icon {
font-size: 0.75rem;
}
.gate-badge__confidence {
opacity: 0.8;
font-size: 0.625rem;
}
`],
})
export class GateBadgeComponent {
/** Gate information. */
gate = input.required<GateInfo>();
/** Whether to show confidence. */
showConfidence = input<boolean>(true);
gateClass = computed(() => `gate-badge gate-badge--${this.gate().gateType}`);
gateIcon = computed(() => {
const icons: Record<string, string> = {
'auth': '🔐',
'authz': '🛡️',
'validation': '✓',
'sanitization': '🧹',
'rate-limit': '⏱️',
'other': '🔒',
};
return icons[this.gate().gateType] ?? '🔒';
});
gateTypeLabel = computed(() => {
const labels: Record<string, string> = {
'auth': 'Auth',
'authz': 'AuthZ',
'validation': 'Validation',
'sanitization': 'Sanitize',
'rate-limit': 'Rate Limit',
'other': 'Gate',
};
return labels[this.gate().gateType] ?? 'Gate';
});
tooltip = computed(() => {
const g = this.gate();
let text = `${g.symbol}`;
if (g.description) {
text += `\n${g.description}`;
}
if (g.file && g.line) {
text += `\n${g.file}:${g.line}`;
}
text += `\nConfidence: ${(g.confidence * 100).toFixed(0)}%`;
return text;
});
ariaLabel = computed(() =>
`Security gate: ${this.gate().symbol}, confidence ${(this.gate().confidence * 100).toFixed(0)}%`
);
formatConfidence = computed(() =>
`${(this.gate().confidence * 100).toFixed(0)}%`
);
}

View File

@@ -3,3 +3,18 @@ export { ExceptionExplainComponent, ExceptionExplainData } from './exception-exp
export { ConfidenceBadgeComponent, ConfidenceBand } from './confidence-badge.component';
export { QuietProvenanceIndicatorComponent } from './quiet-provenance-indicator.component';
export { PolicyPackSelectorComponent } from './policy-pack-selector.component';
// Witness & Reachability components (SPRINT_3700_0005_0001)
export { ConfidenceTierBadgeComponent } from './confidence-tier-badge.component';
export { GateBadgeComponent } from './gate-badge.component';
export { PathVisualizationComponent, PathVisualizationData } from './path-visualization.component';
export { WitnessModalComponent } from './witness-modal.component';
// Risk Drift components (SPRINT_3600_0004_0001)
export { RiskDriftCardComponent, DriftResult, DriftedSink, DriftCause, AssociatedVuln } from './risk-drift-card.component';
// Evidence Drawer (SPRINT_3850_0001_0001)
export { EvidenceDrawerComponent, EvidenceDrawerData, EvidenceTab, ProofNode, VexDecision, AttestationInfo } from './evidence-drawer.component';
// Unknowns UI (SPRINT_3850_0001_0001)
export { UnknownChipComponent, UnknownItem, UnknownType, UnknownTriageAction } from './unknown-chip.component';

Some files were not shown because too many files have changed in this diff Show More