Refactor code structure for improved readability and maintainability; optimize performance in key functions.
@@ -67,8 +67,8 @@ Reachability Drift Detection tracks function-level reachability changes between
### Call Graph Support

- **.NET**: Roslyn semantic analysis (`DotNetCallGraphExtractor`)
- **Node.js**: Babel AST analysis (`NodeCallGraphExtractor`)
- **Future**: Java (ASM), Go (SSA), Python (AST)
- **Node.js**: placeholder trace ingestion (`NodeCallGraphExtractor`); Babel integration pending (Sprint 3600.0004)
- **Planned**: Java (ASM), Go (SSA), Python (AST) extractors exist but are not registered yet

### Entrypoint Detection

- ASP.NET Core: `[HttpGet]`, `[Route]`, minimal APIs
@@ -77,9 +77,17 @@ Reachability Drift Detection tracks function-level reachability changes between
- CLI: `Main`, command handlers

### Drift API Endpoints
- `POST /api/drift/analyze` - Compute drift between two scans
- `GET /api/drift/{driftId}` - Retrieve drift result
- `GET /api/drift/{driftId}/paths` - Get detailed paths
- `GET /api/v1/scans/{scanId}/drift` - Get or compute drift between two scans
- `GET /api/v1/drift/{driftId}/sinks` - Page drifted sinks
- `POST /api/v1/scans/{scanId}/compute-reachability` - Trigger reachability computation
- `GET /api/v1/scans/{scanId}/reachability/components` - List component reachability
- `GET /api/v1/scans/{scanId}/reachability/findings` - List reachability findings
- `GET /api/v1/scans/{scanId}/reachability/explain` - Explain reachability for CVE + PURL
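
A minimal sketch of how a client might drive the v1 drift flow listed above. The endpoint paths come from this commit; the host, bearer token, `driftId` response field, and `page` query parameter are illustrative assumptions.

```csharp
using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Text.Json;

// Hypothetical client flow against the v1 drift endpoints above.
// Host, token, and the "driftId" response field are assumptions, not part of this commit.
var scanId = "scan-123";
using var client = new HttpClient { BaseAddress = new Uri("https://scanner.example.internal") };
client.DefaultRequestHeaders.Authorization = new("Bearer", "<token>");

// 1. Trigger reachability computation for the scan.
await client.PostAsync($"/api/v1/scans/{scanId}/compute-reachability", content: null);

// 2. Get (or compute) drift between this scan and its baseline.
var drift = await client.GetFromJsonAsync<JsonElement>($"/api/v1/scans/{scanId}/drift");

// 3. Page drifted sinks using the drift identifier returned in step 2 (query parameter name is illustrative).
var driftId = drift.GetProperty("driftId").GetString();
var sinks = await client.GetFromJsonAsync<JsonElement>($"/api/v1/drift/{driftId}/sinks?page=1");
```
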
### Drift Documentation
- `docs/modules/scanner/reachability-drift.md`
- `docs/api/scanner-drift-api.md`
- `docs/operations/reachability-drift-guide.md`

### Testing

- Unit tests: `src/Scanner/__Tests/StellaOps.Scanner.ReachabilityDrift.Tests/`
@@ -122,7 +130,7 @@ Layered binary reachability with attestable slices for CVE triage:
- **3840**: Runtime trace merge (eBPF/ETW)
- **3850**: OCI storage and CLI commands

See: `docs/implplan/SPRINT_3800_SUMMARY.md`
See: `docs/implplan/SPRINT_3800_0000_0000_summary.md`

### Libraries

- `StellaOps.Scanner.Reachability.Slices` - Slice extraction, DSSE signing, verdict computation

@@ -9,7 +9,7 @@ public sealed record DescriptorRequest
{
    public string ImageDigest { get; init; } = string.Empty;
    public string SbomPath { get; init; } = string.Empty;
    public string SbomMediaType { get; init; } = "application/vnd.cyclonedx+json";
    public string SbomMediaType { get; init; } = "application/vnd.cyclonedx+json; version=1.7";
    public string SbomFormat { get; init; } = "cyclonedx-json";
    public string SbomArtifactType { get; init; } = "application/vnd.stellaops.sbom.layer+json";
    public string SbomKind { get; init; } = "inventory";
@@ -208,7 +208,7 @@ internal static class Program
var imageDigest = RequireOption(args, "--image");
var sbomPath = RequireOption(args, "--sbom");

var sbomMediaType = GetOption(args, "--media-type") ?? "application/vnd.cyclonedx+json";
var sbomMediaType = GetOption(args, "--media-type") ?? "application/vnd.cyclonedx+json; version=1.7";
var sbomFormat = GetOption(args, "--sbom-format") ?? "cyclonedx-json";
var sbomKind = GetOption(args, "--sbom-kind") ?? "inventory";
var artifactType = GetOption(args, "--artifact-type") ?? "application/vnd.stellaops.sbom.layer+json";
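
The CLI options parsed above line up with the `DescriptorRequest` defaults changed in the same commit. A minimal sketch of how the two hunks might fit together; the request wiring shown here is an assumption based on the surrounding code, not part of this commit.

```csharp
// Hypothetical wiring: build the descriptor request from the CLI options parsed above.
// Default values mirror the new defaults in this commit (CycloneDX 1.7 media type).
var request = new DescriptorRequest
{
    ImageDigest = imageDigest,
    SbomPath = sbomPath,
    SbomMediaType = sbomMediaType,   // "application/vnd.cyclonedx+json; version=1.7" when --media-type is omitted
    SbomFormat = sbomFormat,         // "cyclonedx-json"
    SbomKind = sbomKind,             // "inventory"
    SbomArtifactType = artifactType  // "application/vnd.stellaops.sbom.layer+json"
};
```
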
@@ -1,7 +1,7 @@
// -----------------------------------------------------------------------------
// FindingEvidenceContracts.cs
// Sprint: SPRINT_3800_0001_0001_evidence_api_models
// Description: Unified evidence API response contracts for findings.
// Sprint: SPRINT_4300_0001_0002_findings_evidence_api
// Description: Evidence API response contracts for explainable triage.
// -----------------------------------------------------------------------------

using System;
@@ -11,447 +11,188 @@ using System.Text.Json.Serialization;
namespace StellaOps.Scanner.WebService.Contracts;

/// <summary>
/// Unified evidence response for a finding, combining reachability, boundary,
/// VEX evidence, and score explanation.
/// Consolidated evidence response for a finding.
/// Matches the advisory contract for explainable triage UX.
/// </summary>
public sealed record FindingEvidenceResponse
{
/// <summary>
|
||||
/// Unique identifier for the finding.
|
||||
/// Unique finding identifier.
|
||||
/// </summary>
|
||||
[JsonPropertyName("finding_id")]
|
||||
public string FindingId { get; init; } = string.Empty;
|
||||
public required string FindingId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// CVE identifier (e.g., "CVE-2021-44228").
|
||||
/// CVE or vulnerability identifier.
|
||||
/// </summary>
|
||||
[JsonPropertyName("cve")]
|
||||
public string Cve { get; init; } = string.Empty;
|
||||
public required string Cve { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Component where the vulnerability was found.
|
||||
/// Affected component details.
|
||||
/// </summary>
|
||||
[JsonPropertyName("component")]
|
||||
public ComponentRef? Component { get; init; }
|
||||
public required ComponentInfo Component { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Reachable call path from entrypoint to vulnerable sink.
|
||||
/// Each element is a fully-qualified name (FQN).
|
||||
/// Reachable path from entrypoint to vulnerable code.
|
||||
/// </summary>
|
||||
[JsonPropertyName("reachable_path")]
|
||||
public IReadOnlyList<string>? ReachablePath { get; init; }
|
||||
public IReadOnlyList<string> ReachablePath { get; init; } = Array.Empty<string>();
|
||||
|
||||
/// <summary>
|
||||
/// Entrypoint proof (how the code is exposed).
|
||||
/// Entrypoint details (HTTP route, CLI command, etc.).
|
||||
/// </summary>
|
||||
[JsonPropertyName("entrypoint")]
|
||||
public EntrypointProof? Entrypoint { get; init; }
|
||||
public EntrypointInfo? Entrypoint { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Boundary proof (surface exposure and controls).
|
||||
/// </summary>
|
||||
[JsonPropertyName("boundary")]
|
||||
public BoundaryProofDto? Boundary { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// VEX (Vulnerability Exploitability eXchange) evidence.
|
||||
/// VEX exploitability status.
|
||||
/// </summary>
|
||||
[JsonPropertyName("vex")]
|
||||
public VexEvidenceDto? Vex { get; init; }
|
||||
public VexStatusInfo? Vex { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Score explanation with additive risk breakdown.
|
||||
/// </summary>
|
||||
[JsonPropertyName("score_explain")]
|
||||
public ScoreExplanationDto? ScoreExplain { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When the finding was last observed.
|
||||
/// When this evidence was last observed/generated.
|
||||
/// </summary>
|
||||
[JsonPropertyName("last_seen")]
|
||||
public DateTimeOffset LastSeen { get; init; }
|
||||
public required DateTimeOffset LastSeen { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When the evidence expires (for VEX/attestation freshness).
|
||||
/// </summary>
|
||||
[JsonPropertyName("expires_at")]
|
||||
public DateTimeOffset? ExpiresAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether the evidence is stale (expired or near-expiry).
|
||||
/// </summary>
|
||||
[JsonPropertyName("is_stale")]
|
||||
public bool IsStale { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// References to DSSE/in-toto attestations backing this evidence.
|
||||
/// Content-addressed references to attestations.
|
||||
/// </summary>
|
||||
[JsonPropertyName("attestation_refs")]
|
||||
public IReadOnlyList<string>? AttestationRefs { get; init; }
|
||||
public IReadOnlyList<string> AttestationRefs { get; init; } = Array.Empty<string>();
|
||||
|
||||
/// <summary>
|
||||
/// Risk score with explanation.
|
||||
/// </summary>
|
||||
[JsonPropertyName("score")]
|
||||
public ScoreInfo? Score { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Boundary exposure information.
|
||||
/// </summary>
|
||||
[JsonPropertyName("boundary")]
|
||||
public BoundaryInfo? Boundary { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Evidence freshness and TTL.
|
||||
/// </summary>
|
||||
[JsonPropertyName("freshness")]
|
||||
public FreshnessInfo Freshness { get; init; } = new();
|
||||
}
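
Taken together, the reworked response flattens the old proof DTOs into the new `*Info` records. A minimal sketch of how the new shape might be populated; only the property and type names come from this file, all values are illustrative.

```csharp
// Illustrative payload for the reworked contract; every value below is made up.
var evidence = new FindingEvidenceResponse
{
    FindingId = "finding-0001",
    Cve = "CVE-2021-44228",
    Component = new ComponentInfo
    {
        Name = "log4j-core",
        Version = "2.14.1",
        Purl = "pkg:maven/org.apache.logging.log4j/log4j-core@2.14.1",
        Ecosystem = "maven"
    },
    ReachablePath = new[] { "Api.SearchController.Get", "LoggingHelper.Log", "org.apache.logging.log4j.Logger.error" },
    Entrypoint = new EntrypointInfo { Type = "http_handler", Route = "/api/v1/search", Method = "GET" },
    Vex = new VexStatusInfo { Status = "affected", Timestamp = DateTimeOffset.UtcNow, Issuer = "vendor" },
    LastSeen = DateTimeOffset.UtcNow,
    AttestationRefs = new[] { "sha256:abc123" },
    Score = new ScoreInfo
    {
        RiskScore = 87,
        Contributions = new[] { new ScoreContribution { Factor = "reachability", Value = 40, Reason = "entrypoint-to-sink path found" } }
    },
    Boundary = new BoundaryInfo { Surface = "http", Exposure = "internet", Controls = new[] { "waf" } },
    Freshness = new FreshnessInfo { IsStale = false, ExpiresAt = DateTimeOffset.UtcNow.AddDays(7), TtlRemainingHours = 168 }
};
```
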
|
||||
|
||||
/// <summary>
|
||||
/// Reference to a component (package) by PURL and version.
|
||||
/// </summary>
|
||||
public sealed record ComponentRef
|
||||
public sealed record ComponentInfo
|
||||
{
|
||||
/// <summary>
|
||||
/// Package URL (PURL) identifier.
|
||||
/// </summary>
|
||||
[JsonPropertyName("purl")]
|
||||
public string Purl { get; init; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Package name.
|
||||
/// </summary>
|
||||
[JsonPropertyName("name")]
|
||||
public string Name { get; init; } = string.Empty;
|
||||
public required string Name { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Package version.
|
||||
/// </summary>
|
||||
[JsonPropertyName("version")]
|
||||
public string Version { get; init; } = string.Empty;
|
||||
public required string Version { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Package type/ecosystem (npm, maven, nuget, etc.).
|
||||
/// </summary>
|
||||
[JsonPropertyName("type")]
|
||||
public string Type { get; init; } = string.Empty;
|
||||
[JsonPropertyName("purl")]
|
||||
public string? Purl { get; init; }
|
||||
|
||||
[JsonPropertyName("ecosystem")]
|
||||
public string? Ecosystem { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Proof of how code is exposed as an entrypoint.
|
||||
/// </summary>
|
||||
public sealed record EntrypointProof
|
||||
public sealed record EntrypointInfo
|
||||
{
|
||||
/// <summary>
|
||||
/// Type of entrypoint (http_handler, grpc_method, cli_command, etc.).
|
||||
/// </summary>
|
||||
[JsonPropertyName("type")]
|
||||
public string Type { get; init; } = string.Empty;
|
||||
public required string Type { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Route or path (e.g., "/api/v1/users", "grpc.UserService.GetUser").
|
||||
/// </summary>
|
||||
[JsonPropertyName("route")]
|
||||
public string? Route { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// HTTP method if applicable (GET, POST, etc.).
|
||||
/// </summary>
|
||||
[JsonPropertyName("method")]
|
||||
public string? Method { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Authentication requirement (none, optional, required).
|
||||
/// </summary>
|
||||
[JsonPropertyName("auth")]
|
||||
public string? Auth { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Execution phase (startup, runtime, shutdown).
|
||||
/// </summary>
|
||||
[JsonPropertyName("phase")]
|
||||
public string? Phase { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Fully qualified name of the entrypoint symbol.
|
||||
/// </summary>
|
||||
[JsonPropertyName("fqn")]
|
||||
public string Fqn { get; init; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Source file location.
|
||||
/// </summary>
|
||||
[JsonPropertyName("location")]
|
||||
public SourceLocation? Location { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Source file location reference.
|
||||
/// </summary>
|
||||
public sealed record SourceLocation
|
||||
public sealed record VexStatusInfo
|
||||
{
|
||||
/// <summary>
|
||||
/// File path relative to repository root.
|
||||
/// </summary>
|
||||
[JsonPropertyName("file")]
|
||||
public string File { get; init; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Line number (1-indexed).
|
||||
/// </summary>
|
||||
[JsonPropertyName("line")]
|
||||
public int? Line { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Column number (1-indexed).
|
||||
/// </summary>
|
||||
[JsonPropertyName("column")]
|
||||
public int? Column { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Boundary proof describing surface exposure and controls.
|
||||
/// </summary>
|
||||
public sealed record BoundaryProofDto
|
||||
{
|
||||
/// <summary>
|
||||
/// Kind of boundary (network, file, ipc, etc.).
|
||||
/// </summary>
|
||||
[JsonPropertyName("kind")]
|
||||
public string Kind { get; init; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Surface descriptor (what is exposed).
|
||||
/// </summary>
|
||||
[JsonPropertyName("surface")]
|
||||
public SurfaceDescriptor? Surface { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Exposure descriptor (how it's exposed).
|
||||
/// </summary>
|
||||
[JsonPropertyName("exposure")]
|
||||
public ExposureDescriptor? Exposure { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Authentication descriptor.
|
||||
/// </summary>
|
||||
[JsonPropertyName("auth")]
|
||||
public AuthDescriptor? Auth { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Security controls in place.
|
||||
/// </summary>
|
||||
[JsonPropertyName("controls")]
|
||||
public IReadOnlyList<ControlDescriptor>? Controls { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When the boundary was last verified.
|
||||
/// </summary>
|
||||
[JsonPropertyName("last_seen")]
|
||||
public DateTimeOffset LastSeen { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Confidence score (0.0 to 1.0).
|
||||
/// </summary>
|
||||
[JsonPropertyName("confidence")]
|
||||
public double Confidence { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Describes what attack surface is exposed.
|
||||
/// </summary>
|
||||
public sealed record SurfaceDescriptor
|
||||
{
|
||||
/// <summary>
|
||||
/// Type of surface (api, web, cli, library).
|
||||
/// </summary>
|
||||
[JsonPropertyName("type")]
|
||||
public string Type { get; init; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Protocol (http, https, grpc, tcp).
|
||||
/// </summary>
|
||||
[JsonPropertyName("protocol")]
|
||||
public string? Protocol { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Port number if network-exposed.
|
||||
/// </summary>
|
||||
[JsonPropertyName("port")]
|
||||
public int? Port { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Describes how the surface is exposed.
|
||||
/// </summary>
|
||||
public sealed record ExposureDescriptor
|
||||
{
|
||||
/// <summary>
|
||||
/// Exposure level (public, internal, private).
|
||||
/// </summary>
|
||||
[JsonPropertyName("level")]
|
||||
public string Level { get; init; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Whether the exposure is internet-facing.
|
||||
/// </summary>
|
||||
[JsonPropertyName("internet_facing")]
|
||||
public bool InternetFacing { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Network zone (dmz, internal, trusted).
|
||||
/// </summary>
|
||||
[JsonPropertyName("zone")]
|
||||
public string? Zone { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Describes authentication requirements.
|
||||
/// </summary>
|
||||
public sealed record AuthDescriptor
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether authentication is required.
|
||||
/// </summary>
|
||||
[JsonPropertyName("required")]
|
||||
public bool Required { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Authentication type (jwt, oauth2, basic, api_key).
|
||||
/// </summary>
|
||||
[JsonPropertyName("type")]
|
||||
public string? Type { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Required roles/scopes.
|
||||
/// </summary>
|
||||
[JsonPropertyName("roles")]
|
||||
public IReadOnlyList<string>? Roles { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Describes a security control.
|
||||
/// </summary>
|
||||
public sealed record ControlDescriptor
|
||||
{
|
||||
/// <summary>
|
||||
/// Type of control (rate_limit, waf, input_validation, etc.).
|
||||
/// </summary>
|
||||
[JsonPropertyName("type")]
|
||||
public string Type { get; init; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Whether the control is active.
|
||||
/// </summary>
|
||||
[JsonPropertyName("active")]
|
||||
public bool Active { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Control configuration details.
|
||||
/// </summary>
|
||||
[JsonPropertyName("config")]
|
||||
public string? Config { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// VEX (Vulnerability Exploitability eXchange) evidence.
|
||||
/// </summary>
|
||||
public sealed record VexEvidenceDto
|
||||
{
|
||||
/// <summary>
|
||||
/// VEX status (not_affected, affected, fixed, under_investigation).
|
||||
/// </summary>
|
||||
[JsonPropertyName("status")]
|
||||
public string Status { get; init; } = string.Empty;
|
||||
public required string Status { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Justification for the status.
|
||||
/// </summary>
|
||||
[JsonPropertyName("justification")]
|
||||
public string? Justification { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Impact statement explaining why not affected.
|
||||
/// </summary>
|
||||
[JsonPropertyName("impact")]
|
||||
public string? Impact { get; init; }
|
||||
[JsonPropertyName("timestamp")]
|
||||
public DateTimeOffset? Timestamp { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Action statement (remediation steps).
|
||||
/// </summary>
|
||||
[JsonPropertyName("action")]
|
||||
public string? Action { get; init; }
|
||||
[JsonPropertyName("issuer")]
|
||||
public string? Issuer { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reference to the VEX document/attestation.
|
||||
/// </summary>
|
||||
[JsonPropertyName("attestation_ref")]
|
||||
public string? AttestationRef { get; init; }
|
||||
public sealed record ScoreInfo
|
||||
{
|
||||
[JsonPropertyName("risk_score")]
|
||||
public required int RiskScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When the VEX statement was issued.
|
||||
/// </summary>
|
||||
[JsonPropertyName("issued_at")]
|
||||
public DateTimeOffset? IssuedAt { get; init; }
|
||||
[JsonPropertyName("contributions")]
|
||||
public IReadOnlyList<ScoreContribution> Contributions { get; init; } = Array.Empty<ScoreContribution>();
|
||||
}
|
||||
|
||||
public sealed record ScoreContribution
|
||||
{
|
||||
[JsonPropertyName("factor")]
|
||||
public required string Factor { get; init; }
|
||||
|
||||
[JsonPropertyName("value")]
|
||||
public required int Value { get; init; }
|
||||
|
||||
[JsonPropertyName("reason")]
|
||||
public string? Reason { get; init; }
|
||||
}
|
||||
|
||||
public sealed record BoundaryInfo
|
||||
{
|
||||
[JsonPropertyName("surface")]
|
||||
public required string Surface { get; init; }
|
||||
|
||||
[JsonPropertyName("exposure")]
|
||||
public required string Exposure { get; init; }
|
||||
|
||||
[JsonPropertyName("auth")]
|
||||
public AuthInfo? Auth { get; init; }
|
||||
|
||||
[JsonPropertyName("controls")]
|
||||
public IReadOnlyList<string> Controls { get; init; } = Array.Empty<string>();
|
||||
}
|
||||
|
||||
public sealed record AuthInfo
|
||||
{
|
||||
[JsonPropertyName("mechanism")]
|
||||
public required string Mechanism { get; init; }
|
||||
|
||||
[JsonPropertyName("required_scopes")]
|
||||
public IReadOnlyList<string> RequiredScopes { get; init; } = Array.Empty<string>();
|
||||
}
|
||||
|
||||
public sealed record FreshnessInfo
|
||||
{
|
||||
[JsonPropertyName("is_stale")]
|
||||
public bool IsStale { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When the VEX statement expires.
|
||||
/// </summary>
|
||||
[JsonPropertyName("expires_at")]
|
||||
public DateTimeOffset? ExpiresAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Source of the VEX statement (vendor, first-party, third-party).
|
||||
/// </summary>
|
||||
[JsonPropertyName("source")]
|
||||
public string? Source { get; init; }
|
||||
[JsonPropertyName("ttl_remaining_hours")]
|
||||
public int? TtlRemainingHours { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Score explanation with additive breakdown of risk factors.
|
||||
/// </summary>
|
||||
public sealed record ScoreExplanationDto
|
||||
public sealed record BatchEvidenceRequest
|
||||
{
|
||||
/// <summary>
|
||||
/// Kind of scoring algorithm (stellaops_risk_v1, cvss_v4, etc.).
|
||||
/// </summary>
|
||||
[JsonPropertyName("kind")]
|
||||
public string Kind { get; init; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Final computed risk score.
|
||||
/// </summary>
|
||||
[JsonPropertyName("risk_score")]
|
||||
public double RiskScore { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Individual score contributions.
|
||||
/// </summary>
|
||||
[JsonPropertyName("contributions")]
|
||||
public IReadOnlyList<ScoreContributionDto>? Contributions { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When the score was computed.
|
||||
/// </summary>
|
||||
[JsonPropertyName("last_seen")]
|
||||
public DateTimeOffset LastSeen { get; init; }
|
||||
[JsonPropertyName("finding_ids")]
|
||||
public required IReadOnlyList<string> FindingIds { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Individual contribution to the risk score.
|
||||
/// </summary>
|
||||
public sealed record ScoreContributionDto
|
||||
public sealed record BatchEvidenceResponse
|
||||
{
|
||||
/// <summary>
|
||||
/// Factor name (cvss_base, epss, reachability, gate_multiplier, etc.).
|
||||
/// </summary>
|
||||
[JsonPropertyName("factor")]
|
||||
public string Factor { get; init; } = string.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Weight applied to this factor (0.0 to 1.0).
|
||||
/// </summary>
|
||||
[JsonPropertyName("weight")]
|
||||
public double Weight { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Raw value before weighting.
|
||||
/// </summary>
|
||||
[JsonPropertyName("raw_value")]
|
||||
public double RawValue { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Weighted contribution to final score.
|
||||
/// </summary>
|
||||
[JsonPropertyName("contribution")]
|
||||
public double Contribution { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Human-readable explanation of this factor.
|
||||
/// </summary>
|
||||
[JsonPropertyName("explanation")]
|
||||
public string? Explanation { get; init; }
|
||||
[JsonPropertyName("findings")]
|
||||
public required IReadOnlyList<FindingEvidenceResponse> Findings { get; init; }
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Scanner.WebService.Contracts;
@@ -11,6 +12,153 @@ public sealed record SbomAcceptedResponseDto(
|
||||
[property: JsonPropertyName("componentCount")] int ComponentCount,
|
||||
[property: JsonPropertyName("digest")] string Digest);
|
||||
|
||||
/// <summary>
|
||||
/// Request payload for BYOS SBOM uploads.
|
||||
/// </summary>
|
||||
public sealed record SbomUploadRequestDto
|
||||
{
|
||||
[JsonPropertyName("artifactRef")]
|
||||
public string ArtifactRef { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("artifactDigest")]
|
||||
public string? ArtifactDigest { get; init; }
|
||||
|
||||
[JsonPropertyName("sbom")]
|
||||
public JsonElement? Sbom { get; init; }
|
||||
|
||||
[JsonPropertyName("sbomBase64")]
|
||||
public string? SbomBase64 { get; init; }
|
||||
|
||||
[JsonPropertyName("format")]
|
||||
public string? Format { get; init; }
|
||||
|
||||
[JsonPropertyName("source")]
|
||||
public SbomUploadSourceDto? Source { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Provenance metadata for a BYOS SBOM upload.
|
||||
/// </summary>
|
||||
public sealed record SbomUploadSourceDto
|
||||
{
|
||||
[JsonPropertyName("tool")]
|
||||
public string? Tool { get; init; }
|
||||
|
||||
[JsonPropertyName("version")]
|
||||
public string? Version { get; init; }
|
||||
|
||||
[JsonPropertyName("ciContext")]
|
||||
public SbomUploadCiContextDto? CiContext { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// CI metadata attached to a BYOS SBOM upload.
|
||||
/// </summary>
|
||||
public sealed record SbomUploadCiContextDto
|
||||
{
|
||||
[JsonPropertyName("buildId")]
|
||||
public string? BuildId { get; init; }
|
||||
|
||||
[JsonPropertyName("repository")]
|
||||
public string? Repository { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Response payload for BYOS SBOM uploads.
|
||||
/// </summary>
|
||||
public sealed record SbomUploadResponseDto
|
||||
{
|
||||
[JsonPropertyName("sbomId")]
|
||||
public string SbomId { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("artifactRef")]
|
||||
public string ArtifactRef { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("artifactDigest")]
|
||||
public string? ArtifactDigest { get; init; }
|
||||
|
||||
[JsonPropertyName("digest")]
|
||||
public string Digest { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("format")]
|
||||
public string Format { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("formatVersion")]
|
||||
public string FormatVersion { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("validationResult")]
|
||||
public SbomValidationSummaryDto ValidationResult { get; init; } = new();
|
||||
|
||||
[JsonPropertyName("analysisJobId")]
|
||||
public string AnalysisJobId { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("uploadedAtUtc")]
|
||||
public DateTimeOffset UploadedAtUtc { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Validation summary for a BYOS SBOM upload.
|
||||
/// </summary>
|
||||
public sealed record SbomValidationSummaryDto
|
||||
{
|
||||
[JsonPropertyName("valid")]
|
||||
public bool Valid { get; init; }
|
||||
|
||||
[JsonPropertyName("qualityScore")]
|
||||
public double QualityScore { get; init; }
|
||||
|
||||
[JsonPropertyName("warnings")]
|
||||
public IReadOnlyList<string> Warnings { get; init; } = Array.Empty<string>();
|
||||
|
||||
[JsonPropertyName("errors")]
|
||||
public IReadOnlyList<string> Errors { get; init; } = Array.Empty<string>();
|
||||
|
||||
[JsonPropertyName("componentCount")]
|
||||
public int ComponentCount { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Upload record returned for BYOS queries.
|
||||
/// </summary>
|
||||
public sealed record SbomUploadRecordDto
|
||||
{
|
||||
[JsonPropertyName("sbomId")]
|
||||
public string SbomId { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("artifactRef")]
|
||||
public string ArtifactRef { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("artifactDigest")]
|
||||
public string? ArtifactDigest { get; init; }
|
||||
|
||||
[JsonPropertyName("digest")]
|
||||
public string Digest { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("format")]
|
||||
public string Format { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("formatVersion")]
|
||||
public string FormatVersion { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("analysisJobId")]
|
||||
public string AnalysisJobId { get; init; } = string.Empty;
|
||||
|
||||
[JsonPropertyName("componentCount")]
|
||||
public int ComponentCount { get; init; }
|
||||
|
||||
[JsonPropertyName("qualityScore")]
|
||||
public double QualityScore { get; init; }
|
||||
|
||||
[JsonPropertyName("warnings")]
|
||||
public IReadOnlyList<string> Warnings { get; init; } = Array.Empty<string>();
|
||||
|
||||
[JsonPropertyName("source")]
|
||||
public SbomUploadSourceDto? Source { get; init; }
|
||||
|
||||
[JsonPropertyName("createdAtUtc")]
|
||||
public DateTimeOffset CreatedAtUtc { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// SBOM format types.
|
||||
/// </summary>
|
||||
|
||||
@@ -0,0 +1,89 @@
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using StellaOps.Scanner.WebService.Services;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Controllers;
|
||||
|
||||
[ApiController]
|
||||
[Route("api/v1/findings")]
|
||||
[Produces("application/json")]
|
||||
public sealed class FindingsEvidenceController : ControllerBase
|
||||
{
|
||||
private readonly IEvidenceCompositionService _evidenceService;
|
||||
private readonly ITriageQueryService _triageService;
|
||||
private readonly ILogger<FindingsEvidenceController> _logger;
|
||||
|
||||
public FindingsEvidenceController(
|
||||
IEvidenceCompositionService evidenceService,
|
||||
ITriageQueryService triageService,
|
||||
ILogger<FindingsEvidenceController> logger)
|
||||
{
|
||||
_evidenceService = evidenceService ?? throw new ArgumentNullException(nameof(evidenceService));
|
||||
_triageService = triageService ?? throw new ArgumentNullException(nameof(triageService));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get consolidated evidence for a finding.
|
||||
/// </summary>
|
||||
/// <param name="findingId">The finding identifier.</param>
|
||||
/// <param name="includeRaw">Include raw source locations (requires elevated permissions).</param>
|
||||
/// <response code="200">Evidence retrieved successfully.</response>
|
||||
/// <response code="404">Finding not found.</response>
|
||||
/// <response code="403">Insufficient permissions for raw source.</response>
|
||||
[HttpGet("{findingId}/evidence")]
|
||||
[ProducesResponseType(typeof(FindingEvidenceResponse), StatusCodes.Status200OK)]
|
||||
[ProducesResponseType(StatusCodes.Status404NotFound)]
|
||||
[ProducesResponseType(StatusCodes.Status403Forbidden)]
|
||||
public async Task<IActionResult> GetEvidenceAsync(
|
||||
[FromRoute] string findingId,
|
||||
[FromQuery] bool includeRaw = false,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
_logger.LogDebug("Getting evidence for finding {FindingId}", findingId);
|
||||
|
||||
if (includeRaw && !User.HasClaim("scope", "evidence:raw"))
|
||||
{
|
||||
return Forbid("Requires evidence:raw scope for raw source access");
|
||||
}
|
||||
|
||||
var finding = await _triageService.GetFindingAsync(findingId, ct).ConfigureAwait(false);
|
||||
if (finding is null)
|
||||
{
|
||||
return NotFound(new { error = "Finding not found", findingId });
|
||||
}
|
||||
|
||||
var response = await _evidenceService.ComposeAsync(finding, includeRaw, ct).ConfigureAwait(false);
|
||||
return Ok(response);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get evidence for multiple findings (batch).
|
||||
/// </summary>
|
||||
[HttpPost("evidence/batch")]
|
||||
[ProducesResponseType(typeof(BatchEvidenceResponse), StatusCodes.Status200OK)]
|
||||
public async Task<IActionResult> GetBatchEvidenceAsync(
|
||||
[FromBody] BatchEvidenceRequest request,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
if (request.FindingIds.Count > 100)
|
||||
{
|
||||
return BadRequest(new { error = "Maximum 100 findings per batch" });
|
||||
}
|
||||
|
||||
var results = new List<FindingEvidenceResponse>();
|
||||
foreach (var findingId in request.FindingIds)
|
||||
{
|
||||
var finding = await _triageService.GetFindingAsync(findingId, ct).ConfigureAwait(false);
|
||||
if (finding is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var evidence = await _evidenceService.ComposeAsync(finding, includeRaw: false, ct).ConfigureAwait(false);
|
||||
results.Add(evidence);
|
||||
}
|
||||
|
||||
return Ok(new BatchEvidenceResponse { Findings = results });
|
||||
}
|
||||
}
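
A minimal sketch of calling the controller routes above. The routes, `includeRaw` flag, and the 100-item batch limit come from the controller; the host, token, and example identifiers are assumptions.

```csharp
using System;
using System.Net.Http;
using System.Net.Http.Json;

// Hypothetical client calls; base address and token are placeholders.
using var client = new HttpClient { BaseAddress = new Uri("https://scanner.example.internal") };
client.DefaultRequestHeaders.Authorization = new("Bearer", "<token>");

// Single finding: GET /api/v1/findings/{findingId}/evidence (includeRaw=true needs the evidence:raw scope).
var single = await client.GetFromJsonAsync<FindingEvidenceResponse>(
    "/api/v1/findings/finding-0001/evidence?includeRaw=false");

// Batch (at most 100 ids per request): POST /api/v1/findings/evidence/batch.
var batch = await client.PostAsJsonAsync(
    "/api/v1/findings/evidence/batch",
    new BatchEvidenceRequest { FindingIds = new[] { "finding-0001", "finding-0002" } });
var evidence = await batch.Content.ReadFromJsonAsync<BatchEvidenceResponse>();
```
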
|
||||
@@ -102,13 +102,13 @@ internal static class EvidenceEndpoints
}

// Add warning header if evidence is stale or near expiry
if (evidence.IsStale)
if (evidence.Freshness.IsStale)
{
    context.Response.Headers["X-Evidence-Warning"] = "stale";
}
else if (evidence.ExpiresAt.HasValue)
else if (evidence.Freshness.ExpiresAt.HasValue)
{
    var timeUntilExpiry = evidence.ExpiresAt.Value - DateTimeOffset.UtcNow;
    var timeUntilExpiry = evidence.Freshness.ExpiresAt.Value - DateTimeOffset.UtcNow;
    if (timeUntilExpiry <= TimeSpan.FromDays(1))
    {
        context.Response.Headers["X-Evidence-Warning"] = "near-expiry";
@@ -35,7 +35,7 @@ internal static class ExportEndpoints
scansGroup.MapGet("/{scanId}/exports/cdxr", HandleExportCycloneDxRAsync)
    .WithName("scanner.scans.exports.cdxr")
    .WithTags("Exports")
    .Produces(StatusCodes.Status200OK, contentType: "application/vnd.cyclonedx+json")
    .Produces(StatusCodes.Status200OK, contentType: "application/vnd.cyclonedx+json; version=1.7")
    .Produces(StatusCodes.Status404NotFound)
    .RequireAuthorization(ScannerPolicies.ScansRead);
@@ -137,7 +137,7 @@ internal static class ExportEndpoints
}

var json = JsonSerializer.Serialize(cdxDocument, SerializerOptions);
return Results.Content(json, "application/vnd.cyclonedx+json", System.Text.Encoding.UTF8, StatusCodes.Status200OK);
return Results.Content(json, "application/vnd.cyclonedx+json; version=1.7", System.Text.Encoding.UTF8, StatusCodes.Status200OK);
}

private static async Task<IResult> HandleExportOpenVexAsync(
@@ -0,0 +1,45 @@
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using StellaOps.Scanner.Orchestration.Fidelity;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Endpoints;
|
||||
|
||||
public static class FidelityEndpoints
|
||||
{
|
||||
public static void MapFidelityEndpoints(this WebApplication app)
|
||||
{
|
||||
var group = app.MapGroup("/api/v1/scan")
|
||||
.WithTags("Fidelity")
|
||||
.RequireAuthorization();
|
||||
|
||||
// POST /api/v1/scan/analyze?fidelity={level}
|
||||
group.MapPost("/analyze", async (
|
||||
[FromBody] AnalysisRequest request,
|
||||
[FromQuery] FidelityLevel fidelity = FidelityLevel.Standard,
|
||||
IFidelityAwareAnalyzer analyzer,
|
||||
CancellationToken ct) =>
|
||||
{
|
||||
var result = await analyzer.AnalyzeAsync(request, fidelity, ct);
|
||||
return Results.Ok(result);
|
||||
})
|
||||
.WithName("AnalyzeWithFidelity")
|
||||
.WithDescription("Analyze with specified fidelity level")
|
||||
.Produces<FidelityAnalysisResult>(200);
|
||||
|
||||
// POST /api/v1/scan/findings/{findingId}/upgrade
|
||||
group.MapPost("/findings/{findingId:guid}/upgrade", async (
|
||||
Guid findingId,
|
||||
[FromQuery] FidelityLevel target = FidelityLevel.Deep,
|
||||
IFidelityAwareAnalyzer analyzer,
|
||||
CancellationToken ct) =>
|
||||
{
|
||||
var result = await analyzer.UpgradeFidelityAsync(findingId, target, ct);
|
||||
return result.Success
|
||||
? Results.Ok(result)
|
||||
: Results.BadRequest(result);
|
||||
})
|
||||
.WithName("UpgradeFidelity")
|
||||
.WithDescription("Upgrade analysis fidelity for a finding")
|
||||
.Produces<FidelityUpgradeResult>(200)
|
||||
.Produces<FidelityUpgradeResult>(400);
|
||||
}
|
||||
}
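
A minimal sketch of exercising the fidelity routes mapped above. The routes, query parameter names, and `FidelityLevel` values come from the endpoint definitions; the host, token, and analysis payload fields are assumptions.

```csharp
using System;
using System.Net.Http;
using System.Net.Http.Json;

// Hypothetical calls; base address, token, and the AnalysisRequest body are placeholders.
using var client = new HttpClient { BaseAddress = new Uri("https://scanner.example.internal") };
client.DefaultRequestHeaders.Authorization = new("Bearer", "<token>");

// Run an analysis at an explicit fidelity level.
var analyze = await client.PostAsJsonAsync(
    "/api/v1/scan/analyze?fidelity=Deep",
    new { imageDigest = "sha256:abc123" });   // AnalysisRequest shape is not shown in this commit

// Upgrade a single finding to deeper fidelity.
var upgrade = await client.PostAsync(
    $"/api/v1/scan/findings/{Guid.NewGuid()}/upgrade?target=Deep", content: null);
```
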
|
||||
@@ -27,7 +27,12 @@ internal static class SbomEndpoints
scansGroup.MapPost("/{scanId}/sbom", HandleSubmitSbomAsync)
    .WithName("scanner.scans.sbom.submit")
    .WithTags("Scans")
    .Accepts<JsonDocument>("application/vnd.cyclonedx+json", "application/spdx+json", "application/json")
    .Accepts<JsonDocument>(
        "application/vnd.cyclonedx+json; version=1.7",
        "application/vnd.cyclonedx+json; version=1.6",
        "application/vnd.cyclonedx+json",
        "application/spdx+json",
        "application/json")
    .Produces<SbomAcceptedResponseDto>(StatusCodes.Status202Accepted)
    .Produces(StatusCodes.Status400BadRequest)
    .Produces(StatusCodes.Status404NotFound)
@@ -96,7 +101,7 @@ internal static class SbomEndpoints
    ProblemTypes.Validation,
    "Unknown SBOM format",
    StatusCodes.Status400BadRequest,
    detail: "Could not detect SBOM format. Use Content-Type 'application/vnd.cyclonedx+json' or 'application/spdx+json'.");
    detail: "Could not detect SBOM format. Use Content-Type 'application/vnd.cyclonedx+json; version=1.7' (or 1.6) or 'application/spdx+json'.");
}

// Validate the SBOM
@@ -0,0 +1,96 @@
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Routing;
|
||||
using StellaOps.Scanner.WebService.Constants;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using StellaOps.Scanner.WebService.Infrastructure;
|
||||
using StellaOps.Scanner.WebService.Security;
|
||||
using StellaOps.Scanner.WebService.Services;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Endpoints;
|
||||
|
||||
internal static class SbomUploadEndpoints
|
||||
{
|
||||
public static void MapSbomUploadEndpoints(this RouteGroupBuilder apiGroup)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(apiGroup);
|
||||
|
||||
var sbomGroup = apiGroup.MapGroup("/sbom");
|
||||
|
||||
sbomGroup.MapPost("/upload", HandleUploadAsync)
|
||||
.WithName("scanner.sbom.upload")
|
||||
.WithTags("SBOM")
|
||||
.Produces<SbomUploadResponseDto>(StatusCodes.Status202Accepted)
|
||||
.Produces(StatusCodes.Status400BadRequest)
|
||||
.RequireAuthorization(ScannerPolicies.ScansWrite);
|
||||
|
||||
sbomGroup.MapGet("/uploads/{sbomId}", HandleGetUploadAsync)
|
||||
.WithName("scanner.sbom.uploads.get")
|
||||
.WithTags("SBOM")
|
||||
.Produces<SbomUploadRecordDto>(StatusCodes.Status200OK)
|
||||
.Produces(StatusCodes.Status400BadRequest)
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.RequireAuthorization(ScannerPolicies.ScansRead);
|
||||
}
|
||||
|
||||
private static async Task<IResult> HandleUploadAsync(
|
||||
SbomUploadRequestDto request,
|
||||
ISbomByosUploadService uploadService,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
ArgumentNullException.ThrowIfNull(uploadService);
|
||||
|
||||
var (response, validation) = await uploadService.UploadAsync(request, cancellationToken).ConfigureAwait(false);
|
||||
if (!validation.Valid)
|
||||
{
|
||||
var extensions = new Dictionary<string, object?>
|
||||
{
|
||||
["errors"] = validation.Errors,
|
||||
["warnings"] = validation.Warnings
|
||||
};
|
||||
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
ProblemTypes.Validation,
|
||||
"Invalid SBOM",
|
||||
StatusCodes.Status400BadRequest,
|
||||
detail: "SBOM validation failed.",
|
||||
extensions: extensions);
|
||||
}
|
||||
|
||||
return Results.Accepted($"/api/v1/sbom/uploads/{response.SbomId}", response);
|
||||
}
|
||||
|
||||
private static async Task<IResult> HandleGetUploadAsync(
|
||||
string sbomId,
|
||||
ISbomByosUploadService uploadService,
|
||||
HttpContext context,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(uploadService);
|
||||
|
||||
if (string.IsNullOrWhiteSpace(sbomId))
|
||||
{
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
ProblemTypes.Validation,
|
||||
"Invalid SBOM identifier",
|
||||
StatusCodes.Status400BadRequest,
|
||||
detail: "SBOM identifier is required.");
|
||||
}
|
||||
|
||||
var record = await uploadService.GetRecordAsync(sbomId.Trim(), cancellationToken).ConfigureAwait(false);
|
||||
if (record is null)
|
||||
{
|
||||
return ProblemResultFactory.Create(
|
||||
context,
|
||||
ProblemTypes.NotFound,
|
||||
"SBOM upload not found",
|
||||
StatusCodes.Status404NotFound,
|
||||
detail: "Requested SBOM upload could not be located.");
|
||||
}
|
||||
|
||||
return Results.Ok(record);
|
||||
}
|
||||
}
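
A minimal sketch of a BYOS upload round-trip against the routes mapped above. The DTO fields come from `SbomUploadRequestDto` and friends; the host, token, `/api/v1` prefix, and the inline SBOM document are assumptions.

```csharp
using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Text.Json;

// Hypothetical round-trip; base address, token, and the /api/v1 prefix are placeholders.
using var client = new HttpClient { BaseAddress = new Uri("https://scanner.example.internal") };
client.DefaultRequestHeaders.Authorization = new("Bearer", "<token>");

var upload = new SbomUploadRequestDto
{
    ArtifactRef = "registry.example.internal/app:1.2.3",
    ArtifactDigest = "sha256:abc123",
    Sbom = JsonDocument.Parse("""{"bomFormat":"CycloneDX","specVersion":"1.7","components":[]}""").RootElement,
    Format = "cyclonedx-json",
    Source = new SbomUploadSourceDto { Tool = "syft", Version = "1.0.0" }
};

// POST .../sbom/upload returns 202 Accepted with an SbomUploadResponseDto.
var accepted = await client.PostAsJsonAsync("/api/v1/sbom/upload", upload);
var response = await accepted.Content.ReadFromJsonAsync<SbomUploadResponseDto>();

// GET .../sbom/uploads/{sbomId} returns the stored SbomUploadRecordDto.
var record = await client.GetFromJsonAsync<SbomUploadRecordDto>($"/api/v1/sbom/uploads/{response!.SbomId}");
```
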
|
||||
@@ -0,0 +1,386 @@
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Mvc;
|
||||
using Microsoft.AspNetCore.Routing;
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
using StellaOps.Scanner.WebService.Security;
|
||||
using StellaOps.Scanner.WebService.Services;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Endpoints;
|
||||
|
||||
/// <summary>
|
||||
/// Endpoints for slice query and replay operations.
|
||||
/// </summary>
|
||||
internal static class SliceEndpoints
|
||||
{
|
||||
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
|
||||
Converters = { new JsonStringEnumConverter() }
|
||||
};
|
||||
|
||||
public static void MapSliceEndpoints(this IEndpointRouteBuilder endpoints)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(endpoints);
|
||||
|
||||
var slicesGroup = endpoints.MapGroup("/api/slices")
|
||||
.WithTags("Slices");
|
||||
|
||||
// POST /api/slices/query - Generate reachability slice on demand
|
||||
slicesGroup.MapPost("/query", HandleQueryAsync)
|
||||
.WithName("scanner.slices.query")
|
||||
.WithDescription("Query reachability for CVE/symbols and generate an attested slice")
|
||||
.Produces<SliceQueryResponseDto>(StatusCodes.Status200OK)
|
||||
.Produces<SliceQueryResponseDto>(StatusCodes.Status202Accepted)
|
||||
.Produces(StatusCodes.Status400BadRequest)
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.RequireAuthorization(ScannerPolicies.ScansRead);
|
||||
|
||||
// GET /api/slices/{digest} - Retrieve attested slice by digest
|
||||
slicesGroup.MapGet("/{digest}", HandleGetSliceAsync)
|
||||
.WithName("scanner.slices.get")
|
||||
.WithDescription("Retrieve an attested reachability slice by its content digest")
|
||||
.Produces<object>(StatusCodes.Status200OK, "application/json")
|
||||
.Produces<object>(StatusCodes.Status200OK, "application/dsse+json")
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.RequireAuthorization(ScannerPolicies.ScansRead);
|
||||
|
||||
// POST /api/slices/replay - Verify slice reproducibility
|
||||
slicesGroup.MapPost("/replay", HandleReplayAsync)
|
||||
.WithName("scanner.slices.replay")
|
||||
.WithDescription("Recompute a slice and verify byte-for-byte match with the original")
|
||||
.Produces<SliceReplayResponseDto>(StatusCodes.Status200OK)
|
||||
.Produces(StatusCodes.Status400BadRequest)
|
||||
.Produces(StatusCodes.Status404NotFound)
|
||||
.RequireAuthorization(ScannerPolicies.ScansRead);
|
||||
|
||||
// GET /api/slices/cache/stats - Cache statistics (admin only)
|
||||
slicesGroup.MapGet("/cache/stats", HandleCacheStatsAsync)
|
||||
.WithName("scanner.slices.cache.stats")
|
||||
.WithDescription("Get slice cache statistics")
|
||||
.Produces<SliceCacheStatsDto>(StatusCodes.Status200OK)
|
||||
.RequireAuthorization(ScannerPolicies.Admin);
|
||||
}
|
||||
|
||||
private static async Task<IResult> HandleQueryAsync(
|
||||
[FromBody] SliceQueryRequestDto request,
|
||||
[FromServices] ISliceQueryService sliceService,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
if (request == null)
|
||||
{
|
||||
return Results.BadRequest(new { error = "Request body is required" });
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(request.ScanId))
|
||||
{
|
||||
return Results.BadRequest(new { error = "scanId is required" });
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(request.CveId) &&
|
||||
(request.Symbols == null || request.Symbols.Count == 0))
|
||||
{
|
||||
return Results.BadRequest(new { error = "Either cveId or symbols must be specified" });
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var serviceRequest = new SliceQueryRequest
|
||||
{
|
||||
ScanId = request.ScanId,
|
||||
CveId = request.CveId,
|
||||
Symbols = request.Symbols,
|
||||
Entrypoints = request.Entrypoints,
|
||||
PolicyHash = request.PolicyHash
|
||||
};
|
||||
|
||||
var response = await sliceService.QueryAsync(serviceRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var dto = new SliceQueryResponseDto
|
||||
{
|
||||
SliceDigest = response.SliceDigest,
|
||||
Verdict = response.Verdict,
|
||||
Confidence = response.Confidence,
|
||||
PathWitnesses = response.PathWitnesses,
|
||||
CacheHit = response.CacheHit,
|
||||
JobId = response.JobId
|
||||
};
|
||||
|
||||
// Return 202 Accepted if async generation (jobId present)
|
||||
if (!string.IsNullOrEmpty(response.JobId))
|
||||
{
|
||||
return Results.Accepted($"/api/slices/jobs/{response.JobId}", dto);
|
||||
}
|
||||
|
||||
return Results.Ok(dto);
|
||||
}
|
||||
catch (InvalidOperationException ex) when (ex.Message.Contains("not found"))
|
||||
{
|
||||
return Results.NotFound(new { error = ex.Message });
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task<IResult> HandleGetSliceAsync(
|
||||
[FromRoute] string digest,
|
||||
[FromHeader(Name = "Accept")] string? accept,
|
||||
[FromServices] ISliceQueryService sliceService,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(digest))
|
||||
{
|
||||
return Results.BadRequest(new { error = "digest is required" });
|
||||
}
|
||||
|
||||
var wantsDsse = accept?.Contains("dsse", StringComparison.OrdinalIgnoreCase) == true;
|
||||
|
||||
try
|
||||
{
|
||||
if (wantsDsse)
|
||||
{
|
||||
var dsse = await sliceService.GetSliceDsseAsync(digest, cancellationToken).ConfigureAwait(false);
|
||||
if (dsse == null)
|
||||
{
|
||||
return Results.NotFound(new { error = $"Slice {digest} not found" });
|
||||
}
|
||||
return Results.Json(dsse, SerializerOptions, "application/dsse+json");
|
||||
}
|
||||
else
|
||||
{
|
||||
var slice = await sliceService.GetSliceAsync(digest, cancellationToken).ConfigureAwait(false);
|
||||
if (slice == null)
|
||||
{
|
||||
return Results.NotFound(new { error = $"Slice {digest} not found" });
|
||||
}
|
||||
return Results.Json(slice, SerializerOptions, "application/json");
|
||||
}
|
||||
}
|
||||
catch (InvalidOperationException ex)
|
||||
{
|
||||
return Results.NotFound(new { error = ex.Message });
|
||||
}
|
||||
}
|
||||
|
||||
private static async Task<IResult> HandleReplayAsync(
|
||||
[FromBody] SliceReplayRequestDto request,
|
||||
[FromServices] ISliceQueryService sliceService,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
if (request == null)
|
||||
{
|
||||
return Results.BadRequest(new { error = "Request body is required" });
|
||||
}
|
||||
|
||||
if (string.IsNullOrWhiteSpace(request.SliceDigest))
|
||||
{
|
||||
return Results.BadRequest(new { error = "sliceDigest is required" });
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var serviceRequest = new SliceReplayRequest
|
||||
{
|
||||
SliceDigest = request.SliceDigest
|
||||
};
|
||||
|
||||
var response = await sliceService.ReplayAsync(serviceRequest, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
var dto = new SliceReplayResponseDto
|
||||
{
|
||||
Match = response.Match,
|
||||
OriginalDigest = response.OriginalDigest,
|
||||
RecomputedDigest = response.RecomputedDigest,
|
||||
Diff = response.Diff == null ? null : new SliceDiffDto
|
||||
{
|
||||
MissingNodes = response.Diff.MissingNodes,
|
||||
ExtraNodes = response.Diff.ExtraNodes,
|
||||
MissingEdges = response.Diff.MissingEdges,
|
||||
ExtraEdges = response.Diff.ExtraEdges,
|
||||
VerdictDiff = response.Diff.VerdictDiff
|
||||
}
|
||||
};
|
||||
|
||||
return Results.Ok(dto);
|
||||
}
|
||||
catch (InvalidOperationException ex) when (ex.Message.Contains("not found"))
|
||||
{
|
||||
return Results.NotFound(new { error = ex.Message });
|
||||
}
|
||||
}
|
||||
|
||||
private static IResult HandleCacheStatsAsync(
|
||||
[FromServices] Reachability.Slices.ISliceCache cache)
|
||||
{
|
||||
var stats = cache.GetStatistics();
|
||||
return Results.Ok(new SliceCacheStatsDto
|
||||
{
|
||||
ItemCount = (int)stats.EntryCount,
|
||||
HitCount = stats.HitCount,
|
||||
MissCount = stats.MissCount,
|
||||
HitRate = stats.HitRate
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#region DTOs
|
||||
|
||||
/// <summary>
|
||||
/// Request to query reachability and generate a slice.
|
||||
/// </summary>
|
||||
public sealed class SliceQueryRequestDto
|
||||
{
|
||||
/// <summary>
|
||||
/// The scan ID to query against.
|
||||
/// </summary>
|
||||
[JsonPropertyName("scanId")]
|
||||
public string? ScanId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional CVE ID to query reachability for.
|
||||
/// </summary>
|
||||
[JsonPropertyName("cveId")]
|
||||
public string? CveId { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Target symbols to check reachability for.
|
||||
/// </summary>
|
||||
[JsonPropertyName("symbols")]
|
||||
public List<string>? Symbols { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Entrypoint symbols to start reachability analysis from.
|
||||
/// </summary>
|
||||
[JsonPropertyName("entrypoints")]
|
||||
public List<string>? Entrypoints { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional policy hash to include in the slice.
|
||||
/// </summary>
|
||||
[JsonPropertyName("policyHash")]
|
||||
public string? PolicyHash { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Response from slice query.
|
||||
/// </summary>
|
||||
public sealed class SliceQueryResponseDto
|
||||
{
|
||||
/// <summary>
|
||||
/// Content-addressed digest of the generated slice.
|
||||
/// </summary>
|
||||
[JsonPropertyName("sliceDigest")]
|
||||
public required string SliceDigest { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Reachability verdict (reachable, unreachable, unknown, gated).
|
||||
/// </summary>
|
||||
[JsonPropertyName("verdict")]
|
||||
public required string Verdict { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Confidence score [0.0, 1.0].
|
||||
/// </summary>
|
||||
[JsonPropertyName("confidence")]
|
||||
public double Confidence { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Example paths demonstrating reachability (if reachable).
|
||||
/// </summary>
|
||||
[JsonPropertyName("pathWitnesses")]
|
||||
public IReadOnlyList<string>? PathWitnesses { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether result was served from cache.
|
||||
/// </summary>
|
||||
[JsonPropertyName("cacheHit")]
|
||||
public bool CacheHit { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Job ID for async generation (if slice is large).
|
||||
/// </summary>
|
||||
[JsonPropertyName("jobId")]
|
||||
public string? JobId { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Request to replay/verify a slice.
|
||||
/// </summary>
|
||||
public sealed class SliceReplayRequestDto
|
||||
{
|
||||
/// <summary>
|
||||
/// Digest of the slice to replay.
|
||||
/// </summary>
|
||||
[JsonPropertyName("sliceDigest")]
|
||||
public string? SliceDigest { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Response from slice replay verification.
|
||||
/// </summary>
|
||||
public sealed class SliceReplayResponseDto
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether the recomputed slice matches the original.
|
||||
/// </summary>
|
||||
[JsonPropertyName("match")]
|
||||
public bool Match { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Digest of the original slice.
|
||||
/// </summary>
|
||||
[JsonPropertyName("originalDigest")]
|
||||
public required string OriginalDigest { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Digest of the recomputed slice.
|
||||
/// </summary>
|
||||
[JsonPropertyName("recomputedDigest")]
|
||||
public required string RecomputedDigest { get; set; }
|
||||
|
||||
/// <summary>
|
||||
/// Detailed diff if slices don't match.
|
||||
/// </summary>
|
||||
[JsonPropertyName("diff")]
|
||||
public SliceDiffDto? Diff { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Diff between two slices.
|
||||
/// </summary>
|
||||
public sealed class SliceDiffDto
|
||||
{
|
||||
[JsonPropertyName("missingNodes")]
|
||||
public IReadOnlyList<string>? MissingNodes { get; set; }
|
||||
|
||||
[JsonPropertyName("extraNodes")]
|
||||
public IReadOnlyList<string>? ExtraNodes { get; set; }
|
||||
|
||||
[JsonPropertyName("missingEdges")]
|
||||
public IReadOnlyList<string>? MissingEdges { get; set; }
|
||||
|
||||
[JsonPropertyName("extraEdges")]
|
||||
public IReadOnlyList<string>? ExtraEdges { get; set; }
|
||||
|
||||
[JsonPropertyName("verdictDiff")]
|
||||
public string? VerdictDiff { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Slice cache statistics.
|
||||
/// </summary>
|
||||
public sealed class SliceCacheStatsDto
|
||||
{
|
||||
[JsonPropertyName("itemCount")]
|
||||
public int ItemCount { get; set; }
|
||||
|
||||
[JsonPropertyName("hitCount")]
|
||||
public long HitCount { get; set; }
|
||||
|
||||
[JsonPropertyName("missCount")]
|
||||
public long MissCount { get; set; }
|
||||
|
||||
[JsonPropertyName("hitRate")]
|
||||
public double HitRate { get; set; }
|
||||
}
|
||||
|
||||
#endregion
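
A minimal sketch of the query → fetch → replay flow these endpoints and DTOs support. The routes, DTO fields, and the `application/dsse+json` content negotiation come from the file above; the host, token, and example identifiers are assumptions.

```csharp
using System;
using System.Net.Http;
using System.Net.Http.Json;

// Hypothetical slice query/replay flow; base address and token are placeholders.
using var client = new HttpClient { BaseAddress = new Uri("https://scanner.example.internal") };
client.DefaultRequestHeaders.Authorization = new("Bearer", "<token>");

// 1. Query reachability for a CVE and generate (or fetch from cache) an attested slice.
var query = await client.PostAsJsonAsync("/api/slices/query",
    new SliceQueryRequestDto { ScanId = "scan-123", CveId = "CVE-2021-44228" });
var slice = await query.Content.ReadFromJsonAsync<SliceQueryResponseDto>();

// 2. Fetch the DSSE envelope for the slice by content digest.
var dsseRequest = new HttpRequestMessage(HttpMethod.Get, $"/api/slices/{slice!.SliceDigest}");
dsseRequest.Headers.Accept.ParseAdd("application/dsse+json");
var dsse = await client.SendAsync(dsseRequest);

// 3. Replay the slice and verify it reproduces byte-for-byte.
var replay = await client.PostAsJsonAsync("/api/slices/replay",
    new SliceReplayRequestDto { SliceDigest = slice.SliceDigest });
var verdict = await replay.Content.ReadFromJsonAsync<SliceReplayResponseDto>();
```
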
|
||||
@@ -0,0 +1,163 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ProofBundleEndpoints.cs
|
||||
// Sprint: SPRINT_3900_0003_0001_exploit_path_inbox_proof_bundles
|
||||
// Description: HTTP endpoints for proof bundle generation (attestations + evidence).
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.AspNetCore.Routing;
|
||||
using StellaOps.Scanner.Triage.Models;
using StellaOps.Scanner.WebService.Security;

namespace StellaOps.Scanner.WebService.Endpoints.Triage;

/// <summary>
/// Endpoints for proof bundle generation - attested evidence packages.
/// </summary>
internal static class ProofBundleEndpoints
{
/// <summary>
/// Maps proof bundle endpoints.
/// </summary>
public static void MapProofBundleEndpoints(this RouteGroupBuilder apiGroup)
{
ArgumentNullException.ThrowIfNull(apiGroup);

var triageGroup = apiGroup.MapGroup("/triage")
.WithTags("Triage");

// POST /v1/triage/proof-bundle
triageGroup.MapPost("/proof-bundle", HandleGenerateProofBundleAsync)
.WithName("scanner.triage.proof-bundle")
.WithDescription("Generates an attested proof bundle for an exploit path.")
.Produces<ProofBundleResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status400BadRequest)
.RequireAuthorization(ScannerPolicies.TriageWrite);
}

private static async Task<IResult> HandleGenerateProofBundleAsync(
ProofBundleRequest request,
IProofBundleGenerator bundleGenerator,
HttpContext context,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(bundleGenerator);

if (string.IsNullOrWhiteSpace(request.PathId))
{
return Results.BadRequest(new
{
type = "validation-error",
title = "Invalid path ID",
detail = "Path ID is required."
});
}

var bundle = await bundleGenerator.GenerateBundleAsync(
request.PathId,
request.IncludeReachGraph,
request.IncludeCallTrace,
request.IncludeVexStatements,
request.AttestationKeyId,
cancellationToken);

var response = new ProofBundleResponse
{
PathId = request.PathId,
Bundle = bundle,
GeneratedAt = DateTimeOffset.UtcNow
};

return Results.Ok(response);
}
}

/// <summary>
/// Request for proof bundle generation.
/// </summary>
public sealed record ProofBundleRequest
{
public required string PathId { get; init; }
public bool IncludeReachGraph { get; init; } = true;
public bool IncludeCallTrace { get; init; } = true;
public bool IncludeVexStatements { get; init; } = true;
public string? AttestationKeyId { get; init; }
}

/// <summary>
/// Response containing proof bundle.
/// </summary>
public sealed record ProofBundleResponse
{
public required string PathId { get; init; }
public required ProofBundle Bundle { get; init; }
public required DateTimeOffset GeneratedAt { get; init; }
}

/// <summary>
/// Proof bundle containing attestations and evidence.
/// </summary>
public sealed record ProofBundle
{
public required string BundleId { get; init; }
public required string PathId { get; init; }
public required string ArtifactDigest { get; init; }
public required ExploitPathSummary Path { get; init; }
public required IReadOnlyList<EvidenceAttestation> Attestations { get; init; }
public ReachGraphEvidence? ReachGraph { get; init; }
public CallTraceEvidence? CallTrace { get; init; }
public IReadOnlyList<VexStatement>? VexStatements { get; init; }
public required BundleSignature Signature { get; init; }
public required DateTimeOffset CreatedAt { get; init; }
}

public sealed record ExploitPathSummary(
string PathId,
string PackagePurl,
string VulnerableSymbol,
string EntryPoint,
IReadOnlyList<string> CveIds,
string ReachabilityStatus);

public sealed record EvidenceAttestation(
string Type,
string Predicate,
string Subject,
string DsseEnvelope);

public sealed record ReachGraphEvidence(
IReadOnlyList<GraphNode> Nodes,
IReadOnlyList<GraphEdge> Edges);

public sealed record GraphNode(string Id, string Label, string Type);
public sealed record GraphEdge(string From, string To, string Label);

public sealed record CallTraceEvidence(
IReadOnlyList<CallFrame> Frames);

public sealed record CallFrame(string Function, string File, int Line);

public sealed record VexStatement(
string CveId,
string Status,
string Justification,
DateTimeOffset IssuedAt);

public sealed record BundleSignature(
string Algorithm,
string KeyId,
string Signature,
DateTimeOffset SignedAt);

public interface IProofBundleGenerator
{
Task<ProofBundle> GenerateBundleAsync(
string pathId,
bool includeReachGraph,
bool includeCallTrace,
bool includeVexStatements,
string? attestationKeyId,
CancellationToken ct);
}
@@ -0,0 +1,122 @@
// -----------------------------------------------------------------------------
// TriageInboxEndpoints.cs
// Sprint: SPRINT_3900_0003_0001_exploit_path_inbox_proof_bundles
// Description: HTTP endpoints for triage inbox with grouped exploit paths.
// -----------------------------------------------------------------------------

using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing;
using StellaOps.Scanner.Triage.Models;
using StellaOps.Scanner.Triage.Services;
using StellaOps.Scanner.WebService.Security;

namespace StellaOps.Scanner.WebService.Endpoints.Triage;

/// <summary>
/// Endpoints for triage inbox - grouped exploit paths.
/// </summary>
internal static class TriageInboxEndpoints
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Converters = { new JsonStringEnumConverter() }
};

/// <summary>
/// Maps triage inbox endpoints.
/// </summary>
public static void MapTriageInboxEndpoints(this RouteGroupBuilder apiGroup)
{
ArgumentNullException.ThrowIfNull(apiGroup);

var triageGroup = apiGroup.MapGroup("/triage")
.WithTags("Triage");

// GET /v1/triage/inbox?artifactDigest={digest}&filter={filter}
triageGroup.MapGet("/inbox", HandleGetInboxAsync)
.WithName("scanner.triage.inbox")
.WithDescription("Retrieves triage inbox with grouped exploit paths for an artifact.")
.Produces<TriageInboxResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status400BadRequest)
.RequireAuthorization(ScannerPolicies.TriageRead);
}

private static async Task<IResult> HandleGetInboxAsync(
string artifactDigest,
string? filter,
IExploitPathGroupingService groupingService,
IFindingQueryService findingService,
HttpContext context,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(groupingService);
ArgumentNullException.ThrowIfNull(findingService);

if (string.IsNullOrWhiteSpace(artifactDigest))
{
return Results.BadRequest(new
{
type = "validation-error",
title = "Invalid artifact digest",
detail = "Artifact digest is required."
});
}

var findings = await findingService.GetFindingsForArtifactAsync(artifactDigest, cancellationToken);
var paths = await groupingService.GroupFindingsAsync(artifactDigest, findings, cancellationToken);

var filteredPaths = ApplyFilter(paths, filter);

var response = new TriageInboxResponse
{
ArtifactDigest = artifactDigest,
TotalPaths = paths.Count,
FilteredPaths = filteredPaths.Count,
Filter = filter,
Paths = filteredPaths,
GeneratedAt = DateTimeOffset.UtcNow
};

return Results.Ok(response);
}

private static IReadOnlyList<ExploitPath> ApplyFilter(
IReadOnlyList<ExploitPath> paths,
string? filter)
{
if (string.IsNullOrWhiteSpace(filter))
return paths;

return filter.ToLowerInvariant() switch
{
"actionable" => paths.Where(p => !p.IsQuiet && p.Reachability is ReachabilityStatus.StaticallyReachable or ReachabilityStatus.RuntimeConfirmed).ToList(),
"noisy" => paths.Where(p => p.IsQuiet).ToList(),
"reachable" => paths.Where(p => p.Reachability is ReachabilityStatus.StaticallyReachable or ReachabilityStatus.RuntimeConfirmed).ToList(),
"runtime" => paths.Where(p => p.Reachability == ReachabilityStatus.RuntimeConfirmed).ToList(),
"critical" => paths.Where(p => p.RiskScore.CriticalCount > 0).ToList(),
"high" => paths.Where(p => p.RiskScore.HighCount > 0).ToList(),
_ => paths
};
}
}

/// <summary>
/// Response for triage inbox endpoint.
/// </summary>
public sealed record TriageInboxResponse
{
public required string ArtifactDigest { get; init; }
public required int TotalPaths { get; init; }
public required int FilteredPaths { get; init; }
public string? Filter { get; init; }
public required IReadOnlyList<ExploitPath> Paths { get; init; }
public required DateTimeOffset GeneratedAt { get; init; }
}

public interface IFindingQueryService
{
Task<IReadOnlyList<Finding>> GetFindingsForArtifactAsync(string artifactDigest, CancellationToken ct);
}
@@ -8,6 +8,7 @@ using Microsoft.AspNetCore.Diagnostics;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Authentication;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using Serilog;
@@ -32,6 +33,7 @@ using StellaOps.Scanner.Surface.Env;
using StellaOps.Scanner.Surface.FS;
using StellaOps.Scanner.Surface.Secrets;
using StellaOps.Scanner.Surface.Validation;
using StellaOps.Scanner.Triage;
using StellaOps.Scanner.WebService.Diagnostics;
using StellaOps.Scanner.WebService.Determinism;
using StellaOps.Scanner.WebService.Endpoints;
@@ -68,6 +70,7 @@ var bootstrapOptions = builder.Configuration.BindOptions<ScannerWebServiceOption
});

builder.Services.AddStellaOpsCrypto(bootstrapOptions.Crypto);
builder.Services.AddControllers();

builder.Services.AddOptions<ScannerWebServiceOptions>()
.Bind(builder.Configuration.GetSection(ScannerWebServiceOptions.SectionName))
@@ -126,6 +129,8 @@ builder.Services.AddSingleton<IAttestationChainVerifier, AttestationChainVerifie
builder.Services.AddSingleton<IHumanApprovalAttestationService, HumanApprovalAttestationService>();
builder.Services.AddScoped<ICallGraphIngestionService, CallGraphIngestionService>();
builder.Services.AddScoped<ISbomIngestionService, SbomIngestionService>();
builder.Services.AddSingleton<ISbomUploadStore, InMemorySbomUploadStore>();
builder.Services.AddScoped<ISbomByosUploadService, SbomByosUploadService>();
builder.Services.AddSingleton<IPolicySnapshotRepository, InMemoryPolicySnapshotRepository>();
builder.Services.AddSingleton<IPolicyAuditRepository, InMemoryPolicyAuditRepository>();
builder.Services.AddSingleton<PolicySnapshotStore>();
@@ -136,6 +141,9 @@ builder.Services.AddSingleton<IScanManifestRepository, InMemoryScanManifestRepos
builder.Services.AddSingleton<IProofBundleRepository, InMemoryProofBundleRepository>();
builder.Services.AddSingleton<IScoringService, DeterministicScoringService>();
builder.Services.AddSingleton<IScanManifestSigner, ScanManifestSigner>();
builder.Services.AddDbContext<TriageDbContext>(options =>
options.UseNpgsql(bootstrapOptions.Storage.Dsn));
builder.Services.AddScoped<ITriageQueryService, TriageQueryService>();

// Register Storage.Repositories implementations for ManifestEndpoints
builder.Services.AddSingleton<StellaOps.Scanner.Storage.Repositories.IScanManifestRepository, TestManifestRepository>();
@@ -516,6 +524,7 @@ if (app.Environment.IsEnvironment("Testing"))
}

apiGroup.MapScanEndpoints(resolvedOptions.Api.ScansSegment);
apiGroup.MapSbomUploadEndpoints();
apiGroup.MapReachabilityDriftRootEndpoints();
apiGroup.MapProofSpineEndpoints(resolvedOptions.Api.SpinesSegment, resolvedOptions.Api.ScansSegment);
apiGroup.MapReplayEndpoints();
@@ -525,6 +534,7 @@ if (resolvedOptions.ScoreReplay.Enabled)
}
apiGroup.MapWitnessEndpoints(); // Sprint: SPRINT_3700_0001_0001
apiGroup.MapEpssEndpoints(); // Sprint: SPRINT_3410_0002_0001
apiGroup.MapSliceEndpoints(); // Sprint: SPRINT_3820_0001_0001

if (resolvedOptions.Features.EnablePolicyPreview)
{
@@ -534,6 +544,7 @@ if (resolvedOptions.Features.EnablePolicyPreview)
apiGroup.MapReportEndpoints(resolvedOptions.Api.ReportsSegment);
apiGroup.MapRuntimeEndpoints(resolvedOptions.Api.RuntimeSegment);

app.MapControllers();
app.MapOpenApiIfAvailable();
await app.RunAsync().ConfigureAwait(false);

@@ -13,6 +13,7 @@ using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.Triage.Entities;

namespace StellaOps.Scanner.WebService.Services;

@@ -94,44 +95,137 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
cancellationToken).ConfigureAwait(false);

// Build score explanation (simplified local computation)
var scoreExplanation = BuildScoreExplanation(finding, explanation);
var scoreInfo = BuildScoreInfo(finding, explanation);

// Compose the response
var now = _timeProvider.GetUtcNow();

// Calculate expiry based on evidence sources
var (expiresAt, isStale) = CalculateTtlAndStaleness(now, explanation);
var freshness = BuildFreshnessInfo(now, explanation, observedAt: now);

return new FindingEvidenceResponse
{
FindingId = findingId,
Cve = cveId,
Component = BuildComponentRef(purl),
ReachablePath = explanation?.PathWitness,
Entrypoint = BuildEntrypointProof(explanation),
Component = BuildComponentInfo(purl),
ReachablePath = explanation?.PathWitness ?? Array.Empty<string>(),
Entrypoint = BuildEntrypointInfo(explanation),
Boundary = null, // Boundary extraction requires RichGraph, deferred to SPRINT_3800_0003_0002
Vex = null, // VEX requires Excititor query, deferred to SPRINT_3800_0003_0002
ScoreExplain = scoreExplanation,
Score = scoreInfo,
LastSeen = now,
ExpiresAt = expiresAt,
IsStale = isStale,
AttestationRefs = BuildAttestationRefs(scan, explanation)
AttestationRefs = BuildAttestationRefs(scan, explanation) ?? Array.Empty<string>(),
Freshness = freshness
};
}

/// <inheritdoc />
public Task<FindingEvidenceResponse> ComposeAsync(
TriageFinding finding,
bool includeRaw,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(finding);

var now = _timeProvider.GetUtcNow();
var latestReachability = finding.ReachabilityResults
.OrderByDescending(r => r.ComputedAt)
.FirstOrDefault();

var latestRisk = finding.RiskResults
.OrderByDescending(r => r.ComputedAt)
.FirstOrDefault();

var latestVex = finding.EffectiveVexRecords
.OrderByDescending(r => r.CollectedAt)
.FirstOrDefault();

var attestationRefs = finding.EvidenceArtifacts
.OrderByDescending(a => a.CreatedAt)
.Select(a => a.ContentHash)
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToList();

var scoreInfo = latestRisk is null
? null
: new ScoreInfo
{
RiskScore = latestRisk.Score,
Contributions = new[]
{
new ScoreContribution
{
Factor = "policy",
Value = latestRisk.Score,
Reason = latestRisk.Why
}
}
};

var vexInfo = latestVex is null
? null
: new VexStatusInfo
{
Status = latestVex.Status.ToString().ToLowerInvariant(),
Justification = latestVex.SourceDomain,
Timestamp = latestVex.ValidFrom,
Issuer = latestVex.Issuer
};

var entrypoint = latestReachability is null
? null
: new EntrypointInfo
{
Type = latestReachability.Reachable switch
{
TriageReachability.Yes => "http",
TriageReachability.No => "internal",
_ => "internal"
},
Route = latestReachability.StaticProofRef,
Auth = null
};

var freshness = BuildFreshnessInfo(
now,
explanation: null,
observedAt: finding.LastSeenAt);

var cve = !string.IsNullOrWhiteSpace(finding.CveId)
? finding.CveId
: finding.RuleId ?? "unknown";

return Task.FromResult(new FindingEvidenceResponse
{
FindingId = finding.Id.ToString(),
Cve = cve,
Component = BuildComponentInfo(finding.Purl),
ReachablePath = Array.Empty<string>(),
Entrypoint = entrypoint,
Vex = vexInfo,
LastSeen = finding.LastSeenAt,
AttestationRefs = attestationRefs,
Score = scoreInfo,
Boundary = null,
Freshness = freshness
});
}

/// <summary>
/// Calculates the evidence expiry time and staleness based on evidence sources.
/// Uses the minimum expiry time from all evidence sources.
/// </summary>
private (DateTimeOffset expiresAt, bool isStale) CalculateTtlAndStaleness(
private FreshnessInfo BuildFreshnessInfo(
DateTimeOffset now,
ReachabilityExplanation? explanation)
ReachabilityExplanation? explanation,
DateTimeOffset? observedAt)
{
var defaultTtl = TimeSpan.FromDays(_options.DefaultEvidenceTtlDays);
var warningThreshold = TimeSpan.FromDays(_options.StaleWarningThresholdDays);

// Default: evidence expires from when it was computed (now)
var reachabilityExpiry = now.Add(defaultTtl);
var baseTimestamp = observedAt ?? now;
var reachabilityExpiry = baseTimestamp.Add(defaultTtl);

// If we have evidence chain with timestamps, use those instead
// For now, we use now as the base timestamp since ReachabilityExplanation
@@ -153,7 +247,16 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
_logger.LogDebug("Evidence nearing expiry: expires in {TimeRemaining}", expiresAt - now);
}

return (expiresAt, isStale);
var ttlRemaining = expiresAt > now
? (int)Math.Floor((expiresAt - now).TotalHours)
: 0;

return new FreshnessInfo
{
IsStale = isStale,
ExpiresAt = expiresAt,
TtlRemainingHours = ttlRemaining
};
}
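// Illustrative sketch (not from the original source): with DefaultEvidenceTtlDays = 7 and evidence
// observed 6 days ago, expiresAt lands one day after now, so TtlRemainingHours = 24; whether IsStale
// is set then depends on how the StaleWarningThresholdDays window above classifies the remaining time.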

private static (string? cveId, string? purl) ParseFindingId(string findingId)
@@ -183,7 +286,7 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
return (cveId, purl);
}

private static ComponentRef BuildComponentRef(string purl)
private static ComponentInfo BuildComponentInfo(string purl)
{
// Parse PURL: "pkg:ecosystem/name@version"
var parts = purl.Replace("pkg:", "", StringComparison.OrdinalIgnoreCase)
@@ -193,16 +296,16 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
var name = parts.Length > 1 ? parts[1] : "unknown";
var version = parts.Length > 2 ? parts[2] : "unknown";

return new ComponentRef
return new ComponentInfo
{
Purl = purl,
Name = name,
Version = version,
Type = ecosystem
Ecosystem = ecosystem
};
}

private static EntrypointProof? BuildEntrypointProof(ReachabilityExplanation? explanation)
private static EntrypointInfo? BuildEntrypointInfo(ReachabilityExplanation? explanation)
{
if (explanation?.PathWitness is null || explanation.PathWitness.Count == 0)
{
@@ -212,11 +315,10 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
var firstHop = explanation.PathWitness[0];
var entrypointType = InferEntrypointType(firstHop);

return new EntrypointProof
return new EntrypointInfo
{
Type = entrypointType,
Fqn = firstHop,
Phase = "runtime"
Route = firstHop
};
}

@@ -225,25 +327,25 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
var lower = fqn.ToLowerInvariant();
if (lower.Contains("controller") || lower.Contains("handler") || lower.Contains("http"))
{
return "http_handler";
return "http";
}
if (lower.Contains("grpc") || lower.Contains("rpc"))
{
return "grpc_method";
return "grpc";
}
if (lower.Contains("main") || lower.Contains("program"))
{
return "cli_command";
return "cli";
}
return "internal";
}

private ScoreExplanationDto BuildScoreExplanation(
private ScoreInfo BuildScoreInfo(
ReachabilityFinding finding,
ReachabilityExplanation? explanation)
{
// Simplified score computation based on reachability status
var contributions = new List<ScoreContributionDto>();
var contributions = new List<ScoreContribution>();
double riskScore = 0.0;

// Reachability contribution (0-25 points)
@@ -258,26 +360,22 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService

if (reachabilityContribution > 0)
{
contributions.Add(new ScoreContributionDto
contributions.Add(new ScoreContribution
{
Factor = "reachability",
Weight = 1.0,
RawValue = reachabilityContribution,
Contribution = reachabilityContribution,
Explanation = reachabilityExplanation
Value = Convert.ToInt32(Math.Round(reachabilityContribution)),
Reason = reachabilityExplanation
});
riskScore += reachabilityContribution;
}

// Confidence contribution (0-10 points)
var confidenceContribution = finding.Confidence * 10.0;
contributions.Add(new ScoreContributionDto
contributions.Add(new ScoreContribution
{
Factor = "confidence",
Weight = 1.0,
RawValue = finding.Confidence,
Contribution = confidenceContribution,
Explanation = $"Analysis confidence: {finding.Confidence:P0}"
Value = Convert.ToInt32(Math.Round(confidenceContribution)),
Reason = $"Analysis confidence: {finding.Confidence:P0}"
});
riskScore += confidenceContribution;

@@ -289,13 +387,11 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
if (gateCount > 0)
{
var gateDiscount = Math.Min(gateCount * -3.0, -10.0);
contributions.Add(new ScoreContributionDto
contributions.Add(new ScoreContribution
{
Factor = "gate_protection",
Weight = 1.0,
RawValue = gateCount,
Contribution = gateDiscount,
Explanation = $"{gateCount} protective gate(s) detected"
Value = Convert.ToInt32(Math.Round(gateDiscount)),
Reason = $"{gateCount} protective gate(s) detected"
});
riskScore += gateDiscount;
}
@@ -304,12 +400,10 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
// Clamp to 0-100
riskScore = Math.Clamp(riskScore, 0.0, 100.0);

return new ScoreExplanationDto
return new ScoreInfo
{
Kind = "stellaops_evidence_v1",
RiskScore = riskScore,
Contributions = contributions,
LastSeen = _timeProvider.GetUtcNow()
RiskScore = Convert.ToInt32(Math.Round(riskScore)),
Contributions = contributions
};
}

@@ -8,6 +8,7 @@ using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.Triage.Entities;

namespace StellaOps.Scanner.WebService.Services;

@@ -30,4 +31,15 @@ public interface IEvidenceCompositionService
ScanId scanId,
string findingId,
CancellationToken cancellationToken = default);

/// <summary>
/// Composes evidence for a triage finding.
/// </summary>
/// <param name="finding">The triage finding entity.</param>
/// <param name="includeRaw">Whether to include raw evidence pointers.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task<FindingEvidenceResponse> ComposeAsync(
TriageFinding finding,
bool includeRaw,
CancellationToken cancellationToken = default);
}

@@ -0,0 +1,94 @@
using StellaOps.Scanner.Reachability.Slices;
using StellaOps.Scanner.WebService.Domain;

namespace StellaOps.Scanner.WebService.Services;

/// <summary>
/// Query request for reachability slices.
/// </summary>
public sealed record SliceQueryRequest
{
public string? CveId { get; init; }
public IReadOnlyList<string>? Symbols { get; init; }
public IReadOnlyList<string>? Entrypoints { get; init; }
public string? PolicyHash { get; init; }
public required string ScanId { get; init; }
}

/// <summary>
/// Response from a slice query.
/// </summary>
public sealed record SliceQueryResponse
{
public required string SliceDigest { get; init; }
public required string Verdict { get; init; }
public required double Confidence { get; init; }
public IReadOnlyList<string>? PathWitnesses { get; init; }
public required bool CacheHit { get; init; }
public string? JobId { get; init; }
}

/// <summary>
/// Replay request for slice verification.
/// </summary>
public sealed record SliceReplayRequest
{
public required string SliceDigest { get; init; }
}

/// <summary>
/// Response from slice replay verification.
/// </summary>
public sealed record SliceReplayResponse
{
public required bool Match { get; init; }
public required string OriginalDigest { get; init; }
public required string RecomputedDigest { get; init; }
public SliceDiff? Diff { get; init; }
}

/// <summary>
/// Diff information when a replay does not match.
/// </summary>
public sealed record SliceDiff
{
public IReadOnlyList<string>? MissingNodes { get; init; }
public IReadOnlyList<string>? ExtraNodes { get; init; }
public IReadOnlyList<string>? MissingEdges { get; init; }
public IReadOnlyList<string>? ExtraEdges { get; init; }
public string? VerdictDiff { get; init; }
}

/// <summary>
/// Service for querying and managing reachability slices.
/// </summary>
public interface ISliceQueryService
{
/// <summary>
/// Queries reachability for a CVE/symbols and generates a slice.
/// </summary>
Task<SliceQueryResponse> QueryAsync(
SliceQueryRequest request,
CancellationToken cancellationToken = default);

/// <summary>
/// Retrieves an attested slice by digest.
/// </summary>
Task<ReachabilitySlice?> GetSliceAsync(
string digest,
CancellationToken cancellationToken = default);

/// <summary>
/// Retrieves the DSSE envelope for a slice.
/// </summary>
Task<object?> GetSliceDsseAsync(
string digest,
CancellationToken cancellationToken = default);

/// <summary>
/// Verifies slice reproducibility by recomputing.
/// </summary>
Task<SliceReplayResponse> ReplayAsync(
SliceReplayRequest request,
CancellationToken cancellationToken = default);
}
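// Usage sketch (illustrative only; assumes an ISliceQueryService implementation resolved from DI and
// hypothetical scan/CVE/symbol identifiers):
//
//     var response = await sliceQueryService.QueryAsync(new SliceQueryRequest
//     {
//         ScanId = "scan-123",
//         CveId = "CVE-2024-0001",
//         Symbols = new[] { "Acme.Crypto.Decrypt" }
//     }, cancellationToken);
//
//     // CacheHit reports whether an existing attested slice was reused; a non-null JobId would mean
//     // the slice exceeded the synchronous size limit and is being generated asynchronously.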
@@ -0,0 +1,8 @@
using StellaOps.Scanner.Triage.Entities;

namespace StellaOps.Scanner.WebService.Services;

public interface ITriageQueryService
{
Task<TriageFinding?> GetFindingAsync(string findingId, CancellationToken cancellationToken = default);
}
@@ -0,0 +1,640 @@
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Storage.Catalog;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.WebService.Utilities;

namespace StellaOps.Scanner.WebService.Services;

internal interface ISbomByosUploadService
{
Task<(SbomUploadResponseDto Response, SbomValidationSummaryDto Validation)> UploadAsync(
SbomUploadRequestDto request,
CancellationToken cancellationToken);

Task<SbomUploadRecordDto?> GetRecordAsync(string sbomId, CancellationToken cancellationToken);
}

internal sealed class SbomByosUploadService : ISbomByosUploadService
{
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = false
};

private readonly IScanCoordinator _scanCoordinator;
private readonly ISbomIngestionService _ingestionService;
private readonly ISbomUploadStore _uploadStore;
private readonly TimeProvider _timeProvider;
private readonly ILogger<SbomByosUploadService> _logger;

public SbomByosUploadService(
IScanCoordinator scanCoordinator,
ISbomIngestionService ingestionService,
ISbomUploadStore uploadStore,
TimeProvider timeProvider,
ILogger<SbomByosUploadService> logger)
{
_scanCoordinator = scanCoordinator ?? throw new ArgumentNullException(nameof(scanCoordinator));
_ingestionService = ingestionService ?? throw new ArgumentNullException(nameof(ingestionService));
_uploadStore = uploadStore ?? throw new ArgumentNullException(nameof(uploadStore));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}

public async Task<(SbomUploadResponseDto Response, SbomValidationSummaryDto Validation)> UploadAsync(
SbomUploadRequestDto request,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(request);

var errors = new List<string>();
if (string.IsNullOrWhiteSpace(request.ArtifactRef))
{
errors.Add("artifactRef is required.");
}

if (!string.IsNullOrWhiteSpace(request.ArtifactDigest) && !request.ArtifactDigest.Contains(':', StringComparison.Ordinal))
{
errors.Add("artifactDigest must include algorithm prefix (e.g. sha256:...).");
}

var document = TryParseDocument(request, out var parseErrors);
if (parseErrors.Count > 0)
{
errors.AddRange(parseErrors);
}

if (errors.Count > 0)
{
var validation = new SbomValidationSummaryDto
{
Valid = false,
Errors = errors
};

return (new SbomUploadResponseDto { ValidationResult = validation }, validation);
}

using (document)
{
var root = document!.RootElement;
var (format, formatVersion) = ResolveFormat(root, request.Format);
var validationWarnings = new List<string>();
var validationErrors = ValidateFormat(root, format, formatVersion, validationWarnings);

if (validationErrors.Count > 0)
{
var invalid = new SbomValidationSummaryDto
{
Valid = false,
Errors = validationErrors
};
return (new SbomUploadResponseDto { ValidationResult = invalid }, invalid);
}

var normalized = Normalize(root, format);
var (qualityScore, qualityWarnings) = Score(normalized);
var digest = ComputeDigest(root);
var sbomId = CatalogIdFactory.CreateArtifactId(ArtifactDocumentType.ImageBom, digest);

var warnings = new List<string>();
warnings.AddRange(validationWarnings);
warnings.AddRange(qualityWarnings);

var metadata = BuildMetadata(request, format, formatVersion, digest, sbomId);
var target = new ScanTarget(request.ArtifactRef.Trim(), request.ArtifactDigest?.Trim()).Normalize();
var scanId = ScanIdGenerator.Create(target, force: false, clientRequestId: null, metadata);

var ingestion = await _ingestionService
.IngestAsync(scanId, document, format, digest, cancellationToken)
.ConfigureAwait(false);

var submission = new ScanSubmission(target, force: false, clientRequestId: null, metadata);
var scanResult = await _scanCoordinator.SubmitAsync(submission, cancellationToken).ConfigureAwait(false);
if (!string.Equals(scanResult.Snapshot.ScanId.Value, scanId.Value, StringComparison.Ordinal))
{
_logger.LogWarning(
"BYOS scan id mismatch. computed={Computed} submitted={Submitted}",
scanId.Value,
scanResult.Snapshot.ScanId.Value);
}

var now = _timeProvider.GetUtcNow();
var validation = new SbomValidationSummaryDto
{
Valid = true,
QualityScore = qualityScore,
Warnings = warnings,
ComponentCount = normalized.Count
};

var response = new SbomUploadResponseDto
{
SbomId = ingestion.SbomId,
ArtifactRef = target.Reference ?? string.Empty,
ArtifactDigest = target.Digest,
Digest = ingestion.Digest,
Format = format,
FormatVersion = formatVersion,
ValidationResult = validation,
AnalysisJobId = scanResult.Snapshot.ScanId.Value,
UploadedAtUtc = now
};

var record = new SbomUploadRecord(
SbomId: ingestion.SbomId,
ArtifactRef: target.Reference ?? string.Empty,
ArtifactDigest: target.Digest,
Digest: ingestion.Digest,
Format: format,
FormatVersion: formatVersion,
AnalysisJobId: scanResult.Snapshot.ScanId.Value,
ComponentCount: normalized.Count,
QualityScore: qualityScore,
Warnings: warnings,
Source: request.Source,
CreatedAtUtc: now);

await _uploadStore.AddAsync(record, cancellationToken).ConfigureAwait(false);

return (response, validation);
}
}

public async Task<SbomUploadRecordDto?> GetRecordAsync(string sbomId, CancellationToken cancellationToken)
{
var record = await _uploadStore.GetAsync(sbomId, cancellationToken).ConfigureAwait(false);
if (record is null)
{
return null;
}

return new SbomUploadRecordDto
{
SbomId = record.SbomId,
ArtifactRef = record.ArtifactRef,
ArtifactDigest = record.ArtifactDigest,
Digest = record.Digest,
Format = record.Format,
FormatVersion = record.FormatVersion,
AnalysisJobId = record.AnalysisJobId,
ComponentCount = record.ComponentCount,
QualityScore = record.QualityScore,
Warnings = record.Warnings,
Source = record.Source,
CreatedAtUtc = record.CreatedAtUtc
};
}

private static JsonDocument? TryParseDocument(SbomUploadRequestDto request, out List<string> errors)
{
errors = new List<string>();

if (request.Sbom is { } sbomElement && sbomElement.ValueKind == JsonValueKind.Object)
{
var raw = sbomElement.GetRawText();
return JsonDocument.Parse(raw);
}

if (!string.IsNullOrWhiteSpace(request.SbomBase64))
{
try
{
var bytes = Convert.FromBase64String(request.SbomBase64);
return JsonDocument.Parse(bytes);
}
catch (FormatException)
{
errors.Add("sbomBase64 is not valid base64.");
return null;
}
catch (JsonException ex)
{
errors.Add($"Invalid SBOM JSON: {ex.Message}");
return null;
}
}

errors.Add("sbom or sbomBase64 is required.");
return null;
}

private static (string Format, string FormatVersion) ResolveFormat(JsonElement root, string? requestedFormat)
{
var format = string.IsNullOrWhiteSpace(requestedFormat)
? DetectFormat(root)
: requestedFormat.Trim().ToLowerInvariant();

if (string.IsNullOrWhiteSpace(format))
{
return (string.Empty, string.Empty);
}

var formatVersion = format switch
{
SbomFormats.CycloneDx => GetCycloneDxVersion(root),
SbomFormats.Spdx => GetSpdxVersion(root),
_ => string.Empty
};

return (format, formatVersion);
}

private static string? DetectFormat(JsonElement root)
{
if (root.ValueKind != JsonValueKind.Object)
{
return null;
}

if (root.TryGetProperty("bomFormat", out var bomFormat)
&& bomFormat.ValueKind == JsonValueKind.String
&& string.Equals(bomFormat.GetString(), "CycloneDX", StringComparison.OrdinalIgnoreCase))
{
return SbomFormats.CycloneDx;
}

if (root.TryGetProperty("spdxVersion", out var spdxVersion)
&& spdxVersion.ValueKind == JsonValueKind.String
&& !string.IsNullOrWhiteSpace(spdxVersion.GetString()))
{
return SbomFormats.Spdx;
}

return null;
}

private static IReadOnlyList<string> ValidateFormat(
JsonElement root,
string format,
string formatVersion,
List<string> warnings)
{
var errors = new List<string>();

if (string.IsNullOrWhiteSpace(format))
{
errors.Add("Unable to detect SBOM format.");
return errors;
}

if (string.Equals(format, SbomFormats.CycloneDx, StringComparison.OrdinalIgnoreCase))
{
if (!root.TryGetProperty("bomFormat", out var bomFormat) || bomFormat.ValueKind != JsonValueKind.String)
{
errors.Add("CycloneDX SBOM must include bomFormat.");
}

if (string.IsNullOrWhiteSpace(formatVersion))
{
errors.Add("CycloneDX SBOM must include specVersion.");
}
else if (!IsSupportedCycloneDx(formatVersion))
{
errors.Add($"CycloneDX specVersion '{formatVersion}' is not supported (1.4-1.6).");
}

if (!root.TryGetProperty("components", out var components) || components.ValueKind != JsonValueKind.Array)
{
warnings.Add("CycloneDX SBOM does not include a components array.");
}
}
else if (string.Equals(format, SbomFormats.Spdx, StringComparison.OrdinalIgnoreCase))
{
if (!root.TryGetProperty("spdxVersion", out var spdxVersion) || spdxVersion.ValueKind != JsonValueKind.String)
{
errors.Add("SPDX SBOM must include spdxVersion.");
}

if (string.IsNullOrWhiteSpace(formatVersion))
{
errors.Add("SPDX SBOM version could not be determined.");
}
else if (!IsSupportedSpdx(formatVersion))
{
errors.Add($"SPDX version '{formatVersion}' is not supported (2.3, 3.0).");
}
else if (formatVersion.StartsWith("3.0", StringComparison.OrdinalIgnoreCase))
{
warnings.Add("SPDX 3.0 schema validation is pending; structural checks only.");
}

if (!root.TryGetProperty("packages", out var packages) || packages.ValueKind != JsonValueKind.Array)
{
warnings.Add("SPDX SBOM does not include a packages array.");
}
}
else
{
errors.Add($"Unsupported SBOM format '{format}'.");
}

return errors;
}

private static bool IsSupportedCycloneDx(string version)
=> version.StartsWith("1.4", StringComparison.OrdinalIgnoreCase)
|| version.StartsWith("1.5", StringComparison.OrdinalIgnoreCase)
|| version.StartsWith("1.6", StringComparison.OrdinalIgnoreCase);

private static bool IsSupportedSpdx(string version)
=> version.StartsWith("2.3", StringComparison.OrdinalIgnoreCase)
|| version.StartsWith("3.0", StringComparison.OrdinalIgnoreCase);

private static IReadOnlyList<SbomNormalizedComponent> Normalize(JsonElement root, string format)
{
if (string.Equals(format, SbomFormats.CycloneDx, StringComparison.OrdinalIgnoreCase))
{
return NormalizeCycloneDx(root);
}

if (string.Equals(format, SbomFormats.Spdx, StringComparison.OrdinalIgnoreCase))
{
return NormalizeSpdx(root);
}

return Array.Empty<SbomNormalizedComponent>();
}

private static IReadOnlyList<SbomNormalizedComponent> NormalizeCycloneDx(JsonElement root)
{
if (!root.TryGetProperty("components", out var components) || components.ValueKind != JsonValueKind.Array)
{
return Array.Empty<SbomNormalizedComponent>();
}

var results = new List<SbomNormalizedComponent>();

foreach (var component in components.EnumerateArray())
{
if (component.ValueKind != JsonValueKind.Object)
{
continue;
}

var name = GetString(component, "name");
var version = GetString(component, "version");
var purl = GetString(component, "purl");
var license = ExtractCycloneDxLicense(component);

if (string.IsNullOrWhiteSpace(name) && string.IsNullOrWhiteSpace(purl))
{
continue;
}

var key = NormalizeKey(purl, name);
results.Add(new SbomNormalizedComponent(key, name, version, purl, license));
}

return results
.OrderBy(c => c.Key, StringComparer.Ordinal)
.ThenBy(c => c.Version ?? string.Empty, StringComparer.Ordinal)
.ToList();
}

private static IReadOnlyList<SbomNormalizedComponent> NormalizeSpdx(JsonElement root)
{
if (!root.TryGetProperty("packages", out var packages) || packages.ValueKind != JsonValueKind.Array)
{
return Array.Empty<SbomNormalizedComponent>();
}

var results = new List<SbomNormalizedComponent>();

foreach (var package in packages.EnumerateArray())
{
if (package.ValueKind != JsonValueKind.Object)
{
continue;
}

var name = GetString(package, "name");
var version = GetString(package, "versionInfo");
var purl = ExtractSpdxPurl(package);
var license = GetString(package, "licenseDeclared");
if (string.IsNullOrWhiteSpace(license))
{
license = GetString(package, "licenseConcluded");
}

if (string.IsNullOrWhiteSpace(name) && string.IsNullOrWhiteSpace(purl))
{
continue;
}

var key = NormalizeKey(purl, name);
results.Add(new SbomNormalizedComponent(key, name, version, purl, license));
}

return results
.OrderBy(c => c.Key, StringComparer.Ordinal)
.ThenBy(c => c.Version ?? string.Empty, StringComparer.Ordinal)
.ToList();
}

private static (double Score, IReadOnlyList<string> Warnings) Score(IReadOnlyList<SbomNormalizedComponent> components)
{
if (components is null || components.Count == 0)
{
return (0.0, new[] { "No components detected in SBOM." });
}

var total = components.Count;
var withPurl = components.Count(c => !string.IsNullOrWhiteSpace(c.Purl));
var withVersion = components.Count(c => !string.IsNullOrWhiteSpace(c.Version));
var withLicense = components.Count(c => !string.IsNullOrWhiteSpace(c.License));

var purlRatio = (double)withPurl / total;
var versionRatio = (double)withVersion / total;
var licenseRatio = (double)withLicense / total;

var score = (purlRatio * 0.4) + (versionRatio * 0.3) + (licenseRatio * 0.3);
var warnings = new List<string>();

if (withPurl < total)
{
warnings.Add($"{total - withPurl} components missing PURL values.");
}

if (withVersion < total)
{
warnings.Add($"{total - withVersion} components missing version values.");
}

if (withLicense < total)
{
warnings.Add($"{total - withLicense} components missing license values.");
}

return (Math.Round(score, 2), warnings);
}
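// Worked example (illustrative, not from the original source): 10 components where 8 carry a PURL,
// 9 carry a version and 5 carry a license score (0.8 * 0.4) + (0.9 * 0.3) + (0.5 * 0.3) = 0.74,
// with warnings emitted for the 2 missing PURLs, 1 missing version and 5 missing licenses.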

private static string ComputeDigest(JsonElement root)
{
var bytes = JsonSerializer.SerializeToUtf8Bytes(root, JsonOptions);
var hash = SHA256.HashData(bytes);
return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
}

private static Dictionary<string, string> BuildMetadata(
SbomUploadRequestDto request,
string format,
string formatVersion,
string digest,
string sbomId)
{
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
["sbom.digest"] = digest,
["sbom.id"] = sbomId,
["sbom.format"] = format,
["sbom.format_version"] = formatVersion
};

AddIfPresent(metadata, "byos.source.tool", request.Source?.Tool);
AddIfPresent(metadata, "byos.source.version", request.Source?.Version);
AddIfPresent(metadata, "byos.ci.build_id", request.Source?.CiContext?.BuildId);
AddIfPresent(metadata, "byos.ci.repository", request.Source?.CiContext?.Repository);

return metadata;
}

private static void AddIfPresent(Dictionary<string, string> metadata, string key, string? value)
{
if (!string.IsNullOrWhiteSpace(value))
{
metadata[key] = value.Trim();
}
}

private static string GetCycloneDxVersion(JsonElement root)
{
var spec = GetString(root, "specVersion");
return string.IsNullOrWhiteSpace(spec) ? string.Empty : spec.Trim();
}

private static string GetSpdxVersion(JsonElement root)
{
var version = GetString(root, "spdxVersion");
if (string.IsNullOrWhiteSpace(version))
{
return string.Empty;
}

var trimmed = version.Trim();
return trimmed.StartsWith("SPDX-", StringComparison.OrdinalIgnoreCase)
? trimmed[5..]
: trimmed;
}

private static string NormalizeKey(string? purl, string name)
{
if (!string.IsNullOrWhiteSpace(purl))
{
var trimmed = purl.Trim();
var qualifierIndex = trimmed.IndexOf('?');
if (qualifierIndex > 0)
{
trimmed = trimmed[..qualifierIndex];
}

var atIndex = trimmed.LastIndexOf('@');
if (atIndex > 4)
{
trimmed = trimmed[..atIndex];
}

return trimmed;
}

return name.Trim();
}

private static string? ExtractCycloneDxLicense(JsonElement component)
{
if (!component.TryGetProperty("licenses", out var licenses) || licenses.ValueKind != JsonValueKind.Array)
{
return null;
}

foreach (var entry in licenses.EnumerateArray())
{
if (entry.ValueKind != JsonValueKind.Object)
{
continue;
}

if (entry.TryGetProperty("license", out var licenseObj) && licenseObj.ValueKind == JsonValueKind.Object)
{
var id = GetString(licenseObj, "id");
if (!string.IsNullOrWhiteSpace(id))
{
return id;
}

var name = GetString(licenseObj, "name");
if (!string.IsNullOrWhiteSpace(name))
{
return name;
}
}
}

return null;
}

private static string? ExtractSpdxPurl(JsonElement package)
{
if (!package.TryGetProperty("externalRefs", out var refs) || refs.ValueKind != JsonValueKind.Array)
{
return null;
}

foreach (var reference in refs.EnumerateArray())
{
if (reference.ValueKind != JsonValueKind.Object)
{
continue;
}

var referenceType = GetString(reference, "referenceType");
if (!string.Equals(referenceType, "purl", StringComparison.OrdinalIgnoreCase))
{
continue;
}

var locator = GetString(reference, "referenceLocator");
if (!string.IsNullOrWhiteSpace(locator))
{
return locator;
}
}

return null;
}

private static string GetString(JsonElement element, string property)
{
if (element.ValueKind != JsonValueKind.Object)
{
return string.Empty;
}

if (!element.TryGetProperty(property, out var prop))
{
return string.Empty;
}

return prop.ValueKind == JsonValueKind.String ? prop.GetString() ?? string.Empty : string.Empty;
}

private sealed record SbomNormalizedComponent(
string Key,
string Name,
string? Version,
string? Purl,
string? License);
}
@@ -146,7 +146,7 @@ internal sealed class SbomIngestionService : ISbomIngestionService
{
if (string.Equals(format, SbomFormats.CycloneDx, StringComparison.OrdinalIgnoreCase))
{
return (ArtifactDocumentFormat.CycloneDxJson, "application/vnd.cyclonedx+json");
return (ArtifactDocumentFormat.CycloneDxJson, "application/vnd.cyclonedx+json; version=1.7");
}

if (string.Equals(format, SbomFormats.Spdx, StringComparison.OrdinalIgnoreCase))

@@ -0,0 +1,50 @@
using System.Collections.Concurrent;
using StellaOps.Scanner.WebService.Contracts;

namespace StellaOps.Scanner.WebService.Services;

internal sealed record SbomUploadRecord(
string SbomId,
string ArtifactRef,
string? ArtifactDigest,
string Digest,
string Format,
string FormatVersion,
string AnalysisJobId,
int ComponentCount,
double QualityScore,
IReadOnlyList<string> Warnings,
SbomUploadSourceDto? Source,
DateTimeOffset CreatedAtUtc);

internal interface ISbomUploadStore
{
Task AddAsync(SbomUploadRecord record, CancellationToken cancellationToken);
Task<SbomUploadRecord?> GetAsync(string sbomId, CancellationToken cancellationToken);
}

internal sealed class InMemorySbomUploadStore : ISbomUploadStore
{
private readonly ConcurrentDictionary<string, SbomUploadRecord> _records = new(StringComparer.OrdinalIgnoreCase);

public Task AddAsync(SbomUploadRecord record, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(record);
cancellationToken.ThrowIfCancellationRequested();

_records[record.SbomId] = record;
return Task.CompletedTask;
}

public Task<SbomUploadRecord?> GetAsync(string sbomId, CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested();
if (string.IsNullOrWhiteSpace(sbomId))
{
return Task.FromResult<SbomUploadRecord?>(null);
}

_records.TryGetValue(sbomId.Trim(), out var record);
return Task.FromResult(record);
}
}
@@ -0,0 +1,344 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Cache.Abstractions;
using StellaOps.Scanner.Core;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Reachability.Slices;
using StellaOps.Scanner.Reachability.Slices.Replay;
using StellaOps.Scanner.WebService.Domain;

namespace StellaOps.Scanner.WebService.Services;

/// <summary>
/// Options for slice query service.
/// </summary>
public sealed class SliceQueryServiceOptions
{
/// <summary>
/// Maximum slice size (nodes + edges) for synchronous generation.
/// Larger slices return 202 Accepted with a job ID.
/// </summary>
public int MaxSyncSliceSize { get; set; } = 10_000;

/// <summary>
/// Whether to cache generated slices.
/// </summary>
public bool EnableCache { get; set; } = true;
}

/// <summary>
/// Service for querying and managing reachability slices.
/// </summary>
public sealed class SliceQueryService : ISliceQueryService
{
private readonly ISliceCache _cache;
private readonly SliceExtractor _extractor;
private readonly SliceCasStorage _casStorage;
private readonly SliceDiffComputer _diffComputer;
private readonly SliceHasher _hasher;
private readonly IFileContentAddressableStore _cas;
private readonly IScanMetadataRepository _scanRepo;
private readonly SliceQueryServiceOptions _options;
private readonly ILogger<SliceQueryService> _logger;

public SliceQueryService(
ISliceCache cache,
SliceExtractor extractor,
SliceCasStorage casStorage,
SliceDiffComputer diffComputer,
SliceHasher hasher,
IFileContentAddressableStore cas,
IScanMetadataRepository scanRepo,
IOptions<SliceQueryServiceOptions> options,
ILogger<SliceQueryService> logger)
{
_cache = cache ?? throw new ArgumentNullException(nameof(cache));
_extractor = extractor ?? throw new ArgumentNullException(nameof(extractor));
_casStorage = casStorage ?? throw new ArgumentNullException(nameof(casStorage));
_diffComputer = diffComputer ?? throw new ArgumentNullException(nameof(diffComputer));
_hasher = hasher ?? throw new ArgumentNullException(nameof(hasher));
_cas = cas ?? throw new ArgumentNullException(nameof(cas));
_scanRepo = scanRepo ?? throw new ArgumentNullException(nameof(scanRepo));
_options = options?.Value ?? new SliceQueryServiceOptions();
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}

/// <inheritdoc />
public async Task<SliceQueryResponse> QueryAsync(
SliceQueryRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);

_logger.LogDebug("Processing slice query for scan {ScanId}, CVE {CveId}", request.ScanId, request.CveId);

// Check cache first
var cacheKey = ComputeCacheKey(request);

if (_options.EnableCache)
{
var cached = await _cache.TryGetAsync(cacheKey, cancellationToken).ConfigureAwait(false);
if (cached is not null)
{
_logger.LogDebug("Cache hit for slice query {CacheKey}", cacheKey);
return new SliceQueryResponse
{
SliceDigest = cached.SliceDigest,
Verdict = cached.Verdict,
Confidence = cached.Confidence,
PathWitnesses = cached.PathWitnesses.ToList(),
CacheHit = true
};
}
}

// Load scan data
var scanData = await LoadScanDataAsync(request.ScanId, cancellationToken).ConfigureAwait(false);
if (scanData == null)
{
throw new InvalidOperationException($"Scan {request.ScanId} not found");
}

// Build extraction request
var extractionRequest = BuildExtractionRequest(request, scanData);

// Extract slice
var slice = _extractor.Extract(extractionRequest);

// Store in CAS
var casResult = await _casStorage.StoreAsync(slice, _cas, cancellationToken).ConfigureAwait(false);

// Cache the result
if (_options.EnableCache)
{
var cacheEntry = new CachedSliceResult
{
SliceDigest = casResult.SliceDigest,
Verdict = slice.Verdict.Status.ToString().ToLowerInvariant(),
Confidence = slice.Verdict.Confidence,
PathWitnesses = slice.Verdict.PathWitnesses.IsDefaultOrEmpty
? Array.Empty<string>()
: slice.Verdict.PathWitnesses.ToList(),
CachedAt = DateTimeOffset.UtcNow
};
await _cache.SetAsync(cacheKey, cacheEntry, TimeSpan.FromHours(1), cancellationToken).ConfigureAwait(false);
}

_logger.LogInformation(
"Generated slice {Digest} for scan {ScanId}: {NodeCount} nodes, {EdgeCount} edges, verdict={Verdict}",
casResult.SliceDigest,
request.ScanId,
slice.Subgraph.Nodes.Length,
slice.Subgraph.Edges.Length,
slice.Verdict.Status);

return BuildResponse(slice, casResult.SliceDigest, cacheHit: false);
}

/// <inheritdoc />
public async Task<ReachabilitySlice?> GetSliceAsync(
string digest,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(digest);

var casKey = ExtractDigestHex(digest);
var stream = await _cas.GetAsync(new FileCasGetRequest(casKey), cancellationToken).ConfigureAwait(false);

if (stream == null) return null;

await using (stream)
{
return await System.Text.Json.JsonSerializer.DeserializeAsync<ReachabilitySlice>(
stream,
cancellationToken: cancellationToken).ConfigureAwait(false);
}
}

/// <inheritdoc />
public async Task<object?> GetSliceDsseAsync(
string digest,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(digest);

var dsseKey = $"{ExtractDigestHex(digest)}.dsse";
var stream = await _cas.GetAsync(new FileCasGetRequest(dsseKey), cancellationToken).ConfigureAwait(false);

if (stream == null) return null;

await using (stream)
{
return await System.Text.Json.JsonSerializer.DeserializeAsync<object>(
stream,
cancellationToken: cancellationToken).ConfigureAwait(false);
}
}

/// <inheritdoc />
public async Task<SliceReplayResponse> ReplayAsync(
SliceReplayRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);

_logger.LogDebug("Replaying slice {Digest}", request.SliceDigest);

// Load original slice
var original = await GetSliceAsync(request.SliceDigest, cancellationToken).ConfigureAwait(false);
if (original == null)
{
throw new InvalidOperationException($"Slice {request.SliceDigest} not found");
}

// Load scan data for recomputation
var scanId = ExtractScanIdFromManifest(original.Manifest);
var scanData = await LoadScanDataAsync(scanId, cancellationToken).ConfigureAwait(false);
if (scanData == null)
{
throw new InvalidOperationException($"Cannot replay: scan {scanId} not found");
}

// Recompute slice with same parameters
var extractionRequest = new SliceExtractionRequest(
scanData.Graph,
original.Inputs,
original.Query,
original.Manifest);

var recomputed = _extractor.Extract(extractionRequest);
var recomputedDigest = _hasher.ComputeDigest(recomputed);

// Compare
var diffResult = _diffComputer.Compute(original, recomputed);

_logger.LogInformation(
"Replay verification for {Digest}: match={Match}",
request.SliceDigest,
diffResult.Match);

return new SliceReplayResponse
{
Match = diffResult.Match,
OriginalDigest = request.SliceDigest,
RecomputedDigest = recomputedDigest.Digest,
Diff = diffResult.Match ? null : new SliceDiff
{
MissingNodes = diffResult.NodesDiff.Missing.IsDefaultOrEmpty ? null : diffResult.NodesDiff.Missing.ToList(),
ExtraNodes = diffResult.NodesDiff.Extra.IsDefaultOrEmpty ? null : diffResult.NodesDiff.Extra.ToList(),
MissingEdges = diffResult.EdgesDiff.Missing.IsDefaultOrEmpty ? null : diffResult.EdgesDiff.Missing.ToList(),
ExtraEdges = diffResult.EdgesDiff.Extra.IsDefaultOrEmpty ? null : diffResult.EdgesDiff.Extra.ToList(),
VerdictDiff = diffResult.VerdictDiff
}
};
}

private static SliceQueryResponse BuildResponse(ReachabilitySlice slice, string digest, bool cacheHit)
|
||||
{
|
||||
return new SliceQueryResponse
|
||||
{
|
||||
SliceDigest = digest,
|
||||
Verdict = slice.Verdict.Status.ToString().ToLowerInvariant(),
|
||||
Confidence = slice.Verdict.Confidence,
|
||||
PathWitnesses = slice.Verdict.PathWitnesses.IsDefaultOrEmpty
|
||||
? null
|
||||
: slice.Verdict.PathWitnesses.ToList(),
|
||||
CacheHit = cacheHit,
|
||||
JobId = null
|
||||
};
|
||||
}
|
||||
|
||||
private SliceExtractionRequest BuildExtractionRequest(SliceQueryRequest request, ScanData scanData)
|
||||
{
|
||||
var query = new SliceQuery
|
||||
{
|
||||
CveId = request.CveId,
|
||||
TargetSymbols = request.Symbols?.ToImmutableArray() ?? ImmutableArray<string>.Empty,
|
||||
Entrypoints = request.Entrypoints?.ToImmutableArray() ?? ImmutableArray<string>.Empty,
|
||||
PolicyHash = request.PolicyHash
|
||||
};
|
||||
|
||||
var inputs = new SliceInputs
|
||||
{
|
||||
GraphDigest = scanData.GraphDigest,
|
||||
BinaryDigests = scanData.BinaryDigests,
|
||||
SbomDigest = scanData.SbomDigest,
|
||||
LayerDigests = scanData.LayerDigests
|
||||
};
|
||||
|
||||
return new SliceExtractionRequest(scanData.Graph, inputs, query, scanData.Manifest);
|
||||
}
|
||||
|
||||
private static string ComputeCacheKey(SliceQueryRequest request)
|
||||
{
|
||||
var keyParts = new[]
|
||||
{
|
||||
request.ScanId,
|
||||
request.CveId ?? "",
|
||||
string.Join(",", request.Symbols?.OrderBy(s => s, StringComparer.Ordinal) ?? Array.Empty<string>()),
|
||||
string.Join(",", request.Entrypoints?.OrderBy(e => e, StringComparer.Ordinal) ?? Array.Empty<string>()),
|
||||
request.PolicyHash ?? ""
|
||||
};
|
||||
|
||||
var combined = string.Join("|", keyParts);
|
||||
var hash = SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(combined));
|
||||
return "slice:" + Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
|
||||
|
||||
private async Task<ScanData?> LoadScanDataAsync(string scanId, CancellationToken cancellationToken)
|
||||
{
|
||||
// This would load the full scan data including call graph
|
||||
// For now, return a stub - actual implementation depends on scan storage
|
||||
var metadata = await _scanRepo.GetMetadataAsync(scanId, cancellationToken).ConfigureAwait(false);
|
||||
if (metadata == null) return null;
|
||||
|
||||
// Load call graph from CAS or graph store
|
||||
// This is a placeholder - actual implementation would hydrate the full graph
|
||||
var emptyGraph = new RichGraph(
|
||||
Nodes: Array.Empty<RichGraphNode>(),
|
||||
Edges: Array.Empty<RichGraphEdge>(),
|
||||
Roots: Array.Empty<RichGraphRoot>(),
|
||||
Analyzer: new RichGraphAnalyzer("scanner", "1.0.0", null));
|
||||
|
||||
return new ScanData
|
||||
{
|
||||
ScanId = scanId,
|
||||
Graph = metadata?.RichGraph ?? emptyGraph,
|
||||
GraphDigest = metadata?.GraphDigest ?? "",
|
||||
BinaryDigests = metadata?.BinaryDigests ?? ImmutableArray<string>.Empty,
|
||||
SbomDigest = metadata?.SbomDigest,
|
||||
LayerDigests = metadata?.LayerDigests ?? ImmutableArray<string>.Empty,
|
||||
Manifest = metadata?.Manifest ?? new ScanManifest
|
||||
{
|
||||
ScanId = scanId,
|
||||
Timestamp = DateTimeOffset.UtcNow.ToString("O"),
|
||||
ScannerVersion = "1.0.0",
|
||||
Environment = "production"
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private static string ExtractScanIdFromManifest(ScanManifest manifest)
|
||||
{
|
||||
return manifest.ScanId ?? manifest.Subject?.Digest ?? "unknown";
|
||||
}
|
||||
|
||||
private static string ExtractDigestHex(string prefixed)
|
||||
{
|
||||
var colonIndex = prefixed.IndexOf(':');
|
||||
return colonIndex >= 0 ? prefixed[(colonIndex + 1)..] : prefixed;
|
||||
}
|
||||
|
||||
private sealed record ScanData
|
||||
{
|
||||
public required string ScanId { get; init; }
|
||||
public required RichGraph Graph { get; init; }
|
||||
public required string GraphDigest { get; init; }
|
||||
public ImmutableArray<string> BinaryDigests { get; init; }
|
||||
public string? SbomDigest { get; init; }
|
||||
public ImmutableArray<string> LayerDigests { get; init; }
|
||||
public required ScanManifest Manifest { get; init; }
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,35 @@
|
||||
using Microsoft.EntityFrameworkCore;
|
||||
using StellaOps.Scanner.Triage;
|
||||
using StellaOps.Scanner.Triage.Entities;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
public sealed class TriageQueryService : ITriageQueryService
|
||||
{
|
||||
private readonly TriageDbContext _dbContext;
|
||||
private readonly ILogger<TriageQueryService> _logger;
|
||||
|
||||
public TriageQueryService(TriageDbContext dbContext, ILogger<TriageQueryService> logger)
|
||||
{
|
||||
_dbContext = dbContext ?? throw new ArgumentNullException(nameof(dbContext));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public async Task<TriageFinding?> GetFindingAsync(string findingId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (!Guid.TryParse(findingId, out var id))
|
||||
{
|
||||
_logger.LogWarning("Invalid finding id: {FindingId}", findingId);
|
||||
return null;
|
||||
}
|
||||
|
||||
return await _dbContext.Findings
|
||||
.Include(f => f.ReachabilityResults)
|
||||
.Include(f => f.RiskResults)
|
||||
.Include(f => f.EffectiveVexRecords)
|
||||
.Include(f => f.EvidenceArtifacts)
|
||||
.AsNoTracking()
|
||||
.FirstOrDefaultAsync(f => f.Id == id, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
@@ -9,7 +9,7 @@
|
||||
<RootNamespace>StellaOps.Scanner.WebService</RootNamespace>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="CycloneDX.Core" Version="10.0.2" />
|
||||
<PackageReference Include="CycloneDX.Core" Version="11.0.0" />
|
||||
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0" />
|
||||
<PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
|
||||
<PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />
|
||||
@@ -38,6 +38,7 @@
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.FS/StellaOps.Scanner.Surface.FS.csproj" />
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.Secrets/StellaOps.Scanner.Surface.Secrets.csproj" />
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Triage/StellaOps.Scanner.Triage.csproj" />
|
||||
<ProjectReference Include="../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
|
||||
<ProjectReference Include="../../Zastava/__Libraries/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj" />
|
||||
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Reachability/StellaOps.Scanner.Reachability.csproj" />
|
||||
|
||||
@@ -9,3 +9,4 @@
|
||||
| `DRIFT-3600-API` | `docs/implplan/SPRINT_3600_0003_0001_drift_detection_engine.md` | DONE | Add reachability drift endpoints (`/api/v1/scans/{id}/drift`, `/api/v1/drift/{id}/sinks`) + integration tests. |
|
||||
| `SCAN-API-3103-001` | `docs/implplan/SPRINT_3103_0001_0001_scanner_api_ingestion_completion.md` | DONE | Implement missing ingestion services + DI for callgraph/SBOM endpoints and add deterministic integration tests. |
|
||||
| `EPSS-SCAN-011` | `docs/implplan/SPRINT_3410_0002_0001_epss_scanner_integration.md` | DONE | Wired `/api/v1/epss/*` endpoints and added `EpssEndpointsTests` integration coverage. |
|
||||
| `SLICE-3820-API` | `docs/implplan/SPRINT_3820_0001_0001_slice_query_replay_apis.md` | DOING | Implement slice query/replay endpoints, caching, and OpenAPI updates. |
|
||||
|
||||
@@ -163,6 +163,14 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Envelope
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.SmartDiff.Tests", "__Tests\StellaOps.Scanner.SmartDiff.Tests\StellaOps.Scanner.SmartDiff.Tests.csproj", "{71472842-BC50-4476-9224-31A9B0A1115A}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Advisory", "__Libraries\StellaOps.Scanner.Advisory\StellaOps.Scanner.Advisory.csproj", "{C6118565-FEC6-4AA4-BF2B-81C765D4919E}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Advisory.Tests", "__Tests\StellaOps.Scanner.Advisory.Tests\StellaOps.Scanner.Advisory.Tests.csproj", "{89920F9B-17CC-4D54-9985-2A4C06631488}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Evidence", "__Libraries\StellaOps.Scanner.Evidence\StellaOps.Scanner.Evidence.csproj", "{0D15A8D6-076D-4701-B838-6C0DB971F1BD}"
|
||||
EndProject
|
||||
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Evidence.Tests", "__Tests\StellaOps.Scanner.Evidence.Tests\StellaOps.Scanner.Evidence.Tests.csproj", "{EE463A2F-8DDB-42C5-BF63-48B9E2B4220C}"
|
||||
EndProject
|
||||
Global
|
||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||
Debug|Any CPU = Debug|Any CPU
|
||||
@@ -1097,6 +1105,54 @@ Global
|
||||
{71472842-BC50-4476-9224-31A9B0A1115A}.Release|x64.Build.0 = Release|Any CPU
|
||||
{71472842-BC50-4476-9224-31A9B0A1115A}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{71472842-BC50-4476-9224-31A9B0A1115A}.Release|x86.Build.0 = Release|Any CPU
|
||||
{C6118565-FEC6-4AA4-BF2B-81C765D4919E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{C6118565-FEC6-4AA4-BF2B-81C765D4919E}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{C6118565-FEC6-4AA4-BF2B-81C765D4919E}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{C6118565-FEC6-4AA4-BF2B-81C765D4919E}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{C6118565-FEC6-4AA4-BF2B-81C765D4919E}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{C6118565-FEC6-4AA4-BF2B-81C765D4919E}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{C6118565-FEC6-4AA4-BF2B-81C765D4919E}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{C6118565-FEC6-4AA4-BF2B-81C765D4919E}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{C6118565-FEC6-4AA4-BF2B-81C765D4919E}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{C6118565-FEC6-4AA4-BF2B-81C765D4919E}.Release|x64.Build.0 = Release|Any CPU
|
||||
{C6118565-FEC6-4AA4-BF2B-81C765D4919E}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{C6118565-FEC6-4AA4-BF2B-81C765D4919E}.Release|x86.Build.0 = Release|Any CPU
|
||||
{89920F9B-17CC-4D54-9985-2A4C06631488}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{89920F9B-17CC-4D54-9985-2A4C06631488}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{89920F9B-17CC-4D54-9985-2A4C06631488}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{89920F9B-17CC-4D54-9985-2A4C06631488}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{89920F9B-17CC-4D54-9985-2A4C06631488}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{89920F9B-17CC-4D54-9985-2A4C06631488}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{89920F9B-17CC-4D54-9985-2A4C06631488}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{89920F9B-17CC-4D54-9985-2A4C06631488}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{89920F9B-17CC-4D54-9985-2A4C06631488}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{89920F9B-17CC-4D54-9985-2A4C06631488}.Release|x64.Build.0 = Release|Any CPU
|
||||
{89920F9B-17CC-4D54-9985-2A4C06631488}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{89920F9B-17CC-4D54-9985-2A4C06631488}.Release|x86.Build.0 = Release|Any CPU
|
||||
{0D15A8D6-076D-4701-B838-6C0DB971F1BD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{0D15A8D6-076D-4701-B838-6C0DB971F1BD}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{0D15A8D6-076D-4701-B838-6C0DB971F1BD}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{0D15A8D6-076D-4701-B838-6C0DB971F1BD}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{0D15A8D6-076D-4701-B838-6C0DB971F1BD}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{0D15A8D6-076D-4701-B838-6C0DB971F1BD}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{0D15A8D6-076D-4701-B838-6C0DB971F1BD}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{0D15A8D6-076D-4701-B838-6C0DB971F1BD}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{0D15A8D6-076D-4701-B838-6C0DB971F1BD}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{0D15A8D6-076D-4701-B838-6C0DB971F1BD}.Release|x64.Build.0 = Release|Any CPU
|
||||
{0D15A8D6-076D-4701-B838-6C0DB971F1BD}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{0D15A8D6-076D-4701-B838-6C0DB971F1BD}.Release|x86.Build.0 = Release|Any CPU
|
||||
{EE463A2F-8DDB-42C5-BF63-48B9E2B4220C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
|
||||
{EE463A2F-8DDB-42C5-BF63-48B9E2B4220C}.Debug|Any CPU.Build.0 = Debug|Any CPU
|
||||
{EE463A2F-8DDB-42C5-BF63-48B9E2B4220C}.Debug|x64.ActiveCfg = Debug|Any CPU
|
||||
{EE463A2F-8DDB-42C5-BF63-48B9E2B4220C}.Debug|x64.Build.0 = Debug|Any CPU
|
||||
{EE463A2F-8DDB-42C5-BF63-48B9E2B4220C}.Debug|x86.ActiveCfg = Debug|Any CPU
|
||||
{EE463A2F-8DDB-42C5-BF63-48B9E2B4220C}.Debug|x86.Build.0 = Debug|Any CPU
|
||||
{EE463A2F-8DDB-42C5-BF63-48B9E2B4220C}.Release|Any CPU.ActiveCfg = Release|Any CPU
|
||||
{EE463A2F-8DDB-42C5-BF63-48B9E2B4220C}.Release|Any CPU.Build.0 = Release|Any CPU
|
||||
{EE463A2F-8DDB-42C5-BF63-48B9E2B4220C}.Release|x64.ActiveCfg = Release|Any CPU
|
||||
{EE463A2F-8DDB-42C5-BF63-48B9E2B4220C}.Release|x64.Build.0 = Release|Any CPU
|
||||
{EE463A2F-8DDB-42C5-BF63-48B9E2B4220C}.Release|x86.ActiveCfg = Release|Any CPU
|
||||
{EE463A2F-8DDB-42C5-BF63-48B9E2B4220C}.Release|x86.Build.0 = Release|Any CPU
|
||||
EndGlobalSection
|
||||
GlobalSection(SolutionProperties) = preSolution
|
||||
HideSolutionNode = FALSE
|
||||
@@ -1151,5 +1207,9 @@ Global
|
||||
{C8EE1699-99B6-4D64-B0DB-9E876C6E9EE4} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{A872DEC5-C3A7-4E8B-B2E3-D9A7B9255D21} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
|
||||
{71472842-BC50-4476-9224-31A9B0A1115A} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{C6118565-FEC6-4AA4-BF2B-81C765D4919E} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
|
||||
{89920F9B-17CC-4D54-9985-2A4C06631488} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
{0D15A8D6-076D-4701-B838-6C0DB971F1BD} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
|
||||
{EE463A2F-8DDB-42C5-BF63-48B9E2B4220C} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
|
||||
EndGlobalSection
|
||||
EndGlobal
|
||||
|
||||
src/Scanner/__Libraries/StellaOps.Scanner.Advisory/AGENTS.md (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
# AGENTS - Scanner Advisory Library
|
||||
|
||||
## Mission
|
||||
Provide advisory feed integration and offline bundles for CVE-to-symbol mapping used by reachability slices.
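
A minimal consumption sketch, assuming the `IAdvisoryClient` and `AdvisorySymbolMapping` types introduced elsewhere in this change; the surrounding slicer flow is illustrative only, not an existing API:

```csharp
// Hypothetical call site inside a slice-extraction component: resolve
// CVE-to-package/symbol hints before building the slice query.
AdvisorySymbolMapping? mapping =
    await advisoryClient.GetCveSymbolsAsync("CVE-2024-12345", cancellationToken);

if (mapping is not null)
{
    foreach (var package in mapping.Packages)
    {
        // package.Purl narrows the affected component; package.Symbols (when the
        // advisory supplies them) seeds the target-symbol list for the slice.
    }
}
```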
|
||||
|
||||
## Roles
|
||||
- Backend engineer (.NET 10, C# preview).
|
||||
- QA engineer (deterministic tests; offline fixtures).
|
||||
|
||||
## Required Reading
|
||||
- `docs/README.md`
|
||||
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
|
||||
- `docs/modules/platform/architecture-overview.md`
|
||||
- `docs/modules/scanner/architecture.md`
|
||||
- `docs/modules/concelier/architecture.md`
|
||||
- `docs/reachability/slice-schema.md`
|
||||
|
||||
## Working Directory & Boundaries
|
||||
- Primary scope: `src/Scanner/__Libraries/StellaOps.Scanner.Advisory/`
|
||||
- Tests: `src/Scanner/__Tests/StellaOps.Scanner.Advisory.Tests/`
|
||||
- Avoid cross-module edits unless explicitly noted in the sprint.
|
||||
|
||||
## Determinism & Offline Rules
|
||||
- Prefer offline advisory bundles; no network access in tests.
|
||||
- Cache advisory data deterministically with stable ordering and TTL control (see the sketch below).
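
A minimal sketch of that rule, assuming the `AdvisorySymbolMapping`/`AdvisoryPackageSymbols` records added in this change; the helper itself is hypothetical:

```csharp
using System.Collections.Immutable;
using Microsoft.Extensions.Caching.Memory;

// Normalize the key, order package URLs with a stable comparer, and bound the
// cache entry with an explicit TTL so repeated lookups produce identical output.
static AdvisorySymbolMapping CacheDeterministically(
    IMemoryCache cache, string cveId, IEnumerable<string> purls, TimeSpan ttl)
{
    var normalized = cveId.Trim().ToUpperInvariant();
    var packages = purls
        .Distinct(StringComparer.OrdinalIgnoreCase)
        .OrderBy(p => p, StringComparer.OrdinalIgnoreCase)   // stable ordering
        .Select(p => new AdvisoryPackageSymbols { Purl = p, Symbols = ImmutableArray<string>.Empty })
        .ToImmutableArray();

    var mapping = new AdvisorySymbolMapping { CveId = normalized, Packages = packages, Source = "offline-bundle" };
    cache.Set($"advisory:cve:{normalized}", mapping, ttl);   // TTL control
    return mapping;
}
```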
|
||||
|
||||
## Testing Expectations
|
||||
- Unit tests for HTTP client shape and offline fallback.
|
||||
- Deterministic serialization and cache hit/miss behavior.
|
||||
|
||||
## Workflow
|
||||
- Update sprint status on task transitions.
|
||||
- Record notable decisions in the sprint Execution Log.
|
||||
@@ -0,0 +1,74 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.Scanner.Advisory;
|
||||
|
||||
public interface IAdvisoryBundleStore
|
||||
{
|
||||
Task<AdvisorySymbolMapping?> TryGetAsync(string cveId, CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
public sealed class NullAdvisoryBundleStore : IAdvisoryBundleStore
|
||||
{
|
||||
public Task<AdvisorySymbolMapping?> TryGetAsync(string cveId, CancellationToken cancellationToken = default)
|
||||
=> Task.FromResult<AdvisorySymbolMapping?>(null);
|
||||
}
|
||||
|
||||
public sealed class FileAdvisoryBundleStore : IAdvisoryBundleStore
|
||||
{
|
||||
private readonly string _bundlePath;
|
||||
private readonly JsonSerializerOptions _serializerOptions = new(JsonSerializerDefaults.Web);
|
||||
private ImmutableDictionary<string, AdvisorySymbolMapping>? _cache;
|
||||
|
||||
public FileAdvisoryBundleStore(string bundlePath)
|
||||
{
|
||||
_bundlePath = bundlePath ?? throw new ArgumentNullException(nameof(bundlePath));
|
||||
}
|
||||
|
||||
public async Task<AdvisorySymbolMapping?> TryGetAsync(string cveId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(cveId))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var normalized = cveId.Trim().ToUpperInvariant();
|
||||
var cache = await LoadAsync(cancellationToken).ConfigureAwait(false);
|
||||
return cache.TryGetValue(normalized, out var mapping) ? mapping : null;
|
||||
}
|
||||
|
||||
private async Task<ImmutableDictionary<string, AdvisorySymbolMapping>> LoadAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
if (_cache is not null)
|
||||
{
|
||||
return _cache;
|
||||
}
|
||||
|
||||
if (!File.Exists(_bundlePath))
|
||||
{
|
||||
_cache = ImmutableDictionary<string, AdvisorySymbolMapping>.Empty;
|
||||
return _cache;
|
||||
}
|
||||
|
||||
await using var stream = File.OpenRead(_bundlePath);
|
||||
var bundle = await JsonSerializer.DeserializeAsync<AdvisoryBundleDocument>(stream, _serializerOptions, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
var items = bundle?.Items ?? Array.Empty<AdvisorySymbolMapping>();
|
||||
var builder = ImmutableDictionary.CreateBuilder<string, AdvisorySymbolMapping>(StringComparer.OrdinalIgnoreCase);
|
||||
foreach (var item in items)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(item.CveId))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
builder[item.CveId.Trim().ToUpperInvariant()] = item;
|
||||
}
|
||||
|
||||
_cache = builder.ToImmutable();
|
||||
return _cache;
|
||||
}
|
||||
|
||||
private sealed record AdvisoryBundleDocument(IReadOnlyList<AdvisorySymbolMapping> Items);
|
||||
}
|
||||
@@ -0,0 +1,196 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Net.Http.Json;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Caching.Memory;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.Advisory;
|
||||
|
||||
public sealed class AdvisoryClient : IAdvisoryClient
|
||||
{
|
||||
private readonly HttpClient _httpClient;
|
||||
private readonly IMemoryCache _cache;
|
||||
private readonly AdvisoryClientOptions _options;
|
||||
private readonly IAdvisoryBundleStore _bundleStore;
|
||||
private readonly ILogger<AdvisoryClient> _logger;
|
||||
private readonly JsonSerializerOptions _serializerOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
PropertyNameCaseInsensitive = true
|
||||
};
|
||||
|
||||
public AdvisoryClient(
|
||||
HttpClient httpClient,
|
||||
IMemoryCache cache,
|
||||
IOptions<AdvisoryClientOptions> options,
|
||||
IAdvisoryBundleStore bundleStore,
|
||||
ILogger<AdvisoryClient> logger)
|
||||
{
|
||||
_httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
|
||||
_cache = cache ?? throw new ArgumentNullException(nameof(cache));
|
||||
_options = (options ?? throw new ArgumentNullException(nameof(options))).Value;
|
||||
_bundleStore = bundleStore ?? throw new ArgumentNullException(nameof(bundleStore));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_httpClient.Timeout = TimeSpan.FromSeconds(Math.Max(1, _options.TimeoutSeconds));
|
||||
}
|
||||
|
||||
public async Task<AdvisorySymbolMapping?> GetCveSymbolsAsync(string cveId, CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(cveId))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var normalized = cveId.Trim().ToUpperInvariant();
|
||||
var cacheKey = $"advisory:cve:{normalized}";
|
||||
if (_cache.TryGetValue(cacheKey, out AdvisorySymbolMapping? cached))
|
||||
{
|
||||
return cached;
|
||||
}
|
||||
|
||||
AdvisorySymbolMapping? mapping = null;
|
||||
|
||||
if (_options.Enabled && !string.IsNullOrWhiteSpace(_options.BaseUrl))
|
||||
{
|
||||
mapping = await FetchFromConcelierAsync(normalized, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
mapping ??= await _bundleStore.TryGetAsync(normalized, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (mapping is not null)
|
||||
{
|
||||
var ttl = TimeSpan.FromMinutes(Math.Max(1, _options.CacheTtlMinutes));
|
||||
_cache.Set(cacheKey, mapping, ttl);
|
||||
}
|
||||
|
||||
return mapping;
|
||||
}
|
||||
|
||||
private async Task<AdvisorySymbolMapping?> FetchFromConcelierAsync(string cveId, CancellationToken cancellationToken)
|
||||
{
|
||||
try
|
||||
{
|
||||
ApplyHeaders();
|
||||
|
||||
var purls = _options.UseSearchEndpoint
|
||||
? await FetchPurlsFromSearchAsync(cveId, cancellationToken).ConfigureAwait(false)
|
||||
: await FetchPurlsFromLinksetAsync(cveId, cancellationToken).ConfigureAwait(false);
|
||||
|
||||
if (purls.IsDefaultOrEmpty)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var packages = purls
|
||||
.Distinct(StringComparer.OrdinalIgnoreCase)
|
||||
.OrderBy(p => p, StringComparer.OrdinalIgnoreCase)
|
||||
.Select(p => new AdvisoryPackageSymbols { Purl = p, Symbols = ImmutableArray<string>.Empty })
|
||||
.ToImmutableArray();
|
||||
|
||||
return new AdvisorySymbolMapping
|
||||
{
|
||||
CveId = cveId,
|
||||
Packages = packages,
|
||||
Source = "concelier"
|
||||
};
|
||||
}
|
||||
catch (Exception ex) when (ex is HttpRequestException or TaskCanceledException or JsonException)
|
||||
{
|
||||
_logger.LogWarning(ex, "Failed to fetch advisory mapping from Concelier for {CveId}", cveId);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<ImmutableArray<string>> FetchPurlsFromLinksetAsync(string cveId, CancellationToken cancellationToken)
|
||||
{
|
||||
var path = _options.LinksetEndpointTemplate.Replace("{cveId}", Uri.EscapeDataString(cveId), StringComparison.OrdinalIgnoreCase);
|
||||
using var response = await _httpClient.GetAsync(path, cancellationToken).ConfigureAwait(false);
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
return ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<LnmLinksetResponse>(_serializerOptions, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (payload is null)
|
||||
{
|
||||
return ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
return CollectPurls(payload).ToImmutableArray();
|
||||
}
|
||||
|
||||
private async Task<ImmutableArray<string>> FetchPurlsFromSearchAsync(string cveId, CancellationToken cancellationToken)
|
||||
{
|
||||
var request = new LnmLinksetSearchRequest(cveId);
|
||||
using var response = await _httpClient.PostAsJsonAsync(_options.SearchEndpoint, request, _serializerOptions, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (!response.IsSuccessStatusCode)
|
||||
{
|
||||
return ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
var payload = await response.Content.ReadFromJsonAsync<LnmLinksetPage>(_serializerOptions, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
if (payload?.Items is null)
|
||||
{
|
||||
return ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
var purls = payload.Items.SelectMany(CollectPurls)
|
||||
.Distinct(StringComparer.OrdinalIgnoreCase)
|
||||
.OrderBy(p => p, StringComparer.OrdinalIgnoreCase)
|
||||
.ToImmutableArray();
|
||||
|
||||
return purls;
|
||||
}
|
||||
|
||||
private void ApplyHeaders()
|
||||
{
|
||||
if (!string.IsNullOrWhiteSpace(_options.BaseUrl) && _httpClient.BaseAddress is null)
|
||||
{
|
||||
_httpClient.BaseAddress = new Uri(_options.BaseUrl.TrimEnd('/') + "/", UriKind.Absolute);
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(_options.Tenant))
|
||||
{
|
||||
_httpClient.DefaultRequestHeaders.Remove(_options.TenantHeaderName);
|
||||
_httpClient.DefaultRequestHeaders.TryAddWithoutValidation(_options.TenantHeaderName, _options.Tenant);
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(_options.ApiKey))
|
||||
{
|
||||
var header = string.IsNullOrWhiteSpace(_options.ApiKeyHeader) ? "Authorization" : _options.ApiKeyHeader;
|
||||
_httpClient.DefaultRequestHeaders.Remove(header);
|
||||
_httpClient.DefaultRequestHeaders.TryAddWithoutValidation(header, _options.ApiKey);
|
||||
}
|
||||
}
|
||||
|
||||
private static IEnumerable<string> CollectPurls(LnmLinksetResponse response)
|
||||
{
|
||||
if (response.Normalized?.Purl is { Count: > 0 } normalizedPurls)
|
||||
{
|
||||
return normalizedPurls;
|
||||
}
|
||||
|
||||
return response.Purl ?? Array.Empty<string>();
|
||||
}
|
||||
|
||||
private sealed record LnmLinksetResponse(
|
||||
string AdvisoryId,
|
||||
string Source,
|
||||
IReadOnlyList<string>? Purl,
|
||||
LnmLinksetNormalized? Normalized);
|
||||
|
||||
private sealed record LnmLinksetNormalized(
|
||||
IReadOnlyList<string>? Purl,
|
||||
IReadOnlyList<string>? Aliases);
|
||||
|
||||
private sealed record LnmLinksetPage(IReadOnlyList<LnmLinksetResponse> Items);
|
||||
|
||||
private sealed record LnmLinksetSearchRequest(
|
||||
[property: JsonPropertyName("cve")] string Cve,
|
||||
[property: JsonPropertyName("page")] int Page = 1,
|
||||
[property: JsonPropertyName("pageSize")] int PageSize = 100);
|
||||
}
|
||||
@@ -0,0 +1,26 @@
|
||||
namespace StellaOps.Scanner.Advisory;
|
||||
|
||||
public sealed class AdvisoryClientOptions
|
||||
{
|
||||
public bool Enabled { get; set; } = true;
|
||||
|
||||
public string? BaseUrl { get; set; }
|
||||
|
||||
public string? Tenant { get; set; }
|
||||
|
||||
public string TenantHeaderName { get; set; } = "X-Stella-Tenant";
|
||||
|
||||
public string? ApiKey { get; set; }
|
||||
|
||||
public string ApiKeyHeader { get; set; } = "Authorization";
|
||||
|
||||
public int TimeoutSeconds { get; set; } = 30;
|
||||
|
||||
public int CacheTtlMinutes { get; set; } = 60;
|
||||
|
||||
public string LinksetEndpointTemplate { get; set; } = "/v1/lnm/linksets/{cveId}";
|
||||
|
||||
public string SearchEndpoint { get; set; } = "/v1/lnm/linksets/search";
|
||||
|
||||
public bool UseSearchEndpoint { get; set; } = false;
|
||||
}
|
||||
@@ -0,0 +1,25 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.Advisory;
|
||||
|
||||
public sealed record AdvisorySymbolMapping
|
||||
{
|
||||
[JsonPropertyName("cveId")]
|
||||
public required string CveId { get; init; }
|
||||
|
||||
[JsonPropertyName("packages")]
|
||||
public ImmutableArray<AdvisoryPackageSymbols> Packages { get; init; } = ImmutableArray<AdvisoryPackageSymbols>.Empty;
|
||||
|
||||
[JsonPropertyName("source")]
|
||||
public required string Source { get; init; }
|
||||
}
|
||||
|
||||
public sealed record AdvisoryPackageSymbols
|
||||
{
|
||||
[JsonPropertyName("purl")]
|
||||
public required string Purl { get; init; }
|
||||
|
||||
[JsonPropertyName("symbols")]
|
||||
public ImmutableArray<string> Symbols { get; init; } = ImmutableArray<string>.Empty;
|
||||
}
|
||||
@@ -0,0 +1,6 @@
|
||||
namespace StellaOps.Scanner.Advisory;
|
||||
|
||||
public interface IAdvisoryClient
|
||||
{
|
||||
Task<AdvisorySymbolMapping?> GetCveSymbolsAsync(string cveId, CancellationToken cancellationToken = default);
|
||||
}
|
||||
@@ -0,0 +1,14 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
</PropertyGroup>
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0" />
|
||||
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0" />
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
|
||||
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -0,0 +1,184 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Native.RuntimeCapture.Timeline;
|
||||
|
||||
/// <summary>
|
||||
/// Runtime observation timeline for a finding.
|
||||
/// </summary>
|
||||
public sealed record RuntimeTimeline
|
||||
{
|
||||
/// <summary>
|
||||
/// Finding this timeline is for.
|
||||
/// </summary>
|
||||
public required Guid FindingId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Vulnerable component being tracked.
|
||||
/// </summary>
|
||||
public required string ComponentPurl { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Time window start.
|
||||
/// </summary>
|
||||
public required DateTimeOffset WindowStart { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Time window end.
|
||||
/// </summary>
|
||||
public required DateTimeOffset WindowEnd { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Overall posture based on observations.
|
||||
/// </summary>
|
||||
public required RuntimePosture Posture { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Posture explanation.
|
||||
/// </summary>
|
||||
public required string PostureExplanation { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Time buckets with observation summaries.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<TimelineBucket> Buckets { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Significant events in the timeline.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<TimelineEvent> Events { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Total observation count.
|
||||
/// </summary>
|
||||
public int TotalObservations => Buckets.Sum(b => b.ObservationCount);
|
||||
|
||||
/// <summary>
|
||||
/// Capture session digests.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<string> SessionDigests { get; init; }
|
||||
}
|
||||
|
||||
public enum RuntimePosture
|
||||
{
|
||||
/// <summary>No runtime data available.</summary>
|
||||
Unknown,
|
||||
|
||||
/// <summary>Runtime evidence supports the verdict.</summary>
|
||||
Supports,
|
||||
|
||||
/// <summary>Runtime evidence contradicts the verdict.</summary>
|
||||
Contradicts,
|
||||
|
||||
/// <summary>Runtime evidence is inconclusive.</summary>
|
||||
Inconclusive
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A time bucket in the timeline.
|
||||
/// </summary>
|
||||
public sealed record TimelineBucket
|
||||
{
|
||||
/// <summary>
|
||||
/// Bucket start time.
|
||||
/// </summary>
|
||||
public required DateTimeOffset Start { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Bucket end time.
|
||||
/// </summary>
|
||||
public required DateTimeOffset End { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of observations in this bucket.
|
||||
/// </summary>
|
||||
public required int ObservationCount { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Observation types in this bucket.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<ObservationTypeSummary> ByType { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether component was loaded in this bucket.
|
||||
/// </summary>
|
||||
public required bool ComponentLoaded { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether vulnerable code was executed.
|
||||
/// </summary>
|
||||
public bool? VulnerableCodeExecuted { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Summary of observations by type.
|
||||
/// </summary>
|
||||
public sealed record ObservationTypeSummary
|
||||
{
|
||||
public required ObservationType Type { get; init; }
|
||||
public required int Count { get; init; }
|
||||
}
|
||||
|
||||
public enum ObservationType
|
||||
{
|
||||
LibraryLoad,
|
||||
Syscall,
|
||||
NetworkConnection,
|
||||
FileAccess,
|
||||
ProcessSpawn,
|
||||
SymbolResolution
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A significant event in the timeline.
|
||||
/// </summary>
|
||||
public sealed record TimelineEvent
|
||||
{
|
||||
/// <summary>
|
||||
/// Event timestamp.
|
||||
/// </summary>
|
||||
public required DateTimeOffset Timestamp { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Event type.
|
||||
/// </summary>
|
||||
public required TimelineEventType Type { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Event description.
|
||||
/// </summary>
|
||||
public required string Description { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Significance level.
|
||||
/// </summary>
|
||||
public required EventSignificance Significance { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Related evidence digest.
|
||||
/// </summary>
|
||||
public string? EvidenceDigest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Additional details.
|
||||
/// </summary>
|
||||
public IReadOnlyDictionary<string, string> Details { get; init; }
|
||||
= new Dictionary<string, string>();
|
||||
}
|
||||
|
||||
public enum TimelineEventType
|
||||
{
|
||||
ComponentLoaded,
|
||||
ComponentUnloaded,
|
||||
VulnerableFunctionCalled,
|
||||
NetworkExposure,
|
||||
SyscallBlocked,
|
||||
ProcessForked,
|
||||
CaptureStarted,
|
||||
CaptureStopped
|
||||
}
|
||||
|
||||
public enum EventSignificance
|
||||
{
|
||||
Low,
|
||||
Medium,
|
||||
High,
|
||||
Critical
|
||||
}
|
||||
@@ -0,0 +1,257 @@
|
||||
namespace StellaOps.Scanner.Analyzers.Native.RuntimeCapture.Timeline;
|
||||
|
||||
public interface ITimelineBuilder
|
||||
{
|
||||
RuntimeTimeline Build(
|
||||
RuntimeEvidence evidence,
|
||||
string componentPurl,
|
||||
TimelineOptions options);
|
||||
}
|
||||
|
||||
public sealed class TimelineBuilder : ITimelineBuilder
|
||||
{
|
||||
public RuntimeTimeline Build(
|
||||
RuntimeEvidence evidence,
|
||||
string componentPurl,
|
||||
TimelineOptions options)
|
||||
{
|
||||
var windowStart = options.WindowStart ?? evidence.FirstObservation;
|
||||
var windowEnd = options.WindowEnd ?? evidence.LastObservation;
|
||||
|
||||
// Build time buckets
|
||||
var buckets = BuildBuckets(evidence, componentPurl, windowStart, windowEnd, options.BucketSize);
|
||||
|
||||
// Extract significant events
|
||||
var events = ExtractEvents(evidence, componentPurl);
|
||||
|
||||
// Determine posture
|
||||
var (posture, explanation) = DeterminePosture(buckets, events, componentPurl);
|
||||
|
||||
return new RuntimeTimeline
|
||||
{
|
||||
FindingId = Guid.Empty, // Set by caller
|
||||
ComponentPurl = componentPurl,
|
||||
WindowStart = windowStart,
|
||||
WindowEnd = windowEnd,
|
||||
Posture = posture,
|
||||
PostureExplanation = explanation,
|
||||
Buckets = buckets,
|
||||
Events = events.OrderBy(e => e.Timestamp).ToList(),
|
||||
SessionDigests = evidence.SessionDigests.ToList()
|
||||
};
|
||||
}
|
||||
|
||||
private List<TimelineBucket> BuildBuckets(
|
||||
RuntimeEvidence evidence,
|
||||
string componentPurl,
|
||||
DateTimeOffset start,
|
||||
DateTimeOffset end,
|
||||
TimeSpan bucketSize)
|
||||
{
|
||||
var buckets = new List<TimelineBucket>();
|
||||
var current = start;
|
||||
|
||||
while (current < end)
|
||||
{
|
||||
var bucketEnd = current + bucketSize;
|
||||
if (bucketEnd > end) bucketEnd = end;
|
||||
|
||||
var observations = evidence.Observations
|
||||
.Where(o => o.Timestamp >= current && o.Timestamp < bucketEnd)
|
||||
.ToList();
|
||||
|
||||
var byType = observations
|
||||
.GroupBy(o => ClassifyObservation(o))
|
||||
.Select(g => new ObservationTypeSummary
|
||||
{
|
||||
Type = g.Key,
|
||||
Count = g.Count()
|
||||
})
|
||||
.ToList();
|
||||
|
||||
var componentLoaded = observations.Any(o =>
|
||||
o.Type == "library_load" &&
|
||||
o.Path?.Contains(ExtractComponentName(componentPurl)) == true);
|
||||
|
||||
buckets.Add(new TimelineBucket
|
||||
{
|
||||
Start = current,
|
||||
End = bucketEnd,
|
||||
ObservationCount = observations.Count,
|
||||
ByType = byType,
|
||||
ComponentLoaded = componentLoaded,
|
||||
VulnerableCodeExecuted = componentLoaded ? DetectVulnerableExecution(observations) : null
|
||||
});
|
||||
|
||||
current = bucketEnd;
|
||||
}
|
||||
|
||||
return buckets;
|
||||
}
|
||||
|
||||
private List<TimelineEvent> ExtractEvents(RuntimeEvidence evidence, string componentPurl)
|
||||
{
|
||||
var events = new List<TimelineEvent>();
|
||||
var componentName = ExtractComponentName(componentPurl);
|
||||
|
||||
foreach (var obs in evidence.Observations)
|
||||
{
|
||||
if (obs.Type == "library_load" && obs.Path?.Contains(componentName) == true)
|
||||
{
|
||||
events.Add(new TimelineEvent
|
||||
{
|
||||
Timestamp = obs.Timestamp,
|
||||
Type = TimelineEventType.ComponentLoaded,
|
||||
Description = $"Component {componentName} loaded",
|
||||
Significance = EventSignificance.High,
|
||||
EvidenceDigest = obs.Digest,
|
||||
Details = new Dictionary<string, string>
|
||||
{
|
||||
["path"] = obs.Path ?? "",
|
||||
["process_id"] = obs.ProcessId.ToString()
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
if (obs.Type == "network" && obs.Port is > 0 and < 1024)
|
||||
{
|
||||
events.Add(new TimelineEvent
|
||||
{
|
||||
Timestamp = obs.Timestamp,
|
||||
Type = TimelineEventType.NetworkExposure,
|
||||
Description = $"Network exposure on port {obs.Port}",
|
||||
Significance = EventSignificance.Critical,
|
||||
EvidenceDigest = obs.Digest
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Add capture session events
|
||||
foreach (var session in evidence.Sessions)
|
||||
{
|
||||
events.Add(new TimelineEvent
|
||||
{
|
||||
Timestamp = session.StartTime,
|
||||
Type = TimelineEventType.CaptureStarted,
|
||||
Description = $"Capture session started ({session.Platform})",
|
||||
Significance = EventSignificance.Low
|
||||
});
|
||||
|
||||
if (session.EndTime.HasValue)
|
||||
{
|
||||
events.Add(new TimelineEvent
|
||||
{
|
||||
Timestamp = session.EndTime.Value,
|
||||
Type = TimelineEventType.CaptureStopped,
|
||||
Description = "Capture session stopped",
|
||||
Significance = EventSignificance.Low
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return events;
|
||||
}
|
||||
|
||||
private static (RuntimePosture posture, string explanation) DeterminePosture(
|
||||
List<TimelineBucket> buckets,
|
||||
List<TimelineEvent> events,
|
||||
string componentPurl)
|
||||
{
|
||||
if (buckets.Count == 0 || buckets.All(b => b.ObservationCount == 0))
|
||||
{
|
||||
return (RuntimePosture.Unknown, "No runtime observations collected");
|
||||
}
|
||||
|
||||
var componentLoadedCount = buckets.Count(b => b.ComponentLoaded);
|
||||
var totalBuckets = buckets.Count;
|
||||
|
||||
if (componentLoadedCount == 0)
|
||||
{
|
||||
return (RuntimePosture.Supports,
|
||||
$"Component {ExtractComponentName(componentPurl)} was not loaded during observation window");
|
||||
}
|
||||
|
||||
var hasNetworkExposure = events.Any(e => e.Type == TimelineEventType.NetworkExposure);
|
||||
var hasVulnerableExecution = buckets.Any(b => b.VulnerableCodeExecuted == true);
|
||||
|
||||
if (hasVulnerableExecution || hasNetworkExposure)
|
||||
{
|
||||
return (RuntimePosture.Contradicts,
|
||||
"Runtime evidence shows component is actively used and exposed");
|
||||
}
|
||||
|
||||
if (componentLoadedCount < totalBuckets / 2)
|
||||
{
|
||||
return (RuntimePosture.Inconclusive,
|
||||
$"Component loaded in {componentLoadedCount}/{totalBuckets} time periods");
|
||||
}
|
||||
|
||||
return (RuntimePosture.Supports,
|
||||
"Component loaded but no evidence of vulnerable code execution");
|
||||
}
|
||||
|
||||
private static ObservationType ClassifyObservation(RuntimeObservation obs)
|
||||
{
|
||||
return obs.Type switch
|
||||
{
|
||||
"library_load" or "dlopen" => ObservationType.LibraryLoad,
|
||||
"syscall" => ObservationType.Syscall,
|
||||
"network" or "connect" => ObservationType.NetworkConnection,
|
||||
"file" or "open" => ObservationType.FileAccess,
|
||||
"fork" or "exec" => ObservationType.ProcessSpawn,
|
||||
"symbol" => ObservationType.SymbolResolution,
|
||||
_ => ObservationType.LibraryLoad // default bucket for unrecognized observation types
|
||||
};
|
||||
}
|
||||
|
||||
private static string ExtractComponentName(string purl)
|
||||
{
|
||||
// Extract name from PURL like pkg:npm/lodash@4.17.21
|
||||
var parts = purl.Split('/');
|
||||
var namePart = parts.LastOrDefault() ?? purl;
|
||||
return namePart.Split('@').FirstOrDefault() ?? namePart;
|
||||
}
|
||||
|
||||
private static bool? DetectVulnerableExecution(List<RuntimeObservation> observations)
|
||||
{
|
||||
// Check if any observation indicates vulnerable code path execution
|
||||
return observations.Any(o =>
|
||||
o.Type == "symbol" ||
|
||||
o.Attributes?.ContainsKey("vulnerable_function") == true);
|
||||
}
|
||||
}
|
||||
|
||||
public sealed record TimelineOptions
|
||||
{
|
||||
public DateTimeOffset? WindowStart { get; init; }
|
||||
public DateTimeOffset? WindowEnd { get; init; }
|
||||
public TimeSpan BucketSize { get; init; } = TimeSpan.FromHours(1);
|
||||
}
|
||||
|
||||
// Simplified runtime evidence types for Timeline API
|
||||
public sealed record RuntimeEvidence
|
||||
{
|
||||
public required DateTimeOffset FirstObservation { get; init; }
|
||||
public required DateTimeOffset LastObservation { get; init; }
|
||||
public required IReadOnlyList<RuntimeObservation> Observations { get; init; }
|
||||
public required IReadOnlyList<RuntimeSession> Sessions { get; init; }
|
||||
public required IReadOnlyList<string> SessionDigests { get; init; }
|
||||
}
|
||||
|
||||
public sealed record RuntimeObservation
|
||||
{
|
||||
public required DateTimeOffset Timestamp { get; init; }
|
||||
public required string Type { get; init; }
|
||||
public string? Path { get; init; }
|
||||
public int? Port { get; init; }
|
||||
public int ProcessId { get; init; }
|
||||
public string? Digest { get; init; }
|
||||
public IReadOnlyDictionary<string, string>? Attributes { get; init; }
|
||||
}
|
||||
|
||||
public sealed record RuntimeSession
|
||||
{
|
||||
public required DateTimeOffset StartTime { get; init; }
|
||||
public DateTimeOffset? EndTime { get; init; }
|
||||
public required string Platform { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,35 @@
|
||||
# AGENTS - Scanner CallGraph Library
|
||||
|
||||
## Mission
|
||||
Provide deterministic call graph extraction for supported languages and native binaries, producing stable node/edge outputs for reachability analysis.
|
||||
|
||||
## Roles
|
||||
- Backend/analyzer engineer (.NET 10, C# preview).
|
||||
- QA engineer (unit + deterministic fixtures).
|
||||
|
||||
## Required Reading
|
||||
- `docs/README.md`
|
||||
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
|
||||
- `docs/modules/platform/architecture-overview.md`
|
||||
- `docs/modules/scanner/architecture.md`
|
||||
- `docs/reachability/DELIVERY_GUIDE.md`
|
||||
- `docs/reachability/binary-reachability-schema.md`
|
||||
|
||||
## Working Directory & Boundaries
|
||||
- Primary scope: `src/Scanner/__Libraries/StellaOps.Scanner.CallGraph/`
|
||||
- Tests: `src/Scanner/__Tests/StellaOps.Scanner.CallGraph.Tests/`
|
||||
- Avoid cross-module edits unless the sprint explicitly calls them out.
|
||||
|
||||
## Determinism & Offline Rules
|
||||
- Stable ordering for nodes/edges; avoid wall-clock timestamps in outputs (see the sketch after this list).
|
||||
- No network access or external binaries at runtime.
|
||||
- Normalize paths and symbol names consistently.
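
A minimal sketch of the ordering rule, assuming the `CallGraphEdge` record used by the extractors in this change; the helper is illustrative:

```csharp
using System.Collections.Immutable;

// Sort edges with ordinal comparers so two runs over the same inputs emit
// identical edge sequences regardless of enumeration order.
static ImmutableArray<CallGraphEdge> OrderDeterministically(IEnumerable<CallGraphEdge> edges)
    => edges
        .OrderBy(e => e.SourceId, StringComparer.Ordinal)
        .ThenBy(e => e.TargetId, StringComparer.Ordinal)
        .ThenBy(e => e.CallSite, StringComparer.Ordinal)
        .ToImmutableArray();
```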
|
||||
|
||||
## Testing Expectations
|
||||
- Add/extend unit tests for new extractors and edge kinds.
|
||||
- Use deterministic fixtures/golden outputs; document inputs in test comments when needed.
|
||||
- Run `dotnet test src/Scanner/StellaOps.Scanner.sln` when feasible.
|
||||
|
||||
## Workflow
|
||||
- Update sprint status on start/finish (`TODO -> DOING -> DONE/BLOCKED`).
|
||||
- Record notable decisions in the sprint Execution Log.
|
||||
@@ -0,0 +1,128 @@
|
||||
using System.Collections.Immutable;
|
||||
using StellaOps.Scanner.CallGraph;
|
||||
using StellaOps.Scanner.CallGraph.Binary;
|
||||
|
||||
namespace StellaOps.Scanner.CallGraph.Binary.Analysis;
|
||||
|
||||
internal sealed class BinaryDynamicLoadDetector
|
||||
{
|
||||
private static readonly string[] LoaderSymbols =
|
||||
[
|
||||
"dlopen",
|
||||
"dlsym",
|
||||
"dlmopen",
|
||||
"LoadLibraryA",
|
||||
"LoadLibraryW",
|
||||
"LoadLibraryExA",
|
||||
"LoadLibraryExW",
|
||||
"GetProcAddress"
|
||||
];
|
||||
|
||||
private readonly BinaryStringLiteralScanner _stringScanner;
|
||||
|
||||
public BinaryDynamicLoadDetector(BinaryStringLiteralScanner? stringScanner = null)
|
||||
{
|
||||
_stringScanner = stringScanner ?? new BinaryStringLiteralScanner();
|
||||
}
|
||||
|
||||
public async Task<ImmutableArray<CallGraphEdge>> ExtractAsync(
|
||||
string path,
|
||||
BinaryFormat format,
|
||||
string binaryName,
|
||||
IReadOnlyCollection<CallGraphEdge> directEdges,
|
||||
IReadOnlyCollection<BinaryRelocation> relocations,
|
||||
CancellationToken ct)
|
||||
{
|
||||
ct.ThrowIfCancellationRequested();
|
||||
|
||||
var loaderNames = new HashSet<string>(LoaderSymbols, StringComparer.OrdinalIgnoreCase);
|
||||
var loaderSources = new HashSet<string>(StringComparer.Ordinal);
|
||||
var loaderTargets = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
|
||||
foreach (var edge in directEdges)
|
||||
{
|
||||
if (TryGetSymbol(edge.TargetId, out var targetSymbol)
|
||||
&& loaderNames.Contains(targetSymbol))
|
||||
{
|
||||
loaderSources.Add(edge.SourceId);
|
||||
loaderTargets.Add(targetSymbol);
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var reloc in relocations)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(reloc.TargetSymbol))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (loaderNames.Contains(reloc.TargetSymbol))
|
||||
{
|
||||
loaderTargets.Add(reloc.TargetSymbol);
|
||||
}
|
||||
}
|
||||
|
||||
if (loaderSources.Count == 0 && loaderTargets.Count == 0)
|
||||
{
|
||||
return ImmutableArray<CallGraphEdge>.Empty;
|
||||
}
|
||||
|
||||
if (loaderSources.Count == 0)
|
||||
{
|
||||
foreach (var target in loaderTargets)
|
||||
{
|
||||
loaderSources.Add($"native:{binaryName}/{target}");
|
||||
}
|
||||
}
|
||||
|
||||
var candidates = await _stringScanner.ExtractLibraryCandidatesAsync(path, format, ct);
|
||||
if (candidates.IsDefaultOrEmpty)
|
||||
{
|
||||
return ImmutableArray<CallGraphEdge>.Empty;
|
||||
}
|
||||
|
||||
var orderedSources = loaderSources.OrderBy(value => value, StringComparer.Ordinal).ToArray();
|
||||
var orderedCandidates = candidates.OrderBy(value => value, StringComparer.Ordinal).ToArray();
|
||||
|
||||
var edges = ImmutableArray.CreateBuilder<CallGraphEdge>(orderedSources.Length * orderedCandidates.Length);
|
||||
foreach (var source in orderedSources)
|
||||
{
|
||||
foreach (var candidate in orderedCandidates)
|
||||
{
|
||||
var targetId = $"native:external/{candidate}";
|
||||
edges.Add(new CallGraphEdge(
|
||||
SourceId: source,
|
||||
TargetId: targetId,
|
||||
CallKind: CallKind.Dynamic,
|
||||
CallSite: $"string:{candidate}"));
|
||||
}
|
||||
}
|
||||
|
||||
return edges.ToImmutable();
|
||||
}
|
||||
|
||||
private static bool TryGetSymbol(string nodeId, out string symbol)
|
||||
{
|
||||
symbol = string.Empty;
|
||||
if (string.IsNullOrWhiteSpace(nodeId))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
const string prefix = "native:";
|
||||
if (!nodeId.StartsWith(prefix, StringComparison.Ordinal))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
var remainder = nodeId.Substring(prefix.Length);
|
||||
var slashIndex = remainder.IndexOf('/');
|
||||
if (slashIndex < 0 || slashIndex == remainder.Length - 1)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
symbol = remainder[(slashIndex + 1)..];
|
||||
return !string.IsNullOrWhiteSpace(symbol);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,464 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text;
|
||||
using StellaOps.Scanner.CallGraph.Binary;
|
||||
|
||||
namespace StellaOps.Scanner.CallGraph.Binary.Analysis;
|
||||
|
||||
internal sealed class BinaryStringLiteralScanner
|
||||
{
|
||||
private const int MinStringLength = 4;
|
||||
|
||||
public async Task<ImmutableArray<string>> ExtractLibraryCandidatesAsync(
|
||||
string path,
|
||||
BinaryFormat format,
|
||||
CancellationToken ct)
|
||||
{
|
||||
ct.ThrowIfCancellationRequested();
|
||||
|
||||
var sections = await ReadStringSectionsAsync(path, format, ct);
|
||||
if (sections.Count == 0)
|
||||
{
|
||||
return ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
var candidates = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
|
||||
foreach (var section in sections)
|
||||
{
|
||||
foreach (var value in ExtractStrings(section))
|
||||
{
|
||||
var normalized = NormalizeCandidate(value);
|
||||
if (string.IsNullOrWhiteSpace(normalized))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (IsLibraryCandidate(normalized))
|
||||
{
|
||||
candidates.Add(normalized);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return candidates
|
||||
.OrderBy(value => value, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
private static IEnumerable<string> ExtractStrings(byte[] bytes)
|
||||
{
|
||||
if (bytes.Length == 0)
|
||||
{
|
||||
yield break;
|
||||
}
|
||||
|
||||
var builder = new StringBuilder();
|
||||
for (var i = 0; i < bytes.Length; i++)
|
||||
{
|
||||
var current = bytes[i];
|
||||
if (current >= 0x20 && current <= 0x7E)
|
||||
{
|
||||
builder.Append((char)current);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (builder.Length >= MinStringLength)
|
||||
{
|
||||
yield return builder.ToString();
|
||||
}
|
||||
|
||||
builder.Clear();
|
||||
}
|
||||
|
||||
if (builder.Length >= MinStringLength)
|
||||
{
|
||||
yield return builder.ToString();
|
||||
}
|
||||
}
|
||||
|
||||
private static string NormalizeCandidate(string value)
|
||||
{
|
||||
var trimmed = value.Trim().Trim('"', '\'');
|
||||
if (trimmed.Length == 0)
|
||||
{
|
||||
return string.Empty;
|
||||
}
|
||||
|
||||
return trimmed.Replace('\\', '/');
|
||||
}
|
||||
|
||||
private static bool IsLibraryCandidate(string value)
|
||||
{
|
||||
var lowered = value.ToLowerInvariant();
|
||||
|
||||
if (lowered.EndsWith(".dll", StringComparison.Ordinal)
|
||||
|| lowered.EndsWith(".dylib", StringComparison.Ordinal))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
if (lowered.Contains(".so", StringComparison.Ordinal))
|
||||
{
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private static async Task<List<byte[]>> ReadStringSectionsAsync(
|
||||
string path,
|
||||
BinaryFormat format,
|
||||
CancellationToken ct)
|
||||
{
|
||||
return format switch
|
||||
{
|
||||
BinaryFormat.Elf => await ReadElfStringSectionsAsync(path, ct),
|
||||
BinaryFormat.Pe => await ReadPeStringSectionsAsync(path, ct),
|
||||
BinaryFormat.MachO => await ReadMachOStringSectionsAsync(path, ct),
|
||||
_ => []
|
||||
};
|
||||
}
|
||||
|
||||
private static async Task<List<byte[]>> ReadElfStringSectionsAsync(string path, CancellationToken ct)
|
||||
{
|
||||
using var stream = File.OpenRead(path);
|
||||
using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
|
||||
|
||||
var ident = reader.ReadBytes(16);
|
||||
if (ident.Length < 16)
|
||||
{
|
||||
return [];
|
||||
}
|
||||
|
||||
var is64Bit = ident[4] == 2;
|
||||
var isLittleEndian = ident[5] == 1;
|
||||
if (!isLittleEndian)
|
||||
{
|
||||
return [];
|
||||
}
|
||||
|
||||
stream.Seek(is64Bit ? 40 : 32, SeekOrigin.Begin);
|
||||
var sectionHeaderOffset = is64Bit ? reader.ReadInt64() : reader.ReadInt32();
|
||||
stream.Seek(is64Bit ? 58 : 46, SeekOrigin.Begin);
|
||||
var sectionHeaderSize = reader.ReadUInt16();
|
||||
var sectionHeaderCount = reader.ReadUInt16();
|
||||
var strTabIndex = reader.ReadUInt16();
|
||||
|
||||
if (sectionHeaderOffset <= 0 || sectionHeaderCount == 0)
|
||||
{
|
||||
return [];
|
||||
}
|
||||
|
||||
var nameTableOffset = ReadElfSectionOffset(reader, stream, sectionHeaderOffset, sectionHeaderSize, strTabIndex, is64Bit);
|
||||
var nameTableSize = ReadElfSectionSize(reader, stream, sectionHeaderOffset, sectionHeaderSize, strTabIndex, is64Bit);
|
||||
if (nameTableOffset <= 0 || nameTableSize <= 0)
|
||||
{
|
||||
return [];
|
||||
}
|
||||
|
||||
stream.Seek(nameTableOffset, SeekOrigin.Begin);
|
||||
var nameTable = reader.ReadBytes((int)nameTableSize);
|
||||
|
||||
var sections = new List<byte[]>();
|
||||
|
||||
for (int i = 0; i < sectionHeaderCount; i++)
|
||||
{
|
||||
ct.ThrowIfCancellationRequested();
|
||||
|
||||
stream.Seek(sectionHeaderOffset + i * sectionHeaderSize, SeekOrigin.Begin);
|
||||
var nameIndex = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // sh_type
|
||||
|
||||
if (is64Bit)
|
||||
{
|
||||
reader.ReadUInt64(); // sh_flags
|
||||
reader.ReadUInt64(); // sh_addr
|
||||
var offset = reader.ReadInt64();
|
||||
var size = reader.ReadInt64();
|
||||
if (ShouldReadSection(nameTable, nameIndex) && offset > 0 && size > 0)
|
||||
{
|
||||
sections.Add(ReadSection(reader, stream, offset, size));
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
reader.ReadUInt32(); // sh_flags
|
||||
reader.ReadUInt32(); // sh_addr
|
||||
var offset = reader.ReadInt32();
|
||||
var size = reader.ReadInt32();
|
||||
if (ShouldReadSection(nameTable, nameIndex) && offset > 0 && size > 0)
|
||||
{
|
||||
sections.Add(ReadSection(reader, stream, offset, size));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await Task.CompletedTask;
|
||||
return sections;
|
||||
}
|
||||
|
||||
private static bool ShouldReadSection(byte[] nameTable, uint nameIndex)
|
||||
{
|
||||
var name = ReadNullTerminatedString(nameTable, (int)nameIndex);
|
||||
if (string.IsNullOrWhiteSpace(name))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
return name.Contains("rodata", StringComparison.Ordinal)
|
||||
|| name.Contains("rdata", StringComparison.Ordinal)
|
||||
|| name.Contains("data", StringComparison.Ordinal)
|
||||
|| name.Contains("cstring", StringComparison.Ordinal);
|
||||
}
|
||||
|
||||
private static async Task<List<byte[]>> ReadPeStringSectionsAsync(string path, CancellationToken ct)
|
||||
{
|
||||
using var stream = File.OpenRead(path);
|
||||
using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
|
||||
|
||||
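// 0x3C = e_lfanew in the DOS header: file offset of the PE signature.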
stream.Seek(0x3C, SeekOrigin.Begin);
|
||||
var peOffset = reader.ReadInt32();
|
||||
|
||||
stream.Seek(peOffset, SeekOrigin.Begin);
|
||||
var signature = reader.ReadUInt32();
|
||||
if (signature != 0x00004550)
|
||||
{
|
||||
return [];
|
||||
}
|
||||
|
||||
reader.ReadUInt16(); // machine
|
||||
var numberOfSections = reader.ReadUInt16();
|
||||
reader.ReadUInt32(); // timestamp
|
||||
reader.ReadUInt32(); // symbol table ptr
|
||||
reader.ReadUInt32(); // number of symbols
|
||||
var optionalHeaderSize = reader.ReadUInt16();
|
||||
reader.ReadUInt16(); // characteristics
|
||||
|
||||
if (optionalHeaderSize == 0)
|
||||
{
|
||||
return [];
|
||||
}
|
||||
|
||||
stream.Seek(stream.Position + optionalHeaderSize, SeekOrigin.Begin);
|
||||
|
||||
var sections = new List<byte[]>();
|
||||
for (int i = 0; i < numberOfSections; i++)
|
||||
{
|
||||
ct.ThrowIfCancellationRequested();
|
||||
|
||||
var nameBytes = reader.ReadBytes(8);
|
||||
var name = Encoding.ASCII.GetString(nameBytes).TrimEnd('\0');
|
||||
reader.ReadUInt32(); // virtual size
|
||||
reader.ReadUInt32(); // virtual address
|
||||
var sizeOfRawData = reader.ReadUInt32();
|
||||
var pointerToRawData = reader.ReadUInt32();
|
||||
|
||||
reader.ReadUInt32(); // pointer to relocations
|
||||
reader.ReadUInt32(); // pointer to line numbers
|
||||
reader.ReadUInt16(); // number of relocations
|
||||
reader.ReadUInt16(); // number of line numbers
|
||||
reader.ReadUInt32(); // characteristics
|
||||
|
||||
if (!IsPeStringSection(name) || pointerToRawData == 0 || sizeOfRawData == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
sections.Add(ReadSection(reader, stream, pointerToRawData, sizeOfRawData));
|
||||
}
|
||||
|
||||
await Task.CompletedTask;
|
||||
return sections;
|
||||
}
|
||||
|
||||
private static bool IsPeStringSection(string name)
{
return string.Equals(name, ".rdata", StringComparison.Ordinal)
|| string.Equals(name, ".data", StringComparison.Ordinal)
|| string.Equals(name, ".rodata", StringComparison.Ordinal);
}
|
||||
|
||||
private static async Task<List<byte[]>> ReadMachOStringSectionsAsync(string path, CancellationToken ct)
|
||||
{
|
||||
using var stream = File.OpenRead(path);
|
||||
using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
|
||||
|
||||
var magic = reader.ReadUInt32();
|
||||
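// 0xFEEDFACF = MH_MAGIC_64; 0xCEFAEDFE/0xCFFAEDFE are byte-swapped magics and are skipped below.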
var is64Bit = magic is 0xFEEDFACF or 0xCFFAEDFE;
|
||||
var isSwapped = magic is 0xCEFAEDFE or 0xCFFAEDFE;
|
||||
if (isSwapped)
|
||||
{
|
||||
return [];
|
||||
}
|
||||
|
||||
reader.ReadInt32(); // cputype
|
||||
reader.ReadInt32(); // cpusubtype
|
||||
reader.ReadUInt32(); // filetype
|
||||
var ncmds = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // sizeofcmds
|
||||
reader.ReadUInt32(); // flags
|
||||
if (is64Bit)
|
||||
{
|
||||
reader.ReadUInt32(); // reserved
|
||||
}
|
||||
|
||||
var sections = new List<byte[]>();
|
||||
|
||||
for (int i = 0; i < ncmds; i++)
|
||||
{
|
||||
ct.ThrowIfCancellationRequested();
|
||||
|
||||
var cmdStart = stream.Position;
|
||||
var cmd = reader.ReadUInt32();
|
||||
var cmdsize = reader.ReadUInt32();
|
||||
|
||||
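// LC_SEGMENT_64 = 0x19, LC_SEGMENT = 0x01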
var isSegment = cmd == (is64Bit ? 0x19u : 0x1u);
|
||||
if (!isSegment)
|
||||
{
|
||||
stream.Seek(cmdStart + cmdsize, SeekOrigin.Begin);
|
||||
continue;
|
||||
}
|
||||
|
||||
reader.ReadBytes(16); // segname
|
||||
if (is64Bit)
|
||||
{
|
||||
reader.ReadUInt64(); // vmaddr
|
||||
reader.ReadUInt64(); // vmsize
|
||||
reader.ReadUInt64(); // fileoff
|
||||
reader.ReadUInt64(); // filesize
|
||||
reader.ReadInt32(); // maxprot
|
||||
reader.ReadInt32(); // initprot
|
||||
var nsects = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // flags
|
||||
|
||||
for (int s = 0; s < nsects; s++)
|
||||
{
|
||||
var sectName = ReadFixedString(reader, 16);
|
||||
reader.ReadBytes(16); // segname
|
||||
reader.ReadUInt64(); // addr
|
||||
var size = reader.ReadUInt64();
|
||||
var offset = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // align
|
||||
reader.ReadUInt32(); // reloff
|
||||
reader.ReadUInt32(); // nreloc
|
||||
reader.ReadUInt32(); // flags
|
||||
reader.ReadUInt32(); // reserved1
|
||||
reader.ReadUInt32(); // reserved2
|
||||
reader.ReadUInt32(); // reserved3
|
||||
|
||||
if (IsMachOStringSection(sectName) && offset > 0 && size > 0)
|
||||
{
|
||||
sections.Add(ReadSection(reader, stream, (long)offset, (long)size));
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
reader.ReadUInt32(); // vmaddr
|
||||
reader.ReadUInt32(); // vmsize
|
||||
reader.ReadUInt32(); // fileoff
|
||||
reader.ReadUInt32(); // filesize
|
||||
reader.ReadInt32(); // maxprot
|
||||
reader.ReadInt32(); // initprot
|
||||
var nsects = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // flags
|
||||
|
||||
for (int s = 0; s < nsects; s++)
|
||||
{
|
||||
var sectName = ReadFixedString(reader, 16);
|
||||
reader.ReadBytes(16); // segname
|
||||
reader.ReadUInt32(); // addr
|
||||
var size = reader.ReadUInt32();
|
||||
var offset = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // align
|
||||
reader.ReadUInt32(); // reloff
|
||||
reader.ReadUInt32(); // nreloc
|
||||
reader.ReadUInt32(); // flags
|
||||
reader.ReadUInt32(); // reserved1
|
||||
reader.ReadUInt32(); // reserved2
|
||||
|
||||
if (IsMachOStringSection(sectName) && offset > 0 && size > 0)
|
||||
{
|
||||
sections.Add(ReadSection(reader, stream, (long)offset, (long)size));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
stream.Seek(cmdStart + cmdsize, SeekOrigin.Begin);
|
||||
}
|
||||
|
||||
await Task.CompletedTask;
|
||||
return sections;
|
||||
}
|
||||
|
||||
private static bool IsMachOStringSection(string sectName)
{
return string.Equals(sectName, "__cstring", StringComparison.Ordinal)
|| string.Equals(sectName, "__const", StringComparison.Ordinal)
|| string.Equals(sectName, "__data", StringComparison.Ordinal);
}
|
||||
|
||||
private static byte[] ReadSection(BinaryReader reader, Stream stream, long offset, long size)
{
if (offset < 0 || size <= 0 || offset + size > stream.Length)
{
return Array.Empty<byte>();
}

var current = stream.Position;
stream.Seek(offset, SeekOrigin.Begin);
var bytes = reader.ReadBytes((int)size);
stream.Seek(current, SeekOrigin.Begin);
return bytes;
}

private static byte[] ReadSection(BinaryReader reader, Stream stream, uint offset, uint size)
=> ReadSection(reader, stream, (long)offset, (long)size);
|
||||
|
||||
private static long ReadElfSectionOffset(BinaryReader reader, Stream stream, long sectionHeaderOffset, ushort entrySize, ushort index, bool is64Bit)
{
var position = sectionHeaderOffset + index * entrySize;
return ReadElfSectionOffset(reader, stream, position, is64Bit);
}

private static long ReadElfSectionOffset(BinaryReader reader, Stream stream, long position, bool is64Bit)
{
// sh_offset sits at +24 in an Elf64_Shdr and +16 in an Elf32_Shdr.
stream.Seek(position + (is64Bit ? 24 : 16), SeekOrigin.Begin);
return is64Bit ? reader.ReadInt64() : reader.ReadInt32();
}

private static long ReadElfSectionSize(BinaryReader reader, Stream stream, long sectionHeaderOffset, ushort entrySize, ushort index, bool is64Bit)
{
var position = sectionHeaderOffset + index * entrySize;
return ReadElfSectionSize(reader, stream, position, is64Bit);
}

private static long ReadElfSectionSize(BinaryReader reader, Stream stream, long position, bool is64Bit)
{
// sh_size sits at +32 in an Elf64_Shdr and +20 in an Elf32_Shdr.
stream.Seek(position + (is64Bit ? 32 : 20), SeekOrigin.Begin);
return is64Bit ? reader.ReadInt64() : reader.ReadInt32();
}
|
||||
|
||||
private static string ReadFixedString(BinaryReader reader, int length)
{
var bytes = reader.ReadBytes(length);
var nullIndex = Array.IndexOf(bytes, (byte)0);
var count = nullIndex >= 0 ? nullIndex : bytes.Length;
return Encoding.ASCII.GetString(bytes, 0, count);
}
|
||||
|
||||
private static string ReadNullTerminatedString(byte[] buffer, int offset)
{
if (offset < 0 || offset >= buffer.Length)
{
return string.Empty;
}

var end = offset;
while (end < buffer.Length && buffer[end] != 0)
{
end++;
}

return Encoding.UTF8.GetString(buffer, offset, end - offset);
}
}
|
||||
@@ -6,6 +6,8 @@
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.CallGraph.Binary.Analysis;
|
||||
using StellaOps.Scanner.CallGraph.Binary.Disassembly;
|
||||
using StellaOps.Scanner.Reachability;
|
||||
|
||||
namespace StellaOps.Scanner.CallGraph.Binary;
|
||||
@@ -19,6 +21,8 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
private readonly ILogger<BinaryCallGraphExtractor> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly BinaryEntrypointClassifier _entrypointClassifier;
|
||||
private readonly DirectCallExtractor _directCallExtractor;
|
||||
private readonly BinaryDynamicLoadDetector _dynamicLoadDetector;
|
||||
|
||||
public BinaryCallGraphExtractor(
|
||||
ILogger<BinaryCallGraphExtractor> logger,
|
||||
@@ -27,6 +31,8 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_entrypointClassifier = new BinaryEntrypointClassifier();
|
||||
_directCallExtractor = new DirectCallExtractor();
|
||||
_dynamicLoadDetector = new BinaryDynamicLoadDetector();
|
||||
}
|
||||
|
||||
/// <inheritdoc />
|
||||
@@ -70,7 +76,18 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
_ => []
|
||||
};
|
||||
|
||||
return BuildSnapshot(request.ScanId, targetPath, symbols, relocations);
|
||||
var directEdges = await ExtractDirectCallEdgesAsync(targetPath, format, symbols, cancellationToken);
|
||||
var dynamicEdges = await _dynamicLoadDetector.ExtractAsync(
|
||||
targetPath,
|
||||
format,
|
||||
Path.GetFileName(targetPath),
|
||||
directEdges,
|
||||
relocations,
|
||||
cancellationToken);
|
||||
|
||||
var extraEdges = directEdges.Concat(dynamicEdges).ToArray();
|
||||
|
||||
return BuildSnapshot(request.ScanId, targetPath, symbols, relocations, extraEdges);
|
||||
}
|
||||
|
||||
private async Task<BinaryFormat> DetectBinaryFormatAsync(string path, CancellationToken ct)
|
||||
@@ -107,6 +124,31 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
throw new NotSupportedException($"Unknown binary format: {path}");
|
||||
}
|
||||
|
||||
private async Task<IReadOnlyCollection<CallGraphEdge>> ExtractDirectCallEdgesAsync(
|
||||
string path,
|
||||
BinaryFormat format,
|
||||
List<BinarySymbol> symbols,
|
||||
CancellationToken ct)
|
||||
{
|
||||
var textSection = await BinaryTextSectionReader.TryReadAsync(path, format, ct);
|
||||
if (textSection is null)
|
||||
{
|
||||
return Array.Empty<CallGraphEdge>();
|
||||
}
|
||||
|
||||
if (textSection.Architecture == BinaryArchitecture.Unknown)
|
||||
{
|
||||
_logger.LogDebug("Skipping disassembly; unknown architecture for {Path}", path);
|
||||
return Array.Empty<CallGraphEdge>();
|
||||
}
|
||||
|
||||
var binaryName = Path.GetFileName(path);
|
||||
var edges = _directCallExtractor.Extract(textSection, symbols, binaryName);
|
||||
|
||||
_logger.LogDebug("Extracted {Count} direct call edges from .text", edges.Length);
|
||||
return edges;
|
||||
}
|
||||
|
||||
private async Task<List<BinarySymbol>> ExtractElfSymbolsAsync(string path, CancellationToken ct)
|
||||
{
|
||||
var symbols = new List<BinarySymbol>();
|
||||
@@ -255,6 +297,7 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
reader.ReadUInt16(); // characteristics
|
||||
|
||||
var is64Bit = machine == 0x8664; // AMD64
|
||||
var sectionBases = new ulong[numberOfSections + 1];
|
||||
|
||||
// Read optional header to get export directory
|
||||
if (optionalHeaderSize > 0)
|
||||
@@ -271,6 +314,28 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
// For now, just log that exports exist
|
||||
_logger.LogDebug("PE has export directory at RVA 0x{Rva:X}", exportRva);
|
||||
}
|
||||
|
||||
var sectionHeadersStart = optionalHeaderStart + optionalHeaderSize;
|
||||
var currentPos = stream.Position;
|
||||
stream.Seek(sectionHeadersStart, SeekOrigin.Begin);
|
||||
|
||||
for (int i = 0; i < numberOfSections; i++)
|
||||
{
|
||||
reader.ReadBytes(8); // name
|
||||
reader.ReadUInt32(); // virtual size
|
||||
var virtualAddress = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // size of raw data
|
||||
reader.ReadUInt32(); // pointer to raw data
|
||||
reader.ReadUInt32(); // pointer to relocations
|
||||
reader.ReadUInt32(); // pointer to line numbers
|
||||
reader.ReadUInt16(); // number of relocations
|
||||
reader.ReadUInt16(); // number of line numbers
|
||||
reader.ReadUInt32(); // characteristics
|
||||
|
||||
sectionBases[i + 1] = virtualAddress;
|
||||
}
|
||||
|
||||
stream.Seek(currentPos, SeekOrigin.Begin);
|
||||
}
|
||||
|
||||
// Read COFF symbol table if present
|
||||
@@ -310,10 +375,15 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
name = System.Text.Encoding.ASCII.GetString(nameBytes).TrimEnd('\0');
|
||||
}
|
||||
|
||||
var baseAddress = section > 0 && section < sectionBases.Length
|
||||
? sectionBases[section]
|
||||
: 0;
|
||||
var resolvedAddress = baseAddress + value;
|
||||
|
||||
symbols.Add(new BinarySymbol
|
||||
{
|
||||
Name = name,
|
||||
Address = value,
|
||||
Address = resolvedAddress,
|
||||
Size = 0, // PE doesn't store function size in symbol table
|
||||
IsGlobal = storageClass == 2, // IMAGE_SYM_CLASS_EXTERNAL
|
||||
IsExported = false // Would need to check export directory
|
||||
@@ -476,6 +546,7 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
{
|
||||
// Process relocation section
|
||||
var isRela = shType == 4;
|
||||
var isPltReloc = sectionName.Contains(".plt", StringComparison.Ordinal);
|
||||
var entrySize = is64Bit
|
||||
? (isRela ? 24 : 16)
|
||||
: (isRela ? 12 : 8);
|
||||
@@ -511,9 +582,10 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
{
|
||||
Address = relocOffset,
|
||||
SymbolIndex = (int)symIndex,
|
||||
SourceSymbol = "", // Will be resolved later
|
||||
SourceSymbol = isPltReloc ? "__plt__" : "",
|
||||
TargetSymbol = "", // Will be resolved later
|
||||
IsExternal = true
|
||||
IsExternal = true,
|
||||
CallKind = isPltReloc ? CallKind.Plt : CallKind.Direct
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -593,13 +665,20 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
var magic = reader.ReadUInt16();
|
||||
var is64Bit = magic == 0x20b; // PE32+
|
||||
|
||||
// Skip to data directories
|
||||
stream.Seek(optionalHeaderStart + (is64Bit ? 112 : 96), SeekOrigin.Begin);
|
||||
|
||||
// Read import table RVA and size (directory entry 1)
|
||||
stream.Seek(8, SeekOrigin.Current); // Skip export table
|
||||
// Read data directories
|
||||
var dataDirectoryOffset = optionalHeaderStart + (is64Bit ? 112 : 96);
|
||||
stream.Seek(dataDirectoryOffset, SeekOrigin.Begin);
|
||||
var exportTableRva = reader.ReadUInt32();
|
||||
var exportTableSize = reader.ReadUInt32();
|
||||
var importTableRva = reader.ReadUInt32();
|
||||
var importTableSize = reader.ReadUInt32();
|
||||
stream.Seek(dataDirectoryOffset + 13 * 8, SeekOrigin.Begin); // delay import entry
|
||||
var delayImportRva = reader.ReadUInt32();
|
||||
var delayImportSize = reader.ReadUInt32();
|
||||
_ = exportTableRva;
|
||||
_ = exportTableSize;
|
||||
_ = importTableSize;
|
||||
_ = delayImportSize;
|
||||
|
||||
if (importTableRva == 0)
|
||||
{
|
||||
@@ -618,6 +697,25 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
// Parse import directory
|
||||
stream.Seek(importTableOffset, SeekOrigin.Begin);
|
||||
|
||||
ReadPeImportTable(stream, reader, sectionHeadersStart, numberOfSections, is64Bit, importTableOffset, relocations);
|
||||
ReadPeDelayImportTable(stream, reader, sectionHeadersStart, numberOfSections, is64Bit, delayImportRva, relocations);
|
||||
|
||||
await Task.CompletedTask;
|
||||
_logger.LogDebug("Extracted {Count} imports from PE", relocations.Count);
|
||||
return relocations;
|
||||
}
|
||||
|
||||
private static void ReadPeImportTable(
|
||||
Stream stream,
|
||||
BinaryReader reader,
|
||||
long sectionHeadersStart,
|
||||
int numberOfSections,
|
||||
bool is64Bit,
|
||||
long importTableOffset,
|
||||
List<BinaryRelocation> relocations)
|
||||
{
|
||||
stream.Seek(importTableOffset, SeekOrigin.Begin);
|
||||
|
||||
while (true)
|
||||
{
|
||||
var importLookupTableRva = reader.ReadUInt32();
|
||||
@@ -631,66 +729,151 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
break; // End of import directory
|
||||
}
|
||||
|
||||
// Read DLL name
|
||||
var nameOffset = RvaToFileOffset(stream, reader, sectionHeadersStart, numberOfSections, nameRva);
|
||||
var currentPos = stream.Position;
|
||||
stream.Seek(nameOffset, SeekOrigin.Begin);
|
||||
var dllName = ReadCString(reader);
|
||||
stream.Seek(currentPos, SeekOrigin.Begin);
|
||||
var dllName = ReadPeDllName(stream, reader, sectionHeadersStart, numberOfSections, nameRva);
|
||||
if (string.IsNullOrWhiteSpace(dllName))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Parse import lookup table
|
||||
var lookupOffset = RvaToFileOffset(stream, reader, sectionHeadersStart, numberOfSections, importLookupTableRva);
|
||||
if (lookupOffset > 0)
|
||||
{
|
||||
var lookupPos = stream.Position;
|
||||
stream.Seek(lookupOffset, SeekOrigin.Begin);
|
||||
ParseImportLookupTable(stream, reader, sectionHeadersStart, numberOfSections, is64Bit, lookupOffset, dllName, relocations);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
while (true)
|
||||
private static void ReadPeDelayImportTable(
|
||||
Stream stream,
|
||||
BinaryReader reader,
|
||||
long sectionHeadersStart,
|
||||
int numberOfSections,
|
||||
bool is64Bit,
|
||||
uint delayImportRva,
|
||||
List<BinaryRelocation> relocations)
|
||||
{
|
||||
if (delayImportRva == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var delayImportOffset = RvaToFileOffset(stream, reader, sectionHeadersStart, numberOfSections, delayImportRva);
|
||||
if (delayImportOffset == 0)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
stream.Seek(delayImportOffset, SeekOrigin.Begin);
|
||||
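// Bounded loop: read at most 256 delay-import descriptors to guard against malformed tables.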
for (var i = 0; i < 256; i++)
|
||||
{
|
||||
var attributes = reader.ReadUInt32();
|
||||
var nameRva = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // module handle
|
||||
reader.ReadUInt32(); // delay import address table
|
||||
var delayImportNameTableRva = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // bound delay import table
|
||||
reader.ReadUInt32(); // unload delay import table
|
||||
reader.ReadUInt32(); // timestamp
|
||||
_ = attributes;
|
||||
|
||||
if (nameRva == 0)
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
var dllName = ReadPeDllName(stream, reader, sectionHeadersStart, numberOfSections, nameRva);
|
||||
if (string.IsNullOrWhiteSpace(dllName) || delayImportNameTableRva == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var nameTableOffset = RvaToFileOffset(stream, reader, sectionHeadersStart, numberOfSections, delayImportNameTableRva);
|
||||
if (nameTableOffset == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
ParseImportLookupTable(stream, reader, sectionHeadersStart, numberOfSections, is64Bit, nameTableOffset, dllName, relocations);
|
||||
}
|
||||
}
|
||||
|
||||
private static string? ReadPeDllName(
|
||||
Stream stream,
|
||||
BinaryReader reader,
|
||||
long sectionHeadersStart,
|
||||
int numberOfSections,
|
||||
uint nameRva)
|
||||
{
|
||||
var nameOffset = RvaToFileOffset(stream, reader, sectionHeadersStart, numberOfSections, nameRva);
|
||||
if (nameOffset == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var currentPos = stream.Position;
|
||||
stream.Seek(nameOffset, SeekOrigin.Begin);
|
||||
var dllName = ReadCString(reader);
|
||||
stream.Seek(currentPos, SeekOrigin.Begin);
|
||||
return dllName;
|
||||
}
|
||||
|
||||
private static void ParseImportLookupTable(
|
||||
Stream stream,
|
||||
BinaryReader reader,
|
||||
long sectionHeadersStart,
|
||||
int numberOfSections,
|
||||
bool is64Bit,
|
||||
long lookupOffset,
|
||||
string dllName,
|
||||
List<BinaryRelocation> relocations)
|
||||
{
|
||||
var lookupPos = stream.Position;
|
||||
stream.Seek(lookupOffset, SeekOrigin.Begin);
|
||||
|
||||
while (true)
|
||||
{
|
||||
var entry = is64Bit ? reader.ReadUInt64() : reader.ReadUInt32();
|
||||
if (entry == 0)
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
var isOrdinal = is64Bit
|
||||
? (entry & 0x8000000000000000) != 0
|
||||
: (entry & 0x80000000) != 0;
|
||||
|
||||
if (!isOrdinal)
|
||||
{
|
||||
var hintNameRva = (uint)(entry & 0x7FFFFFFF);
|
||||
var hintNameOffset = RvaToFileOffset(stream, reader, sectionHeadersStart, numberOfSections, hintNameRva);
|
||||
|
||||
if (hintNameOffset > 0)
|
||||
{
|
||||
var entry = is64Bit ? reader.ReadUInt64() : reader.ReadUInt32();
|
||||
if (entry == 0)
|
||||
var entryPos = stream.Position;
|
||||
stream.Seek(hintNameOffset + 2, SeekOrigin.Begin); // Skip hint
|
||||
var funcName = ReadCString(reader);
|
||||
stream.Seek(entryPos, SeekOrigin.Begin);
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(funcName))
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
var isOrdinal = is64Bit
|
||||
? (entry & 0x8000000000000000) != 0
|
||||
: (entry & 0x80000000) != 0;
|
||||
|
||||
if (!isOrdinal)
|
||||
{
|
||||
var hintNameRva = (uint)(entry & 0x7FFFFFFF);
|
||||
var hintNameOffset = RvaToFileOffset(stream, reader, sectionHeadersStart, numberOfSections, hintNameRva);
|
||||
|
||||
if (hintNameOffset > 0)
|
||||
relocations.Add(new BinaryRelocation
|
||||
{
|
||||
var entryPos = stream.Position;
|
||||
stream.Seek(hintNameOffset + 2, SeekOrigin.Begin); // Skip hint
|
||||
var funcName = ReadCString(reader);
|
||||
stream.Seek(entryPos, SeekOrigin.Begin);
|
||||
|
||||
relocations.Add(new BinaryRelocation
|
||||
{
|
||||
Address = 0,
|
||||
SymbolIndex = 0,
|
||||
SourceSymbol = dllName,
|
||||
TargetSymbol = funcName,
|
||||
IsExternal = true
|
||||
});
|
||||
}
|
||||
Address = 0,
|
||||
SymbolIndex = 0,
|
||||
SourceSymbol = dllName,
|
||||
TargetSymbol = funcName,
|
||||
IsExternal = true,
|
||||
CallKind = CallKind.Iat
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
stream.Seek(lookupPos, SeekOrigin.Begin);
|
||||
}
|
||||
}
|
||||
|
||||
await Task.CompletedTask;
|
||||
_logger.LogDebug("Extracted {Count} imports from PE", relocations.Count);
|
||||
return relocations;
|
||||
stream.Seek(lookupPos, SeekOrigin.Begin);
|
||||
}
|
||||
|
||||
private long RvaToFileOffset(
|
||||
private static long RvaToFileOffset(
|
||||
Stream stream,
|
||||
BinaryReader reader,
|
||||
long sectionHeadersStart,
|
||||
@@ -797,7 +980,8 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
string scanId,
|
||||
string binaryPath,
|
||||
List<BinarySymbol> symbols,
|
||||
List<BinaryRelocation> relocations)
|
||||
List<BinaryRelocation> relocations,
|
||||
IReadOnlyCollection<CallGraphEdge> extraEdges)
|
||||
{
|
||||
var nodesById = new Dictionary<string, CallGraphNode>(StringComparer.Ordinal);
|
||||
var edges = new HashSet<CallGraphEdge>(CallGraphEdgeComparer.Instance);
|
||||
@@ -826,7 +1010,10 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
// Add edges from relocations
|
||||
foreach (var reloc in relocations)
|
||||
{
|
||||
var sourceId = $"native:{binaryName}/{reloc.SourceSymbol}";
|
||||
var sourceSymbol = string.IsNullOrWhiteSpace(reloc.SourceSymbol)
|
||||
? (reloc.CallKind == CallKind.Plt ? "__plt__" : "__reloc__")
|
||||
: reloc.SourceSymbol;
|
||||
var sourceId = $"native:{binaryName}/{sourceSymbol}";
|
||||
var targetId = reloc.IsExternal
|
||||
? $"native:external/{reloc.TargetSymbol}"
|
||||
: $"native:{binaryName}/{reloc.TargetSymbol}";
|
||||
@@ -834,10 +1021,20 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
edges.Add(new CallGraphEdge(
|
||||
SourceId: sourceId,
|
||||
TargetId: targetId,
|
||||
CallKind: CallKind.Direct,
|
||||
CallKind: reloc.CallKind,
|
||||
CallSite: $"0x{reloc.Address:X}"));
|
||||
}
|
||||
|
||||
if (extraEdges.Count > 0)
|
||||
{
|
||||
foreach (var edge in extraEdges)
|
||||
{
|
||||
edges.Add(edge);
|
||||
}
|
||||
}
|
||||
|
||||
EnsureNodesForEdges(nodesById, edges, binaryPath, binaryName);
|
||||
|
||||
var nodes = nodesById.Values
|
||||
.Select(n => n.Trimmed())
|
||||
.OrderBy(n => n.NodeId, StringComparer.Ordinal)
|
||||
@@ -876,6 +1073,70 @@ public sealed class BinaryCallGraphExtractor : ICallGraphExtractor
|
||||
return provisional with { GraphDigest = digest };
|
||||
}
|
||||
|
||||
private static void EnsureNodesForEdges(
|
||||
Dictionary<string, CallGraphNode> nodesById,
|
||||
IEnumerable<CallGraphEdge> edges,
|
||||
string binaryPath,
|
||||
string binaryName)
|
||||
{
|
||||
foreach (var edge in edges)
|
||||
{
|
||||
EnsureNode(nodesById, edge.SourceId, binaryPath, binaryName);
|
||||
EnsureNode(nodesById, edge.TargetId, binaryPath, binaryName);
|
||||
}
|
||||
}
|
||||
|
||||
private static void EnsureNode(
|
||||
Dictionary<string, CallGraphNode> nodesById,
|
||||
string nodeId,
|
||||
string binaryPath,
|
||||
string binaryName)
|
||||
{
|
||||
if (nodesById.ContainsKey(nodeId))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
var (package, symbol, isExternal) = ParseNodeId(nodeId, binaryName);
|
||||
var filePath = isExternal ? string.Empty : binaryPath;
|
||||
var visibility = isExternal ? Visibility.Public : Visibility.Private;
|
||||
|
||||
nodesById[nodeId] = new CallGraphNode(
|
||||
NodeId: nodeId,
|
||||
Symbol: symbol,
|
||||
File: filePath,
|
||||
Line: 0,
|
||||
Package: package,
|
||||
Visibility: visibility,
|
||||
IsEntrypoint: false,
|
||||
EntrypointType: null,
|
||||
IsSink: false,
|
||||
SinkCategory: null);
|
||||
}
|
||||
|
||||
private static (string Package, string Symbol, bool IsExternal) ParseNodeId(string nodeId, string binaryName)
|
||||
{
|
||||
const string Prefix = "native:";
|
||||
|
||||
if (!nodeId.StartsWith(Prefix, StringComparison.Ordinal))
|
||||
{
|
||||
return (binaryName, nodeId, false);
|
||||
}
|
||||
|
||||
var remainder = nodeId.Substring(Prefix.Length);
|
||||
var slashIndex = remainder.IndexOf('/');
|
||||
if (slashIndex < 0)
|
||||
{
|
||||
return (binaryName, remainder, false);
|
||||
}
|
||||
|
||||
var package = remainder.Substring(0, slashIndex);
|
||||
var symbol = remainder.Substring(slashIndex + 1);
|
||||
var isExternal = string.Equals(package, "external", StringComparison.Ordinal);
|
||||
|
||||
return (package, symbol, isExternal);
|
||||
}
|
||||
|
||||
private static string ReadNullTerminatedString(byte[] buffer, int offset)
|
||||
{
|
||||
if (offset < 0 || offset >= buffer.Length)
|
||||
@@ -917,4 +1178,5 @@ internal sealed class BinaryRelocation
|
||||
public ulong Address { get; init; }
|
||||
public bool IsExternal { get; init; }
|
||||
public int SymbolIndex { get; init; }
|
||||
public CallKind CallKind { get; init; } = CallKind.Direct;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,100 @@
|
||||
using System.Collections.Immutable;
|
||||
using Gee.External.Capstone;
|
||||
using Gee.External.Capstone.Arm64;
|
||||
using StellaOps.Scanner.CallGraph;
|
||||
|
||||
namespace StellaOps.Scanner.CallGraph.Binary.Disassembly;
|
||||
|
||||
internal sealed class Arm64Disassembler
|
||||
{
|
||||
public ImmutableArray<BinaryCallInstruction> ExtractDirectCalls(
|
||||
ReadOnlySpan<byte> code,
|
||||
ulong baseAddress)
|
||||
{
|
||||
if (code.IsEmpty)
|
||||
{
|
||||
return ImmutableArray<BinaryCallInstruction>.Empty;
|
||||
}
|
||||
|
||||
if (!CapstoneDisassembler.IsArm64Supported)
|
||||
{
|
||||
return ImmutableArray<BinaryCallInstruction>.Empty;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
using var disassembler = CapstoneDisassembler.CreateArm64Disassembler(
|
||||
Arm64DisassembleMode.Arm | Arm64DisassembleMode.LittleEndian);
|
||||
disassembler.EnableInstructionDetails = true;
|
||||
|
||||
var instructions = disassembler.Disassemble(code.ToArray(), (long)baseAddress);
|
||||
if (instructions.Length == 0)
|
||||
{
|
||||
return ImmutableArray<BinaryCallInstruction>.Empty;
|
||||
}
|
||||
|
||||
var calls = ImmutableArray.CreateBuilder<BinaryCallInstruction>();
|
||||
|
||||
foreach (var instruction in instructions)
|
||||
{
|
||||
if (instruction.IsSkippedData)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
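// BL = direct branch-with-link; BLR = branch-with-link to a register (indirect target).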
var isCall = instruction.Id is Arm64InstructionId.ARM64_INS_BL or Arm64InstructionId.ARM64_INS_BLR;
|
||||
if (!isCall)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!instruction.HasDetails || instruction.Details is null)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var target = TryResolveTarget(instruction);
|
||||
if (target is null)
|
||||
{
|
||||
calls.Add(new BinaryCallInstruction(
|
||||
(ulong)instruction.Address,
|
||||
0,
|
||||
CallKind.Dynamic));
|
||||
continue;
|
||||
}
|
||||
|
||||
calls.Add(new BinaryCallInstruction(
|
||||
(ulong)instruction.Address,
|
||||
target.Value,
|
||||
CallKind.Direct));
|
||||
}
|
||||
|
||||
return calls.ToImmutable();
|
||||
}
|
||||
catch (DllNotFoundException)
|
||||
{
|
||||
return ImmutableArray<BinaryCallInstruction>.Empty;
|
||||
}
|
||||
catch (TypeInitializationException)
|
||||
{
|
||||
return ImmutableArray<BinaryCallInstruction>.Empty;
|
||||
}
|
||||
catch (BadImageFormatException)
|
||||
{
|
||||
return ImmutableArray<BinaryCallInstruction>.Empty;
|
||||
}
|
||||
}
|
||||
|
||||
private static ulong? TryResolveTarget(Arm64Instruction instruction)
|
||||
{
|
||||
foreach (var operand in instruction.Details!.Operands)
|
||||
{
|
||||
if (operand.Type == Arm64OperandType.Immediate)
|
||||
{
|
||||
return (ulong)operand.Immediate;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,26 @@
using StellaOps.Scanner.CallGraph;

namespace StellaOps.Scanner.CallGraph.Binary.Disassembly;

internal enum BinaryArchitecture
{
Unknown,
X86,
X64,
Arm64
}

internal sealed record BinaryTextSection(
byte[] Bytes,
ulong VirtualAddress,
int Bitness,
BinaryArchitecture Architecture,
string SectionName)
{
public ulong EndAddress => VirtualAddress + (ulong)Bytes.Length;
}

internal sealed record BinaryCallInstruction(
ulong InstructionAddress,
ulong TargetAddress,
CallKind CallKind);
|
||||
@@ -0,0 +1,395 @@
|
||||
using System.Text;
|
||||
using StellaOps.Scanner.CallGraph.Binary;
|
||||
|
||||
namespace StellaOps.Scanner.CallGraph.Binary.Disassembly;
|
||||
|
||||
internal static class BinaryTextSectionReader
|
||||
{
|
||||
public static async Task<BinaryTextSection?> TryReadAsync(
|
||||
string path,
|
||||
BinaryFormat format,
|
||||
CancellationToken ct)
|
||||
{
|
||||
ct.ThrowIfCancellationRequested();
|
||||
|
||||
return format switch
|
||||
{
|
||||
BinaryFormat.Elf => await TryReadElfTextSectionAsync(path, ct),
|
||||
BinaryFormat.Pe => await TryReadPeTextSectionAsync(path, ct),
|
||||
BinaryFormat.MachO => await TryReadMachOTextSectionAsync(path, ct),
|
||||
_ => null
|
||||
};
|
||||
}
|
||||
|
||||
private static async Task<BinaryTextSection?> TryReadElfTextSectionAsync(string path, CancellationToken ct)
|
||||
{
|
||||
using var stream = File.OpenRead(path);
|
||||
using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
|
||||
|
||||
var ident = reader.ReadBytes(16);
|
||||
if (ident.Length < 16)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var is64Bit = ident[4] == 2;
|
||||
var isLittleEndian = ident[5] == 1;
|
||||
if (!isLittleEndian)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var eType = reader.ReadUInt16();
|
||||
var eMachine = reader.ReadUInt16();
|
||||
|
||||
_ = eType;
|
||||
|
||||
var architecture = eMachine switch
|
||||
{
|
||||
3 => BinaryArchitecture.X86,
|
||||
62 => BinaryArchitecture.X64,
|
||||
183 => BinaryArchitecture.Arm64,
|
||||
_ => BinaryArchitecture.Unknown
|
||||
};
|
||||
|
||||
// e_shoff
|
||||
stream.Seek(is64Bit ? 40 : 32, SeekOrigin.Begin);
|
||||
var sectionHeaderOffset = is64Bit ? reader.ReadInt64() : reader.ReadInt32();
|
||||
// e_shentsize, e_shnum, e_shstrndx
|
||||
stream.Seek(is64Bit ? 58 : 46, SeekOrigin.Begin);
|
||||
var sectionHeaderSize = reader.ReadUInt16();
|
||||
var sectionHeaderCount = reader.ReadUInt16();
|
||||
var sectionNameIndex = reader.ReadUInt16();
|
||||
|
||||
if (sectionHeaderOffset <= 0 || sectionHeaderCount == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
// Read section name string table
|
||||
var nameTableOffset = ReadElfSectionOffset(reader, stream, sectionHeaderOffset, sectionHeaderSize, sectionNameIndex, is64Bit);
|
||||
var nameTableSize = ReadElfSectionSize(reader, stream, sectionHeaderOffset, sectionHeaderSize, sectionNameIndex, is64Bit);
|
||||
|
||||
if (nameTableOffset <= 0 || nameTableSize <= 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
stream.Seek(nameTableOffset, SeekOrigin.Begin);
|
||||
var nameTable = reader.ReadBytes((int)nameTableSize);
|
||||
|
||||
for (int i = 0; i < sectionHeaderCount; i++)
|
||||
{
|
||||
stream.Seek(sectionHeaderOffset + i * sectionHeaderSize, SeekOrigin.Begin);
|
||||
var nameIndex = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // sh_type
|
||||
ulong sectionAddress;
|
||||
long sectionOffset;
|
||||
long sectionSize;
|
||||
|
||||
if (is64Bit)
|
||||
{
|
||||
reader.ReadUInt64(); // sh_flags
|
||||
sectionAddress = reader.ReadUInt64();
|
||||
sectionOffset = reader.ReadInt64();
|
||||
sectionSize = reader.ReadInt64();
|
||||
}
|
||||
else
|
||||
{
|
||||
reader.ReadUInt32(); // sh_flags
|
||||
sectionAddress = reader.ReadUInt32();
|
||||
sectionOffset = reader.ReadInt32();
|
||||
sectionSize = reader.ReadInt32();
|
||||
}
|
||||
|
||||
var name = ReadNullTerminatedString(nameTable, (int)nameIndex);
|
||||
|
||||
if (string.Equals(name, ".text", StringComparison.Ordinal))
|
||||
{
|
||||
if (sectionOffset <= 0 || sectionSize <= 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
stream.Seek(sectionOffset, SeekOrigin.Begin);
|
||||
var bytes = reader.ReadBytes((int)sectionSize);
|
||||
await Task.CompletedTask;
|
||||
return new BinaryTextSection(
|
||||
bytes,
|
||||
sectionAddress,
|
||||
is64Bit ? 64 : 32,
|
||||
architecture,
|
||||
name);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static long ReadElfSectionOffset(BinaryReader reader, Stream stream, long sectionHeaderOffset, ushort entrySize, ushort index, bool is64Bit)
|
||||
{
|
||||
var position = sectionHeaderOffset + index * entrySize;
|
||||
return ReadElfSectionOffset(reader, stream, position, is64Bit);
|
||||
}
|
||||
|
||||
private static long ReadElfSectionOffset(BinaryReader reader, Stream stream, long position, bool is64Bit)
|
||||
{
|
||||
stream.Seek(position + (is64Bit ? 24 : 16), SeekOrigin.Begin);
|
||||
return is64Bit ? reader.ReadInt64() : reader.ReadInt32();
|
||||
}
|
||||
|
||||
private static long ReadElfSectionSize(BinaryReader reader, Stream stream, long sectionHeaderOffset, ushort entrySize, ushort index, bool is64Bit)
|
||||
{
|
||||
var position = sectionHeaderOffset + index * entrySize;
|
||||
return ReadElfSectionSize(reader, stream, position, is64Bit);
|
||||
}
|
||||
|
||||
private static long ReadElfSectionSize(BinaryReader reader, Stream stream, long position, bool is64Bit)
|
||||
{
|
||||
stream.Seek(position + (is64Bit ? 32 : 20), SeekOrigin.Begin);
|
||||
return is64Bit ? reader.ReadInt64() : reader.ReadInt32();
|
||||
}
|
||||
|
||||
private static async Task<BinaryTextSection?> TryReadPeTextSectionAsync(string path, CancellationToken ct)
|
||||
{
|
||||
using var stream = File.OpenRead(path);
|
||||
using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
|
||||
|
||||
stream.Seek(0x3C, SeekOrigin.Begin);
|
||||
var peOffset = reader.ReadInt32();
|
||||
|
||||
stream.Seek(peOffset, SeekOrigin.Begin);
|
||||
var signature = reader.ReadUInt32();
|
||||
if (signature != 0x00004550)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var machine = reader.ReadUInt16();
|
||||
var numberOfSections = reader.ReadUInt16();
|
||||
reader.ReadUInt32(); // timestamp
|
||||
reader.ReadUInt32(); // symbol table ptr
|
||||
reader.ReadUInt32(); // number of symbols
|
||||
var optionalHeaderSize = reader.ReadUInt16();
|
||||
reader.ReadUInt16(); // characteristics
|
||||
|
||||
var architecture = machine switch
|
||||
{
|
||||
0x014c => BinaryArchitecture.X86,
|
||||
0x8664 => BinaryArchitecture.X64,
|
||||
0xaa64 => BinaryArchitecture.Arm64,
|
||||
_ => BinaryArchitecture.Unknown
|
||||
};
|
||||
|
||||
if (optionalHeaderSize == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var optionalHeaderStart = stream.Position;
|
||||
var magic = reader.ReadUInt16();
|
||||
var is64Bit = magic == 0x20b;
|
||||
_ = is64Bit;
|
||||
|
||||
stream.Seek(optionalHeaderStart + optionalHeaderSize, SeekOrigin.Begin);
|
||||
|
||||
for (int i = 0; i < numberOfSections; i++)
|
||||
{
|
||||
var nameBytes = reader.ReadBytes(8);
|
||||
var name = Encoding.ASCII.GetString(nameBytes).TrimEnd('\0');
|
||||
var virtualSize = reader.ReadUInt32();
|
||||
var virtualAddress = reader.ReadUInt32();
|
||||
var sizeOfRawData = reader.ReadUInt32();
|
||||
var pointerToRawData = reader.ReadUInt32();
|
||||
|
||||
reader.ReadUInt32(); // pointer to relocations
|
||||
reader.ReadUInt32(); // pointer to line numbers
|
||||
reader.ReadUInt16(); // number of relocations
|
||||
reader.ReadUInt16(); // number of line numbers
|
||||
reader.ReadUInt32(); // characteristics
|
||||
|
||||
if (!string.Equals(name, ".text", StringComparison.Ordinal))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (pointerToRawData == 0 || sizeOfRawData == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
stream.Seek(pointerToRawData, SeekOrigin.Begin);
|
||||
var bytes = reader.ReadBytes((int)sizeOfRawData);
|
||||
await Task.CompletedTask;
|
||||
return new BinaryTextSection(
|
||||
bytes,
|
||||
virtualAddress,
|
||||
is64Bit ? 64 : 32,
|
||||
architecture,
|
||||
name);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static async Task<BinaryTextSection?> TryReadMachOTextSectionAsync(string path, CancellationToken ct)
|
||||
{
|
||||
using var stream = File.OpenRead(path);
|
||||
using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
|
||||
|
||||
var magic = reader.ReadUInt32();
|
||||
var is64Bit = magic is 0xFEEDFACF or 0xCFFAEDFE;
|
||||
var isSwapped = magic is 0xCEFAEDFE or 0xCFFAEDFE;
|
||||
|
||||
if (isSwapped)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var cpuType = reader.ReadInt32();
|
||||
reader.ReadInt32(); // cpusubtype
|
||||
reader.ReadUInt32(); // filetype
|
||||
var ncmds = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // sizeofcmds
|
||||
reader.ReadUInt32(); // flags
|
||||
if (is64Bit)
|
||||
{
|
||||
reader.ReadUInt32(); // reserved
|
||||
}
|
||||
|
||||
var architecture = cpuType switch
|
||||
{
|
||||
7 => BinaryArchitecture.X86,
|
||||
0x01000007 => BinaryArchitecture.X64,
|
||||
0x0100000C => BinaryArchitecture.Arm64,
|
||||
_ => BinaryArchitecture.Unknown
|
||||
};
|
||||
|
||||
for (int i = 0; i < ncmds; i++)
|
||||
{
|
||||
var cmdStart = stream.Position;
|
||||
var cmd = reader.ReadUInt32();
|
||||
var cmdsize = reader.ReadUInt32();
|
||||
|
||||
var isSegment = cmd == (is64Bit ? 0x19u : 0x1u);
|
||||
if (!isSegment)
|
||||
{
|
||||
stream.Seek(cmdStart + cmdsize, SeekOrigin.Begin);
|
||||
continue;
|
||||
}
|
||||
|
||||
var segName = ReadFixedString(reader, 16);
|
||||
if (is64Bit)
|
||||
{
|
||||
reader.ReadUInt64(); // vmaddr
|
||||
reader.ReadUInt64(); // vmsize
|
||||
reader.ReadUInt64(); // fileoff
|
||||
reader.ReadUInt64(); // filesize
|
||||
reader.ReadInt32(); // maxprot
|
||||
reader.ReadInt32(); // initprot
|
||||
var nsects = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // flags
|
||||
|
||||
for (int s = 0; s < nsects; s++)
|
||||
{
|
||||
var sectName = ReadFixedString(reader, 16);
|
||||
var sectSegName = ReadFixedString(reader, 16);
|
||||
var addr = reader.ReadUInt64();
|
||||
var size = reader.ReadUInt64();
|
||||
var offset = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // align
|
||||
reader.ReadUInt32(); // reloff
|
||||
reader.ReadUInt32(); // nreloc
|
||||
reader.ReadUInt32(); // flags
|
||||
reader.ReadUInt32(); // reserved1
|
||||
reader.ReadUInt32(); // reserved2
|
||||
reader.ReadUInt32(); // reserved3
|
||||
|
||||
if (!string.Equals(sectName, "__text", StringComparison.Ordinal))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
stream.Seek(offset, SeekOrigin.Begin);
|
||||
var bytes = reader.ReadBytes((int)size);
|
||||
await Task.CompletedTask;
|
||||
return new BinaryTextSection(
|
||||
bytes,
|
||||
addr,
|
||||
64,
|
||||
architecture,
|
||||
sectName);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
reader.ReadUInt32(); // vmaddr
|
||||
reader.ReadUInt32(); // vmsize
|
||||
reader.ReadUInt32(); // fileoff
|
||||
reader.ReadUInt32(); // filesize
|
||||
reader.ReadInt32(); // maxprot
|
||||
reader.ReadInt32(); // initprot
|
||||
var nsects = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // flags
|
||||
|
||||
for (int s = 0; s < nsects; s++)
|
||||
{
|
||||
var sectName = ReadFixedString(reader, 16);
|
||||
var sectSegName = ReadFixedString(reader, 16);
|
||||
var addr = reader.ReadUInt32();
|
||||
var size = reader.ReadUInt32();
|
||||
var offset = reader.ReadUInt32();
|
||||
reader.ReadUInt32(); // align
|
||||
reader.ReadUInt32(); // reloff
|
||||
reader.ReadUInt32(); // nreloc
|
||||
reader.ReadUInt32(); // flags
|
||||
reader.ReadUInt32(); // reserved1
|
||||
reader.ReadUInt32(); // reserved2
|
||||
|
||||
if (!string.Equals(sectName, "__text", StringComparison.Ordinal))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
stream.Seek(offset, SeekOrigin.Begin);
|
||||
var bytes = reader.ReadBytes((int)size);
|
||||
await Task.CompletedTask;
|
||||
return new BinaryTextSection(
|
||||
bytes,
|
||||
addr,
|
||||
32,
|
||||
architecture,
|
||||
sectName);
|
||||
}
|
||||
}
|
||||
|
||||
stream.Seek(cmdStart + cmdsize, SeekOrigin.Begin);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static string ReadFixedString(BinaryReader reader, int length)
|
||||
{
|
||||
var bytes = reader.ReadBytes(length);
|
||||
var nullIndex = Array.IndexOf(bytes, (byte)0);
|
||||
var count = nullIndex >= 0 ? nullIndex : bytes.Length;
|
||||
return Encoding.ASCII.GetString(bytes, 0, count);
|
||||
}
|
||||
|
||||
private static string ReadNullTerminatedString(byte[] buffer, int offset)
|
||||
{
|
||||
if (offset < 0 || offset >= buffer.Length)
|
||||
{
|
||||
return string.Empty;
|
||||
}
|
||||
|
||||
var end = offset;
|
||||
while (end < buffer.Length && buffer[end] != 0)
|
||||
{
|
||||
end++;
|
||||
}
|
||||
|
||||
return Encoding.UTF8.GetString(buffer, offset, end - offset);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,146 @@
|
||||
using System.Collections.Immutable;
|
||||
using StellaOps.Scanner.CallGraph;
|
||||
using StellaOps.Scanner.CallGraph.Binary;
|
||||
|
||||
namespace StellaOps.Scanner.CallGraph.Binary.Disassembly;
|
||||
|
||||
internal sealed class DirectCallExtractor
|
||||
{
|
||||
private readonly X86Disassembler _x86Disassembler;
|
||||
private readonly Arm64Disassembler _arm64Disassembler;
|
||||
|
||||
public DirectCallExtractor(
|
||||
X86Disassembler? x86Disassembler = null,
|
||||
Arm64Disassembler? arm64Disassembler = null)
|
||||
{
|
||||
_x86Disassembler = x86Disassembler ?? new X86Disassembler();
|
||||
_arm64Disassembler = arm64Disassembler ?? new Arm64Disassembler();
|
||||
}
|
||||
|
||||
public ImmutableArray<CallGraphEdge> Extract(
|
||||
BinaryTextSection textSection,
|
||||
IReadOnlyList<BinarySymbol> symbols,
|
||||
string binaryName)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(textSection);
|
||||
ArgumentNullException.ThrowIfNull(symbols);
|
||||
|
||||
if (textSection.Bytes.Length == 0)
|
||||
{
|
||||
return ImmutableArray<CallGraphEdge>.Empty;
|
||||
}
|
||||
|
||||
var orderedSymbols = symbols
|
||||
.Where(symbol => symbol is not null)
|
||||
.OrderBy(symbol => symbol.Address)
|
||||
.ThenBy(symbol => symbol.Name, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
var calls = textSection.Architecture switch
|
||||
{
|
||||
BinaryArchitecture.X86 => _x86Disassembler.ExtractDirectCalls(
|
||||
textSection.Bytes,
|
||||
textSection.VirtualAddress,
|
||||
32),
|
||||
BinaryArchitecture.X64 => _x86Disassembler.ExtractDirectCalls(
|
||||
textSection.Bytes,
|
||||
textSection.VirtualAddress,
|
||||
64),
|
||||
BinaryArchitecture.Arm64 => _arm64Disassembler.ExtractDirectCalls(
|
||||
textSection.Bytes,
|
||||
textSection.VirtualAddress),
|
||||
_ => ImmutableArray<BinaryCallInstruction>.Empty
|
||||
};
|
||||
|
||||
if (calls.IsDefaultOrEmpty)
|
||||
{
|
||||
return ImmutableArray<CallGraphEdge>.Empty;
|
||||
}
|
||||
|
||||
var edges = ImmutableArray.CreateBuilder<CallGraphEdge>(calls.Length);
|
||||
foreach (var call in calls)
|
||||
{
|
||||
var sourceSymbol = ResolveSymbol(orderedSymbols, call.InstructionAddress);
|
||||
var targetSymbol = ResolveSymbol(orderedSymbols, call.TargetAddress);
|
||||
var targetIsInternal = call.TargetAddress >= textSection.VirtualAddress
|
||||
&& call.TargetAddress < textSection.EndAddress;
|
||||
|
||||
var sourceId = BuildNodeId(binaryName, sourceSymbol, call.InstructionAddress, isExternal: false);
|
||||
var targetId = BuildNodeId(
|
||||
targetIsInternal ? binaryName : "external",
|
||||
targetSymbol,
|
||||
call.TargetAddress,
|
||||
isExternal: !targetIsInternal);
|
||||
|
||||
edges.Add(new CallGraphEdge(
|
||||
SourceId: sourceId,
|
||||
TargetId: targetId,
|
||||
CallKind: call.CallKind,
|
||||
CallSite: $"0x{call.InstructionAddress:X}"));
|
||||
}
|
||||
|
||||
return edges
|
||||
.OrderBy(edge => edge.SourceId, StringComparer.Ordinal)
|
||||
.ThenBy(edge => edge.TargetId, StringComparer.Ordinal)
|
||||
.ThenBy(edge => edge.CallKind.ToString(), StringComparer.Ordinal)
|
||||
.ThenBy(edge => edge.CallSite ?? string.Empty, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
private static string? ResolveSymbol(IReadOnlyList<BinarySymbol> symbols, ulong address)
|
||||
{
|
||||
string? bestSymbol = null;
|
||||
ulong bestAddress = 0;
|
||||
|
||||
foreach (var symbol in symbols)
|
||||
{
|
||||
if (symbol.Address > address)
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
if (symbol.Address == address)
|
||||
{
|
||||
return symbol.Name;
|
||||
}
|
||||
|
||||
if (symbol.Address <= address)
|
||||
{
|
||||
bestSymbol = symbol.Name;
|
||||
bestAddress = symbol.Address;
|
||||
}
|
||||
}
|
||||
|
||||
if (bestSymbol is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var candidate = symbols.FirstOrDefault(s => s.Address == bestAddress);
|
||||
if (candidate is not null && candidate.Size > 0)
|
||||
{
|
||||
var end = candidate.Address + candidate.Size;
|
||||
if (address >= end)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
return bestSymbol;
|
||||
}
|
||||
|
||||
private static string BuildNodeId(
|
||||
string binaryName,
|
||||
string? symbol,
|
||||
ulong address,
|
||||
bool isExternal)
|
||||
{
|
||||
var safeSymbol = string.IsNullOrWhiteSpace(symbol) ? $"addr_{address:X}" : symbol!;
|
||||
if (isExternal)
|
||||
{
|
||||
return $"native:external/{safeSymbol}";
|
||||
}
|
||||
|
||||
return $"native:{binaryName}/{safeSymbol}";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,53 @@
|
||||
using System.Collections.Immutable;
|
||||
using Iced.Intel;
|
||||
using StellaOps.Scanner.CallGraph;
|
||||
|
||||
namespace StellaOps.Scanner.CallGraph.Binary.Disassembly;
|
||||
|
||||
internal sealed class X86Disassembler
|
||||
{
|
||||
public ImmutableArray<BinaryCallInstruction> ExtractDirectCalls(
|
||||
ReadOnlySpan<byte> code,
|
||||
ulong baseAddress,
|
||||
int bitness)
|
||||
{
|
||||
if (bitness is not (16 or 32 or 64))
|
||||
{
|
||||
throw new ArgumentOutOfRangeException(nameof(bitness), "Bitness must be 16, 32, or 64.");
|
||||
}
|
||||
|
||||
if (code.IsEmpty)
|
||||
{
|
||||
return ImmutableArray<BinaryCallInstruction>.Empty;
|
||||
}
|
||||
|
||||
var reader = new ByteArrayCodeReader(code.ToArray());
|
||||
var decoder = Decoder.Create(bitness, reader);
|
||||
decoder.IP = baseAddress;
|
||||
|
||||
var calls = ImmutableArray.CreateBuilder<BinaryCallInstruction>();
|
||||
|
||||
while (reader.CanReadByte)
|
||||
{
|
||||
decoder.Decode(out var instruction);
|
||||
if (instruction.IsInvalid)
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
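// Near jmp targets are recorded alongside near calls (tail-call style control transfers).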
if (instruction.IsCallNear || instruction.IsJmpNear)
|
||||
{
|
||||
if (instruction.Op0Kind is OpKind.NearBranch16 or OpKind.NearBranch32 or OpKind.NearBranch64)
|
||||
{
|
||||
var target = instruction.NearBranchTarget;
|
||||
calls.Add(new BinaryCallInstruction(
|
||||
instruction.IP,
|
||||
target,
|
||||
CallKind.Direct));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return calls.ToImmutable();
|
||||
}
|
||||
}
|
||||
@@ -123,7 +123,9 @@ public enum CallKind
Virtual,
Delegate,
Reflection,
Dynamic
Dynamic,
Plt,
Iat
}
|
||||
|
||||
[JsonConverter(typeof(JsonStringEnumConverter<EntrypointType>))]
|
||||
|
||||
@@ -12,6 +12,8 @@
</ItemGroup>

<ItemGroup>
<PackageReference Include="Gee.External.Capstone" Version="2.3.0" />
<PackageReference Include="Iced" Version="1.21.0" />
<PackageReference Include="Microsoft.Build.Locator" Version="1.10.0" />
<PackageReference Include="Microsoft.CodeAnalysis.CSharp.Workspaces" Version="4.14.0" />
<PackageReference Include="Microsoft.CodeAnalysis.Workspaces.MSBuild" Version="4.14.0" />
|
||||
|
||||
@@ -6,14 +6,11 @@ using CycloneDX.Models;
|
||||
namespace StellaOps.Scanner.Emit.Composition;
|
||||
|
||||
/// <summary>
|
||||
/// Extension methods for CycloneDX 1.7 support.
|
||||
/// Workaround for CycloneDX.Core not yet exposing SpecificationVersion.v1_7.
|
||||
/// Helpers and media type constants for CycloneDX 1.7.
|
||||
/// </summary>
|
||||
/// <remarks>
|
||||
/// Sprint: SPRINT_5000_0001_0001 - Advisory Alignment (CycloneDX 1.7 Upgrade)
|
||||
///
|
||||
/// Once CycloneDX.Core adds v1_7 support, this extension can be removed
|
||||
/// and the code can use SpecificationVersion.v1_7 directly.
|
||||
/// Keep upgrade helpers for backward-compatibility with 1.6 inputs.
|
||||
/// </remarks>
|
||||
public static class CycloneDx17Extensions
|
||||
{
|
||||
|
||||
@@ -47,12 +47,38 @@ public sealed record CycloneDxArtifact
|
||||
public required string ProtobufMediaType { get; init; }
|
||||
}
|
||||
|
||||
public sealed record SpdxArtifact
|
||||
{
|
||||
public required SbomView View { get; init; }
|
||||
|
||||
public required DateTimeOffset GeneratedAt { get; init; }
|
||||
|
||||
public required byte[] JsonBytes { get; init; }
|
||||
|
||||
public required string JsonSha256 { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Canonical content hash (sha256, hex) of the SPDX JSON-LD payload.
|
||||
/// </summary>
|
||||
public required string ContentHash { get; init; }
|
||||
|
||||
public required string JsonMediaType { get; init; }
|
||||
|
||||
public byte[]? TagValueBytes { get; init; }
|
||||
|
||||
public string? TagValueSha256 { get; init; }
|
||||
|
||||
public string? TagValueMediaType { get; init; }
|
||||
}
|
||||
|
||||
public sealed record SbomCompositionResult
|
||||
{
|
||||
public required CycloneDxArtifact Inventory { get; init; }
|
||||
|
||||
public CycloneDxArtifact? Usage { get; init; }
|
||||
|
||||
public SpdxArtifact? SpdxInventory { get; init; }
|
||||
|
||||
public required ComponentGraph Graph { get; init; }
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -0,0 +1,413 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using StellaOps.Canonical.Json;
|
||||
using StellaOps.Scanner.Core.Contracts;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
using StellaOps.Scanner.Emit.Spdx;
|
||||
using StellaOps.Scanner.Emit.Spdx.Models;
|
||||
using StellaOps.Scanner.Emit.Spdx.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.Emit.Composition;
|
||||
|
||||
public interface ISpdxComposer
|
||||
{
|
||||
SpdxArtifact Compose(
|
||||
SbomCompositionRequest request,
|
||||
SpdxCompositionOptions options,
|
||||
CancellationToken cancellationToken = default);
|
||||
|
||||
ValueTask<SpdxArtifact> ComposeAsync(
|
||||
SbomCompositionRequest request,
|
||||
SpdxCompositionOptions options,
|
||||
CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
public sealed record SpdxCompositionOptions
|
||||
{
|
||||
public string CreatorTool { get; init; } = "StellaOps-Scanner";
|
||||
|
||||
public string? CreatorOrganization { get; init; }
|
||||
|
||||
public string NamespaceBase { get; init; } = "https://stellaops.io/spdx";
|
||||
|
||||
public bool IncludeFiles { get; init; }
|
||||
|
||||
public bool IncludeSnippets { get; init; }
|
||||
|
||||
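// When set, also emits an SPDX 2.3 tag-value rendering alongside the 3.0.1 JSON-LD payload.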
public bool IncludeTagValue { get; init; }
|
||||
|
||||
public SpdxLicenseListVersion LicenseListVersion { get; init; } = SpdxLicenseListVersion.V3_21;
|
||||
|
||||
public ImmutableArray<string> ProfileConformance { get; init; } = ImmutableArray.Create("core", "software");
|
||||
}
|
||||
|
||||
public sealed class SpdxComposer : ISpdxComposer
|
||||
{
|
||||
private const string JsonMediaType = "application/spdx+json; version=3.0.1";
|
||||
private const string TagValueMediaType = "text/spdx; version=2.3";
|
||||
|
||||
public SpdxArtifact Compose(
|
||||
SbomCompositionRequest request,
|
||||
SpdxCompositionOptions options,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
ArgumentNullException.ThrowIfNull(options);
|
||||
|
||||
var graph = ComponentGraphBuilder.Build(request.LayerFragments);
|
||||
var generatedAt = ScannerTimestamps.Normalize(request.GeneratedAt);
|
||||
|
||||
var idBuilder = new SpdxIdBuilder(options.NamespaceBase, request.Image.ImageDigest);
|
||||
var licenseList = SpdxLicenseListProvider.Get(options.LicenseListVersion);
|
||||
|
||||
var creationInfo = BuildCreationInfo(request, options, generatedAt);
|
||||
var document = BuildDocument(request, options, graph, idBuilder, creationInfo, licenseList);
|
||||
|
||||
var jsonBytes = SpdxJsonLdSerializer.Serialize(document);
|
||||
var jsonHash = CanonJson.Sha256Hex(jsonBytes);
|
||||
|
||||
byte[]? tagBytes = null;
|
||||
string? tagHash = null;
|
||||
if (options.IncludeTagValue)
|
||||
{
|
||||
tagBytes = SpdxTagValueSerializer.Serialize(document);
|
||||
tagHash = CanonJson.Sha256Hex(tagBytes);
|
||||
}
|
||||
|
||||
return new SpdxArtifact
|
||||
{
|
||||
View = SbomView.Inventory,
|
||||
GeneratedAt = generatedAt,
|
||||
JsonBytes = jsonBytes,
|
||||
JsonSha256 = jsonHash,
|
||||
ContentHash = jsonHash,
|
||||
JsonMediaType = JsonMediaType,
|
||||
TagValueBytes = tagBytes,
|
||||
TagValueSha256 = tagHash,
|
||||
TagValueMediaType = tagBytes is null ? null : TagValueMediaType
|
||||
};
|
||||
}
|
||||
|
||||
public ValueTask<SpdxArtifact> ComposeAsync(
|
||||
SbomCompositionRequest request,
|
||||
SpdxCompositionOptions options,
|
||||
CancellationToken cancellationToken = default)
|
||||
=> ValueTask.FromResult(Compose(request, options, cancellationToken));
|
||||
|
||||
private static SpdxCreationInfo BuildCreationInfo(
|
||||
SbomCompositionRequest request,
|
||||
SpdxCompositionOptions options,
|
||||
DateTimeOffset generatedAt)
|
||||
{
|
||||
var creators = ImmutableArray.CreateBuilder<string>();
|
||||
|
||||
var toolName = !string.IsNullOrWhiteSpace(request.GeneratorName)
|
||||
? request.GeneratorName!.Trim()
|
||||
: options.CreatorTool;
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(toolName))
|
||||
{
|
||||
var toolLabel = !string.IsNullOrWhiteSpace(request.GeneratorVersion)
|
||||
? $"{toolName}-{request.GeneratorVersion!.Trim()}"
|
||||
: toolName;
|
||||
creators.Add($"Tool: {toolLabel}");
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(options.CreatorOrganization))
|
||||
{
|
||||
creators.Add($"Organization: {options.CreatorOrganization!.Trim()}");
|
||||
}
|
||||
|
||||
return new SpdxCreationInfo
|
||||
{
|
||||
Created = generatedAt,
|
||||
Creators = creators.ToImmutable(),
|
||||
SpecVersion = SpdxDefaults.SpecVersion
|
||||
};
|
||||
}
|
||||
|
||||
private static SpdxDocument BuildDocument(
|
||||
SbomCompositionRequest request,
|
||||
SpdxCompositionOptions options,
|
||||
ComponentGraph graph,
|
||||
SpdxIdBuilder idBuilder,
|
||||
SpdxCreationInfo creationInfo,
|
||||
SpdxLicenseList licenseList)
|
||||
{
|
||||
var packages = new List<SpdxPackage>();
|
||||
var packageIdMap = new Dictionary<string, string>(StringComparer.Ordinal);
|
||||
|
||||
var rootPackage = BuildRootPackage(request.Image, idBuilder);
|
||||
packages.Add(rootPackage);
|
||||
|
||||
foreach (var component in graph.Components)
|
||||
{
|
||||
var package = BuildComponentPackage(component, idBuilder, licenseList);
|
||||
packages.Add(package);
|
||||
packageIdMap[component.Identity.Key] = package.SpdxId;
|
||||
}
|
||||
|
||||
var rootElementIds = packages
|
||||
.Select(static pkg => pkg.SpdxId)
|
||||
.OrderBy(id => id, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
var sbom = new SpdxSbom
|
||||
{
|
||||
SpdxId = idBuilder.SbomId,
|
||||
Name = "software-sbom",
|
||||
RootElements = new[] { rootPackage.SpdxId }.ToImmutableArray(),
|
||||
Elements = rootElementIds,
|
||||
SbomTypes = new[] { "build" }.ToImmutableArray()
|
||||
};
|
||||
|
||||
var relationships = BuildRelationships(idBuilder, graph, rootPackage, packageIdMap);
|
||||
|
||||
var name = request.Image.ImageReference ?? request.Image.Repository ?? request.Image.ImageDigest;
|
||||
|
||||
return new SpdxDocument
|
||||
{
|
||||
DocumentNamespace = idBuilder.DocumentNamespace,
|
||||
Name = $"SBOM for {name}",
|
||||
CreationInfo = creationInfo,
|
||||
Sbom = sbom,
|
||||
Elements = packages.Cast<SpdxElement>().ToImmutableArray(),
|
||||
Relationships = relationships,
|
||||
ProfileConformance = options.ProfileConformance
|
||||
};
|
||||
}
|
||||
|
||||
private static ImmutableArray<SpdxRelationship> BuildRelationships(
|
||||
SpdxIdBuilder idBuilder,
|
||||
ComponentGraph graph,
|
||||
SpdxPackage rootPackage,
|
||||
IReadOnlyDictionary<string, string> packageIdMap)
|
||||
{
|
||||
var relationships = new List<SpdxRelationship>();
|
||||
|
||||
var documentId = idBuilder.DocumentNamespace;
|
||||
relationships.Add(new SpdxRelationship
|
||||
{
|
||||
SpdxId = idBuilder.CreateRelationshipId(documentId, "describes", rootPackage.SpdxId),
|
||||
FromElement = documentId,
|
||||
Type = SpdxRelationshipType.Describes,
|
||||
ToElements = ImmutableArray.Create(rootPackage.SpdxId)
|
||||
});
|
||||
|
||||
var dependencyTargets = new HashSet<string>(StringComparer.Ordinal);
|
||||
foreach (var component in graph.Components)
|
||||
{
|
||||
foreach (var dependencyKey in component.Dependencies)
|
||||
{
|
||||
if (packageIdMap.ContainsKey(dependencyKey))
|
||||
{
|
||||
dependencyTargets.Add(dependencyKey);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var rootDependencies = graph.Components
|
||||
.Where(component => !dependencyTargets.Contains(component.Identity.Key))
|
||||
.OrderBy(component => component.Identity.Key, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
foreach (var component in rootDependencies)
|
||||
{
|
||||
if (!packageIdMap.TryGetValue(component.Identity.Key, out var targetId))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
relationships.Add(new SpdxRelationship
|
||||
{
|
||||
SpdxId = idBuilder.CreateRelationshipId(rootPackage.SpdxId, "dependsOn", targetId),
|
||||
FromElement = rootPackage.SpdxId,
|
||||
Type = SpdxRelationshipType.DependsOn,
|
||||
ToElements = ImmutableArray.Create(targetId)
|
||||
});
|
||||
}
|
||||
|
||||
foreach (var component in graph.Components.OrderBy(component => component.Identity.Key, StringComparer.Ordinal))
|
||||
{
|
||||
if (!packageIdMap.TryGetValue(component.Identity.Key, out var fromId))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var deps = component.Dependencies
|
||||
.Where(packageIdMap.ContainsKey)
|
||||
.OrderBy(key => key, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
foreach (var depKey in deps)
|
||||
{
|
||||
var toId = packageIdMap[depKey];
|
||||
relationships.Add(new SpdxRelationship
|
||||
{
|
||||
SpdxId = idBuilder.CreateRelationshipId(fromId, "dependsOn", toId),
|
||||
FromElement = fromId,
|
||||
Type = SpdxRelationshipType.DependsOn,
|
||||
ToElements = ImmutableArray.Create(toId)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return relationships
|
||||
.OrderBy(rel => rel.FromElement, StringComparer.Ordinal)
|
||||
.ThenBy(rel => rel.Type)
|
||||
.ThenBy(rel => rel.ToElements.FirstOrDefault() ?? string.Empty, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
private static SpdxPackage BuildRootPackage(ImageArtifactDescriptor image, SpdxIdBuilder idBuilder)
|
||||
{
|
||||
var digest = image.ImageDigest;
|
||||
var digestParts = digest.Split(':', 2, StringSplitOptions.TrimEntries);
|
||||
var digestValue = digestParts.Length == 2 ? digestParts[1] : digest;
|
||||
|
||||
var checksums = ImmutableArray.Create(new SpdxChecksum
|
||||
{
|
||||
Algorithm = digestParts.Length == 2 ? digestParts[0].ToUpperInvariant() : "SHA256",
|
||||
Value = digestValue
|
||||
});
|
||||
|
||||
return new SpdxPackage
|
||||
{
|
||||
SpdxId = idBuilder.CreatePackageId($"image:{image.ImageDigest}"),
|
||||
Name = image.ImageReference ?? image.Repository ?? image.ImageDigest,
|
||||
Version = digestValue,
|
||||
PackageUrl = BuildImagePurl(image),
|
||||
DownloadLocation = "NOASSERTION",
|
||||
PrimaryPurpose = "container",
|
||||
Checksums = checksums
|
||||
};
|
||||
}
|
||||
|
||||
private static SpdxPackage BuildComponentPackage(
|
||||
AggregatedComponent component,
|
||||
SpdxIdBuilder idBuilder,
|
||||
SpdxLicenseList licenseList)
|
||||
{
|
||||
var packageUrl = !string.IsNullOrWhiteSpace(component.Identity.Purl)
|
||||
? component.Identity.Purl
|
||||
: (component.Identity.Key.StartsWith("pkg:", StringComparison.Ordinal) ? component.Identity.Key : null);
|
||||
|
||||
var declared = BuildLicenseExpression(component.Metadata?.Licenses, licenseList);
|
||||
|
||||
return new SpdxPackage
|
||||
{
|
||||
SpdxId = idBuilder.CreatePackageId(component.Identity.Key),
|
||||
Name = component.Identity.Name,
|
||||
Version = component.Identity.Version,
|
||||
PackageUrl = packageUrl,
|
||||
DownloadLocation = "NOASSERTION",
|
||||
PrimaryPurpose = MapPrimaryPurpose(component.Identity.ComponentType),
|
||||
DeclaredLicense = declared
|
||||
};
|
||||
}
|
||||
|
||||
private static SpdxLicenseExpression? BuildLicenseExpression(
|
||||
IReadOnlyList<string>? licenses,
|
||||
SpdxLicenseList licenseList)
|
||||
{
|
||||
if (licenses is null || licenses.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var expressions = new List<SpdxLicenseExpression>();
|
||||
foreach (var license in licenses)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(license))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (SpdxLicenseExpressionParser.TryParse(license, out var parsed, licenseList))
|
||||
{
|
||||
expressions.Add(parsed!);
|
||||
continue;
|
||||
}
|
||||
|
||||
expressions.Add(new SpdxSimpleLicense(ToLicenseRef(license)));
|
||||
}
|
||||
|
||||
if (expressions.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var current = expressions[0];
|
||||
for (var i = 1; i < expressions.Count; i++)
|
||||
{
|
||||
current = new SpdxDisjunctiveLicense(current, expressions[i]);
|
||||
}
|
||||
|
||||
return current;
|
||||
}
|
||||
|
||||
private static string ToLicenseRef(string license)
|
||||
{
|
||||
var normalized = new string(license
|
||||
.Trim()
|
||||
.Select(ch => char.IsLetterOrDigit(ch) || ch == '.' || ch == '-' ? ch : '-')
|
||||
.ToArray());
|
||||
|
||||
if (normalized.StartsWith("LicenseRef-", StringComparison.Ordinal))
|
||||
{
|
||||
return normalized;
|
||||
}
|
||||
|
||||
return $"LicenseRef-{normalized}";
|
||||
}
|
||||
|
||||
private static string? MapPrimaryPurpose(string? type)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(type))
|
||||
{
|
||||
return "library";
|
||||
}
|
||||
|
||||
return type.Trim().ToLowerInvariant() switch
|
||||
{
|
||||
"application" => "application",
|
||||
"framework" => "framework",
|
||||
"container" => "container",
|
||||
"operating-system" or "os" => "operatingSystem",
|
||||
"device" => "device",
|
||||
"firmware" => "firmware",
|
||||
"file" => "file",
|
||||
_ => "library"
|
||||
};
|
||||
}
|
||||
|
||||
private static string? BuildImagePurl(ImageArtifactDescriptor image)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(image.Repository))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var repo = image.Repository.Trim();
|
||||
var tag = string.IsNullOrWhiteSpace(image.Tag) ? null : image.Tag.Trim();
|
||||
var digest = image.ImageDigest.Trim();
|
||||
|
||||
var builder = new System.Text.StringBuilder("pkg:oci/");
|
||||
builder.Append(repo.Replace("/", "%2F", StringComparison.Ordinal));
|
||||
if (!string.IsNullOrWhiteSpace(tag))
|
||||
{
|
||||
builder.Append('@').Append(tag);
|
||||
}
|
||||
|
||||
builder.Append("?digest=").Append(Uri.EscapeDataString(digest));
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(image.Architecture))
|
||||
{
|
||||
builder.Append("&arch=").Append(Uri.EscapeDataString(image.Architecture.Trim()));
|
||||
}
|
||||
|
||||
return builder.ToString();
|
||||
}
|
||||
}
|
||||
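
For orientation, a minimal wiring sketch follows. It assumes the `SbomCompositionRequest` instance is produced elsewhere in the emit pipeline (its construction is not part of this diff), and the organization name is a placeholder; only options defined above are exercised.

```csharp
// Hedged sketch: SbomCompositionRequest comes from the existing composition pipeline;
// "Example Org" is a placeholder value, not a project convention.
public static SpdxArtifact ComposeInventorySpdx(ISpdxComposer composer, SbomCompositionRequest request)
{
    var options = new SpdxCompositionOptions
    {
        CreatorOrganization = "Example Org",
        IncludeTagValue = true,                              // also emit the SPDX 2.3 tag:value rendering
        LicenseListVersion = SpdxLicenseListVersion.V3_21
    };

    // JsonBytes/JsonSha256 feed the artifact descriptors added in ScannerArtifactPackageBuilder below.
    return composer.Compose(request, options);
}
```
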
@@ -88,6 +88,17 @@ public sealed class ScannerArtifactPackageBuilder
            descriptors.Add(CreateDescriptor(ArtifactDocumentType.ImageBom, ArtifactDocumentFormat.CycloneDxProtobuf, composition.Usage.ProtobufMediaType, composition.Usage.ProtobufBytes, composition.Usage.ProtobufSha256, SbomView.Usage));
        }

        if (composition.SpdxInventory is not null)
        {
            descriptors.Add(CreateDescriptor(
                ArtifactDocumentType.ImageBom,
                ArtifactDocumentFormat.SpdxJson,
                composition.SpdxInventory.JsonMediaType,
                composition.SpdxInventory.JsonBytes,
                composition.SpdxInventory.JsonSha256,
                SbomView.Inventory));
        }

        descriptors.Add(CreateDescriptor(ArtifactDocumentType.Index, ArtifactDocumentFormat.BomIndex, "application/vnd.stellaops.bom-index.v1+binary", bomIndex.Bytes, bomIndex.Sha256, null));

        descriptors.Add(CreateDescriptor(
@@ -0,0 +1,196 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.Linq;
using CycloneDX.Models;
using StellaOps.Scanner.Core.Utility;
using StellaOps.Scanner.Emit.Spdx.Models;

namespace StellaOps.Scanner.Emit.Spdx.Conversion;

public sealed record SpdxConversionOptions
{
    public string NamespaceBase { get; init; } = "https://stellaops.io/spdx";
}

public static class SpdxCycloneDxConverter
{
    public static SpdxDocument FromCycloneDx(Bom bom, SpdxConversionOptions? options = null)
    {
        ArgumentNullException.ThrowIfNull(bom);
        options ??= new SpdxConversionOptions();

        var basis = bom.SerialNumber ?? bom.Metadata?.Component?.BomRef ?? "cyclonedx";
        var namespaceHash = ScannerIdentifiers.CreateDeterministicHash(basis);
        var creationInfo = new SpdxCreationInfo
        {
            Created = bom.Metadata?.Timestamp is { } timestamp
                ? new DateTimeOffset(timestamp, TimeSpan.Zero)
                : ScannerTimestamps.UtcNow(),
            Creators = ImmutableArray.Create("Tool: CycloneDX")
        };

        var idBuilder = new SpdxIdBuilder(options.NamespaceBase, namespaceHash);
        var documentNamespace = idBuilder.DocumentNamespace;

        var rootComponent = bom.Metadata?.Component;
        var rootPackage = rootComponent is null
            ? new SpdxPackage
            {
                SpdxId = idBuilder.CreatePackageId("root"),
                Name = "root",
                DownloadLocation = "NOASSERTION",
                PrimaryPurpose = "application"
            }
            : MapComponent(rootComponent, idBuilder);

        var packages = new List<SpdxPackage> { rootPackage };
        if (bom.Components is not null)
        {
            packages.AddRange(bom.Components.Select(component => MapComponent(component, idBuilder)));
        }

        var sbom = new SpdxSbom
        {
            SpdxId = idBuilder.SbomId,
            Name = "software-sbom",
            RootElements = ImmutableArray.Create(rootPackage.SpdxId),
            Elements = packages.Select(package => package.SpdxId).OrderBy(id => id, StringComparer.Ordinal).ToImmutableArray(),
            SbomTypes = ImmutableArray.Create("build")
        };

        var relationships = BuildRelationshipsFromCycloneDx(bom, idBuilder, packages);

        return new SpdxDocument
        {
            DocumentNamespace = documentNamespace,
            Name = "SPDX converted from CycloneDX",
            CreationInfo = creationInfo,
            Sbom = sbom,
            Elements = packages.Cast<SpdxElement>().ToImmutableArray(),
            Relationships = relationships
        };
    }

    public static Bom ToCycloneDx(SpdxDocument document)
    {
        ArgumentNullException.ThrowIfNull(document);

        var rootId = document.Sbom.RootElements.FirstOrDefault();
        var packages = document.Elements.OfType<SpdxPackage>().ToList();
        var rootPackage = packages.FirstOrDefault(pkg => string.Equals(pkg.SpdxId, rootId, StringComparison.Ordinal))
            ?? packages.FirstOrDefault();

        var bom = new Bom
        {
            SpecVersion = SpecificationVersion.v1_7,
            Version = 1,
            Metadata = new Metadata
            {
                Timestamp = document.CreationInfo.Created.UtcDateTime,
                Component = rootPackage is null ? null : MapPackage(rootPackage)
            }
        };

        bom.Components = packages
            .Where(pkg => rootPackage is null || !string.Equals(pkg.SpdxId, rootPackage.SpdxId, StringComparison.Ordinal))
            .Select(MapPackage)
            .ToList();

        bom.Dependencies = BuildDependenciesFromSpdx(document, packages);

        return bom;
    }

    private static SpdxPackage MapComponent(Component component, SpdxIdBuilder idBuilder)
    {
        return new SpdxPackage
        {
            SpdxId = idBuilder.CreatePackageId(component.BomRef ?? component.Name ?? "component"),
            Name = component.Name ?? component.BomRef ?? "component",
            Version = component.Version,
            PackageUrl = component.Purl,
            DownloadLocation = "NOASSERTION",
            PrimaryPurpose = component.Type.ToString().Replace("_", "-", StringComparison.Ordinal).ToLowerInvariant()
        };
    }

    private static Component MapPackage(SpdxPackage package)
    {
        return new Component
        {
            BomRef = package.SpdxId,
            Name = package.Name ?? package.SpdxId,
            Version = package.Version,
            Purl = package.PackageUrl,
            Type = Component.Classification.Library
        };
    }

    private static ImmutableArray<SpdxRelationship> BuildRelationshipsFromCycloneDx(
        Bom bom,
        SpdxIdBuilder idBuilder,
        IReadOnlyList<SpdxPackage> packages)
    {
        var packageMap = packages.ToDictionary(pkg => pkg.SpdxId, StringComparer.Ordinal);
        var relationships = new List<SpdxRelationship>();

        if (bom.Dependencies is null)
        {
            return ImmutableArray<SpdxRelationship>.Empty;
        }

        foreach (var dependency in bom.Dependencies)
        {
            if (dependency.Dependencies is null || dependency.Ref is null)
            {
                continue;
            }

            foreach (var target in dependency.Dependencies.Where(dep => dep.Ref is not null))
            {
                relationships.Add(new SpdxRelationship
                {
                    SpdxId = idBuilder.CreateRelationshipId(dependency.Ref, "dependsOn", target.Ref!),
                    FromElement = dependency.Ref,
                    Type = SpdxRelationshipType.DependsOn,
                    ToElements = ImmutableArray.Create(target.Ref!)
                });
            }
        }

        return relationships.ToImmutableArray();
    }

    private static List<Dependency>? BuildDependenciesFromSpdx(
        SpdxDocument document,
        IReadOnlyList<SpdxPackage> packages)
    {
        var dependencies = new List<Dependency>();
        var packageIds = packages.Select(pkg => pkg.SpdxId).ToHashSet(StringComparer.Ordinal);

        foreach (var relationship in document.Relationships
            .Where(rel => rel.Type == SpdxRelationshipType.DependsOn))
        {
            if (!packageIds.Contains(relationship.FromElement))
            {
                continue;
            }

            var targets = relationship.ToElements.Where(packageIds.Contains).ToList();
            if (targets.Count == 0)
            {
                continue;
            }

            dependencies.Add(new Dependency
            {
                Ref = relationship.FromElement,
                Dependencies = targets.Select(target => new Dependency { Ref = target }).ToList()
            });
        }

        return dependencies.Count == 0 ? null : dependencies;
    }
}
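
A hedged round-trip sketch using only the two entry points above; the CycloneDX `Bom` here is built inline for illustration rather than loaded from a real scan.

```csharp
// Illustration only: a tiny in-memory BOM, converted to SPDX and back.
var bom = new CycloneDX.Models.Bom
{
    Metadata = new CycloneDX.Models.Metadata
    {
        Component = new CycloneDX.Models.Component
        {
            BomRef = "pkg:npm/left-pad@1.3.0",
            Name = "left-pad",
            Version = "1.3.0",
            Purl = "pkg:npm/left-pad@1.3.0",
            Type = CycloneDX.Models.Component.Classification.Library
        }
    }
};

var spdx = SpdxCycloneDxConverter.FromCycloneDx(bom);
var roundTripped = SpdxCycloneDxConverter.ToCycloneDx(spdx);
```
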
@@ -0,0 +1,35 @@
namespace StellaOps.Scanner.Emit.Spdx.Models;

public abstract record SpdxLicenseExpression;

public sealed record SpdxSimpleLicense(string LicenseId) : SpdxLicenseExpression;

public sealed record SpdxConjunctiveLicense(
    SpdxLicenseExpression Left,
    SpdxLicenseExpression Right) : SpdxLicenseExpression;

public sealed record SpdxDisjunctiveLicense(
    SpdxLicenseExpression Left,
    SpdxLicenseExpression Right) : SpdxLicenseExpression;

public sealed record SpdxWithException(
    SpdxLicenseExpression License,
    string Exception) : SpdxLicenseExpression;

public sealed record SpdxNoneLicense : SpdxLicenseExpression
{
    public static SpdxNoneLicense Instance { get; } = new();

    private SpdxNoneLicense()
    {
    }
}

public sealed record SpdxNoAssertionLicense : SpdxLicenseExpression
{
    public static SpdxNoAssertionLicense Instance { get; } = new();

    private SpdxNoAssertionLicense()
    {
    }
}
@@ -0,0 +1,406 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Text.Json;

namespace StellaOps.Scanner.Emit.Spdx.Models;

public enum SpdxLicenseListVersion
{
    V3_21
}

public sealed record SpdxLicenseList
{
    public required string Version { get; init; }

    public required ImmutableHashSet<string> LicenseIds { get; init; }

    public required ImmutableHashSet<string> ExceptionIds { get; init; }
}

public static class SpdxLicenseListProvider
{
    private const string LicenseResource = "StellaOps.Scanner.Emit.Spdx.Resources.spdx-license-list-3.21.json";
    private const string ExceptionResource = "StellaOps.Scanner.Emit.Spdx.Resources.spdx-license-exceptions-3.21.json";

    private static readonly Lazy<SpdxLicenseList> LicenseListV321 = new(LoadV321);

    public static SpdxLicenseList Get(SpdxLicenseListVersion version)
        => version switch
        {
            SpdxLicenseListVersion.V3_21 => LicenseListV321.Value,
            _ => LicenseListV321.Value,
        };

    private static SpdxLicenseList LoadV321()
    {
        var assembly = Assembly.GetExecutingAssembly();
        var licenseIds = LoadLicenseIds(assembly, LicenseResource, "licenses", "licenseId");
        var exceptionIds = LoadLicenseIds(assembly, ExceptionResource, "exceptions", "licenseExceptionId");

        return new SpdxLicenseList
        {
            Version = "3.21",
            LicenseIds = licenseIds,
            ExceptionIds = exceptionIds,
        };
    }

    private static ImmutableHashSet<string> LoadLicenseIds(
        Assembly assembly,
        string resourceName,
        string arrayProperty,
        string idProperty)
    {
        using var stream = assembly.GetManifestResourceStream(resourceName)
            ?? throw new InvalidOperationException($"Missing embedded resource: {resourceName}");
        using var document = JsonDocument.Parse(stream);

        if (!document.RootElement.TryGetProperty(arrayProperty, out var array) ||
            array.ValueKind != JsonValueKind.Array)
        {
            return ImmutableHashSet<string>.Empty;
        }

        var builder = ImmutableHashSet.CreateBuilder<string>(StringComparer.Ordinal);
        foreach (var entry in array.EnumerateArray())
        {
            if (entry.TryGetProperty(idProperty, out var idElement) &&
                idElement.ValueKind == JsonValueKind.String &&
                idElement.GetString() is { Length: > 0 } id)
            {
                builder.Add(id);
            }
        }

        return builder.ToImmutable();
    }
}

public static class SpdxLicenseExpressionParser
{
    public static bool TryParse(string expression, out SpdxLicenseExpression? result, SpdxLicenseList? licenseList = null)
    {
        result = null;
        if (string.IsNullOrWhiteSpace(expression))
        {
            return false;
        }

        try
        {
            result = Parse(expression, licenseList);
            return true;
        }
        catch (FormatException)
        {
            return false;
        }
    }

    public static SpdxLicenseExpression Parse(string expression, SpdxLicenseList? licenseList = null)
    {
        if (string.IsNullOrWhiteSpace(expression))
        {
            throw new FormatException("License expression is empty.");
        }

        var tokens = Tokenize(expression);
        var parser = new Parser(tokens);
        var parsed = parser.ParseExpression();

        if (parser.HasMoreTokens)
        {
            throw new FormatException("Unexpected trailing tokens in license expression.");
        }

        if (licenseList is not null)
        {
            Validate(parsed, licenseList);
        }

        return parsed;
    }

    private static void Validate(SpdxLicenseExpression expression, SpdxLicenseList list)
    {
        switch (expression)
        {
            case SpdxSimpleLicense simple:
                if (IsSpecial(simple.LicenseId) || IsLicenseRef(simple.LicenseId))
                {
                    return;
                }

                if (!list.LicenseIds.Contains(simple.LicenseId))
                {
                    throw new FormatException($"Unknown SPDX license identifier: {simple.LicenseId}");
                }
                break;
            case SpdxWithException withException:
                Validate(withException.License, list);
                if (!list.ExceptionIds.Contains(withException.Exception))
                {
                    throw new FormatException($"Unknown SPDX license exception: {withException.Exception}");
                }
                break;
            case SpdxConjunctiveLicense conjunctive:
                Validate(conjunctive.Left, list);
                Validate(conjunctive.Right, list);
                break;
            case SpdxDisjunctiveLicense disjunctive:
                Validate(disjunctive.Left, list);
                Validate(disjunctive.Right, list);
                break;
            case SpdxNoneLicense:
            case SpdxNoAssertionLicense:
                break;
            default:
                throw new FormatException("Unsupported SPDX license expression node.");
        }
    }

    private static bool IsSpecial(string licenseId)
        => string.Equals(licenseId, "NONE", StringComparison.Ordinal)
            || string.Equals(licenseId, "NOASSERTION", StringComparison.Ordinal);

    private static bool IsLicenseRef(string licenseId)
        => licenseId.StartsWith("LicenseRef-", StringComparison.Ordinal)
            || licenseId.StartsWith("DocumentRef-", StringComparison.Ordinal);

    private static List<Token> Tokenize(string expression)
    {
        var tokens = new List<Token>();
        var buffer = new StringBuilder();

        void Flush()
        {
            if (buffer.Length == 0)
            {
                return;
            }

            var value = buffer.ToString();
            buffer.Clear();
            tokens.Add(Token.From(value));
        }

        foreach (var ch in expression)
        {
            switch (ch)
            {
                case '(':
                    Flush();
                    tokens.Add(new Token(TokenType.OpenParen, "("));
                    break;
                case ')':
                    Flush();
                    tokens.Add(new Token(TokenType.CloseParen, ")"));
                    break;
                default:
                    if (char.IsWhiteSpace(ch))
                    {
                        Flush();
                    }
                    else
                    {
                        buffer.Append(ch);
                    }
                    break;
            }
        }

        Flush();
        return tokens;
    }

    private sealed class Parser
    {
        private readonly IReadOnlyList<Token> _tokens;
        private int _index;

        public Parser(IReadOnlyList<Token> tokens)
        {
            _tokens = tokens;
        }

        public bool HasMoreTokens => _index < _tokens.Count;

        public SpdxLicenseExpression ParseExpression()
        {
            var left = ParseWith();

            // Fold AND/OR operands left-associatively; the same `op` local is reused by both
            // TryMatch calls so the matched operator token is always assigned before it is read.
            while (TryMatch(TokenType.And, out var op) || TryMatch(TokenType.Or, out op))
            {
                var right = ParseWith();
                left = op!.Type == TokenType.And
                    ? new SpdxConjunctiveLicense(left, right)
                    : new SpdxDisjunctiveLicense(left, right);
            }

            return left;
        }

        private SpdxLicenseExpression ParseWith()
        {
            var left = ParsePrimary();
            if (TryMatch(TokenType.With, out _))
            {
                var exception = Expect(TokenType.Identifier);
                left = new SpdxWithException(left, exception.Value);
            }

            return left;
        }

        private SpdxLicenseExpression ParsePrimary()
        {
            if (TryMatch(TokenType.OpenParen, out _))
            {
                var inner = ParseExpression();
                Expect(TokenType.CloseParen);
                return inner;
            }

            var token = Expect(TokenType.Identifier);
            if (string.Equals(token.Value, "NONE", StringComparison.OrdinalIgnoreCase))
            {
                return SpdxNoneLicense.Instance;
            }

            if (string.Equals(token.Value, "NOASSERTION", StringComparison.OrdinalIgnoreCase))
            {
                return SpdxNoAssertionLicense.Instance;
            }

            return new SpdxSimpleLicense(token.Value);
        }

        private bool TryMatch(TokenType type, out Token? token)
        {
            token = null;
            if (_index >= _tokens.Count)
            {
                return false;
            }

            var candidate = _tokens[_index];
            if (candidate.Type != type)
            {
                return false;
            }

            _index++;
            token = candidate;
            return true;
        }

        private Token Expect(TokenType type)
        {
            if (_index >= _tokens.Count)
            {
                throw new FormatException($"Expected {type} but reached end of expression.");
            }

            var token = _tokens[_index++];
            if (token.Type != type)
            {
                throw new FormatException($"Expected {type} but found {token.Type}.");
            }

            return token;
        }
    }

    private sealed record Token(TokenType Type, string Value)
    {
        public static Token From(string value)
        {
            var normalized = value.Trim();
            if (string.Equals(normalized, "AND", StringComparison.OrdinalIgnoreCase))
            {
                return new Token(TokenType.And, "AND");
            }

            if (string.Equals(normalized, "OR", StringComparison.OrdinalIgnoreCase))
            {
                return new Token(TokenType.Or, "OR");
            }

            if (string.Equals(normalized, "WITH", StringComparison.OrdinalIgnoreCase))
            {
                return new Token(TokenType.With, "WITH");
            }

            return new Token(TokenType.Identifier, normalized);
        }
    }

    private enum TokenType
    {
        Identifier,
        And,
        Or,
        With,
        OpenParen,
        CloseParen
    }
}

public static class SpdxLicenseExpressionRenderer
{
    public static string Render(SpdxLicenseExpression expression)
    {
        return RenderInternal(expression, parentOperator: null);
    }

    private static string RenderInternal(SpdxLicenseExpression expression, SpdxBinaryOperator? parentOperator)
    {
        switch (expression)
        {
            case SpdxSimpleLicense simple:
                return simple.LicenseId;
            case SpdxNoneLicense:
                return "NONE";
            case SpdxNoAssertionLicense:
                return "NOASSERTION";
            case SpdxWithException withException:
                var licenseText = RenderInternal(withException.License, parentOperator: null);
                return $"{licenseText} WITH {withException.Exception}";
            case SpdxConjunctiveLicense conjunctive:
                return RenderBinary(conjunctive.Left, conjunctive.Right, "AND", SpdxBinaryOperator.And, parentOperator);
            case SpdxDisjunctiveLicense disjunctive:
                return RenderBinary(disjunctive.Left, disjunctive.Right, "OR", SpdxBinaryOperator.Or, parentOperator);
            default:
                throw new InvalidOperationException("Unsupported SPDX license expression node.");
        }
    }

    private static string RenderBinary(
        SpdxLicenseExpression left,
        SpdxLicenseExpression right,
        string op,
        SpdxBinaryOperator current,
        SpdxBinaryOperator? parent)
    {
        var leftText = RenderInternal(left, current);
        var rightText = RenderInternal(right, current);
        var text = $"{leftText} {op} {rightText}";

        if (parent.HasValue && parent.Value != current)
        {
            return $"({text})";
        }

        return text;
    }

    private enum SpdxBinaryOperator
    {
        And,
        Or
    }
}
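
A short sketch of the parser and renderer in isolation; the expression strings are arbitrary examples, and validation against the embedded 3.21 list only happens when a license list is passed in.

```csharp
// Parse without validation, then render back out; nesting is re-parenthesized as needed.
var expression = SpdxLicenseExpressionParser.Parse("(MIT OR Apache-2.0) AND GPL-2.0-only WITH Classpath-exception-2.0");
Console.WriteLine(SpdxLicenseExpressionRenderer.Render(expression));

// With validation: unknown identifiers raise FormatException, so TryParse is the safer entry point.
var licenseList = SpdxLicenseListProvider.Get(SpdxLicenseListVersion.V3_21);
if (SpdxLicenseExpressionParser.TryParse("MIT OR MadeUp-1.0", out var validated, licenseList))
{
    Console.WriteLine(SpdxLicenseExpressionRenderer.Render(validated!));
}
```
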
@@ -0,0 +1,204 @@
using System;
using System.Collections.Immutable;

namespace StellaOps.Scanner.Emit.Spdx.Models;

public static class SpdxDefaults
{
    public const string SpecVersion = "3.0.1";
    public const string JsonLdContext = "https://spdx.org/rdf/3.0.1/spdx-context.jsonld";
    public const string DocumentType = "SpdxDocument";
    public const string SbomType = "software_Sbom";
    public const string PackageType = "software_Package";
    public const string FileType = "software_File";
    public const string SnippetType = "software_Snippet";
    public const string RelationshipType = "Relationship";
}

public sealed record SpdxDocument
{
    public required string DocumentNamespace { get; init; }

    public required string Name { get; init; }

    public required SpdxCreationInfo CreationInfo { get; init; }

    public required SpdxSbom Sbom { get; init; }

    public ImmutableArray<SpdxElement> Elements { get; init; } = ImmutableArray<SpdxElement>.Empty;

    public ImmutableArray<SpdxRelationship> Relationships { get; init; } = ImmutableArray<SpdxRelationship>.Empty;

    public ImmutableArray<SpdxAnnotation> Annotations { get; init; } = ImmutableArray<SpdxAnnotation>.Empty;

    public ImmutableArray<string> ProfileConformance { get; init; } = ImmutableArray<string>.Empty;

    public string SpecVersion { get; init; } = SpdxDefaults.SpecVersion;
}

public sealed record SpdxCreationInfo
{
    public required DateTimeOffset Created { get; init; }

    public ImmutableArray<string> Creators { get; init; } = ImmutableArray<string>.Empty;

    public ImmutableArray<string> CreatedUsing { get; init; } = ImmutableArray<string>.Empty;

    public string SpecVersion { get; init; } = SpdxDefaults.SpecVersion;
}

public abstract record SpdxElement
{
    public required string SpdxId { get; init; }

    public string? Name { get; init; }

    public string? Summary { get; init; }

    public string? Description { get; init; }

    public string? Comment { get; init; }
}

public sealed record SpdxSbom : SpdxElement
{
    public ImmutableArray<string> RootElements { get; init; } = ImmutableArray<string>.Empty;

    public ImmutableArray<string> Elements { get; init; } = ImmutableArray<string>.Empty;

    public ImmutableArray<string> SbomTypes { get; init; } = ImmutableArray<string>.Empty;
}

public sealed record SpdxPackage : SpdxElement
{
    public string? Version { get; init; }

    public string? PackageUrl { get; init; }

    public string? DownloadLocation { get; init; }

    public string? PrimaryPurpose { get; init; }

    public SpdxLicenseExpression? DeclaredLicense { get; init; }

    public SpdxLicenseExpression? ConcludedLicense { get; init; }

    public string? CopyrightText { get; init; }

    public ImmutableArray<SpdxChecksum> Checksums { get; init; } = ImmutableArray<SpdxChecksum>.Empty;

    public ImmutableArray<SpdxExternalRef> ExternalRefs { get; init; } = ImmutableArray<SpdxExternalRef>.Empty;

    public SpdxPackageVerificationCode? VerificationCode { get; init; }
}

public sealed record SpdxFile : SpdxElement
{
    public string? FileName { get; init; }

    public SpdxLicenseExpression? ConcludedLicense { get; init; }

    public string? CopyrightText { get; init; }

    public ImmutableArray<SpdxChecksum> Checksums { get; init; } = ImmutableArray<SpdxChecksum>.Empty;
}

public sealed record SpdxSnippet : SpdxElement
{
    public required string FromFileSpdxId { get; init; }

    public long? ByteRangeStart { get; init; }

    public long? ByteRangeEnd { get; init; }

    public long? LineRangeStart { get; init; }

    public long? LineRangeEnd { get; init; }
}

public sealed record SpdxRelationship
{
    public required string SpdxId { get; init; }

    public required string FromElement { get; init; }

    public required SpdxRelationshipType Type { get; init; }

    public required ImmutableArray<string> ToElements { get; init; }
}

public enum SpdxRelationshipType
{
    Describes,
    DependsOn,
    Contains,
    ContainedBy,
    Other
}

public sealed record SpdxAnnotation
{
    public required string SpdxId { get; init; }

    public required string Annotator { get; init; }

    public required DateTimeOffset AnnotatedAt { get; init; }

    public required string AnnotationType { get; init; }

    public required string Comment { get; init; }
}

public sealed record SpdxChecksum
{
    public required string Algorithm { get; init; }

    public required string Value { get; init; }
}

public sealed record SpdxExternalRef
{
    public required string Category { get; init; }

    public required string Type { get; init; }

    public required string Locator { get; init; }
}

public sealed record SpdxPackageVerificationCode
{
    public required string Value { get; init; }

    public ImmutableArray<string> ExcludedFiles { get; init; } = ImmutableArray<string>.Empty;
}

public sealed record SpdxExtractedLicense
{
    public required string LicenseId { get; init; }

    public string? Name { get; init; }

    public string? Text { get; init; }

    public ImmutableArray<string> References { get; init; } = ImmutableArray<string>.Empty;
}

public sealed record SpdxVulnerability : SpdxElement
{
    public string? Locator { get; init; }

    public string? StatusNotes { get; init; }

    public DateTimeOffset? PublishedTime { get; init; }

    public DateTimeOffset? ModifiedTime { get; init; }
}

public sealed record SpdxVulnAssessment : SpdxElement
{
    public string? Severity { get; init; }

    public string? VectorString { get; init; }

    public string? Score { get; init; }
}
@@ -0,0 +1,643 @@
{
  "licenseListVersion": "3.21",
  "exceptions": [
    {
      "reference": "./389-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./389-exception.html",
      "referenceNumber": 48,
      "name": "389 Directory Server Exception",
      "licenseExceptionId": "389-exception",
      "seeAlso": [
        "http://directory.fedoraproject.org/wiki/GPL_Exception_License_Text",
        "https://web.archive.org/web/20080828121337/http://directory.fedoraproject.org/wiki/GPL_Exception_License_Text"
      ]
    },
    {
      "reference": "./Asterisk-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Asterisk-exception.html",
      "referenceNumber": 33,
      "name": "Asterisk exception",
      "licenseExceptionId": "Asterisk-exception",
      "seeAlso": [
        "https://github.com/asterisk/libpri/blob/7f91151e6bd10957c746c031c1f4a030e8146e9a/pri.c#L22",
        "https://github.com/asterisk/libss7/blob/03e81bcd0d28ff25d4c77c78351ddadc82ff5c3f/ss7.c#L24"
      ]
    },
    {
      "reference": "./Autoconf-exception-2.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Autoconf-exception-2.0.html",
      "referenceNumber": 42,
      "name": "Autoconf exception 2.0",
      "licenseExceptionId": "Autoconf-exception-2.0",
      "seeAlso": [
        "http://ac-archive.sourceforge.net/doc/copyright.html",
        "http://ftp.gnu.org/gnu/autoconf/autoconf-2.59.tar.gz"
      ]
    },
    {
      "reference": "./Autoconf-exception-3.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Autoconf-exception-3.0.html",
      "referenceNumber": 41,
      "name": "Autoconf exception 3.0",
      "licenseExceptionId": "Autoconf-exception-3.0",
      "seeAlso": [
        "http://www.gnu.org/licenses/autoconf-exception-3.0.html"
      ]
    },
    {
      "reference": "./Autoconf-exception-generic.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Autoconf-exception-generic.html",
      "referenceNumber": 4,
      "name": "Autoconf generic exception",
      "licenseExceptionId": "Autoconf-exception-generic",
      "seeAlso": [
        "https://launchpad.net/ubuntu/precise/+source/xmltooling/+copyright",
        "https://tracker.debian.org/media/packages/s/sipwitch/copyright-1.9.15-3",
        "https://opensource.apple.com/source/launchd/launchd-258.1/launchd/compile.auto.html"
      ]
    },
    {
      "reference": "./Autoconf-exception-macro.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Autoconf-exception-macro.html",
      "referenceNumber": 19,
      "name": "Autoconf macro exception",
      "licenseExceptionId": "Autoconf-exception-macro",
      "seeAlso": [
        "https://github.com/freedesktop/xorg-macros/blob/39f07f7db58ebbf3dcb64a2bf9098ed5cf3d1223/xorg-macros.m4.in",
        "https://www.gnu.org/software/autoconf-archive/ax_pthread.html",
        "https://launchpad.net/ubuntu/precise/+source/xmltooling/+copyright"
      ]
    },
    {
      "reference": "./Bison-exception-2.2.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Bison-exception-2.2.html",
      "referenceNumber": 11,
      "name": "Bison exception 2.2",
      "licenseExceptionId": "Bison-exception-2.2",
      "seeAlso": [
        "http://git.savannah.gnu.org/cgit/bison.git/tree/data/yacc.c?id\u003d193d7c7054ba7197b0789e14965b739162319b5e#n141"
      ]
    },
    {
      "reference": "./Bootloader-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Bootloader-exception.html",
      "referenceNumber": 50,
      "name": "Bootloader Distribution Exception",
      "licenseExceptionId": "Bootloader-exception",
      "seeAlso": [
        "https://github.com/pyinstaller/pyinstaller/blob/develop/COPYING.txt"
      ]
    },
    {
      "reference": "./Classpath-exception-2.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Classpath-exception-2.0.html",
      "referenceNumber": 36,
      "name": "Classpath exception 2.0",
      "licenseExceptionId": "Classpath-exception-2.0",
      "seeAlso": [
        "http://www.gnu.org/software/classpath/license.html",
        "https://fedoraproject.org/wiki/Licensing/GPL_Classpath_Exception"
      ]
    },
    {
      "reference": "./CLISP-exception-2.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./CLISP-exception-2.0.html",
      "referenceNumber": 9,
      "name": "CLISP exception 2.0",
      "licenseExceptionId": "CLISP-exception-2.0",
      "seeAlso": [
        "http://sourceforge.net/p/clisp/clisp/ci/default/tree/COPYRIGHT"
      ]
    },
    {
      "reference": "./cryptsetup-OpenSSL-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./cryptsetup-OpenSSL-exception.html",
      "referenceNumber": 39,
      "name": "cryptsetup OpenSSL exception",
      "licenseExceptionId": "cryptsetup-OpenSSL-exception",
      "seeAlso": [
        "https://gitlab.com/cryptsetup/cryptsetup/-/blob/main/COPYING",
        "https://gitlab.nic.cz/datovka/datovka/-/blob/develop/COPYING",
        "https://github.com/nbs-system/naxsi/blob/951123ad456bdf5ac94e8d8819342fe3d49bc002/naxsi_src/naxsi_raw.c",
        "http://web.mit.edu/jgross/arch/amd64_deb60/bin/mosh"
      ]
    },
    {
      "reference": "./DigiRule-FOSS-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./DigiRule-FOSS-exception.html",
      "referenceNumber": 20,
      "name": "DigiRule FOSS License Exception",
      "licenseExceptionId": "DigiRule-FOSS-exception",
      "seeAlso": [
        "http://www.digirulesolutions.com/drupal/foss"
      ]
    },
    {
      "reference": "./eCos-exception-2.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./eCos-exception-2.0.html",
      "referenceNumber": 38,
      "name": "eCos exception 2.0",
      "licenseExceptionId": "eCos-exception-2.0",
      "seeAlso": [
        "http://ecos.sourceware.org/license-overview.html"
      ]
    },
    {
      "reference": "./Fawkes-Runtime-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Fawkes-Runtime-exception.html",
      "referenceNumber": 8,
      "name": "Fawkes Runtime Exception",
      "licenseExceptionId": "Fawkes-Runtime-exception",
      "seeAlso": [
        "http://www.fawkesrobotics.org/about/license/"
      ]
    },
    {
      "reference": "./FLTK-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./FLTK-exception.html",
      "referenceNumber": 18,
      "name": "FLTK exception",
      "licenseExceptionId": "FLTK-exception",
      "seeAlso": [
        "http://www.fltk.org/COPYING.php"
      ]
    },
    {
      "reference": "./Font-exception-2.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Font-exception-2.0.html",
      "referenceNumber": 7,
      "name": "Font exception 2.0",
      "licenseExceptionId": "Font-exception-2.0",
      "seeAlso": [
        "http://www.gnu.org/licenses/gpl-faq.html#FontException"
      ]
    },
    {
      "reference": "./freertos-exception-2.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./freertos-exception-2.0.html",
      "referenceNumber": 47,
      "name": "FreeRTOS Exception 2.0",
      "licenseExceptionId": "freertos-exception-2.0",
      "seeAlso": [
        "https://web.archive.org/web/20060809182744/http://www.freertos.org/a00114.html"
      ]
    },
    {
      "reference": "./GCC-exception-2.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./GCC-exception-2.0.html",
      "referenceNumber": 54,
      "name": "GCC Runtime Library exception 2.0",
      "licenseExceptionId": "GCC-exception-2.0",
      "seeAlso": [
        "https://gcc.gnu.org/git/?p\u003dgcc.git;a\u003dblob;f\u003dgcc/libgcc1.c;h\u003d762f5143fc6eed57b6797c82710f3538aa52b40b;hb\u003dcb143a3ce4fb417c68f5fa2691a1b1b1053dfba9#l10"
      ]
    },
    {
      "reference": "./GCC-exception-3.1.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./GCC-exception-3.1.html",
      "referenceNumber": 27,
      "name": "GCC Runtime Library exception 3.1",
      "licenseExceptionId": "GCC-exception-3.1",
      "seeAlso": [
        "http://www.gnu.org/licenses/gcc-exception-3.1.html"
      ]
    },
    {
      "reference": "./GNAT-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./GNAT-exception.html",
      "referenceNumber": 13,
      "name": "GNAT exception",
      "licenseExceptionId": "GNAT-exception",
      "seeAlso": [
        "https://github.com/AdaCore/florist/blob/master/libsrc/posix-configurable_file_limits.adb"
      ]
    },
    {
      "reference": "./gnu-javamail-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./gnu-javamail-exception.html",
      "referenceNumber": 34,
      "name": "GNU JavaMail exception",
      "licenseExceptionId": "gnu-javamail-exception",
      "seeAlso": [
        "http://www.gnu.org/software/classpathx/javamail/javamail.html"
      ]
    },
    {
      "reference": "./GPL-3.0-interface-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./GPL-3.0-interface-exception.html",
      "referenceNumber": 21,
      "name": "GPL-3.0 Interface Exception",
      "licenseExceptionId": "GPL-3.0-interface-exception",
      "seeAlso": [
        "https://www.gnu.org/licenses/gpl-faq.en.html#LinkingOverControlledInterface"
      ]
    },
    {
      "reference": "./GPL-3.0-linking-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./GPL-3.0-linking-exception.html",
      "referenceNumber": 1,
      "name": "GPL-3.0 Linking Exception",
      "licenseExceptionId": "GPL-3.0-linking-exception",
      "seeAlso": [
        "https://www.gnu.org/licenses/gpl-faq.en.html#GPLIncompatibleLibs"
      ]
    },
    {
      "reference": "./GPL-3.0-linking-source-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./GPL-3.0-linking-source-exception.html",
      "referenceNumber": 37,
      "name": "GPL-3.0 Linking Exception (with Corresponding Source)",
      "licenseExceptionId": "GPL-3.0-linking-source-exception",
      "seeAlso": [
        "https://www.gnu.org/licenses/gpl-faq.en.html#GPLIncompatibleLibs",
        "https://github.com/mirror/wget/blob/master/src/http.c#L20"
      ]
    },
    {
      "reference": "./GPL-CC-1.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./GPL-CC-1.0.html",
      "referenceNumber": 52,
      "name": "GPL Cooperation Commitment 1.0",
      "licenseExceptionId": "GPL-CC-1.0",
      "seeAlso": [
        "https://github.com/gplcc/gplcc/blob/master/Project/COMMITMENT",
        "https://gplcc.github.io/gplcc/Project/README-PROJECT.html"
      ]
    },
    {
      "reference": "./GStreamer-exception-2005.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./GStreamer-exception-2005.html",
      "referenceNumber": 35,
      "name": "GStreamer Exception (2005)",
      "licenseExceptionId": "GStreamer-exception-2005",
      "seeAlso": [
        "https://gstreamer.freedesktop.org/documentation/frequently-asked-questions/licensing.html?gi-language\u003dc#licensing-of-applications-using-gstreamer"
      ]
    },
    {
      "reference": "./GStreamer-exception-2008.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./GStreamer-exception-2008.html",
      "referenceNumber": 30,
      "name": "GStreamer Exception (2008)",
      "licenseExceptionId": "GStreamer-exception-2008",
      "seeAlso": [
        "https://gstreamer.freedesktop.org/documentation/frequently-asked-questions/licensing.html?gi-language\u003dc#licensing-of-applications-using-gstreamer"
      ]
    },
    {
      "reference": "./i2p-gpl-java-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./i2p-gpl-java-exception.html",
      "referenceNumber": 40,
      "name": "i2p GPL+Java Exception",
      "licenseExceptionId": "i2p-gpl-java-exception",
      "seeAlso": [
        "http://geti2p.net/en/get-involved/develop/licenses#java_exception"
      ]
    },
    {
      "reference": "./KiCad-libraries-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./KiCad-libraries-exception.html",
      "referenceNumber": 28,
      "name": "KiCad Libraries Exception",
      "licenseExceptionId": "KiCad-libraries-exception",
      "seeAlso": [
        "https://www.kicad.org/libraries/license/"
      ]
    },
    {
      "reference": "./LGPL-3.0-linking-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./LGPL-3.0-linking-exception.html",
      "referenceNumber": 2,
      "name": "LGPL-3.0 Linking Exception",
      "licenseExceptionId": "LGPL-3.0-linking-exception",
      "seeAlso": [
        "https://raw.githubusercontent.com/go-xmlpath/xmlpath/v2/LICENSE",
        "https://github.com/goamz/goamz/blob/master/LICENSE",
        "https://github.com/juju/errors/blob/master/LICENSE"
      ]
    },
    {
      "reference": "./libpri-OpenH323-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./libpri-OpenH323-exception.html",
      "referenceNumber": 32,
      "name": "libpri OpenH323 exception",
      "licenseExceptionId": "libpri-OpenH323-exception",
      "seeAlso": [
        "https://github.com/asterisk/libpri/blob/1.6.0/README#L19-L22"
      ]
    },
    {
      "reference": "./Libtool-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Libtool-exception.html",
      "referenceNumber": 17,
      "name": "Libtool Exception",
      "licenseExceptionId": "Libtool-exception",
      "seeAlso": [
        "http://git.savannah.gnu.org/cgit/libtool.git/tree/m4/libtool.m4"
      ]
    },
    {
      "reference": "./Linux-syscall-note.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Linux-syscall-note.html",
      "referenceNumber": 49,
      "name": "Linux Syscall Note",
      "licenseExceptionId": "Linux-syscall-note",
      "seeAlso": [
        "https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/COPYING"
      ]
    },
    {
      "reference": "./LLGPL.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./LLGPL.html",
      "referenceNumber": 3,
      "name": "LLGPL Preamble",
      "licenseExceptionId": "LLGPL",
      "seeAlso": [
        "http://opensource.franz.com/preamble.html"
      ]
    },
    {
      "reference": "./LLVM-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./LLVM-exception.html",
      "referenceNumber": 14,
      "name": "LLVM Exception",
      "licenseExceptionId": "LLVM-exception",
      "seeAlso": [
        "http://llvm.org/foundation/relicensing/LICENSE.txt"
      ]
    },
    {
      "reference": "./LZMA-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./LZMA-exception.html",
      "referenceNumber": 55,
      "name": "LZMA exception",
      "licenseExceptionId": "LZMA-exception",
      "seeAlso": [
        "http://nsis.sourceforge.net/Docs/AppendixI.html#I.6"
      ]
    },
    {
      "reference": "./mif-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./mif-exception.html",
      "referenceNumber": 53,
      "name": "Macros and Inline Functions Exception",
      "licenseExceptionId": "mif-exception",
      "seeAlso": [
        "http://www.scs.stanford.edu/histar/src/lib/cppsup/exception",
        "http://dev.bertos.org/doxygen/",
        "https://www.threadingbuildingblocks.org/licensing"
      ]
    },
    {
      "reference": "./Nokia-Qt-exception-1.1.json",
      "isDeprecatedLicenseId": true,
      "detailsUrl": "./Nokia-Qt-exception-1.1.html",
      "referenceNumber": 31,
      "name": "Nokia Qt LGPL exception 1.1",
      "licenseExceptionId": "Nokia-Qt-exception-1.1",
      "seeAlso": [
        "https://www.keepassx.org/dev/projects/keepassx/repository/revisions/b8dfb9cc4d5133e0f09cd7533d15a4f1c19a40f2/entry/LICENSE.NOKIA-LGPL-EXCEPTION"
      ]
    },
    {
      "reference": "./OCaml-LGPL-linking-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./OCaml-LGPL-linking-exception.html",
      "referenceNumber": 29,
      "name": "OCaml LGPL Linking Exception",
      "licenseExceptionId": "OCaml-LGPL-linking-exception",
      "seeAlso": [
        "https://caml.inria.fr/ocaml/license.en.html"
      ]
    },
    {
      "reference": "./OCCT-exception-1.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./OCCT-exception-1.0.html",
      "referenceNumber": 15,
      "name": "Open CASCADE Exception 1.0",
      "licenseExceptionId": "OCCT-exception-1.0",
      "seeAlso": [
        "http://www.opencascade.com/content/licensing"
      ]
    },
    {
      "reference": "./OpenJDK-assembly-exception-1.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./OpenJDK-assembly-exception-1.0.html",
      "referenceNumber": 24,
      "name": "OpenJDK Assembly exception 1.0",
      "licenseExceptionId": "OpenJDK-assembly-exception-1.0",
      "seeAlso": [
        "http://openjdk.java.net/legal/assembly-exception.html"
      ]
    },
    {
      "reference": "./openvpn-openssl-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./openvpn-openssl-exception.html",
      "referenceNumber": 43,
      "name": "OpenVPN OpenSSL Exception",
      "licenseExceptionId": "openvpn-openssl-exception",
      "seeAlso": [
        "http://openvpn.net/index.php/license.html"
      ]
    },
    {
      "reference": "./PS-or-PDF-font-exception-20170817.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./PS-or-PDF-font-exception-20170817.html",
      "referenceNumber": 45,
      "name": "PS/PDF font exception (2017-08-17)",
      "licenseExceptionId": "PS-or-PDF-font-exception-20170817",
      "seeAlso": [
        "https://github.com/ArtifexSoftware/urw-base35-fonts/blob/65962e27febc3883a17e651cdb23e783668c996f/LICENSE"
      ]
    },
    {
      "reference": "./QPL-1.0-INRIA-2004-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./QPL-1.0-INRIA-2004-exception.html",
      "referenceNumber": 44,
      "name": "INRIA QPL 1.0 2004 variant exception",
      "licenseExceptionId": "QPL-1.0-INRIA-2004-exception",
      "seeAlso": [
        "https://git.frama-c.com/pub/frama-c/-/blob/master/licenses/Q_MODIFIED_LICENSE",
        "https://github.com/maranget/hevea/blob/master/LICENSE"
      ]
    },
    {
      "reference": "./Qt-GPL-exception-1.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Qt-GPL-exception-1.0.html",
      "referenceNumber": 10,
      "name": "Qt GPL exception 1.0",
      "licenseExceptionId": "Qt-GPL-exception-1.0",
      "seeAlso": [
        "http://code.qt.io/cgit/qt/qtbase.git/tree/LICENSE.GPL3-EXCEPT"
      ]
    },
    {
      "reference": "./Qt-LGPL-exception-1.1.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Qt-LGPL-exception-1.1.html",
      "referenceNumber": 16,
      "name": "Qt LGPL exception 1.1",
      "licenseExceptionId": "Qt-LGPL-exception-1.1",
      "seeAlso": [
        "http://code.qt.io/cgit/qt/qtbase.git/tree/LGPL_EXCEPTION.txt"
      ]
    },
    {
      "reference": "./Qwt-exception-1.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Qwt-exception-1.0.html",
      "referenceNumber": 51,
      "name": "Qwt exception 1.0",
      "licenseExceptionId": "Qwt-exception-1.0",
      "seeAlso": [
        "http://qwt.sourceforge.net/qwtlicense.html"
      ]
    },
    {
      "reference": "./SHL-2.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./SHL-2.0.html",
      "referenceNumber": 26,
      "name": "Solderpad Hardware License v2.0",
      "licenseExceptionId": "SHL-2.0",
      "seeAlso": [
        "https://solderpad.org/licenses/SHL-2.0/"
      ]
    },
    {
      "reference": "./SHL-2.1.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./SHL-2.1.html",
      "referenceNumber": 23,
      "name": "Solderpad Hardware License v2.1",
      "licenseExceptionId": "SHL-2.1",
      "seeAlso": [
        "https://solderpad.org/licenses/SHL-2.1/"
      ]
    },
    {
      "reference": "./SWI-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./SWI-exception.html",
      "referenceNumber": 22,
      "name": "SWI exception",
      "licenseExceptionId": "SWI-exception",
      "seeAlso": [
        "https://github.com/SWI-Prolog/packages-clpqr/blob/bfa80b9270274f0800120d5b8e6fef42ac2dc6a5/clpqr/class.pl"
      ]
    },
    {
      "reference": "./Swift-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Swift-exception.html",
      "referenceNumber": 46,
      "name": "Swift Exception",
      "licenseExceptionId": "Swift-exception",
      "seeAlso": [
        "https://swift.org/LICENSE.txt",
        "https://github.com/apple/swift-package-manager/blob/7ab2275f447a5eb37497ed63a9340f8a6d1e488b/LICENSE.txt#L205"
      ]
    },
    {
      "reference": "./u-boot-exception-2.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./u-boot-exception-2.0.html",
      "referenceNumber": 5,
      "name": "U-Boot exception 2.0",
      "licenseExceptionId": "u-boot-exception-2.0",
      "seeAlso": [
        "http://git.denx.de/?p\u003du-boot.git;a\u003dblob;f\u003dLicenses/Exceptions"
      ]
    },
    {
      "reference": "./Universal-FOSS-exception-1.0.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./Universal-FOSS-exception-1.0.html",
      "referenceNumber": 12,
      "name": "Universal FOSS Exception, Version 1.0",
      "licenseExceptionId": "Universal-FOSS-exception-1.0",
      "seeAlso": [
        "https://oss.oracle.com/licenses/universal-foss-exception/"
      ]
    },
    {
      "reference": "./vsftpd-openssl-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./vsftpd-openssl-exception.html",
      "referenceNumber": 56,
      "name": "vsftpd OpenSSL exception",
      "licenseExceptionId": "vsftpd-openssl-exception",
      "seeAlso": [
        "https://git.stg.centos.org/source-git/vsftpd/blob/f727873674d9c9cd7afcae6677aa782eb54c8362/f/LICENSE",
        "https://launchpad.net/debian/squeeze/+source/vsftpd/+copyright",
        "https://github.com/richardcochran/vsftpd/blob/master/COPYING"
      ]
    },
    {
      "reference": "./WxWindows-exception-3.1.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./WxWindows-exception-3.1.html",
      "referenceNumber": 25,
      "name": "WxWindows Library Exception 3.1",
      "licenseExceptionId": "WxWindows-exception-3.1",
      "seeAlso": [
        "http://www.opensource.org/licenses/WXwindows"
      ]
    },
    {
      "reference": "./x11vnc-openssl-exception.json",
      "isDeprecatedLicenseId": false,
      "detailsUrl": "./x11vnc-openssl-exception.html",
      "referenceNumber": 6,
      "name": "x11vnc OpenSSL Exception",
      "licenseExceptionId": "x11vnc-openssl-exception",
      "seeAlso": [
        "https://github.com/LibVNC/x11vnc/blob/master/src/8to24.c#L22"
      ]
    }
  ],
  "releaseDate": "2023-06-18"
}
@@ -0,0 +1,413 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Canonical.Json;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
using StellaOps.Scanner.Emit.Spdx.Models;
|
||||
|
||||
namespace StellaOps.Scanner.Emit.Spdx.Serialization;
|
||||
|
||||
public static class SpdxJsonLdSerializer
|
||||
{
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
WriteIndented = false,
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
|
||||
};
|
||||
|
||||
public static byte[] Serialize(SpdxDocument document)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(document);
|
||||
|
||||
var creationInfoId = "_:creationinfo";
|
||||
var creatorNodes = BuildCreatorNodes(document, creationInfoId, document.CreationInfo.Creators);
|
||||
var createdUsingNodes = BuildCreatorNodes(document, creationInfoId, document.CreationInfo.CreatedUsing);
|
||||
|
||||
var createdByRefs = creatorNodes
|
||||
.Select(node => node.Reference)
|
||||
.Distinct(StringComparer.Ordinal)
|
||||
.OrderBy(reference => reference, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
var createdUsingRefs = createdUsingNodes
|
||||
.Select(node => node.Reference)
|
||||
.Distinct(StringComparer.Ordinal)
|
||||
.OrderBy(reference => reference, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
var creationInfo = new Dictionary<string, object?>
|
||||
{
|
||||
["type"] = "CreationInfo",
|
||||
["@id"] = creationInfoId,
|
||||
["created"] = ScannerTimestamps.ToIso8601(document.CreationInfo.Created),
|
||||
["specVersion"] = document.CreationInfo.SpecVersion
|
||||
};
|
||||
|
||||
if (createdByRefs.Length > 0)
|
||||
{
|
||||
creationInfo["createdBy"] = createdByRefs;
|
||||
}
|
||||
|
||||
if (createdUsingRefs.Length > 0)
|
||||
{
|
||||
creationInfo["createdUsing"] = createdUsingRefs;
|
||||
}
|
||||
|
||||
var graph = new List<object>
|
||||
{
|
||||
creationInfo
|
||||
};
|
||||
|
||||
foreach (var node in creatorNodes.Concat(createdUsingNodes).Select(entry => entry.Node))
|
||||
{
|
||||
graph.Add(node);
|
||||
}
|
||||
|
||||
var documentId = document.DocumentNamespace;
|
||||
var elementIds = BuildElementIds(document, creatorNodes, createdUsingNodes);
|
||||
var profileConformance = document.ProfileConformance.IsDefaultOrEmpty
|
||||
? new[] { "core", "software" }
|
||||
: document.ProfileConformance.OrderBy(value => value, StringComparer.Ordinal).ToArray();
|
||||
|
||||
var documentNode = new Dictionary<string, object?>
|
||||
{
|
||||
["type"] = SpdxDefaults.DocumentType,
|
||||
["spdxId"] = documentId,
|
||||
["creationInfo"] = creationInfoId,
|
||||
["rootElement"] = new[] { document.Sbom.SpdxId },
|
||||
["element"] = elementIds,
|
||||
["profileConformance"] = profileConformance
|
||||
};
|
||||
|
||||
graph.Add(documentNode);
|
||||
|
||||
var sbomElementIds = document.Elements
|
||||
.OfType<SpdxElement>()
|
||||
.Select(element => element.SpdxId)
|
||||
.OrderBy(id => id, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
|
||||
var sbomNode = new Dictionary<string, object?>
|
||||
{
|
||||
["type"] = SpdxDefaults.SbomType,
|
||||
["spdxId"] = document.Sbom.SpdxId,
|
||||
["creationInfo"] = creationInfoId,
|
||||
["rootElement"] = document.Sbom.RootElements.OrderBy(id => id, StringComparer.Ordinal).ToArray(),
|
||||
["element"] = sbomElementIds,
|
||||
["software_sbomType"] = document.Sbom.SbomTypes.IsDefaultOrEmpty
|
||||
? new[] { "build" }
|
||||
: document.Sbom.SbomTypes.OrderBy(value => value, StringComparer.Ordinal).ToArray()
|
||||
};
|
||||
|
||||
graph.Add(sbomNode);
|
||||
|
||||
foreach (var element in document.Elements.OrderBy(element => element.SpdxId, StringComparer.Ordinal))
|
||||
{
|
||||
switch (element)
|
||||
{
|
||||
case SpdxPackage package:
|
||||
graph.Add(BuildPackageNode(package, creationInfoId));
|
||||
break;
|
||||
case SpdxFile file:
|
||||
graph.Add(BuildFileNode(file, creationInfoId));
|
||||
break;
|
||||
case SpdxSnippet snippet:
|
||||
graph.Add(BuildSnippetNode(snippet, creationInfoId));
|
||||
break;
|
||||
case SpdxVulnerability vulnerability:
|
||||
graph.Add(BuildVulnerabilityNode(vulnerability, creationInfoId));
|
||||
break;
|
||||
case SpdxVulnAssessment assessment:
|
||||
graph.Add(BuildVulnAssessmentNode(assessment, creationInfoId));
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var relationship in document.Relationships.OrderBy(relationship => relationship.SpdxId, StringComparer.Ordinal))
|
||||
{
|
||||
graph.Add(BuildRelationshipNode(relationship, creationInfoId));
|
||||
}
|
||||
|
||||
var root = new Dictionary<string, object?>
|
||||
{
|
||||
["@context"] = SpdxDefaults.JsonLdContext,
|
||||
["@graph"] = graph
|
||||
};
|
||||
|
||||
return CanonJson.Canonicalize(root, JsonOptions);
|
||||
}
|
||||
|
||||
private static string[] BuildElementIds(
|
||||
SpdxDocument document,
|
||||
IEnumerable<CreatorNode> creatorNodes,
|
||||
IEnumerable<CreatorNode> createdUsingNodes)
|
||||
{
|
||||
var ids = new HashSet<string>(StringComparer.Ordinal)
|
||||
{
|
||||
document.Sbom.SpdxId
|
||||
};
|
||||
|
||||
foreach (var element in document.Elements)
|
||||
{
|
||||
ids.Add(element.SpdxId);
|
||||
}
|
||||
|
||||
foreach (var relationship in document.Relationships)
|
||||
{
|
||||
ids.Add(relationship.SpdxId);
|
||||
}
|
||||
|
||||
foreach (var creator in creatorNodes.Concat(createdUsingNodes))
|
||||
{
|
||||
if (!string.IsNullOrWhiteSpace(creator.Reference))
|
||||
{
|
||||
ids.Add(creator.Reference);
|
||||
}
|
||||
}
|
||||
|
||||
return ids.OrderBy(id => id, StringComparer.Ordinal).ToArray();
|
||||
}
|
||||
|
||||
private static IReadOnlyList<CreatorNode> BuildCreatorNodes(
|
||||
SpdxDocument document,
|
||||
string creationInfoId,
|
||||
ImmutableArray<string> creators)
|
||||
{
|
||||
if (creators.IsDefaultOrEmpty)
|
||||
{
|
||||
return Array.Empty<CreatorNode>();
|
||||
}
|
||||
|
||||
var nodes = new List<CreatorNode>();
|
||||
foreach (var entry in creators)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(entry))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var parsed = ParseCreator(entry);
|
||||
if (parsed is null)
|
||||
{
|
||||
var fallbackName = entry.Trim();
|
||||
var fallbackReference = CreateCreatorId(document.DocumentNamespace, "tool", fallbackName);
|
||||
nodes.Add(new CreatorNode(fallbackReference, fallbackName, new Dictionary<string, object?>
|
||||
{
|
||||
["type"] = "Tool",
|
||||
["spdxId"] = fallbackReference,
|
||||
["name"] = fallbackName,
|
||||
["creationInfo"] = creationInfoId
|
||||
}));
|
||||
continue;
|
||||
}
|
||||
|
||||
var (type, name) = parsed.Value;
|
||||
var reference = CreateCreatorId(document.DocumentNamespace, type, name);
|
||||
var node = new Dictionary<string, object?>
|
||||
{
|
||||
["type"] = type,
|
||||
["spdxId"] = reference,
|
||||
["name"] = name,
|
||||
["creationInfo"] = creationInfoId
|
||||
};
|
||||
|
||||
nodes.Add(new CreatorNode(reference, name, node));
|
||||
}
|
||||
|
||||
return nodes
|
||||
.OrderBy(node => node.Reference, StringComparer.Ordinal)
|
||||
.ToArray();
|
||||
}
|
||||
|
||||
private static (string Type, string Name)? ParseCreator(string creator)
|
||||
{
|
||||
var trimmed = creator.Trim();
|
||||
var splitIndex = trimmed.IndexOf(':');
|
||||
if (splitIndex <= 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var prefix = trimmed[..splitIndex].Trim();
|
||||
var name = trimmed[(splitIndex + 1)..].Trim();
|
||||
if (string.IsNullOrWhiteSpace(name))
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return prefix.ToLowerInvariant() switch
|
||||
{
|
||||
"tool" => ("Tool", name),
|
||||
"organization" => ("Organization", name),
|
||||
"person" => ("Person", name),
|
||||
_ => null
|
||||
};
|
||||
}
|
||||
|
||||
private static string CreateCreatorId(string documentNamespace, string type, string name)
|
||||
{
|
||||
var normalizedType = type.Trim().ToLowerInvariant();
|
||||
var normalizedName = name.Trim();
|
||||
return $"{documentNamespace}#{normalizedType}-{ScannerIdentifiers.CreateDeterministicHash(documentNamespace, normalizedType, normalizedName)}";
|
||||
}
|
||||
|
||||
private static Dictionary<string, object?> BuildPackageNode(SpdxPackage package, string creationInfoId)
|
||||
{
|
||||
var node = new Dictionary<string, object?>
|
||||
{
|
||||
["type"] = SpdxDefaults.PackageType,
|
||||
["spdxId"] = package.SpdxId,
|
||||
["creationInfo"] = creationInfoId,
|
||||
["name"] = package.Name ?? package.SpdxId
|
||||
};
|
||||
|
||||
AddIfValue(node, "software_packageVersion", package.Version);
|
||||
AddIfValue(node, "software_packageUrl", package.PackageUrl);
|
||||
if (!string.Equals(package.DownloadLocation, "NOASSERTION", StringComparison.OrdinalIgnoreCase))
|
||||
{
|
||||
AddIfValue(node, "software_downloadLocation", package.DownloadLocation);
|
||||
}
|
||||
AddIfValue(node, "software_primaryPurpose", package.PrimaryPurpose);
|
||||
AddIfValue(node, "software_copyrightText", package.CopyrightText);
|
||||
|
||||
if (package.DeclaredLicense is not null)
|
||||
{
|
||||
node["simplelicensing_licenseExpression"] = SpdxLicenseExpressionRenderer.Render(package.DeclaredLicense);
|
||||
}
|
||||
else if (package.ConcludedLicense is not null)
|
||||
{
|
||||
node["simplelicensing_licenseExpression"] = SpdxLicenseExpressionRenderer.Render(package.ConcludedLicense);
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
private static Dictionary<string, object?> BuildFileNode(SpdxFile file, string creationInfoId)
|
||||
{
|
||||
var node = new Dictionary<string, object?>
|
||||
{
|
||||
["type"] = SpdxDefaults.FileType,
|
||||
["spdxId"] = file.SpdxId,
|
||||
["creationInfo"] = creationInfoId,
|
||||
["name"] = file.FileName ?? file.Name ?? file.SpdxId
|
||||
};
|
||||
|
||||
AddIfValue(node, "software_copyrightText", file.CopyrightText);
|
||||
|
||||
if (file.ConcludedLicense is not null)
|
||||
{
|
||||
node["simplelicensing_licenseExpression"] = SpdxLicenseExpressionRenderer.Render(file.ConcludedLicense);
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
private static Dictionary<string, object?> BuildSnippetNode(SpdxSnippet snippet, string creationInfoId)
|
||||
{
|
||||
var node = new Dictionary<string, object?>
|
||||
{
|
||||
["type"] = SpdxDefaults.SnippetType,
|
||||
["spdxId"] = snippet.SpdxId,
|
||||
["creationInfo"] = creationInfoId,
|
||||
["name"] = snippet.Name ?? snippet.SpdxId,
|
||||
["software_snippetFromFile"] = snippet.FromFileSpdxId
|
||||
};
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
private static Dictionary<string, object?> BuildVulnerabilityNode(SpdxVulnerability vulnerability, string creationInfoId)
|
||||
{
|
||||
var node = new Dictionary<string, object?>
|
||||
{
|
||||
["type"] = "security_Vulnerability",
|
||||
["spdxId"] = vulnerability.SpdxId,
|
||||
["creationInfo"] = creationInfoId,
|
||||
["name"] = vulnerability.Name ?? vulnerability.SpdxId
|
||||
};
|
||||
|
||||
AddIfValue(node, "security_locator", vulnerability.Locator);
|
||||
AddIfValue(node, "security_statusNotes", vulnerability.StatusNotes);
|
||||
AddIfValue(node, "security_publishedTime", vulnerability.PublishedTime);
|
||||
AddIfValue(node, "security_modifiedTime", vulnerability.ModifiedTime);
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
private static Dictionary<string, object?> BuildVulnAssessmentNode(SpdxVulnAssessment assessment, string creationInfoId)
|
||||
{
|
||||
var node = new Dictionary<string, object?>
|
||||
{
|
||||
["type"] = "security_VulnAssessmentRelationship",
|
||||
["spdxId"] = assessment.SpdxId,
|
||||
["creationInfo"] = creationInfoId,
|
||||
["name"] = assessment.Name ?? assessment.SpdxId
|
||||
};
|
||||
|
||||
AddIfValue(node, "security_severity", assessment.Severity);
|
||||
AddIfValue(node, "security_vectorString", assessment.VectorString);
|
||||
AddIfValue(node, "security_score", assessment.Score);
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
private static Dictionary<string, object?> BuildRelationshipNode(SpdxRelationship relationship, string creationInfoId)
|
||||
{
|
||||
var node = new Dictionary<string, object?>
|
||||
{
|
||||
["type"] = SpdxDefaults.RelationshipType,
|
||||
["spdxId"] = relationship.SpdxId,
|
||||
["creationInfo"] = creationInfoId,
|
||||
["from"] = relationship.FromElement,
|
||||
["relationshipType"] = RelationshipTypeToString(relationship.Type),
|
||||
["to"] = relationship.ToElements.ToArray()
|
||||
};
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
private static void AddIfValue(Dictionary<string, object?> node, string key, string? value)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(value))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
node[key] = value;
|
||||
}
|
||||
|
||||
private static void AddIfValue(Dictionary<string, object?> node, string key, long? value)
|
||||
{
|
||||
if (!value.HasValue)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
node[key] = value.Value;
|
||||
}
|
||||
|
||||
private static void AddIfValue(Dictionary<string, object?> node, string key, DateTimeOffset? value)
|
||||
{
|
||||
if (!value.HasValue)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
node[key] = ScannerTimestamps.ToIso8601(value.Value);
|
||||
}
|
||||
|
||||
private static string RelationshipTypeToString(SpdxRelationshipType type)
|
||||
=> type switch
|
||||
{
|
||||
SpdxRelationshipType.Describes => "describes",
|
||||
SpdxRelationshipType.DependsOn => "dependsOn",
|
||||
SpdxRelationshipType.Contains => "contains",
|
||||
SpdxRelationshipType.ContainedBy => "containedBy",
|
||||
_ => "other"
|
||||
};
|
||||
|
||||
private sealed record CreatorNode(string Reference, string Name, Dictionary<string, object?> Node);
|
||||
}
|
||||
@@ -0,0 +1,115 @@
|
||||
using System;
|
||||
using System.Collections.Immutable;
|
||||
using System.Linq;
|
||||
using System.Text;
|
||||
using StellaOps.Scanner.Core.Utility;
|
||||
using StellaOps.Scanner.Emit.Spdx.Models;
|
||||
|
||||
namespace StellaOps.Scanner.Emit.Spdx.Serialization;
|
||||
|
||||
public sealed record SpdxTagValueOptions
|
||||
{
|
||||
public bool IncludeFiles { get; init; }
|
||||
|
||||
public bool IncludeSnippets { get; init; }
|
||||
}
|
||||
|
||||
public static class SpdxTagValueSerializer
|
||||
{
|
||||
public static byte[] Serialize(SpdxDocument document, SpdxTagValueOptions? options = null)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(document);
|
||||
|
||||
options ??= new SpdxTagValueOptions();
|
||||
var builder = new StringBuilder();
|
||||
|
||||
builder.AppendLine("SPDXVersion: SPDX-2.3");
|
||||
builder.AppendLine("DataLicense: CC0-1.0");
|
||||
builder.AppendLine("SPDXID: SPDXRef-DOCUMENT");
|
||||
builder.AppendLine($"DocumentName: {Escape(document.Name)}");
|
||||
builder.AppendLine($"DocumentNamespace: {Escape(document.DocumentNamespace)}");
|
||||
|
||||
foreach (var creator in document.CreationInfo.Creators
|
||||
.Where(static entry => !string.IsNullOrWhiteSpace(entry))
|
||||
.OrderBy(entry => entry, StringComparer.Ordinal))
|
||||
{
|
||||
builder.AppendLine($"Creator: {Escape(creator)}");
|
||||
}
|
||||
|
||||
builder.AppendLine($"Created: {ScannerTimestamps.ToIso8601(document.CreationInfo.Created)}");
|
||||
builder.AppendLine();
|
||||
|
||||
var packages = document.Elements
|
||||
.OfType<SpdxPackage>()
|
||||
.OrderBy(pkg => pkg.SpdxId, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
foreach (var package in packages)
|
||||
{
|
||||
builder.AppendLine($"PackageName: {Escape(package.Name ?? package.SpdxId)}");
|
||||
builder.AppendLine($"SPDXID: {Escape(package.SpdxId)}");
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(package.Version))
|
||||
{
|
||||
builder.AppendLine($"PackageVersion: {Escape(package.Version)}");
|
||||
}
|
||||
|
||||
builder.AppendLine($"PackageDownloadLocation: {Escape(package.DownloadLocation ?? "NOASSERTION")}");
|
||||
|
||||
if (package.DeclaredLicense is not null)
|
||||
{
|
||||
builder.AppendLine($"PackageLicenseDeclared: {SpdxLicenseExpressionRenderer.Render(package.DeclaredLicense)}");
|
||||
}
|
||||
else if (package.ConcludedLicense is not null)
|
||||
{
|
||||
builder.AppendLine($"PackageLicenseConcluded: {SpdxLicenseExpressionRenderer.Render(package.ConcludedLicense)}");
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(package.PackageUrl))
|
||||
{
|
||||
builder.AppendLine($"ExternalRef: PACKAGE-MANAGER purl {Escape(package.PackageUrl)}");
|
||||
}
|
||||
|
||||
if (!string.IsNullOrWhiteSpace(package.PrimaryPurpose))
|
||||
{
|
||||
builder.AppendLine($"PrimaryPackagePurpose: {Escape(package.PrimaryPurpose)}");
|
||||
}
|
||||
|
||||
builder.AppendLine();
|
||||
}
|
||||
|
||||
foreach (var relationship in document.Relationships
|
||||
.OrderBy(rel => rel.FromElement, StringComparer.Ordinal)
|
||||
.ThenBy(rel => rel.Type)
|
||||
.ThenBy(rel => rel.ToElements.FirstOrDefault() ?? string.Empty, StringComparer.Ordinal))
|
||||
{
|
||||
foreach (var target in relationship.ToElements.OrderBy(id => id, StringComparer.Ordinal))
|
||||
{
|
||||
builder.AppendLine($"Relationship: {Escape(relationship.FromElement)} {RelationshipTypeToTagValue(relationship.Type)} {Escape(target)}");
|
||||
}
|
||||
}
|
||||
|
||||
return Encoding.UTF8.GetBytes(builder.ToString());
|
||||
}
|
||||
|
||||
private static string RelationshipTypeToTagValue(SpdxRelationshipType type)
|
||||
=> type switch
|
||||
{
|
||||
SpdxRelationshipType.Describes => "DESCRIBES",
|
||||
SpdxRelationshipType.DependsOn => "DEPENDS_ON",
|
||||
SpdxRelationshipType.Contains => "CONTAINS",
|
||||
SpdxRelationshipType.ContainedBy => "CONTAINED_BY",
|
||||
_ => "OTHER"
|
||||
};
|
||||
|
||||
private static string Escape(string value)
|
||||
{
|
||||
if (!value.Contains('\n', StringComparison.Ordinal) && !value.Contains('\r', StringComparison.Ordinal))
|
||||
{
|
||||
return value.Trim();
|
||||
}
|
||||
|
||||
var normalized = value.Replace("\r\n", "\n", StringComparison.Ordinal).Replace('\r', '\n');
|
||||
return $"<text>{normalized}</text>";
|
||||
}
|
||||
}
|
||||
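A minimal usage sketch for the two serializers above, assuming an `SpdxDocument` instance `document` has already been assembled elsewhere in the emit pipeline (its construction is not shown in this diff):

```csharp
// Illustrative only; `document` is assumed to come from the SPDX emit pipeline.
byte[] jsonLd = SpdxJsonLdSerializer.Serialize(document);          // canonical JSON-LD bytes
byte[] tagValue = SpdxTagValueSerializer.Serialize(
    document,
    new SpdxTagValueOptions { IncludeFiles = false, IncludeSnippets = false });

File.WriteAllBytes("sbom.spdx.json", jsonLd);
File.WriteAllBytes("sbom.spdx", tagValue);
```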
@@ -0,0 +1,45 @@
using System;
using StellaOps.Scanner.Core.Utility;

namespace StellaOps.Scanner.Emit.Spdx;

internal sealed class SpdxIdBuilder
{
    public SpdxIdBuilder(string namespaceBase, string imageDigest)
    {
        if (string.IsNullOrWhiteSpace(namespaceBase))
        {
            throw new ArgumentException("Namespace base is required.", nameof(namespaceBase));
        }

        var normalizedBase = TrimTrailingSlash(namespaceBase.Trim());
        var normalizedDigest = ScannerIdentifiers.NormalizeDigest(imageDigest) ?? "unknown";
        var digestValue = normalizedDigest.Split(':', 2, StringSplitOptions.TrimEntries)[^1];
        DocumentNamespace = $"{normalizedBase}/image/{digestValue}";
    }

    public string DocumentNamespace { get; }

    public string DocumentId => $"{DocumentNamespace}#document";

    public string SbomId => $"{DocumentNamespace}#sbom";

    public string CreationInfoId => "_:creationinfo";

    public string CreatePackageId(string key)
        => $"{DocumentNamespace}#pkg-{ScannerIdentifiers.CreateDeterministicHash(DocumentNamespace, "pkg", key)}";

    public string CreateRelationshipId(string from, string type, string to)
        => $"{DocumentNamespace}#rel-{ScannerIdentifiers.CreateDeterministicHash(DocumentNamespace, "rel", from, type, to)}";

    public string CreateToolId(string name)
        => $"{DocumentNamespace}#tool-{ScannerIdentifiers.CreateDeterministicHash(DocumentNamespace, "tool", name)}";

    public string CreateOrganizationId(string name)
        => $"{DocumentNamespace}#org-{ScannerIdentifiers.CreateDeterministicHash(DocumentNamespace, "org", name)}";

    private static string TrimTrailingSlash(string value)
        => string.IsNullOrWhiteSpace(value)
            ? string.Empty
            : value.Trim().TrimEnd('/');
}
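A short sketch of the determinism property the builder relies on; the namespace base and digest below are invented values:

```csharp
// Hypothetical inputs; the point is that identical inputs always yield identical IDs.
var ids = new SpdxIdBuilder("https://sbom.example.org/spdx/", "sha256:0a1b2c3d...");
var pkgA = ids.CreatePackageId("pkg:npm/lodash@4.17.21");
var pkgB = ids.CreatePackageId("pkg:npm/lodash@4.17.21");
// pkgA == pkgB, so repeated emissions over the same inputs stay byte-identical.
```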
@@ -14,7 +14,12 @@
  </ItemGroup>

  <ItemGroup>
    <PackageReference Include="CycloneDX.Core" Version="10.0.2" />
    <PackageReference Include="CycloneDX.Core" Version="11.0.0" />
    <PackageReference Include="RoaringBitmap" Version="0.0.9" />
  </ItemGroup>

  <ItemGroup>
    <EmbeddedResource Include="Spdx/Resources/spdx-license-list-3.21.json" LogicalName="StellaOps.Scanner.Emit.Spdx.Resources.spdx-license-list-3.21.json" />
    <EmbeddedResource Include="Spdx/Resources/spdx-license-exceptions-3.21.json" LogicalName="StellaOps.Scanner.Emit.Spdx.Resources.spdx-license-exceptions-3.21.json" />
  </ItemGroup>
</Project>

@@ -3,3 +3,4 @@
| Task ID | Sprint | Status | Notes |
| --- | --- | --- | --- |
| `BSE-009` | `docs/implplan/SPRINT_3500_0012_0001_binary_sbom_emission.md` | DONE | Added end-to-end integration test coverage for native binary SBOM emission (emit → fragments → CycloneDX). |
| `SPRINT-3600-0002-T1` | `docs/implplan/SPRINT_3600_0002_0001_cyclonedx_1_7_upgrade.md` | DOING | Update CycloneDX packages and defaults to 1.7. |

@@ -0,0 +1,239 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// Sprint: SPRINT_4300_0002_0001
|
||||
// Task: Evidence Privacy Controls - Evidence model definitions
|
||||
|
||||
namespace StellaOps.Scanner.Evidence.Models;
|
||||
|
||||
/// <summary>
|
||||
/// Bundle of evidence for a finding.
|
||||
/// </summary>
|
||||
public sealed record EvidenceBundle
|
||||
{
|
||||
/// <summary>
|
||||
/// Reachability analysis evidence.
|
||||
/// </summary>
|
||||
public ReachabilityEvidence? Reachability { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Call stack evidence (runtime or static analysis).
|
||||
/// </summary>
|
||||
public CallStackEvidence? CallStack { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Provenance/build evidence.
|
||||
/// </summary>
|
||||
public ProvenanceEvidence? Provenance { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// VEX statements.
|
||||
/// </summary>
|
||||
public VexEvidence? Vex { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// EPSS evidence.
|
||||
/// </summary>
|
||||
public EpssEvidence? Epss { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Reachability analysis evidence.
|
||||
/// </summary>
|
||||
public sealed record ReachabilityEvidence
|
||||
{
|
||||
/// <summary>
|
||||
/// Reachability result.
|
||||
/// </summary>
|
||||
public required string Result { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Confidence score [0,1].
|
||||
/// </summary>
|
||||
public required double Confidence { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Paths from entrypoints to vulnerable code.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<ReachabilityPath> Paths { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of paths (preserved in minimal redaction).
|
||||
/// </summary>
|
||||
public int PathCount => Paths.Count;
|
||||
|
||||
/// <summary>
|
||||
/// Digest of the call graph used.
|
||||
/// </summary>
|
||||
public required string GraphDigest { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A path from an entrypoint to vulnerable code.
|
||||
/// </summary>
|
||||
public sealed record ReachabilityPath
|
||||
{
|
||||
/// <summary>
|
||||
/// Unique path identifier.
|
||||
/// </summary>
|
||||
public required string PathId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Steps in the path.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<ReachabilityStep> Steps { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A step in a reachability path.
|
||||
/// </summary>
|
||||
public sealed record ReachabilityStep
|
||||
{
|
||||
/// <summary>
|
||||
/// Node identifier (function/method name).
|
||||
/// </summary>
|
||||
public required string Node { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Hash of the file containing this code.
|
||||
/// </summary>
|
||||
public required string FileHash { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Line range [start, end].
|
||||
/// </summary>
|
||||
public required int[] Lines { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Raw source code (null when redacted).
|
||||
/// </summary>
|
||||
public string? SourceCode { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Call stack evidence.
|
||||
/// </summary>
|
||||
public sealed record CallStackEvidence
|
||||
{
|
||||
/// <summary>
|
||||
/// Stack frames.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<CallFrame> Frames { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Stack trace digest.
|
||||
/// </summary>
|
||||
public string? StackDigest { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A frame in a call stack.
|
||||
/// </summary>
|
||||
public sealed record CallFrame
|
||||
{
|
||||
/// <summary>
|
||||
/// Function/method name.
|
||||
/// </summary>
|
||||
public required string Function { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Hash of the file.
|
||||
/// </summary>
|
||||
public required string FileHash { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Line number.
|
||||
/// </summary>
|
||||
public required int Line { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Function arguments (null when redacted).
|
||||
/// </summary>
|
||||
public IReadOnlyDictionary<string, string>? Arguments { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Local variables (null when redacted).
|
||||
/// </summary>
|
||||
public IReadOnlyDictionary<string, string>? Locals { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Provenance/build evidence.
|
||||
/// </summary>
|
||||
public sealed record ProvenanceEvidence
|
||||
{
|
||||
/// <summary>
|
||||
/// Build identifier.
|
||||
/// </summary>
|
||||
public required string BuildId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Build digest.
|
||||
/// </summary>
|
||||
public required string BuildDigest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether provenance was verified.
|
||||
/// </summary>
|
||||
public required bool Verified { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Additional metadata (null when redacted).
|
||||
/// </summary>
|
||||
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// VEX evidence.
|
||||
/// </summary>
|
||||
public sealed record VexEvidence
|
||||
{
|
||||
/// <summary>
|
||||
/// VEX status.
|
||||
/// </summary>
|
||||
public required string Status { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Justification for not_affected status.
|
||||
/// </summary>
|
||||
public string? Justification { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Impact statement.
|
||||
/// </summary>
|
||||
public string? ImpactStatement { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Action statement.
|
||||
/// </summary>
|
||||
public string? ActionStatement { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Timestamp of the VEX statement.
|
||||
/// </summary>
|
||||
public DateTimeOffset? Timestamp { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// EPSS evidence.
|
||||
/// </summary>
|
||||
public sealed record EpssEvidence
|
||||
{
|
||||
/// <summary>
|
||||
/// EPSS probability score [0,1].
|
||||
/// </summary>
|
||||
public required double Score { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// EPSS percentile rank [0,1].
|
||||
/// </summary>
|
||||
public required double Percentile { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Model date.
|
||||
/// </summary>
|
||||
public required DateOnly ModelDate { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When this evidence was captured.
|
||||
/// </summary>
|
||||
public required DateTimeOffset CapturedAt { get; init; }
|
||||
}
|
||||
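A small construction sketch for the evidence bundle model above; the values are invented and only the required members of `EpssEvidence` and `ProvenanceEvidence` are populated:

```csharp
// Hypothetical values for illustration.
var bundle = new EvidenceBundle
{
    Epss = new EpssEvidence
    {
        Score = 0.42,
        Percentile = 0.97,
        ModelDate = new DateOnly(2025, 1, 15),
        CapturedAt = DateTimeOffset.Parse("2025-01-15T00:00:00+00:00")
    },
    Provenance = new ProvenanceEvidence
    {
        BuildId = "build-1234",
        BuildDigest = "sha256:feedbeef",
        Verified = true
    }
};
```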
@@ -0,0 +1,46 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Sprint: SPRINT_4300_0002_0001
// Task: T1 - Define Redaction Levels

namespace StellaOps.Scanner.Evidence.Privacy;

/// <summary>
/// Redaction levels for evidence data.
/// </summary>
public enum EvidenceRedactionLevel
{
    /// <summary>
    /// Full evidence including raw source code.
    /// Requires elevated permissions.
    /// </summary>
    Full = 0,

    /// <summary>
    /// Standard redaction: file hashes, symbol names, line ranges.
    /// No raw source code.
    /// </summary>
    Standard = 1,

    /// <summary>
    /// Minimal: only digests and counts.
    /// For external sharing.
    /// </summary>
    Minimal = 2
}

/// <summary>
/// Fields that can be redacted.
/// </summary>
[Flags]
public enum RedactableFields
{
    None = 0,
    SourceCode = 1 << 0,
    FilePaths = 1 << 1,
    LineNumbers = 1 << 2,
    SymbolNames = 1 << 3,
    CallArguments = 1 << 4,
    EnvironmentVars = 1 << 5,
    InternalUrls = 1 << 6,
    All = SourceCode | FilePaths | LineNumbers | SymbolNames | CallArguments | EnvironmentVars | InternalUrls
}
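One way to read the two enums together; this mapping is illustrative only and not part of the diff:

```csharp
// Illustrative mapping from redaction level to the fields it strips.
private static RedactableFields FieldsToStrip(EvidenceRedactionLevel level) => level switch
{
    EvidenceRedactionLevel.Full     => RedactableFields.None,
    EvidenceRedactionLevel.Standard => RedactableFields.SourceCode
                                     | RedactableFields.CallArguments
                                     | RedactableFields.EnvironmentVars,
    _                               => RedactableFields.All
};
```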
@@ -0,0 +1,227 @@
|
||||
// SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
// Sprint: SPRINT_4300_0002_0001
|
||||
// Task: T2 - Implement EvidenceRedactionService
|
||||
|
||||
using System.Security.Claims;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Evidence.Models;
|
||||
|
||||
namespace StellaOps.Scanner.Evidence.Privacy;
|
||||
|
||||
/// <summary>
|
||||
/// Service interface for redacting evidence based on privacy rules.
|
||||
/// </summary>
|
||||
public interface IEvidenceRedactionService
|
||||
{
|
||||
/// <summary>
|
||||
/// Redacts evidence based on the specified level.
|
||||
/// </summary>
|
||||
EvidenceBundle Redact(EvidenceBundle bundle, EvidenceRedactionLevel level);
|
||||
|
||||
/// <summary>
|
||||
/// Redacts specific fields from evidence.
|
||||
/// </summary>
|
||||
EvidenceBundle RedactFields(EvidenceBundle bundle, RedactableFields fields);
|
||||
|
||||
/// <summary>
|
||||
/// Determines the appropriate redaction level for a user.
|
||||
/// </summary>
|
||||
EvidenceRedactionLevel DetermineLevel(ClaimsPrincipal user);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Service for redacting evidence based on privacy rules.
|
||||
/// </summary>
|
||||
public sealed class EvidenceRedactionService : IEvidenceRedactionService
|
||||
{
|
||||
private readonly ILogger<EvidenceRedactionService> _logger;
|
||||
|
||||
public EvidenceRedactionService(ILogger<EvidenceRedactionService> logger)
|
||||
{
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Redacts evidence based on the specified level.
|
||||
/// </summary>
|
||||
public EvidenceBundle Redact(EvidenceBundle bundle, EvidenceRedactionLevel level)
|
||||
{
|
||||
_logger.LogDebug("Redacting evidence to level {Level}", level);
|
||||
|
||||
return level switch
|
||||
{
|
||||
EvidenceRedactionLevel.Full => bundle,
|
||||
EvidenceRedactionLevel.Standard => RedactStandard(bundle),
|
||||
EvidenceRedactionLevel.Minimal => RedactMinimal(bundle),
|
||||
_ => RedactStandard(bundle)
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Redacts specific fields from evidence.
|
||||
/// </summary>
|
||||
public EvidenceBundle RedactFields(EvidenceBundle bundle, RedactableFields fields)
|
||||
{
|
||||
if (fields == RedactableFields.None)
|
||||
{
|
||||
return bundle;
|
||||
}
|
||||
|
||||
var result = bundle;
|
||||
|
||||
if (fields.HasFlag(RedactableFields.SourceCode))
|
||||
{
|
||||
result = result with
|
||||
{
|
||||
Reachability = result.Reachability is not null
|
||||
? RedactSourceCodeFromReachability(result.Reachability)
|
||||
: null
|
||||
};
|
||||
}
|
||||
|
||||
if (fields.HasFlag(RedactableFields.CallArguments))
|
||||
{
|
||||
result = result with
|
||||
{
|
||||
CallStack = result.CallStack is not null
|
||||
? RedactCallStackArguments(result.CallStack)
|
||||
: null
|
||||
};
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines the appropriate redaction level for a user.
|
||||
/// </summary>
|
||||
public EvidenceRedactionLevel DetermineLevel(ClaimsPrincipal user)
|
||||
{
|
||||
if (user.HasClaim("scope", "evidence:full") ||
|
||||
user.HasClaim("role", "security_admin"))
|
||||
{
|
||||
_logger.LogDebug("User has full evidence access");
|
||||
return EvidenceRedactionLevel.Full;
|
||||
}
|
||||
|
||||
if (user.HasClaim("scope", "evidence:standard") ||
|
||||
user.HasClaim("role", "security_analyst"))
|
||||
{
|
||||
_logger.LogDebug("User has standard evidence access");
|
||||
return EvidenceRedactionLevel.Standard;
|
||||
}
|
||||
|
||||
_logger.LogDebug("User has minimal evidence access (default)");
|
||||
return EvidenceRedactionLevel.Minimal;
|
||||
}
|
||||
|
||||
private EvidenceBundle RedactStandard(EvidenceBundle bundle)
|
||||
{
|
||||
return bundle with
|
||||
{
|
||||
Reachability = bundle.Reachability is not null
|
||||
? RedactReachability(bundle.Reachability)
|
||||
: null,
|
||||
CallStack = bundle.CallStack is not null
|
||||
? RedactCallStack(bundle.CallStack)
|
||||
: null,
|
||||
Provenance = bundle.Provenance // Keep as-is (already redacted at standard level)
|
||||
};
|
||||
}
|
||||
|
||||
private ReachabilityEvidence RedactReachability(ReachabilityEvidence evidence)
|
||||
{
|
||||
return evidence with
|
||||
{
|
||||
Paths = evidence.Paths.Select(p => new ReachabilityPath
|
||||
{
|
||||
PathId = p.PathId,
|
||||
Steps = p.Steps.Select(s => new ReachabilityStep
|
||||
{
|
||||
Node = RedactSymbol(s.Node),
|
||||
FileHash = s.FileHash, // Keep hash
|
||||
Lines = s.Lines, // Keep line range
|
||||
SourceCode = null // Redact source
|
||||
}).ToList()
|
||||
}).ToList()
|
||||
};
|
||||
}
|
||||
|
||||
private CallStackEvidence RedactCallStack(CallStackEvidence evidence)
|
||||
{
|
||||
return evidence with
|
||||
{
|
||||
Frames = evidence.Frames.Select(f => new CallFrame
|
||||
{
|
||||
Function = RedactSymbol(f.Function),
|
||||
FileHash = f.FileHash,
|
||||
Line = f.Line,
|
||||
Arguments = null, // Redact arguments
|
||||
Locals = null // Redact locals
|
||||
}).ToList()
|
||||
};
|
||||
}
|
||||
|
||||
private string RedactSymbol(string symbol)
|
||||
{
|
||||
// Keep class and method names, redact arguments
|
||||
// "MyClass.MyMethod(string arg1, int arg2)" -> "MyClass.MyMethod(...)"
|
||||
var parenIndex = symbol.IndexOf('(');
|
||||
if (parenIndex > 0)
|
||||
{
|
||||
return symbol[..parenIndex] + "(...)";
|
||||
}
|
||||
return symbol;
|
||||
}
|
||||
|
||||
private EvidenceBundle RedactMinimal(EvidenceBundle bundle)
|
||||
{
|
||||
return bundle with
|
||||
{
|
||||
Reachability = bundle.Reachability is not null
|
||||
? new ReachabilityEvidence
|
||||
{
|
||||
Result = bundle.Reachability.Result,
|
||||
Confidence = bundle.Reachability.Confidence,
|
||||
Paths = [], // No paths
|
||||
GraphDigest = bundle.Reachability.GraphDigest
|
||||
}
|
||||
: null,
|
||||
CallStack = null, // Remove entirely
|
||||
Provenance = bundle.Provenance is not null
|
||||
? new ProvenanceEvidence
|
||||
{
|
||||
BuildId = bundle.Provenance.BuildId,
|
||||
BuildDigest = bundle.Provenance.BuildDigest,
|
||||
Verified = bundle.Provenance.Verified
|
||||
}
|
||||
: null,
|
||||
Vex = bundle.Vex, // Keep VEX (public data)
|
||||
Epss = bundle.Epss // Keep EPSS (public data)
|
||||
};
|
||||
}
|
||||
|
||||
private ReachabilityEvidence RedactSourceCodeFromReachability(ReachabilityEvidence evidence)
|
||||
{
|
||||
return evidence with
|
||||
{
|
||||
Paths = evidence.Paths.Select(p => new ReachabilityPath
|
||||
{
|
||||
PathId = p.PathId,
|
||||
Steps = p.Steps.Select(s => s with { SourceCode = null }).ToList()
|
||||
}).ToList()
|
||||
};
|
||||
}
|
||||
|
||||
private CallStackEvidence RedactCallStackArguments(CallStackEvidence evidence)
|
||||
{
|
||||
return evidence with
|
||||
{
|
||||
Frames = evidence.Frames.Select(f => f with
|
||||
{
|
||||
Arguments = null,
|
||||
Locals = null
|
||||
}).ToList()
|
||||
};
|
||||
}
|
||||
}
|
||||
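A hedged end-to-end sketch of the redaction service above; `loggerFactory`, `user` (a `ClaimsPrincipal`), and `bundle` are assumed to exist in the calling code:

```csharp
// Sketch only: pick the redaction level from the caller's claims, then redact.
IEvidenceRedactionService redaction =
    new EvidenceRedactionService(loggerFactory.CreateLogger<EvidenceRedactionService>());

EvidenceRedactionLevel level = redaction.DetermineLevel(user);   // Minimal unless scope/role claims grant more
EvidenceBundle safeBundle = redaction.Redact(bundle, level);

// Or strip specific fields regardless of level:
EvidenceBundle noSource = redaction.RedactFields(bundle, RedactableFields.SourceCode);
```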
@@ -0,0 +1,18 @@
<?xml version='1.0' encoding='utf-8'?>
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <TreatWarningsAsErrors>false</TreatWarningsAsErrors>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0" />
    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
  </ItemGroup>
</Project>
@@ -0,0 +1,433 @@
|
||||
using System.Diagnostics;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Scanner.Orchestration.Fidelity;
|
||||
|
||||
public interface IFidelityAwareAnalyzer
|
||||
{
|
||||
Task<FidelityAnalysisResult> AnalyzeAsync(
|
||||
AnalysisRequest request,
|
||||
FidelityLevel level,
|
||||
CancellationToken ct);
|
||||
|
||||
Task<FidelityUpgradeResult> UpgradeFidelityAsync(
|
||||
Guid findingId,
|
||||
FidelityLevel targetLevel,
|
||||
CancellationToken ct);
|
||||
}
|
||||
|
||||
public sealed class FidelityAwareAnalyzer : IFidelityAwareAnalyzer
|
||||
{
|
||||
private readonly ICallGraphExtractor _callGraphExtractor;
|
||||
private readonly IRuntimeCorrelator _runtimeCorrelator;
|
||||
private readonly IBinaryMapper _binaryMapper;
|
||||
private readonly IPackageMatcher _packageMatcher;
|
||||
private readonly IAnalysisRepository _repository;
|
||||
private readonly ILogger<FidelityAwareAnalyzer> _logger;
|
||||
|
||||
public FidelityAwareAnalyzer(
|
||||
ICallGraphExtractor callGraphExtractor,
|
||||
IRuntimeCorrelator runtimeCorrelator,
|
||||
IBinaryMapper binaryMapper,
|
||||
IPackageMatcher packageMatcher,
|
||||
IAnalysisRepository repository,
|
||||
ILogger<FidelityAwareAnalyzer> logger)
|
||||
{
|
||||
_callGraphExtractor = callGraphExtractor;
|
||||
_runtimeCorrelator = runtimeCorrelator;
|
||||
_binaryMapper = binaryMapper;
|
||||
_packageMatcher = packageMatcher;
|
||||
_repository = repository;
|
||||
_logger = logger;
|
||||
}
|
||||
|
||||
public async Task<FidelityAnalysisResult> AnalyzeAsync(
|
||||
AnalysisRequest request,
|
||||
FidelityLevel level,
|
||||
CancellationToken ct)
|
||||
{
|
||||
var config = FidelityConfiguration.FromLevel(level);
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
|
||||
using var cts = CancellationTokenSource.CreateLinkedTokenSource(ct);
|
||||
cts.CancelAfter(config.Timeout);
|
||||
|
||||
try
|
||||
{
|
||||
// Level 1: Package matching (always done)
|
||||
var packageResult = await _packageMatcher.MatchAsync(request, cts.Token);
|
||||
|
||||
if (level == FidelityLevel.Quick)
|
||||
{
|
||||
return BuildResult(packageResult, config, stopwatch.Elapsed);
|
||||
}
|
||||
|
||||
// Level 2: Call graph analysis (Standard and Deep)
|
||||
CallGraphResult? callGraphResult = null;
|
||||
if (config.EnableCallGraph)
|
||||
{
|
||||
var languages = config.TargetLanguages ?? request.DetectedLanguages;
|
||||
callGraphResult = await _callGraphExtractor.ExtractAsync(
|
||||
request,
|
||||
languages,
|
||||
config.MaxCallGraphDepth,
|
||||
cts.Token);
|
||||
}
|
||||
|
||||
if (level == FidelityLevel.Standard)
|
||||
{
|
||||
return BuildResult(packageResult, callGraphResult, config, stopwatch.Elapsed);
|
||||
}
|
||||
|
||||
// Level 3: Binary mapping and runtime (Deep only)
|
||||
BinaryMappingResult? binaryResult = null;
|
||||
RuntimeCorrelationResult? runtimeResult = null;
|
||||
|
||||
if (config.EnableBinaryMapping)
|
||||
{
|
||||
binaryResult = await _binaryMapper.MapAsync(request, cts.Token);
|
||||
}
|
||||
|
||||
if (config.EnableRuntimeCorrelation)
|
||||
{
|
||||
runtimeResult = await _runtimeCorrelator.CorrelateAsync(request, cts.Token);
|
||||
}
|
||||
|
||||
return BuildResult(
|
||||
packageResult,
|
||||
callGraphResult,
|
||||
binaryResult,
|
||||
runtimeResult,
|
||||
config,
|
||||
stopwatch.Elapsed);
|
||||
}
|
||||
catch (OperationCanceledException) when (cts.IsCancellationRequested && !ct.IsCancellationRequested)
|
||||
{
|
||||
_logger.LogWarning(
|
||||
"Analysis timeout at fidelity {Level} after {Elapsed}",
|
||||
level, stopwatch.Elapsed);
|
||||
|
||||
return BuildTimeoutResult(level, config, stopwatch.Elapsed);
|
||||
}
|
||||
}
|
||||
|
||||
public async Task<FidelityUpgradeResult> UpgradeFidelityAsync(
|
||||
Guid findingId,
|
||||
FidelityLevel targetLevel,
|
||||
CancellationToken ct)
|
||||
{
|
||||
// Load existing analysis
|
||||
var existing = await _repository.GetAnalysisAsync(findingId, ct);
|
||||
if (existing is null)
|
||||
{
|
||||
return FidelityUpgradeResult.NotFound(findingId);
|
||||
}
|
||||
|
||||
if (existing.FidelityLevel >= targetLevel)
|
||||
{
|
||||
return FidelityUpgradeResult.AlreadyAtLevel(existing);
|
||||
}
|
||||
|
||||
// Perform incremental upgrade
|
||||
var request = existing.ToAnalysisRequest();
|
||||
var result = await AnalyzeAsync(request, targetLevel, ct);
|
||||
|
||||
// Merge with existing
|
||||
var merged = MergeResults(existing, result);
|
||||
|
||||
// Persist upgraded result
|
||||
await _repository.SaveAnalysisAsync(merged, ct);
|
||||
|
||||
return new FidelityUpgradeResult
|
||||
{
|
||||
Success = true,
|
||||
FindingId = findingId,
|
||||
PreviousLevel = existing.FidelityLevel,
|
||||
NewLevel = targetLevel,
|
||||
ConfidenceImprovement = merged.Confidence - existing.Confidence,
|
||||
NewResult = merged
|
||||
};
|
||||
}
|
||||
|
||||
private FidelityAnalysisResult BuildResult(
|
||||
PackageMatchResult packageResult,
|
||||
FidelityConfiguration config,
|
||||
TimeSpan elapsed)
|
||||
{
|
||||
var confidence = config.BaseConfidence;
|
||||
|
||||
// Adjust confidence based on match quality
|
||||
if (packageResult.HasExactMatch)
|
||||
confidence += 0.1m;
|
||||
|
||||
return new FidelityAnalysisResult
|
||||
{
|
||||
FidelityLevel = config.Level,
|
||||
Confidence = Math.Min(confidence, 1.0m),
|
||||
IsReachable = null, // Unknown at Quick level
|
||||
PackageMatches = packageResult.Matches,
|
||||
CallGraph = null,
|
||||
BinaryMapping = null,
|
||||
RuntimeCorrelation = null,
|
||||
AnalysisTime = elapsed,
|
||||
TimedOut = false,
|
||||
CanUpgrade = true,
|
||||
UpgradeRecommendation = "Upgrade to Standard for call graph analysis"
|
||||
};
|
||||
}
|
||||
|
||||
private FidelityAnalysisResult BuildResult(
|
||||
PackageMatchResult packageResult,
|
||||
CallGraphResult? callGraphResult,
|
||||
FidelityConfiguration config,
|
||||
TimeSpan elapsed)
|
||||
{
|
||||
var confidence = config.BaseConfidence;
|
||||
|
||||
// Adjust based on call graph completeness
|
||||
if (callGraphResult?.IsComplete == true)
|
||||
confidence += 0.15m;
|
||||
|
||||
var isReachable = callGraphResult?.HasPathToVulnerable;
|
||||
|
||||
return new FidelityAnalysisResult
|
||||
{
|
||||
FidelityLevel = config.Level,
|
||||
Confidence = Math.Min(confidence, 1.0m),
|
||||
IsReachable = isReachable,
|
||||
PackageMatches = packageResult.Matches,
|
||||
CallGraph = callGraphResult,
|
||||
BinaryMapping = null,
|
||||
RuntimeCorrelation = null,
|
||||
AnalysisTime = elapsed,
|
||||
TimedOut = false,
|
||||
CanUpgrade = true,
|
||||
UpgradeRecommendation = isReachable == true
|
||||
? "Upgrade to Deep for runtime verification"
|
||||
: "Upgrade to Deep for binary mapping confirmation"
|
||||
};
|
||||
}
|
||||
|
||||
private FidelityAnalysisResult BuildResult(
|
||||
PackageMatchResult packageResult,
|
||||
CallGraphResult? callGraphResult,
|
||||
BinaryMappingResult? binaryResult,
|
||||
RuntimeCorrelationResult? runtimeResult,
|
||||
FidelityConfiguration config,
|
||||
TimeSpan elapsed)
|
||||
{
|
||||
var confidence = config.BaseConfidence;
|
||||
|
||||
// Adjust based on runtime corroboration
|
||||
if (runtimeResult?.HasCorroboration == true)
|
||||
confidence = 0.95m;
|
||||
else if (binaryResult?.HasMapping == true)
|
||||
confidence += 0.05m;
|
||||
|
||||
var isReachable = DetermineReachability(
|
||||
callGraphResult,
|
||||
binaryResult,
|
||||
runtimeResult);
|
||||
|
||||
return new FidelityAnalysisResult
|
||||
{
|
||||
FidelityLevel = config.Level,
|
||||
Confidence = Math.Min(confidence, 1.0m),
|
||||
IsReachable = isReachable,
|
||||
PackageMatches = packageResult.Matches,
|
||||
CallGraph = callGraphResult,
|
||||
BinaryMapping = binaryResult,
|
||||
RuntimeCorrelation = runtimeResult,
|
||||
AnalysisTime = elapsed,
|
||||
TimedOut = false,
|
||||
CanUpgrade = false,
|
||||
UpgradeRecommendation = null
|
||||
};
|
||||
}
|
||||
|
||||
private static bool? DetermineReachability(
|
||||
CallGraphResult? callGraph,
|
||||
BinaryMappingResult? binary,
|
||||
RuntimeCorrelationResult? runtime)
|
||||
{
|
||||
// Runtime is authoritative
|
||||
if (runtime?.WasExecuted == true)
|
||||
return true;
|
||||
if (runtime?.WasExecuted == false && runtime.ObservationCount > 100)
|
||||
return false;
|
||||
|
||||
// Fall back to call graph
|
||||
if (callGraph?.HasPathToVulnerable == true)
|
||||
return true;
|
||||
if (callGraph?.HasPathToVulnerable == false && callGraph.IsComplete)
|
||||
return false;
|
||||
|
||||
return null; // Unknown
|
||||
}
|
||||
|
||||
private FidelityAnalysisResult BuildTimeoutResult(
|
||||
FidelityLevel attemptedLevel,
|
||||
FidelityConfiguration config,
|
||||
TimeSpan elapsed)
|
||||
{
|
||||
return new FidelityAnalysisResult
|
||||
{
|
||||
FidelityLevel = attemptedLevel,
|
||||
Confidence = 0.3m,
|
||||
IsReachable = null,
|
||||
PackageMatches = [],
|
||||
CallGraph = null,
|
||||
BinaryMapping = null,
|
||||
RuntimeCorrelation = null,
|
||||
AnalysisTime = elapsed,
|
||||
TimedOut = true,
|
||||
CanUpgrade = false,
|
||||
UpgradeRecommendation = "Analysis timed out. Try with smaller scope."
|
||||
};
|
||||
}
|
||||
|
||||
private FidelityAnalysisResult MergeResults(
|
||||
FidelityAnalysisResult existing,
|
||||
FidelityAnalysisResult upgraded)
|
||||
{
|
||||
// Take the upgraded result but preserve any existing data not replaced
|
||||
return new FidelityAnalysisResult
|
||||
{
|
||||
FidelityLevel = upgraded.FidelityLevel,
|
||||
Confidence = upgraded.Confidence,
|
||||
IsReachable = upgraded.IsReachable ?? existing.IsReachable,
|
||||
PackageMatches = upgraded.PackageMatches,
|
||||
CallGraph = upgraded.CallGraph ?? existing.CallGraph,
|
||||
BinaryMapping = upgraded.BinaryMapping ?? existing.BinaryMapping,
|
||||
RuntimeCorrelation = upgraded.RuntimeCorrelation ?? existing.RuntimeCorrelation,
|
||||
AnalysisTime = existing.AnalysisTime + upgraded.AnalysisTime,
|
||||
TimedOut = upgraded.TimedOut,
|
||||
CanUpgrade = upgraded.CanUpgrade,
|
||||
UpgradeRecommendation = upgraded.UpgradeRecommendation
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
public sealed record FidelityAnalysisResult
|
||||
{
|
||||
public required FidelityLevel FidelityLevel { get; init; }
|
||||
public required decimal Confidence { get; init; }
|
||||
public bool? IsReachable { get; init; }
|
||||
public required IReadOnlyList<PackageMatch> PackageMatches { get; init; }
|
||||
public CallGraphResult? CallGraph { get; init; }
|
||||
public BinaryMappingResult? BinaryMapping { get; init; }
|
||||
public RuntimeCorrelationResult? RuntimeCorrelation { get; init; }
|
||||
public required TimeSpan AnalysisTime { get; init; }
|
||||
public required bool TimedOut { get; init; }
|
||||
public required bool CanUpgrade { get; init; }
|
||||
public string? UpgradeRecommendation { get; init; }
|
||||
|
||||
public AnalysisRequest ToAnalysisRequest()
|
||||
{
|
||||
// Convert back to analysis request for upgrade scenarios
|
||||
return new AnalysisRequest
|
||||
{
|
||||
// Populate from existing result
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
public sealed record FidelityUpgradeResult
|
||||
{
|
||||
public required bool Success { get; init; }
|
||||
public Guid FindingId { get; init; }
|
||||
public FidelityLevel? PreviousLevel { get; init; }
|
||||
public FidelityLevel? NewLevel { get; init; }
|
||||
public decimal ConfidenceImprovement { get; init; }
|
||||
public FidelityAnalysisResult? NewResult { get; init; }
|
||||
public string? Error { get; init; }
|
||||
|
||||
public static FidelityUpgradeResult NotFound(Guid id) => new()
|
||||
{
|
||||
Success = false,
|
||||
FindingId = id,
|
||||
Error = "Finding not found"
|
||||
};
|
||||
|
||||
public static FidelityUpgradeResult AlreadyAtLevel(FidelityAnalysisResult existing) => new()
|
||||
{
|
||||
Success = true,
|
||||
PreviousLevel = existing.FidelityLevel,
|
||||
NewLevel = existing.FidelityLevel,
|
||||
ConfidenceImprovement = 0,
|
||||
NewResult = existing
|
||||
};
|
||||
}
|
||||
|
||||
// Supporting interfaces and types
|
||||
|
||||
public interface ICallGraphExtractor
|
||||
{
|
||||
Task<CallGraphResult> ExtractAsync(
|
||||
AnalysisRequest request,
|
||||
IReadOnlyList<string> languages,
|
||||
int maxDepth,
|
||||
CancellationToken ct);
|
||||
}
|
||||
|
||||
public interface IRuntimeCorrelator
|
||||
{
|
||||
Task<RuntimeCorrelationResult> CorrelateAsync(
|
||||
AnalysisRequest request,
|
||||
CancellationToken ct);
|
||||
}
|
||||
|
||||
public interface IBinaryMapper
|
||||
{
|
||||
Task<BinaryMappingResult> MapAsync(
|
||||
AnalysisRequest request,
|
||||
CancellationToken ct);
|
||||
}
|
||||
|
||||
public interface IPackageMatcher
|
||||
{
|
||||
Task<PackageMatchResult> MatchAsync(
|
||||
AnalysisRequest request,
|
||||
CancellationToken ct);
|
||||
}
|
||||
|
||||
public interface IAnalysisRepository
|
||||
{
|
||||
Task<FidelityAnalysisResult?> GetAnalysisAsync(Guid findingId, CancellationToken ct);
|
||||
Task SaveAnalysisAsync(FidelityAnalysisResult result, CancellationToken ct);
|
||||
}
|
||||
|
||||
public sealed record AnalysisRequest
|
||||
{
|
||||
public IReadOnlyList<string> DetectedLanguages { get; init; } = Array.Empty<string>();
|
||||
}
|
||||
|
||||
public sealed record PackageMatchResult
|
||||
{
|
||||
public bool HasExactMatch { get; init; }
|
||||
public IReadOnlyList<PackageMatch> Matches { get; init; } = Array.Empty<PackageMatch>();
|
||||
}
|
||||
|
||||
public sealed record PackageMatch
|
||||
{
|
||||
public required string PackageName { get; init; }
|
||||
public required string Version { get; init; }
|
||||
}
|
||||
|
||||
public sealed record CallGraphResult
|
||||
{
|
||||
public bool IsComplete { get; init; }
|
||||
public bool? HasPathToVulnerable { get; init; }
|
||||
}
|
||||
|
||||
public sealed record BinaryMappingResult
|
||||
{
|
||||
public bool HasMapping { get; init; }
|
||||
}
|
||||
|
||||
public sealed record RuntimeCorrelationResult
|
||||
{
|
||||
public bool? WasExecuted { get; init; }
|
||||
public int ObservationCount { get; init; }
|
||||
public bool HasCorroboration { get; init; }
|
||||
}
|
||||
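A sketch of how a caller might drive the analyzer above; `analyzer`, `request`, `findingId`, and `ct` are assumed to be supplied by the orchestration layer:

```csharp
// Start cheap, then upgrade only the findings that still look reachable.
FidelityAnalysisResult quick = await analyzer.AnalyzeAsync(request, FidelityLevel.Quick, ct);

if (quick.CanUpgrade && quick.IsReachable != false)
{
    FidelityUpgradeResult upgrade = await analyzer.UpgradeFidelityAsync(findingId, FidelityLevel.Deep, ct);
    if (upgrade.Success)
    {
        Console.WriteLine($"Confidence improved by {upgrade.ConfidenceImprovement}");
    }
}
```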
@@ -0,0 +1,112 @@
namespace StellaOps.Scanner.Orchestration.Fidelity;

/// <summary>
/// Analysis fidelity level controlling depth vs speed tradeoff.
/// </summary>
public enum FidelityLevel
{
    /// <summary>
    /// Fast heuristic analysis. Uses package-level matching only.
    /// ~10x faster than Standard. Lower confidence.
    /// </summary>
    Quick,

    /// <summary>
    /// Standard analysis. Includes call graph for top languages.
    /// Balanced speed and accuracy.
    /// </summary>
    Standard,

    /// <summary>
    /// Deep analysis. Full call graph, runtime correlation, binary mapping.
    /// Highest confidence but slowest.
    /// </summary>
    Deep
}

/// <summary>
/// Configuration for each fidelity level.
/// </summary>
public sealed record FidelityConfiguration
{
    public required FidelityLevel Level { get; init; }

    /// <summary>
    /// Whether to perform call graph extraction.
    /// </summary>
    public bool EnableCallGraph { get; init; }

    /// <summary>
    /// Whether to correlate with runtime evidence.
    /// </summary>
    public bool EnableRuntimeCorrelation { get; init; }

    /// <summary>
    /// Whether to perform binary mapping.
    /// </summary>
    public bool EnableBinaryMapping { get; init; }

    /// <summary>
    /// Maximum call graph depth.
    /// </summary>
    public int MaxCallGraphDepth { get; init; }

    /// <summary>
    /// Timeout for analysis.
    /// </summary>
    public TimeSpan Timeout { get; init; }

    /// <summary>
    /// Base confidence for this fidelity level.
    /// </summary>
    public decimal BaseConfidence { get; init; }

    /// <summary>
    /// Languages to analyze (null = all).
    /// </summary>
    public IReadOnlyList<string>? TargetLanguages { get; init; }

    public static FidelityConfiguration Quick => new()
    {
        Level = FidelityLevel.Quick,
        EnableCallGraph = false,
        EnableRuntimeCorrelation = false,
        EnableBinaryMapping = false,
        MaxCallGraphDepth = 0,
        Timeout = TimeSpan.FromSeconds(30),
        BaseConfidence = 0.5m,
        TargetLanguages = null
    };

    public static FidelityConfiguration Standard => new()
    {
        Level = FidelityLevel.Standard,
        EnableCallGraph = true,
        EnableRuntimeCorrelation = false,
        EnableBinaryMapping = false,
        MaxCallGraphDepth = 10,
        Timeout = TimeSpan.FromMinutes(5),
        BaseConfidence = 0.75m,
        TargetLanguages = ["java", "dotnet", "python", "go", "node"]
    };

    public static FidelityConfiguration Deep => new()
    {
        Level = FidelityLevel.Deep,
        EnableCallGraph = true,
        EnableRuntimeCorrelation = true,
        EnableBinaryMapping = true,
        MaxCallGraphDepth = 50,
        Timeout = TimeSpan.FromMinutes(30),
        BaseConfidence = 0.9m,
        TargetLanguages = null
    };

    public static FidelityConfiguration FromLevel(FidelityLevel level) => level switch
    {
        FidelityLevel.Quick => Quick,
        FidelityLevel.Standard => Standard,
        FidelityLevel.Deep => Deep,
        _ => Standard
    };
}
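// Illustrative only - not part of the diff. Shows how a caller might resolve a
// configuration from a requested level and use it to gate optional stages; the
// commented-out RunCallGraphStageAsync helper is a hypothetical placeholder.
var config = FidelityConfiguration.FromLevel(FidelityLevel.Standard);

using var cts = new CancellationTokenSource(config.Timeout);
if (config.EnableCallGraph)
{
    // Depth and language filters come straight from the selected profile.
    // await RunCallGraphStageAsync(config.MaxCallGraphDepth, config.TargetLanguages, cts.Token);
}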
@@ -0,0 +1,36 @@
# AGENTS - Scanner Reachability Library

## Mission
Deliver deterministic reachability analysis, slice generation, and evidence artifacts used by Scanner and downstream policy/VEX workflows.

## Roles
- Backend engineer (.NET 10, C# preview).
- QA engineer (unit/integration tests with deterministic fixtures).

## Required Reading
- `docs/README.md`
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/modules/scanner/architecture.md`
- `docs/reachability/DELIVERY_GUIDE.md`
- `docs/reachability/slice-schema.md`
- `docs/reachability/replay-verification.md`

## Working Directory & Boundaries
- Primary scope: `src/Scanner/__Libraries/StellaOps.Scanner.Reachability/`
- Tests: `src/Scanner/__Tests/StellaOps.Scanner.Reachability.Tests/`
- Avoid cross-module edits unless explicitly noted in the sprint.

## Determinism & Offline Rules
- Stable ordering for graphs, slices, and diffs.
- UTC timestamps only; avoid wall-clock nondeterminism.
- Offline-first: no external network calls; use CAS and local caches.

## Testing Expectations
- Add schema validation and round-trip tests for slice artifacts.
- Ensure deterministic serialization bytes for any DSSE payloads.
- Run `dotnet test src/Scanner/StellaOps.Scanner.sln` when feasible.

## Workflow
- Update sprint status on task transitions.
- Record decisions/risks in sprint Execution Log and Decisions & Risks.
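// Illustrative only - not part of the diff. A minimal xunit sketch of the
// "deterministic serialization bytes" expectation from the AGENTS testing notes;
// the sample payload and test class name are assumptions.
public sealed class SliceDeterminismTestsSketch
{
    private static object BuildSampleSubgraph() =>
        new { schema = "richgraph-v1", nodes = new[] { new { id = "n1" } } };

    [Xunit.Fact]
    public void Slice_payload_serialization_is_byte_stable()
    {
        var subgraph = BuildSampleSubgraph();

        var first = System.Text.Json.JsonSerializer.SerializeToUtf8Bytes(subgraph);
        var second = System.Text.Json.JsonSerializer.SerializeToUtf8Bytes(subgraph);

        // Re-serializing the same model must yield identical bytes, otherwise DSSE
        // digests and CAS keys would drift between runs.
        Xunit.Assert.Equal(first, second);
    }
}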
@@ -0,0 +1,17 @@
using StellaOps.Scanner.Reachability.Subgraph;

namespace StellaOps.Scanner.Reachability.Attestation;

public sealed record ReachabilitySubgraphPublishResult(
    string SubgraphDigest,
    string? CasUri,
    string AttestationDigest,
    byte[] DsseEnvelopeBytes);

public interface IReachabilitySubgraphPublisher
{
    Task<ReachabilitySubgraphPublishResult> PublishAsync(
        ReachabilitySubgraph subgraph,
        string subjectDigest,
        CancellationToken cancellationToken = default);
}
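// Illustrative only - not part of the diff. A minimal call-site sketch for the
// publisher interface above; `publisher`, `subgraph`, and `imageDigest` are assumed
// to come from DI and an earlier analysis step.
var result = await publisher.PublishAsync(subgraph, subjectDigest: imageDigest);

// The digests tie the DSSE envelope back to the exact subgraph bytes.
Console.WriteLine($"{result.SubgraphDigest} -> {result.AttestationDigest} ({result.CasUri ?? "no CAS"})");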
@@ -47,6 +47,18 @@ public static class ReachabilityAttestationServiceCollectionExtensions

        // Register options
        services.AddOptions<ReachabilityWitnessOptions>();
        services.AddOptions<ReachabilitySubgraphOptions>();

        // Register subgraph publisher
        services.TryAddSingleton<IReachabilitySubgraphPublisher>(sp =>
            new ReachabilitySubgraphPublisher(
                sp.GetRequiredService<IOptions<ReachabilitySubgraphOptions>>(),
                sp.GetRequiredService<ICryptoHash>(),
                sp.GetRequiredService<ILogger<ReachabilitySubgraphPublisher>>(),
                timeProvider: sp.GetService<TimeProvider>(),
                cas: sp.GetService<IFileContentAddressableStore>(),
                dsseSigningService: sp.GetService<IDsseSigningService>(),
                cryptoProfile: sp.GetService<ICryptoProfile>()));

        return services;
    }
@@ -64,4 +76,18 @@ public static class ReachabilityAttestationServiceCollectionExtensions
        services.Configure(configure);
        return services;
    }

    /// <summary>
    /// Configures reachability subgraph options.
    /// </summary>
    /// <param name="services">The service collection.</param>
    /// <param name="configure">Configuration action.</param>
    /// <returns>The service collection for chaining.</returns>
    public static IServiceCollection ConfigureReachabilitySubgraphOptions(
        this IServiceCollection services,
        Action<ReachabilitySubgraphOptions> configure)
    {
        services.Configure(configure);
        return services;
    }
}

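// Illustrative only - not part of the diff. Sketches how a host might wire the
// registrations above; AddReachabilityAttestation is assumed to be the public
// entry point of this extensions class.
services.AddReachabilityAttestation();
services.ConfigureReachabilitySubgraphOptions(options =>
{
    options.Enabled = true;
    options.StoreInCas = true;
    options.SigningKeyId = "scanner-deterministic"; // matches the fallback key id used by the publisher
});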
@@ -0,0 +1,24 @@
namespace StellaOps.Scanner.Reachability.Attestation;

/// <summary>
/// Options for reachability subgraph attestation.
/// </summary>
public sealed class ReachabilitySubgraphOptions
{
    public const string SectionName = "Scanner:ReachabilitySubgraph";

    /// <summary>
    /// Whether to generate DSSE attestations.
    /// </summary>
    public bool Enabled { get; set; } = true;

    /// <summary>
    /// Whether to store subgraph payloads in CAS when available.
    /// </summary>
    public bool StoreInCas { get; set; } = true;

    /// <summary>
    /// Optional signing key identifier.
    /// </summary>
    public string? SigningKeyId { get; set; }
}
@@ -0,0 +1,217 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.Attestor.ProofChain.Predicates;
|
||||
using StellaOps.Attestor.ProofChain.Statements;
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Replay.Core;
|
||||
using StellaOps.Scanner.Cache.Abstractions;
|
||||
using StellaOps.Scanner.ProofSpine;
|
||||
using StellaOps.Scanner.Reachability.Subgraph;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Attestation;
|
||||
|
||||
public sealed class ReachabilitySubgraphPublisher : IReachabilitySubgraphPublisher
|
||||
{
|
||||
private static readonly JsonSerializerOptions DsseJsonOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
WriteIndented = false
|
||||
};
|
||||
|
||||
private readonly ReachabilitySubgraphOptions _options;
|
||||
private readonly ICryptoHash _cryptoHash;
|
||||
private readonly ILogger<ReachabilitySubgraphPublisher> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly IFileContentAddressableStore? _cas;
|
||||
private readonly IDsseSigningService? _dsseSigningService;
|
||||
private readonly ICryptoProfile? _cryptoProfile;
|
||||
|
||||
public ReachabilitySubgraphPublisher(
|
||||
IOptions<ReachabilitySubgraphOptions> options,
|
||||
ICryptoHash cryptoHash,
|
||||
ILogger<ReachabilitySubgraphPublisher> logger,
|
||||
TimeProvider? timeProvider = null,
|
||||
IFileContentAddressableStore? cas = null,
|
||||
IDsseSigningService? dsseSigningService = null,
|
||||
ICryptoProfile? cryptoProfile = null)
|
||||
{
|
||||
_options = options?.Value ?? throw new ArgumentNullException(nameof(options));
|
||||
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_cas = cas;
|
||||
_dsseSigningService = dsseSigningService;
|
||||
_cryptoProfile = cryptoProfile;
|
||||
}
|
||||
|
||||
public async Task<ReachabilitySubgraphPublishResult> PublishAsync(
|
||||
ReachabilitySubgraph subgraph,
|
||||
string subjectDigest,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(subgraph);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(subjectDigest);
|
||||
|
||||
if (!_options.Enabled)
|
||||
{
|
||||
_logger.LogDebug("Reachability subgraph attestation disabled");
|
||||
return new ReachabilitySubgraphPublishResult(
|
||||
SubgraphDigest: string.Empty,
|
||||
CasUri: null,
|
||||
AttestationDigest: string.Empty,
|
||||
DsseEnvelopeBytes: Array.Empty<byte>());
|
||||
}
|
||||
|
||||
var normalized = subgraph.Normalize();
|
||||
var subgraphBytes = CanonicalJson.SerializeToUtf8Bytes(normalized);
|
||||
var subgraphDigest = _cryptoHash.ComputePrefixedHashForPurpose(subgraphBytes, HashPurpose.Graph);
|
||||
|
||||
string? casUri = null;
|
||||
if (_options.StoreInCas)
|
||||
{
|
||||
casUri = await StoreSubgraphAsync(subgraphBytes, subgraphDigest, cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
var statement = BuildStatement(normalized, subgraphDigest, casUri, subjectDigest);
|
||||
var statementBytes = CanonicalJson.SerializeToUtf8Bytes(statement);
|
||||
|
||||
var (envelope, envelopeBytes) = await CreateDsseEnvelopeAsync(statement, statementBytes, cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
var attestationDigest = _cryptoHash.ComputePrefixedHashForPurpose(envelopeBytes, HashPurpose.Attestation);
|
||||
|
||||
_logger.LogInformation(
|
||||
"Created reachability subgraph attestation: graphDigest={GraphDigest}, attestationDigest={AttestationDigest}",
|
||||
subgraphDigest,
|
||||
attestationDigest);
|
||||
|
||||
return new ReachabilitySubgraphPublishResult(
|
||||
SubgraphDigest: subgraphDigest,
|
||||
CasUri: casUri,
|
||||
AttestationDigest: attestationDigest,
|
||||
DsseEnvelopeBytes: envelopeBytes);
|
||||
}
|
||||
|
||||
private ReachabilitySubgraphStatement BuildStatement(
|
||||
ReachabilitySubgraph subgraph,
|
||||
string subgraphDigest,
|
||||
string? casUri,
|
||||
string subjectDigest)
|
||||
{
|
||||
var analysis = subgraph.AnalysisMetadata;
|
||||
var predicate = new ReachabilitySubgraphPredicate
|
||||
{
|
||||
SchemaVersion = subgraph.Version,
|
||||
GraphDigest = subgraphDigest,
|
||||
GraphCasUri = casUri,
|
||||
FindingKeys = subgraph.FindingKeys,
|
||||
Analysis = new ReachabilitySubgraphAnalysis
|
||||
{
|
||||
Analyzer = analysis?.Analyzer ?? "reachability",
|
||||
AnalyzerVersion = analysis?.AnalyzerVersion ?? "unknown",
|
||||
Confidence = analysis?.Confidence ?? 0.5,
|
||||
Completeness = analysis?.Completeness ?? "partial",
|
||||
GeneratedAt = analysis?.GeneratedAt ?? _timeProvider.GetUtcNow(),
|
||||
HashAlgorithm = _cryptoHash.GetAlgorithmForPurpose(HashPurpose.Graph)
|
||||
}
|
||||
};
|
||||
|
||||
return new ReachabilitySubgraphStatement
|
||||
{
|
||||
Subject =
|
||||
[
|
||||
BuildSubject(subjectDigest)
|
||||
],
|
||||
Predicate = predicate
|
||||
};
|
||||
}
|
||||
|
||||
private static Subject BuildSubject(string digest)
|
||||
{
|
||||
var (algorithm, value) = SplitDigest(digest);
|
||||
return new Subject
|
||||
{
|
||||
Name = digest,
|
||||
Digest = new Dictionary<string, string> { [algorithm] = value }
|
||||
};
|
||||
}
|
||||
|
||||
private async Task<string?> StoreSubgraphAsync(byte[] subgraphBytes, string subgraphDigest, CancellationToken cancellationToken)
|
||||
{
|
||||
if (_cas is null)
|
||||
{
|
||||
_logger.LogWarning("CAS storage requested but no CAS store configured; skipping subgraph storage.");
|
||||
return null;
|
||||
}
|
||||
|
||||
var key = ExtractHashDigest(subgraphDigest);
|
||||
var existing = await _cas.TryGetAsync(key, cancellationToken).ConfigureAwait(false);
|
||||
if (existing is null)
|
||||
{
|
||||
await using var stream = new MemoryStream(subgraphBytes, writable: false);
|
||||
await _cas.PutAsync(new FileCasPutRequest(key, stream, leaveOpen: false), cancellationToken).ConfigureAwait(false);
|
||||
}
|
||||
|
||||
return $"cas://reachability/subgraphs/{key}";
|
||||
}
|
||||
|
||||
private async Task<(DsseEnvelope Envelope, byte[] EnvelopeBytes)> CreateDsseEnvelopeAsync(
|
||||
ReachabilitySubgraphStatement statement,
|
||||
byte[] statementBytes,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
const string payloadType = "application/vnd.in-toto+json";
|
||||
|
||||
if (_dsseSigningService is not null)
|
||||
{
|
||||
var profile = _cryptoProfile ?? new InlineCryptoProfile(_options.SigningKeyId ?? "scanner-deterministic", "hs256");
|
||||
var signed = await _dsseSigningService.SignAsync(statement, payloadType, profile, cancellationToken).ConfigureAwait(false);
|
||||
return (signed, SerializeDsseEnvelope(signed));
|
||||
}
|
||||
|
||||
var signature = SHA256.HashData(statementBytes);
|
||||
var envelope = new DsseEnvelope(
|
||||
payloadType,
|
||||
Convert.ToBase64String(statementBytes),
|
||||
new[] { new DsseSignature(_options.SigningKeyId ?? "scanner-deterministic", Convert.ToBase64String(signature)) });
|
||||
return (envelope, SerializeDsseEnvelope(envelope));
|
||||
}
|
||||
|
||||
private static byte[] SerializeDsseEnvelope(DsseEnvelope envelope)
|
||||
{
|
||||
var signatures = envelope.Signatures
|
||||
.OrderBy(s => s.KeyId, StringComparer.Ordinal)
|
||||
.ThenBy(s => s.Sig, StringComparer.Ordinal)
|
||||
.Select(s => new { keyid = s.KeyId, sig = s.Sig })
|
||||
.ToArray();
|
||||
|
||||
var dto = new
|
||||
{
|
||||
payloadType = envelope.PayloadType,
|
||||
payload = envelope.Payload,
|
||||
signatures
|
||||
};
|
||||
|
||||
return JsonSerializer.SerializeToUtf8Bytes(dto, DsseJsonOptions);
|
||||
}
|
||||
|
||||
private static string ExtractHashDigest(string prefixedHash)
|
||||
{
|
||||
var colonIndex = prefixedHash.IndexOf(':');
|
||||
return colonIndex >= 0 ? prefixedHash[(colonIndex + 1)..] : prefixedHash;
|
||||
}
|
||||
|
||||
private static (string Algorithm, string Value) SplitDigest(string digest)
|
||||
{
|
||||
var colonIndex = digest.IndexOf(':');
|
||||
if (colonIndex <= 0 || colonIndex == digest.Length - 1)
|
||||
{
|
||||
return ("sha256", digest);
|
||||
}
|
||||
|
||||
return (digest[..colonIndex], digest[(colonIndex + 1)..]);
|
||||
}
|
||||
|
||||
private sealed record InlineCryptoProfile(string KeyId, string Algorithm) : ICryptoProfile;
|
||||
}
|
||||
@@ -0,0 +1,247 @@
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.MiniMap;
|
||||
|
||||
public interface IMiniMapExtractor
|
||||
{
|
||||
ReachabilityMiniMap Extract(RichGraph graph, string vulnerableComponent, int maxPaths = 10);
|
||||
}
|
||||
|
||||
public sealed class MiniMapExtractor : IMiniMapExtractor
|
||||
{
|
||||
public ReachabilityMiniMap Extract(
|
||||
RichGraph graph,
|
||||
string vulnerableComponent,
|
||||
int maxPaths = 10)
|
||||
{
|
||||
// Find vulnerable component node
|
||||
var vulnNode = graph.Nodes.FirstOrDefault(n =>
|
||||
n.Purl == vulnerableComponent ||
|
||||
n.SymbolId?.Contains(vulnerableComponent) == true);
|
||||
|
||||
if (vulnNode is null)
|
||||
{
|
||||
return CreateNotFoundMap(vulnerableComponent);
|
||||
}
|
||||
|
||||
// Find all entrypoints
|
||||
var entrypoints = graph.Nodes
|
||||
.Where(n => IsEntrypoint(n))
|
||||
.ToList();
|
||||
|
||||
// BFS from each entrypoint to vulnerable component
|
||||
var paths = new List<MiniMapPath>();
|
||||
var entrypointInfos = new List<MiniMapEntrypoint>();
|
||||
|
||||
foreach (var ep in entrypoints)
|
||||
{
|
||||
var epPaths = FindPaths(graph, ep, vulnNode, maxDepth: 20);
|
||||
|
||||
if (epPaths.Count > 0)
|
||||
{
|
||||
entrypointInfos.Add(new MiniMapEntrypoint
|
||||
{
|
||||
Node = ToMiniMapNode(ep),
|
||||
Kind = ClassifyEntrypoint(ep),
|
||||
PathCount = epPaths.Count,
|
||||
ShortestPathLength = epPaths.Min(p => p.Length)
|
||||
});
|
||||
|
||||
paths.AddRange(epPaths.Take(maxPaths / Math.Max(entrypoints.Count, 1) + 1));
|
||||
}
|
||||
}
|
||||
|
||||
// Determine state
|
||||
var state = paths.Count > 0
|
||||
? (paths.Any(p => p.HasRuntimeEvidence)
|
||||
? ReachabilityState.ConfirmedReachable
|
||||
: ReachabilityState.StaticReachable)
|
||||
: ReachabilityState.StaticUnreachable;
|
||||
|
||||
// Calculate confidence
|
||||
var confidence = CalculateConfidence(paths, entrypointInfos, graph);
|
||||
|
||||
return new ReachabilityMiniMap
|
||||
{
|
||||
FindingId = Guid.Empty, // Set by caller
|
||||
VulnerabilityId = string.Empty, // Set by caller
|
||||
VulnerableComponent = ToMiniMapNode(vulnNode),
|
||||
Entrypoints = entrypointInfos.OrderBy(e => e.ShortestPathLength).ToList(),
|
||||
Paths = paths.OrderBy(p => p.Length).Take(maxPaths).ToList(),
|
||||
State = state,
|
||||
Confidence = confidence,
|
||||
GraphDigest = ComputeGraphDigest(graph),
|
||||
AnalyzedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
|
||||
private static ReachabilityMiniMap CreateNotFoundMap(string vulnerableComponent)
|
||||
{
|
||||
return new ReachabilityMiniMap
|
||||
{
|
||||
FindingId = Guid.Empty,
|
||||
VulnerabilityId = string.Empty,
|
||||
VulnerableComponent = new MiniMapNode
|
||||
{
|
||||
Id = vulnerableComponent,
|
||||
Label = vulnerableComponent,
|
||||
Type = MiniMapNodeType.VulnerableComponent
|
||||
},
|
||||
Entrypoints = Array.Empty<MiniMapEntrypoint>(),
|
||||
Paths = Array.Empty<MiniMapPath>(),
|
||||
State = ReachabilityState.Unknown,
|
||||
Confidence = 0m,
|
||||
GraphDigest = string.Empty,
|
||||
AnalyzedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
}
|
||||
|
||||
private static bool IsEntrypoint(RichGraphNode node)
|
||||
{
|
||||
return node.Kind is "entrypoint" or "export" or "main" or "handler";
|
||||
}
|
||||
|
||||
private static EntrypointKind ClassifyEntrypoint(RichGraphNode node)
|
||||
{
|
||||
if (node.Attributes?.ContainsKey("http_method") == true)
|
||||
return EntrypointKind.HttpEndpoint;
|
||||
if (node.Attributes?.ContainsKey("grpc_service") == true)
|
||||
return EntrypointKind.GrpcMethod;
|
||||
if (node.Kind == "main")
|
||||
return EntrypointKind.MainFunction;
|
||||
if (node.Kind == "handler")
|
||||
return EntrypointKind.EventHandler;
|
||||
if (node.Attributes?.ContainsKey("cli_command") == true)
|
||||
return EntrypointKind.CliCommand;
|
||||
|
||||
return EntrypointKind.PublicApi;
|
||||
}
|
||||
|
||||
private List<MiniMapPath> FindPaths(
|
||||
RichGraph graph,
|
||||
RichGraphNode start,
|
||||
RichGraphNode end,
|
||||
int maxDepth)
|
||||
{
|
||||
var paths = new List<MiniMapPath>();
|
||||
var queue = new Queue<(RichGraphNode node, List<RichGraphNode> path)>();
|
||||
queue.Enqueue((start, new List<RichGraphNode> { start }));
|
||||
|
||||
while (queue.Count > 0 && paths.Count < 100)
|
||||
{
|
||||
var (current, path) = queue.Dequeue();
|
||||
|
||||
if (path.Count > maxDepth) continue;
|
||||
|
||||
if (current.Id == end.Id)
|
||||
{
|
||||
paths.Add(BuildPath(path, graph));
|
||||
continue;
|
||||
}
|
||||
|
||||
var edges = graph.Edges.Where(e => e.From == current.Id);
|
||||
foreach (var edge in edges)
|
||||
{
|
||||
var nextNode = graph.Nodes.FirstOrDefault(n => n.Id == edge.To);
|
||||
if (nextNode is not null && !path.Any(n => n.Id == nextNode.Id))
|
||||
{
|
||||
var newPath = new List<RichGraphNode>(path) { nextNode };
|
||||
queue.Enqueue((nextNode, newPath));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return paths;
|
||||
}
|
||||
|
||||
private static MiniMapPath BuildPath(List<RichGraphNode> nodes, RichGraph graph)
|
||||
{
|
||||
var steps = nodes.Select((n, i) =>
|
||||
{
|
||||
var edge = i < nodes.Count - 1
|
||||
? graph.Edges.FirstOrDefault(e => e.From == n.Id && e.To == nodes[i + 1].Id)
|
||||
: null;
|
||||
|
||||
return new MiniMapPathStep
|
||||
{
|
||||
Index = i,
|
||||
Node = ToMiniMapNode(n),
|
||||
CallType = edge?.Kind
|
||||
};
|
||||
}).ToList();
|
||||
|
||||
var hasRuntime = graph.Edges
|
||||
.Where(e => nodes.Any(n => n.Id == e.From))
|
||||
.Any(e => e.Evidence?.Contains("runtime") == true);
|
||||
|
||||
return new MiniMapPath
|
||||
{
|
||||
PathId = $"path:{ComputePathHash(nodes)}",
|
||||
EntrypointId = nodes.First().Id,
|
||||
Steps = steps,
|
||||
HasRuntimeEvidence = hasRuntime,
|
||||
PathConfidence = hasRuntime ? 0.95m : 0.75m
|
||||
};
|
||||
}
|
||||
|
||||
private static MiniMapNode ToMiniMapNode(RichGraphNode node)
|
||||
{
|
||||
var sourceFile = node.Attributes?.GetValueOrDefault("source_file");
|
||||
int? lineNumber = null;
|
||||
if (node.Attributes?.TryGetValue("line", out var lineStr) == true && int.TryParse(lineStr, out var line))
|
||||
{
|
||||
lineNumber = line;
|
||||
}
|
||||
|
||||
return new MiniMapNode
|
||||
{
|
||||
Id = node.Id,
|
||||
Label = node.Display ?? node.SymbolId ?? node.Id,
|
||||
Type = node.Kind switch
|
||||
{
|
||||
"entrypoint" or "export" or "main" => MiniMapNodeType.Entrypoint,
|
||||
"function" or "method" => MiniMapNodeType.Function,
|
||||
"class" => MiniMapNodeType.Class,
|
||||
"module" or "package" => MiniMapNodeType.Module,
|
||||
"sink" => MiniMapNodeType.Sink,
|
||||
_ => MiniMapNodeType.Function
|
||||
},
|
||||
Purl = node.Purl,
|
||||
SourceFile = sourceFile,
|
||||
LineNumber = lineNumber
|
||||
};
|
||||
}
|
||||
|
||||
private static decimal CalculateConfidence(
|
||||
List<MiniMapPath> paths,
|
||||
List<MiniMapEntrypoint> entrypoints,
|
||||
RichGraph graph)
|
||||
{
|
||||
if (paths.Count == 0) return 0.9m; // High confidence in unreachability
|
||||
|
||||
var runtimePaths = paths.Count(p => p.HasRuntimeEvidence);
|
||||
var runtimeRatio = paths.Count > 0 ? (decimal)runtimePaths / paths.Count : 0m;
|
||||
|
||||
return 0.6m + (0.3m * runtimeRatio);
|
||||
}
|
||||
|
||||
private static string ComputePathHash(List<RichGraphNode> nodes)
|
||||
{
|
||||
var ids = string.Join("|", nodes.Select(n => n.Id));
|
||||
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(ids));
|
||||
return Convert.ToHexString(hash)[..16].ToLowerInvariant();
|
||||
}
|
||||
|
||||
private static string ComputeGraphDigest(RichGraph graph)
|
||||
{
|
||||
var nodeIds = string.Join(",", graph.Nodes.Select(n => n.Id).OrderBy(x => x));
|
||||
var edgeIds = string.Join(",", graph.Edges.Select(e => $"{e.From}->{e.To}").OrderBy(x => x));
|
||||
var combined = $"{nodeIds}|{edgeIds}";
|
||||
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(combined));
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
|
||||
}
|
||||
}
|
||||
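// Illustrative only - not part of the diff. Shows the extractor above being invoked
// for a single finding; `graph` is assumed to come from RichGraphReader and the purl
// is a hypothetical example.
var extractor = new MiniMapExtractor();
var miniMap = extractor.Extract(graph, "pkg:npm/lodash@4.17.20", maxPaths: 5);

// State and confidence summarize whether any entrypoint reaches the component.
Console.WriteLine($"{miniMap.State} (confidence {miniMap.Confidence:0.00}), {miniMap.Paths.Count} path(s)");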
@@ -0,0 +1,203 @@
|
||||
namespace StellaOps.Scanner.Reachability.MiniMap;
|
||||
|
||||
/// <summary>
|
||||
/// Condensed reachability visualization for a finding.
|
||||
/// Shows paths from entrypoints to vulnerable component to sinks.
|
||||
/// </summary>
|
||||
public sealed record ReachabilityMiniMap
|
||||
{
|
||||
/// <summary>
|
||||
/// Finding this map is for.
|
||||
/// </summary>
|
||||
public required Guid FindingId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Vulnerability ID.
|
||||
/// </summary>
|
||||
public required string VulnerabilityId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The vulnerable component.
|
||||
/// </summary>
|
||||
public required MiniMapNode VulnerableComponent { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Entry points that reach the vulnerable component.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<MiniMapEntrypoint> Entrypoints { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Paths from entrypoints to vulnerable component.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<MiniMapPath> Paths { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Overall reachability state.
|
||||
/// </summary>
|
||||
public required ReachabilityState State { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Confidence of the analysis.
|
||||
/// </summary>
|
||||
public required decimal Confidence { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Full graph digest for verification.
|
||||
/// </summary>
|
||||
public required string GraphDigest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When analysis was performed.
|
||||
/// </summary>
|
||||
public required DateTimeOffset AnalyzedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A node in the mini-map.
|
||||
/// </summary>
|
||||
public sealed record MiniMapNode
|
||||
{
|
||||
/// <summary>
|
||||
/// Node identifier.
|
||||
/// </summary>
|
||||
public required string Id { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Display label.
|
||||
/// </summary>
|
||||
public required string Label { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Node type.
|
||||
/// </summary>
|
||||
public required MiniMapNodeType Type { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Package URL (if applicable).
|
||||
/// </summary>
|
||||
public string? Purl { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Source file location.
|
||||
/// </summary>
|
||||
public string? SourceFile { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Line number in source.
|
||||
/// </summary>
|
||||
public int? LineNumber { get; init; }
|
||||
}
|
||||
|
||||
public enum MiniMapNodeType
|
||||
{
|
||||
Entrypoint,
|
||||
Function,
|
||||
Class,
|
||||
Module,
|
||||
VulnerableComponent,
|
||||
Sink
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// An entry point in the mini-map.
|
||||
/// </summary>
|
||||
public sealed record MiniMapEntrypoint
|
||||
{
|
||||
/// <summary>
|
||||
/// Entry point node.
|
||||
/// </summary>
|
||||
public required MiniMapNode Node { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Entry point kind.
|
||||
/// </summary>
|
||||
public required EntrypointKind Kind { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of paths from this entrypoint.
|
||||
/// </summary>
|
||||
public required int PathCount { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Shortest path length to vulnerable component.
|
||||
/// </summary>
|
||||
public required int ShortestPathLength { get; init; }
|
||||
}
|
||||
|
||||
public enum EntrypointKind
|
||||
{
|
||||
HttpEndpoint,
|
||||
GrpcMethod,
|
||||
MessageHandler,
|
||||
CliCommand,
|
||||
MainFunction,
|
||||
PublicApi,
|
||||
EventHandler,
|
||||
Other
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A path from entrypoint to vulnerable component.
|
||||
/// </summary>
|
||||
public sealed record MiniMapPath
|
||||
{
|
||||
/// <summary>
|
||||
/// Path identifier.
|
||||
/// </summary>
|
||||
public required string PathId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Starting entrypoint ID.
|
||||
/// </summary>
|
||||
public required string EntrypointId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Ordered steps in the path.
|
||||
/// </summary>
|
||||
public required IReadOnlyList<MiniMapPathStep> Steps { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Path length.
|
||||
/// </summary>
|
||||
public int Length => Steps.Count;
|
||||
|
||||
/// <summary>
|
||||
/// Whether path has runtime corroboration.
|
||||
/// </summary>
|
||||
public bool HasRuntimeEvidence { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Confidence for this specific path.
|
||||
/// </summary>
|
||||
public decimal PathConfidence { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// A step in a path.
|
||||
/// </summary>
|
||||
public sealed record MiniMapPathStep
|
||||
{
|
||||
/// <summary>
|
||||
/// Step index (0-based).
|
||||
/// </summary>
|
||||
public required int Index { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Node at this step.
|
||||
/// </summary>
|
||||
public required MiniMapNode Node { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Call type to next step.
|
||||
/// </summary>
|
||||
public string? CallType { get; init; }
|
||||
}
|
||||
|
||||
public enum ReachabilityState
|
||||
{
|
||||
Unknown,
|
||||
StaticReachable,
|
||||
StaticUnreachable,
|
||||
ConfirmedReachable,
|
||||
ConfirmedUnreachable
|
||||
}
|
||||
@@ -0,0 +1,311 @@
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Scanner.Reachability.Gates;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability;
|
||||
|
||||
public sealed class RichGraphReader
|
||||
{
|
||||
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
PropertyNameCaseInsensitive = true
|
||||
};
|
||||
|
||||
public async Task<RichGraph> ReadAsync(Stream stream, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(stream);
|
||||
|
||||
var document = await JsonSerializer.DeserializeAsync<RichGraphDocument>(
|
||||
stream,
|
||||
SerializerOptions,
|
||||
cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
if (document is null)
|
||||
{
|
||||
throw new InvalidOperationException("Failed to deserialize richgraph payload.");
|
||||
}
|
||||
|
||||
return Map(document);
|
||||
}
|
||||
|
||||
public RichGraph Read(ReadOnlySpan<byte> payload)
|
||||
{
|
||||
var document = JsonSerializer.Deserialize<RichGraphDocument>(payload, SerializerOptions);
|
||||
if (document is null)
|
||||
{
|
||||
throw new InvalidOperationException("Failed to deserialize richgraph payload.");
|
||||
}
|
||||
|
||||
return Map(document);
|
||||
}
|
||||
|
||||
private static RichGraph Map(RichGraphDocument document)
|
||||
{
|
||||
var analyzerDoc = document.Analyzer;
|
||||
var analyzer = new RichGraphAnalyzer(
|
||||
analyzerDoc?.Name ?? "scanner.reachability",
|
||||
analyzerDoc?.Version ?? "0.1.0",
|
||||
analyzerDoc?.ToolchainDigest);
|
||||
|
||||
var nodes = document.Nodes?
|
||||
.Select(MapNode)
|
||||
.Where(n => !string.IsNullOrWhiteSpace(n.Id))
|
||||
.ToList() ?? new List<RichGraphNode>();
|
||||
|
||||
var edges = document.Edges?
|
||||
.Select(MapEdge)
|
||||
.Where(e => !string.IsNullOrWhiteSpace(e.From) && !string.IsNullOrWhiteSpace(e.To))
|
||||
.ToList() ?? new List<RichGraphEdge>();
|
||||
|
||||
var roots = document.Roots?
|
||||
.Select(r => new RichGraphRoot(
|
||||
r.Id ?? string.Empty,
|
||||
string.IsNullOrWhiteSpace(r.Phase) ? "runtime" : r.Phase,
|
||||
r.Source))
|
||||
.Where(r => !string.IsNullOrWhiteSpace(r.Id))
|
||||
.ToList() ?? new List<RichGraphRoot>();
|
||||
|
||||
return new RichGraph(nodes, edges, roots, analyzer, document.Schema ?? "richgraph-v1").Trimmed();
|
||||
}
|
||||
|
||||
private static RichGraphNode MapNode(RichGraphNodeDocument node)
|
||||
{
|
||||
var symbol = node.Symbol is null
|
||||
? null
|
||||
: new ReachabilitySymbol(
|
||||
node.Symbol.Mangled,
|
||||
node.Symbol.Demangled,
|
||||
node.Symbol.Source,
|
||||
node.Symbol.Confidence);
|
||||
|
||||
return new RichGraphNode(
|
||||
Id: node.Id ?? string.Empty,
|
||||
SymbolId: string.IsNullOrWhiteSpace(node.SymbolId) ? (node.Id ?? string.Empty) : node.SymbolId,
|
||||
CodeId: node.CodeId,
|
||||
Purl: node.Purl,
|
||||
Lang: string.IsNullOrWhiteSpace(node.Lang) ? "unknown" : node.Lang,
|
||||
Kind: string.IsNullOrWhiteSpace(node.Kind) ? "unknown" : node.Kind,
|
||||
Display: node.Display,
|
||||
BuildId: node.BuildId,
|
||||
Evidence: node.Evidence,
|
||||
Attributes: node.Attributes,
|
||||
SymbolDigest: node.SymbolDigest,
|
||||
Symbol: symbol,
|
||||
CodeBlockHash: node.CodeBlockHash);
|
||||
}
|
||||
|
||||
private static RichGraphEdge MapEdge(RichGraphEdgeDocument edge)
|
||||
{
|
||||
IReadOnlyList<DetectedGate>? gates = null;
|
||||
if (edge.Gates is { Count: > 0 })
|
||||
{
|
||||
gates = edge.Gates.Select(MapGate).ToList();
|
||||
}
|
||||
|
||||
return new RichGraphEdge(
|
||||
From: edge.From ?? string.Empty,
|
||||
To: edge.To ?? string.Empty,
|
||||
Kind: string.IsNullOrWhiteSpace(edge.Kind) ? "call" : edge.Kind,
|
||||
Purl: edge.Purl,
|
||||
SymbolDigest: edge.SymbolDigest,
|
||||
Evidence: edge.Evidence,
|
||||
Confidence: edge.Confidence,
|
||||
Candidates: edge.Candidates,
|
||||
Gates: gates,
|
||||
GateMultiplierBps: edge.GateMultiplierBps);
|
||||
}
|
||||
|
||||
private static DetectedGate MapGate(RichGraphGateDocument gate)
|
||||
{
|
||||
return new DetectedGate
|
||||
{
|
||||
Type = ParseGateType(gate.Type),
|
||||
Detail = gate.Detail ?? string.Empty,
|
||||
GuardSymbol = gate.GuardSymbol ?? string.Empty,
|
||||
SourceFile = gate.SourceFile,
|
||||
LineNumber = gate.LineNumber,
|
||||
Confidence = gate.Confidence,
|
||||
DetectionMethod = gate.DetectionMethod ?? string.Empty
|
||||
};
|
||||
}
|
||||
|
||||
private static GateType ParseGateType(string? value)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(value))
|
||||
{
|
||||
return GateType.NonDefaultConfig;
|
||||
}
|
||||
|
||||
var normalized = value
|
||||
.Trim()
|
||||
.Replace("_", string.Empty, StringComparison.Ordinal)
|
||||
.Replace("-", string.Empty, StringComparison.Ordinal)
|
||||
.ToLowerInvariant();
|
||||
|
||||
return normalized switch
|
||||
{
|
||||
"authrequired" => GateType.AuthRequired,
|
||||
"featureflag" => GateType.FeatureFlag,
|
||||
"adminonly" => GateType.AdminOnly,
|
||||
"nondefaultconfig" => GateType.NonDefaultConfig,
|
||||
_ => GateType.NonDefaultConfig
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
internal sealed class RichGraphDocument
|
||||
{
|
||||
[JsonPropertyName("schema")]
|
||||
public string? Schema { get; init; }
|
||||
|
||||
[JsonPropertyName("analyzer")]
|
||||
public RichGraphAnalyzerDocument? Analyzer { get; init; }
|
||||
|
||||
[JsonPropertyName("nodes")]
|
||||
public List<RichGraphNodeDocument>? Nodes { get; init; }
|
||||
|
||||
[JsonPropertyName("edges")]
|
||||
public List<RichGraphEdgeDocument>? Edges { get; init; }
|
||||
|
||||
[JsonPropertyName("roots")]
|
||||
public List<RichGraphRootDocument>? Roots { get; init; }
|
||||
}
|
||||
|
||||
internal sealed class RichGraphAnalyzerDocument
|
||||
{
|
||||
[JsonPropertyName("name")]
|
||||
public string? Name { get; init; }
|
||||
|
||||
[JsonPropertyName("version")]
|
||||
public string? Version { get; init; }
|
||||
|
||||
[JsonPropertyName("toolchain_digest")]
|
||||
public string? ToolchainDigest { get; init; }
|
||||
}
|
||||
|
||||
internal sealed class RichGraphNodeDocument
|
||||
{
|
||||
[JsonPropertyName("id")]
|
||||
public string? Id { get; init; }
|
||||
|
||||
[JsonPropertyName("symbol_id")]
|
||||
public string? SymbolId { get; init; }
|
||||
|
||||
[JsonPropertyName("code_id")]
|
||||
public string? CodeId { get; init; }
|
||||
|
||||
[JsonPropertyName("purl")]
|
||||
public string? Purl { get; init; }
|
||||
|
||||
[JsonPropertyName("lang")]
|
||||
public string? Lang { get; init; }
|
||||
|
||||
[JsonPropertyName("kind")]
|
||||
public string? Kind { get; init; }
|
||||
|
||||
[JsonPropertyName("display")]
|
||||
public string? Display { get; init; }
|
||||
|
||||
[JsonPropertyName("build_id")]
|
||||
public string? BuildId { get; init; }
|
||||
|
||||
[JsonPropertyName("code_block_hash")]
|
||||
public string? CodeBlockHash { get; init; }
|
||||
|
||||
[JsonPropertyName("symbol_digest")]
|
||||
public string? SymbolDigest { get; init; }
|
||||
|
||||
[JsonPropertyName("evidence")]
|
||||
public List<string>? Evidence { get; init; }
|
||||
|
||||
[JsonPropertyName("attributes")]
|
||||
public Dictionary<string, string>? Attributes { get; init; }
|
||||
|
||||
[JsonPropertyName("symbol")]
|
||||
public RichGraphSymbolDocument? Symbol { get; init; }
|
||||
}
|
||||
|
||||
internal sealed class RichGraphSymbolDocument
|
||||
{
|
||||
[JsonPropertyName("mangled")]
|
||||
public string? Mangled { get; init; }
|
||||
|
||||
[JsonPropertyName("demangled")]
|
||||
public string? Demangled { get; init; }
|
||||
|
||||
[JsonPropertyName("source")]
|
||||
public string? Source { get; init; }
|
||||
|
||||
[JsonPropertyName("confidence")]
|
||||
public double? Confidence { get; init; }
|
||||
}
|
||||
|
||||
internal sealed class RichGraphEdgeDocument
|
||||
{
|
||||
[JsonPropertyName("from")]
|
||||
public string? From { get; init; }
|
||||
|
||||
[JsonPropertyName("to")]
|
||||
public string? To { get; init; }
|
||||
|
||||
[JsonPropertyName("kind")]
|
||||
public string? Kind { get; init; }
|
||||
|
||||
[JsonPropertyName("purl")]
|
||||
public string? Purl { get; init; }
|
||||
|
||||
[JsonPropertyName("symbol_digest")]
|
||||
public string? SymbolDigest { get; init; }
|
||||
|
||||
[JsonPropertyName("confidence")]
|
||||
public double Confidence { get; init; } = 0.0;
|
||||
|
||||
[JsonPropertyName("gate_multiplier_bps")]
|
||||
public int GateMultiplierBps { get; init; } = 10000;
|
||||
|
||||
[JsonPropertyName("gates")]
|
||||
public List<RichGraphGateDocument>? Gates { get; init; }
|
||||
|
||||
[JsonPropertyName("evidence")]
|
||||
public List<string>? Evidence { get; init; }
|
||||
|
||||
[JsonPropertyName("candidates")]
|
||||
public List<string>? Candidates { get; init; }
|
||||
}
|
||||
|
||||
internal sealed class RichGraphGateDocument
|
||||
{
|
||||
[JsonPropertyName("type")]
|
||||
public string? Type { get; init; }
|
||||
|
||||
[JsonPropertyName("detail")]
|
||||
public string? Detail { get; init; }
|
||||
|
||||
[JsonPropertyName("guard_symbol")]
|
||||
public string? GuardSymbol { get; init; }
|
||||
|
||||
[JsonPropertyName("source_file")]
|
||||
public string? SourceFile { get; init; }
|
||||
|
||||
[JsonPropertyName("line_number")]
|
||||
public int? LineNumber { get; init; }
|
||||
|
||||
[JsonPropertyName("confidence")]
|
||||
public double Confidence { get; init; } = 0.0;
|
||||
|
||||
[JsonPropertyName("detection_method")]
|
||||
public string? DetectionMethod { get; init; }
|
||||
}
|
||||
|
||||
internal sealed class RichGraphRootDocument
|
||||
{
|
||||
[JsonPropertyName("id")]
|
||||
public string? Id { get; init; }
|
||||
|
||||
[JsonPropertyName("phase")]
|
||||
public string? Phase { get; init; }
|
||||
|
||||
[JsonPropertyName("source")]
|
||||
public string? Source { get; init; }
|
||||
}
|
||||
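// Illustrative only - not part of the diff. A minimal sketch of loading a
// richgraph-v1 payload with the reader above; the file path is hypothetical.
var reader = new RichGraphReader();
await using var stream = File.OpenRead("out/richgraph.json");
RichGraph graph = await reader.ReadAsync(stream);
// graph now carries the mapped nodes/edges/roots with defaults applied
// (unknown analyzer -> "scanner.reachability"/"0.1.0", blank-id entries dropped).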
@@ -0,0 +1,347 @@
|
||||
using System.Collections.Immutable;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Core;
|
||||
using StellaOps.Scanner.Reachability.Slices;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Runtime;
|
||||
|
||||
/// <summary>
|
||||
/// Configuration for runtime-static graph merging.
|
||||
/// </summary>
|
||||
public sealed record RuntimeStaticMergeOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Confidence boost for edges observed at runtime. Default: 1.0 (max).
|
||||
/// </summary>
|
||||
public double ObservedConfidenceBoost { get; init; } = 1.0;
|
||||
|
||||
/// <summary>
|
||||
/// Base confidence for runtime-only edges (not in static graph). Default: 0.9.
|
||||
/// </summary>
|
||||
public double RuntimeOnlyConfidence { get; init; } = 0.9;
|
||||
|
||||
/// <summary>
|
||||
/// Minimum observation count to include a runtime-only edge. Default: 1.
|
||||
/// </summary>
|
||||
public int MinObservationCount { get; init; } = 1;
|
||||
|
||||
/// <summary>
|
||||
/// Maximum age of observations to consider fresh. Default: 7 days.
|
||||
/// </summary>
|
||||
public TimeSpan FreshnessWindow { get; init; } = TimeSpan.FromDays(7);
|
||||
|
||||
/// <summary>
|
||||
/// Whether to add edges from runtime that don't exist in static graph.
|
||||
/// </summary>
|
||||
public bool AddRuntimeOnlyEdges { get; init; } = true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of merging runtime traces with static call graph.
|
||||
/// </summary>
|
||||
public sealed record RuntimeStaticMergeResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Merged graph with runtime annotations.
|
||||
/// </summary>
|
||||
public required CallGraph MergedGraph { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Statistics about the merge operation.
|
||||
/// </summary>
|
||||
public required MergeStatistics Statistics { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Edges that were observed at runtime.
|
||||
/// </summary>
|
||||
public ImmutableArray<ObservedEdge> ObservedEdges { get; init; } = ImmutableArray<ObservedEdge>.Empty;
|
||||
|
||||
/// <summary>
|
||||
/// Edges added from runtime that weren't in static graph.
|
||||
/// </summary>
|
||||
public ImmutableArray<RuntimeOnlyEdge> RuntimeOnlyEdges { get; init; } = ImmutableArray<RuntimeOnlyEdge>.Empty;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Statistics from the merge operation.
|
||||
/// </summary>
|
||||
public sealed record MergeStatistics
|
||||
{
|
||||
public int StaticEdgeCount { get; init; }
|
||||
public int RuntimeEventCount { get; init; }
|
||||
public int MatchedEdgeCount { get; init; }
|
||||
public int RuntimeOnlyEdgeCount { get; init; }
|
||||
public int UnmatchedStaticEdgeCount { get; init; }
|
||||
public double CoverageRatio => StaticEdgeCount > 0
|
||||
? (double)MatchedEdgeCount / StaticEdgeCount
|
||||
: 0.0;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// An edge that was observed at runtime.
|
||||
/// </summary>
|
||||
public sealed record ObservedEdge
|
||||
{
|
||||
public required string From { get; init; }
|
||||
public required string To { get; init; }
|
||||
public required DateTimeOffset FirstObserved { get; init; }
|
||||
public required DateTimeOffset LastObserved { get; init; }
|
||||
public required int ObservationCount { get; init; }
|
||||
public string? TraceDigest { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// An edge that only exists in runtime observations (dynamic dispatch, etc).
|
||||
/// </summary>
|
||||
public sealed record RuntimeOnlyEdge
|
||||
{
|
||||
public required string From { get; init; }
|
||||
public required string To { get; init; }
|
||||
public required DateTimeOffset FirstObserved { get; init; }
|
||||
public required DateTimeOffset LastObserved { get; init; }
|
||||
public required int ObservationCount { get; init; }
|
||||
public required string Origin { get; init; } // "runtime", "dynamic_dispatch", etc.
|
||||
public string? TraceDigest { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a runtime call event from eBPF/ETW collectors.
|
||||
/// </summary>
|
||||
public sealed record RuntimeCallEvent
|
||||
{
|
||||
public required ulong Timestamp { get; init; }
|
||||
public required uint Pid { get; init; }
|
||||
public required uint Tid { get; init; }
|
||||
public required string CallerSymbol { get; init; }
|
||||
public required string CalleeSymbol { get; init; }
|
||||
public required string BinaryPath { get; init; }
|
||||
public string? TraceDigest { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Merges runtime trace observations with static call graphs.
|
||||
/// </summary>
|
||||
public sealed class RuntimeStaticMerger
|
||||
{
|
||||
private readonly RuntimeStaticMergeOptions _options;
|
||||
private readonly ILogger<RuntimeStaticMerger> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public RuntimeStaticMerger(
|
||||
RuntimeStaticMergeOptions? options = null,
|
||||
ILogger<RuntimeStaticMerger>? logger = null,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_options = options ?? new RuntimeStaticMergeOptions();
|
||||
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<RuntimeStaticMerger>.Instance;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Merge runtime events into a static call graph.
|
||||
/// </summary>
|
||||
public RuntimeStaticMergeResult Merge(
|
||||
CallGraph staticGraph,
|
||||
IEnumerable<RuntimeCallEvent> runtimeEvents)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(staticGraph);
|
||||
ArgumentNullException.ThrowIfNull(runtimeEvents);
|
||||
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
var freshnessThreshold = now - _options.FreshnessWindow;
|
||||
|
||||
// Index static edges for fast lookup
|
||||
var staticEdgeIndex = BuildStaticEdgeIndex(staticGraph);
|
||||
|
||||
// Aggregate runtime events by edge
|
||||
var runtimeEdgeAggregates = AggregateRuntimeEvents(runtimeEvents);
|
||||
|
||||
var observedEdges = new List<ObservedEdge>();
|
||||
var runtimeOnlyEdges = new List<RuntimeOnlyEdge>();
|
||||
var modifiedEdges = new List<CallEdge>();
|
||||
var matchedEdgeKeys = new HashSet<string>(StringComparer.Ordinal);
|
||||
|
||||
foreach (var (edgeKey, aggregate) in runtimeEdgeAggregates)
|
||||
{
|
||||
// Skip stale observations
|
||||
if (aggregate.LastObserved < freshnessThreshold)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Skip low observation counts
|
||||
if (aggregate.ObservationCount < _options.MinObservationCount)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (staticEdgeIndex.TryGetValue(edgeKey, out var staticEdge))
|
||||
{
|
||||
// Edge exists in static graph - mark as observed
|
||||
matchedEdgeKeys.Add(edgeKey);
|
||||
|
||||
var observedMetadata = new ObservedEdgeMetadata
|
||||
{
|
||||
FirstObserved = aggregate.FirstObserved,
|
||||
LastObserved = aggregate.LastObserved,
|
||||
ObservationCount = aggregate.ObservationCount,
|
||||
TraceDigest = aggregate.TraceDigest
|
||||
};
|
||||
|
||||
var boostedEdge = staticEdge with
|
||||
{
|
||||
Confidence = _options.ObservedConfidenceBoost,
|
||||
Observed = observedMetadata
|
||||
};
|
||||
|
||||
modifiedEdges.Add(boostedEdge);
|
||||
observedEdges.Add(new ObservedEdge
|
||||
{
|
||||
From = aggregate.From,
|
||||
To = aggregate.To,
|
||||
FirstObserved = aggregate.FirstObserved,
|
||||
LastObserved = aggregate.LastObserved,
|
||||
ObservationCount = aggregate.ObservationCount,
|
||||
TraceDigest = aggregate.TraceDigest
|
||||
});
|
||||
}
|
||||
else if (_options.AddRuntimeOnlyEdges)
|
||||
{
|
||||
// Edge only exists in runtime - add it
|
||||
var runtimeEdge = new CallEdge
|
||||
{
|
||||
From = aggregate.From,
|
||||
To = aggregate.To,
|
||||
Kind = CallEdgeKind.Dynamic,
|
||||
Confidence = ComputeRuntimeOnlyConfidence(aggregate),
|
||||
Evidence = "runtime_observation",
|
||||
Observed = new ObservedEdgeMetadata
|
||||
{
|
||||
FirstObserved = aggregate.FirstObserved,
|
||||
LastObserved = aggregate.LastObserved,
|
||||
ObservationCount = aggregate.ObservationCount,
|
||||
TraceDigest = aggregate.TraceDigest
|
||||
}
|
||||
};
|
||||
|
||||
modifiedEdges.Add(runtimeEdge);
|
||||
runtimeOnlyEdges.Add(new RuntimeOnlyEdge
|
||||
{
|
||||
From = aggregate.From,
|
||||
To = aggregate.To,
|
||||
FirstObserved = aggregate.FirstObserved,
|
||||
LastObserved = aggregate.LastObserved,
|
||||
ObservationCount = aggregate.ObservationCount,
|
||||
Origin = "runtime",
|
||||
TraceDigest = aggregate.TraceDigest
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Build merged edge list: unmatched static + modified
|
||||
var mergedEdges = new List<CallEdge>();
|
||||
foreach (var edge in staticGraph.Edges)
|
||||
{
|
||||
var key = BuildEdgeKey(edge.From, edge.To);
|
||||
if (!matchedEdgeKeys.Contains(key))
|
||||
{
|
||||
mergedEdges.Add(edge);
|
||||
}
|
||||
}
|
||||
mergedEdges.AddRange(modifiedEdges);
|
||||
|
||||
var mergedGraph = staticGraph with
|
||||
{
|
||||
Edges = mergedEdges.ToImmutableArray()
|
||||
};
|
||||
|
||||
var statistics = new MergeStatistics
|
||||
{
|
||||
StaticEdgeCount = staticGraph.Edges.Length,
|
||||
RuntimeEventCount = runtimeEdgeAggregates.Count,
|
||||
MatchedEdgeCount = matchedEdgeKeys.Count,
|
||||
RuntimeOnlyEdgeCount = runtimeOnlyEdges.Count,
|
||||
UnmatchedStaticEdgeCount = staticGraph.Edges.Length - matchedEdgeKeys.Count
|
||||
};
|
||||
|
||||
_logger.LogInformation(
|
||||
"Merged runtime traces: {Matched}/{Static} edges observed ({Coverage:P1}), {RuntimeOnly} runtime-only edges added",
|
||||
statistics.MatchedEdgeCount,
|
||||
statistics.StaticEdgeCount,
|
||||
statistics.CoverageRatio,
|
||||
statistics.RuntimeOnlyEdgeCount);
|
||||
|
||||
return new RuntimeStaticMergeResult
|
||||
{
|
||||
MergedGraph = mergedGraph,
|
||||
Statistics = statistics,
|
||||
ObservedEdges = observedEdges.ToImmutableArray(),
|
||||
RuntimeOnlyEdges = runtimeOnlyEdges.ToImmutableArray()
|
||||
};
|
||||
}
|
||||
|
||||
private static Dictionary<string, CallEdge> BuildStaticEdgeIndex(CallGraph graph)
|
||||
{
|
||||
var index = new Dictionary<string, CallEdge>(StringComparer.Ordinal);
|
||||
foreach (var edge in graph.Edges)
|
||||
{
|
||||
var key = BuildEdgeKey(edge.From, edge.To);
|
||||
index.TryAdd(key, edge);
|
||||
}
|
||||
return index;
|
||||
}
|
||||
|
||||
private static Dictionary<string, RuntimeEdgeAggregate> AggregateRuntimeEvents(
|
||||
IEnumerable<RuntimeCallEvent> events)
|
||||
{
|
||||
var aggregates = new Dictionary<string, RuntimeEdgeAggregate>(StringComparer.Ordinal);
|
||||
|
||||
foreach (var evt in events)
|
||||
{
|
||||
var key = BuildEdgeKey(evt.CallerSymbol, evt.CalleeSymbol);
|
||||
|
||||
if (aggregates.TryGetValue(key, out var existing))
|
||||
{
|
||||
aggregates[key] = existing with
|
||||
{
|
||||
ObservationCount = existing.ObservationCount + 1,
|
||||
LastObserved = DateTimeOffset.FromUnixTimeMilliseconds((long)(evt.Timestamp / 1_000_000))
|
||||
};
|
||||
}
|
||||
else
|
||||
{
|
||||
var timestamp = DateTimeOffset.FromUnixTimeMilliseconds((long)(evt.Timestamp / 1_000_000));
|
||||
aggregates[key] = new RuntimeEdgeAggregate
|
||||
{
|
||||
From = evt.CallerSymbol,
|
||||
To = evt.CalleeSymbol,
|
||||
FirstObserved = timestamp,
|
||||
LastObserved = timestamp,
|
||||
ObservationCount = 1,
|
||||
TraceDigest = evt.TraceDigest
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return aggregates;
|
||||
}
|
||||
|
||||
private double ComputeRuntimeOnlyConfidence(RuntimeEdgeAggregate aggregate)
|
||||
{
|
||||
// Higher observation count = higher confidence, capped at runtime-only max
|
||||
var countFactor = Math.Min(1.0, aggregate.ObservationCount / 10.0);
|
||||
return _options.RuntimeOnlyConfidence * (0.5 + 0.5 * countFactor);
|
||||
}
|
||||
|
||||
private static string BuildEdgeKey(string from, string to) => $"{from}->{to}";
|
||||
|
||||
private sealed record RuntimeEdgeAggregate
|
||||
{
|
||||
public required string From { get; init; }
|
||||
public required string To { get; init; }
|
||||
public required DateTimeOffset FirstObserved { get; init; }
|
||||
public required DateTimeOffset LastObserved { get; init; }
|
||||
public required int ObservationCount { get; init; }
|
||||
public string? TraceDigest { get; init; }
|
||||
}
|
||||
}
|
||||
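// Illustrative only - not part of the diff. Sketches a merge call using the types
// above; `staticGraph` and `runtimeEvents` are assumed inputs from earlier stages.
var merger = new RuntimeStaticMerger(new RuntimeStaticMergeOptions
{
    MinObservationCount = 2,               // ignore one-off observations
    FreshnessWindow = TimeSpan.FromDays(3)
});

RuntimeStaticMergeResult merged = merger.Merge(staticGraph, runtimeEvents);
// CoverageRatio reports the share of static edges corroborated at runtime.
Console.WriteLine($"coverage {merged.Statistics.CoverageRatio:P1}, runtime-only edges {merged.RuntimeOnlyEdges.Length}");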
@@ -0,0 +1,67 @@
namespace StellaOps.Scanner.Reachability.Slices;

/// <summary>
/// Cache for reachability slices to avoid redundant computation.
/// </summary>
public interface ISliceCache
{
    /// <summary>
    /// Try to get a cached slice result.
    /// </summary>
    Task<CachedSliceResult?> TryGetAsync(
        string cacheKey,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Store a slice result in cache.
    /// </summary>
    Task SetAsync(
        string cacheKey,
        CachedSliceResult result,
        TimeSpan ttl,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Remove a slice from cache.
    /// </summary>
    Task RemoveAsync(
        string cacheKey,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Clear all cached slices.
    /// </summary>
    Task ClearAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// Get cache statistics.
    /// </summary>
    CacheStatistics GetStatistics();
}

/// <summary>
/// Cached slice result.
/// </summary>
public sealed record CachedSliceResult
{
    public required string SliceDigest { get; init; }
    public required string Verdict { get; init; }
    public required double Confidence { get; init; }
    public required IReadOnlyList<string> PathWitnesses { get; init; }
    public required DateTimeOffset CachedAt { get; init; }
}

/// <summary>
/// Cache statistics.
/// </summary>
public sealed record CacheStatistics
{
    public required long HitCount { get; init; }
    public required long MissCount { get; init; }
    public required long EntryCount { get; init; }
    public required long EstimatedSizeBytes { get; init; }

    public double HitRate => (HitCount + MissCount) == 0
        ? 0.0
        : (double)HitCount / (HitCount + MissCount);
}
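// Illustrative only - not part of the diff. The typical get-or-compute pattern the
// interface above is meant to support; ComputeSliceAsync and the key scheme are
// hypothetical placeholders.
var cacheKey = $"slice:{graphDigest}:{vulnerabilityId}";

var cached = await cache.TryGetAsync(cacheKey, ct);
if (cached is null)
{
    cached = await ComputeSliceAsync(graphDigest, vulnerabilityId, ct);
    // A short TTL keeps stale verdicts from outliving new scans.
    await cache.SetAsync(cacheKey, cached, TimeSpan.FromHours(6), ct);
}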
@@ -0,0 +1,210 @@
|
||||
using System.Collections.Concurrent;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Slices;
|
||||
|
||||
/// <summary>
|
||||
/// In-memory implementation of slice cache with TTL and memory pressure handling.
|
||||
/// </summary>
|
||||
public sealed class InMemorySliceCache : ISliceCache, IDisposable
|
||||
{
|
||||
private readonly ConcurrentDictionary<string, CacheEntry> _cache = new();
|
||||
private readonly ILogger<InMemorySliceCache> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly Timer _evictionTimer;
|
||||
private readonly SemaphoreSlim _evictionLock = new(1, 1);
|
||||
|
||||
private long _hitCount;
|
||||
private long _missCount;
|
||||
private const long MaxCacheSizeBytes = 1_073_741_824; // 1GB
|
||||
private const int EvictionIntervalSeconds = 60;
|
||||
|
||||
public InMemorySliceCache(
|
||||
ILogger<InMemorySliceCache> logger,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_evictionTimer = new Timer(
|
||||
_ => _ = EvictExpiredEntriesAsync(CancellationToken.None),
|
||||
null,
|
||||
TimeSpan.FromSeconds(EvictionIntervalSeconds),
|
||||
TimeSpan.FromSeconds(EvictionIntervalSeconds));
|
||||
}
|
||||
|
||||
public Task<CachedSliceResult?> TryGetAsync(
|
||||
string cacheKey,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
|
||||
|
||||
if (_cache.TryGetValue(cacheKey, out var entry))
|
||||
{
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
if (entry.ExpiresAt > now)
|
||||
{
|
||||
Interlocked.Increment(ref _hitCount);
|
||||
_logger.LogDebug("Cache hit for key {CacheKey}", cacheKey);
|
||||
return Task.FromResult<CachedSliceResult?>(entry.Result);
|
||||
}
|
||||
|
||||
_cache.TryRemove(cacheKey, out _);
|
||||
_logger.LogDebug("Cache entry expired for key {CacheKey}", cacheKey);
|
||||
}
|
||||
|
||||
Interlocked.Increment(ref _missCount);
|
||||
_logger.LogDebug("Cache miss for key {CacheKey}", cacheKey);
|
||||
return Task.FromResult<CachedSliceResult?>(null);
|
||||
}
|
||||
|
||||
public Task SetAsync(
|
||||
string cacheKey,
|
||||
CachedSliceResult result,
|
||||
TimeSpan ttl,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
|
||||
ArgumentNullException.ThrowIfNull(result);
|
||||
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
var entry = new CacheEntry(result, now + ttl, EstimateSize(result));
|
||||
|
||||
_cache.AddOrUpdate(cacheKey, entry, (_, _) => entry);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Cached slice with key {CacheKey}, expires at {ExpiresAt}",
|
||||
cacheKey,
|
||||
entry.ExpiresAt);
|
||||
|
||||
_ = CheckMemoryPressureAsync(cancellationToken);
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task RemoveAsync(
|
||||
string cacheKey,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
|
||||
|
||||
_cache.TryRemove(cacheKey, out _);
|
||||
_logger.LogDebug("Removed cache entry for key {CacheKey}", cacheKey);
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task ClearAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
_cache.Clear();
|
||||
_logger.LogInformation("Cleared all cache entries");
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public CacheStatistics GetStatistics()
|
||||
{
|
||||
var estimatedSize = _cache.Values.Sum(e => e.EstimatedSizeBytes);
|
||||
|
||||
return new CacheStatistics
|
||||
{
|
||||
HitCount = Interlocked.Read(ref _hitCount),
|
||||
MissCount = Interlocked.Read(ref _missCount),
|
||||
EntryCount = _cache.Count,
|
||||
EstimatedSizeBytes = estimatedSize
|
||||
};
|
||||
}
|
||||
|
||||
private async Task EvictExpiredEntriesAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
if (!await _evictionLock.WaitAsync(0, cancellationToken).ConfigureAwait(false))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
var expiredKeys = _cache
|
||||
.Where(kv => kv.Value.ExpiresAt <= now)
|
||||
.Select(kv => kv.Key)
|
||||
.ToList();
|
||||
|
||||
foreach (var key in expiredKeys)
|
||||
{
|
||||
_cache.TryRemove(key, out _);
|
||||
}
|
||||
|
||||
if (expiredKeys.Count > 0)
|
||||
{
|
||||
_logger.LogDebug("Evicted {Count} expired cache entries", expiredKeys.Count);
|
||||
}
|
||||
}
|
||||
finally
|
||||
{
|
||||
_evictionLock.Release();
|
||||
}
|
||||
}
|
||||
|
||||
private async Task CheckMemoryPressureAsync(CancellationToken cancellationToken)
|
||||
{
|
||||
var stats = GetStatistics();
|
||||
if (stats.EstimatedSizeBytes <= MaxCacheSizeBytes)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
if (!await _evictionLock.WaitAsync(0, cancellationToken).ConfigureAwait(false))
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
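            // Under memory pressure, drop roughly 10% of entries, starting with those closest to expiry.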
var orderedEntries = _cache
|
||||
.OrderBy(kv => kv.Value.ExpiresAt)
|
||||
.ToList();
|
||||
|
||||
var evictionCount = Math.Max(1, orderedEntries.Count / 10);
|
||||
var toEvict = orderedEntries.Take(evictionCount);
|
||||
|
||||
foreach (var entry in toEvict)
|
||||
{
|
||||
_cache.TryRemove(entry.Key, out _);
|
||||
}
|
||||
|
||||
_logger.LogWarning(
|
||||
"Memory pressure detected. Evicted {Count} entries. Cache size: {SizeBytes} bytes",
|
||||
evictionCount,
|
||||
stats.EstimatedSizeBytes);
|
||||
}
|
||||
finally
|
||||
{
|
||||
_evictionLock.Release();
|
||||
}
|
||||
}
|
||||
|
||||
private static long EstimateSize(CachedSliceResult result)
|
||||
{
|
||||
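        // Rough size heuristic: .NET strings are UTF-16 (~2 bytes per char) plus object overhead; each path witness is assumed to average ~256 bytes.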
const int baseObjectSize = 128;
|
||||
const int stringOverhead = 32;
|
||||
const int pathWitnessAvgSize = 256;
|
||||
|
||||
var size = baseObjectSize;
|
||||
size += result.SliceDigest.Length * 2 + stringOverhead;
|
||||
size += result.Verdict.Length * 2 + stringOverhead;
|
||||
size += result.PathWitnesses.Count * pathWitnessAvgSize;
|
||||
|
||||
return size;
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
_evictionTimer?.Dispose();
|
||||
_evictionLock?.Dispose();
|
||||
}
|
||||
|
||||
private sealed record CacheEntry(
|
||||
CachedSliceResult Result,
|
||||
DateTimeOffset ExpiresAt,
|
||||
long EstimatedSizeBytes);
|
||||
}
|
||||
@@ -0,0 +1,223 @@
|
||||
using System.Collections.Immutable;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Core;
|
||||
using StellaOps.Scanner.Reachability.Runtime;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Slices;
|
||||
|
||||
/// <summary>
|
||||
/// Options for observed path slice generation.
|
||||
/// </summary>
|
||||
public sealed record ObservedPathSliceOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Minimum confidence threshold to include in slice. Default: 0.0 (include all).
|
||||
/// </summary>
|
||||
public double MinConfidence { get; init; } = 0.0;
|
||||
|
||||
/// <summary>
|
||||
/// Whether to include runtime-only edges. Default: true.
|
||||
/// </summary>
|
||||
public bool IncludeRuntimeOnlyEdges { get; init; } = true;
|
||||
|
||||
/// <summary>
|
||||
/// Whether to promote observed edges to highest confidence. Default: true.
|
||||
/// </summary>
|
||||
public bool PromoteObservedConfidence { get; init; } = true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generates reachability slices that incorporate runtime observations.
|
||||
/// </summary>
|
||||
public sealed class ObservedPathSliceGenerator
|
||||
{
|
||||
private readonly SliceExtractor _baseExtractor;
|
||||
private readonly RuntimeStaticMerger _merger;
|
||||
private readonly ObservedPathSliceOptions _options;
|
||||
private readonly ILogger<ObservedPathSliceGenerator> _logger;
|
||||
|
||||
public ObservedPathSliceGenerator(
|
||||
SliceExtractor baseExtractor,
|
||||
RuntimeStaticMerger merger,
|
||||
ObservedPathSliceOptions? options = null,
|
||||
ILogger<ObservedPathSliceGenerator>? logger = null)
|
||||
{
|
||||
_baseExtractor = baseExtractor ?? throw new ArgumentNullException(nameof(baseExtractor));
|
||||
_merger = merger ?? throw new ArgumentNullException(nameof(merger));
|
||||
_options = options ?? new ObservedPathSliceOptions();
|
||||
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<ObservedPathSliceGenerator>.Instance;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extract a slice with runtime observations merged in.
|
||||
/// </summary>
|
||||
public ReachabilitySlice ExtractWithObservations(
|
||||
SliceExtractionRequest request,
|
||||
IEnumerable<RuntimeCallEvent> runtimeEvents)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
ArgumentNullException.ThrowIfNull(runtimeEvents);
|
||||
|
||||
// First merge runtime observations into the graph
|
||||
var mergeResult = _merger.Merge(request.Graph, runtimeEvents);
|
||||
|
||||
_logger.LogDebug(
|
||||
"Merged {Matched} observed edges, {RuntimeOnly} runtime-only edges (coverage: {Coverage:P1})",
|
||||
mergeResult.Statistics.MatchedEdgeCount,
|
||||
mergeResult.Statistics.RuntimeOnlyEdgeCount,
|
||||
mergeResult.Statistics.CoverageRatio);
|
||||
|
||||
// Extract slice from merged graph
|
||||
var mergedRequest = request with { Graph = mergeResult.MergedGraph };
|
||||
var baseSlice = _baseExtractor.Extract(mergedRequest);
|
||||
|
||||
// Enhance verdict based on observations
|
||||
var enhancedVerdict = EnhanceVerdict(baseSlice.Verdict, mergeResult);
|
||||
|
||||
// Filter and transform edges based on options
|
||||
var enhancedSubgraph = EnhanceSubgraph(baseSlice.Subgraph, mergeResult);
|
||||
|
||||
return baseSlice with
|
||||
{
|
||||
Verdict = enhancedVerdict,
|
||||
Subgraph = enhancedSubgraph
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Check if any paths in the slice have been observed at runtime.
|
||||
/// </summary>
|
||||
public bool HasObservedPaths(ReachabilitySlice slice)
|
||||
{
|
||||
return slice.Subgraph.Edges.Any(e => e.Observed != null);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Get coverage statistics for a slice.
|
||||
/// </summary>
|
||||
public ObservationCoverage GetCoverage(ReachabilitySlice slice)
|
||||
{
|
||||
var totalEdges = slice.Subgraph.Edges.Length;
|
||||
var observedEdges = slice.Subgraph.Edges.Count(e => e.Observed != null);
|
||||
|
||||
return new ObservationCoverage
|
||||
{
|
||||
TotalEdges = totalEdges,
|
||||
ObservedEdges = observedEdges,
|
||||
CoverageRatio = totalEdges > 0 ? (double)observedEdges / totalEdges : 0.0,
|
||||
HasFullCoverage = totalEdges > 0 && observedEdges == totalEdges
|
||||
};
|
||||
}
|
||||
|
||||
private SliceVerdict EnhanceVerdict(SliceVerdict baseVerdict, RuntimeStaticMergeResult mergeResult)
|
||||
{
|
||||
// Any observed edge in the merge result counts as runtime confirmation; upgrade reachable verdicts to observed_reachable
|
||||
var hasObservedPathToTarget = mergeResult.ObservedEdges.Any();
|
||||
|
||||
if (hasObservedPathToTarget && baseVerdict.Status == SliceVerdictStatus.Reachable)
|
||||
{
|
||||
return baseVerdict with
|
||||
{
|
||||
Status = SliceVerdictStatus.ObservedReachable,
|
||||
Confidence = 1.0, // Maximum confidence for runtime-observed
|
||||
Reasons = baseVerdict.Reasons.Add("Runtime observation confirms reachability")
|
||||
};
|
||||
}
|
||||
|
||||
// If static analysis said unreachable but we observed it, override
|
||||
if (hasObservedPathToTarget && baseVerdict.Status == SliceVerdictStatus.Unreachable)
|
||||
{
|
||||
_logger.LogWarning(
|
||||
"Runtime observation contradicts static analysis (unreachable -> observed_reachable)");
|
||||
|
||||
return baseVerdict with
|
||||
{
|
||||
Status = SliceVerdictStatus.ObservedReachable,
|
||||
Confidence = 1.0,
|
||||
Reasons = baseVerdict.Reasons.Add("Runtime observation overrides static analysis")
|
||||
};
|
||||
}
|
||||
|
||||
// Boost confidence if we have supporting observations
|
||||
if (mergeResult.Statistics.CoverageRatio > 0)
|
||||
{
|
||||
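            // Interpolate toward 1.0: the remaining uncertainty (1 - confidence) shrinks in proportion to runtime coverage.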
var boostedConfidence = Math.Min(1.0,
|
||||
baseVerdict.Confidence + (1.0 - baseVerdict.Confidence) * mergeResult.Statistics.CoverageRatio);
|
||||
|
||||
return baseVerdict with
|
||||
{
|
||||
Confidence = boostedConfidence,
|
||||
Reasons = baseVerdict.Reasons.Add($"Confidence boosted by {mergeResult.Statistics.CoverageRatio:P0} runtime coverage")
|
||||
};
|
||||
}
|
||||
|
||||
return baseVerdict;
|
||||
}
|
||||
|
||||
private SliceSubgraph EnhanceSubgraph(SliceSubgraph baseSubgraph, RuntimeStaticMergeResult mergeResult)
|
||||
{
|
||||
var enhancedEdges = baseSubgraph.Edges
|
||||
.Select(edge => EnhanceEdge(edge, mergeResult))
|
||||
.Where(edge => edge.Confidence >= _options.MinConfidence)
|
||||
.ToImmutableArray();
|
||||
|
||||
return baseSubgraph with { Edges = enhancedEdges };
|
||||
}
|
||||
|
||||
private SliceEdge EnhanceEdge(SliceEdge edge, RuntimeStaticMergeResult mergeResult)
|
||||
{
|
||||
// Check if this edge was observed
|
||||
var observed = mergeResult.ObservedEdges
|
||||
.FirstOrDefault(o => o.From == edge.From && o.To == edge.To);
|
||||
|
||||
if (observed != null)
|
||||
{
|
||||
var confidence = _options.PromoteObservedConfidence ? 1.0 : edge.Confidence;
|
||||
|
||||
return edge with
|
||||
{
|
||||
Confidence = confidence,
|
||||
Observed = new ObservedEdgeMetadata
|
||||
{
|
||||
FirstObserved = observed.FirstObserved,
|
||||
LastObserved = observed.LastObserved,
|
||||
ObservationCount = observed.ObservationCount,
|
||||
TraceDigest = observed.TraceDigest
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
// Check if this is a runtime-only edge
|
||||
var runtimeOnly = mergeResult.RuntimeOnlyEdges
|
||||
.FirstOrDefault(r => r.From == edge.From && r.To == edge.To);
|
||||
|
||||
if (runtimeOnly != null && _options.IncludeRuntimeOnlyEdges)
|
||||
{
|
||||
return edge with
|
||||
{
|
||||
Kind = SliceEdgeKind.Dynamic,
|
||||
Evidence = $"runtime:{runtimeOnly.Origin}",
|
||||
Observed = new ObservedEdgeMetadata
|
||||
{
|
||||
FirstObserved = runtimeOnly.FirstObserved,
|
||||
LastObserved = runtimeOnly.LastObserved,
|
||||
ObservationCount = runtimeOnly.ObservationCount,
|
||||
TraceDigest = runtimeOnly.TraceDigest
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
return edge;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Coverage statistics for runtime observations.
|
||||
/// </summary>
|
||||
public sealed record ObservationCoverage
|
||||
{
|
||||
public int TotalEdges { get; init; }
|
||||
public int ObservedEdges { get; init; }
|
||||
public double CoverageRatio { get; init; }
|
||||
public bool HasFullCoverage { get; init; }
|
||||
}
|
||||
@@ -0,0 +1,173 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Slices;
|
||||
|
||||
/// <summary>
|
||||
/// Policy binding mode for slices.
|
||||
/// </summary>
|
||||
public enum PolicyBindingMode
|
||||
{
|
||||
/// <summary>
|
||||
/// Slice is invalid if policy changes at all.
|
||||
/// </summary>
|
||||
Strict,
|
||||
|
||||
/// <summary>
|
||||
/// Slice is valid with the same or newer policy versions.
|
||||
/// </summary>
|
||||
Forward,
|
||||
|
||||
/// <summary>
|
||||
/// Slice is valid with any policy version.
|
||||
/// </summary>
|
||||
Any
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Policy binding information for a reachability slice.
|
||||
/// </summary>
|
||||
public sealed record PolicyBinding
|
||||
{
|
||||
/// <summary>
|
||||
/// Content-addressed hash of the policy DSL.
|
||||
/// </summary>
|
||||
[JsonPropertyName("policyDigest")]
|
||||
public required string PolicyDigest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Semantic version of the policy.
|
||||
/// </summary>
|
||||
[JsonPropertyName("policyVersion")]
|
||||
public required string PolicyVersion { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// When the policy was bound to this slice.
|
||||
/// </summary>
|
||||
[JsonPropertyName("boundAt")]
|
||||
public required DateTimeOffset BoundAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Binding mode for validation.
|
||||
/// </summary>
|
||||
[JsonPropertyName("mode")]
|
||||
[JsonConverter(typeof(JsonStringEnumConverter))]
|
||||
public required PolicyBindingMode Mode { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional policy name/identifier.
|
||||
/// </summary>
|
||||
[JsonPropertyName("policyName")]
|
||||
public string? PolicyName { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional policy source (e.g., git commit hash).
|
||||
/// </summary>
|
||||
[JsonPropertyName("policySource")]
|
||||
public string? PolicySource { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of policy binding validation.
|
||||
/// </summary>
|
||||
public sealed record PolicyBindingValidationResult
|
||||
{
|
||||
public required bool Valid { get; init; }
|
||||
public string? FailureReason { get; init; }
|
||||
public required PolicyBinding SlicePolicy { get; init; }
|
||||
public required PolicyBinding CurrentPolicy { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Validator for policy bindings.
|
||||
/// </summary>
|
||||
public sealed class PolicyBindingValidator
|
||||
{
|
||||
/// <summary>
|
||||
/// Validate a policy binding against current policy.
|
||||
/// </summary>
|
||||
public PolicyBindingValidationResult Validate(
|
||||
PolicyBinding sliceBinding,
|
||||
PolicyBinding currentPolicy)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(sliceBinding);
|
||||
ArgumentNullException.ThrowIfNull(currentPolicy);
|
||||
|
||||
var result = sliceBinding.Mode switch
|
||||
{
|
||||
PolicyBindingMode.Strict => ValidateStrict(sliceBinding, currentPolicy),
|
||||
PolicyBindingMode.Forward => ValidateForward(sliceBinding, currentPolicy),
|
||||
PolicyBindingMode.Any => ValidateAny(sliceBinding, currentPolicy),
|
||||
_ => throw new ArgumentException($"Unknown policy binding mode: {sliceBinding.Mode}")
|
||||
};
|
||||
|
||||
return result with
|
||||
{
|
||||
SlicePolicy = sliceBinding,
|
||||
CurrentPolicy = currentPolicy
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyBindingValidationResult ValidateStrict(
|
||||
PolicyBinding sliceBinding,
|
||||
PolicyBinding currentPolicy)
|
||||
{
|
||||
var digestMatch = string.Equals(
|
||||
sliceBinding.PolicyDigest,
|
||||
currentPolicy.PolicyDigest,
|
||||
StringComparison.Ordinal);
|
||||
|
||||
return new PolicyBindingValidationResult
|
||||
{
|
||||
Valid = digestMatch,
|
||||
FailureReason = digestMatch
|
||||
? null
|
||||
: $"Policy digest mismatch. Slice bound to {sliceBinding.PolicyDigest}, current is {currentPolicy.PolicyDigest}.",
|
||||
SlicePolicy = sliceBinding,
|
||||
CurrentPolicy = currentPolicy
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyBindingValidationResult ValidateForward(
|
||||
PolicyBinding sliceBinding,
|
||||
PolicyBinding currentPolicy)
|
||||
{
|
||||
// Check if current policy version is newer or equal
|
||||
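        // Note: System.Version parses numeric segments only; pre-release versions such as "1.2.0-rc.1" fail to parse and are rejected.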
if (!Version.TryParse(sliceBinding.PolicyVersion, out var sliceVersion) ||
|
||||
!Version.TryParse(currentPolicy.PolicyVersion, out var currentVersion))
|
||||
{
|
||||
return new PolicyBindingValidationResult
|
||||
{
|
||||
Valid = false,
|
||||
FailureReason = "Invalid version format for forward compatibility check.",
|
||||
SlicePolicy = sliceBinding,
|
||||
CurrentPolicy = currentPolicy
|
||||
};
|
||||
}
|
||||
|
||||
var isForwardCompatible = currentVersion >= sliceVersion;
|
||||
|
||||
return new PolicyBindingValidationResult
|
||||
{
|
||||
Valid = isForwardCompatible,
|
||||
FailureReason = isForwardCompatible
|
||||
? null
|
||||
: $"Policy version downgrade detected. Slice bound to {sliceVersion}, current is {currentVersion}.",
|
||||
SlicePolicy = sliceBinding,
|
||||
CurrentPolicy = currentPolicy
|
||||
};
|
||||
}
|
||||
|
||||
private static PolicyBindingValidationResult ValidateAny(
|
||||
PolicyBinding sliceBinding,
|
||||
PolicyBinding currentPolicy)
|
||||
{
|
||||
// Always valid in 'any' mode
|
||||
return new PolicyBindingValidationResult
|
||||
{
|
||||
Valid = true,
|
||||
FailureReason = null,
|
||||
SlicePolicy = sliceBinding,
|
||||
CurrentPolicy = currentPolicy
|
||||
};
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,113 @@
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Slices.Replay;
|
||||
|
||||
/// <summary>
|
||||
/// Computes detailed diffs between two reachability slices.
|
||||
/// </summary>
|
||||
public sealed class SliceDiffComputer
|
||||
{
|
||||
public SliceDiffResult Compute(ReachabilitySlice original, ReachabilitySlice recomputed)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(original);
|
||||
ArgumentNullException.ThrowIfNull(recomputed);
|
||||
|
||||
var normalizedOriginal = original.Normalize();
|
||||
var normalizedRecomputed = recomputed.Normalize();
|
||||
|
||||
var nodesDiff = ComputeNodesDiff(
|
||||
normalizedOriginal.Subgraph.Nodes,
|
||||
normalizedRecomputed.Subgraph.Nodes);
|
||||
|
||||
var edgesDiff = ComputeEdgesDiff(
|
||||
normalizedOriginal.Subgraph.Edges,
|
||||
normalizedRecomputed.Subgraph.Edges);
|
||||
|
||||
var verdictDiff = ComputeVerdictDiff(
|
||||
normalizedOriginal.Verdict,
|
||||
normalizedRecomputed.Verdict);
|
||||
|
||||
var hasChanges = nodesDiff.HasChanges || edgesDiff.HasChanges || verdictDiff is not null;
|
||||
|
||||
return new SliceDiffResult(
|
||||
Match: !hasChanges,
|
||||
NodesDiff: nodesDiff,
|
||||
EdgesDiff: edgesDiff,
|
||||
VerdictDiff: verdictDiff);
|
||||
}
|
||||
|
||||
private static NodesDiff ComputeNodesDiff(
|
||||
ImmutableArray<SliceNode> original,
|
||||
ImmutableArray<SliceNode> recomputed)
|
||||
{
|
||||
var originalIds = original.Select(n => n.Id).ToHashSet(StringComparer.Ordinal);
|
||||
var recomputedIds = recomputed.Select(n => n.Id).ToHashSet(StringComparer.Ordinal);
|
||||
|
||||
var missing = originalIds.Except(recomputedIds).Order(StringComparer.Ordinal).ToImmutableArray();
|
||||
var extra = recomputedIds.Except(originalIds).Order(StringComparer.Ordinal).ToImmutableArray();
|
||||
|
||||
var hasChanges = missing.Length > 0 || extra.Length > 0;
|
||||
|
||||
return new NodesDiff(missing, extra, hasChanges);
|
||||
}
|
||||
|
||||
private static EdgesDiff ComputeEdgesDiff(
|
||||
ImmutableArray<SliceEdge> original,
|
||||
ImmutableArray<SliceEdge> recomputed)
|
||||
{
|
||||
var originalKeys = original
|
||||
.Select(e => EdgeKey(e))
|
||||
.ToHashSet(StringComparer.Ordinal);
|
||||
|
||||
var recomputedKeys = recomputed
|
||||
.Select(e => EdgeKey(e))
|
||||
.ToHashSet(StringComparer.Ordinal);
|
||||
|
||||
var missing = originalKeys.Except(recomputedKeys).Order(StringComparer.Ordinal).ToImmutableArray();
|
||||
var extra = recomputedKeys.Except(originalKeys).Order(StringComparer.Ordinal).ToImmutableArray();
|
||||
|
||||
var hasChanges = missing.Length > 0 || extra.Length > 0;
|
||||
|
||||
return new EdgesDiff(missing, extra, hasChanges);
|
||||
}
|
||||
|
||||
private static string EdgeKey(SliceEdge edge)
|
||||
=> $"{edge.From}→{edge.To}:{edge.Kind}";
|
||||
|
||||
private static string? ComputeVerdictDiff(SliceVerdict original, SliceVerdict recomputed)
|
||||
{
|
||||
if (original.Status != recomputed.Status)
|
||||
{
|
||||
return $"Status changed: {original.Status} → {recomputed.Status}";
|
||||
}
|
||||
|
||||
var confidenceDiff = Math.Abs(original.Confidence - recomputed.Confidence);
|
||||
if (confidenceDiff > 0.01)
|
||||
{
|
||||
return $"Confidence changed: {original.Confidence:F3} → {recomputed.Confidence:F3} (Δ={confidenceDiff:F3})";
|
||||
}
|
||||
|
||||
if (original.UnknownCount != recomputed.UnknownCount)
|
||||
{
|
||||
return $"Unknown count changed: {original.UnknownCount} → {recomputed.UnknownCount}";
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public sealed record SliceDiffResult(
|
||||
bool Match,
|
||||
NodesDiff NodesDiff,
|
||||
EdgesDiff EdgesDiff,
|
||||
string? VerdictDiff);
|
||||
|
||||
public sealed record NodesDiff(
|
||||
ImmutableArray<string> Missing,
|
||||
ImmutableArray<string> Extra,
|
||||
bool HasChanges);
|
||||
|
||||
public sealed record EdgesDiff(
|
||||
ImmutableArray<string> Missing,
|
||||
ImmutableArray<string> Extra,
|
||||
bool HasChanges);
|
||||
@@ -0,0 +1,180 @@
|
||||
using System.Collections.Concurrent;
|
||||
using Microsoft.Extensions.Options;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Slices;
|
||||
|
||||
/// <summary>
|
||||
/// Options for slice caching behavior.
|
||||
/// </summary>
|
||||
public sealed class SliceCacheOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Cache time-to-live. Default: 1 hour.
|
||||
/// </summary>
|
||||
public TimeSpan Ttl { get; set; } = TimeSpan.FromHours(1);
|
||||
|
||||
/// <summary>
|
||||
/// Maximum number of cached items before eviction. Default: 10000.
|
||||
/// </summary>
|
||||
public int MaxItems { get; set; } = 10_000;
|
||||
|
||||
/// <summary>
|
||||
/// Whether caching is enabled. Default: true.
|
||||
/// </summary>
|
||||
public bool Enabled { get; set; } = true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// In-memory LRU cache for reachability slices with TTL eviction.
|
||||
/// </summary>
|
||||
public sealed class SliceCache : ISliceCache, IDisposable
|
||||
{
|
||||
private readonly SliceCacheOptions _options;
|
||||
private readonly ConcurrentDictionary<string, CacheItem> _cache = new(StringComparer.Ordinal);
|
||||
private readonly Timer _evictionTimer;
|
||||
private long _hitCount;
|
||||
private long _missCount;
|
||||
private bool _disposed;
|
||||
|
||||
public SliceCache(IOptions<SliceCacheOptions> options)
|
||||
{
|
||||
_options = options?.Value ?? new SliceCacheOptions();
|
||||
_evictionTimer = new Timer(EvictExpired, null, TimeSpan.FromMinutes(1), TimeSpan.FromMinutes(1));
|
||||
}
|
||||
|
||||
public Task<CachedSliceResult?> TryGetAsync(string cacheKey, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
|
||||
|
||||
if (!_options.Enabled)
|
||||
{
|
||||
return Task.FromResult<CachedSliceResult?>(null);
|
||||
}
|
||||
|
||||
if (_cache.TryGetValue(cacheKey, out var item))
|
||||
{
|
||||
if (item.ExpiresAt > DateTimeOffset.UtcNow)
|
||||
{
|
||||
item.LastAccessed = DateTimeOffset.UtcNow;
|
||||
Interlocked.Increment(ref _hitCount);
|
||||
var result = new CachedSliceResult
|
||||
{
|
||||
SliceDigest = item.Digest,
|
||||
Verdict = item.Verdict,
|
||||
Confidence = item.Confidence,
|
||||
PathWitnesses = item.PathWitnesses,
|
||||
CachedAt = item.CachedAt
|
||||
};
|
||||
return Task.FromResult<CachedSliceResult?>(result);
|
||||
}
|
||||
|
||||
// Expired - remove and return miss
|
||||
_cache.TryRemove(cacheKey, out _);
|
||||
}
|
||||
|
||||
Interlocked.Increment(ref _missCount);
|
||||
return Task.FromResult<CachedSliceResult?>(null);
|
||||
}
|
||||
|
||||
public Task SetAsync(string cacheKey, CachedSliceResult result, TimeSpan ttl, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
|
||||
ArgumentNullException.ThrowIfNull(result);
|
||||
|
||||
if (!_options.Enabled) return Task.CompletedTask;
|
||||
|
||||
// Evict if at capacity
|
||||
if (_cache.Count >= _options.MaxItems)
|
||||
{
|
||||
EvictLru();
|
||||
}
|
||||
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var item = new CacheItem
|
||||
{
|
||||
Digest = result.SliceDigest,
|
||||
Verdict = result.Verdict,
|
||||
Confidence = result.Confidence,
|
||||
PathWitnesses = result.PathWitnesses.ToList(),
|
||||
CachedAt = now,
|
||||
ExpiresAt = now.Add(ttl),
|
||||
LastAccessed = now
|
||||
};
|
||||
|
||||
_cache[cacheKey] = item;
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task RemoveAsync(string cacheKey, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(cacheKey);
|
||||
_cache.TryRemove(cacheKey, out _);
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task ClearAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
_cache.Clear();
|
||||
Interlocked.Exchange(ref _hitCount, 0);
|
||||
Interlocked.Exchange(ref _missCount, 0);
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public CacheStatistics GetStatistics() => new()
|
||||
{
|
||||
HitCount = Interlocked.Read(ref _hitCount),
|
||||
MissCount = Interlocked.Read(ref _missCount),
|
||||
EntryCount = _cache.Count,
|
||||
EstimatedSizeBytes = _cache.Count * 1024 // Rough estimate: assume ~1 KiB per cached entry
|
||||
};
|
||||
|
||||
private void EvictExpired(object? state)
|
||||
{
|
||||
if (_disposed) return;
|
||||
|
||||
var now = DateTimeOffset.UtcNow;
|
||||
var keysToRemove = _cache
|
||||
.Where(kvp => kvp.Value.ExpiresAt <= now)
|
||||
.Select(kvp => kvp.Key)
|
||||
.ToList();
|
||||
|
||||
foreach (var key in keysToRemove)
|
||||
{
|
||||
_cache.TryRemove(key, out _);
|
||||
}
|
||||
}
|
||||
|
||||
private void EvictLru()
|
||||
{
|
||||
// Remove oldest 10% of items
|
||||
var toRemove = Math.Max(1, _options.MaxItems / 10);
|
||||
var oldest = _cache
|
||||
.OrderBy(kvp => kvp.Value.LastAccessed)
|
||||
.Take(toRemove)
|
||||
.Select(kvp => kvp.Key)
|
||||
.ToList();
|
||||
|
||||
foreach (var key in oldest)
|
||||
{
|
||||
_cache.TryRemove(key, out _);
|
||||
}
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (_disposed) return;
|
||||
_disposed = true;
|
||||
_evictionTimer.Dispose();
|
||||
}
|
||||
|
||||
private sealed class CacheItem
|
||||
{
|
||||
public required string Digest { get; init; }
|
||||
public required string Verdict { get; init; }
|
||||
public required double Confidence { get; init; }
|
||||
public required List<string> PathWitnesses { get; init; }
|
||||
public required DateTimeOffset CachedAt { get; init; }
|
||||
public required DateTimeOffset ExpiresAt { get; init; }
|
||||
public DateTimeOffset LastAccessed { get; set; }
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,68 @@
|
||||
using StellaOps.Cryptography;
|
||||
using StellaOps.Replay.Core;
|
||||
using StellaOps.Scanner.Cache.Abstractions;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Slices;
|
||||
|
||||
public sealed class SliceCasStorage
|
||||
{
|
||||
private readonly SliceHasher _hasher;
|
||||
private readonly SliceDsseSigner _signer;
|
||||
private readonly ICryptoHash _cryptoHash;
|
||||
|
||||
public SliceCasStorage(SliceHasher hasher, SliceDsseSigner signer, ICryptoHash cryptoHash)
|
||||
{
|
||||
_hasher = hasher ?? throw new ArgumentNullException(nameof(hasher));
|
||||
_signer = signer ?? throw new ArgumentNullException(nameof(signer));
|
||||
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
|
||||
}
|
||||
|
||||
public async Task<SliceCasResult> StoreAsync(
|
||||
ReachabilitySlice slice,
|
||||
IFileContentAddressableStore cas,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(slice);
|
||||
ArgumentNullException.ThrowIfNull(cas);
|
||||
|
||||
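        // Store the canonical slice bytes under their digest, then store the signed DSSE envelope alongside under "<digest>.dsse".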
var digestResult = _hasher.ComputeDigest(slice);
|
||||
var casKey = ExtractDigestHex(digestResult.Digest);
|
||||
|
||||
await using (var sliceStream = new MemoryStream(digestResult.CanonicalBytes, writable: false))
|
||||
{
|
||||
await cas.PutAsync(new FileCasPutRequest(casKey, sliceStream, leaveOpen: false), cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
|
||||
var signed = await _signer.SignAsync(slice, cancellationToken).ConfigureAwait(false);
|
||||
var envelopeBytes = CanonicalJson.SerializeToUtf8Bytes(signed.Envelope);
|
||||
var dsseDigest = _cryptoHash.ComputePrefixedHashForPurpose(envelopeBytes, HashPurpose.Attestation);
|
||||
var dsseKey = $"{casKey}.dsse";
|
||||
|
||||
await using (var dsseStream = new MemoryStream(envelopeBytes, writable: false))
|
||||
{
|
||||
await cas.PutAsync(new FileCasPutRequest(dsseKey, dsseStream, leaveOpen: false), cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
}
|
||||
|
||||
return new SliceCasResult(
|
||||
signed.SliceDigest,
|
||||
$"cas://slices/{casKey}",
|
||||
dsseDigest,
|
||||
$"cas://slices/{dsseKey}",
|
||||
signed);
|
||||
}
|
||||
|
||||
private static string ExtractDigestHex(string prefixed)
|
||||
{
|
||||
var colonIndex = prefixed.IndexOf(':');
|
||||
return colonIndex >= 0 ? prefixed[(colonIndex + 1)..] : prefixed;
|
||||
}
|
||||
}
|
||||
|
||||
public sealed record SliceCasResult(
|
||||
string SliceDigest,
|
||||
string SliceCasUri,
|
||||
string DsseDigest,
|
||||
string DsseCasUri,
|
||||
SignedSlice SignedSlice);
|
||||
@@ -0,0 +1,178 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Slices;
|
||||
|
||||
/// <summary>
|
||||
/// Computes detailed diffs between two slices for replay verification.
|
||||
/// </summary>
|
||||
public sealed class SliceDiffComputer
|
||||
{
|
||||
/// <summary>
|
||||
/// Compare two slices and produce a detailed diff.
|
||||
/// </summary>
|
||||
public SliceDiffResult Compare(ReachabilitySlice original, ReachabilitySlice recomputed)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(original);
|
||||
ArgumentNullException.ThrowIfNull(recomputed);
|
||||
|
||||
var nodeDiff = CompareNodes(original.Subgraph.Nodes, recomputed.Subgraph.Nodes);
|
||||
var edgeDiff = CompareEdges(original.Subgraph.Edges, recomputed.Subgraph.Edges);
|
||||
var verdictDiff = CompareVerdicts(original.Verdict, recomputed.Verdict);
|
||||
|
||||
var match = nodeDiff.MissingNodes.IsEmpty &&
|
||||
nodeDiff.ExtraNodes.IsEmpty &&
|
||||
edgeDiff.MissingEdges.IsEmpty &&
|
||||
edgeDiff.ExtraEdges.IsEmpty &&
|
||||
verdictDiff == null;
|
||||
|
||||
return new SliceDiffResult
|
||||
{
|
||||
Match = match,
|
||||
MissingNodes = nodeDiff.MissingNodes,
|
||||
ExtraNodes = nodeDiff.ExtraNodes,
|
||||
MissingEdges = edgeDiff.MissingEdges,
|
||||
ExtraEdges = edgeDiff.ExtraEdges,
|
||||
VerdictDiff = verdictDiff
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compute a cache key for a query based on its parameters.
|
||||
/// </summary>
|
||||
public static string ComputeCacheKey(string scanId, string? cveId, IEnumerable<string>? symbols, IEnumerable<string>? entrypoints, string? policyHash)
|
||||
{
|
||||
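        // Deterministic key: symbols and entrypoints are sorted ordinally before hashing so equivalent queries produce identical keys.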
using var sha256 = SHA256.Create();
|
||||
var sb = new StringBuilder();
|
||||
|
||||
sb.Append("scan:").Append(scanId ?? "").Append('|');
|
||||
sb.Append("cve:").Append(cveId ?? "").Append('|');
|
||||
|
||||
if (symbols != null)
|
||||
{
|
||||
foreach (var s in symbols.OrderBy(x => x, StringComparer.Ordinal))
|
||||
{
|
||||
sb.Append("sym:").Append(s).Append(',');
|
||||
}
|
||||
}
|
||||
sb.Append('|');
|
||||
|
||||
if (entrypoints != null)
|
||||
{
|
||||
foreach (var e in entrypoints.OrderBy(x => x, StringComparer.Ordinal))
|
||||
{
|
||||
sb.Append("ep:").Append(e).Append(',');
|
||||
}
|
||||
}
|
||||
sb.Append('|');
|
||||
|
||||
sb.Append("policy:").Append(policyHash ?? "");
|
||||
|
||||
var hash = sha256.ComputeHash(Encoding.UTF8.GetBytes(sb.ToString()));
|
||||
return Convert.ToHexString(hash).ToLowerInvariant();
|
||||
}
|
||||
|
||||
private static NodeDiffResult CompareNodes(ImmutableArray<SliceNode> original, ImmutableArray<SliceNode> recomputed)
|
||||
{
|
||||
var originalIds = original.Select(n => n.Id).ToHashSet(StringComparer.Ordinal);
|
||||
var recomputedIds = recomputed.Select(n => n.Id).ToHashSet(StringComparer.Ordinal);
|
||||
|
||||
var missing = originalIds.Except(recomputedIds)
|
||||
.OrderBy(x => x, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
var extra = recomputedIds.Except(originalIds)
|
||||
.OrderBy(x => x, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
return new NodeDiffResult(missing, extra);
|
||||
}
|
||||
|
||||
private static EdgeDiffResult CompareEdges(ImmutableArray<SliceEdge> original, ImmutableArray<SliceEdge> recomputed)
|
||||
{
|
||||
static string EdgeKey(SliceEdge e) => $"{e.From}->{e.To}:{e.Kind}";
|
||||
|
||||
var originalKeys = original.Select(EdgeKey).ToHashSet(StringComparer.Ordinal);
|
||||
var recomputedKeys = recomputed.Select(EdgeKey).ToHashSet(StringComparer.Ordinal);
|
||||
|
||||
var missing = originalKeys.Except(recomputedKeys)
|
||||
.OrderBy(x => x, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
var extra = recomputedKeys.Except(originalKeys)
|
||||
.OrderBy(x => x, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
return new EdgeDiffResult(missing, extra);
|
||||
}
|
||||
|
||||
private static string? CompareVerdicts(SliceVerdict original, SliceVerdict recomputed)
|
||||
{
|
||||
if (original.Status != recomputed.Status)
|
||||
{
|
||||
return $"Status: {original.Status} -> {recomputed.Status}";
|
||||
}
|
||||
|
||||
if (Math.Abs(original.Confidence - recomputed.Confidence) > 0.0001)
|
||||
{
|
||||
return $"Confidence: {original.Confidence:F4} -> {recomputed.Confidence:F4}";
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private readonly record struct NodeDiffResult(ImmutableArray<string> MissingNodes, ImmutableArray<string> ExtraNodes);
|
||||
private readonly record struct EdgeDiffResult(ImmutableArray<string> MissingEdges, ImmutableArray<string> ExtraEdges);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of slice comparison.
|
||||
/// </summary>
|
||||
public sealed record SliceDiffResult
|
||||
{
|
||||
public required bool Match { get; init; }
|
||||
public ImmutableArray<string> MissingNodes { get; init; } = ImmutableArray<string>.Empty;
|
||||
public ImmutableArray<string> ExtraNodes { get; init; } = ImmutableArray<string>.Empty;
|
||||
public ImmutableArray<string> MissingEdges { get; init; } = ImmutableArray<string>.Empty;
|
||||
public ImmutableArray<string> ExtraEdges { get; init; } = ImmutableArray<string>.Empty;
|
||||
public string? VerdictDiff { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Get human-readable diff summary.
|
||||
/// </summary>
|
||||
public string ToSummary()
|
||||
{
|
||||
if (Match) return "Slices match exactly.";
|
||||
|
||||
var sb = new StringBuilder();
|
||||
sb.AppendLine("Slice diff:");
|
||||
|
||||
if (!MissingNodes.IsDefaultOrEmpty)
|
||||
{
|
||||
sb.AppendLine($" Missing nodes ({MissingNodes.Length}): {string.Join(", ", MissingNodes.Take(5))}{(MissingNodes.Length > 5 ? "..." : "")}");
|
||||
}
|
||||
|
||||
if (!ExtraNodes.IsDefaultOrEmpty)
|
||||
{
|
||||
sb.AppendLine($" Extra nodes ({ExtraNodes.Length}): {string.Join(", ", ExtraNodes.Take(5))}{(ExtraNodes.Length > 5 ? "..." : "")}");
|
||||
}
|
||||
|
||||
if (!MissingEdges.IsDefaultOrEmpty)
|
||||
{
|
||||
sb.AppendLine($" Missing edges ({MissingEdges.Length}): {string.Join(", ", MissingEdges.Take(5))}{(MissingEdges.Length > 5 ? "..." : "")}");
|
||||
}
|
||||
|
||||
if (!ExtraEdges.IsDefaultOrEmpty)
|
||||
{
|
||||
sb.AppendLine($" Extra edges ({ExtraEdges.Length}): {string.Join(", ", ExtraEdges.Take(5))}{(ExtraEdges.Length > 5 ? "..." : "")}");
|
||||
}
|
||||
|
||||
if (VerdictDiff != null)
|
||||
{
|
||||
sb.AppendLine($" Verdict changed: {VerdictDiff}");
|
||||
}
|
||||
|
||||
return sb.ToString();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,51 @@
using StellaOps.Replay.Core;
using StellaOps.Scanner.ProofSpine;

namespace StellaOps.Scanner.Reachability.Slices;

public sealed class SliceDsseSigner
{
    private readonly IDsseSigningService _signingService;
    private readonly ICryptoProfile _cryptoProfile;
    private readonly SliceHasher _hasher;
    private readonly TimeProvider _timeProvider;

    public SliceDsseSigner(
        IDsseSigningService signingService,
        ICryptoProfile cryptoProfile,
        SliceHasher hasher,
        TimeProvider? timeProvider = null)
    {
        _signingService = signingService ?? throw new ArgumentNullException(nameof(signingService));
        _cryptoProfile = cryptoProfile ?? throw new ArgumentNullException(nameof(cryptoProfile));
        _hasher = hasher ?? throw new ArgumentNullException(nameof(hasher));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    public async Task<SignedSlice> SignAsync(ReachabilitySlice slice, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(slice);

        var normalized = slice.Normalize();
        var digestResult = _hasher.ComputeDigest(normalized);

        var envelope = await _signingService.SignAsync(
            normalized,
            SliceSchema.DssePayloadType,
            _cryptoProfile,
            cancellationToken)
            .ConfigureAwait(false);

        return new SignedSlice(
            Slice: normalized,
            SliceDigest: digestResult.Digest,
            Envelope: envelope,
            SignedAt: _timeProvider.GetUtcNow());
    }
}

public sealed record SignedSlice(
    ReachabilitySlice Slice,
    string SliceDigest,
    DsseEnvelope Envelope,
    DateTimeOffset SignedAt);
@@ -0,0 +1,568 @@
|
||||
using System.Collections.Immutable;
|
||||
using StellaOps.Scanner.Core;
|
||||
using StellaOps.Scanner.Reachability.Gates;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Slices;
|
||||
|
||||
public sealed class SliceExtractor
|
||||
{
|
||||
private readonly VerdictComputer _verdictComputer;
|
||||
|
||||
public SliceExtractor(VerdictComputer verdictComputer)
|
||||
{
|
||||
_verdictComputer = verdictComputer ?? throw new ArgumentNullException(nameof(verdictComputer));
|
||||
}
|
||||
|
||||
public ReachabilitySlice Extract(SliceExtractionRequest request, SliceVerdictOptions? verdictOptions = null)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
var graph = request.Graph;
|
||||
var query = request.Query;
|
||||
|
||||
var nodeLookup = graph.Nodes.ToDictionary(n => n.Id, StringComparer.Ordinal);
|
||||
var entrypoints = ResolveEntrypoints(query, graph, nodeLookup);
|
||||
var targets = ResolveTargets(query, graph);
|
||||
|
||||
if (entrypoints.Count == 0 || targets.Count == 0)
|
||||
{
|
||||
return BuildEmptySlice(request, entrypoints.Count == 0, targets.Count == 0);
|
||||
}
|
||||
|
||||
var forwardEdges = BuildEdgeLookup(graph.Edges);
|
||||
var reverseEdges = BuildReverseEdgeLookup(graph.Edges);
|
||||
|
||||
var reachableFromEntrypoints = Traverse(entrypoints, forwardEdges);
|
||||
var canReachTargets = Traverse(targets, reverseEdges);
|
||||
|
||||
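        // Keep only nodes that lie on some entrypoint-to-target path: reachable from an entrypoint AND able to reach a target.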
var includedNodes = new HashSet<string>(reachableFromEntrypoints, StringComparer.Ordinal);
|
||||
includedNodes.IntersectWith(canReachTargets);
|
||||
foreach (var entry in entrypoints)
|
||||
{
|
||||
includedNodes.Add(entry);
|
||||
}
|
||||
foreach (var target in targets)
|
||||
{
|
||||
includedNodes.Add(target);
|
||||
}
|
||||
|
||||
var subgraphEdges = graph.Edges
|
||||
.Where(e => includedNodes.Contains(e.From) && includedNodes.Contains(e.To))
|
||||
.Where(e => reachableFromEntrypoints.Contains(e.From) && canReachTargets.Contains(e.To))
|
||||
.ToList();
|
||||
|
||||
var subgraphNodes = includedNodes
|
||||
.Where(nodeLookup.ContainsKey)
|
||||
.Select(id => nodeLookup[id])
|
||||
.ToList();
|
||||
|
||||
var nodes = subgraphNodes
|
||||
.Select(node => MapNode(node, entrypoints, targets))
|
||||
.ToImmutableArray();
|
||||
|
||||
var edges = subgraphEdges
|
||||
.Select(MapEdge)
|
||||
.ToImmutableArray();
|
||||
|
||||
var paths = BuildPathSummaries(entrypoints, targets, subgraphEdges, nodeLookup);
|
||||
var unknownEdges = edges.Count(e => e.Kind == SliceEdgeKind.Unknown || e.Confidence < 0.5);
|
||||
var verdict = _verdictComputer.Compute(paths, unknownEdges, verdictOptions);
|
||||
|
||||
return new ReachabilitySlice
|
||||
{
|
||||
Inputs = request.Inputs,
|
||||
Query = request.Query,
|
||||
Subgraph = new SliceSubgraph { Nodes = nodes, Edges = edges },
|
||||
Verdict = verdict,
|
||||
Manifest = request.Manifest
|
||||
}.Normalize();
|
||||
}
|
||||
|
||||
private static ReachabilitySlice BuildEmptySlice(SliceExtractionRequest request, bool missingEntrypoints, bool missingTargets)
|
||||
{
|
||||
var reasons = new List<string>();
|
||||
if (missingEntrypoints)
|
||||
{
|
||||
reasons.Add("missing_entrypoints");
|
||||
}
|
||||
if (missingTargets)
|
||||
{
|
||||
reasons.Add("missing_targets");
|
||||
}
|
||||
|
||||
return new ReachabilitySlice
|
||||
{
|
||||
Inputs = request.Inputs,
|
||||
Query = request.Query,
|
||||
Subgraph = new SliceSubgraph(),
|
||||
Verdict = new SliceVerdict
|
||||
{
|
||||
Status = SliceVerdictStatus.Unknown,
|
||||
Confidence = 0.0,
|
||||
Reasons = reasons.ToImmutableArray()
|
||||
},
|
||||
Manifest = request.Manifest
|
||||
}.Normalize();
|
||||
}
|
||||
|
||||
private static HashSet<string> ResolveEntrypoints(
|
||||
SliceQuery query,
|
||||
RichGraph graph,
|
||||
Dictionary<string, RichGraphNode> nodeLookup)
|
||||
{
|
||||
var entrypoints = new HashSet<string>(StringComparer.Ordinal);
|
||||
var explicitEntrypoints = query.Entrypoints;
|
||||
|
||||
if (!explicitEntrypoints.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var entry in explicitEntrypoints)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(entry))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var trimmed = entry.Trim();
|
||||
if (nodeLookup.ContainsKey(trimmed))
|
||||
{
|
||||
entrypoints.Add(trimmed);
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
foreach (var root in graph.Roots ?? Array.Empty<RichGraphRoot>())
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(root.Id))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var trimmed = root.Id.Trim();
|
||||
if (nodeLookup.ContainsKey(trimmed))
|
||||
{
|
||||
entrypoints.Add(trimmed);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return entrypoints;
|
||||
}
|
||||
|
||||
private static HashSet<string> ResolveTargets(SliceQuery query, RichGraph graph)
|
||||
{
|
||||
var targets = new HashSet<string>(StringComparer.Ordinal);
|
||||
if (query.TargetSymbols.IsDefaultOrEmpty)
|
||||
{
|
||||
return targets;
|
||||
}
|
||||
|
||||
foreach (var target in query.TargetSymbols)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(target))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var trimmed = target.Trim();
|
||||
if (IsPackageTarget(trimmed))
|
||||
{
|
||||
var packageTargets = graph.Nodes
|
||||
.Where(n => string.Equals(n.Purl, trimmed, StringComparison.OrdinalIgnoreCase))
|
||||
.Where(IsPublicNode)
|
||||
.Select(n => n.Id);
|
||||
|
||||
foreach (var nodeId in packageTargets)
|
||||
{
|
||||
targets.Add(nodeId);
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
foreach (var node in graph.Nodes)
|
||||
{
|
||||
if (string.Equals(node.Id, trimmed, StringComparison.Ordinal) ||
|
||||
string.Equals(node.SymbolId, trimmed, StringComparison.Ordinal))
|
||||
{
|
||||
targets.Add(node.Id);
|
||||
}
|
||||
else if (!string.IsNullOrWhiteSpace(node.Display) &&
|
||||
string.Equals(node.Display, trimmed, StringComparison.Ordinal))
|
||||
{
|
||||
targets.Add(node.Id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return targets;
|
||||
}
|
||||
|
||||
private static bool IsPackageTarget(string value)
|
||||
=> value.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase);
|
||||
|
||||
private static bool IsPublicNode(RichGraphNode node)
|
||||
{
|
||||
if (node.Attributes is not null &&
|
||||
node.Attributes.TryGetValue("visibility", out var visibility) &&
|
||||
!string.IsNullOrWhiteSpace(visibility))
|
||||
{
|
||||
return visibility.Equals("public", StringComparison.OrdinalIgnoreCase)
|
||||
|| visibility.Equals("exported", StringComparison.OrdinalIgnoreCase);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private static Dictionary<string, List<RichGraphEdge>> BuildEdgeLookup(IReadOnlyList<RichGraphEdge> edges)
|
||||
{
|
||||
var lookup = new Dictionary<string, List<RichGraphEdge>>(StringComparer.Ordinal);
|
||||
foreach (var edge in edges ?? Array.Empty<RichGraphEdge>())
|
||||
{
|
||||
if (!lookup.TryGetValue(edge.From, out var list))
|
||||
{
|
||||
list = new List<RichGraphEdge>();
|
||||
lookup[edge.From] = list;
|
||||
}
|
||||
|
||||
list.Add(edge);
|
||||
}
|
||||
|
||||
foreach (var list in lookup.Values)
|
||||
{
|
||||
list.Sort(CompareForward);
|
||||
}
|
||||
|
||||
return lookup;
|
||||
}
|
||||
|
||||
private static Dictionary<string, List<RichGraphEdge>> BuildReverseEdgeLookup(IReadOnlyList<RichGraphEdge> edges)
|
||||
{
|
||||
var lookup = new Dictionary<string, List<RichGraphEdge>>(StringComparer.Ordinal);
|
||||
foreach (var edge in edges ?? Array.Empty<RichGraphEdge>())
|
||||
{
|
||||
if (!lookup.TryGetValue(edge.To, out var list))
|
||||
{
|
||||
list = new List<RichGraphEdge>();
|
||||
lookup[edge.To] = list;
|
||||
}
|
||||
|
||||
list.Add(edge);
|
||||
}
|
||||
|
||||
foreach (var list in lookup.Values)
|
||||
{
|
||||
list.Sort(CompareReverse);
|
||||
}
|
||||
|
||||
return lookup;
|
||||
}
|
||||
|
||||
private static HashSet<string> Traverse(
|
||||
HashSet<string> seeds,
|
||||
Dictionary<string, List<RichGraphEdge>> edgeLookup)
|
||||
{
|
||||
var visited = new HashSet<string>(seeds, StringComparer.Ordinal);
|
||||
var queue = new Queue<string>(seeds);
|
||||
|
||||
while (queue.Count > 0)
|
||||
{
|
||||
var current = queue.Dequeue();
|
||||
if (!edgeLookup.TryGetValue(current, out var edges))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
foreach (var edge in edges)
|
||||
{
|
||||
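                // The lookup may be keyed by From (forward) or To (reverse); follow whichever endpoint is not the current node.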
var next = edge.From == current ? edge.To : edge.From;
|
||||
if (!visited.Add(next))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
queue.Enqueue(next);
|
||||
}
|
||||
}
|
||||
|
||||
return visited;
|
||||
}
|
||||
|
||||
private static SliceNode MapNode(
|
||||
RichGraphNode node,
|
||||
HashSet<string> entrypoints,
|
||||
HashSet<string> targets)
|
||||
{
|
||||
var kind = SliceNodeKind.Intermediate;
|
||||
if (entrypoints.Contains(node.Id))
|
||||
{
|
||||
kind = SliceNodeKind.Entrypoint;
|
||||
}
|
||||
else if (targets.Contains(node.Id))
|
||||
{
|
||||
kind = SliceNodeKind.Target;
|
||||
}
|
||||
|
||||
return new SliceNode
|
||||
{
|
||||
Id = node.Id,
|
||||
Symbol = node.Display ?? node.SymbolId ?? node.Id,
|
||||
Kind = kind,
|
||||
File = ExtractAttribute(node, "file") ?? ExtractAttribute(node, "source_file"),
|
||||
Line = ExtractIntAttribute(node, "line"),
|
||||
Purl = node.Purl,
|
||||
Attributes = node.Attributes
|
||||
};
|
||||
}
|
||||
|
||||
private static SliceEdge MapEdge(RichGraphEdge edge)
|
||||
{
|
||||
return new SliceEdge
|
||||
{
|
||||
From = edge.From,
|
||||
To = edge.To,
|
||||
Kind = MapEdgeKind(edge.Kind),
|
||||
Confidence = edge.Confidence,
|
||||
Evidence = edge.Evidence?.FirstOrDefault(),
|
||||
Gate = MapGate(edge.Gates)
|
||||
};
|
||||
}
|
||||
|
||||
private static SliceEdgeKind MapEdgeKind(string? kind)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(kind))
|
||||
{
|
||||
return SliceEdgeKind.Direct;
|
||||
}
|
||||
|
||||
var normalized = kind.Trim().ToLowerInvariant();
|
||||
if (normalized.Contains("plt", StringComparison.Ordinal))
|
||||
{
|
||||
return SliceEdgeKind.Plt;
|
||||
}
|
||||
|
||||
if (normalized.Contains("iat", StringComparison.Ordinal))
|
||||
{
|
||||
return SliceEdgeKind.Iat;
|
||||
}
|
||||
|
||||
return normalized switch
|
||||
{
|
||||
EdgeTypes.Dynamic => SliceEdgeKind.Dynamic,
|
||||
EdgeTypes.Dlopen => SliceEdgeKind.Dynamic,
|
||||
EdgeTypes.Loads => SliceEdgeKind.Dynamic,
|
||||
EdgeTypes.Call => SliceEdgeKind.Direct,
|
||||
EdgeTypes.Import => SliceEdgeKind.Direct,
|
||||
_ => SliceEdgeKind.Unknown
|
||||
};
|
||||
}
|
||||
|
||||
private static SliceGateInfo? MapGate(IReadOnlyList<DetectedGate>? gates)
|
||||
{
|
||||
if (gates is null || gates.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var gate = gates
|
||||
.OrderByDescending(g => g.Confidence)
|
||||
.ThenBy(g => g.Detail, StringComparer.Ordinal)
|
||||
.First();
|
||||
|
||||
return new SliceGateInfo
|
||||
{
|
||||
Type = gate.Type switch
|
||||
{
|
||||
GateType.FeatureFlag => SliceGateType.FeatureFlag,
|
||||
GateType.AuthRequired => SliceGateType.Auth,
|
||||
GateType.NonDefaultConfig => SliceGateType.Config,
|
||||
GateType.AdminOnly => SliceGateType.AdminOnly,
|
||||
_ => SliceGateType.Config
|
||||
},
|
||||
Condition = gate.Detail,
|
||||
Satisfied = false
|
||||
};
|
||||
}
|
||||
|
||||
private static ImmutableArray<SlicePathSummary> BuildPathSummaries(
|
||||
HashSet<string> entrypoints,
|
||||
HashSet<string> targets,
|
||||
IReadOnlyList<RichGraphEdge> edges,
|
||||
Dictionary<string, RichGraphNode> nodeLookup)
|
||||
{
|
||||
var edgeLookup = BuildEdgeLookup(edges);
|
||||
var edgeMap = new Dictionary<(string From, string To), RichGraphEdge>();
|
||||
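        // Deduplicate parallel edges: keep the highest-confidence edge per (From, To) pair, iterating in a stable order for determinism.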
foreach (var edge in edges
|
||||
.OrderBy(e => e.From, StringComparer.Ordinal)
|
||||
.ThenBy(e => e.To, StringComparer.Ordinal)
|
||||
.ThenBy(e => e.Kind, StringComparer.Ordinal))
|
||||
{
|
||||
var key = (edge.From, edge.To);
|
||||
if (!edgeMap.TryGetValue(key, out var existing) || edge.Confidence > existing.Confidence)
|
||||
{
|
||||
edgeMap[key] = edge;
|
||||
}
|
||||
}
|
||||
var results = new List<SlicePathSummary>();
|
||||
var pathIndex = 0;
|
||||
|
||||
foreach (var entry in entrypoints.OrderBy(e => e, StringComparer.Ordinal))
|
||||
{
|
||||
foreach (var target in targets.OrderBy(t => t, StringComparer.Ordinal))
|
||||
{
|
||||
var path = FindShortestPath(entry, target, edgeLookup);
|
||||
if (path is null || path.Count == 0)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var minConfidence = 1.0;
|
||||
var witnessParts = new List<string>();
|
||||
for (var i = 0; i < path.Count; i++)
|
||||
{
|
||||
if (nodeLookup.TryGetValue(path[i], out var node))
|
||||
{
|
||||
witnessParts.Add(node.Display ?? node.SymbolId ?? node.Id);
|
||||
}
|
||||
else
|
||||
{
|
||||
witnessParts.Add(path[i]);
|
||||
}
|
||||
|
||||
if (i == path.Count - 1)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (edgeMap.TryGetValue((path[i], path[i + 1]), out var edge))
|
||||
{
|
||||
minConfidence = Math.Min(minConfidence, edge.Confidence);
|
||||
}
|
||||
}
|
||||
|
||||
var witness = string.Join(" -> ", witnessParts);
|
||||
results.Add(new SlicePathSummary(
|
||||
PathId: $"path:{entry}:{target}:{pathIndex++}",
|
||||
MinConfidence: minConfidence,
|
||||
PathWitness: witness));
|
||||
}
|
||||
}
|
||||
|
||||
return results.ToImmutableArray();
|
||||
}
|
||||
|
||||
private static List<string>? FindShortestPath(
|
||||
string start,
|
||||
string target,
|
||||
Dictionary<string, List<RichGraphEdge>> edgeLookup)
|
||||
{
|
||||
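        // Unweighted breadth-first search; edge lists were pre-sorted, so the first path found is both shortest and deterministic.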
var queue = new Queue<string>();
|
||||
var visited = new HashSet<string>(StringComparer.Ordinal) { start };
|
||||
var previous = new Dictionary<string, string?>(StringComparer.Ordinal) { [start] = null };
|
||||
|
||||
queue.Enqueue(start);
|
||||
|
||||
while (queue.Count > 0)
|
||||
{
|
||||
var current = queue.Dequeue();
|
||||
if (string.Equals(current, target, StringComparison.Ordinal))
|
||||
{
|
||||
return BuildPath(target, previous);
|
||||
}
|
||||
|
||||
if (!edgeLookup.TryGetValue(current, out var edges))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
foreach (var edge in edges)
|
||||
{
|
||||
var next = edge.To;
|
||||
if (!visited.Add(next))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
previous[next] = current;
|
||||
queue.Enqueue(next);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static int CompareForward(RichGraphEdge left, RichGraphEdge right)
|
||||
{
|
||||
var result = string.Compare(left.To, right.To, StringComparison.Ordinal);
|
||||
if (result != 0)
|
||||
{
|
||||
return result;
|
||||
}
|
||||
|
||||
result = string.Compare(left.Kind, right.Kind, StringComparison.Ordinal);
|
||||
if (result != 0)
|
||||
{
|
||||
return result;
|
||||
}
|
||||
|
||||
return left.Confidence.CompareTo(right.Confidence);
|
||||
}
|
||||
|
||||
private static int CompareReverse(RichGraphEdge left, RichGraphEdge right)
|
||||
{
|
||||
var result = string.Compare(left.From, right.From, StringComparison.Ordinal);
|
||||
if (result != 0)
|
||||
{
|
||||
return result;
|
||||
}
|
||||
|
||||
result = string.Compare(left.Kind, right.Kind, StringComparison.Ordinal);
|
||||
if (result != 0)
|
||||
{
|
||||
return result;
|
||||
}
|
||||
|
||||
return left.Confidence.CompareTo(right.Confidence);
|
||||
}
|
||||
|
||||
private static List<string> BuildPath(string target, Dictionary<string, string?> previous)
|
||||
{
|
||||
var path = new List<string>();
|
||||
string? current = target;
|
||||
while (current is not null)
|
||||
{
|
||||
path.Add(current);
|
||||
current = previous[current];
|
||||
}
|
||||
|
||||
path.Reverse();
|
||||
return path;
|
||||
}
|
||||
|
||||
private static string? ExtractAttribute(RichGraphNode node, string key)
|
||||
{
|
||||
if (node.Attributes is not null && node.Attributes.TryGetValue(key, out var value))
|
||||
{
|
||||
return value;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static int? ExtractIntAttribute(RichGraphNode node, string key)
|
||||
{
|
||||
var value = ExtractAttribute(node, key);
|
||||
if (value is not null && int.TryParse(value, out var parsed))
|
||||
{
|
||||
return parsed;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public sealed record SliceExtractionRequest(
|
||||
RichGraph Graph,
|
||||
SliceInputs Inputs,
|
||||
SliceQuery Query,
|
||||
ScanManifest Manifest);
|
||||
@@ -0,0 +1,27 @@
using StellaOps.Cryptography;
using StellaOps.Replay.Core;

namespace StellaOps.Scanner.Reachability.Slices;

public sealed class SliceHasher
{
    private readonly ICryptoHash _cryptoHash;

    public SliceHasher(ICryptoHash cryptoHash)
    {
        _cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
    }

    public SliceDigestResult ComputeDigest(ReachabilitySlice slice)
    {
        ArgumentNullException.ThrowIfNull(slice);

        var normalized = slice.Normalize();
        var bytes = CanonicalJson.SerializeToUtf8Bytes(normalized);
        var digest = _cryptoHash.ComputePrefixedHashForPurpose(bytes, HashPurpose.Graph);

        return new SliceDigestResult(digest, bytes);
    }
}

public sealed record SliceDigestResult(string Digest, byte[] CanonicalBytes);
@@ -0,0 +1,392 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Scanner.Core;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Slices;
|
||||
|
||||
public sealed record ReachabilitySlice
|
||||
{
|
||||
[JsonPropertyName("_type")]
|
||||
public string Type { get; init; } = SliceSchema.PredicateType;
|
||||
|
||||
[JsonPropertyName("inputs")]
|
||||
public required SliceInputs Inputs { get; init; }
|
||||
|
||||
[JsonPropertyName("query")]
|
||||
public required SliceQuery Query { get; init; }
|
||||
|
||||
[JsonPropertyName("subgraph")]
|
||||
public required SliceSubgraph Subgraph { get; init; }
|
||||
|
||||
[JsonPropertyName("verdict")]
|
||||
public required SliceVerdict Verdict { get; init; }
|
||||
|
||||
[JsonPropertyName("manifest")]
|
||||
public required ScanManifest Manifest { get; init; }
|
||||
|
||||
public ReachabilitySlice Normalize() => SliceNormalization.Normalize(this);
|
||||
}
|
||||
|
||||
public sealed record SliceInputs
|
||||
{
|
||||
[JsonPropertyName("graphDigest")]
|
||||
public required string GraphDigest { get; init; }
|
||||
|
||||
[JsonPropertyName("binaryDigests")]
|
||||
public ImmutableArray<string> BinaryDigests { get; init; } = ImmutableArray<string>.Empty;
|
||||
|
||||
[JsonPropertyName("sbomDigest")]
|
||||
public string? SbomDigest { get; init; }
|
||||
|
||||
[JsonPropertyName("layerDigests")]
|
||||
public ImmutableArray<string> LayerDigests { get; init; } = ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
public sealed record SliceQuery
|
||||
{
|
||||
[JsonPropertyName("cveId")]
|
||||
public string? CveId { get; init; }
|
||||
|
||||
[JsonPropertyName("targetSymbols")]
|
||||
public ImmutableArray<string> TargetSymbols { get; init; } = ImmutableArray<string>.Empty;
|
||||
|
||||
[JsonPropertyName("entrypoints")]
|
||||
public ImmutableArray<string> Entrypoints { get; init; } = ImmutableArray<string>.Empty;
|
||||
|
||||
[JsonPropertyName("policyHash")]
|
||||
public string? PolicyHash { get; init; }
|
||||
}
|
||||
|
||||
public sealed record SliceSubgraph
|
||||
{
|
||||
[JsonPropertyName("nodes")]
|
||||
public ImmutableArray<SliceNode> Nodes { get; init; } = ImmutableArray<SliceNode>.Empty;
|
||||
|
||||
[JsonPropertyName("edges")]
|
||||
public ImmutableArray<SliceEdge> Edges { get; init; } = ImmutableArray<SliceEdge>.Empty;
|
||||
}
|
||||
|
||||
public enum SliceNodeKind
|
||||
{
|
||||
Entrypoint,
|
||||
Intermediate,
|
||||
Target,
|
||||
Unknown
|
||||
}
|
||||
|
||||
public sealed record SliceNode
|
||||
{
|
||||
[JsonPropertyName("id")]
|
||||
public required string Id { get; init; }
|
||||
|
||||
[JsonPropertyName("symbol")]
|
||||
public required string Symbol { get; init; }
|
||||
|
||||
[JsonPropertyName("kind")]
|
||||
[JsonConverter(typeof(SnakeCaseStringEnumConverter))]
|
||||
public required SliceNodeKind Kind { get; init; }
|
||||
|
||||
[JsonPropertyName("file")]
|
||||
public string? File { get; init; }
|
||||
|
||||
[JsonPropertyName("line")]
|
||||
public int? Line { get; init; }
|
||||
|
||||
[JsonPropertyName("purl")]
|
||||
public string? Purl { get; init; }
|
||||
|
||||
[JsonPropertyName("attributes")]
|
||||
public IReadOnlyDictionary<string, string>? Attributes { get; init; }
|
||||
}
|
||||
|
||||
public enum SliceEdgeKind
|
||||
{
|
||||
Direct,
|
||||
Plt,
|
||||
Iat,
|
||||
Dynamic,
|
||||
Unknown
|
||||
}
|
||||
|
||||
public sealed record SliceEdge
|
||||
{
|
||||
[JsonPropertyName("from")]
|
||||
public required string From { get; init; }
|
||||
|
||||
[JsonPropertyName("to")]
|
||||
public required string To { get; init; }
|
||||
|
||||
[JsonPropertyName("kind")]
|
||||
[JsonConverter(typeof(SnakeCaseStringEnumConverter))]
|
||||
public SliceEdgeKind Kind { get; init; } = SliceEdgeKind.Direct;
|
||||
|
||||
[JsonPropertyName("confidence")]
|
||||
public double Confidence { get; init; }
|
||||
|
||||
[JsonPropertyName("evidence")]
|
||||
public string? Evidence { get; init; }
|
||||
|
||||
[JsonPropertyName("gate")]
|
||||
public SliceGateInfo? Gate { get; init; }
|
||||
|
||||
[JsonPropertyName("observed")]
|
||||
public ObservedEdgeMetadata? Observed { get; init; }
|
||||
}
|
||||
|
||||
public enum SliceGateType
|
||||
{
|
||||
FeatureFlag,
|
||||
Auth,
|
||||
Config,
|
||||
AdminOnly
|
||||
}
|
||||
|
||||
public sealed record SliceGateInfo
|
||||
{
|
||||
[JsonPropertyName("type")]
|
||||
[JsonConverter(typeof(SnakeCaseStringEnumConverter))]
|
||||
public required SliceGateType Type { get; init; }
|
||||
|
||||
[JsonPropertyName("condition")]
|
||||
public required string Condition { get; init; }
|
||||
|
||||
[JsonPropertyName("satisfied")]
|
||||
public required bool Satisfied { get; init; }
|
||||
}
|
||||
|
||||
public sealed record ObservedEdgeMetadata
|
||||
{
|
||||
[JsonPropertyName("firstObserved")]
|
||||
public required DateTimeOffset FirstObserved { get; init; }
|
||||
|
||||
[JsonPropertyName("lastObserved")]
|
||||
public required DateTimeOffset LastObserved { get; init; }
|
||||
|
||||
[JsonPropertyName("count")]
|
||||
public required int ObservationCount { get; init; }
|
||||
|
||||
[JsonPropertyName("traceDigest")]
|
||||
public string? TraceDigest { get; init; }
|
||||
}
|
||||
|
||||
public enum SliceVerdictStatus
|
||||
{
|
||||
Reachable,
|
||||
Unreachable,
|
||||
Unknown,
|
||||
Gated,
|
||||
ObservedReachable
|
||||
}
|
||||
|
||||
public sealed record GatedPath
|
||||
{
|
||||
[JsonPropertyName("pathId")]
|
||||
public required string PathId { get; init; }
|
||||
|
||||
[JsonPropertyName("gateType")]
|
||||
public required string GateType { get; init; }
|
||||
|
||||
[JsonPropertyName("gateCondition")]
|
||||
public required string GateCondition { get; init; }
|
||||
|
||||
[JsonPropertyName("gateSatisfied")]
|
||||
public required bool GateSatisfied { get; init; }
|
||||
}
|
||||
|
||||
public sealed record SliceVerdict
|
||||
{
|
||||
[JsonPropertyName("status")]
|
||||
[JsonConverter(typeof(SnakeCaseStringEnumConverter))]
|
||||
public required SliceVerdictStatus Status { get; init; }
|
||||
|
||||
[JsonPropertyName("confidence")]
|
||||
public required double Confidence { get; init; }
|
||||
|
||||
[JsonPropertyName("reasons")]
|
||||
public ImmutableArray<string> Reasons { get; init; } = ImmutableArray<string>.Empty;
|
||||
|
||||
[JsonPropertyName("pathWitnesses")]
|
||||
public ImmutableArray<string> PathWitnesses { get; init; } = ImmutableArray<string>.Empty;
|
||||
|
||||
[JsonPropertyName("unknownCount")]
|
||||
public int UnknownCount { get; init; }
|
||||
|
||||
[JsonPropertyName("gatedPaths")]
|
||||
public ImmutableArray<GatedPath> GatedPaths { get; init; } = ImmutableArray<GatedPath>.Empty;
|
||||
}
|
||||
|
||||
internal static class SliceNormalization
|
||||
{
|
||||
public static ReachabilitySlice Normalize(ReachabilitySlice slice)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(slice);
|
||||
|
||||
return slice with
|
||||
{
|
||||
Type = string.IsNullOrWhiteSpace(slice.Type) ? SliceSchema.PredicateType : slice.Type.Trim(),
|
||||
Inputs = Normalize(slice.Inputs),
|
||||
Query = Normalize(slice.Query),
|
||||
Subgraph = Normalize(slice.Subgraph),
|
||||
Verdict = Normalize(slice.Verdict),
|
||||
Manifest = slice.Manifest
|
||||
};
|
||||
}
|
||||
|
||||
private static SliceInputs Normalize(SliceInputs inputs)
|
||||
{
|
||||
return inputs with
|
||||
{
|
||||
GraphDigest = inputs.GraphDigest.Trim(),
|
||||
BinaryDigests = NormalizeStrings(inputs.BinaryDigests),
|
||||
SbomDigest = string.IsNullOrWhiteSpace(inputs.SbomDigest) ? null : inputs.SbomDigest.Trim(),
|
||||
LayerDigests = NormalizeStrings(inputs.LayerDigests)
|
||||
};
|
||||
}
|
||||
|
||||
private static SliceQuery Normalize(SliceQuery query)
|
||||
{
|
||||
return query with
|
||||
{
|
||||
CveId = string.IsNullOrWhiteSpace(query.CveId) ? null : query.CveId.Trim(),
|
||||
TargetSymbols = NormalizeStrings(query.TargetSymbols),
|
||||
Entrypoints = NormalizeStrings(query.Entrypoints),
|
||||
PolicyHash = string.IsNullOrWhiteSpace(query.PolicyHash) ? null : query.PolicyHash.Trim()
|
||||
};
|
||||
}
|
||||
|
||||
private static SliceSubgraph Normalize(SliceSubgraph subgraph)
|
||||
{
|
||||
var nodes = subgraph.Nodes
|
||||
.Where(n => n is not null)
|
||||
.Select(Normalize)
|
||||
.OrderBy(n => n.Id, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
var edges = subgraph.Edges
|
||||
.Where(e => e is not null)
|
||||
.Select(Normalize)
|
||||
.OrderBy(e => e.From, StringComparer.Ordinal)
|
||||
.ThenBy(e => e.To, StringComparer.Ordinal)
|
||||
.ThenBy(e => e.Kind.ToString(), StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
return subgraph with { Nodes = nodes, Edges = edges };
|
||||
}
|
||||
|
||||
private static SliceNode Normalize(SliceNode node)
|
||||
{
|
||||
return node with
|
||||
{
|
||||
Id = node.Id.Trim(),
|
||||
Symbol = node.Symbol.Trim(),
|
||||
File = string.IsNullOrWhiteSpace(node.File) ? null : node.File.Trim(),
|
||||
Purl = string.IsNullOrWhiteSpace(node.Purl) ? null : node.Purl.Trim(),
|
||||
Attributes = NormalizeAttributes(node.Attributes)
|
||||
};
|
||||
}
|
||||
|
||||
private static SliceEdge Normalize(SliceEdge edge)
|
||||
{
|
||||
return edge with
|
||||
{
|
||||
From = edge.From.Trim(),
|
||||
To = edge.To.Trim(),
|
||||
Confidence = Math.Clamp(edge.Confidence, 0.0, 1.0),
|
||||
Evidence = string.IsNullOrWhiteSpace(edge.Evidence) ? null : edge.Evidence.Trim(),
|
||||
Gate = Normalize(edge.Gate),
|
||||
Observed = Normalize(edge.Observed)
|
||||
};
|
||||
}
|
||||
|
||||
private static SliceGateInfo? Normalize(SliceGateInfo? gate)
|
||||
{
|
||||
if (gate is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return gate with
|
||||
{
|
||||
Condition = gate.Condition.Trim()
|
||||
};
|
||||
}
|
||||
|
||||
private static ObservedEdgeMetadata? Normalize(ObservedEdgeMetadata? observed)
|
||||
{
|
||||
if (observed is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return observed with
|
||||
{
|
||||
FirstObserved = observed.FirstObserved.ToUniversalTime(),
|
||||
LastObserved = observed.LastObserved.ToUniversalTime(),
|
||||
ObservationCount = Math.Max(0, observed.ObservationCount),
|
||||
TraceDigest = string.IsNullOrWhiteSpace(observed.TraceDigest) ? null : observed.TraceDigest.Trim()
|
||||
};
|
||||
}
|
||||
|
||||
private static SliceVerdict Normalize(SliceVerdict verdict)
|
||||
{
|
||||
return verdict with
|
||||
{
|
||||
Confidence = Math.Clamp(verdict.Confidence, 0.0, 1.0),
|
||||
Reasons = NormalizeStrings(verdict.Reasons),
|
||||
PathWitnesses = NormalizeStrings(verdict.PathWitnesses),
|
||||
UnknownCount = Math.Max(0, verdict.UnknownCount),
|
||||
GatedPaths = verdict.GatedPaths
|
||||
.Select(Normalize)
|
||||
.OrderBy(p => p.PathId, StringComparer.Ordinal)
|
||||
.ToImmutableArray()
|
||||
};
|
||||
}
|
||||
|
||||
private static GatedPath Normalize(GatedPath path)
|
||||
{
|
||||
return path with
|
||||
{
|
||||
PathId = path.PathId.Trim(),
|
||||
GateType = path.GateType.Trim(),
|
||||
GateCondition = path.GateCondition.Trim()
|
||||
};
|
||||
}
|
||||
|
||||
private static ImmutableArray<string> NormalizeStrings(ImmutableArray<string> values)
|
||||
{
|
||||
if (values.IsDefaultOrEmpty)
|
||||
{
|
||||
return ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
return values
|
||||
.Where(v => !string.IsNullOrWhiteSpace(v))
|
||||
.Select(v => v.Trim())
|
||||
.Distinct(StringComparer.Ordinal)
|
||||
.OrderBy(v => v, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
}
|
||||
|
||||
private static IReadOnlyDictionary<string, string>? NormalizeAttributes(IReadOnlyDictionary<string, string>? attributes)
|
||||
{
|
||||
if (attributes is null || attributes.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return attributes
|
||||
.Where(kv => !string.IsNullOrWhiteSpace(kv.Key) && kv.Value is not null)
|
||||
.ToImmutableSortedDictionary(
|
||||
kv => kv.Key.Trim(),
|
||||
kv => kv.Value.Trim(),
|
||||
StringComparer.Ordinal);
|
||||
}
|
||||
}
|
||||
|
||||
internal sealed class SnakeCaseStringEnumConverter : JsonStringEnumConverter
|
||||
{
|
||||
public SnakeCaseStringEnumConverter() : base(JsonNamingPolicy.SnakeCaseLower)
|
||||
{
|
||||
}
|
||||
}
|
||||
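For reference, the canonical JSON this record graph serializes to looks roughly like the example below. Digests, identifiers, and the elided `manifest` body are placeholders, and enum values render in snake_case via the converter declared at the end of this file.

{
  "_type": "stellaops.dev/predicates/reachability-slice@v1",
  "inputs": {
    "graphDigest": "sha256:...",
    "binaryDigests": ["sha256:..."],
    "sbomDigest": "sha256:...",
    "layerDigests": []
  },
  "query": {
    "cveId": "CVE-2025-0001",
    "targetSymbols": ["openssl::EVP_DecryptUpdate"],
    "entrypoints": ["app::Main"],
    "policyHash": null
  },
  "subgraph": {
    "nodes": [
      { "id": "n1", "symbol": "app::Main", "kind": "entrypoint" },
      { "id": "n2", "symbol": "openssl::EVP_DecryptUpdate", "kind": "target", "purl": "pkg:generic/openssl@3.0.0" }
    ],
    "edges": [
      { "from": "n1", "to": "n2", "kind": "direct", "confidence": 0.8, "evidence": "static-call" }
    ]
  },
  "verdict": {
    "status": "reachable",
    "confidence": 0.8,
    "reasons": ["path_exists_high_confidence"],
    "pathWitnesses": ["n1->n2"],
    "unknownCount": 0,
    "gatedPaths": []
  },
  "manifest": { }
}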
@@ -0,0 +1,11 @@
namespace StellaOps.Scanner.Reachability.Slices;

/// <summary>
/// Constants for the reachability slice schema.
/// </summary>
public static class SliceSchema
{
    public const string PredicateType = "stellaops.dev/predicates/reachability-slice@v1";
    public const string JsonSchemaUri = "https://stellaops.dev/schemas/stellaops-slice.v1.schema.json";
    public const string DssePayloadType = "application/vnd.stellaops.slice.v1+json";
}
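These constants would surface in the signed artifact roughly as sketched below; the envelope layout is an assumption based on common DSSE practice rather than something this diff defines. The base64 payload would be the canonical slice JSON whose `_type` equals `PredicateType`, with `JsonSchemaUri` identifying the schema used to validate it.

{
  "payloadType": "application/vnd.stellaops.slice.v1+json",
  "payload": "<base64 of the canonical slice JSON>",
  "signatures": [ { "keyid": "<signer key id>", "sig": "<base64 signature>" } ]
}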
@@ -0,0 +1,109 @@
using System.Collections.Immutable;

namespace StellaOps.Scanner.Reachability.Slices;

public sealed class VerdictComputer
{
    public SliceVerdict Compute(
        IReadOnlyList<SlicePathSummary> paths,
        int unknownEdgeCount,
        SliceVerdictOptions? options = null)
    {
        options ??= new SliceVerdictOptions();
        var hasPath = paths.Count > 0;
        var minConfidence = hasPath ? paths.Min(p => p.MinConfidence) : 0.0;
        var unknowns = Math.Max(0, unknownEdgeCount);

        SliceVerdictStatus status;
        if (hasPath && minConfidence > options.ReachableThreshold && unknowns == 0)
        {
            status = SliceVerdictStatus.Reachable;
        }
        else if (!hasPath && unknowns == 0)
        {
            status = SliceVerdictStatus.Unreachable;
        }
        else
        {
            status = SliceVerdictStatus.Unknown;
        }

        var confidence = status switch
        {
            SliceVerdictStatus.Reachable => minConfidence,
            SliceVerdictStatus.Unreachable => options.UnreachableConfidence,
            _ => hasPath ? Math.Min(minConfidence, options.UnknownConfidence) : options.UnknownConfidence
        };

        var reasons = BuildReasons(status, hasPath, unknowns, minConfidence, options);
        var witnesses = paths
            .Select(p => p.PathWitness)
            .Where(p => !string.IsNullOrWhiteSpace(p))
            .Select(p => p!.Trim())
            .Distinct(StringComparer.Ordinal)
            .OrderBy(p => p, StringComparer.Ordinal)
            .ToImmutableArray();

        return new SliceVerdict
        {
            Status = status,
            Confidence = confidence,
            Reasons = reasons,
            PathWitnesses = witnesses,
            UnknownCount = unknowns
        };
    }

    private static ImmutableArray<string> BuildReasons(
        SliceVerdictStatus status,
        bool hasPath,
        int unknowns,
        double minConfidence,
        SliceVerdictOptions options)
    {
        var reasons = new List<string>();
        switch (status)
        {
            case SliceVerdictStatus.Reachable:
                reasons.Add("path_exists_high_confidence");
                break;
            case SliceVerdictStatus.Unreachable:
                reasons.Add("no_paths_found");
                break;
            default:
                if (!hasPath)
                {
                    reasons.Add("no_paths_found_with_unknowns");
                }
                else if (minConfidence < options.UnknownThreshold)
                {
                    reasons.Add("low_confidence_path");
                }
                else
                {
                    reasons.Add("unknown_edges_present");
                }
                break;
        }

        if (unknowns > 0)
        {
            reasons.Add($"unknown_edges:{unknowns}");
        }

        return reasons.OrderBy(r => r, StringComparer.Ordinal).ToImmutableArray();
    }
}

public sealed record SliceVerdictOptions
{
    public double ReachableThreshold { get; init; } = 0.7;
    public double UnknownThreshold { get; init; } = 0.5;
    public double UnreachableConfidence { get; init; } = 0.9;
    public double UnknownConfidence { get; init; } = 0.4;
}

public sealed record SlicePathSummary(
    string PathId,
    double MinConfidence,
    string? PathWitness);
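To make the thresholds concrete, a small illustration of the expected outcomes under the default options (all inputs below are made-up values, not fixtures from this change):

var computer = new VerdictComputer();

// One fully analysed path with min edge confidence 0.85 and no unknown edges => reachable.
var reachable = computer.Compute(
    new[] { new SlicePathSummary("p1", 0.85, "main->parse->vuln") },
    unknownEdgeCount: 0);
// reachable.Status == SliceVerdictStatus.Reachable, reachable.Confidence == 0.85

// No paths and no unknown edges => unreachable at the configured 0.9 confidence.
var unreachable = computer.Compute(Array.Empty<SlicePathSummary>(), unknownEdgeCount: 0);
// unreachable.Status == SliceVerdictStatus.Unreachable

// A low-confidence path (0.4) plus unresolved edges => unknown; reasons include
// "low_confidence_path" and "unknown_edges:2".
var unknown = computer.Compute(
    new[] { new SlicePathSummary("p2", 0.4, null) },
    unknownEdgeCount: 2);
// unknown.Status == SliceVerdictStatus.Unknown, unknown.Confidence == 0.4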
@@ -10,6 +10,7 @@
    <PackageReference Include="Npgsql" Version="9.0.3" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" />
    <ProjectReference Include="..\StellaOps.Scanner.Cache\StellaOps.Scanner.Cache.csproj" />
    <ProjectReference Include="..\StellaOps.Scanner.ProofSpine\StellaOps.Scanner.ProofSpine.csproj" />
    <ProjectReference Include="..\StellaOps.Scanner.Surface.Env\StellaOps.Scanner.Surface.Env.csproj" />
@@ -17,6 +18,7 @@
    <ProjectReference Include="..\..\StellaOps.Scanner.Analyzers.Native\StellaOps.Scanner.Analyzers.Native.csproj" />
    <ProjectReference Include="..\..\..\Attestor\StellaOps.Attestor\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj" />
    <ProjectReference Include="..\..\..\Attestor\StellaOps.Attestor.Envelope\StellaOps.Attestor.Envelope.csproj" />
    <ProjectReference Include="..\..\..\Attestor\__Libraries\StellaOps.Attestor.ProofChain\StellaOps.Attestor.ProofChain.csproj" />
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.Replay.Core\StellaOps.Replay.Core.csproj" />
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
  </ItemGroup>

@@ -0,0 +1,401 @@
|
||||
using System.Collections.Immutable;
|
||||
using StellaOps.Scanner.Reachability.Gates;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Subgraph;
|
||||
|
||||
public sealed record ReachabilitySubgraphRequest(
|
||||
RichGraph Graph,
|
||||
ImmutableArray<string> FindingKeys,
|
||||
ImmutableArray<string> TargetSymbols,
|
||||
ImmutableArray<string> Entrypoints,
|
||||
string? AnalyzerName = null,
|
||||
string? AnalyzerVersion = null,
|
||||
double Confidence = 0.9,
|
||||
string Completeness = "partial");
|
||||
|
||||
/// <summary>
|
||||
/// Extracts a focused subgraph from the full reachability graph.
|
||||
/// </summary>
|
||||
public sealed class ReachabilitySubgraphExtractor
|
||||
{
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public ReachabilitySubgraphExtractor(TimeProvider? timeProvider = null)
|
||||
{
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
public ReachabilitySubgraph Extract(ReachabilitySubgraphRequest request)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
ArgumentNullException.ThrowIfNull(request.Graph);
|
||||
|
||||
var graph = request.Graph;
|
||||
var nodeLookup = graph.Nodes.ToDictionary(n => n.Id, StringComparer.Ordinal);
|
||||
var entrypoints = ResolveEntrypoints(request, graph, nodeLookup);
|
||||
var targets = ResolveTargets(request, graph, nodeLookup);
|
||||
|
||||
if (entrypoints.Count == 0 || targets.Count == 0)
|
||||
{
|
||||
return BuildEmptySubgraph(request).Normalize();
|
||||
}
|
||||
|
||||
var forwardEdges = BuildEdgeLookup(graph.Edges);
|
||||
var reverseEdges = BuildReverseEdgeLookup(graph.Edges);
|
||||
|
||||
var reachableFromEntrypoints = Traverse(entrypoints, forwardEdges);
|
||||
var canReachTargets = Traverse(targets, reverseEdges);
|
||||
|
||||
var includedNodes = new HashSet<string>(reachableFromEntrypoints, StringComparer.Ordinal);
|
||||
includedNodes.IntersectWith(canReachTargets);
|
||||
foreach (var entry in entrypoints)
|
||||
{
|
||||
includedNodes.Add(entry);
|
||||
}
|
||||
foreach (var target in targets)
|
||||
{
|
||||
includedNodes.Add(target);
|
||||
}
|
||||
|
||||
var subgraphEdges = graph.Edges
|
||||
.Where(e => includedNodes.Contains(e.From) && includedNodes.Contains(e.To))
|
||||
.Where(e => reachableFromEntrypoints.Contains(e.From) && canReachTargets.Contains(e.To))
|
||||
.ToList();
|
||||
|
||||
var subgraphNodes = includedNodes
|
||||
.Where(nodeLookup.ContainsKey)
|
||||
.Select(id => nodeLookup[id])
|
||||
.ToList();
|
||||
|
||||
var nodes = subgraphNodes
|
||||
.Select(node => MapNode(node, entrypoints, targets))
|
||||
.ToImmutableArray();
|
||||
|
||||
var edges = subgraphEdges
|
||||
.Select(MapEdge)
|
||||
.ToImmutableArray();
|
||||
|
||||
return new ReachabilitySubgraph
|
||||
{
|
||||
FindingKeys = request.FindingKeys,
|
||||
Nodes = nodes,
|
||||
Edges = edges,
|
||||
AnalysisMetadata = BuildMetadata(request, graph)
|
||||
}.Normalize();
|
||||
}
|
||||
|
||||
private ReachabilitySubgraph BuildEmptySubgraph(ReachabilitySubgraphRequest request)
|
||||
{
|
||||
return new ReachabilitySubgraph
|
||||
{
|
||||
FindingKeys = request.FindingKeys,
|
||||
Nodes = [],
|
||||
Edges = [],
|
||||
AnalysisMetadata = BuildMetadata(request, request.Graph)
|
||||
};
|
||||
}
|
||||
|
||||
private ReachabilitySubgraphMetadata BuildMetadata(ReachabilitySubgraphRequest request, RichGraph graph)
|
||||
{
|
||||
var analyzerName = request.AnalyzerName ?? graph.Analyzer.Name;
|
||||
var analyzerVersion = request.AnalyzerVersion ?? graph.Analyzer.Version;
|
||||
return new ReachabilitySubgraphMetadata
|
||||
{
|
||||
Analyzer = string.IsNullOrWhiteSpace(analyzerName) ? "reachability" : analyzerName,
|
||||
AnalyzerVersion = string.IsNullOrWhiteSpace(analyzerVersion) ? "unknown" : analyzerVersion,
|
||||
Confidence = Math.Clamp(request.Confidence, 0.0, 1.0),
|
||||
Completeness = string.IsNullOrWhiteSpace(request.Completeness) ? "partial" : request.Completeness,
|
||||
GeneratedAt = _timeProvider.GetUtcNow()
|
||||
};
|
||||
}
|
||||
|
||||
private static HashSet<string> ResolveEntrypoints(
|
||||
ReachabilitySubgraphRequest request,
|
||||
RichGraph graph,
|
||||
Dictionary<string, RichGraphNode> nodeLookup)
|
||||
{
|
||||
var entrypoints = new HashSet<string>(StringComparer.Ordinal);
|
||||
|
||||
if (!request.Entrypoints.IsDefaultOrEmpty)
|
||||
{
|
||||
foreach (var entry in request.Entrypoints)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(entry))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var trimmed = entry.Trim();
|
||||
if (nodeLookup.ContainsKey(trimmed))
|
||||
{
|
||||
entrypoints.Add(trimmed);
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
foreach (var root in graph.Roots ?? Array.Empty<RichGraphRoot>())
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(root.Id))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var trimmed = root.Id.Trim();
|
||||
if (nodeLookup.ContainsKey(trimmed))
|
||||
{
|
||||
entrypoints.Add(trimmed);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return entrypoints;
|
||||
}
|
||||
|
||||
private static HashSet<string> ResolveTargets(
|
||||
ReachabilitySubgraphRequest request,
|
||||
RichGraph graph,
|
||||
Dictionary<string, RichGraphNode> nodeLookup)
|
||||
{
|
||||
var targets = new HashSet<string>(StringComparer.Ordinal);
|
||||
|
||||
if (request.TargetSymbols.IsDefaultOrEmpty)
|
||||
{
|
||||
return targets;
|
||||
}
|
||||
|
||||
foreach (var target in request.TargetSymbols)
|
||||
{
|
||||
if (string.IsNullOrWhiteSpace(target))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
var trimmed = target.Trim();
|
||||
if (IsPackageTarget(trimmed))
|
||||
{
|
||||
foreach (var node in graph.Nodes.Where(n => string.Equals(n.Purl, trimmed, StringComparison.OrdinalIgnoreCase)))
|
||||
{
|
||||
if (!string.IsNullOrWhiteSpace(node.Id))
|
||||
{
|
||||
targets.Add(node.Id);
|
||||
}
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
foreach (var node in graph.Nodes)
|
||||
{
|
||||
if (string.Equals(node.Id, trimmed, StringComparison.Ordinal) ||
|
||||
string.Equals(node.SymbolId, trimmed, StringComparison.Ordinal))
|
||||
{
|
||||
targets.Add(node.Id);
|
||||
}
|
||||
else if (!string.IsNullOrWhiteSpace(node.Display) &&
|
||||
string.Equals(node.Display, trimmed, StringComparison.Ordinal))
|
||||
{
|
||||
targets.Add(node.Id);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return targets;
|
||||
}
|
||||
|
||||
private static bool IsPackageTarget(string value)
|
||||
=> value.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase);
|
||||
|
||||
private static Dictionary<string, List<RichGraphEdge>> BuildEdgeLookup(IReadOnlyList<RichGraphEdge> edges)
|
||||
{
|
||||
var lookup = new Dictionary<string, List<RichGraphEdge>>(StringComparer.Ordinal);
|
||||
foreach (var edge in edges ?? Array.Empty<RichGraphEdge>())
|
||||
{
|
||||
if (!lookup.TryGetValue(edge.From, out var list))
|
||||
{
|
||||
list = new List<RichGraphEdge>();
|
||||
lookup[edge.From] = list;
|
||||
}
|
||||
|
||||
list.Add(edge);
|
||||
}
|
||||
|
||||
foreach (var list in lookup.Values)
|
||||
{
|
||||
list.Sort(CompareForward);
|
||||
}
|
||||
|
||||
return lookup;
|
||||
}
|
||||
|
||||
private static Dictionary<string, List<RichGraphEdge>> BuildReverseEdgeLookup(IReadOnlyList<RichGraphEdge> edges)
|
||||
{
|
||||
var lookup = new Dictionary<string, List<RichGraphEdge>>(StringComparer.Ordinal);
|
||||
foreach (var edge in edges ?? Array.Empty<RichGraphEdge>())
|
||||
{
|
||||
if (!lookup.TryGetValue(edge.To, out var list))
|
||||
{
|
||||
list = new List<RichGraphEdge>();
|
||||
lookup[edge.To] = list;
|
||||
}
|
||||
|
||||
list.Add(edge);
|
||||
}
|
||||
|
||||
foreach (var list in lookup.Values)
|
||||
{
|
||||
list.Sort(CompareReverse);
|
||||
}
|
||||
|
||||
return lookup;
|
||||
}
|
||||
|
||||
private static HashSet<string> Traverse(
|
||||
HashSet<string> seeds,
|
||||
Dictionary<string, List<RichGraphEdge>> edgeLookup)
|
||||
{
|
||||
var visited = new HashSet<string>(seeds, StringComparer.Ordinal);
|
||||
var queue = new Queue<string>(seeds);
|
||||
|
||||
while (queue.Count > 0)
|
||||
{
|
||||
var current = queue.Dequeue();
|
||||
if (!edgeLookup.TryGetValue(current, out var edges))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
foreach (var edge in edges)
|
||||
{
|
||||
var next = edge.From == current ? edge.To : edge.From;
|
||||
if (!visited.Add(next))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
queue.Enqueue(next);
|
||||
}
|
||||
}
|
||||
|
||||
return visited;
|
||||
}
|
||||
|
||||
private static ReachabilitySubgraphNode MapNode(
|
||||
RichGraphNode node,
|
||||
HashSet<string> entrypoints,
|
||||
HashSet<string> targets)
|
||||
{
|
||||
var type = ReachabilitySubgraphNodeType.Call;
|
||||
if (entrypoints.Contains(node.Id))
|
||||
{
|
||||
type = ReachabilitySubgraphNodeType.Entrypoint;
|
||||
}
|
||||
else if (targets.Contains(node.Id))
|
||||
{
|
||||
type = ReachabilitySubgraphNodeType.Vulnerable;
|
||||
}
|
||||
|
||||
return new ReachabilitySubgraphNode
|
||||
{
|
||||
Id = node.Id,
|
||||
Symbol = node.Display ?? node.SymbolId ?? node.Id,
|
||||
Type = type,
|
||||
File = ExtractAttribute(node, "file") ?? ExtractAttribute(node, "source_file"),
|
||||
Line = ExtractIntAttribute(node, "line"),
|
||||
Purl = node.Purl,
|
||||
Attributes = node.Attributes
|
||||
};
|
||||
}
|
||||
|
||||
private static ReachabilitySubgraphEdge MapEdge(RichGraphEdge edge)
|
||||
{
|
||||
return new ReachabilitySubgraphEdge
|
||||
{
|
||||
From = edge.From,
|
||||
To = edge.To,
|
||||
Type = string.IsNullOrWhiteSpace(edge.Kind) ? "call" : edge.Kind,
|
||||
Confidence = edge.Confidence,
|
||||
Evidence = edge.Evidence?.FirstOrDefault(),
|
||||
Gate = MapGate(edge.Gates)
|
||||
};
|
||||
}
|
||||
|
||||
private static ReachabilitySubgraphGate? MapGate(IReadOnlyList<DetectedGate>? gates)
|
||||
{
|
||||
if (gates is null || gates.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var gate = gates
|
||||
.OrderByDescending(g => g.Confidence)
|
||||
.ThenBy(g => g.Detail, StringComparer.Ordinal)
|
||||
.First();
|
||||
|
||||
return new ReachabilitySubgraphGate
|
||||
{
|
||||
GateType = ReachabilityGateMappings.ToGateTypeString(gate.Type),
|
||||
Condition = gate.Detail,
|
||||
GuardSymbol = gate.GuardSymbol,
|
||||
Confidence = gate.Confidence,
|
||||
SourceFile = gate.SourceFile,
|
||||
Line = gate.LineNumber,
|
||||
DetectionMethod = gate.DetectionMethod
|
||||
};
|
||||
}
|
||||
|
||||
private static int CompareForward(RichGraphEdge left, RichGraphEdge right)
|
||||
{
|
||||
var result = string.Compare(left.To, right.To, StringComparison.Ordinal);
|
||||
if (result != 0)
|
||||
{
|
||||
return result;
|
||||
}
|
||||
|
||||
result = string.Compare(left.Kind, right.Kind, StringComparison.Ordinal);
|
||||
if (result != 0)
|
||||
{
|
||||
return result;
|
||||
}
|
||||
|
||||
return left.Confidence.CompareTo(right.Confidence);
|
||||
}
|
||||
|
||||
private static int CompareReverse(RichGraphEdge left, RichGraphEdge right)
|
||||
{
|
||||
var result = string.Compare(left.From, right.From, StringComparison.Ordinal);
|
||||
if (result != 0)
|
||||
{
|
||||
return result;
|
||||
}
|
||||
|
||||
result = string.Compare(left.Kind, right.Kind, StringComparison.Ordinal);
|
||||
if (result != 0)
|
||||
{
|
||||
return result;
|
||||
}
|
||||
|
||||
return left.Confidence.CompareTo(right.Confidence);
|
||||
}
|
||||
|
||||
private static string? ExtractAttribute(RichGraphNode node, string key)
|
||||
{
|
||||
if (node.Attributes is not null && node.Attributes.TryGetValue(key, out var value))
|
||||
{
|
||||
return value;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private static int? ExtractIntAttribute(RichGraphNode node, string key)
|
||||
{
|
||||
var value = ExtractAttribute(node, key);
|
||||
if (value is not null && int.TryParse(value, out var parsed))
|
||||
{
|
||||
return parsed;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
}
|
||||
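The pruning rule above is simply the intersection of a forward and a reverse traversal; the following self-contained sketch restates it over plain string edges (the type names here are illustrative and not the RichGraph model used by the extractor):

using System;
using System.Collections.Generic;

public static class SubgraphPruningSketch
{
    // Keep a node only if it is forward-reachable from an entrypoint AND can reach a target,
    // i.e. the intersection of a forward BFS and a reverse BFS.
    public static HashSet<string> PruneToWitnessNodes(
        IEnumerable<(string From, string To)> edges,
        IEnumerable<string> entrypoints,
        IEnumerable<string> targets)
    {
        var forward = new Dictionary<string, List<string>>(StringComparer.Ordinal);
        var reverse = new Dictionary<string, List<string>>(StringComparer.Ordinal);
        foreach (var (from, to) in edges)
        {
            if (!forward.TryGetValue(from, out var f)) { forward[from] = f = new List<string>(); }
            f.Add(to);
            if (!reverse.TryGetValue(to, out var r)) { reverse[to] = r = new List<string>(); }
            r.Add(from);
        }

        static HashSet<string> Bfs(IEnumerable<string> seeds, Dictionary<string, List<string>> adjacency)
        {
            var visited = new HashSet<string>(seeds, StringComparer.Ordinal);
            var queue = new Queue<string>(visited);
            while (queue.Count > 0)
            {
                if (!adjacency.TryGetValue(queue.Dequeue(), out var next)) { continue; }
                foreach (var node in next)
                {
                    if (visited.Add(node)) { queue.Enqueue(node); }
                }
            }
            return visited;
        }

        var survivors = Bfs(entrypoints, forward);
        survivors.IntersectWith(Bfs(targets, reverse));
        return survivors; // e.g. edges A->B, B->C, B->X with entrypoint A and target C keep {A, B, C} and drop X
    }
}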
@@ -0,0 +1,272 @@
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Scanner.Reachability.Gates;
|
||||
|
||||
namespace StellaOps.Scanner.Reachability.Subgraph;
|
||||
|
||||
/// <summary>
|
||||
/// Portable reachability subgraph representation.
|
||||
/// </summary>
|
||||
public sealed record ReachabilitySubgraph
|
||||
{
|
||||
[JsonPropertyName("version")]
|
||||
public string Version { get; init; } = "1.0";
|
||||
|
||||
[JsonPropertyName("findingKeys")]
|
||||
public ImmutableArray<string> FindingKeys { get; init; } = [];
|
||||
|
||||
[JsonPropertyName("nodes")]
|
||||
public ImmutableArray<ReachabilitySubgraphNode> Nodes { get; init; } = [];
|
||||
|
||||
[JsonPropertyName("edges")]
|
||||
public ImmutableArray<ReachabilitySubgraphEdge> Edges { get; init; } = [];
|
||||
|
||||
[JsonPropertyName("analysisMetadata")]
|
||||
public ReachabilitySubgraphMetadata? AnalysisMetadata { get; init; }
|
||||
|
||||
public ReachabilitySubgraph Normalize() => ReachabilitySubgraphNormalizer.Normalize(this);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Subgraph node.
|
||||
/// </summary>
|
||||
public sealed record ReachabilitySubgraphNode
|
||||
{
|
||||
[JsonPropertyName("id")]
|
||||
public required string Id { get; init; }
|
||||
|
||||
[JsonPropertyName("type")]
|
||||
[JsonConverter(typeof(JsonStringEnumConverter<ReachabilitySubgraphNodeType>))]
|
||||
public required ReachabilitySubgraphNodeType Type { get; init; }
|
||||
|
||||
[JsonPropertyName("symbol")]
|
||||
public required string Symbol { get; init; }
|
||||
|
||||
[JsonPropertyName("file")]
|
||||
public string? File { get; init; }
|
||||
|
||||
[JsonPropertyName("line")]
|
||||
public int? Line { get; init; }
|
||||
|
||||
[JsonPropertyName("purl")]
|
||||
public string? Purl { get; init; }
|
||||
|
||||
[JsonPropertyName("attributes")]
|
||||
public IReadOnlyDictionary<string, string>? Attributes { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Subgraph node type.
|
||||
/// </summary>
|
||||
[JsonConverter(typeof(JsonStringEnumConverter<ReachabilitySubgraphNodeType>))]
|
||||
public enum ReachabilitySubgraphNodeType
|
||||
{
|
||||
[JsonStringEnumMemberName("entrypoint")]
|
||||
Entrypoint,
|
||||
|
||||
[JsonStringEnumMemberName("call")]
|
||||
Call,
|
||||
|
||||
[JsonStringEnumMemberName("vulnerable")]
|
||||
Vulnerable,
|
||||
|
||||
[JsonStringEnumMemberName("unknown")]
|
||||
Unknown
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Subgraph edge.
|
||||
/// </summary>
|
||||
public sealed record ReachabilitySubgraphEdge
|
||||
{
|
||||
[JsonPropertyName("from")]
|
||||
public required string From { get; init; }
|
||||
|
||||
[JsonPropertyName("to")]
|
||||
public required string To { get; init; }
|
||||
|
||||
[JsonPropertyName("type")]
|
||||
public required string Type { get; init; }
|
||||
|
||||
[JsonPropertyName("confidence")]
|
||||
public double Confidence { get; init; }
|
||||
|
||||
[JsonPropertyName("evidence")]
|
||||
public string? Evidence { get; init; }
|
||||
|
||||
[JsonPropertyName("gate")]
|
||||
public ReachabilitySubgraphGate? Gate { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gate metadata associated with a subgraph edge.
|
||||
/// </summary>
|
||||
public sealed record ReachabilitySubgraphGate
|
||||
{
|
||||
[JsonPropertyName("gateType")]
|
||||
public required string GateType { get; init; }
|
||||
|
||||
[JsonPropertyName("condition")]
|
||||
public required string Condition { get; init; }
|
||||
|
||||
[JsonPropertyName("guardSymbol")]
|
||||
public required string GuardSymbol { get; init; }
|
||||
|
||||
[JsonPropertyName("confidence")]
|
||||
public required double Confidence { get; init; }
|
||||
|
||||
[JsonPropertyName("sourceFile")]
|
||||
public string? SourceFile { get; init; }
|
||||
|
||||
[JsonPropertyName("line")]
|
||||
public int? Line { get; init; }
|
||||
|
||||
[JsonPropertyName("detectionMethod")]
|
||||
public string? DetectionMethod { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Metadata about the subgraph extraction.
|
||||
/// </summary>
|
||||
public sealed record ReachabilitySubgraphMetadata
|
||||
{
|
||||
[JsonPropertyName("analyzer")]
|
||||
public required string Analyzer { get; init; }
|
||||
|
||||
[JsonPropertyName("analyzerVersion")]
|
||||
public required string AnalyzerVersion { get; init; }
|
||||
|
||||
[JsonPropertyName("confidence")]
|
||||
public required double Confidence { get; init; }
|
||||
|
||||
[JsonPropertyName("completeness")]
|
||||
public required string Completeness { get; init; }
|
||||
|
||||
[JsonPropertyName("generatedAt")]
|
||||
public required DateTimeOffset GeneratedAt { get; init; }
|
||||
}
|
||||
|
||||
internal static class ReachabilitySubgraphNormalizer
|
||||
{
|
||||
public static ReachabilitySubgraph Normalize(ReachabilitySubgraph subgraph)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(subgraph);
|
||||
|
||||
var nodes = subgraph.Nodes
|
||||
.Where(n => n is not null)
|
||||
.Select(Normalize)
|
||||
.OrderBy(n => n.Id, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
var edges = subgraph.Edges
|
||||
.Where(e => e is not null)
|
||||
.Select(Normalize)
|
||||
.OrderBy(e => e.From, StringComparer.Ordinal)
|
||||
.ThenBy(e => e.To, StringComparer.Ordinal)
|
||||
.ThenBy(e => e.Type, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
var findingKeys = subgraph.FindingKeys
|
||||
.Where(k => !string.IsNullOrWhiteSpace(k))
|
||||
.Select(k => k.Trim())
|
||||
.Distinct(StringComparer.Ordinal)
|
||||
.OrderBy(k => k, StringComparer.Ordinal)
|
||||
.ToImmutableArray();
|
||||
|
||||
return subgraph with
|
||||
{
|
||||
Version = string.IsNullOrWhiteSpace(subgraph.Version) ? "1.0" : subgraph.Version.Trim(),
|
||||
FindingKeys = findingKeys,
|
||||
Nodes = nodes,
|
||||
Edges = edges,
|
||||
AnalysisMetadata = Normalize(subgraph.AnalysisMetadata)
|
||||
};
|
||||
}
|
||||
|
||||
private static ReachabilitySubgraphNode Normalize(ReachabilitySubgraphNode node)
|
||||
{
|
||||
return node with
|
||||
{
|
||||
Id = node.Id.Trim(),
|
||||
Symbol = node.Symbol.Trim(),
|
||||
File = string.IsNullOrWhiteSpace(node.File) ? null : node.File.Trim(),
|
||||
Purl = string.IsNullOrWhiteSpace(node.Purl) ? null : node.Purl.Trim(),
|
||||
Attributes = NormalizeAttributes(node.Attributes)
|
||||
};
|
||||
}
|
||||
|
||||
private static ReachabilitySubgraphEdge Normalize(ReachabilitySubgraphEdge edge)
|
||||
{
|
||||
return edge with
|
||||
{
|
||||
From = edge.From.Trim(),
|
||||
To = edge.To.Trim(),
|
||||
Type = string.IsNullOrWhiteSpace(edge.Type) ? "call" : edge.Type.Trim(),
|
||||
Confidence = Math.Clamp(edge.Confidence, 0.0, 1.0),
|
||||
Evidence = string.IsNullOrWhiteSpace(edge.Evidence) ? null : edge.Evidence.Trim(),
|
||||
Gate = Normalize(edge.Gate)
|
||||
};
|
||||
}
|
||||
|
||||
private static ReachabilitySubgraphGate? Normalize(ReachabilitySubgraphGate? gate)
|
||||
{
|
||||
if (gate is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return gate with
|
||||
{
|
||||
GateType = gate.GateType.Trim(),
|
||||
Condition = gate.Condition.Trim(),
|
||||
GuardSymbol = gate.GuardSymbol.Trim(),
|
||||
DetectionMethod = string.IsNullOrWhiteSpace(gate.DetectionMethod) ? null : gate.DetectionMethod.Trim(),
|
||||
SourceFile = string.IsNullOrWhiteSpace(gate.SourceFile) ? null : gate.SourceFile.Trim(),
|
||||
Confidence = Math.Clamp(gate.Confidence, 0.0, 1.0)
|
||||
};
|
||||
}
|
||||
|
||||
private static ReachabilitySubgraphMetadata? Normalize(ReachabilitySubgraphMetadata? metadata)
|
||||
{
|
||||
if (metadata is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return metadata with
|
||||
{
|
||||
Analyzer = metadata.Analyzer.Trim(),
|
||||
AnalyzerVersion = metadata.AnalyzerVersion.Trim(),
|
||||
Completeness = metadata.Completeness.Trim(),
|
||||
Confidence = Math.Clamp(metadata.Confidence, 0.0, 1.0),
|
||||
GeneratedAt = metadata.GeneratedAt.ToUniversalTime()
|
||||
};
|
||||
}
|
||||
|
||||
private static IReadOnlyDictionary<string, string>? NormalizeAttributes(IReadOnlyDictionary<string, string>? attributes)
|
||||
{
|
||||
if (attributes is null || attributes.Count == 0)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
return attributes
|
||||
.Where(kv => !string.IsNullOrWhiteSpace(kv.Key) && kv.Value is not null)
|
||||
.ToImmutableSortedDictionary(
|
||||
kv => kv.Key.Trim(),
|
||||
kv => kv.Value.Trim(),
|
||||
StringComparer.Ordinal);
|
||||
}
|
||||
}
|
||||
|
||||
internal static class ReachabilityGateMappings
|
||||
{
|
||||
public static string ToGateTypeString(GateType type) => type switch
|
||||
{
|
||||
GateType.AuthRequired => "auth",
|
||||
GateType.FeatureFlag => "feature_flag",
|
||||
GateType.AdminOnly => "admin_only",
|
||||
GateType.NonDefaultConfig => "non_default_config",
|
||||
_ => "unknown"
|
||||
};
|
||||
}
|
||||
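Serialized with the property names above, a single gated edge would look roughly like this (values illustrative; the `gateType` strings come from `ReachabilityGateMappings`):

{
  "from": "n_handler",
  "to": "n_vulnerable",
  "type": "call",
  "confidence": 0.75,
  "evidence": "static-call",
  "gate": {
    "gateType": "feature_flag",
    "condition": "features.legacy_import == true",
    "guardSymbol": "FeatureGate.IsEnabled",
    "confidence": 0.8,
    "sourceFile": "src/Import/LegacyImporter.cs",
    "line": 42,
    "detectionMethod": "pattern"
  }
}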
35 src/Scanner/__Libraries/StellaOps.Scanner.Runtime/AGENTS.md Normal file
@@ -0,0 +1,35 @@
# AGENTS - Scanner Runtime Library

## Mission
Capture and normalize runtime trace evidence (eBPF/ETW) and merge it with static reachability graphs to produce observed-path evidence.

## Roles
- Backend engineer (.NET 10, C# preview).
- QA engineer (deterministic tests; offline fixtures).

## Required Reading
- `docs/README.md`
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/modules/scanner/architecture.md`
- `docs/modules/zastava/architecture.md`
- `docs/reachability/runtime-facts.md`
- `docs/reachability/runtime-static-union-schema.md`

## Working Directory & Boundaries
- Primary scope: `src/Scanner/__Libraries/StellaOps.Scanner.Runtime/`
- Tests: `src/Scanner/__Tests/StellaOps.Scanner.Runtime.Tests/`
- Avoid cross-module edits unless explicitly noted in the sprint.

## Determinism & Offline Rules
- Normalize timestamps to UTC; stable ordering of events and edges.
- Offline-first; no network access in collectors or ingestion.
- Prefer configuration-driven retention policies with deterministic pruning.

## Testing Expectations
- Unit tests for ingestion, merge, and retention logic.
- Use deterministic fixtures (fixed timestamps and IDs).

## Workflow
- Update sprint status on task transitions.
- Log design/decision changes in sprint Execution Log.
@@ -0,0 +1,150 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using System.Runtime.InteropServices;
|
||||
|
||||
namespace StellaOps.Scanner.Runtime.Ebpf;
|
||||
|
||||
/// <summary>
|
||||
/// eBPF-based trace collector for Linux using uprobe tracing.
|
||||
/// </summary>
|
||||
public sealed class EbpfTraceCollector : ITraceCollector
|
||||
{
|
||||
private readonly ILogger<EbpfTraceCollector> _logger;
|
||||
private readonly ISymbolResolver _symbolResolver;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private bool _isRunning;
|
||||
private TraceCollectorStats _stats = new TraceCollectorStats
|
||||
{
|
||||
EventsCollected = 0,
|
||||
EventsDropped = 0,
|
||||
BytesProcessed = 0,
|
||||
StartedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
public EbpfTraceCollector(
|
||||
ILogger<EbpfTraceCollector> logger,
|
||||
ISymbolResolver symbolResolver,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_symbolResolver = symbolResolver ?? throw new ArgumentNullException(nameof(symbolResolver));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
public Task StartAsync(TraceCollectorConfig config, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(config);
|
||||
|
||||
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Linux))
|
||||
{
|
||||
throw new PlatformNotSupportedException("eBPF tracing is only supported on Linux");
|
||||
}
|
||||
|
||||
if (_isRunning)
|
||||
{
|
||||
throw new InvalidOperationException("Collector is already running");
|
||||
}
|
||||
|
||||
_logger.LogInformation(
|
||||
"Starting eBPF trace collector for PID {Pid}, container {Container}",
|
||||
config.TargetPid,
|
||||
config.TargetContainerId ?? "all");
|
||||
|
||||
// TODO: Actual eBPF program loading and uprobe attachment
|
||||
// This would use libbpf or bpf2go to:
|
||||
// 1. Load BPF program into kernel
|
||||
// 2. Attach uprobes to target functions
|
||||
// 3. Set up ringbuffer for event streaming
|
||||
// 4. Handle ASLR via /proc/pid/maps
|
||||
|
||||
_isRunning = true;
|
||||
_stats = _stats with { StartedAt = _timeProvider.GetUtcNow() };
|
||||
|
||||
_logger.LogInformation("eBPF trace collector started successfully");
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task StopAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (!_isRunning)
|
||||
{
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
_logger.LogInformation("Stopping eBPF trace collector");
|
||||
|
||||
// TODO: Detach uprobes and cleanup BPF resources
|
||||
|
||||
_isRunning = false;
|
||||
_stats = _stats with { Duration = _timeProvider.GetUtcNow() - _stats.StartedAt };
|
||||
|
||||
_logger.LogInformation(
|
||||
"eBPF trace collector stopped. Events: {Events}, Dropped: {Dropped}",
|
||||
_stats.EventsCollected,
|
||||
_stats.EventsDropped);
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public async IAsyncEnumerable<RuntimeCallEvent> GetEventsAsync(
|
||||
[System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (!_isRunning)
|
||||
{
|
||||
yield break;
|
||||
}
|
||||
|
||||
// TODO: Read events from eBPF ringbuffer
|
||||
// This is a placeholder - actual implementation would:
|
||||
// 1. Poll ringbuffer for events
|
||||
// 2. Resolve symbols using /proc/kallsyms and binary debug info
|
||||
// 3. Handle container namespace awareness
|
||||
// 4. Apply rate limiting
|
||||
|
||||
await Task.Delay(100, cancellationToken).ConfigureAwait(false);
|
||||
yield break;
|
||||
}
|
||||
|
||||
public TraceCollectorStats GetStatistics() => _stats;
|
||||
|
||||
public async ValueTask DisposeAsync()
|
||||
{
|
||||
await StopAsync().ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Symbol resolver for eBPF events.
|
||||
/// </summary>
|
||||
public interface ISymbolResolver
|
||||
{
|
||||
Task<string> ResolveSymbolAsync(uint pid, ulong address, CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Symbol resolver implementation using /proc and binary debug info.
|
||||
/// </summary>
|
||||
public sealed class LinuxSymbolResolver : ISymbolResolver
|
||||
{
|
||||
private readonly ILogger<LinuxSymbolResolver> _logger;
|
||||
|
||||
public LinuxSymbolResolver(ILogger<LinuxSymbolResolver> logger)
|
||||
{
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public async Task<string> ResolveSymbolAsync(
|
||||
uint pid,
|
||||
ulong address,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
// TODO: Actual symbol resolution:
|
||||
// 1. Read /proc/{pid}/maps to find binary containing address
|
||||
// 2. Adjust for ASLR offset
|
||||
// 3. Use libdwarf or addr2line to resolve symbol
|
||||
// 4. Cache results for performance
|
||||
|
||||
await Task.Delay(1, cancellationToken).ConfigureAwait(false);
|
||||
return $"func_0x{address:x}";
|
||||
}
|
||||
}
|
||||
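The ASLR step referenced in the TODOs can be sketched with nothing more than `/proc/<pid>/maps`; the helper below is an assumption of how that translation might look, not part of this change:

using System;
using System.Globalization;
using System.IO;

public static class ProcMapsSketch
{
    // Hypothetical helper: map a runtime address observed by the collector back to a
    // file-relative offset that addr2line or similar tooling can resolve against the
    // on-disk binary. Assumes the standard /proc/<pid>/maps line format:
    // "start-end perms offset dev inode path".
    public static (string BinaryPath, ulong FileOffset)? TranslateAddress(uint pid, ulong address)
    {
        foreach (var line in File.ReadLines($"/proc/{pid}/maps"))
        {
            var parts = line.Split(' ', StringSplitOptions.RemoveEmptyEntries);
            if (parts.Length < 6)
            {
                continue; // anonymous mapping: nothing to resolve against on disk
            }

            var range = parts[0].Split('-');
            var start = ulong.Parse(range[0], NumberStyles.HexNumber);
            var end = ulong.Parse(range[1], NumberStyles.HexNumber);
            if (address < start || address >= end)
            {
                continue;
            }

            var mappingOffset = ulong.Parse(parts[2], NumberStyles.HexNumber);
            return (parts[5], address - start + mappingOffset); // undo the ASLR slide
        }

        return null; // address not backed by a file mapping
    }
}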
@@ -0,0 +1,112 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using System.Runtime.InteropServices;
|
||||
|
||||
namespace StellaOps.Scanner.Runtime.Etw;
|
||||
|
||||
/// <summary>
|
||||
/// ETW-based trace collector for Windows.
|
||||
/// </summary>
|
||||
public sealed class EtwTraceCollector : ITraceCollector
|
||||
{
|
||||
private readonly ILogger<EtwTraceCollector> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private bool _isRunning;
|
||||
private TraceCollectorStats _stats = new TraceCollectorStats
|
||||
{
|
||||
EventsCollected = 0,
|
||||
EventsDropped = 0,
|
||||
BytesProcessed = 0,
|
||||
StartedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
public EtwTraceCollector(
|
||||
ILogger<EtwTraceCollector> logger,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
public Task StartAsync(TraceCollectorConfig config, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(config);
|
||||
|
||||
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
|
||||
{
|
||||
throw new PlatformNotSupportedException("ETW tracing is only supported on Windows");
|
||||
}
|
||||
|
||||
if (_isRunning)
|
||||
{
|
||||
throw new InvalidOperationException("Collector is already running");
|
||||
}
|
||||
|
||||
_logger.LogInformation(
|
||||
"Starting ETW trace collector for PID {Pid}",
|
||||
config.TargetPid);
|
||||
|
||||
// TODO: Actual ETW session setup
|
||||
// This would use TraceEvent or Microsoft.Diagnostics.Tracing.TraceEvent to:
|
||||
// 1. Create ETW session
|
||||
// 2. Subscribe to Microsoft-Windows-DotNETRuntime provider
|
||||
// 3. Subscribe to native call events
|
||||
// 4. Enable stack walking
|
||||
// 5. Filter by process ID
|
||||
|
||||
_isRunning = true;
|
||||
_stats = _stats with { StartedAt = _timeProvider.GetUtcNow() };
|
||||
|
||||
_logger.LogInformation("ETW trace collector started successfully");
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public Task StopAsync(CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (!_isRunning)
|
||||
{
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
_logger.LogInformation("Stopping ETW trace collector");
|
||||
|
||||
// TODO: Stop ETW session and cleanup
|
||||
|
||||
_isRunning = false;
|
||||
_stats = _stats with { Duration = _timeProvider.GetUtcNow() - _stats.StartedAt };
|
||||
|
||||
_logger.LogInformation(
|
||||
"ETW trace collector stopped. Events: {Events}, Dropped: {Dropped}",
|
||||
_stats.EventsCollected,
|
||||
_stats.EventsDropped);
|
||||
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
public async IAsyncEnumerable<RuntimeCallEvent> GetEventsAsync(
|
||||
[System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
|
||||
{
|
||||
if (!_isRunning)
|
||||
{
|
||||
yield break;
|
||||
}
|
||||
|
||||
// TODO: Process ETW events
|
||||
// This is a placeholder - actual implementation would:
|
||||
// 1. Subscribe to ETW event stream
|
||||
// 2. Process CLR and native method events
|
||||
// 3. Resolve symbols using DbgHelp
|
||||
// 4. Correlate stack traces
|
||||
// 5. Apply rate limiting
|
||||
|
||||
await Task.Delay(100, cancellationToken).ConfigureAwait(false);
|
||||
yield break;
|
||||
}
|
||||
|
||||
public TraceCollectorStats GetStatistics() => _stats;
|
||||
|
||||
public async ValueTask DisposeAsync()
|
||||
{
|
||||
await StopAsync().ConfigureAwait(false);
|
||||
}
|
||||
}
|
||||
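Both collectors leave rate limiting as a TODO against `TraceCollectorConfig.MaxEventsPerSecond`; one plausible shape, sketched here as an assumption, is a fixed one-second window counter whose rejections would feed `EventsDropped`:

using System;

internal sealed class EventRateLimiter
{
    private readonly int _maxEventsPerSecond;
    private readonly TimeProvider _timeProvider;
    private long _windowStartTicks;
    private int _countInWindow;

    public EventRateLimiter(int maxEventsPerSecond, TimeProvider? timeProvider = null)
    {
        _maxEventsPerSecond = maxEventsPerSecond;
        _timeProvider = timeProvider ?? TimeProvider.System;
        _windowStartTicks = _timeProvider.GetUtcNow().Ticks;
    }

    /// <summary>Returns true if the event may be emitted, false if it should be dropped.</summary>
    public bool TryAdmit()
    {
        var now = _timeProvider.GetUtcNow().Ticks;
        if (now - _windowStartTicks >= TimeSpan.TicksPerSecond)
        {
            _windowStartTicks = now; // start a fresh one-second window
            _countInWindow = 0;
        }

        if (_countInWindow >= _maxEventsPerSecond)
        {
            return false; // over budget: caller increments EventsDropped
        }

        _countInWindow++;
        return true;
    }
}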
@@ -0,0 +1,136 @@
|
||||
namespace StellaOps.Scanner.Runtime;
|
||||
|
||||
/// <summary>
|
||||
/// Runtime call event captured by trace collector.
|
||||
/// </summary>
|
||||
public sealed record RuntimeCallEvent
|
||||
{
|
||||
/// <summary>
|
||||
/// Nanoseconds since boot (Linux) or UTC timestamp (Windows).
|
||||
/// </summary>
|
||||
public required ulong Timestamp { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Process ID.
|
||||
/// </summary>
|
||||
public required uint Pid { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Thread ID.
|
||||
/// </summary>
|
||||
public required uint Tid { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Caller function address.
|
||||
/// </summary>
|
||||
public required ulong CallerAddress { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Callee function address.
|
||||
/// </summary>
|
||||
public required ulong CalleeAddress { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Resolved caller symbol name.
|
||||
/// </summary>
|
||||
public required string CallerSymbol { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Resolved callee symbol name.
|
||||
/// </summary>
|
||||
public required string CalleeSymbol { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Binary path containing the symbols.
|
||||
/// </summary>
|
||||
public required string BinaryPath { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Container ID if running in container.
|
||||
/// </summary>
|
||||
public string? ContainerId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Stack trace if available.
|
||||
/// </summary>
|
||||
public IReadOnlyList<ulong>? StackTrace { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Configuration for trace collector.
|
||||
/// </summary>
|
||||
public sealed record TraceCollectorConfig
|
||||
{
|
||||
/// <summary>
|
||||
/// Target process ID to trace (0 = all processes).
|
||||
/// </summary>
|
||||
public uint TargetPid { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Target container ID to trace.
|
||||
/// </summary>
|
||||
public string? TargetContainerId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Symbol patterns to trace (glob patterns).
|
||||
/// </summary>
|
||||
public IReadOnlyList<string>? SymbolPatterns { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Binary paths to trace.
|
||||
/// </summary>
|
||||
public IReadOnlyList<string>? BinaryPaths { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Maximum events per second (rate limiting).
|
||||
/// </summary>
|
||||
public int MaxEventsPerSecond { get; init; } = 10_000;
|
||||
|
||||
/// <summary>
|
||||
/// Event buffer size.
|
||||
/// </summary>
|
||||
public int BufferSize { get; init; } = 8192;
|
||||
|
||||
/// <summary>
|
||||
/// Enable stack trace capture.
|
||||
/// </summary>
|
||||
public bool CaptureStackTraces { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Platform-agnostic trace collector interface.
|
||||
/// </summary>
|
||||
public interface ITraceCollector : IAsyncDisposable
|
||||
{
|
||||
/// <summary>
|
||||
/// Start collecting runtime traces.
|
||||
/// </summary>
|
||||
Task StartAsync(TraceCollectorConfig config, CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Stop collecting traces.
|
||||
/// </summary>
|
||||
Task StopAsync(CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get stream of runtime call events.
|
||||
/// </summary>
|
||||
IAsyncEnumerable<RuntimeCallEvent> GetEventsAsync(CancellationToken cancellationToken = default);
|
||||
|
||||
/// <summary>
|
||||
/// Get collector statistics.
|
||||
/// </summary>
|
||||
TraceCollectorStats GetStatistics();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Trace collector statistics.
|
||||
/// </summary>
|
||||
public sealed record TraceCollectorStats
|
||||
{
|
||||
public required long EventsCollected { get; init; }
|
||||
public required long EventsDropped { get; init; }
|
||||
public required long BytesProcessed { get; init; }
|
||||
public required DateTimeOffset StartedAt { get; init; }
|
||||
public TimeSpan? Duration { get; init; }
|
||||
}
|
||||
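A hypothetical consumer of `ITraceCollector` (the helper name and bounded-window policy are illustrative) would drive it like this:

using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.Runtime;

public static class TraceCollectorUsageSketch
{
    // Start a collector, drain its event stream for a bounded window, then stop it.
    // EbpfTraceCollector or EtwTraceCollector would be chosen per operating system.
    public static async Task<List<RuntimeCallEvent>> CollectForAsync(
        ITraceCollector collector, TraceCollectorConfig config, TimeSpan window)
    {
        var events = new List<RuntimeCallEvent>();
        using var cts = new CancellationTokenSource(window);

        await collector.StartAsync(config, cts.Token);
        try
        {
            await foreach (var evt in collector.GetEventsAsync(cts.Token))
            {
                events.Add(evt);
            }
        }
        catch (OperationCanceledException)
        {
            // collection window elapsed
        }
        finally
        {
            await collector.StopAsync(CancellationToken.None);
        }

        return events;
    }
}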
@@ -0,0 +1,74 @@
namespace StellaOps.Scanner.Runtime.Ingestion;

/// <summary>
/// Normalized runtime trace for storage.
/// </summary>
public sealed record NormalizedTrace
{
    public required string TraceId { get; init; }
    public required string ScanId { get; init; }
    public required DateTimeOffset CollectedAt { get; init; }
    public required IReadOnlyList<RuntimeCallEdge> Edges { get; init; }
    public required TraceMetadata Metadata { get; init; }
}

/// <summary>
/// Runtime call edge.
/// </summary>
public sealed record RuntimeCallEdge
{
    public required string From { get; init; }
    public required string To { get; init; }
    public required ulong ObservationCount { get; init; }
    public required DateTimeOffset FirstObserved { get; init; }
    public required DateTimeOffset LastObserved { get; init; }
    public IReadOnlyList<ulong>? StackTraces { get; init; }
}

/// <summary>
/// Trace metadata.
/// </summary>
public sealed record TraceMetadata
{
    public required uint ProcessId { get; init; }
    public required string BinaryPath { get; init; }
    public required TimeSpan Duration { get; init; }
    public required long EventCount { get; init; }
    public string? ContainerId { get; init; }
    public string? CollectorVersion { get; init; }
}

/// <summary>
/// Service for ingesting and storing runtime traces.
/// </summary>
public interface ITraceIngestionService
{
    /// <summary>
    /// Ingest runtime call events and normalize for storage.
    /// </summary>
    Task<NormalizedTrace> IngestAsync(
        IAsyncEnumerable<RuntimeCallEvent> events,
        string scanId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Store normalized trace.
    /// </summary>
    Task<string> StoreAsync(
        NormalizedTrace trace,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Retrieve trace by ID.
    /// </summary>
    Task<NormalizedTrace?> GetTraceAsync(
        string traceId,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Get all traces for a scan.
    /// </summary>
    Task<IReadOnlyList<NormalizedTrace>> GetTracesForScanAsync(
        string scanId,
        CancellationToken cancellationToken = default);
}
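Wiring the pieces together, a caller that owns a collector and this ingestion service might do the following (construction of the collector and the CAS-backed ingestion implementation is assumed to happen in the host's composition root):

using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.Runtime;
using StellaOps.Scanner.Runtime.Ingestion;

public static class TraceCapturePipelineSketch
{
    // Capture runtime call events, fold them into a NormalizedTrace (duplicate caller/callee
    // pairs collapse into one RuntimeCallEdge with an observation count), and persist it.
    // Returns the stored trace ID.
    public static async Task<string> CaptureAndStoreAsync(
        ITraceCollector collector,
        ITraceIngestionService ingestion,
        TraceCollectorConfig config,
        string scanId,
        CancellationToken cancellationToken)
    {
        await collector.StartAsync(config, cancellationToken);
        try
        {
            var trace = await ingestion.IngestAsync(
                collector.GetEventsAsync(cancellationToken), scanId, cancellationToken);
            return await ingestion.StoreAsync(trace, cancellationToken);
        }
        finally
        {
            await collector.StopAsync(CancellationToken.None);
        }
    }
}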
@@ -0,0 +1,187 @@
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Cache.Abstractions;
|
||||
using System.Security.Cryptography;
|
||||
|
||||
namespace StellaOps.Scanner.Runtime.Ingestion;
|
||||
|
||||
/// <summary>
|
||||
/// Service for ingesting runtime traces.
|
||||
/// </summary>
|
||||
public sealed class TraceIngestionService : ITraceIngestionService
|
||||
{
|
||||
private readonly IFileContentAddressableStore _cas;
|
||||
private readonly ILogger<TraceIngestionService> _logger;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
|
||||
public TraceIngestionService(
|
||||
IFileContentAddressableStore cas,
|
||||
ILogger<TraceIngestionService> logger,
|
||||
TimeProvider? timeProvider = null)
|
||||
{
|
||||
_cas = cas ?? throw new ArgumentNullException(nameof(cas));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
}
|
||||
|
||||
public async Task<NormalizedTrace> IngestAsync(
|
||||
IAsyncEnumerable<RuntimeCallEvent> events,
|
||||
string scanId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(events);
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
|
||||
|
||||
var edgeMap = new Dictionary<(string, string), RuntimeCallEdgeBuilder>();
|
||||
var eventCount = 0L;
|
||||
var firstEvent = (DateTimeOffset?)null;
|
||||
var lastEvent = (DateTimeOffset?)null;
|
||||
uint? pid = null;
|
||||
string? binaryPath = null;
|
||||
|
||||
await foreach (var evt in events.WithCancellation(cancellationToken))
|
||||
{
|
||||
eventCount++;
|
||||
|
||||
var timestamp = DateTimeOffset.FromUnixTimeMilliseconds((long)(evt.Timestamp / 1_000_000));
|
||||
firstEvent ??= timestamp;
|
||||
lastEvent = timestamp;
|
||||
pid ??= evt.Pid;
|
||||
binaryPath ??= evt.BinaryPath;
|
||||
|
||||
var key = (evt.CallerSymbol, evt.CalleeSymbol);
|
||||
|
||||
if (!edgeMap.TryGetValue(key, out var builder))
|
||||
{
|
||||
builder = new RuntimeCallEdgeBuilder
|
||||
{
|
||||
From = evt.CallerSymbol,
|
||||
To = evt.CalleeSymbol,
|
||||
FirstObserved = timestamp,
|
||||
LastObserved = timestamp,
|
||||
ObservationCount = 1
|
||||
};
|
||||
edgeMap[key] = builder;
|
||||
}
|
||||
else
|
||||
{
|
||||
builder.LastObserved = timestamp;
|
||||
builder.ObservationCount++;
|
||||
}
|
||||
}
|
||||
|
||||
var edges = edgeMap.Values
|
||||
.Select(b => new RuntimeCallEdge
|
||||
{
|
||||
From = b.From,
|
||||
To = b.To,
|
||||
ObservationCount = b.ObservationCount,
|
||||
FirstObserved = b.FirstObserved,
|
||||
LastObserved = b.LastObserved
|
||||
})
|
||||
.OrderBy(e => e.From)
|
||||
.ThenBy(e => e.To)
|
||||
.ToList();
|
||||
|
||||
var duration = (lastEvent ?? _timeProvider.GetUtcNow()) - (firstEvent ?? _timeProvider.GetUtcNow());
|
||||
|
||||
var trace = new NormalizedTrace
|
||||
{
|
||||
TraceId = GenerateTraceId(scanId, eventCount),
|
||||
ScanId = scanId,
|
||||
CollectedAt = _timeProvider.GetUtcNow(),
|
||||
Edges = edges,
|
||||
Metadata = new TraceMetadata
|
||||
{
|
||||
ProcessId = pid ?? 0,
|
||||
BinaryPath = binaryPath ?? "unknown",
|
||||
Duration = duration,
|
||||
EventCount = eventCount
|
||||
}
|
||||
};
|
||||
|
||||
_logger.LogInformation(
|
||||
"Ingested trace {TraceId} for scan {ScanId}: {EdgeCount} edges from {EventCount} events",
|
||||
trace.TraceId,
|
||||
scanId,
|
||||
edges.Count,
|
||||
eventCount);
|
||||
|
||||
return trace;
|
||||
}
|
||||
|
||||
public async Task<string> StoreAsync(
|
||||
NormalizedTrace trace,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(trace);
|
||||
|
||||
var json = System.Text.Json.JsonSerializer.Serialize(trace);
|
||||
var bytes = System.Text.Encoding.UTF8.GetBytes(json);
|
||||
|
||||
await using var stream = new MemoryStream(bytes, writable: false);
|
||||
var casKey = $"trace_{trace.TraceId}";
|
||||
|
||||
await _cas.PutAsync(new FileCasPutRequest(casKey, stream, leaveOpen: false), cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
_logger.LogInformation("Stored trace {TraceId} in CAS with key {CasKey}", trace.TraceId, casKey);
|
||||
|
||||
return trace.TraceId;
|
||||
}
|
||||
|
||||
public async Task<NormalizedTrace?> GetTraceAsync(
|
||||
string traceId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(traceId);
|
||||
|
||||
var casKey = $"trace_{traceId}";
|
||||
|
||||
try
|
||||
{
|
||||
var bytes = await _cas.GetAsync(new FileCasGetRequest(casKey), cancellationToken)
|
||||
.ConfigureAwait(false);
|
||||
|
||||
if (bytes is null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
|
||||
var trace = System.Text.Json.JsonSerializer.Deserialize<NormalizedTrace>(bytes);
|
||||
return trace;
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
_logger.LogError(ex, "Error retrieving trace {TraceId}", traceId);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
public async Task<IReadOnlyList<NormalizedTrace>> GetTracesForScanAsync(
|
||||
string scanId,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(scanId);
|
||||
|
||||
// TODO: Implement scan-to-trace index
|
||||
// For now, return empty list
|
||||
await Task.Delay(1, cancellationToken).ConfigureAwait(false);
|
||||
return Array.Empty<NormalizedTrace>();
|
||||
}
|
||||
|
||||
private static string GenerateTraceId(string scanId, long eventCount)
|
||||
{
|
||||
var input = $"{scanId}|{eventCount}|{DateTimeOffset.UtcNow.Ticks}";
|
||||
var hash = SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(input));
|
||||
return $"trace_{Convert.ToHexString(hash)[..16].ToLowerInvariant()}";
|
||||
}
|
||||
|
||||
private sealed class RuntimeCallEdgeBuilder
|
||||
{
|
||||
public required string From { get; init; }
|
||||
public required string To { get; init; }
|
||||
public required DateTimeOffset FirstObserved { get; set; }
|
||||
public required DateTimeOffset LastObserved { get; set; }
|
||||
public required ulong ObservationCount { get; set; }
|
||||
}
|
||||
}
|
||||