Refactor code structure for improved readability and maintainability; optimize performance in key functions.

This commit is contained in:
master
2025-12-22 19:06:31 +02:00
parent dfaa2079aa
commit 4602ccc3a3
1444 changed files with 109919 additions and 8058 deletions

View File

@@ -1,7 +1,7 @@
// -----------------------------------------------------------------------------
// FindingEvidenceContracts.cs
// Sprint: SPRINT_3800_0001_0001_evidence_api_models
// Description: Unified evidence API response contracts for findings.
// Sprint: SPRINT_4300_0001_0002_findings_evidence_api
// Description: Evidence API response contracts for explainable triage.
// -----------------------------------------------------------------------------
using System;
@@ -11,447 +11,188 @@ using System.Text.Json.Serialization;
namespace StellaOps.Scanner.WebService.Contracts;
/// <summary>
/// Unified evidence response for a finding, combining reachability, boundary,
/// VEX evidence, and score explanation.
/// Consolidated evidence response for a finding.
/// Matches the advisory contract for explainable triage UX.
/// </summary>
public sealed record FindingEvidenceResponse
{
/// <summary>
/// Unique identifier for the finding.
/// Unique finding identifier.
/// </summary>
[JsonPropertyName("finding_id")]
public string FindingId { get; init; } = string.Empty;
public required string FindingId { get; init; }
/// <summary>
/// CVE identifier (e.g., "CVE-2021-44228").
/// CVE or vulnerability identifier.
/// </summary>
[JsonPropertyName("cve")]
public string Cve { get; init; } = string.Empty;
public required string Cve { get; init; }
/// <summary>
/// Component where the vulnerability was found.
/// Affected component details.
/// </summary>
[JsonPropertyName("component")]
public ComponentRef? Component { get; init; }
public required ComponentInfo Component { get; init; }
/// <summary>
/// Reachable call path from entrypoint to vulnerable sink.
/// Each element is a fully-qualified name (FQN).
/// Reachable path from entrypoint to vulnerable code.
/// </summary>
[JsonPropertyName("reachable_path")]
public IReadOnlyList<string>? ReachablePath { get; init; }
public IReadOnlyList<string> ReachablePath { get; init; } = Array.Empty<string>();
/// <summary>
/// Entrypoint proof (how the code is exposed).
/// Entrypoint details (HTTP route, CLI command, etc.).
/// </summary>
[JsonPropertyName("entrypoint")]
public EntrypointProof? Entrypoint { get; init; }
public EntrypointInfo? Entrypoint { get; init; }
/// <summary>
/// Boundary proof (surface exposure and controls).
/// </summary>
[JsonPropertyName("boundary")]
public BoundaryProofDto? Boundary { get; init; }
/// <summary>
/// VEX (Vulnerability Exploitability eXchange) evidence.
/// VEX exploitability status.
/// </summary>
[JsonPropertyName("vex")]
public VexEvidenceDto? Vex { get; init; }
public VexStatusInfo? Vex { get; init; }
/// <summary>
/// Score explanation with additive risk breakdown.
/// </summary>
[JsonPropertyName("score_explain")]
public ScoreExplanationDto? ScoreExplain { get; init; }
/// <summary>
/// When the finding was last observed.
/// When this evidence was last observed/generated.
/// </summary>
[JsonPropertyName("last_seen")]
public DateTimeOffset LastSeen { get; init; }
public required DateTimeOffset LastSeen { get; init; }
/// <summary>
/// When the evidence expires (for VEX/attestation freshness).
/// </summary>
[JsonPropertyName("expires_at")]
public DateTimeOffset? ExpiresAt { get; init; }
/// <summary>
/// Whether the evidence is stale (expired or near-expiry).
/// </summary>
[JsonPropertyName("is_stale")]
public bool IsStale { get; init; }
/// <summary>
/// References to DSSE/in-toto attestations backing this evidence.
/// Content-addressed references to attestations.
/// </summary>
[JsonPropertyName("attestation_refs")]
public IReadOnlyList<string>? AttestationRefs { get; init; }
public IReadOnlyList<string> AttestationRefs { get; init; } = Array.Empty<string>();
/// <summary>
/// Risk score with explanation.
/// </summary>
[JsonPropertyName("score")]
public ScoreInfo? Score { get; init; }
/// <summary>
/// Boundary exposure information.
/// </summary>
[JsonPropertyName("boundary")]
public BoundaryInfo? Boundary { get; init; }
/// <summary>
/// Evidence freshness and TTL.
/// </summary>
[JsonPropertyName("freshness")]
public FreshnessInfo Freshness { get; init; } = new();
}
/// <summary>
/// Reference to a component (package) by PURL and version.
/// </summary>
public sealed record ComponentRef
public sealed record ComponentInfo
{
/// <summary>
/// Package URL (PURL) identifier.
/// </summary>
[JsonPropertyName("purl")]
public string Purl { get; init; } = string.Empty;
/// <summary>
/// Package name.
/// </summary>
[JsonPropertyName("name")]
public string Name { get; init; } = string.Empty;
public required string Name { get; init; }
/// <summary>
/// Package version.
/// </summary>
[JsonPropertyName("version")]
public string Version { get; init; } = string.Empty;
public required string Version { get; init; }
/// <summary>
/// Package type/ecosystem (npm, maven, nuget, etc.).
/// </summary>
[JsonPropertyName("type")]
public string Type { get; init; } = string.Empty;
[JsonPropertyName("purl")]
public string? Purl { get; init; }
[JsonPropertyName("ecosystem")]
public string? Ecosystem { get; init; }
}
/// <summary>
/// Proof of how code is exposed as an entrypoint.
/// </summary>
public sealed record EntrypointProof
public sealed record EntrypointInfo
{
/// <summary>
/// Type of entrypoint (http_handler, grpc_method, cli_command, etc.).
/// </summary>
[JsonPropertyName("type")]
public string Type { get; init; } = string.Empty;
public required string Type { get; init; }
/// <summary>
/// Route or path (e.g., "/api/v1/users", "grpc.UserService.GetUser").
/// </summary>
[JsonPropertyName("route")]
public string? Route { get; init; }
/// <summary>
/// HTTP method if applicable (GET, POST, etc.).
/// </summary>
[JsonPropertyName("method")]
public string? Method { get; init; }
/// <summary>
/// Authentication requirement (none, optional, required).
/// </summary>
[JsonPropertyName("auth")]
public string? Auth { get; init; }
/// <summary>
/// Execution phase (startup, runtime, shutdown).
/// </summary>
[JsonPropertyName("phase")]
public string? Phase { get; init; }
/// <summary>
/// Fully qualified name of the entrypoint symbol.
/// </summary>
[JsonPropertyName("fqn")]
public string Fqn { get; init; } = string.Empty;
/// <summary>
/// Source file location.
/// </summary>
[JsonPropertyName("location")]
public SourceLocation? Location { get; init; }
}
/// <summary>
/// Source file location reference.
/// </summary>
public sealed record SourceLocation
public sealed record VexStatusInfo
{
/// <summary>
/// File path relative to repository root.
/// </summary>
[JsonPropertyName("file")]
public string File { get; init; } = string.Empty;
/// <summary>
/// Line number (1-indexed).
/// </summary>
[JsonPropertyName("line")]
public int? Line { get; init; }
/// <summary>
/// Column number (1-indexed).
/// </summary>
[JsonPropertyName("column")]
public int? Column { get; init; }
}
/// <summary>
/// Boundary proof describing surface exposure and controls.
/// </summary>
public sealed record BoundaryProofDto
{
/// <summary>
/// Kind of boundary (network, file, ipc, etc.).
/// </summary>
[JsonPropertyName("kind")]
public string Kind { get; init; } = string.Empty;
/// <summary>
/// Surface descriptor (what is exposed).
/// </summary>
[JsonPropertyName("surface")]
public SurfaceDescriptor? Surface { get; init; }
/// <summary>
/// Exposure descriptor (how it's exposed).
/// </summary>
[JsonPropertyName("exposure")]
public ExposureDescriptor? Exposure { get; init; }
/// <summary>
/// Authentication descriptor.
/// </summary>
[JsonPropertyName("auth")]
public AuthDescriptor? Auth { get; init; }
/// <summary>
/// Security controls in place.
/// </summary>
[JsonPropertyName("controls")]
public IReadOnlyList<ControlDescriptor>? Controls { get; init; }
/// <summary>
/// When the boundary was last verified.
/// </summary>
[JsonPropertyName("last_seen")]
public DateTimeOffset LastSeen { get; init; }
/// <summary>
/// Confidence score (0.0 to 1.0).
/// </summary>
[JsonPropertyName("confidence")]
public double Confidence { get; init; }
}
/// <summary>
/// Describes what attack surface is exposed.
/// </summary>
public sealed record SurfaceDescriptor
{
/// <summary>
/// Type of surface (api, web, cli, library).
/// </summary>
[JsonPropertyName("type")]
public string Type { get; init; } = string.Empty;
/// <summary>
/// Protocol (http, https, grpc, tcp).
/// </summary>
[JsonPropertyName("protocol")]
public string? Protocol { get; init; }
/// <summary>
/// Port number if network-exposed.
/// </summary>
[JsonPropertyName("port")]
public int? Port { get; init; }
}
/// <summary>
/// Describes how the surface is exposed.
/// </summary>
public sealed record ExposureDescriptor
{
/// <summary>
/// Exposure level (public, internal, private).
/// </summary>
[JsonPropertyName("level")]
public string Level { get; init; } = string.Empty;
/// <summary>
/// Whether the exposure is internet-facing.
/// </summary>
[JsonPropertyName("internet_facing")]
public bool InternetFacing { get; init; }
/// <summary>
/// Network zone (dmz, internal, trusted).
/// </summary>
[JsonPropertyName("zone")]
public string? Zone { get; init; }
}
/// <summary>
/// Describes authentication requirements.
/// </summary>
public sealed record AuthDescriptor
{
/// <summary>
/// Whether authentication is required.
/// </summary>
[JsonPropertyName("required")]
public bool Required { get; init; }
/// <summary>
/// Authentication type (jwt, oauth2, basic, api_key).
/// </summary>
[JsonPropertyName("type")]
public string? Type { get; init; }
/// <summary>
/// Required roles/scopes.
/// </summary>
[JsonPropertyName("roles")]
public IReadOnlyList<string>? Roles { get; init; }
}
/// <summary>
/// Describes a security control.
/// </summary>
public sealed record ControlDescriptor
{
/// <summary>
/// Type of control (rate_limit, waf, input_validation, etc.).
/// </summary>
[JsonPropertyName("type")]
public string Type { get; init; } = string.Empty;
/// <summary>
/// Whether the control is active.
/// </summary>
[JsonPropertyName("active")]
public bool Active { get; init; }
/// <summary>
/// Control configuration details.
/// </summary>
[JsonPropertyName("config")]
public string? Config { get; init; }
}
/// <summary>
/// VEX (Vulnerability Exploitability eXchange) evidence.
/// </summary>
public sealed record VexEvidenceDto
{
/// <summary>
/// VEX status (not_affected, affected, fixed, under_investigation).
/// </summary>
[JsonPropertyName("status")]
public string Status { get; init; } = string.Empty;
public required string Status { get; init; }
/// <summary>
/// Justification for the status.
/// </summary>
[JsonPropertyName("justification")]
public string? Justification { get; init; }
/// <summary>
/// Impact statement explaining why not affected.
/// </summary>
[JsonPropertyName("impact")]
public string? Impact { get; init; }
[JsonPropertyName("timestamp")]
public DateTimeOffset? Timestamp { get; init; }
/// <summary>
/// Action statement (remediation steps).
/// </summary>
[JsonPropertyName("action")]
public string? Action { get; init; }
[JsonPropertyName("issuer")]
public string? Issuer { get; init; }
}
/// <summary>
/// Reference to the VEX document/attestation.
/// </summary>
[JsonPropertyName("attestation_ref")]
public string? AttestationRef { get; init; }
public sealed record ScoreInfo
{
[JsonPropertyName("risk_score")]
public required int RiskScore { get; init; }
/// <summary>
/// When the VEX statement was issued.
/// </summary>
[JsonPropertyName("issued_at")]
public DateTimeOffset? IssuedAt { get; init; }
[JsonPropertyName("contributions")]
public IReadOnlyList<ScoreContribution> Contributions { get; init; } = Array.Empty<ScoreContribution>();
}
public sealed record ScoreContribution
{
[JsonPropertyName("factor")]
public required string Factor { get; init; }
[JsonPropertyName("value")]
public required int Value { get; init; }
[JsonPropertyName("reason")]
public string? Reason { get; init; }
}
public sealed record BoundaryInfo
{
[JsonPropertyName("surface")]
public required string Surface { get; init; }
[JsonPropertyName("exposure")]
public required string Exposure { get; init; }
[JsonPropertyName("auth")]
public AuthInfo? Auth { get; init; }
[JsonPropertyName("controls")]
public IReadOnlyList<string> Controls { get; init; } = Array.Empty<string>();
}
public sealed record AuthInfo
{
[JsonPropertyName("mechanism")]
public required string Mechanism { get; init; }
[JsonPropertyName("required_scopes")]
public IReadOnlyList<string> RequiredScopes { get; init; } = Array.Empty<string>();
}
public sealed record FreshnessInfo
{
[JsonPropertyName("is_stale")]
public bool IsStale { get; init; }
/// <summary>
/// When the VEX statement expires.
/// </summary>
[JsonPropertyName("expires_at")]
public DateTimeOffset? ExpiresAt { get; init; }
/// <summary>
/// Source of the VEX statement (vendor, first-party, third-party).
/// </summary>
[JsonPropertyName("source")]
public string? Source { get; init; }
[JsonPropertyName("ttl_remaining_hours")]
public int? TtlRemainingHours { get; init; }
}
/// <summary>
/// Score explanation with additive breakdown of risk factors.
/// </summary>
public sealed record ScoreExplanationDto
public sealed record BatchEvidenceRequest
{
/// <summary>
/// Kind of scoring algorithm (stellaops_risk_v1, cvss_v4, etc.).
/// </summary>
[JsonPropertyName("kind")]
public string Kind { get; init; } = string.Empty;
/// <summary>
/// Final computed risk score.
/// </summary>
[JsonPropertyName("risk_score")]
public double RiskScore { get; init; }
/// <summary>
/// Individual score contributions.
/// </summary>
[JsonPropertyName("contributions")]
public IReadOnlyList<ScoreContributionDto>? Contributions { get; init; }
/// <summary>
/// When the score was computed.
/// </summary>
[JsonPropertyName("last_seen")]
public DateTimeOffset LastSeen { get; init; }
[JsonPropertyName("finding_ids")]
public required IReadOnlyList<string> FindingIds { get; init; }
}
/// <summary>
/// Individual contribution to the risk score.
/// </summary>
public sealed record ScoreContributionDto
public sealed record BatchEvidenceResponse
{
/// <summary>
/// Factor name (cvss_base, epss, reachability, gate_multiplier, etc.).
/// </summary>
[JsonPropertyName("factor")]
public string Factor { get; init; } = string.Empty;
/// <summary>
/// Weight applied to this factor (0.0 to 1.0).
/// </summary>
[JsonPropertyName("weight")]
public double Weight { get; init; }
/// <summary>
/// Raw value before weighting.
/// </summary>
[JsonPropertyName("raw_value")]
public double RawValue { get; init; }
/// <summary>
/// Weighted contribution to final score.
/// </summary>
[JsonPropertyName("contribution")]
public double Contribution { get; init; }
/// <summary>
/// Human-readable explanation of this factor.
/// </summary>
[JsonPropertyName("explanation")]
public string? Explanation { get; init; }
[JsonPropertyName("findings")]
public required IReadOnlyList<FindingEvidenceResponse> Findings { get; init; }
}
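A minimal consumption sketch (not part of the commit), assuming only the records above; all identifiers and values are placeholders.

// Illustrative only: the [JsonPropertyName] attributes drive the wire names, so no naming policy is needed.
using System;
using System.Text.Json;

var evidence = new FindingEvidenceResponse
{
    FindingId = "finding-123",
    Cve = "CVE-2021-44228",
    Component = new ComponentInfo { Name = "log4j-core", Version = "2.14.1", Ecosystem = "maven" },
    LastSeen = DateTimeOffset.UtcNow,
    Freshness = new FreshnessInfo { IsStale = false, TtlRemainingHours = 72 }
};

// Emits snake_case keys such as "finding_id", "cve", "component", "last_seen".
Console.WriteLine(JsonSerializer.Serialize(evidence, new JsonSerializerOptions { WriteIndented = true }));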

View File

@@ -1,3 +1,4 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.WebService.Contracts;
@@ -11,6 +12,153 @@ public sealed record SbomAcceptedResponseDto(
[property: JsonPropertyName("componentCount")] int ComponentCount,
[property: JsonPropertyName("digest")] string Digest);
/// <summary>
/// Request payload for BYOS SBOM uploads.
/// </summary>
public sealed record SbomUploadRequestDto
{
[JsonPropertyName("artifactRef")]
public string ArtifactRef { get; init; } = string.Empty;
[JsonPropertyName("artifactDigest")]
public string? ArtifactDigest { get; init; }
[JsonPropertyName("sbom")]
public JsonElement? Sbom { get; init; }
[JsonPropertyName("sbomBase64")]
public string? SbomBase64 { get; init; }
[JsonPropertyName("format")]
public string? Format { get; init; }
[JsonPropertyName("source")]
public SbomUploadSourceDto? Source { get; init; }
}
/// <summary>
/// Provenance metadata for a BYOS SBOM upload.
/// </summary>
public sealed record SbomUploadSourceDto
{
[JsonPropertyName("tool")]
public string? Tool { get; init; }
[JsonPropertyName("version")]
public string? Version { get; init; }
[JsonPropertyName("ciContext")]
public SbomUploadCiContextDto? CiContext { get; init; }
}
/// <summary>
/// CI metadata attached to a BYOS SBOM upload.
/// </summary>
public sealed record SbomUploadCiContextDto
{
[JsonPropertyName("buildId")]
public string? BuildId { get; init; }
[JsonPropertyName("repository")]
public string? Repository { get; init; }
}
/// <summary>
/// Response payload for BYOS SBOM uploads.
/// </summary>
public sealed record SbomUploadResponseDto
{
[JsonPropertyName("sbomId")]
public string SbomId { get; init; } = string.Empty;
[JsonPropertyName("artifactRef")]
public string ArtifactRef { get; init; } = string.Empty;
[JsonPropertyName("artifactDigest")]
public string? ArtifactDigest { get; init; }
[JsonPropertyName("digest")]
public string Digest { get; init; } = string.Empty;
[JsonPropertyName("format")]
public string Format { get; init; } = string.Empty;
[JsonPropertyName("formatVersion")]
public string FormatVersion { get; init; } = string.Empty;
[JsonPropertyName("validationResult")]
public SbomValidationSummaryDto ValidationResult { get; init; } = new();
[JsonPropertyName("analysisJobId")]
public string AnalysisJobId { get; init; } = string.Empty;
[JsonPropertyName("uploadedAtUtc")]
public DateTimeOffset UploadedAtUtc { get; init; }
}
/// <summary>
/// Validation summary for a BYOS SBOM upload.
/// </summary>
public sealed record SbomValidationSummaryDto
{
[JsonPropertyName("valid")]
public bool Valid { get; init; }
[JsonPropertyName("qualityScore")]
public double QualityScore { get; init; }
[JsonPropertyName("warnings")]
public IReadOnlyList<string> Warnings { get; init; } = Array.Empty<string>();
[JsonPropertyName("errors")]
public IReadOnlyList<string> Errors { get; init; } = Array.Empty<string>();
[JsonPropertyName("componentCount")]
public int ComponentCount { get; init; }
}
/// <summary>
/// Upload record returned for BYOS queries.
/// </summary>
public sealed record SbomUploadRecordDto
{
[JsonPropertyName("sbomId")]
public string SbomId { get; init; } = string.Empty;
[JsonPropertyName("artifactRef")]
public string ArtifactRef { get; init; } = string.Empty;
[JsonPropertyName("artifactDigest")]
public string? ArtifactDigest { get; init; }
[JsonPropertyName("digest")]
public string Digest { get; init; } = string.Empty;
[JsonPropertyName("format")]
public string Format { get; init; } = string.Empty;
[JsonPropertyName("formatVersion")]
public string FormatVersion { get; init; } = string.Empty;
[JsonPropertyName("analysisJobId")]
public string AnalysisJobId { get; init; } = string.Empty;
[JsonPropertyName("componentCount")]
public int ComponentCount { get; init; }
[JsonPropertyName("qualityScore")]
public double QualityScore { get; init; }
[JsonPropertyName("warnings")]
public IReadOnlyList<string> Warnings { get; init; } = Array.Empty<string>();
[JsonPropertyName("source")]
public SbomUploadSourceDto? Source { get; init; }
[JsonPropertyName("createdAtUtc")]
public DateTimeOffset CreatedAtUtc { get; init; }
}
/// <summary>
/// SBOM format types.
/// </summary>
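A hedged sketch of building the upload request defined above; the artifact reference, digest, and inline CycloneDX document are illustrative placeholders.

// Illustrative only: constructing a BYOS upload payload from the DTOs above.
using System;
using System.Text.Json;

var sbomJson = """{"bomFormat":"CycloneDX","specVersion":"1.6","components":[]}""";
var request = new SbomUploadRequestDto
{
    ArtifactRef = "registry.example.com/app:1.2.3",
    ArtifactDigest = "sha256:deadbeef",
    Sbom = JsonDocument.Parse(sbomJson).RootElement.Clone(),
    Format = "cyclonedx",
    Source = new SbomUploadSourceDto
    {
        Tool = "syft",
        Version = "1.0.0",
        CiContext = new SbomUploadCiContextDto { BuildId = "build-42", Repository = "example/app" }
    }
};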

View File

@@ -0,0 +1,89 @@
using Microsoft.AspNetCore.Mvc;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Services;
namespace StellaOps.Scanner.WebService.Controllers;
[ApiController]
[Route("api/v1/findings")]
[Produces("application/json")]
public sealed class FindingsEvidenceController : ControllerBase
{
private readonly IEvidenceCompositionService _evidenceService;
private readonly ITriageQueryService _triageService;
private readonly ILogger<FindingsEvidenceController> _logger;
public FindingsEvidenceController(
IEvidenceCompositionService evidenceService,
ITriageQueryService triageService,
ILogger<FindingsEvidenceController> logger)
{
_evidenceService = evidenceService ?? throw new ArgumentNullException(nameof(evidenceService));
_triageService = triageService ?? throw new ArgumentNullException(nameof(triageService));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <summary>
/// Get consolidated evidence for a finding.
/// </summary>
/// <param name="findingId">The finding identifier.</param>
/// <param name="includeRaw">Include raw source locations (requires elevated permissions).</param>
/// <response code="200">Evidence retrieved successfully.</response>
/// <response code="404">Finding not found.</response>
/// <response code="403">Insufficient permissions for raw source.</response>
[HttpGet("{findingId}/evidence")]
[ProducesResponseType(typeof(FindingEvidenceResponse), StatusCodes.Status200OK)]
[ProducesResponseType(StatusCodes.Status404NotFound)]
[ProducesResponseType(StatusCodes.Status403Forbidden)]
public async Task<IActionResult> GetEvidenceAsync(
[FromRoute] string findingId,
[FromQuery] bool includeRaw = false,
CancellationToken ct = default)
{
_logger.LogDebug("Getting evidence for finding {FindingId}", findingId);
if (includeRaw && !User.HasClaim("scope", "evidence:raw"))
{
// Forbid(string) treats the argument as an authentication scheme name; return an explicit 403 payload instead.
return StatusCode(StatusCodes.Status403Forbidden, new { error = "Requires evidence:raw scope for raw source access" });
}
var finding = await _triageService.GetFindingAsync(findingId, ct).ConfigureAwait(false);
if (finding is null)
{
return NotFound(new { error = "Finding not found", findingId });
}
var response = await _evidenceService.ComposeAsync(finding, includeRaw, ct).ConfigureAwait(false);
return Ok(response);
}
/// <summary>
/// Get evidence for multiple findings (batch).
/// </summary>
[HttpPost("evidence/batch")]
[ProducesResponseType(typeof(BatchEvidenceResponse), StatusCodes.Status200OK)]
public async Task<IActionResult> GetBatchEvidenceAsync(
[FromBody] BatchEvidenceRequest request,
CancellationToken ct = default)
{
if (request.FindingIds.Count > 100)
{
return BadRequest(new { error = "Maximum 100 findings per batch" });
}
var results = new List<FindingEvidenceResponse>();
foreach (var findingId in request.FindingIds)
{
var finding = await _triageService.GetFindingAsync(findingId, ct).ConfigureAwait(false);
if (finding is null)
{
continue;
}
var evidence = await _evidenceService.ComposeAsync(finding, includeRaw: false, ct).ConfigureAwait(false);
results.Add(evidence);
}
return Ok(new BatchEvidenceResponse { Findings = results });
}
}
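A client-side sketch for the two controller actions above; the host, bearer token, and finding identifiers are placeholders.

// Illustrative only: single and batch evidence retrieval over HTTP.
using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.Http.Json;

using var client = new HttpClient { BaseAddress = new Uri("https://scanner.example.com") };
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", "<token>");

var evidence = await client.GetFromJsonAsync<FindingEvidenceResponse>(
    "api/v1/findings/finding-123/evidence?includeRaw=false");

var batch = await client.PostAsJsonAsync(
    "api/v1/findings/evidence/batch",
    new BatchEvidenceRequest { FindingIds = new[] { "finding-123", "finding-456" } });
batch.EnsureSuccessStatusCode();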

View File

@@ -102,13 +102,13 @@ internal static class EvidenceEndpoints
}
// Add warning header if evidence is stale or near expiry
if (evidence.IsStale)
if (evidence.Freshness.IsStale)
{
context.Response.Headers["X-Evidence-Warning"] = "stale";
}
else if (evidence.ExpiresAt.HasValue)
else if (evidence.Freshness.ExpiresAt.HasValue)
{
var timeUntilExpiry = evidence.ExpiresAt.Value - DateTimeOffset.UtcNow;
var timeUntilExpiry = evidence.Freshness.ExpiresAt.Value - DateTimeOffset.UtcNow;
if (timeUntilExpiry <= TimeSpan.FromDays(1))
{
context.Response.Headers["X-Evidence-Warning"] = "near-expiry";

View File

@@ -35,7 +35,7 @@ internal static class ExportEndpoints
scansGroup.MapGet("/{scanId}/exports/cdxr", HandleExportCycloneDxRAsync)
.WithName("scanner.scans.exports.cdxr")
.WithTags("Exports")
.Produces(StatusCodes.Status200OK, contentType: "application/vnd.cyclonedx+json")
.Produces(StatusCodes.Status200OK, contentType: "application/vnd.cyclonedx+json; version=1.7")
.Produces(StatusCodes.Status404NotFound)
.RequireAuthorization(ScannerPolicies.ScansRead);
@@ -137,7 +137,7 @@ internal static class ExportEndpoints
}
var json = JsonSerializer.Serialize(cdxDocument, SerializerOptions);
return Results.Content(json, "application/vnd.cyclonedx+json", System.Text.Encoding.UTF8, StatusCodes.Status200OK);
return Results.Content(json, "application/vnd.cyclonedx+json; version=1.7", System.Text.Encoding.UTF8, StatusCodes.Status200OK);
}
private static async Task<IResult> HandleExportOpenVexAsync(

View File

@@ -0,0 +1,45 @@
using Microsoft.AspNetCore.Mvc;
using StellaOps.Scanner.Orchestration.Fidelity;
namespace StellaOps.Scanner.WebService.Endpoints;
public static class FidelityEndpoints
{
public static void MapFidelityEndpoints(this WebApplication app)
{
var group = app.MapGroup("/api/v1/scan")
.WithTags("Fidelity")
.RequireAuthorization();
// POST /api/v1/scan/analyze?fidelity={level}
group.MapPost("/analyze", async (
[FromBody] AnalysisRequest request,
IFidelityAwareAnalyzer analyzer,
CancellationToken ct,
[FromQuery] FidelityLevel fidelity = FidelityLevel.Standard) =>
{
var result = await analyzer.AnalyzeAsync(request, fidelity, ct);
return Results.Ok(result);
})
.WithName("AnalyzeWithFidelity")
.WithDescription("Analyze with specified fidelity level")
.Produces<FidelityAnalysisResult>(200);
// POST /api/v1/scan/findings/{findingId}/upgrade
group.MapPost("/findings/{findingId:guid}/upgrade", async (
Guid findingId,
IFidelityAwareAnalyzer analyzer,
CancellationToken ct,
[FromQuery] FidelityLevel target = FidelityLevel.Deep) =>
{
var result = await analyzer.UpgradeFidelityAsync(findingId, target, ct);
return result.Success
? Results.Ok(result)
: Results.BadRequest(result);
})
.WithName("UpgradeFidelity")
.WithDescription("Upgrade analysis fidelity for a finding")
.Produces<FidelityUpgradeResult>(200)
.Produces<FidelityUpgradeResult>(400);
}
}
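A hedged sketch of invoking the analyze endpoint above; AnalysisRequest lives in StellaOps.Scanner.Orchestration.Fidelity, so an anonymous stand-in body is used here.

// Illustrative only: request a Deep-fidelity analysis. The body is a hypothetical stand-in for AnalysisRequest.
using System;
using System.Net.Http;
using System.Net.Http.Json;

using var client = new HttpClient { BaseAddress = new Uri("https://scanner.example.com") };
var response = await client.PostAsJsonAsync(
    "api/v1/scan/analyze?fidelity=Deep",
    new { artifactDigest = "sha256:deadbeef" });
response.EnsureSuccessStatusCode();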

View File

@@ -27,7 +27,12 @@ internal static class SbomEndpoints
scansGroup.MapPost("/{scanId}/sbom", HandleSubmitSbomAsync)
.WithName("scanner.scans.sbom.submit")
.WithTags("Scans")
.Accepts<JsonDocument>("application/vnd.cyclonedx+json", "application/spdx+json", "application/json")
.Accepts<JsonDocument>(
"application/vnd.cyclonedx+json; version=1.7",
"application/vnd.cyclonedx+json; version=1.6",
"application/vnd.cyclonedx+json",
"application/spdx+json",
"application/json")
.Produces<SbomAcceptedResponseDto>(StatusCodes.Status202Accepted)
.Produces(StatusCodes.Status400BadRequest)
.Produces(StatusCodes.Status404NotFound)
@@ -96,7 +101,7 @@ internal static class SbomEndpoints
ProblemTypes.Validation,
"Unknown SBOM format",
StatusCodes.Status400BadRequest,
detail: "Could not detect SBOM format. Use Content-Type 'application/vnd.cyclonedx+json' or 'application/spdx+json'.");
detail: "Could not detect SBOM format. Use Content-Type 'application/vnd.cyclonedx+json; version=1.7' (or 1.6) or 'application/spdx+json'.");
}
// Validate the SBOM

View File

@@ -0,0 +1,96 @@
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing;
using StellaOps.Scanner.WebService.Constants;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Infrastructure;
using StellaOps.Scanner.WebService.Security;
using StellaOps.Scanner.WebService.Services;
namespace StellaOps.Scanner.WebService.Endpoints;
internal static class SbomUploadEndpoints
{
public static void MapSbomUploadEndpoints(this RouteGroupBuilder apiGroup)
{
ArgumentNullException.ThrowIfNull(apiGroup);
var sbomGroup = apiGroup.MapGroup("/sbom");
sbomGroup.MapPost("/upload", HandleUploadAsync)
.WithName("scanner.sbom.upload")
.WithTags("SBOM")
.Produces<SbomUploadResponseDto>(StatusCodes.Status202Accepted)
.Produces(StatusCodes.Status400BadRequest)
.RequireAuthorization(ScannerPolicies.ScansWrite);
sbomGroup.MapGet("/uploads/{sbomId}", HandleGetUploadAsync)
.WithName("scanner.sbom.uploads.get")
.WithTags("SBOM")
.Produces<SbomUploadRecordDto>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status400BadRequest)
.Produces(StatusCodes.Status404NotFound)
.RequireAuthorization(ScannerPolicies.ScansRead);
}
private static async Task<IResult> HandleUploadAsync(
SbomUploadRequestDto request,
ISbomByosUploadService uploadService,
HttpContext context,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(uploadService);
var (response, validation) = await uploadService.UploadAsync(request, cancellationToken).ConfigureAwait(false);
if (!validation.Valid)
{
var extensions = new Dictionary<string, object?>
{
["errors"] = validation.Errors,
["warnings"] = validation.Warnings
};
return ProblemResultFactory.Create(
context,
ProblemTypes.Validation,
"Invalid SBOM",
StatusCodes.Status400BadRequest,
detail: "SBOM validation failed.",
extensions: extensions);
}
return Results.Accepted($"/api/v1/sbom/uploads/{response.SbomId}", response);
}
private static async Task<IResult> HandleGetUploadAsync(
string sbomId,
ISbomByosUploadService uploadService,
HttpContext context,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(uploadService);
if (string.IsNullOrWhiteSpace(sbomId))
{
return ProblemResultFactory.Create(
context,
ProblemTypes.Validation,
"Invalid SBOM identifier",
StatusCodes.Status400BadRequest,
detail: "SBOM identifier is required.");
}
var record = await uploadService.GetRecordAsync(sbomId.Trim(), cancellationToken).ConfigureAwait(false);
if (record is null)
{
return ProblemResultFactory.Create(
context,
ProblemTypes.NotFound,
"SBOM upload not found",
StatusCodes.Status404NotFound,
detail: "Requested SBOM upload could not be located.");
}
return Results.Ok(record);
}
}
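A round-trip sketch for the upload and lookup routes above; the API prefix, host, and local file path are assumptions.

// Illustrative only: upload an SBOM, then fetch the stored record by its id.
using System;
using System.IO;
using System.Net.Http;
using System.Net.Http.Json;

using var client = new HttpClient { BaseAddress = new Uri("https://scanner.example.com") };

var upload = await client.PostAsJsonAsync("api/v1/sbom/upload", new SbomUploadRequestDto
{
    ArtifactRef = "registry.example.com/app:1.2.3",
    SbomBase64 = Convert.ToBase64String(File.ReadAllBytes("sbom.cdx.json")),
    Format = "cyclonedx"
});
upload.EnsureSuccessStatusCode(); // 202 Accepted with a Location header

var accepted = await upload.Content.ReadFromJsonAsync<SbomUploadResponseDto>();
var record = await client.GetFromJsonAsync<SbomUploadRecordDto>($"api/v1/sbom/uploads/{accepted!.SbomId}");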

View File

@@ -0,0 +1,386 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Scanner.WebService.Security;
using StellaOps.Scanner.WebService.Services;
namespace StellaOps.Scanner.WebService.Endpoints;
/// <summary>
/// Endpoints for slice query and replay operations.
/// </summary>
internal static class SliceEndpoints
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Converters = { new JsonStringEnumConverter() }
};
public static void MapSliceEndpoints(this IEndpointRouteBuilder endpoints)
{
ArgumentNullException.ThrowIfNull(endpoints);
var slicesGroup = endpoints.MapGroup("/api/slices")
.WithTags("Slices");
// POST /api/slices/query - Generate reachability slice on demand
slicesGroup.MapPost("/query", HandleQueryAsync)
.WithName("scanner.slices.query")
.WithDescription("Query reachability for CVE/symbols and generate an attested slice")
.Produces<SliceQueryResponseDto>(StatusCodes.Status200OK)
.Produces<SliceQueryResponseDto>(StatusCodes.Status202Accepted)
.Produces(StatusCodes.Status400BadRequest)
.Produces(StatusCodes.Status404NotFound)
.RequireAuthorization(ScannerPolicies.ScansRead);
// GET /api/slices/{digest} - Retrieve attested slice by digest
slicesGroup.MapGet("/{digest}", HandleGetSliceAsync)
.WithName("scanner.slices.get")
.WithDescription("Retrieve an attested reachability slice by its content digest")
.Produces<object>(StatusCodes.Status200OK, "application/json")
.Produces<object>(StatusCodes.Status200OK, "application/dsse+json")
.Produces(StatusCodes.Status404NotFound)
.RequireAuthorization(ScannerPolicies.ScansRead);
// POST /api/slices/replay - Verify slice reproducibility
slicesGroup.MapPost("/replay", HandleReplayAsync)
.WithName("scanner.slices.replay")
.WithDescription("Recompute a slice and verify byte-for-byte match with the original")
.Produces<SliceReplayResponseDto>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status400BadRequest)
.Produces(StatusCodes.Status404NotFound)
.RequireAuthorization(ScannerPolicies.ScansRead);
// GET /api/slices/cache/stats - Cache statistics (admin only)
slicesGroup.MapGet("/cache/stats", HandleCacheStatsAsync)
.WithName("scanner.slices.cache.stats")
.WithDescription("Get slice cache statistics")
.Produces<SliceCacheStatsDto>(StatusCodes.Status200OK)
.RequireAuthorization(ScannerPolicies.Admin);
}
private static async Task<IResult> HandleQueryAsync(
[FromBody] SliceQueryRequestDto request,
[FromServices] ISliceQueryService sliceService,
CancellationToken cancellationToken)
{
if (request == null)
{
return Results.BadRequest(new { error = "Request body is required" });
}
if (string.IsNullOrWhiteSpace(request.ScanId))
{
return Results.BadRequest(new { error = "scanId is required" });
}
if (string.IsNullOrWhiteSpace(request.CveId) &&
(request.Symbols == null || request.Symbols.Count == 0))
{
return Results.BadRequest(new { error = "Either cveId or symbols must be specified" });
}
try
{
var serviceRequest = new SliceQueryRequest
{
ScanId = request.ScanId,
CveId = request.CveId,
Symbols = request.Symbols,
Entrypoints = request.Entrypoints,
PolicyHash = request.PolicyHash
};
var response = await sliceService.QueryAsync(serviceRequest, cancellationToken).ConfigureAwait(false);
var dto = new SliceQueryResponseDto
{
SliceDigest = response.SliceDigest,
Verdict = response.Verdict,
Confidence = response.Confidence,
PathWitnesses = response.PathWitnesses,
CacheHit = response.CacheHit,
JobId = response.JobId
};
// Return 202 Accepted if async generation (jobId present)
if (!string.IsNullOrEmpty(response.JobId))
{
return Results.Accepted($"/api/slices/jobs/{response.JobId}", dto);
}
return Results.Ok(dto);
}
catch (InvalidOperationException ex) when (ex.Message.Contains("not found"))
{
return Results.NotFound(new { error = ex.Message });
}
}
private static async Task<IResult> HandleGetSliceAsync(
[FromRoute] string digest,
[FromHeader(Name = "Accept")] string? accept,
[FromServices] ISliceQueryService sliceService,
CancellationToken cancellationToken)
{
if (string.IsNullOrWhiteSpace(digest))
{
return Results.BadRequest(new { error = "digest is required" });
}
var wantsDsse = accept?.Contains("dsse", StringComparison.OrdinalIgnoreCase) == true;
try
{
if (wantsDsse)
{
var dsse = await sliceService.GetSliceDsseAsync(digest, cancellationToken).ConfigureAwait(false);
if (dsse == null)
{
return Results.NotFound(new { error = $"Slice {digest} not found" });
}
return Results.Json(dsse, SerializerOptions, "application/dsse+json");
}
else
{
var slice = await sliceService.GetSliceAsync(digest, cancellationToken).ConfigureAwait(false);
if (slice == null)
{
return Results.NotFound(new { error = $"Slice {digest} not found" });
}
return Results.Json(slice, SerializerOptions, "application/json");
}
}
catch (InvalidOperationException ex)
{
return Results.NotFound(new { error = ex.Message });
}
}
private static async Task<IResult> HandleReplayAsync(
[FromBody] SliceReplayRequestDto request,
[FromServices] ISliceQueryService sliceService,
CancellationToken cancellationToken)
{
if (request == null)
{
return Results.BadRequest(new { error = "Request body is required" });
}
if (string.IsNullOrWhiteSpace(request.SliceDigest))
{
return Results.BadRequest(new { error = "sliceDigest is required" });
}
try
{
var serviceRequest = new SliceReplayRequest
{
SliceDigest = request.SliceDigest
};
var response = await sliceService.ReplayAsync(serviceRequest, cancellationToken).ConfigureAwait(false);
var dto = new SliceReplayResponseDto
{
Match = response.Match,
OriginalDigest = response.OriginalDigest,
RecomputedDigest = response.RecomputedDigest,
Diff = response.Diff == null ? null : new SliceDiffDto
{
MissingNodes = response.Diff.MissingNodes,
ExtraNodes = response.Diff.ExtraNodes,
MissingEdges = response.Diff.MissingEdges,
ExtraEdges = response.Diff.ExtraEdges,
VerdictDiff = response.Diff.VerdictDiff
}
};
return Results.Ok(dto);
}
catch (InvalidOperationException ex) when (ex.Message.Contains("not found"))
{
return Results.NotFound(new { error = ex.Message });
}
}
private static IResult HandleCacheStatsAsync(
[FromServices] Reachability.Slices.ISliceCache cache)
{
var stats = cache.GetStatistics();
return Results.Ok(new SliceCacheStatsDto
{
ItemCount = (int)stats.EntryCount,
HitCount = stats.HitCount,
MissCount = stats.MissCount,
HitRate = stats.HitRate
});
}
}
#region DTOs
/// <summary>
/// Request to query reachability and generate a slice.
/// </summary>
public sealed class SliceQueryRequestDto
{
/// <summary>
/// The scan ID to query against.
/// </summary>
[JsonPropertyName("scanId")]
public string? ScanId { get; set; }
/// <summary>
/// Optional CVE ID to query reachability for.
/// </summary>
[JsonPropertyName("cveId")]
public string? CveId { get; set; }
/// <summary>
/// Target symbols to check reachability for.
/// </summary>
[JsonPropertyName("symbols")]
public List<string>? Symbols { get; set; }
/// <summary>
/// Entrypoint symbols to start reachability analysis from.
/// </summary>
[JsonPropertyName("entrypoints")]
public List<string>? Entrypoints { get; set; }
/// <summary>
/// Optional policy hash to include in the slice.
/// </summary>
[JsonPropertyName("policyHash")]
public string? PolicyHash { get; set; }
}
/// <summary>
/// Response from slice query.
/// </summary>
public sealed class SliceQueryResponseDto
{
/// <summary>
/// Content-addressed digest of the generated slice.
/// </summary>
[JsonPropertyName("sliceDigest")]
public required string SliceDigest { get; set; }
/// <summary>
/// Reachability verdict (reachable, unreachable, unknown, gated).
/// </summary>
[JsonPropertyName("verdict")]
public required string Verdict { get; set; }
/// <summary>
/// Confidence score [0.0, 1.0].
/// </summary>
[JsonPropertyName("confidence")]
public double Confidence { get; set; }
/// <summary>
/// Example paths demonstrating reachability (if reachable).
/// </summary>
[JsonPropertyName("pathWitnesses")]
public IReadOnlyList<string>? PathWitnesses { get; set; }
/// <summary>
/// Whether result was served from cache.
/// </summary>
[JsonPropertyName("cacheHit")]
public bool CacheHit { get; set; }
/// <summary>
/// Job ID for async generation (if slice is large).
/// </summary>
[JsonPropertyName("jobId")]
public string? JobId { get; set; }
}
/// <summary>
/// Request to replay/verify a slice.
/// </summary>
public sealed class SliceReplayRequestDto
{
/// <summary>
/// Digest of the slice to replay.
/// </summary>
[JsonPropertyName("sliceDigest")]
public string? SliceDigest { get; set; }
}
/// <summary>
/// Response from slice replay verification.
/// </summary>
public sealed class SliceReplayResponseDto
{
/// <summary>
/// Whether the recomputed slice matches the original.
/// </summary>
[JsonPropertyName("match")]
public bool Match { get; set; }
/// <summary>
/// Digest of the original slice.
/// </summary>
[JsonPropertyName("originalDigest")]
public required string OriginalDigest { get; set; }
/// <summary>
/// Digest of the recomputed slice.
/// </summary>
[JsonPropertyName("recomputedDigest")]
public required string RecomputedDigest { get; set; }
/// <summary>
/// Detailed diff if slices don't match.
/// </summary>
[JsonPropertyName("diff")]
public SliceDiffDto? Diff { get; set; }
}
/// <summary>
/// Diff between two slices.
/// </summary>
public sealed class SliceDiffDto
{
[JsonPropertyName("missingNodes")]
public IReadOnlyList<string>? MissingNodes { get; set; }
[JsonPropertyName("extraNodes")]
public IReadOnlyList<string>? ExtraNodes { get; set; }
[JsonPropertyName("missingEdges")]
public IReadOnlyList<string>? MissingEdges { get; set; }
[JsonPropertyName("extraEdges")]
public IReadOnlyList<string>? ExtraEdges { get; set; }
[JsonPropertyName("verdictDiff")]
public string? VerdictDiff { get; set; }
}
/// <summary>
/// Slice cache statistics.
/// </summary>
public sealed class SliceCacheStatsDto
{
[JsonPropertyName("itemCount")]
public int ItemCount { get; set; }
[JsonPropertyName("hitCount")]
public long HitCount { get; set; }
[JsonPropertyName("missCount")]
public long MissCount { get; set; }
[JsonPropertyName("hitRate")]
public double HitRate { get; set; }
}
#endregion
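A query/retrieval sketch for the slice routes above; host, credentials, and identifiers are placeholders.

// Illustrative only: query reachability for a CVE, then fetch the attested slice as DSSE if it was generated synchronously.
using System;
using System.Net.Http;
using System.Net.Http.Json;

using var client = new HttpClient { BaseAddress = new Uri("https://scanner.example.com") };

var query = await client.PostAsJsonAsync("api/slices/query", new SliceQueryRequestDto
{
    ScanId = "scan-42",
    CveId = "CVE-2021-44228"
});
var result = await query.Content.ReadFromJsonAsync<SliceQueryResponseDto>();

if (result is not null && string.IsNullOrEmpty(result.JobId))
{
    using var dsseRequest = new HttpRequestMessage(HttpMethod.Get, $"api/slices/{result.SliceDigest}");
    dsseRequest.Headers.Accept.ParseAdd("application/dsse+json");
    using var dsse = await client.SendAsync(dsseRequest);
    dsse.EnsureSuccessStatusCode();
}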

View File

@@ -0,0 +1,163 @@
// -----------------------------------------------------------------------------
// ProofBundleEndpoints.cs
// Sprint: SPRINT_3900_0003_0001_exploit_path_inbox_proof_bundles
// Description: HTTP endpoints for proof bundle generation (attestations + evidence).
// -----------------------------------------------------------------------------
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing;
using StellaOps.Scanner.Triage.Models;
using StellaOps.Scanner.WebService.Security;
namespace StellaOps.Scanner.WebService.Endpoints.Triage;
/// <summary>
/// Endpoints for proof bundle generation - attested evidence packages.
/// </summary>
internal static class ProofBundleEndpoints
{
/// <summary>
/// Maps proof bundle endpoints.
/// </summary>
public static void MapProofBundleEndpoints(this RouteGroupBuilder apiGroup)
{
ArgumentNullException.ThrowIfNull(apiGroup);
var triageGroup = apiGroup.MapGroup("/triage")
.WithTags("Triage");
// POST /v1/triage/proof-bundle
triageGroup.MapPost("/proof-bundle", HandleGenerateProofBundleAsync)
.WithName("scanner.triage.proof-bundle")
.WithDescription("Generates an attested proof bundle for an exploit path.")
.Produces<ProofBundleResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status400BadRequest)
.RequireAuthorization(ScannerPolicies.TriageWrite);
}
private static async Task<IResult> HandleGenerateProofBundleAsync(
ProofBundleRequest request,
IProofBundleGenerator bundleGenerator,
HttpContext context,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(bundleGenerator);
if (string.IsNullOrWhiteSpace(request.PathId))
{
return Results.BadRequest(new
{
type = "validation-error",
title = "Invalid path ID",
detail = "Path ID is required."
});
}
var bundle = await bundleGenerator.GenerateBundleAsync(
request.PathId,
request.IncludeReachGraph,
request.IncludeCallTrace,
request.IncludeVexStatements,
request.AttestationKeyId,
cancellationToken);
var response = new ProofBundleResponse
{
PathId = request.PathId,
Bundle = bundle,
GeneratedAt = DateTimeOffset.UtcNow
};
return Results.Ok(response);
}
}
/// <summary>
/// Request for proof bundle generation.
/// </summary>
public sealed record ProofBundleRequest
{
public required string PathId { get; init; }
public bool IncludeReachGraph { get; init; } = true;
public bool IncludeCallTrace { get; init; } = true;
public bool IncludeVexStatements { get; init; } = true;
public string? AttestationKeyId { get; init; }
}
/// <summary>
/// Response containing proof bundle.
/// </summary>
public sealed record ProofBundleResponse
{
public required string PathId { get; init; }
public required ProofBundle Bundle { get; init; }
public required DateTimeOffset GeneratedAt { get; init; }
}
/// <summary>
/// Proof bundle containing attestations and evidence.
/// </summary>
public sealed record ProofBundle
{
public required string BundleId { get; init; }
public required string PathId { get; init; }
public required string ArtifactDigest { get; init; }
public required ExploitPathSummary Path { get; init; }
public required IReadOnlyList<EvidenceAttestation> Attestations { get; init; }
public ReachGraphEvidence? ReachGraph { get; init; }
public CallTraceEvidence? CallTrace { get; init; }
public IReadOnlyList<VexStatement>? VexStatements { get; init; }
public required BundleSignature Signature { get; init; }
public required DateTimeOffset CreatedAt { get; init; }
}
public sealed record ExploitPathSummary(
string PathId,
string PackagePurl,
string VulnerableSymbol,
string EntryPoint,
IReadOnlyList<string> CveIds,
string ReachabilityStatus);
public sealed record EvidenceAttestation(
string Type,
string Predicate,
string Subject,
string DsseEnvelope);
public sealed record ReachGraphEvidence(
IReadOnlyList<GraphNode> Nodes,
IReadOnlyList<GraphEdge> Edges);
public sealed record GraphNode(string Id, string Label, string Type);
public sealed record GraphEdge(string From, string To, string Label);
public sealed record CallTraceEvidence(
IReadOnlyList<CallFrame> Frames);
public sealed record CallFrame(string Function, string File, int Line);
public sealed record VexStatement(
string CveId,
string Status,
string Justification,
DateTimeOffset IssuedAt);
public sealed record BundleSignature(
string Algorithm,
string KeyId,
string Signature,
DateTimeOffset SignedAt);
public interface IProofBundleGenerator
{
Task<ProofBundle> GenerateBundleAsync(
string pathId,
bool includeReachGraph,
bool includeCallTrace,
bool includeVexStatements,
string? attestationKeyId,
CancellationToken ct);
}
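A sketch of requesting a proof bundle through the endpoint above; the "/v1" prefix and host follow the route comment and are assumptions.

// Illustrative only: generate an attested proof bundle for one exploit path.
using System;
using System.Net.Http;
using System.Net.Http.Json;

using var client = new HttpClient { BaseAddress = new Uri("https://scanner.example.com") };
var response = await client.PostAsJsonAsync("v1/triage/proof-bundle", new ProofBundleRequest
{
    PathId = "path-123",
    IncludeReachGraph = true,
    IncludeCallTrace = false,
    IncludeVexStatements = true
});
response.EnsureSuccessStatusCode();
var bundle = await response.Content.ReadFromJsonAsync<ProofBundleResponse>();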

View File

@@ -0,0 +1,122 @@
// -----------------------------------------------------------------------------
// TriageInboxEndpoints.cs
// Sprint: SPRINT_3900_0003_0001_exploit_path_inbox_proof_bundles
// Description: HTTP endpoints for triage inbox with grouped exploit paths.
// -----------------------------------------------------------------------------
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing;
using StellaOps.Scanner.Triage.Models;
using StellaOps.Scanner.Triage.Services;
using StellaOps.Scanner.WebService.Security;
namespace StellaOps.Scanner.WebService.Endpoints.Triage;
/// <summary>
/// Endpoints for triage inbox - grouped exploit paths.
/// </summary>
internal static class TriageInboxEndpoints
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Converters = { new JsonStringEnumConverter() }
};
/// <summary>
/// Maps triage inbox endpoints.
/// </summary>
public static void MapTriageInboxEndpoints(this RouteGroupBuilder apiGroup)
{
ArgumentNullException.ThrowIfNull(apiGroup);
var triageGroup = apiGroup.MapGroup("/triage")
.WithTags("Triage");
// GET /v1/triage/inbox?artifactDigest={digest}&filter={filter}
triageGroup.MapGet("/inbox", HandleGetInboxAsync)
.WithName("scanner.triage.inbox")
.WithDescription("Retrieves triage inbox with grouped exploit paths for an artifact.")
.Produces<TriageInboxResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status400BadRequest)
.RequireAuthorization(ScannerPolicies.TriageRead);
}
private static async Task<IResult> HandleGetInboxAsync(
string artifactDigest,
string? filter,
IExploitPathGroupingService groupingService,
IFindingQueryService findingService,
HttpContext context,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(groupingService);
ArgumentNullException.ThrowIfNull(findingService);
if (string.IsNullOrWhiteSpace(artifactDigest))
{
return Results.BadRequest(new
{
type = "validation-error",
title = "Invalid artifact digest",
detail = "Artifact digest is required."
});
}
var findings = await findingService.GetFindingsForArtifactAsync(artifactDigest, cancellationToken);
var paths = await groupingService.GroupFindingsAsync(artifactDigest, findings, cancellationToken);
var filteredPaths = ApplyFilter(paths, filter);
var response = new TriageInboxResponse
{
ArtifactDigest = artifactDigest,
TotalPaths = paths.Count,
FilteredPaths = filteredPaths.Count,
Filter = filter,
Paths = filteredPaths,
GeneratedAt = DateTimeOffset.UtcNow
};
return Results.Ok(response);
}
private static IReadOnlyList<ExploitPath> ApplyFilter(
IReadOnlyList<ExploitPath> paths,
string? filter)
{
if (string.IsNullOrWhiteSpace(filter))
return paths;
return filter.ToLowerInvariant() switch
{
"actionable" => paths.Where(p => !p.IsQuiet && p.Reachability is ReachabilityStatus.StaticallyReachable or ReachabilityStatus.RuntimeConfirmed).ToList(),
"noisy" => paths.Where(p => p.IsQuiet).ToList(),
"reachable" => paths.Where(p => p.Reachability is ReachabilityStatus.StaticallyReachable or ReachabilityStatus.RuntimeConfirmed).ToList(),
"runtime" => paths.Where(p => p.Reachability == ReachabilityStatus.RuntimeConfirmed).ToList(),
"critical" => paths.Where(p => p.RiskScore.CriticalCount > 0).ToList(),
"high" => paths.Where(p => p.RiskScore.HighCount > 0).ToList(),
_ => paths
};
}
}
/// <summary>
/// Response for triage inbox endpoint.
/// </summary>
public sealed record TriageInboxResponse
{
public required string ArtifactDigest { get; init; }
public required int TotalPaths { get; init; }
public required int FilteredPaths { get; init; }
public string? Filter { get; init; }
public required IReadOnlyList<ExploitPath> Paths { get; init; }
public required DateTimeOffset GeneratedAt { get; init; }
}
public interface IFindingQueryService
{
Task<IReadOnlyList<Finding>> GetFindingsForArtifactAsync(string artifactDigest, CancellationToken ct);
}
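A sketch of fetching the actionable view of the inbox above; the "/v1" prefix and host follow the route comment and are assumptions.

// Illustrative only: pull only actionable exploit paths for an artifact.
using System;
using System.Net.Http;
using System.Net.Http.Json;

using var client = new HttpClient { BaseAddress = new Uri("https://scanner.example.com") };
var inbox = await client.GetFromJsonAsync<TriageInboxResponse>(
    "v1/triage/inbox?artifactDigest=sha256:deadbeef&filter=actionable");
Console.WriteLine($"{inbox!.FilteredPaths} of {inbox.TotalPaths} paths are actionable");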

View File

@@ -8,6 +8,7 @@ using Microsoft.AspNetCore.Diagnostics;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Authentication;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using Serilog;
@@ -32,6 +33,7 @@ using StellaOps.Scanner.Surface.Env;
using StellaOps.Scanner.Surface.FS;
using StellaOps.Scanner.Surface.Secrets;
using StellaOps.Scanner.Surface.Validation;
using StellaOps.Scanner.Triage;
using StellaOps.Scanner.WebService.Diagnostics;
using StellaOps.Scanner.WebService.Determinism;
using StellaOps.Scanner.WebService.Endpoints;
@@ -68,6 +70,7 @@ var bootstrapOptions = builder.Configuration.BindOptions<ScannerWebServiceOption
});
builder.Services.AddStellaOpsCrypto(bootstrapOptions.Crypto);
builder.Services.AddControllers();
builder.Services.AddOptions<ScannerWebServiceOptions>()
.Bind(builder.Configuration.GetSection(ScannerWebServiceOptions.SectionName))
@@ -126,6 +129,8 @@ builder.Services.AddSingleton<IAttestationChainVerifier, AttestationChainVerifie
builder.Services.AddSingleton<IHumanApprovalAttestationService, HumanApprovalAttestationService>();
builder.Services.AddScoped<ICallGraphIngestionService, CallGraphIngestionService>();
builder.Services.AddScoped<ISbomIngestionService, SbomIngestionService>();
builder.Services.AddSingleton<ISbomUploadStore, InMemorySbomUploadStore>();
builder.Services.AddScoped<ISbomByosUploadService, SbomByosUploadService>();
builder.Services.AddSingleton<IPolicySnapshotRepository, InMemoryPolicySnapshotRepository>();
builder.Services.AddSingleton<IPolicyAuditRepository, InMemoryPolicyAuditRepository>();
builder.Services.AddSingleton<PolicySnapshotStore>();
@@ -136,6 +141,9 @@ builder.Services.AddSingleton<IScanManifestRepository, InMemoryScanManifestRepos
builder.Services.AddSingleton<IProofBundleRepository, InMemoryProofBundleRepository>();
builder.Services.AddSingleton<IScoringService, DeterministicScoringService>();
builder.Services.AddSingleton<IScanManifestSigner, ScanManifestSigner>();
builder.Services.AddDbContext<TriageDbContext>(options =>
options.UseNpgsql(bootstrapOptions.Storage.Dsn));
builder.Services.AddScoped<ITriageQueryService, TriageQueryService>();
// Register Storage.Repositories implementations for ManifestEndpoints
builder.Services.AddSingleton<StellaOps.Scanner.Storage.Repositories.IScanManifestRepository, TestManifestRepository>();
@@ -516,6 +524,7 @@ if (app.Environment.IsEnvironment("Testing"))
}
apiGroup.MapScanEndpoints(resolvedOptions.Api.ScansSegment);
apiGroup.MapSbomUploadEndpoints();
apiGroup.MapReachabilityDriftRootEndpoints();
apiGroup.MapProofSpineEndpoints(resolvedOptions.Api.SpinesSegment, resolvedOptions.Api.ScansSegment);
apiGroup.MapReplayEndpoints();
@@ -525,6 +534,7 @@ if (resolvedOptions.ScoreReplay.Enabled)
}
apiGroup.MapWitnessEndpoints(); // Sprint: SPRINT_3700_0001_0001
apiGroup.MapEpssEndpoints(); // Sprint: SPRINT_3410_0002_0001
apiGroup.MapSliceEndpoints(); // Sprint: SPRINT_3820_0001_0001
if (resolvedOptions.Features.EnablePolicyPreview)
{
@@ -534,6 +544,7 @@ if (resolvedOptions.Features.EnablePolicyPreview)
apiGroup.MapReportEndpoints(resolvedOptions.Api.ReportsSegment);
apiGroup.MapRuntimeEndpoints(resolvedOptions.Api.RuntimeSegment);
app.MapControllers();
app.MapOpenApiIfAvailable();
await app.RunAsync().ConfigureAwait(false);

View File

@@ -13,6 +13,7 @@ using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.Triage.Entities;
namespace StellaOps.Scanner.WebService.Services;
@@ -94,44 +95,137 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
cancellationToken).ConfigureAwait(false);
// Build score explanation (simplified local computation)
var scoreExplanation = BuildScoreExplanation(finding, explanation);
var scoreInfo = BuildScoreInfo(finding, explanation);
// Compose the response
var now = _timeProvider.GetUtcNow();
// Calculate expiry based on evidence sources
var (expiresAt, isStale) = CalculateTtlAndStaleness(now, explanation);
var freshness = BuildFreshnessInfo(now, explanation, observedAt: now);
return new FindingEvidenceResponse
{
FindingId = findingId,
Cve = cveId,
Component = BuildComponentRef(purl),
ReachablePath = explanation?.PathWitness,
Entrypoint = BuildEntrypointProof(explanation),
Component = BuildComponentInfo(purl),
ReachablePath = explanation?.PathWitness ?? Array.Empty<string>(),
Entrypoint = BuildEntrypointInfo(explanation),
Boundary = null, // Boundary extraction requires RichGraph, deferred to SPRINT_3800_0003_0002
Vex = null, // VEX requires Excititor query, deferred to SPRINT_3800_0003_0002
ScoreExplain = scoreExplanation,
Score = scoreInfo,
LastSeen = now,
ExpiresAt = expiresAt,
IsStale = isStale,
AttestationRefs = BuildAttestationRefs(scan, explanation)
AttestationRefs = BuildAttestationRefs(scan, explanation) ?? Array.Empty<string>(),
Freshness = freshness
};
}
/// <inheritdoc />
public Task<FindingEvidenceResponse> ComposeAsync(
TriageFinding finding,
bool includeRaw,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(finding);
var now = _timeProvider.GetUtcNow();
var latestReachability = finding.ReachabilityResults
.OrderByDescending(r => r.ComputedAt)
.FirstOrDefault();
var latestRisk = finding.RiskResults
.OrderByDescending(r => r.ComputedAt)
.FirstOrDefault();
var latestVex = finding.EffectiveVexRecords
.OrderByDescending(r => r.CollectedAt)
.FirstOrDefault();
var attestationRefs = finding.EvidenceArtifacts
.OrderByDescending(a => a.CreatedAt)
.Select(a => a.ContentHash)
.Distinct(StringComparer.OrdinalIgnoreCase)
.ToList();
var scoreInfo = latestRisk is null
? null
: new ScoreInfo
{
RiskScore = latestRisk.Score,
Contributions = new[]
{
new ScoreContribution
{
Factor = "policy",
Value = latestRisk.Score,
Reason = latestRisk.Why
}
}
};
var vexInfo = latestVex is null
? null
: new VexStatusInfo
{
Status = latestVex.Status.ToString().ToLowerInvariant(),
Justification = latestVex.SourceDomain,
Timestamp = latestVex.ValidFrom,
Issuer = latestVex.Issuer
};
var entrypoint = latestReachability is null
? null
: new EntrypointInfo
{
Type = latestReachability.Reachable switch
{
TriageReachability.Yes => "http",
TriageReachability.No => "internal",
_ => "internal"
},
Route = latestReachability.StaticProofRef,
Auth = null
};
var freshness = BuildFreshnessInfo(
now,
explanation: null,
observedAt: finding.LastSeenAt);
var cve = !string.IsNullOrWhiteSpace(finding.CveId)
? finding.CveId
: finding.RuleId ?? "unknown";
return Task.FromResult(new FindingEvidenceResponse
{
FindingId = finding.Id.ToString(),
Cve = cve,
Component = BuildComponentInfo(finding.Purl),
ReachablePath = Array.Empty<string>(),
Entrypoint = entrypoint,
Vex = vexInfo,
LastSeen = finding.LastSeenAt,
AttestationRefs = attestationRefs,
Score = scoreInfo,
Boundary = null,
Freshness = freshness
});
}
/// <summary>
/// Calculates the evidence expiry time and staleness based on evidence sources.
/// Uses the minimum expiry time from all evidence sources.
/// </summary>
private (DateTimeOffset expiresAt, bool isStale) CalculateTtlAndStaleness(
private FreshnessInfo BuildFreshnessInfo(
DateTimeOffset now,
ReachabilityExplanation? explanation)
ReachabilityExplanation? explanation,
DateTimeOffset? observedAt)
{
var defaultTtl = TimeSpan.FromDays(_options.DefaultEvidenceTtlDays);
var warningThreshold = TimeSpan.FromDays(_options.StaleWarningThresholdDays);
// Default: evidence expires from when it was computed (now)
var reachabilityExpiry = now.Add(defaultTtl);
var baseTimestamp = observedAt ?? now;
var reachabilityExpiry = baseTimestamp.Add(defaultTtl);
// If we have evidence chain with timestamps, use those instead
// For now, we use now as the base timestamp since ReachabilityExplanation
@@ -153,7 +247,16 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
_logger.LogDebug("Evidence nearing expiry: expires in {TimeRemaining}", expiresAt - now);
}
return (expiresAt, isStale);
var ttlRemaining = expiresAt > now
? (int)Math.Floor((expiresAt - now).TotalHours)
: 0;
return new FreshnessInfo
{
IsStale = isStale,
ExpiresAt = expiresAt,
TtlRemainingHours = ttlRemaining
};
}
private static (string? cveId, string? purl) ParseFindingId(string findingId)
@@ -183,7 +286,7 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
return (cveId, purl);
}
private static ComponentRef BuildComponentRef(string purl)
private static ComponentInfo BuildComponentInfo(string purl)
{
// Parse PURL: "pkg:ecosystem/name@version"
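        // e.g. "pkg:npm/lodash@4.17.21" -> ecosystem "npm", name "lodash", version "4.17.21"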
var parts = purl.Replace("pkg:", "", StringComparison.OrdinalIgnoreCase)
@@ -193,16 +296,16 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
var name = parts.Length > 1 ? parts[1] : "unknown";
var version = parts.Length > 2 ? parts[2] : "unknown";
return new ComponentRef
return new ComponentInfo
{
Purl = purl,
Name = name,
Version = version,
Type = ecosystem
Ecosystem = ecosystem
};
}
private static EntrypointProof? BuildEntrypointProof(ReachabilityExplanation? explanation)
private static EntrypointInfo? BuildEntrypointInfo(ReachabilityExplanation? explanation)
{
if (explanation?.PathWitness is null || explanation.PathWitness.Count == 0)
{
@@ -212,11 +315,10 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
var firstHop = explanation.PathWitness[0];
var entrypointType = InferEntrypointType(firstHop);
return new EntrypointProof
return new EntrypointInfo
{
Type = entrypointType,
Fqn = firstHop,
Phase = "runtime"
Route = firstHop
};
}
@@ -225,25 +327,25 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
var lower = fqn.ToLowerInvariant();
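        // e.g. "MyApp.Controllers.OrdersController.Get" -> "http"; "MyApp.Program.Main" -> "cli"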
if (lower.Contains("controller") || lower.Contains("handler") || lower.Contains("http"))
{
return "http_handler";
return "http";
}
if (lower.Contains("grpc") || lower.Contains("rpc"))
{
return "grpc_method";
return "grpc";
}
if (lower.Contains("main") || lower.Contains("program"))
{
return "cli_command";
return "cli";
}
return "internal";
}
private ScoreExplanationDto BuildScoreExplanation(
private ScoreInfo BuildScoreInfo(
ReachabilityFinding finding,
ReachabilityExplanation? explanation)
{
// Simplified score computation based on reachability status
var contributions = new List<ScoreContributionDto>();
var contributions = new List<ScoreContribution>();
double riskScore = 0.0;
// Reachability contribution (0-25 points)
@@ -258,26 +360,22 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
if (reachabilityContribution > 0)
{
contributions.Add(new ScoreContributionDto
contributions.Add(new ScoreContribution
{
Factor = "reachability",
Weight = 1.0,
RawValue = reachabilityContribution,
Contribution = reachabilityContribution,
Explanation = reachabilityExplanation
Value = Convert.ToInt32(Math.Round(reachabilityContribution)),
Reason = reachabilityExplanation
});
riskScore += reachabilityContribution;
}
// Confidence contribution (0-10 points)
var confidenceContribution = finding.Confidence * 10.0;
contributions.Add(new ScoreContributionDto
contributions.Add(new ScoreContribution
{
Factor = "confidence",
Weight = 1.0,
RawValue = finding.Confidence,
Contribution = confidenceContribution,
Explanation = $"Analysis confidence: {finding.Confidence:P0}"
Value = Convert.ToInt32(Math.Round(confidenceContribution)),
Reason = $"Analysis confidence: {finding.Confidence:P0}"
});
riskScore += confidenceContribution;
@@ -289,13 +387,11 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
if (gateCount > 0)
{
            var gateDiscount = Math.Max(gateCount * -3.0, -10.0); // -3 per gate, capped at a -10 total discount
contributions.Add(new ScoreContributionDto
contributions.Add(new ScoreContribution
{
Factor = "gate_protection",
Weight = 1.0,
RawValue = gateCount,
Contribution = gateDiscount,
Explanation = $"{gateCount} protective gate(s) detected"
Value = Convert.ToInt32(Math.Round(gateDiscount)),
Reason = $"{gateCount} protective gate(s) detected"
});
riskScore += gateDiscount;
}
@@ -304,12 +400,10 @@ public sealed class EvidenceCompositionService : IEvidenceCompositionService
// Clamp to 0-100
riskScore = Math.Clamp(riskScore, 0.0, 100.0);
return new ScoreExplanationDto
return new ScoreInfo
{
Kind = "stellaops_evidence_v1",
RiskScore = riskScore,
Contributions = contributions,
LastSeen = _timeProvider.GetUtcNow()
RiskScore = Convert.ToInt32(Math.Round(riskScore)),
Contributions = contributions
};
}

View File

@@ -8,6 +8,7 @@ using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.Triage.Entities;
namespace StellaOps.Scanner.WebService.Services;
@@ -30,4 +31,15 @@ public interface IEvidenceCompositionService
ScanId scanId,
string findingId,
CancellationToken cancellationToken = default);
/// <summary>
/// Composes evidence for a triage finding.
/// </summary>
/// <param name="finding">The triage finding entity.</param>
/// <param name="includeRaw">Whether to include raw evidence pointers.</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task<FindingEvidenceResponse> ComposeAsync(
TriageFinding finding,
bool includeRaw,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,94 @@
using StellaOps.Scanner.Reachability.Slices;
using StellaOps.Scanner.WebService.Domain;
namespace StellaOps.Scanner.WebService.Services;
/// <summary>
/// Query request for reachability slices.
/// </summary>
public sealed record SliceQueryRequest
{
public string? CveId { get; init; }
public IReadOnlyList<string>? Symbols { get; init; }
public IReadOnlyList<string>? Entrypoints { get; init; }
public string? PolicyHash { get; init; }
public required string ScanId { get; init; }
}
/// <summary>
/// Response from slice query.
/// </summary>
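/// <remarks>
/// <see cref="CacheHit"/> indicates the verdict was served from the slice cache;
/// <see cref="JobId"/> is reserved for asynchronous generation when a slice exceeds
/// the synchronous size limit.
/// </remarks>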
public sealed record SliceQueryResponse
{
public required string SliceDigest { get; init; }
public required string Verdict { get; init; }
public required double Confidence { get; init; }
public IReadOnlyList<string>? PathWitnesses { get; init; }
public required bool CacheHit { get; init; }
public string? JobId { get; init; }
}
/// <summary>
/// Replay request for slice verification.
/// </summary>
public sealed record SliceReplayRequest
{
public required string SliceDigest { get; init; }
}
/// <summary>
/// Response from slice replay verification.
/// </summary>
public sealed record SliceReplayResponse
{
public required bool Match { get; init; }
public required string OriginalDigest { get; init; }
public required string RecomputedDigest { get; init; }
public SliceDiff? Diff { get; init; }
}
/// <summary>
/// Diff information when replay doesn't match.
/// </summary>
public sealed record SliceDiff
{
public IReadOnlyList<string>? MissingNodes { get; init; }
public IReadOnlyList<string>? ExtraNodes { get; init; }
public IReadOnlyList<string>? MissingEdges { get; init; }
public IReadOnlyList<string>? ExtraEdges { get; init; }
public string? VerdictDiff { get; init; }
}
/// <summary>
/// Service for querying and managing reachability slices.
/// </summary>
public interface ISliceQueryService
{
/// <summary>
/// Query reachability for CVE/symbols and generate slice.
/// </summary>
Task<SliceQueryResponse> QueryAsync(
SliceQueryRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Retrieve an attested slice by digest.
/// </summary>
Task<ReachabilitySlice?> GetSliceAsync(
string digest,
CancellationToken cancellationToken = default);
/// <summary>
/// Retrieve DSSE envelope for a slice.
/// </summary>
Task<object?> GetSliceDsseAsync(
string digest,
CancellationToken cancellationToken = default);
/// <summary>
/// Verify slice reproducibility by recomputing.
/// </summary>
Task<SliceReplayResponse> ReplayAsync(
SliceReplayRequest request,
CancellationToken cancellationToken = default);
}
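// Usage sketch (assumes an ISliceQueryService instance resolved from DI):
//   var result = await sliceQueryService.QueryAsync(
//       new SliceQueryRequest { ScanId = scanId, CveId = "CVE-2021-44228" }, ct);
//   var replay = await sliceQueryService.ReplayAsync(
//       new SliceReplayRequest { SliceDigest = result.SliceDigest }, ct);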

View File

@@ -0,0 +1,8 @@
using StellaOps.Scanner.Triage.Entities;
namespace StellaOps.Scanner.WebService.Services;
public interface ITriageQueryService
{
Task<TriageFinding?> GetFindingAsync(string findingId, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,640 @@
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Storage.Catalog;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.WebService.Utilities;
namespace StellaOps.Scanner.WebService.Services;
internal interface ISbomByosUploadService
{
Task<(SbomUploadResponseDto Response, SbomValidationSummaryDto Validation)> UploadAsync(
SbomUploadRequestDto request,
CancellationToken cancellationToken);
Task<SbomUploadRecordDto?> GetRecordAsync(string sbomId, CancellationToken cancellationToken);
}
internal sealed class SbomByosUploadService : ISbomByosUploadService
{
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = false
};
private readonly IScanCoordinator _scanCoordinator;
private readonly ISbomIngestionService _ingestionService;
private readonly ISbomUploadStore _uploadStore;
private readonly TimeProvider _timeProvider;
private readonly ILogger<SbomByosUploadService> _logger;
public SbomByosUploadService(
IScanCoordinator scanCoordinator,
ISbomIngestionService ingestionService,
ISbomUploadStore uploadStore,
TimeProvider timeProvider,
ILogger<SbomByosUploadService> logger)
{
_scanCoordinator = scanCoordinator ?? throw new ArgumentNullException(nameof(scanCoordinator));
_ingestionService = ingestionService ?? throw new ArgumentNullException(nameof(ingestionService));
_uploadStore = uploadStore ?? throw new ArgumentNullException(nameof(uploadStore));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<(SbomUploadResponseDto Response, SbomValidationSummaryDto Validation)> UploadAsync(
SbomUploadRequestDto request,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(request);
var errors = new List<string>();
if (string.IsNullOrWhiteSpace(request.ArtifactRef))
{
errors.Add("artifactRef is required.");
}
if (!string.IsNullOrWhiteSpace(request.ArtifactDigest) && !request.ArtifactDigest.Contains(':', StringComparison.Ordinal))
{
errors.Add("artifactDigest must include algorithm prefix (e.g. sha256:...).");
}
var document = TryParseDocument(request, out var parseErrors);
if (parseErrors.Count > 0)
{
errors.AddRange(parseErrors);
}
if (errors.Count > 0)
{
var validation = new SbomValidationSummaryDto
{
Valid = false,
Errors = errors
};
return (new SbomUploadResponseDto { ValidationResult = validation }, validation);
}
using (document)
{
var root = document!.RootElement;
var (format, formatVersion) = ResolveFormat(root, request.Format);
var validationWarnings = new List<string>();
var validationErrors = ValidateFormat(root, format, formatVersion, validationWarnings);
if (validationErrors.Count > 0)
{
var invalid = new SbomValidationSummaryDto
{
Valid = false,
Errors = validationErrors
};
return (new SbomUploadResponseDto { ValidationResult = invalid }, invalid);
}
var normalized = Normalize(root, format);
var (qualityScore, qualityWarnings) = Score(normalized);
var digest = ComputeDigest(root);
var sbomId = CatalogIdFactory.CreateArtifactId(ArtifactDocumentType.ImageBom, digest);
var warnings = new List<string>();
warnings.AddRange(validationWarnings);
warnings.AddRange(qualityWarnings);
var metadata = BuildMetadata(request, format, formatVersion, digest, sbomId);
var target = new ScanTarget(request.ArtifactRef.Trim(), request.ArtifactDigest?.Trim()).Normalize();
var scanId = ScanIdGenerator.Create(target, force: false, clientRequestId: null, metadata);
var ingestion = await _ingestionService
.IngestAsync(scanId, document, format, digest, cancellationToken)
.ConfigureAwait(false);
var submission = new ScanSubmission(target, force: false, clientRequestId: null, metadata);
var scanResult = await _scanCoordinator.SubmitAsync(submission, cancellationToken).ConfigureAwait(false);
if (!string.Equals(scanResult.Snapshot.ScanId.Value, scanId.Value, StringComparison.Ordinal))
{
_logger.LogWarning(
"BYOS scan id mismatch. computed={Computed} submitted={Submitted}",
scanId.Value,
scanResult.Snapshot.ScanId.Value);
}
var now = _timeProvider.GetUtcNow();
var validation = new SbomValidationSummaryDto
{
Valid = true,
QualityScore = qualityScore,
Warnings = warnings,
ComponentCount = normalized.Count
};
var response = new SbomUploadResponseDto
{
SbomId = ingestion.SbomId,
ArtifactRef = target.Reference ?? string.Empty,
ArtifactDigest = target.Digest,
Digest = ingestion.Digest,
Format = format,
FormatVersion = formatVersion,
ValidationResult = validation,
AnalysisJobId = scanResult.Snapshot.ScanId.Value,
UploadedAtUtc = now
};
var record = new SbomUploadRecord(
SbomId: ingestion.SbomId,
ArtifactRef: target.Reference ?? string.Empty,
ArtifactDigest: target.Digest,
Digest: ingestion.Digest,
Format: format,
FormatVersion: formatVersion,
AnalysisJobId: scanResult.Snapshot.ScanId.Value,
ComponentCount: normalized.Count,
QualityScore: qualityScore,
Warnings: warnings,
Source: request.Source,
CreatedAtUtc: now);
await _uploadStore.AddAsync(record, cancellationToken).ConfigureAwait(false);
return (response, validation);
}
}
public async Task<SbomUploadRecordDto?> GetRecordAsync(string sbomId, CancellationToken cancellationToken)
{
var record = await _uploadStore.GetAsync(sbomId, cancellationToken).ConfigureAwait(false);
if (record is null)
{
return null;
}
return new SbomUploadRecordDto
{
SbomId = record.SbomId,
ArtifactRef = record.ArtifactRef,
ArtifactDigest = record.ArtifactDigest,
Digest = record.Digest,
Format = record.Format,
FormatVersion = record.FormatVersion,
AnalysisJobId = record.AnalysisJobId,
ComponentCount = record.ComponentCount,
QualityScore = record.QualityScore,
Warnings = record.Warnings,
Source = record.Source,
CreatedAtUtc = record.CreatedAtUtc
};
}
private static JsonDocument? TryParseDocument(SbomUploadRequestDto request, out List<string> errors)
{
errors = new List<string>();
if (request.Sbom is { } sbomElement && sbomElement.ValueKind == JsonValueKind.Object)
{
var raw = sbomElement.GetRawText();
return JsonDocument.Parse(raw);
}
if (!string.IsNullOrWhiteSpace(request.SbomBase64))
{
try
{
var bytes = Convert.FromBase64String(request.SbomBase64);
return JsonDocument.Parse(bytes);
}
catch (FormatException)
{
errors.Add("sbomBase64 is not valid base64.");
return null;
}
catch (JsonException ex)
{
errors.Add($"Invalid SBOM JSON: {ex.Message}");
return null;
}
}
errors.Add("sbom or sbomBase64 is required.");
return null;
}
private static (string Format, string FormatVersion) ResolveFormat(JsonElement root, string? requestedFormat)
{
var format = string.IsNullOrWhiteSpace(requestedFormat)
? DetectFormat(root)
: requestedFormat.Trim().ToLowerInvariant();
if (string.IsNullOrWhiteSpace(format))
{
return (string.Empty, string.Empty);
}
var formatVersion = format switch
{
SbomFormats.CycloneDx => GetCycloneDxVersion(root),
SbomFormats.Spdx => GetSpdxVersion(root),
_ => string.Empty
};
return (format, formatVersion);
}
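    // Sniffs the SBOM format from well-known top-level properties: CycloneDX documents declare
    // "bomFormat": "CycloneDX", SPDX documents declare a non-empty "spdxVersion".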
private static string? DetectFormat(JsonElement root)
{
if (root.ValueKind != JsonValueKind.Object)
{
return null;
}
if (root.TryGetProperty("bomFormat", out var bomFormat)
&& bomFormat.ValueKind == JsonValueKind.String
&& string.Equals(bomFormat.GetString(), "CycloneDX", StringComparison.OrdinalIgnoreCase))
{
return SbomFormats.CycloneDx;
}
if (root.TryGetProperty("spdxVersion", out var spdxVersion)
&& spdxVersion.ValueKind == JsonValueKind.String
&& !string.IsNullOrWhiteSpace(spdxVersion.GetString()))
{
return SbomFormats.Spdx;
}
return null;
}
private static IReadOnlyList<string> ValidateFormat(
JsonElement root,
string format,
string formatVersion,
List<string> warnings)
{
var errors = new List<string>();
if (string.IsNullOrWhiteSpace(format))
{
errors.Add("Unable to detect SBOM format.");
return errors;
}
if (string.Equals(format, SbomFormats.CycloneDx, StringComparison.OrdinalIgnoreCase))
{
if (!root.TryGetProperty("bomFormat", out var bomFormat) || bomFormat.ValueKind != JsonValueKind.String)
{
errors.Add("CycloneDX SBOM must include bomFormat.");
}
if (string.IsNullOrWhiteSpace(formatVersion))
{
errors.Add("CycloneDX SBOM must include specVersion.");
}
else if (!IsSupportedCycloneDx(formatVersion))
{
errors.Add($"CycloneDX specVersion '{formatVersion}' is not supported (1.4-1.6).");
}
if (!root.TryGetProperty("components", out var components) || components.ValueKind != JsonValueKind.Array)
{
warnings.Add("CycloneDX SBOM does not include a components array.");
}
}
else if (string.Equals(format, SbomFormats.Spdx, StringComparison.OrdinalIgnoreCase))
{
if (!root.TryGetProperty("spdxVersion", out var spdxVersion) || spdxVersion.ValueKind != JsonValueKind.String)
{
errors.Add("SPDX SBOM must include spdxVersion.");
}
if (string.IsNullOrWhiteSpace(formatVersion))
{
errors.Add("SPDX SBOM version could not be determined.");
}
else if (!IsSupportedSpdx(formatVersion))
{
errors.Add($"SPDX version '{formatVersion}' is not supported (2.3, 3.0).");
}
else if (formatVersion.StartsWith("3.0", StringComparison.OrdinalIgnoreCase))
{
warnings.Add("SPDX 3.0 schema validation is pending; structural checks only.");
}
if (!root.TryGetProperty("packages", out var packages) || packages.ValueKind != JsonValueKind.Array)
{
warnings.Add("SPDX SBOM does not include a packages array.");
}
}
else
{
errors.Add($"Unsupported SBOM format '{format}'.");
}
return errors;
}
private static bool IsSupportedCycloneDx(string version)
=> version.StartsWith("1.4", StringComparison.OrdinalIgnoreCase)
|| version.StartsWith("1.5", StringComparison.OrdinalIgnoreCase)
|| version.StartsWith("1.6", StringComparison.OrdinalIgnoreCase);
private static bool IsSupportedSpdx(string version)
=> version.StartsWith("2.3", StringComparison.OrdinalIgnoreCase)
|| version.StartsWith("3.0", StringComparison.OrdinalIgnoreCase);
private static IReadOnlyList<SbomNormalizedComponent> Normalize(JsonElement root, string format)
{
if (string.Equals(format, SbomFormats.CycloneDx, StringComparison.OrdinalIgnoreCase))
{
return NormalizeCycloneDx(root);
}
if (string.Equals(format, SbomFormats.Spdx, StringComparison.OrdinalIgnoreCase))
{
return NormalizeSpdx(root);
}
return Array.Empty<SbomNormalizedComponent>();
}
private static IReadOnlyList<SbomNormalizedComponent> NormalizeCycloneDx(JsonElement root)
{
if (!root.TryGetProperty("components", out var components) || components.ValueKind != JsonValueKind.Array)
{
return Array.Empty<SbomNormalizedComponent>();
}
var results = new List<SbomNormalizedComponent>();
foreach (var component in components.EnumerateArray())
{
if (component.ValueKind != JsonValueKind.Object)
{
continue;
}
var name = GetString(component, "name");
var version = GetString(component, "version");
var purl = GetString(component, "purl");
var license = ExtractCycloneDxLicense(component);
if (string.IsNullOrWhiteSpace(name) && string.IsNullOrWhiteSpace(purl))
{
continue;
}
var key = NormalizeKey(purl, name);
results.Add(new SbomNormalizedComponent(key, name, version, purl, license));
}
return results
.OrderBy(c => c.Key, StringComparer.Ordinal)
.ThenBy(c => c.Version ?? string.Empty, StringComparer.Ordinal)
.ToList();
}
private static IReadOnlyList<SbomNormalizedComponent> NormalizeSpdx(JsonElement root)
{
if (!root.TryGetProperty("packages", out var packages) || packages.ValueKind != JsonValueKind.Array)
{
return Array.Empty<SbomNormalizedComponent>();
}
var results = new List<SbomNormalizedComponent>();
foreach (var package in packages.EnumerateArray())
{
if (package.ValueKind != JsonValueKind.Object)
{
continue;
}
var name = GetString(package, "name");
var version = GetString(package, "versionInfo");
var purl = ExtractSpdxPurl(package);
var license = GetString(package, "licenseDeclared");
if (string.IsNullOrWhiteSpace(license))
{
license = GetString(package, "licenseConcluded");
}
if (string.IsNullOrWhiteSpace(name) && string.IsNullOrWhiteSpace(purl))
{
continue;
}
var key = NormalizeKey(purl, name);
results.Add(new SbomNormalizedComponent(key, name, version, purl, license));
}
return results
.OrderBy(c => c.Key, StringComparer.Ordinal)
.ThenBy(c => c.Version ?? string.Empty, StringComparer.Ordinal)
.ToList();
}
private static (double Score, IReadOnlyList<string> Warnings) Score(IReadOnlyList<SbomNormalizedComponent> components)
{
if (components is null || components.Count == 0)
{
return (0.0, new[] { "No components detected in SBOM." });
}
var total = components.Count;
var withPurl = components.Count(c => !string.IsNullOrWhiteSpace(c.Purl));
var withVersion = components.Count(c => !string.IsNullOrWhiteSpace(c.Version));
var withLicense = components.Count(c => !string.IsNullOrWhiteSpace(c.License));
var purlRatio = (double)withPurl / total;
var versionRatio = (double)withVersion / total;
var licenseRatio = (double)withLicense / total;
var score = (purlRatio * 0.4) + (versionRatio * 0.3) + (licenseRatio * 0.3);
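        // Example: 10 components with 8 PURLs, 10 versions and 5 licenses scores
        // (0.8 * 0.4) + (1.0 * 0.3) + (0.5 * 0.3) = 0.77.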
var warnings = new List<string>();
if (withPurl < total)
{
warnings.Add($"{total - withPurl} components missing PURL values.");
}
if (withVersion < total)
{
warnings.Add($"{total - withVersion} components missing version values.");
}
if (withLicense < total)
{
warnings.Add($"{total - withLicense} components missing license values.");
}
return (Math.Round(score, 2), warnings);
}
private static string ComputeDigest(JsonElement root)
{
var bytes = JsonSerializer.SerializeToUtf8Bytes(root, JsonOptions);
var hash = SHA256.HashData(bytes);
return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
}
private static Dictionary<string, string> BuildMetadata(
SbomUploadRequestDto request,
string format,
string formatVersion,
string digest,
string sbomId)
{
var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
{
["sbom.digest"] = digest,
["sbom.id"] = sbomId,
["sbom.format"] = format,
["sbom.format_version"] = formatVersion
};
AddIfPresent(metadata, "byos.source.tool", request.Source?.Tool);
AddIfPresent(metadata, "byos.source.version", request.Source?.Version);
AddIfPresent(metadata, "byos.ci.build_id", request.Source?.CiContext?.BuildId);
AddIfPresent(metadata, "byos.ci.repository", request.Source?.CiContext?.Repository);
return metadata;
}
private static void AddIfPresent(Dictionary<string, string> metadata, string key, string? value)
{
if (!string.IsNullOrWhiteSpace(value))
{
metadata[key] = value.Trim();
}
}
private static string GetCycloneDxVersion(JsonElement root)
{
var spec = GetString(root, "specVersion");
return string.IsNullOrWhiteSpace(spec) ? string.Empty : spec.Trim();
}
private static string GetSpdxVersion(JsonElement root)
{
var version = GetString(root, "spdxVersion");
if (string.IsNullOrWhiteSpace(version))
{
return string.Empty;
}
var trimmed = version.Trim();
return trimmed.StartsWith("SPDX-", StringComparison.OrdinalIgnoreCase)
? trimmed[5..]
: trimmed;
}
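    // Builds a stable component key used for deterministic ordering: the PURL with qualifiers
    // and version removed (e.g. "pkg:npm/lodash@4.17.21?arch=x64" -> "pkg:npm/lodash"),
    // falling back to the trimmed component name.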
private static string NormalizeKey(string? purl, string name)
{
if (!string.IsNullOrWhiteSpace(purl))
{
var trimmed = purl.Trim();
var qualifierIndex = trimmed.IndexOf('?');
if (qualifierIndex > 0)
{
trimmed = trimmed[..qualifierIndex];
}
var atIndex = trimmed.LastIndexOf('@');
if (atIndex > 4)
{
trimmed = trimmed[..atIndex];
}
return trimmed;
}
return name.Trim();
}
private static string? ExtractCycloneDxLicense(JsonElement component)
{
if (!component.TryGetProperty("licenses", out var licenses) || licenses.ValueKind != JsonValueKind.Array)
{
return null;
}
foreach (var entry in licenses.EnumerateArray())
{
if (entry.ValueKind != JsonValueKind.Object)
{
continue;
}
if (entry.TryGetProperty("license", out var licenseObj) && licenseObj.ValueKind == JsonValueKind.Object)
{
var id = GetString(licenseObj, "id");
if (!string.IsNullOrWhiteSpace(id))
{
return id;
}
var name = GetString(licenseObj, "name");
if (!string.IsNullOrWhiteSpace(name))
{
return name;
}
}
}
return null;
}
private static string? ExtractSpdxPurl(JsonElement package)
{
if (!package.TryGetProperty("externalRefs", out var refs) || refs.ValueKind != JsonValueKind.Array)
{
return null;
}
foreach (var reference in refs.EnumerateArray())
{
if (reference.ValueKind != JsonValueKind.Object)
{
continue;
}
var referenceType = GetString(reference, "referenceType");
if (!string.Equals(referenceType, "purl", StringComparison.OrdinalIgnoreCase))
{
continue;
}
var locator = GetString(reference, "referenceLocator");
if (!string.IsNullOrWhiteSpace(locator))
{
return locator;
}
}
return null;
}
private static string GetString(JsonElement element, string property)
{
if (element.ValueKind != JsonValueKind.Object)
{
return string.Empty;
}
if (!element.TryGetProperty(property, out var prop))
{
return string.Empty;
}
return prop.ValueKind == JsonValueKind.String ? prop.GetString() ?? string.Empty : string.Empty;
}
private sealed record SbomNormalizedComponent(
string Key,
string Name,
string? Version,
string? Purl,
string? License);
}

View File

@@ -146,7 +146,7 @@ internal sealed class SbomIngestionService : ISbomIngestionService
{
if (string.Equals(format, SbomFormats.CycloneDx, StringComparison.OrdinalIgnoreCase))
{
return (ArtifactDocumentFormat.CycloneDxJson, "application/vnd.cyclonedx+json");
return (ArtifactDocumentFormat.CycloneDxJson, "application/vnd.cyclonedx+json; version=1.7");
}
if (string.Equals(format, SbomFormats.Spdx, StringComparison.OrdinalIgnoreCase))

View File

@@ -0,0 +1,50 @@
using System.Collections.Concurrent;
using StellaOps.Scanner.WebService.Contracts;
namespace StellaOps.Scanner.WebService.Services;
internal sealed record SbomUploadRecord(
string SbomId,
string ArtifactRef,
string? ArtifactDigest,
string Digest,
string Format,
string FormatVersion,
string AnalysisJobId,
int ComponentCount,
double QualityScore,
IReadOnlyList<string> Warnings,
SbomUploadSourceDto? Source,
DateTimeOffset CreatedAtUtc);
internal interface ISbomUploadStore
{
Task AddAsync(SbomUploadRecord record, CancellationToken cancellationToken);
Task<SbomUploadRecord?> GetAsync(string sbomId, CancellationToken cancellationToken);
}
internal sealed class InMemorySbomUploadStore : ISbomUploadStore
{
private readonly ConcurrentDictionary<string, SbomUploadRecord> _records = new(StringComparer.OrdinalIgnoreCase);
public Task AddAsync(SbomUploadRecord record, CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(record);
cancellationToken.ThrowIfCancellationRequested();
_records[record.SbomId] = record;
return Task.CompletedTask;
}
public Task<SbomUploadRecord?> GetAsync(string sbomId, CancellationToken cancellationToken)
{
cancellationToken.ThrowIfCancellationRequested();
if (string.IsNullOrWhiteSpace(sbomId))
{
return Task.FromResult<SbomUploadRecord?>(null);
}
_records.TryGetValue(sbomId.Trim(), out var record);
return Task.FromResult(record);
}
}

View File

@@ -0,0 +1,344 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.Scanner.Cache.Abstractions;
using StellaOps.Scanner.Core;
using StellaOps.Scanner.Reachability;
using StellaOps.Scanner.Reachability.Slices;
using StellaOps.Scanner.Reachability.Slices.Replay;
using StellaOps.Scanner.WebService.Domain;
namespace StellaOps.Scanner.WebService.Services;
/// <summary>
/// Options for slice query service.
/// </summary>
public sealed class SliceQueryServiceOptions
{
/// <summary>
/// Maximum slice size (nodes + edges) for synchronous generation.
/// Larger slices return 202 Accepted with job ID.
/// </summary>
public int MaxSyncSliceSize { get; set; } = 10_000;
/// <summary>
/// Whether to cache generated slices.
/// </summary>
public bool EnableCache { get; set; } = true;
}
/// <summary>
/// Service for querying and managing reachability slices.
/// </summary>
public sealed class SliceQueryService : ISliceQueryService
{
private readonly ISliceCache _cache;
private readonly SliceExtractor _extractor;
private readonly SliceCasStorage _casStorage;
private readonly SliceDiffComputer _diffComputer;
private readonly SliceHasher _hasher;
private readonly IFileContentAddressableStore _cas;
private readonly IScanMetadataRepository _scanRepo;
private readonly SliceQueryServiceOptions _options;
private readonly ILogger<SliceQueryService> _logger;
public SliceQueryService(
ISliceCache cache,
SliceExtractor extractor,
SliceCasStorage casStorage,
SliceDiffComputer diffComputer,
SliceHasher hasher,
IFileContentAddressableStore cas,
IScanMetadataRepository scanRepo,
IOptions<SliceQueryServiceOptions> options,
ILogger<SliceQueryService> logger)
{
_cache = cache ?? throw new ArgumentNullException(nameof(cache));
_extractor = extractor ?? throw new ArgumentNullException(nameof(extractor));
_casStorage = casStorage ?? throw new ArgumentNullException(nameof(casStorage));
_diffComputer = diffComputer ?? throw new ArgumentNullException(nameof(diffComputer));
_hasher = hasher ?? throw new ArgumentNullException(nameof(hasher));
_cas = cas ?? throw new ArgumentNullException(nameof(cas));
_scanRepo = scanRepo ?? throw new ArgumentNullException(nameof(scanRepo));
_options = options?.Value ?? new SliceQueryServiceOptions();
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
/// <inheritdoc />
public async Task<SliceQueryResponse> QueryAsync(
SliceQueryRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
_logger.LogDebug("Processing slice query for scan {ScanId}, CVE {CveId}", request.ScanId, request.CveId);
// Check cache first
var cacheKey = ComputeCacheKey(request);
if (_options.EnableCache)
{
var cached = await _cache.TryGetAsync(cacheKey, cancellationToken).ConfigureAwait(false);
if (cached is not null)
{
_logger.LogDebug("Cache hit for slice query {CacheKey}", cacheKey);
return new SliceQueryResponse
{
SliceDigest = cached.SliceDigest,
Verdict = cached.Verdict,
Confidence = cached.Confidence,
PathWitnesses = cached.PathWitnesses.ToList(),
CacheHit = true
};
}
}
// Load scan data
var scanData = await LoadScanDataAsync(request.ScanId, cancellationToken).ConfigureAwait(false);
if (scanData == null)
{
throw new InvalidOperationException($"Scan {request.ScanId} not found");
}
// Build extraction request
var extractionRequest = BuildExtractionRequest(request, scanData);
// Extract slice
var slice = _extractor.Extract(extractionRequest);
// Store in CAS
var casResult = await _casStorage.StoreAsync(slice, _cas, cancellationToken).ConfigureAwait(false);
// Cache the result
if (_options.EnableCache)
{
var cacheEntry = new CachedSliceResult
{
SliceDigest = casResult.SliceDigest,
Verdict = slice.Verdict.Status.ToString().ToLowerInvariant(),
Confidence = slice.Verdict.Confidence,
PathWitnesses = slice.Verdict.PathWitnesses.IsDefaultOrEmpty
? Array.Empty<string>()
: slice.Verdict.PathWitnesses.ToList(),
CachedAt = DateTimeOffset.UtcNow
};
await _cache.SetAsync(cacheKey, cacheEntry, TimeSpan.FromHours(1), cancellationToken).ConfigureAwait(false);
}
_logger.LogInformation(
"Generated slice {Digest} for scan {ScanId}: {NodeCount} nodes, {EdgeCount} edges, verdict={Verdict}",
casResult.SliceDigest,
request.ScanId,
slice.Subgraph.Nodes.Length,
slice.Subgraph.Edges.Length,
slice.Verdict.Status);
return BuildResponse(slice, casResult.SliceDigest, cacheHit: false);
}
/// <inheritdoc />
public async Task<ReachabilitySlice?> GetSliceAsync(
string digest,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(digest);
var casKey = ExtractDigestHex(digest);
var stream = await _cas.GetAsync(new FileCasGetRequest(casKey), cancellationToken).ConfigureAwait(false);
if (stream == null) return null;
await using (stream)
{
return await System.Text.Json.JsonSerializer.DeserializeAsync<ReachabilitySlice>(
stream,
cancellationToken: cancellationToken).ConfigureAwait(false);
}
}
/// <inheritdoc />
public async Task<object?> GetSliceDsseAsync(
string digest,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(digest);
var dsseKey = $"{ExtractDigestHex(digest)}.dsse";
var stream = await _cas.GetAsync(new FileCasGetRequest(dsseKey), cancellationToken).ConfigureAwait(false);
if (stream == null) return null;
await using (stream)
{
return await System.Text.Json.JsonSerializer.DeserializeAsync<object>(
stream,
cancellationToken: cancellationToken).ConfigureAwait(false);
}
}
/// <inheritdoc />
public async Task<SliceReplayResponse> ReplayAsync(
SliceReplayRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
_logger.LogDebug("Replaying slice {Digest}", request.SliceDigest);
// Load original slice
var original = await GetSliceAsync(request.SliceDigest, cancellationToken).ConfigureAwait(false);
if (original == null)
{
throw new InvalidOperationException($"Slice {request.SliceDigest} not found");
}
// Load scan data for recomputation
var scanId = ExtractScanIdFromManifest(original.Manifest);
var scanData = await LoadScanDataAsync(scanId, cancellationToken).ConfigureAwait(false);
if (scanData == null)
{
throw new InvalidOperationException($"Cannot replay: scan {scanId} not found");
}
// Recompute slice with same parameters
var extractionRequest = new SliceExtractionRequest(
scanData.Graph,
original.Inputs,
original.Query,
original.Manifest);
var recomputed = _extractor.Extract(extractionRequest);
var recomputedDigest = _hasher.ComputeDigest(recomputed);
// Compare
var diffResult = _diffComputer.Compute(original, recomputed);
_logger.LogInformation(
"Replay verification for {Digest}: match={Match}",
request.SliceDigest,
diffResult.Match);
return new SliceReplayResponse
{
Match = diffResult.Match,
OriginalDigest = request.SliceDigest,
RecomputedDigest = recomputedDigest.Digest,
Diff = diffResult.Match ? null : new SliceDiff
{
MissingNodes = diffResult.NodesDiff.Missing.IsDefaultOrEmpty ? null : diffResult.NodesDiff.Missing.ToList(),
ExtraNodes = diffResult.NodesDiff.Extra.IsDefaultOrEmpty ? null : diffResult.NodesDiff.Extra.ToList(),
MissingEdges = diffResult.EdgesDiff.Missing.IsDefaultOrEmpty ? null : diffResult.EdgesDiff.Missing.ToList(),
ExtraEdges = diffResult.EdgesDiff.Extra.IsDefaultOrEmpty ? null : diffResult.EdgesDiff.Extra.ToList(),
VerdictDiff = diffResult.VerdictDiff
}
};
}
private static SliceQueryResponse BuildResponse(ReachabilitySlice slice, string digest, bool cacheHit)
{
return new SliceQueryResponse
{
SliceDigest = digest,
Verdict = slice.Verdict.Status.ToString().ToLowerInvariant(),
Confidence = slice.Verdict.Confidence,
PathWitnesses = slice.Verdict.PathWitnesses.IsDefaultOrEmpty
? null
: slice.Verdict.PathWitnesses.ToList(),
CacheHit = cacheHit,
JobId = null
};
}
private SliceExtractionRequest BuildExtractionRequest(SliceQueryRequest request, ScanData scanData)
{
var query = new SliceQuery
{
CveId = request.CveId,
TargetSymbols = request.Symbols?.ToImmutableArray() ?? ImmutableArray<string>.Empty,
Entrypoints = request.Entrypoints?.ToImmutableArray() ?? ImmutableArray<string>.Empty,
PolicyHash = request.PolicyHash
};
var inputs = new SliceInputs
{
GraphDigest = scanData.GraphDigest,
BinaryDigests = scanData.BinaryDigests,
SbomDigest = scanData.SbomDigest,
LayerDigests = scanData.LayerDigests
};
return new SliceExtractionRequest(scanData.Graph, inputs, query, scanData.Manifest);
}
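    // Deterministic cache key: scan id, CVE, sorted symbols/entrypoints and policy hash are
    // joined and hashed so equivalent queries map to the same "slice:<sha256-hex>" key.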
private static string ComputeCacheKey(SliceQueryRequest request)
{
var keyParts = new[]
{
request.ScanId,
request.CveId ?? "",
string.Join(",", request.Symbols?.OrderBy(s => s, StringComparer.Ordinal) ?? Array.Empty<string>()),
string.Join(",", request.Entrypoints?.OrderBy(e => e, StringComparer.Ordinal) ?? Array.Empty<string>()),
request.PolicyHash ?? ""
};
var combined = string.Join("|", keyParts);
var hash = SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(combined));
return "slice:" + Convert.ToHexString(hash).ToLowerInvariant();
}
private async Task<ScanData?> LoadScanDataAsync(string scanId, CancellationToken cancellationToken)
{
        // Load the scan data required for slice extraction (metadata plus call graph).
        // Graph hydration is still partial; the concrete path depends on the scan storage backend.
var metadata = await _scanRepo.GetMetadataAsync(scanId, cancellationToken).ConfigureAwait(false);
if (metadata == null) return null;
        // Hydrate the call graph from the CAS or graph store when available; until full hydration
        // is implemented we fall back to an empty placeholder graph.
var emptyGraph = new RichGraph(
Nodes: Array.Empty<RichGraphNode>(),
Edges: Array.Empty<RichGraphEdge>(),
Roots: Array.Empty<RichGraphRoot>(),
Analyzer: new RichGraphAnalyzer("scanner", "1.0.0", null));
return new ScanData
{
ScanId = scanId,
Graph = metadata?.RichGraph ?? emptyGraph,
GraphDigest = metadata?.GraphDigest ?? "",
BinaryDigests = metadata?.BinaryDigests ?? ImmutableArray<string>.Empty,
SbomDigest = metadata?.SbomDigest,
LayerDigests = metadata?.LayerDigests ?? ImmutableArray<string>.Empty,
Manifest = metadata?.Manifest ?? new ScanManifest
{
ScanId = scanId,
Timestamp = DateTimeOffset.UtcNow.ToString("O"),
ScannerVersion = "1.0.0",
Environment = "production"
}
};
}
private static string ExtractScanIdFromManifest(ScanManifest manifest)
{
return manifest.ScanId ?? manifest.Subject?.Digest ?? "unknown";
}
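    // Strips the algorithm prefix from a digest ("sha256:abc123..." -> "abc123...") so it can
    // be used directly as a CAS key.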
private static string ExtractDigestHex(string prefixed)
{
var colonIndex = prefixed.IndexOf(':');
return colonIndex >= 0 ? prefixed[(colonIndex + 1)..] : prefixed;
}
private sealed record ScanData
{
public required string ScanId { get; init; }
public required RichGraph Graph { get; init; }
public required string GraphDigest { get; init; }
public ImmutableArray<string> BinaryDigests { get; init; }
public string? SbomDigest { get; init; }
public ImmutableArray<string> LayerDigests { get; init; }
public required ScanManifest Manifest { get; init; }
}
}

View File

@@ -0,0 +1,35 @@
using Microsoft.EntityFrameworkCore;
using StellaOps.Scanner.Triage;
using StellaOps.Scanner.Triage.Entities;
namespace StellaOps.Scanner.WebService.Services;
public sealed class TriageQueryService : ITriageQueryService
{
private readonly TriageDbContext _dbContext;
private readonly ILogger<TriageQueryService> _logger;
public TriageQueryService(TriageDbContext dbContext, ILogger<TriageQueryService> logger)
{
_dbContext = dbContext ?? throw new ArgumentNullException(nameof(dbContext));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<TriageFinding?> GetFindingAsync(string findingId, CancellationToken cancellationToken = default)
{
if (!Guid.TryParse(findingId, out var id))
{
_logger.LogWarning("Invalid finding id: {FindingId}", findingId);
return null;
}
return await _dbContext.Findings
.Include(f => f.ReachabilityResults)
.Include(f => f.RiskResults)
.Include(f => f.EffectiveVexRecords)
.Include(f => f.EvidenceArtifacts)
.AsNoTracking()
.FirstOrDefaultAsync(f => f.Id == id, cancellationToken)
.ConfigureAwait(false);
}
}

View File

@@ -9,7 +9,7 @@
<RootNamespace>StellaOps.Scanner.WebService</RootNamespace>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="CycloneDX.Core" Version="10.0.2" />
<PackageReference Include="CycloneDX.Core" Version="11.0.0" />
<PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0" />
<PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
<PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />
@@ -38,6 +38,7 @@
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.FS/StellaOps.Scanner.Surface.FS.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Surface.Secrets/StellaOps.Scanner.Surface.Secrets.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Triage/StellaOps.Scanner.Triage.csproj" />
<ProjectReference Include="../../__Libraries/StellaOps.Replay.Core/StellaOps.Replay.Core.csproj" />
<ProjectReference Include="../../Zastava/__Libraries/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj" />
<ProjectReference Include="../__Libraries/StellaOps.Scanner.Reachability/StellaOps.Scanner.Reachability.csproj" />

View File

@@ -9,3 +9,4 @@
| `DRIFT-3600-API` | `docs/implplan/SPRINT_3600_0003_0001_drift_detection_engine.md` | DONE | Add reachability drift endpoints (`/api/v1/scans/{id}/drift`, `/api/v1/drift/{id}/sinks`) + integration tests. |
| `SCAN-API-3103-001` | `docs/implplan/SPRINT_3103_0001_0001_scanner_api_ingestion_completion.md` | DONE | Implement missing ingestion services + DI for callgraph/SBOM endpoints and add deterministic integration tests. |
| `EPSS-SCAN-011` | `docs/implplan/SPRINT_3410_0002_0001_epss_scanner_integration.md` | DONE | Wired `/api/v1/epss/*` endpoints and added `EpssEndpointsTests` integration coverage. |
| `SLICE-3820-API` | `docs/implplan/SPRINT_3820_0001_0001_slice_query_replay_apis.md` | DOING | Implement slice query/replay endpoints, caching, and OpenAPI updates. |