save progress
@@ -0,0 +1,26 @@
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing;
using StellaOps.Scanner.WebService.Services;

namespace StellaOps.Scanner.WebService.Endpoints;

internal static class ObservabilityEndpoints
{
    public static void MapObservabilityEndpoints(this IEndpointRouteBuilder endpoints)
    {
        ArgumentNullException.ThrowIfNull(endpoints);

        endpoints.MapGet("/metrics", HandleMetricsAsync)
            .WithName("scanner.metrics")
            .Produces(StatusCodes.Status200OK);
    }

    private static IResult HandleMetricsAsync(OfflineKitMetricsStore metricsStore)
    {
        ArgumentNullException.ThrowIfNull(metricsStore);

        var payload = metricsStore.RenderPrometheus();
        return Results.Text(payload, contentType: "text/plain; version=0.0.4; charset=utf-8");
    }
}

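A brief consumer-side sketch of the /metrics route mapped above; the base address is a placeholder and the checks rely only on the route and content type declared in HandleMetricsAsync.

// Sketch only: scrape /metrics and inspect the Prometheus text exposition returned above.
// The localhost address is illustrative, not part of this commit.
using var client = new HttpClient { BaseAddress = new Uri("http://localhost:5210") };
using var response = await client.GetAsync("/metrics");
response.EnsureSuccessStatusCode();
Console.WriteLine(response.Content.Headers.ContentType);        // text/plain; version=0.0.4; charset=utf-8
Console.WriteLine(await response.Content.ReadAsStringAsync());  // payload rendered by OfflineKitMetricsStore.RenderPrometheus()
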
@@ -0,0 +1,230 @@
using System.Linq;
using System.Security.Claims;
using System.Text.Json;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.Options;
using StellaOps.Auth.Abstractions;
using StellaOps.Scanner.Core.Configuration;
using StellaOps.Scanner.WebService.Constants;
using StellaOps.Scanner.WebService.Infrastructure;
using StellaOps.Scanner.WebService.Security;
using StellaOps.Scanner.WebService.Services;

namespace StellaOps.Scanner.WebService.Endpoints;

internal static class OfflineKitEndpoints
{
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true
    };

    public static void MapOfflineKitEndpoints(this IEndpointRouteBuilder endpoints)
    {
        ArgumentNullException.ThrowIfNull(endpoints);

        var group = endpoints
            .MapGroup("/api/offline-kit")
            .WithTags("Offline Kit");

        group.MapPost("/import", HandleImportAsync)
            .WithName("scanner.offline-kit.import")
            .RequireAuthorization(ScannerPolicies.OfflineKitImport)
            .Produces<OfflineKitImportResponseTransport>(StatusCodes.Status202Accepted)
            .Produces<ProblemDetails>(StatusCodes.Status400BadRequest)
            .Produces<ProblemDetails>(StatusCodes.Status404NotFound)
            .Produces<ProblemDetails>(StatusCodes.Status422UnprocessableEntity);

        group.MapGet("/status", HandleStatusAsync)
            .WithName("scanner.offline-kit.status")
            .RequireAuthorization(ScannerPolicies.OfflineKitStatusRead)
            .Produces<OfflineKitStatusTransport>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status204NoContent)
            .Produces<ProblemDetails>(StatusCodes.Status404NotFound);
    }

    private static async Task<IResult> HandleImportAsync(
        HttpContext context,
        HttpRequest request,
        IOptionsMonitor<OfflineKitOptions> offlineKitOptions,
        OfflineKitImportService importService,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(request);
        ArgumentNullException.ThrowIfNull(offlineKitOptions);
        ArgumentNullException.ThrowIfNull(importService);

        if (!offlineKitOptions.CurrentValue.Enabled)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Offline kit import is not enabled",
                StatusCodes.Status404NotFound);
        }

        if (!request.HasFormContentType)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid offline kit import request",
                StatusCodes.Status400BadRequest,
                detail: "Request must be multipart/form-data.");
        }

        var form = await request.ReadFormAsync(cancellationToken).ConfigureAwait(false);

        var metadataJson = form["metadata"].FirstOrDefault();
        if (string.IsNullOrWhiteSpace(metadataJson))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid offline kit import request",
                StatusCodes.Status400BadRequest,
                detail: "Missing 'metadata' form field.");
        }

        OfflineKitImportMetadata? metadata;
        try
        {
            metadata = JsonSerializer.Deserialize<OfflineKitImportMetadata>(metadataJson, JsonOptions);
        }
        catch (JsonException ex)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid offline kit import request",
                StatusCodes.Status400BadRequest,
                detail: $"Failed to parse metadata JSON: {ex.Message}");
        }

        if (metadata is null)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid offline kit import request",
                StatusCodes.Status400BadRequest,
                detail: "Metadata payload is empty.");
        }

        var bundle = form.Files.GetFile("bundle");
        if (bundle is null)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid offline kit import request",
                StatusCodes.Status400BadRequest,
                detail: "Missing 'bundle' file upload.");
        }

        var manifest = form.Files.GetFile("manifest");
        var bundleSignature = form.Files.GetFile("bundleSignature");
        var manifestSignature = form.Files.GetFile("manifestSignature");

        var tenantId = ResolveTenant(context);
        var actor = ResolveActor(context);

        try
        {
            var response = await importService.ImportAsync(
                new OfflineKitImportRequest(
                    tenantId,
                    actor,
                    metadata,
                    bundle,
                    manifest,
                    bundleSignature,
                    manifestSignature),
                cancellationToken).ConfigureAwait(false);

            return Results.Accepted("/api/offline-kit/status", response);
        }
        catch (OfflineKitImportException ex)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Offline kit import failed",
                ex.StatusCode,
                detail: ex.Message,
                extensions: new Dictionary<string, object?>
                {
                    ["reason_code"] = ex.ReasonCode,
                    ["notes"] = ex.Notes
                });
        }
    }

    private static async Task<IResult> HandleStatusAsync(
        HttpContext context,
        IOptionsMonitor<OfflineKitOptions> offlineKitOptions,
        OfflineKitStateStore stateStore,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(context);
        ArgumentNullException.ThrowIfNull(offlineKitOptions);
        ArgumentNullException.ThrowIfNull(stateStore);

        if (!offlineKitOptions.CurrentValue.Enabled)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Offline kit status is not enabled",
                StatusCodes.Status404NotFound);
        }

        var tenantId = ResolveTenant(context);
        var status = await stateStore.LoadStatusAsync(tenantId, cancellationToken).ConfigureAwait(false);

        return status is null
            ? Results.NoContent()
            : Results.Ok(status);
    }

    private static string ResolveTenant(HttpContext context)
    {
        var tenant = context.User?.FindFirstValue(StellaOpsClaimTypes.Tenant);
        if (!string.IsNullOrWhiteSpace(tenant))
        {
            return tenant.Trim();
        }

        if (context.Request.Headers.TryGetValue("X-Stella-Tenant", out var headerTenant))
        {
            var headerValue = headerTenant.ToString();
            if (!string.IsNullOrWhiteSpace(headerValue))
            {
                return headerValue.Trim();
            }
        }

        return "default";
    }

    private static string ResolveActor(HttpContext context)
    {
        var subject = context.User?.FindFirstValue(StellaOpsClaimTypes.Subject);
        if (!string.IsNullOrWhiteSpace(subject))
        {
            return subject.Trim();
        }

        var clientId = context.User?.FindFirstValue(StellaOpsClaimTypes.ClientId);
        if (!string.IsNullOrWhiteSpace(clientId))
        {
            return clientId.Trim();
        }

        return "anonymous";
    }
}

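For reference, a hedged client-side sketch of the multipart import request that HandleImportAsync expects. The form field names ("metadata", "bundle", and the optional "manifest", "bundleSignature", "manifestSignature") come from the handler above; the base address, file paths, and metadata values are placeholders, and the ScannerPolicies.OfflineKitImport policy would normally also require an Authorization header, omitted here.

// Sketch: POST /api/offline-kit/import as multipart/form-data, mirroring the form fields read above.
using var client = new HttpClient { BaseAddress = new Uri("http://localhost:5210") };
using var form = new MultipartFormDataContent
{
    { new StringContent("""{ "bundleSha256": "<sha256-of-bundle.tgz>", "bundleSize": 1048576, "channel": "stable" }"""), "metadata" },
    { new StreamContent(File.OpenRead("offline-kit.tgz")), "bundle", "offline-kit.tgz" },
    // Optional parts, matching form.Files.GetFile(...) in the handler:
    // { new StreamContent(File.OpenRead("manifest.json")), "manifest", "manifest.json" },
    // { new StreamContent(File.OpenRead("bundle.dsse")), "bundleSignature", "bundle.dsse" },
};
using var response = await client.PostAsync("/api/offline-kit/import", form); // 202 Accepted on success
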
@@ -0,0 +1,307 @@
using System.Collections.Immutable;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Routing;
using StellaOps.Scanner.ReachabilityDrift;
using StellaOps.Scanner.ReachabilityDrift.Services;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.Scanner.WebService.Constants;
using StellaOps.Scanner.WebService.Domain;
using StellaOps.Scanner.WebService.Infrastructure;
using StellaOps.Scanner.WebService.Security;
using StellaOps.Scanner.WebService.Services;

namespace StellaOps.Scanner.WebService.Endpoints;

internal static class ReachabilityDriftEndpoints
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
        Converters = { new JsonStringEnumConverter() }
    };

    public static void MapReachabilityDriftScanEndpoints(this RouteGroupBuilder scansGroup)
    {
        ArgumentNullException.ThrowIfNull(scansGroup);

        // GET /scans/{scanId}/drift?baseScanId=...&language=dotnet&includeFullPath=false
        scansGroup.MapGet("/{scanId}/drift", HandleGetDriftAsync)
            .WithName("scanner.scans.reachability-drift")
            .WithTags("ReachabilityDrift")
            .Produces<ReachabilityDriftResult>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status400BadRequest)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.ScansRead);
    }

    public static void MapReachabilityDriftRootEndpoints(this RouteGroupBuilder apiGroup)
    {
        ArgumentNullException.ThrowIfNull(apiGroup);

        var driftGroup = apiGroup.MapGroup("/drift");

        // GET /drift/{driftId}/sinks?direction=became_reachable&offset=0&limit=100
        driftGroup.MapGet("/{driftId:guid}/sinks", HandleListSinksAsync)
            .WithName("scanner.drift.sinks")
            .WithTags("ReachabilityDrift")
            .Produces<DriftedSinksResponseDto>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status400BadRequest)
            .Produces(StatusCodes.Status404NotFound)
            .RequireAuthorization(ScannerPolicies.ScansRead);
    }

    private static async Task<IResult> HandleGetDriftAsync(
        string scanId,
        string? baseScanId,
        string? language,
        bool? includeFullPath,
        IScanCoordinator coordinator,
        ICallGraphSnapshotRepository callGraphSnapshots,
        CodeChangeFactExtractor codeChangeFactExtractor,
        ICodeChangeRepository codeChangeRepository,
        ReachabilityDriftDetector driftDetector,
        IReachabilityDriftResultRepository driftRepository,
        HttpContext context,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(coordinator);
        ArgumentNullException.ThrowIfNull(callGraphSnapshots);
        ArgumentNullException.ThrowIfNull(codeChangeFactExtractor);
        ArgumentNullException.ThrowIfNull(codeChangeRepository);
        ArgumentNullException.ThrowIfNull(driftDetector);
        ArgumentNullException.ThrowIfNull(driftRepository);

        if (!ScanId.TryParse(scanId, out var headScan))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid scan identifier",
                StatusCodes.Status400BadRequest,
                detail: "Scan identifier is required.");
        }

        var resolvedLanguage = string.IsNullOrWhiteSpace(language) ? "dotnet" : language.Trim();

        var headSnapshot = await coordinator.GetAsync(headScan, cancellationToken).ConfigureAwait(false);
        if (headSnapshot is null)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Scan not found",
                StatusCodes.Status404NotFound,
                detail: "Requested scan could not be located.");
        }

        if (string.IsNullOrWhiteSpace(baseScanId))
        {
            var existing = await driftRepository.TryGetLatestForHeadAsync(headScan.Value, resolvedLanguage, cancellationToken)
                .ConfigureAwait(false);

            if (existing is null)
            {
                return ProblemResultFactory.Create(
                    context,
                    ProblemTypes.NotFound,
                    "Drift result not found",
                    StatusCodes.Status404NotFound,
                    detail: $"No reachability drift result recorded for scan {scanId} (language={resolvedLanguage}).");
            }

            return Json(existing, StatusCodes.Status200OK);
        }

        if (!ScanId.TryParse(baseScanId, out var baseScan))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid base scan identifier",
                StatusCodes.Status400BadRequest,
                detail: "Query parameter 'baseScanId' must be a valid scan id.");
        }

        var baselineSnapshot = await coordinator.GetAsync(baseScan, cancellationToken).ConfigureAwait(false);
        if (baselineSnapshot is null)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Base scan not found",
                StatusCodes.Status404NotFound,
                detail: "Base scan could not be located.");
        }

        var baseGraph = await callGraphSnapshots.TryGetLatestAsync(baseScan.Value, resolvedLanguage, cancellationToken)
            .ConfigureAwait(false);
        if (baseGraph is null)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Base call graph not found",
                StatusCodes.Status404NotFound,
                detail: $"No call graph snapshot found for base scan {baseScan.Value} (language={resolvedLanguage}).");
        }

        var headGraph = await callGraphSnapshots.TryGetLatestAsync(headScan.Value, resolvedLanguage, cancellationToken)
            .ConfigureAwait(false);
        if (headGraph is null)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Head call graph not found",
                StatusCodes.Status404NotFound,
                detail: $"No call graph snapshot found for head scan {headScan.Value} (language={resolvedLanguage}).");
        }

        try
        {
            var codeChanges = codeChangeFactExtractor.Extract(baseGraph, headGraph);
            await codeChangeRepository.StoreAsync(codeChanges, cancellationToken).ConfigureAwait(false);

            var drift = driftDetector.Detect(
                baseGraph,
                headGraph,
                codeChanges,
                includeFullPath: includeFullPath == true);

            await driftRepository.StoreAsync(drift, cancellationToken).ConfigureAwait(false);
            return Json(drift, StatusCodes.Status200OK);
        }
        catch (ArgumentException ex)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid drift request",
                StatusCodes.Status400BadRequest,
                detail: ex.Message);
        }
    }

    private static async Task<IResult> HandleListSinksAsync(
        Guid driftId,
        string? direction,
        int? offset,
        int? limit,
        IReachabilityDriftResultRepository driftRepository,
        HttpContext context,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(driftRepository);

        if (driftId == Guid.Empty)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid drift identifier",
                StatusCodes.Status400BadRequest,
                detail: "driftId must be a non-empty GUID.");
        }

        if (!TryParseDirection(direction, out var parsedDirection))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid direction",
                StatusCodes.Status400BadRequest,
                detail: "direction must be 'became_reachable' or 'became_unreachable'.");
        }

        var resolvedOffset = offset ?? 0;
        if (resolvedOffset < 0)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid offset",
                StatusCodes.Status400BadRequest,
                detail: "offset must be >= 0.");
        }

        var resolvedLimit = limit ?? 100;
        if (resolvedLimit <= 0 || resolvedLimit > 500)
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.Validation,
                "Invalid limit",
                StatusCodes.Status400BadRequest,
                detail: "limit must be between 1 and 500.");
        }

        if (!await driftRepository.ExistsAsync(driftId, cancellationToken).ConfigureAwait(false))
        {
            return ProblemResultFactory.Create(
                context,
                ProblemTypes.NotFound,
                "Drift result not found",
                StatusCodes.Status404NotFound,
                detail: "Requested drift result could not be located.");
        }

        var sinks = await driftRepository.ListSinksAsync(
            driftId,
            parsedDirection,
            resolvedOffset,
            resolvedLimit,
            cancellationToken).ConfigureAwait(false);

        var response = new DriftedSinksResponseDto(
            DriftId: driftId,
            Direction: parsedDirection,
            Offset: resolvedOffset,
            Limit: resolvedLimit,
            Count: sinks.Count,
            Sinks: sinks.ToImmutableArray());

        return Json(response, StatusCodes.Status200OK);
    }

    private static bool TryParseDirection(string? direction, out DriftDirection parsed)
    {
        if (string.IsNullOrWhiteSpace(direction))
        {
            parsed = DriftDirection.BecameReachable;
            return true;
        }

        var normalized = direction.Trim().ToLowerInvariant();
        parsed = normalized switch
        {
            "became_reachable" or "newly_reachable" or "reachable" or "up" => DriftDirection.BecameReachable,
            "became_unreachable" or "newly_unreachable" or "unreachable" or "down" => DriftDirection.BecameUnreachable,
            _ => DriftDirection.BecameReachable
        };

        return normalized is "became_reachable"
            or "newly_reachable"
            or "reachable"
            or "up"
            or "became_unreachable"
            or "newly_unreachable"
            or "unreachable"
            or "down";
    }

    private static IResult Json<T>(T value, int statusCode)
    {
        var payload = JsonSerializer.Serialize(value, SerializerOptions);
        return Results.Content(payload, "application/json", System.Text.Encoding.UTF8, statusCode);
    }
}

internal sealed record DriftedSinksResponseDto(
    Guid DriftId,
    DriftDirection Direction,
    int Offset,
    int Limit,
    int Count,
    ImmutableArray<DriftedSink> Sinks);
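A hedged usage sketch of the two routes mapped above, using only the query parameters shown in the route comments. The "/api/scans" and "/api/drift" prefixes stand in for the configured Api.BasePath and scans segment, and all identifiers are placeholders.

// Sketch: compute (or fetch) drift for a head scan against an explicit baseline,
// then page through the sinks that became reachable.
using var client = new HttpClient { BaseAddress = new Uri("http://localhost:5210") };
var drift = await client.GetStringAsync(
    "/api/scans/scan-head-123/drift?baseScanId=scan-base-122&language=dotnet&includeFullPath=false");
var sinks = await client.GetStringAsync(
    "/api/drift/3f2c9a6e-0000-0000-0000-000000000000/sinks?direction=became_reachable&offset=0&limit=100");
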
@@ -1,6 +1,7 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using StellaOps.Scanner.WebService.Constants;
using StellaOps.Scanner.WebService.Contracts;
@@ -63,7 +64,7 @@ internal static class ReachabilityEndpoints
        string scanId,
        ComputeReachabilityRequestDto? request,
        IScanCoordinator coordinator,
-       IReachabilityComputeService computeService,
+       [FromServices] IReachabilityComputeService computeService,
        HttpContext context,
        CancellationToken cancellationToken)
    {

@@ -83,6 +83,7 @@ internal static class ScanEndpoints
        scans.MapCallGraphEndpoints();
        scans.MapSbomEndpoints();
        scans.MapReachabilityEndpoints();
        scans.MapReachabilityDriftScanEndpoints();
        scans.MapExportEndpoints();
    }


@@ -5,6 +5,7 @@ using Microsoft.AspNetCore.Routing;
using StellaOps.Scanner.SmartDiff.Detection;
using StellaOps.Scanner.SmartDiff.Output;
using StellaOps.Scanner.Storage.Postgres;
using StellaOps.Scanner.WebService.Services;
using StellaOps.Scanner.WebService.Security;

namespace StellaOps.Scanner.WebService.Endpoints;
@@ -80,7 +81,7 @@ internal static class SmartDiffEndpoints
        // Get scan metadata if available
        string? baseDigest = null;
        string? targetDigest = null;
-       DateTimeOffset scanTime = DateTimeOffset.UtcNow;
+       DateTimeOffset scanTime = DateTimeOffset.UnixEpoch;

        if (metadataRepo is not null)
        {
@@ -99,13 +100,16 @@ internal static class SmartDiffEndpoints
            ScanTime: scanTime,
            BaseDigest: baseDigest,
            TargetDigest: targetDigest,
-           MaterialChanges: changes.Select(c => new MaterialRiskChange(
-               VulnId: c.VulnId,
-               ComponentPurl: c.ComponentPurl,
-               Direction: c.IsRiskIncrease ? RiskDirection.Increased : RiskDirection.Decreased,
-               Reason: c.ChangeReason,
-               FilePath: c.FilePath
-           )).ToList(),
+           MaterialChanges: changes
+               .Where(c => c.HasMaterialChange)
+               .Select(c => new MaterialRiskChange(
+                   VulnId: c.FindingKey.VulnId,
+                   ComponentPurl: c.FindingKey.ComponentPurl,
+                   Direction: ToSarifRiskDirection(c),
+                   Reason: ToSarifReason(c),
+                   FilePath: null
+               ))
+               .ToList(),
            HardeningRegressions: [],
            VexCandidates: [],
            ReachabilityChanges: []);
@@ -120,7 +124,7 @@ internal static class SmartDiffEndpoints
        };

        var generator = new SarifOutputGenerator();
-       var sarifJson = generator.Generate(sarifInput, options);
+       var sarifJson = generator.GenerateJson(sarifInput, options);

        // Return as SARIF content type with proper filename
        var fileName = $"smartdiff-{scanId}.sarif";
@@ -130,6 +134,46 @@ internal static class SmartDiffEndpoints
            statusCode: StatusCodes.Status200OK);
    }

    private static StellaOps.Scanner.SmartDiff.Output.RiskDirection ToSarifRiskDirection(MaterialRiskChangeResult change)
    {
        if (change.Changes.IsDefaultOrEmpty)
        {
            return StellaOps.Scanner.SmartDiff.Output.RiskDirection.Changed;
        }

        var hasIncreased = change.Changes.Any(c => c.Direction == StellaOps.Scanner.SmartDiff.Detection.RiskDirection.Increased);
        var hasDecreased = change.Changes.Any(c => c.Direction == StellaOps.Scanner.SmartDiff.Detection.RiskDirection.Decreased);

        return (hasIncreased, hasDecreased) switch
        {
            (true, false) => StellaOps.Scanner.SmartDiff.Output.RiskDirection.Increased,
            (false, true) => StellaOps.Scanner.SmartDiff.Output.RiskDirection.Decreased,
            _ => StellaOps.Scanner.SmartDiff.Output.RiskDirection.Changed
        };
    }

    private static string ToSarifReason(MaterialRiskChangeResult change)
    {
        if (change.Changes.IsDefaultOrEmpty)
        {
            return "material_change";
        }

        var reasons = change.Changes
            .Select(c => c.Reason)
            .Where(r => !string.IsNullOrWhiteSpace(r))
            .Distinct(StringComparer.Ordinal)
            .Order(StringComparer.Ordinal)
            .ToArray();

        return reasons.Length switch
        {
            0 => "material_change",
            1 => reasons[0],
            _ => string.Join("; ", reasons)
        };
    }

    private static string GetScannerVersion()
    {
        var assembly = typeof(SmartDiffEndpoints).Assembly;
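To make the aggregation in ToSarifRiskDirection and ToSarifReason above concrete: mixed directions collapse to Changed, and distinct non-empty reasons are ordinal-sorted and joined with "; ". The sketch below uses hypothetical stand-in types, not the real MaterialRiskChangeResult.

using System;
using System.Collections.Generic;
using System.Linq;

internal static class SarifAggregationSketch
{
    internal enum Direction { Increased, Decreased }
    internal sealed record Change(Direction Direction, string Reason);

    // Mirrors the (hasIncreased, hasDecreased) switch and the reason join used above.
    internal static string Summarize(IReadOnlyList<Change> changes)
    {
        var hasIncreased = changes.Any(c => c.Direction == Direction.Increased);
        var hasDecreased = changes.Any(c => c.Direction == Direction.Decreased);
        var direction = (hasIncreased, hasDecreased) switch
        {
            (true, false) => "Increased",
            (false, true) => "Decreased",
            _ => "Changed" // both directions present (or none) reads as a mixed change
        };

        var reasons = changes
            .Select(c => c.Reason)
            .Where(r => !string.IsNullOrWhiteSpace(r))
            .Distinct(StringComparer.Ordinal)
            .Order(StringComparer.Ordinal)
            .ToArray();

        return $"{direction}: {(reasons.Length == 0 ? "material_change" : string.Join("; ", reasons))}";
    }
}

// e.g. Summarize with one Increased("severity_up") and one Decreased("reachability_down")
// yields "Changed: reachability_down; severity_up".
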
@@ -289,7 +333,7 @@ internal static class SmartDiffEndpoints
        };
    }

-   private static VexCandidateDto ToCandidateDto(VexCandidate candidate)
+   private static VexCandidateDto ToCandidateDto(StellaOps.Scanner.SmartDiff.Detection.VexCandidate candidate)
    {
        return new VexCandidateDto
        {

@@ -12,8 +12,10 @@ using Microsoft.Extensions.DependencyInjection.Extensions;
using Microsoft.Extensions.Options;
using Serilog;
using Serilog.Events;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.Client;
using StellaOps.Auth.ServerIntegration;
using StellaOps.Authority.Storage.Postgres.Repositories;
using StellaOps.Configuration;
using StellaOps.Plugin.DependencyInjection;
using StellaOps.Cryptography.DependencyInjection;
@@ -24,6 +26,7 @@ using StellaOps.Scanner.Cache;
using StellaOps.Scanner.Core.Configuration;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Core.TrustAnchors;
using StellaOps.Scanner.ReachabilityDrift.DependencyInjection;
using StellaOps.Scanner.Surface.Env;
using StellaOps.Scanner.Surface.FS;
using StellaOps.Scanner.Surface.Secrets;
@@ -79,6 +82,10 @@ builder.Services.AddOptions<OfflineKitOptions>()
    .ValidateOnStart();
builder.Services.AddSingleton<IPublicKeyLoader, FileSystemPublicKeyLoader>();
builder.Services.AddSingleton<ITrustAnchorRegistry, TrustAnchorRegistry>();
builder.Services.TryAddScoped<IOfflineKitAuditEmitter, NullOfflineKitAuditEmitter>();
builder.Services.AddSingleton<OfflineKitMetricsStore>();
builder.Services.AddSingleton<OfflineKitStateStore>();
builder.Services.AddScoped<OfflineKitImportService>();

builder.Host.UseSerilog((context, services, loggerConfiguration) =>
{
@@ -104,11 +111,20 @@ builder.Services.AddSingleton<ScanProgressStream>();
builder.Services.AddSingleton<IScanProgressPublisher>(sp => sp.GetRequiredService<ScanProgressStream>());
builder.Services.AddSingleton<IScanProgressReader>(sp => sp.GetRequiredService<ScanProgressStream>());
builder.Services.AddSingleton<IScanCoordinator, InMemoryScanCoordinator>();
builder.Services.AddSingleton<IReachabilityComputeService, NullReachabilityComputeService>();
builder.Services.AddSingleton<IReachabilityQueryService, NullReachabilityQueryService>();
builder.Services.AddSingleton<IReachabilityExplainService, NullReachabilityExplainService>();
builder.Services.AddSingleton<ISarifExportService, NullSarifExportService>();
builder.Services.AddSingleton<ICycloneDxExportService, NullCycloneDxExportService>();
builder.Services.AddSingleton<IOpenVexExportService, NullOpenVexExportService>();
builder.Services.AddScoped<ICallGraphIngestionService, CallGraphIngestionService>();
builder.Services.AddScoped<ISbomIngestionService, SbomIngestionService>();
builder.Services.AddSingleton<IPolicySnapshotRepository, InMemoryPolicySnapshotRepository>();
builder.Services.AddSingleton<IPolicyAuditRepository, InMemoryPolicyAuditRepository>();
builder.Services.AddSingleton<PolicySnapshotStore>();
builder.Services.AddSingleton<PolicyPreviewService>();
builder.Services.AddSingleton<IRecordModeService, RecordModeService>();
builder.Services.AddReachabilityDrift();
builder.Services.AddStellaOpsCrypto();
builder.Services.AddBouncyCastleEd25519Provider();
builder.Services.AddSingleton<IReportSigner, ReportSigner>();
@@ -301,8 +317,12 @@ if (bootstrapOptions.Authority.Enabled)
{
        options.AddStellaOpsScopePolicy(ScannerPolicies.ScansEnqueue, bootstrapOptions.Authority.RequiredScopes.ToArray());
        options.AddStellaOpsScopePolicy(ScannerPolicies.ScansRead, ScannerAuthorityScopes.ScansRead);
        options.AddStellaOpsScopePolicy(ScannerPolicies.ScansWrite, ScannerAuthorityScopes.ScansWrite);
        options.AddStellaOpsScopePolicy(ScannerPolicies.Reports, ScannerAuthorityScopes.ReportsRead);
        options.AddStellaOpsScopePolicy(ScannerPolicies.RuntimeIngest, ScannerAuthorityScopes.RuntimeIngest);
        options.AddStellaOpsScopePolicy(ScannerPolicies.CallGraphIngest, ScannerAuthorityScopes.CallGraphIngest);
        options.AddStellaOpsScopePolicy(ScannerPolicies.OfflineKitImport, StellaOpsScopes.AirgapImport);
        options.AddStellaOpsScopePolicy(ScannerPolicies.OfflineKitStatusRead, StellaOpsScopes.AirgapStatusRead);
    });
}
else
@@ -318,8 +338,12 @@ else
{
        options.AddPolicy(ScannerPolicies.ScansEnqueue, policy => policy.RequireAssertion(_ => true));
        options.AddPolicy(ScannerPolicies.ScansRead, policy => policy.RequireAssertion(_ => true));
        options.AddPolicy(ScannerPolicies.ScansWrite, policy => policy.RequireAssertion(_ => true));
        options.AddPolicy(ScannerPolicies.Reports, policy => policy.RequireAssertion(_ => true));
        options.AddPolicy(ScannerPolicies.RuntimeIngest, policy => policy.RequireAssertion(_ => true));
        options.AddPolicy(ScannerPolicies.CallGraphIngest, policy => policy.RequireAssertion(_ => true));
        options.AddPolicy(ScannerPolicies.OfflineKitImport, policy => policy.RequireAssertion(_ => true));
        options.AddPolicy(ScannerPolicies.OfflineKitStatusRead, policy => policy.RequireAssertion(_ => true));
    });
}

@@ -430,6 +454,8 @@ if (authorityConfigured)
}

app.MapHealthEndpoints();
app.MapObservabilityEndpoints();
app.MapOfflineKitEndpoints();

var apiGroup = app.MapGroup(resolvedOptions.Api.BasePath);

@@ -441,6 +467,7 @@ if (app.Environment.IsEnvironment("Testing"))
}

apiGroup.MapScanEndpoints(resolvedOptions.Api.ScansSegment);
apiGroup.MapReachabilityDriftRootEndpoints();
apiGroup.MapProofSpineEndpoints(resolvedOptions.Api.SpinesSegment, resolvedOptions.Api.ScansSegment);
apiGroup.MapReplayEndpoints();


@@ -7,6 +7,10 @@ internal static class ScannerAuthorityScopes
{
    public const string ScansEnqueue = "scanner.scans.enqueue";
    public const string ScansRead = "scanner.scans.read";
    public const string ScansWrite = "scanner.scans.write";
    public const string ReportsRead = "scanner.reports.read";
    public const string RuntimeIngest = "scanner.runtime.ingest";
    public const string CallGraphIngest = "scanner.callgraph.ingest";
    public const string OfflineKitImport = "scanner.offline-kit.import";
    public const string OfflineKitStatusRead = "scanner.offline-kit.status.read";
}

@@ -8,4 +8,7 @@ internal static class ScannerPolicies
    public const string Reports = "scanner.reports";
    public const string RuntimeIngest = "scanner.runtime.ingest";
    public const string CallGraphIngest = "scanner.callgraph.ingest";

    public const string OfflineKitImport = "scanner.offline-kit.import";
    public const string OfflineKitStatusRead = "scanner.offline-kit.status.read";
}

@@ -0,0 +1,232 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using Npgsql;
using NpgsqlTypes;
using StellaOps.Scanner.Storage.Postgres;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;

namespace StellaOps.Scanner.WebService.Services;

internal sealed class CallGraphIngestionService : ICallGraphIngestionService
{
    private const string TenantContext = "00000000-0000-0000-0000-000000000001";
    private static readonly Guid TenantId = Guid.Parse(TenantContext);

    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    private readonly ScannerDataSource _dataSource;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<CallGraphIngestionService> _logger;

    private string SchemaName => _dataSource.SchemaName ?? ScannerDataSource.DefaultSchema;
    private string CallGraphIngestionsTable => $"{SchemaName}.callgraph_ingestions";

    public CallGraphIngestionService(
        ScannerDataSource dataSource,
        TimeProvider timeProvider,
        ILogger<CallGraphIngestionService> logger)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public CallGraphValidationResult Validate(CallGraphV1Dto callGraph)
    {
        ArgumentNullException.ThrowIfNull(callGraph);

        var errors = new List<string>();

        if (string.IsNullOrWhiteSpace(callGraph.Schema))
        {
            errors.Add("Schema is required.");
        }
        else if (!string.Equals(callGraph.Schema, "stella.callgraph.v1", StringComparison.Ordinal))
        {
            errors.Add($"Unsupported schema '{callGraph.Schema}'. Expected 'stella.callgraph.v1'.");
        }

        if (string.IsNullOrWhiteSpace(callGraph.ScanKey))
        {
            errors.Add("ScanKey is required.");
        }

        if (string.IsNullOrWhiteSpace(callGraph.Language))
        {
            errors.Add("Language is required.");
        }

        if (callGraph.Nodes is null || callGraph.Nodes.Count == 0)
        {
            errors.Add("At least one node is required.");
        }

        if (callGraph.Edges is null || callGraph.Edges.Count == 0)
        {
            errors.Add("At least one edge is required.");
        }

        return errors.Count == 0
            ? CallGraphValidationResult.Success()
            : CallGraphValidationResult.Failure(errors.ToArray());
    }

    public async Task<ExistingCallGraphDto?> FindByDigestAsync(
        ScanId scanId,
        string contentDigest,
        CancellationToken cancellationToken = default)
    {
        if (string.IsNullOrWhiteSpace(scanId.Value))
        {
            return null;
        }

        if (string.IsNullOrWhiteSpace(contentDigest))
        {
            return null;
        }

        var sql = $"""
            SELECT id, content_digest, created_at_utc
            FROM {CallGraphIngestionsTable}
            WHERE tenant_id = @tenant_id
              AND scan_id = @scan_id
              AND content_digest = @content_digest
            LIMIT 1
            """;

        await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, "reader", cancellationToken)
            .ConfigureAwait(false);
        await using var command = new NpgsqlCommand(sql, connection);
        command.Parameters.AddWithValue("tenant_id", TenantId);
        command.Parameters.AddWithValue("scan_id", scanId.Value.Trim());
        command.Parameters.AddWithValue("content_digest", contentDigest.Trim());

        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            return null;
        }

        return new ExistingCallGraphDto(
            Id: reader.GetString(0),
            Digest: reader.GetString(1),
            CreatedAt: reader.GetFieldValue<DateTimeOffset>(2));
    }

    public async Task<CallGraphIngestionResult> IngestAsync(
        ScanId scanId,
        CallGraphV1Dto callGraph,
        string contentDigest,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(callGraph);
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId.Value);
        ArgumentException.ThrowIfNullOrWhiteSpace(contentDigest);

        var normalizedDigest = contentDigest.Trim();
        var callgraphId = CreateCallGraphId(scanId, normalizedDigest);
        var now = _timeProvider.GetUtcNow();
        var nodeCount = callGraph.Nodes?.Count ?? 0;
        var edgeCount = callGraph.Edges?.Count ?? 0;
        var language = callGraph.Language?.Trim() ?? string.Empty;
        var payload = JsonSerializer.Serialize(callGraph, JsonOptions);

        var insertSql = $"""
            INSERT INTO {CallGraphIngestionsTable} (
                id,
                tenant_id,
                scan_id,
                content_digest,
                language,
                node_count,
                edge_count,
                created_at_utc,
                callgraph_json
            ) VALUES (
                @id,
                @tenant_id,
                @scan_id,
                @content_digest,
                @language,
                @node_count,
                @edge_count,
                @created_at_utc,
                @callgraph_json::jsonb
            )
            ON CONFLICT (tenant_id, scan_id, content_digest) DO NOTHING
            """;

        var selectSql = $"""
            SELECT id, content_digest, node_count, edge_count
            FROM {CallGraphIngestionsTable}
            WHERE tenant_id = @tenant_id
              AND scan_id = @scan_id
              AND content_digest = @content_digest
            LIMIT 1
            """;

        await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, "writer", cancellationToken)
            .ConfigureAwait(false);

        await using (var insert = new NpgsqlCommand(insertSql, connection))
        {
            insert.Parameters.AddWithValue("id", callgraphId);
            insert.Parameters.AddWithValue("tenant_id", TenantId);
            insert.Parameters.AddWithValue("scan_id", scanId.Value.Trim());
            insert.Parameters.AddWithValue("content_digest", normalizedDigest);
            insert.Parameters.AddWithValue("language", language);
            insert.Parameters.AddWithValue("node_count", nodeCount);
            insert.Parameters.AddWithValue("edge_count", edgeCount);
            insert.Parameters.AddWithValue("created_at_utc", now.UtcDateTime);
            insert.Parameters.Add(new NpgsqlParameter<string>("callgraph_json", NpgsqlDbType.Jsonb) { TypedValue = payload });

            await insert.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
        }

        await using var select = new NpgsqlCommand(selectSql, connection);
        select.Parameters.AddWithValue("tenant_id", TenantId);
        select.Parameters.AddWithValue("scan_id", scanId.Value.Trim());
        select.Parameters.AddWithValue("content_digest", normalizedDigest);

        await using var reader = await select.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            throw new InvalidOperationException("Call graph ingestion row was not persisted.");
        }

        var persistedId = reader.GetString(0);
        var persistedDigest = reader.GetString(1);
        var persistedNodeCount = reader.GetInt32(2);
        var persistedEdgeCount = reader.GetInt32(3);

        _logger.LogInformation(
            "Ingested callgraph scan={ScanId} lang={Language} nodes={Nodes} edges={Edges} digest={Digest}",
            scanId.Value,
            language,
            persistedNodeCount,
            persistedEdgeCount,
            persistedDigest);

        return new CallGraphIngestionResult(
            CallgraphId: persistedId,
            NodeCount: persistedNodeCount,
            EdgeCount: persistedEdgeCount,
            Digest: persistedDigest);
    }

    private static string CreateCallGraphId(ScanId scanId, string contentDigest)
    {
        var bytes = Encoding.UTF8.GetBytes($"{scanId.Value.Trim()}:{contentDigest.Trim()}");
        var hash = SHA256.HashData(bytes);
        return $"cg_{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}

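The identifier produced by CreateCallGraphId above is deterministic, so re-submitting the same scan/digest pair maps to the same row, which is what the ON CONFLICT ... DO NOTHING insert relies on. A quick self-contained sketch of that derivation, with placeholder inputs:

using System.Security.Cryptography;
using System.Text;

// Same derivation as CreateCallGraphId above: "cg_" + lowercase hex SHA-256 of "<scanId>:<contentDigest>".
static string CallGraphId(string scanId, string contentDigest)
{
    var hash = SHA256.HashData(Encoding.UTF8.GetBytes($"{scanId.Trim()}:{contentDigest.Trim()}"));
    return $"cg_{Convert.ToHexString(hash).ToLowerInvariant()}";
}

Console.WriteLine(CallGraphId("scan-123", "sha256:abc") == CallGraphId("scan-123", "sha256:abc")); // True
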
@@ -306,17 +306,6 @@ public interface IFeedSnapshotTracker
    Task<FeedSnapshots> GetCurrentSnapshotsAsync(CancellationToken cancellationToken = default);
}

-/// <summary>
-/// Interface for scan manifest repository operations.
-/// </summary>
-public interface IScanManifestRepository
-{
-    /// <summary>
-    /// Find scans affected by feed changes.
-    /// </summary>
-    Task<List<string>> FindAffectedScansAsync(AffectedScansQuery query, CancellationToken cancellationToken = default);
-}

/// <summary>
/// Metrics for feed change rescore operations.
/// </summary>

@@ -0,0 +1,9 @@
namespace StellaOps.Scanner.WebService.Services;

public interface IScanMetadataRepository
{
    Task<ScanMetadata?> GetScanMetadataAsync(string scanId, CancellationToken cancellationToken = default);
}

public sealed record ScanMetadata(string? BaseDigest, string? TargetDigest, DateTimeOffset ScanTime);

@@ -0,0 +1,11 @@
using StellaOps.Authority.Storage.Postgres.Models;
using StellaOps.Authority.Storage.Postgres.Repositories;

namespace StellaOps.Scanner.WebService.Services;

internal sealed class NullOfflineKitAuditEmitter : IOfflineKitAuditEmitter
{
    public Task RecordAsync(OfflineKitAuditEntity entity, CancellationToken cancellationToken = default)
        => Task.CompletedTask;
}

@@ -0,0 +1,68 @@
using StellaOps.Scanner.WebService.Domain;

namespace StellaOps.Scanner.WebService.Services;

internal sealed class NullReachabilityComputeService : IReachabilityComputeService
{
    public Task<ComputeJobResult> TriggerComputeAsync(
        ScanId scanId,
        bool forceRecompute,
        IReadOnlyList<string>? entrypoints,
        IReadOnlyList<string>? targets,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId.Value);

        var jobId = $"reachability_{scanId.Value}";
        return Task.FromResult(new ComputeJobResult(
            JobId: jobId,
            Status: "scheduled",
            AlreadyInProgress: false,
            EstimatedDuration: null));
    }
}

internal sealed class NullReachabilityQueryService : IReachabilityQueryService
{
    public Task<IReadOnlyList<ComponentReachability>> GetComponentsAsync(
        ScanId scanId,
        string? purlFilter,
        string? statusFilter,
        CancellationToken cancellationToken = default)
        => Task.FromResult<IReadOnlyList<ComponentReachability>>(Array.Empty<ComponentReachability>());

    public Task<IReadOnlyList<ReachabilityFinding>> GetFindingsAsync(
        ScanId scanId,
        string? cveFilter,
        string? statusFilter,
        CancellationToken cancellationToken = default)
        => Task.FromResult<IReadOnlyList<ReachabilityFinding>>(Array.Empty<ReachabilityFinding>());
}

internal sealed class NullReachabilityExplainService : IReachabilityExplainService
{
    public Task<ReachabilityExplanation?> ExplainAsync(
        ScanId scanId,
        string cveId,
        string purl,
        CancellationToken cancellationToken = default)
        => Task.FromResult<ReachabilityExplanation?>(null);
}

internal sealed class NullSarifExportService : ISarifExportService
{
    public Task<object?> ExportAsync(ScanId scanId, CancellationToken cancellationToken = default)
        => Task.FromResult<object?>(null);
}

internal sealed class NullCycloneDxExportService : ICycloneDxExportService
{
    public Task<object?> ExportWithReachabilityAsync(ScanId scanId, CancellationToken cancellationToken = default)
        => Task.FromResult<object?>(null);
}

internal sealed class NullOpenVexExportService : IOpenVexExportService
{
    public Task<object?> ExportAsync(ScanId scanId, CancellationToken cancellationToken = default)
        => Task.FromResult<object?>(null);
}
@@ -0,0 +1,78 @@
using Microsoft.AspNetCore.Http;

namespace StellaOps.Scanner.WebService.Services;

internal sealed record OfflineKitImportRequest(
    string TenantId,
    string Actor,
    OfflineKitImportMetadata Metadata,
    IFormFile Bundle,
    IFormFile? Manifest,
    IFormFile? BundleSignature,
    IFormFile? ManifestSignature);

internal sealed class OfflineKitImportException : Exception
{
    public OfflineKitImportException(int statusCode, string reasonCode, string message, string? notes = null)
        : base(message)
    {
        StatusCode = statusCode;
        ReasonCode = reasonCode;
        Notes = notes;
    }

    public int StatusCode { get; }
    public string ReasonCode { get; }
    public string? Notes { get; }
}

internal sealed class OfflineKitImportMetadata
{
    public string? BundleId { get; set; }
    public string BundleSha256 { get; set; } = string.Empty;
    public long BundleSize { get; set; }
    public DateTimeOffset? CapturedAt { get; set; }
    public string? Channel { get; set; }
    public string? Kind { get; set; }
    public bool? IsDelta { get; set; }
    public string? BaseBundleId { get; set; }
    public string? ManifestSha256 { get; set; }
    public long? ManifestSize { get; set; }
}

internal sealed class OfflineKitStatusTransport
{
    public OfflineKitStatusBundleTransport? Current { get; set; }
    public List<OfflineKitComponentStatusTransport>? Components { get; set; }
}

internal sealed class OfflineKitStatusBundleTransport
{
    public string? BundleId { get; set; }
    public string? Channel { get; set; }
    public string? Kind { get; set; }
    public bool? IsDelta { get; set; }
    public string? BaseBundleId { get; set; }
    public string? BundleSha256 { get; set; }
    public long? BundleSize { get; set; }
    public DateTimeOffset? CapturedAt { get; set; }
    public DateTimeOffset? ImportedAt { get; set; }
}

internal sealed class OfflineKitComponentStatusTransport
{
    public string? Name { get; set; }
    public string? Version { get; set; }
    public string? Digest { get; set; }
    public DateTimeOffset? CapturedAt { get; set; }
    public long? SizeBytes { get; set; }
}

internal sealed class OfflineKitImportResponseTransport
{
    public string? ImportId { get; set; }
    public string? Status { get; set; }
    public DateTimeOffset? SubmittedAt { get; set; }
    public string? Message { get; set; }
}

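Since the import endpoint deserializes the "metadata" form field with JsonSerializerDefaults.Web (camelCase, case-insensitive), a metadata payload binding to OfflineKitImportMetadata could look like the sketch below; every value is illustrative.

// Illustrative 'metadata' form field for the import endpoint; property names follow
// OfflineKitImportMetadata with the web (camelCase) naming convention.
var metadataJson = """
{
  "bundleId": "kit-2025-01",
  "bundleSha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
  "bundleSize": 1048576,
  "capturedAt": "2025-01-01T00:00:00Z",
  "channel": "stable",
  "kind": "offline-kit",
  "isDelta": false,
  "manifestSha256": null,
  "manifestSize": null
}
""";
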
@@ -0,0 +1,698 @@
|
||||
using System.Diagnostics;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.AirGap.Importer.Contracts;
|
||||
using StellaOps.AirGap.Importer.Validation;
|
||||
using StellaOps.Authority.Storage.Postgres.Models;
|
||||
using StellaOps.Authority.Storage.Postgres.Repositories;
|
||||
using StellaOps.Scanner.Core.Configuration;
|
||||
using StellaOps.Scanner.Core.TrustAnchors;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
internal sealed class OfflineKitImportService
|
||||
{
|
||||
private readonly IOptionsMonitor<OfflineKitOptions> _options;
|
||||
private readonly ITrustAnchorRegistry _trustAnchorRegistry;
|
||||
private readonly OfflineKitMetricsStore _metrics;
|
||||
private readonly OfflineKitStateStore _stateStore;
|
||||
private readonly IOfflineKitAuditEmitter _auditEmitter;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly ILogger<OfflineKitImportService> _logger;
|
||||
|
||||
public OfflineKitImportService(
|
||||
IOptionsMonitor<OfflineKitOptions> options,
|
||||
ITrustAnchorRegistry trustAnchorRegistry,
|
||||
OfflineKitMetricsStore metrics,
|
||||
OfflineKitStateStore stateStore,
|
||||
IOfflineKitAuditEmitter auditEmitter,
|
||||
TimeProvider timeProvider,
|
||||
ILogger<OfflineKitImportService> logger)
|
||||
{
|
||||
_options = options ?? throw new ArgumentNullException(nameof(options));
|
||||
_trustAnchorRegistry = trustAnchorRegistry ?? throw new ArgumentNullException(nameof(trustAnchorRegistry));
|
||||
_metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
|
||||
_stateStore = stateStore ?? throw new ArgumentNullException(nameof(stateStore));
|
||||
_auditEmitter = auditEmitter ?? throw new ArgumentNullException(nameof(auditEmitter));
|
||||
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
|
||||
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
|
||||
}
|
||||
|
||||
public async Task<OfflineKitImportResponseTransport> ImportAsync(OfflineKitImportRequest request, CancellationToken cancellationToken)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
var options = _options.CurrentValue;
|
||||
if (!options.Enabled)
|
||||
{
|
||||
throw new OfflineKitImportException(StatusCodes.Status404NotFound, "OFFLINE_KIT_DISABLED", "Offline kit operations are not enabled.");
|
||||
}
|
||||
|
||||
var tenantId = string.IsNullOrWhiteSpace(request.TenantId) ? "default" : request.TenantId.Trim();
|
||||
var actor = string.IsNullOrWhiteSpace(request.Actor) ? "anonymous" : request.Actor.Trim();
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
|
||||
var importId = ComputeImportId(tenantId, request.Metadata.BundleSha256, now);
|
||||
var expectedBundleSha = NormalizeSha256(request.Metadata.BundleSha256);
|
||||
if (string.IsNullOrWhiteSpace(expectedBundleSha))
|
||||
{
|
||||
throw new OfflineKitImportException(StatusCodes.Status400BadRequest, "MANIFEST_INVALID", "metadata.bundleSha256 is required.");
|
||||
}
|
||||
|
||||
var bundleId = string.IsNullOrWhiteSpace(request.Metadata.BundleId)
|
||||
? $"sha256-{expectedBundleSha[..Math.Min(12, expectedBundleSha.Length)]}"
|
||||
: request.Metadata.BundleId.Trim();
|
||||
|
||||
var bundleDirectory = _stateStore.GetBundleDirectory(tenantId, bundleId);
|
||||
Directory.CreateDirectory(bundleDirectory);
|
||||
|
||||
var bundlePath = Path.Combine(bundleDirectory, "bundle.tgz");
|
||||
var manifestPath = Path.Combine(bundleDirectory, "manifest.json");
|
||||
var bundleSignaturePath = Path.Combine(bundleDirectory, "bundle-signature.bin");
|
||||
var manifestSignaturePath = Path.Combine(bundleDirectory, "manifest-signature.bin");
|
||||
|
||||
var statusForMetrics = "success";
|
||||
var reasonCode = "SUCCESS";
|
||||
|
||||
bool dsseVerified = false;
|
||||
bool rekorVerified = false;
|
||||
|
||||
try
|
||||
{
|
||||
var (bundleSha, bundleSize) = await SaveWithSha256Async(request.Bundle, bundlePath, cancellationToken).ConfigureAwait(false);
|
||||
if (!DigestsEqual(bundleSha, expectedBundleSha))
|
||||
{
|
||||
statusForMetrics = "failed_hash";
|
||||
reasonCode = "HASH_MISMATCH";
|
||||
throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, reasonCode, "Bundle digest does not match metadata.");
|
||||
}
|
||||
|
||||
var components = new List<OfflineKitComponentStatusTransport>();
|
||||
if (request.Manifest is not null)
|
||||
{
|
||||
var (manifestSha, _) = await SaveWithSha256Async(request.Manifest, manifestPath, cancellationToken).ConfigureAwait(false);
|
||||
if (!string.IsNullOrWhiteSpace(request.Metadata.ManifestSha256)
|
||||
&& !DigestsEqual(manifestSha, NormalizeSha256(request.Metadata.ManifestSha256)))
|
||||
{
|
||||
statusForMetrics = "failed_manifest";
|
||||
reasonCode = "SIG_FAIL_MANIFEST";
|
||||
throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, reasonCode, "Manifest digest does not match metadata.");
|
||||
}
|
||||
|
||||
try
|
||||
{
|
||||
var manifestJson = await File.ReadAllTextAsync(manifestPath, cancellationToken).ConfigureAwait(false);
|
||||
components.AddRange(ParseManifestComponents(manifestJson));
|
||||
}
|
||||
catch (Exception ex) when (ex is IOException or JsonException)
|
||||
{
|
||||
_logger.LogWarning(ex, "offlinekit.import failed to parse manifest components bundle_id={bundle_id}", bundleId);
|
||||
}
|
||||
}
|
||||
|
||||
byte[]? dsseBytes = null;
|
||||
DsseEnvelope? envelope = null;
|
||||
string? dsseNotes = null;
|
||||
|
||||
if (request.BundleSignature is not null)
|
||||
{
|
||||
dsseBytes = await SaveRawAsync(request.BundleSignature, bundleSignaturePath, cancellationToken).ConfigureAwait(false);
|
||||
try
|
||||
{
|
||||
envelope = DsseEnvelope.Parse(Encoding.UTF8.GetString(dsseBytes));
|
||||
}
|
||||
catch (Exception ex)
|
||||
{
|
||||
dsseNotes = $"dsse:parse-failed {ex.GetType().Name}";
|
||||
}
|
||||
}
|
||||
|
||||
if (options.RequireDsse && envelope is null)
|
||||
{
|
||||
statusForMetrics = "failed_dsse";
|
||||
reasonCode = "DSSE_VERIFY_FAIL";
|
||||
                throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, reasonCode, "DSSE envelope is missing.", notes: dsseNotes);
            }

            if (envelope is not null)
            {
                var sw = Stopwatch.StartNew();
                try
                {
                    dsseVerified = VerifyDsse(bundleSha, request.Metadata, envelope, options);
                }
                catch (OfflineKitImportException) when (!options.RequireDsse)
                {
                    dsseVerified = false;
                }
                finally
                {
                    sw.Stop();
                    _metrics.RecordAttestationVerifyLatency("dsse", sw.Elapsed.TotalSeconds, dsseVerified);
                }

                if (!dsseVerified)
                {
                    statusForMetrics = "failed_dsse";
                    reasonCode = "DSSE_VERIFY_FAIL";
                    if (options.RequireDsse)
                    {
                        throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, reasonCode, "DSSE verification failed.", notes: dsseNotes);
                    }
                }
            }

            if (options.RekorOfflineMode && request.ManifestSignature is not null && dsseBytes is not null)
            {
                var receiptBytes = await SaveRawAsync(request.ManifestSignature, manifestSignaturePath, cancellationToken).ConfigureAwait(false);
                if (LooksLikeRekorReceipt(receiptBytes))
                {
                    var sw = Stopwatch.StartNew();
                    try
                    {
                        rekorVerified = await VerifyRekorAsync(manifestSignaturePath, dsseBytes, options, cancellationToken).ConfigureAwait(false);
                    }
                    catch (OfflineKitImportException) when (!options.RequireDsse)
                    {
                        rekorVerified = false;
                    }
                    finally
                    {
                        sw.Stop();
                        _metrics.RecordRekorInclusionLatency(sw.Elapsed.TotalSeconds, rekorVerified);
                    }

                    if (!rekorVerified)
                    {
                        statusForMetrics = "failed_rekor";
                        reasonCode = "REKOR_VERIFY_FAIL";
                        if (options.RequireDsse)
                        {
                            throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, reasonCode, "Rekor receipt verification failed.");
                        }
                    }
                    else
                    {
                        _metrics.RecordRekorSuccess("offline");
                    }
                }
            }

            var status = new OfflineKitStatusTransport
            {
                Current = new OfflineKitStatusBundleTransport
                {
                    BundleId = bundleId,
                    Channel = request.Metadata.Channel?.Trim(),
                    Kind = request.Metadata.Kind?.Trim(),
                    IsDelta = request.Metadata.IsDelta ?? false,
                    BaseBundleId = request.Metadata.BaseBundleId?.Trim(),
                    BundleSha256 = NormalizeSha256(bundleSha),
                    BundleSize = bundleSize,
                    CapturedAt = request.Metadata.CapturedAt?.ToUniversalTime(),
                    ImportedAt = now
                },
                Components = components.OrderBy(c => c.Name ?? string.Empty, StringComparer.Ordinal).ToList()
            };

            await _stateStore.SaveStatusAsync(tenantId, status, cancellationToken).ConfigureAwait(false);

            _metrics.RecordImport(statusForMetrics, tenantId);
            await EmitAuditAsync(tenantId, actor, now, importId, bundleId, result: "accepted", reasonCode, cancellationToken).ConfigureAwait(false);

            return new OfflineKitImportResponseTransport
            {
                ImportId = importId,
                Status = statusForMetrics == "success" ? "accepted" : "accepted_with_warnings",
                SubmittedAt = now,
                Message = statusForMetrics == "success" ? "Accepted." : "Accepted with warnings."
            };
        }
        catch (OfflineKitImportException)
        {
            _metrics.RecordImport(statusForMetrics, tenantId);
            await EmitAuditAsync(tenantId, actor, now, importId, bundleId, result: "failed", reasonCode, cancellationToken).ConfigureAwait(false);
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "offlinekit.import failed tenant_id={tenant_id} import_id={import_id}", tenantId, importId);
            _metrics.RecordImport("failed_unknown", tenantId);
            await EmitAuditAsync(tenantId, actor, now, importId, bundleId, result: "failed", "INTERNAL_ERROR", cancellationToken).ConfigureAwait(false);
            throw new OfflineKitImportException(StatusCodes.Status500InternalServerError, "INTERNAL_ERROR", "Offline kit import failed.");
        }
    }

    private bool VerifyDsse(string bundleSha256Hex, OfflineKitImportMetadata metadata, DsseEnvelope envelope, OfflineKitOptions options)
    {
        var purl = ResolvePurl(metadata);
        var resolution = _trustAnchorRegistry.ResolveForPurl(purl);
        if (resolution is null)
        {
            throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, "TRUST_ROOT_MISSING", $"No trust anchor matches '{purl}'.");
        }

        var trustRoots = BuildTrustRoots(resolution, options.TrustRootDirectory ?? string.Empty);
        var pae = BuildPreAuthEncoding(envelope.PayloadType, envelope.Payload);

        var verified = 0;
        foreach (var signature in envelope.Signatures)
        {
            if (TryVerifySignature(trustRoots, signature, pae))
            {
                verified++;
            }
        }

        if (verified < Math.Max(1, resolution.MinSignatures))
        {
            throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, "DSSE_VERIFY_FAIL", "DSSE signature verification failed.");
        }

        var subjectSha = TryExtractDsseSubjectSha256(envelope);
        if (!string.IsNullOrWhiteSpace(subjectSha) && !DigestsEqual(bundleSha256Hex, subjectSha))
        {
            throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, "DSSE_VERIFY_FAIL", "DSSE subject digest does not match bundle digest.");
        }

        return true;
    }

    private static string ResolvePurl(OfflineKitImportMetadata metadata)
    {
        var kind = string.IsNullOrWhiteSpace(metadata.Kind) ? "offline-kit" : metadata.Kind.Trim().ToLowerInvariant();
        return $"pkg:stellaops/{kind}";
    }

    private static TrustRootConfig BuildTrustRoots(TrustAnchorResolution resolution, string rootBundlePath)
    {
        var publicKeys = new Dictionary<string, byte[]>(StringComparer.OrdinalIgnoreCase);
        foreach (var (keyId, keyBytes) in resolution.PublicKeys)
        {
            publicKeys[keyId] = keyBytes;
        }

        var fingerprints = publicKeys.Values
            .Select(ComputeFingerprint)
            .Distinct(StringComparer.Ordinal)
            .ToArray();

        return new TrustRootConfig(
            RootBundlePath: rootBundlePath,
            TrustedKeyFingerprints: fingerprints,
            AllowedSignatureAlgorithms: new[] { "rsassa-pss-sha256" },
            NotBeforeUtc: null,
            NotAfterUtc: null,
            PublicKeys: publicKeys);
    }

    private static byte[] BuildPreAuthEncoding(string payloadType, string payloadBase64)
    {
        // DSSE v1 pre-authentication encoding:
        // "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body, with lengths expressed in bytes.
        // Work on raw bytes so binary payloads are not corrupted by a UTF-8 round trip.
        var payloadBytes = Convert.FromBase64String(payloadBase64);
        var typeBytes = Encoding.UTF8.GetBytes(payloadType);

        using var stream = new MemoryStream();
        void Append(byte[] bytes) => stream.Write(bytes, 0, bytes.Length);

        Append(Encoding.UTF8.GetBytes($"DSSEv1 {typeBytes.Length} "));
        Append(typeBytes);
        Append(Encoding.UTF8.GetBytes($" {payloadBytes.Length} "));
        Append(payloadBytes);

        return stream.ToArray();
    }

    private static bool TryVerifySignature(TrustRootConfig trustRoots, DsseSignature signature, byte[] pae)
    {
        if (!trustRoots.PublicKeys.TryGetValue(signature.KeyId, out var keyBytes))
        {
            return false;
        }

        var fingerprint = ComputeFingerprint(keyBytes);
        if (!trustRoots.TrustedKeyFingerprints.Contains(fingerprint, StringComparer.Ordinal))
        {
            return false;
        }

        try
        {
            using var rsa = RSA.Create();
            rsa.ImportSubjectPublicKeyInfo(keyBytes, out _);
            var sig = Convert.FromBase64String(signature.Signature);
            return rsa.VerifyData(pae, sig, HashAlgorithmName.SHA256, RSASignaturePadding.Pss);
        }
        catch
        {
            return false;
        }
    }

    private static string? TryExtractDsseSubjectSha256(DsseEnvelope envelope)
    {
        try
        {
            var payloadBytes = Convert.FromBase64String(envelope.Payload);
            using var doc = JsonDocument.Parse(payloadBytes);
            if (!doc.RootElement.TryGetProperty("subject", out var subject) || subject.ValueKind != JsonValueKind.Array)
            {
                return null;
            }

            foreach (var entry in subject.EnumerateArray())
            {
                if (entry.ValueKind != JsonValueKind.Object)
                {
                    continue;
                }

                if (!entry.TryGetProperty("digest", out var digestObj) || digestObj.ValueKind != JsonValueKind.Object)
                {
                    continue;
                }

                if (digestObj.TryGetProperty("sha256", out var shaProp) && shaProp.ValueKind == JsonValueKind.String)
                {
                    return NormalizeSha256(shaProp.GetString());
                }
            }

            return null;
        }
        catch
        {
            return null;
        }
    }

    private static async Task<bool> VerifyRekorAsync(string receiptPath, byte[] dsseBytes, OfflineKitOptions options, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(options.RekorSnapshotDirectory))
        {
            throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, "REKOR_VERIFY_FAIL", "Rekor snapshot directory is not configured.");
        }

        var publicKeyPath = ResolveRekorPublicKeyPath(options.RekorSnapshotDirectory);
        if (publicKeyPath is null)
        {
            throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, "REKOR_VERIFY_FAIL", "Rekor public key was not found in the snapshot directory.");
        }

        var dsseSha = SHA256.HashData(dsseBytes);
        var result = await RekorOfflineReceiptVerifier.VerifyAsync(receiptPath, dsseSha, publicKeyPath, cancellationToken).ConfigureAwait(false);
        return result.Verified;
    }

    private static string? ResolveRekorPublicKeyPath(string snapshotDirectory)
    {
        var candidates = new[]
        {
            Path.Combine(snapshotDirectory, "rekor-pub.pem"),
            Path.Combine(snapshotDirectory, "rekor.pub"),
            Path.Combine(snapshotDirectory, "tlog-root.pub"),
            Path.Combine(snapshotDirectory, "tlog-root.pem"),
            Path.Combine(snapshotDirectory, "tlog", "rekor-pub.pem"),
            Path.Combine(snapshotDirectory, "tlog", "rekor.pub")
        };

        foreach (var candidate in candidates)
        {
            if (File.Exists(candidate))
            {
                return candidate;
            }
        }

        return null;
    }

    private static bool LooksLikeRekorReceipt(byte[] payload)
    {
        try
        {
            using var doc = JsonDocument.Parse(payload);
            var root = doc.RootElement;
            if (root.ValueKind != JsonValueKind.Object)
            {
                return false;
            }

            return root.TryGetProperty("uuid", out _)
                && root.TryGetProperty("logIndex", out _)
                && root.TryGetProperty("rootHash", out _)
                && root.TryGetProperty("hashes", out _)
                && root.TryGetProperty("checkpoint", out _);
        }
        catch (JsonException)
        {
            return false;
        }
    }

    private async Task EmitAuditAsync(
        string tenantId,
        string actor,
        DateTimeOffset timestamp,
        string importId,
        string bundleId,
        string result,
        string reasonCode,
        CancellationToken cancellationToken)
    {
        try
        {
            var entity = new OfflineKitAuditEntity
            {
                EventId = ComputeDeterministicEventId(tenantId, importId),
                TenantId = tenantId,
                EventType = "offlinekit.import",
                Timestamp = timestamp,
                Actor = actor,
                Details = JsonSerializer.Serialize(new { importId, bundleId, reasonCode }, new JsonSerializerOptions(JsonSerializerDefaults.Web)),
                Result = result
            };

            await _auditEmitter.RecordAsync(entity, cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "offlinekit.audit.emit failed tenant_id={tenant_id} import_id={import_id}", tenantId, importId);
        }
    }

    private static Guid ComputeDeterministicEventId(string tenantId, string importId)
    {
        var input = $"{tenantId}|{importId}".ToLowerInvariant();
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        Span<byte> guidBytes = stackalloc byte[16];
        hash.AsSpan(0, 16).CopyTo(guidBytes);
        return new Guid(guidBytes);
    }

    private static string ComputeImportId(string tenantId, string bundleSha256, DateTimeOffset submittedAt)
    {
        var input = $"{tenantId}|{NormalizeSha256(bundleSha256)}|{submittedAt.ToUnixTimeSeconds()}".ToLowerInvariant();
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    private static bool DigestsEqual(string computedHex, string expectedHex)
        => string.Equals(NormalizeSha256(computedHex), NormalizeSha256(expectedHex), StringComparison.OrdinalIgnoreCase);

    private static string NormalizeSha256(string? digest)
    {
        if (string.IsNullOrWhiteSpace(digest))
        {
            return string.Empty;
        }

        var value = digest.Trim();
        if (value.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
        {
            value = value.Substring("sha256:".Length);
        }

        return value.ToLowerInvariant();
    }

    private static string ComputeFingerprint(byte[] publicKey)
    {
        var hash = SHA256.HashData(publicKey);
        return Convert.ToHexString(hash).ToLowerInvariant();
    }

    private static async Task<(string Sha256Hex, long SizeBytes)> SaveWithSha256Async(IFormFile file, string path, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(file);
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var directory = Path.GetDirectoryName(path);
        if (!string.IsNullOrWhiteSpace(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var temp = path + ".tmp";
        long size = 0;

        using var hasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);

        // Dispose the streams before the move so the temp file is fully flushed and unlocked.
        await using (var output = File.Create(temp))
        await using (var input = file.OpenReadStream())
        {
            var buffer = new byte[128 * 1024];
            while (true)
            {
                var read = await input.ReadAsync(buffer, cancellationToken).ConfigureAwait(false);
                if (read == 0)
                {
                    break;
                }

                hasher.AppendData(buffer, 0, read);
                await output.WriteAsync(buffer.AsMemory(0, read), cancellationToken).ConfigureAwait(false);
                size += read;
            }
        }

        var hash = hasher.GetHashAndReset();
        var hex = Convert.ToHexString(hash).ToLowerInvariant();
        File.Move(temp, path, overwrite: true);

        return (hex, size);
    }

    private static async Task<byte[]> SaveRawAsync(IFormFile file, string path, CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(file);
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var directory = Path.GetDirectoryName(path);
        if (!string.IsNullOrWhiteSpace(directory))
        {
            Directory.CreateDirectory(directory);
        }

        // Dispose the streams before reading the file back so the contents are flushed and the handle released.
        await using (var output = File.Create(path))
        await using (var input = file.OpenReadStream())
        {
            await input.CopyToAsync(output, cancellationToken).ConfigureAwait(false);
        }

        return await File.ReadAllBytesAsync(path, cancellationToken).ConfigureAwait(false);
    }

    private static IReadOnlyList<OfflineKitComponentStatusTransport> ParseManifestComponents(string manifestJson)
    {
        if (string.IsNullOrWhiteSpace(manifestJson))
        {
            return Array.Empty<OfflineKitComponentStatusTransport>();
        }

        try
        {
            using var doc = JsonDocument.Parse(manifestJson);
            if (doc.RootElement.ValueKind == JsonValueKind.Object &&
                doc.RootElement.TryGetProperty("entries", out var entries) &&
                entries.ValueKind == JsonValueKind.Array)
            {
                return ParseEntries(entries);
            }

            if (doc.RootElement.ValueKind == JsonValueKind.Array)
            {
                return ParseEntries(doc.RootElement);
            }
        }
        catch (JsonException)
        {
            // NDJSON fallback.
        }

        var components = new List<OfflineKitComponentStatusTransport>();
        foreach (var line in manifestJson.Split(['\r', '\n'], StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries))
        {
            try
            {
                using var entryDoc = JsonDocument.Parse(line);
                if (TryParseComponent(entryDoc.RootElement, out var component))
                {
                    components.Add(component);
                }
            }
            catch (JsonException)
            {
                continue;
            }
        }

        return components;
    }

    private static IReadOnlyList<OfflineKitComponentStatusTransport> ParseEntries(JsonElement entries)
    {
        var components = new List<OfflineKitComponentStatusTransport>(entries.GetArrayLength());
        foreach (var entry in entries.EnumerateArray())
        {
            if (TryParseComponent(entry, out var component))
            {
                components.Add(component);
            }
        }

        return components;
    }

    private static bool TryParseComponent(JsonElement entry, out OfflineKitComponentStatusTransport component)
    {
        component = new OfflineKitComponentStatusTransport();
        if (entry.ValueKind != JsonValueKind.Object)
        {
            return false;
        }

        if (!entry.TryGetProperty("name", out var nameProp) || nameProp.ValueKind != JsonValueKind.String)
        {
            return false;
        }

        var name = nameProp.GetString();
        if (string.IsNullOrWhiteSpace(name))
        {
            return false;
        }

        string? sha = null;
        if (entry.TryGetProperty("sha256", out var shaProp) && shaProp.ValueKind == JsonValueKind.String)
        {
            sha = NormalizeSha256(shaProp.GetString());
        }

        long? size = null;
        if (entry.TryGetProperty("size", out var sizeProp) && sizeProp.ValueKind == JsonValueKind.Number && sizeProp.TryGetInt64(out var sizeValue))
        {
            size = sizeValue;
        }

        DateTimeOffset? capturedAt = null;
        if (entry.TryGetProperty("capturedAt", out var capturedProp) && capturedProp.ValueKind == JsonValueKind.String
            && DateTimeOffset.TryParse(capturedProp.GetString(), out var parsedCaptured))
        {
            capturedAt = parsedCaptured.ToUniversalTime();
        }

        component = new OfflineKitComponentStatusTransport
        {
            Name = name.Trim(),
            Digest = sha,
            SizeBytes = size,
            CapturedAt = capturedAt
        };

        return true;
    }
}

@@ -0,0 +1,294 @@
using System.Collections.Concurrent;
using System.Globalization;
using System.Linq;
using System.Text;

namespace StellaOps.Scanner.WebService.Services;

internal sealed class OfflineKitMetricsStore
{
    private static readonly double[] DefaultLatencyBucketsSeconds =
    {
        0.001, 0.0025, 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1, 2.5, 5, 10
    };

    private readonly ConcurrentDictionary<ImportCounterKey, long> _imports = new();
    private readonly ConcurrentDictionary<TwoLabelKey, Histogram> _attestationVerifyLatency = new();
    private readonly ConcurrentDictionary<string, Histogram> _rekorInclusionLatency = new(StringComparer.Ordinal);
    private readonly ConcurrentDictionary<string, long> _rekorSuccess = new(StringComparer.Ordinal);
    private readonly ConcurrentDictionary<string, long> _rekorRetry = new(StringComparer.Ordinal);

    public void RecordImport(string status, string tenantId)
    {
        status = NormalizeLabelValue(status, "unknown");
        tenantId = NormalizeLabelValue(tenantId, "unknown");
        _imports.AddOrUpdate(new ImportCounterKey(tenantId, status), 1, static (_, current) => current + 1);
    }

    public void RecordAttestationVerifyLatency(string attestationType, double seconds, bool success)
    {
        attestationType = NormalizeLabelValue(attestationType, "unknown");
        seconds = ClampSeconds(seconds);
        var key = new TwoLabelKey(attestationType, success ? "true" : "false");
        var histogram = _attestationVerifyLatency.GetOrAdd(key, _ => new Histogram(DefaultLatencyBucketsSeconds));
        histogram.Record(seconds);
    }

    public void RecordRekorSuccess(string mode)
    {
        mode = NormalizeLabelValue(mode, "unknown");
        _rekorSuccess.AddOrUpdate(mode, 1, static (_, current) => current + 1);
    }

    public void RecordRekorRetry(string reason)
    {
        reason = NormalizeLabelValue(reason, "unknown");
        _rekorRetry.AddOrUpdate(reason, 1, static (_, current) => current + 1);
    }

    public void RecordRekorInclusionLatency(double seconds, bool success)
    {
        seconds = ClampSeconds(seconds);
        var key = success ? "true" : "false";
        var histogram = _rekorInclusionLatency.GetOrAdd(key, _ => new Histogram(DefaultLatencyBucketsSeconds));
        histogram.Record(seconds);
    }

    public string RenderPrometheus()
    {
        var builder = new StringBuilder(capacity: 4096);

        AppendCounterHeader(builder, "offlinekit_import_total", "Total number of offline kit import attempts");
        foreach (var (key, value) in _imports.OrderBy(kv => kv.Key.TenantId, StringComparer.Ordinal)
            .ThenBy(kv => kv.Key.Status, StringComparer.Ordinal))
        {
            builder.Append("offlinekit_import_total{tenant_id=\"");
            builder.Append(EscapeLabelValue(key.TenantId));
            builder.Append("\",status=\"");
            builder.Append(EscapeLabelValue(key.Status));
            builder.Append("\"} ");
            builder.Append(value.ToString(CultureInfo.InvariantCulture));
            builder.Append('\n');
        }

        AppendHistogramTwoLabels(
            builder,
            name: "offlinekit_attestation_verify_latency_seconds",
            help: "Time taken to verify attestations during import",
            labelA: "attestation_type",
            labelB: "success",
            histograms: _attestationVerifyLatency);

        AppendCounterHeader(builder, "attestor_rekor_success_total", "Successful Rekor verification count");
        foreach (var (key, value) in _rekorSuccess.OrderBy(kv => kv.Key, StringComparer.Ordinal))
        {
            builder.Append("attestor_rekor_success_total{mode=\"");
            builder.Append(EscapeLabelValue(key));
            builder.Append("\"} ");
            builder.Append(value.ToString(CultureInfo.InvariantCulture));
            builder.Append('\n');
        }

        AppendCounterHeader(builder, "attestor_rekor_retry_total", "Rekor verification retry count");
        foreach (var (key, value) in _rekorRetry.OrderBy(kv => kv.Key, StringComparer.Ordinal))
        {
            builder.Append("attestor_rekor_retry_total{reason=\"");
            builder.Append(EscapeLabelValue(key));
            builder.Append("\"} ");
            builder.Append(value.ToString(CultureInfo.InvariantCulture));
            builder.Append('\n');
        }

        AppendHistogramOneLabel(
            builder,
            name: "rekor_inclusion_latency",
            help: "Time to verify Rekor inclusion proof",
            label: "success",
            histograms: _rekorInclusionLatency);

        return builder.ToString();
    }

    private static void AppendCounterHeader(StringBuilder builder, string name, string help)
    {
        builder.Append("# HELP ");
        builder.Append(name);
        builder.Append(' ');
        builder.Append(help);
        builder.Append('\n');
        builder.Append("# TYPE ");
        builder.Append(name);
        builder.Append(" counter\n");
    }

    private static void AppendHistogramTwoLabels(
        StringBuilder builder,
        string name,
        string help,
        string labelA,
        string labelB,
        ConcurrentDictionary<TwoLabelKey, Histogram> histograms)
    {
        builder.Append("# HELP ");
        builder.Append(name);
        builder.Append(' ');
        builder.Append(help);
        builder.Append('\n');
        builder.Append("# TYPE ");
        builder.Append(name);
        builder.Append(" histogram\n");

        foreach (var grouping in histograms.OrderBy(kv => kv.Key.LabelA, StringComparer.Ordinal)
            .ThenBy(kv => kv.Key.LabelB, StringComparer.Ordinal))
        {
            var labels = $"{labelA}=\"{EscapeLabelValue(grouping.Key.LabelA)}\",{labelB}=\"{EscapeLabelValue(grouping.Key.LabelB)}\"";
            AppendHistogramSeries(builder, name, labels, grouping.Value.Snapshot());
        }
    }

    private static void AppendHistogramOneLabel(
        StringBuilder builder,
        string name,
        string help,
        string label,
        ConcurrentDictionary<string, Histogram> histograms)
    {
        builder.Append("# HELP ");
        builder.Append(name);
        builder.Append(' ');
        builder.Append(help);
        builder.Append('\n');
        builder.Append("# TYPE ");
        builder.Append(name);
        builder.Append(" histogram\n");

        foreach (var grouping in histograms.OrderBy(kv => kv.Key, StringComparer.Ordinal))
        {
            var labels = $"{label}=\"{EscapeLabelValue(grouping.Key)}\"";
            AppendHistogramSeries(builder, name, labels, grouping.Value.Snapshot());
        }
    }

    private static void AppendHistogramSeries(
        StringBuilder builder,
        string name,
        string labels,
        HistogramSnapshot snapshot)
    {
        long cumulative = 0;

        for (var i = 0; i < snapshot.BucketUpperBounds.Length; i++)
        {
            cumulative += snapshot.BucketCounts[i];
            builder.Append(name);
            builder.Append("_bucket{");
            builder.Append(labels);
            builder.Append(",le=\"");
            builder.Append(snapshot.BucketUpperBounds[i].ToString("G", CultureInfo.InvariantCulture));
            builder.Append("\"} ");
            builder.Append(cumulative.ToString(CultureInfo.InvariantCulture));
            builder.Append('\n');
        }

        cumulative += snapshot.BucketCounts[^1];
        builder.Append(name);
        builder.Append("_bucket{");
        builder.Append(labels);
        builder.Append(",le=\"+Inf\"} ");
        builder.Append(cumulative.ToString(CultureInfo.InvariantCulture));
        builder.Append('\n');

        builder.Append(name);
        builder.Append("_sum{");
        builder.Append(labels);
        builder.Append("} ");
        builder.Append(snapshot.SumSeconds.ToString("G", CultureInfo.InvariantCulture));
        builder.Append('\n');

        builder.Append(name);
        builder.Append("_count{");
        builder.Append(labels);
        builder.Append("} ");
        builder.Append(snapshot.Count.ToString(CultureInfo.InvariantCulture));
        builder.Append('\n');
    }

    private static double ClampSeconds(double seconds)
        => double.IsNaN(seconds) || double.IsInfinity(seconds) || seconds < 0 ? 0 : seconds;

    private static string NormalizeLabelValue(string? value, string fallback)
        => string.IsNullOrWhiteSpace(value) ? fallback : value.Trim();

    private static string EscapeLabelValue(string value)
        => value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);

    private sealed class Histogram
    {
        private readonly double[] _bucketUpperBounds;
        private readonly long[] _bucketCounts;
        private long _count;
        private double _sumSeconds;
        private readonly object _lock = new();

        public Histogram(double[] bucketUpperBounds)
        {
            _bucketUpperBounds = bucketUpperBounds ?? throw new ArgumentNullException(nameof(bucketUpperBounds));
            _bucketCounts = new long[_bucketUpperBounds.Length + 1];
        }

        public void Record(double seconds)
        {
            lock (_lock)
            {
                _count++;
                _sumSeconds += seconds;

                var bucketIndex = _bucketUpperBounds.Length;
                for (var i = 0; i < _bucketUpperBounds.Length; i++)
                {
                    if (seconds <= _bucketUpperBounds[i])
                    {
                        bucketIndex = i;
                        break;
                    }
                }

                _bucketCounts[bucketIndex]++;
            }
        }

        public HistogramSnapshot Snapshot()
        {
            lock (_lock)
            {
                return new HistogramSnapshot(
                    (double[])_bucketUpperBounds.Clone(),
                    (long[])_bucketCounts.Clone(),
                    _count,
                    _sumSeconds);
            }
        }
    }

    private sealed record HistogramSnapshot(
        double[] BucketUpperBounds,
        long[] BucketCounts,
        long Count,
        double SumSeconds);

    private sealed record ImportCounterKey(string TenantId, string Status);

    private sealed record TwoLabelKey(string LabelA, string LabelB);
}

@@ -0,0 +1,89 @@
using System.Linq;
using System.Text.Json;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;

namespace StellaOps.Scanner.WebService.Services;

internal sealed class OfflineKitStateStore
{
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true
    };

    private readonly string _rootDirectory;
    private readonly ILogger<OfflineKitStateStore> _logger;

    public OfflineKitStateStore(IHostEnvironment environment, ILogger<OfflineKitStateStore> logger)
    {
        ArgumentNullException.ThrowIfNull(environment);
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _rootDirectory = Path.Combine(environment.ContentRootPath, "data", "offline-kit");
    }

    public string GetBundleDirectory(string tenantId, string bundleId)
    {
        var safeTenant = SanitizePathSegment(tenantId);
        var safeBundle = SanitizePathSegment(bundleId);
        return Path.Combine(_rootDirectory, "bundles", safeTenant, safeBundle);
    }

    public async Task SaveStatusAsync(string tenantId, OfflineKitStatusTransport status, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentNullException.ThrowIfNull(status);

        var stateDirectory = Path.Combine(_rootDirectory, ".state");
        Directory.CreateDirectory(stateDirectory);

        var path = GetStatusPath(tenantId);
        var temp = path + ".tmp";

        await using (var stream = File.Create(temp))
        {
            await JsonSerializer.SerializeAsync(stream, status, JsonOptions, cancellationToken).ConfigureAwait(false);
        }

        // Atomically replace the previous state file instead of copy-then-delete.
        File.Move(temp, path, overwrite: true);
    }

    public async Task<OfflineKitStatusTransport?> LoadStatusAsync(string tenantId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);

        var path = GetStatusPath(tenantId);
        if (!File.Exists(path))
        {
            return null;
        }

        try
        {
            await using var stream = File.OpenRead(path);
            return await JsonSerializer.DeserializeAsync<OfflineKitStatusTransport>(stream, JsonOptions, cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex) when (ex is IOException or JsonException)
        {
            _logger.LogWarning(ex, "Failed to read offline kit state from {Path}", path);
            return null;
        }
    }

    private string GetStatusPath(string tenantId)
    {
        var safeTenant = SanitizePathSegment(tenantId);
        return Path.Combine(_rootDirectory, ".state", $"offline-kit-active__{safeTenant}.json");
    }

    private static string SanitizePathSegment(string value)
    {
        var trimmed = value.Trim().ToLowerInvariant();
        var invalid = Path.GetInvalidFileNameChars();
        var chars = trimmed
            .Select(c => invalid.Contains(c) || c == '/' || c == '\\' || char.IsWhiteSpace(c) ? '_' : c)
            .ToArray();
        return new string(chars);
    }
}

@@ -0,0 +1,192 @@
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Storage.Catalog;
using StellaOps.Scanner.Storage.Services;
using StellaOps.Scanner.WebService.Contracts;
using StellaOps.Scanner.WebService.Domain;

namespace StellaOps.Scanner.WebService.Services;

internal sealed class SbomIngestionService : ISbomIngestionService
{
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = false
    };

    private readonly ArtifactStorageService _artifactStorage;
    private readonly ILogger<SbomIngestionService> _logger;

    public SbomIngestionService(ArtifactStorageService artifactStorage, ILogger<SbomIngestionService> logger)
    {
        _artifactStorage = artifactStorage ?? throw new ArgumentNullException(nameof(artifactStorage));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public string? DetectFormat(JsonDocument sbomDocument)
    {
        ArgumentNullException.ThrowIfNull(sbomDocument);

        if (sbomDocument.RootElement.ValueKind != JsonValueKind.Object)
        {
            return null;
        }

        var root = sbomDocument.RootElement;

        if (root.TryGetProperty("bomFormat", out var bomFormat)
            && bomFormat.ValueKind == JsonValueKind.String
            && string.Equals(bomFormat.GetString(), "CycloneDX", StringComparison.OrdinalIgnoreCase))
        {
            return SbomFormats.CycloneDx;
        }

        if (root.TryGetProperty("spdxVersion", out var spdxVersion)
            && spdxVersion.ValueKind == JsonValueKind.String
            && !string.IsNullOrWhiteSpace(spdxVersion.GetString()))
        {
            return SbomFormats.Spdx;
        }

        return null;
    }

    public SbomValidationResult Validate(JsonDocument sbomDocument, string format)
    {
        ArgumentNullException.ThrowIfNull(sbomDocument);
        ArgumentException.ThrowIfNullOrWhiteSpace(format);

        if (sbomDocument.RootElement.ValueKind != JsonValueKind.Object)
        {
            return SbomValidationResult.Failure("SBOM root must be a JSON object.");
        }

        var root = sbomDocument.RootElement;

        if (string.Equals(format, SbomFormats.CycloneDx, StringComparison.OrdinalIgnoreCase))
        {
            if (!root.TryGetProperty("bomFormat", out var bomFormat)
                || bomFormat.ValueKind != JsonValueKind.String
                || !string.Equals(bomFormat.GetString(), "CycloneDX", StringComparison.OrdinalIgnoreCase))
            {
                return SbomValidationResult.Failure("CycloneDX SBOM must include bomFormat == 'CycloneDX'.");
            }

            return SbomValidationResult.Success();
        }

        if (string.Equals(format, SbomFormats.Spdx, StringComparison.OrdinalIgnoreCase))
        {
            if (!root.TryGetProperty("spdxVersion", out var spdxVersion)
                || spdxVersion.ValueKind != JsonValueKind.String
                || string.IsNullOrWhiteSpace(spdxVersion.GetString()))
            {
                return SbomValidationResult.Failure("SPDX SBOM must include spdxVersion.");
            }

            return SbomValidationResult.Success();
        }

        return SbomValidationResult.Failure($"Unsupported SBOM format '{format}'.");
    }

    public async Task<SbomIngestionResult> IngestAsync(
        ScanId scanId,
        JsonDocument sbomDocument,
        string format,
        string? contentDigest,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(sbomDocument);
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId.Value);
        ArgumentException.ThrowIfNullOrWhiteSpace(format);

        var (documentFormat, mediaType) = ResolveStorageFormat(format);
        var bytes = JsonSerializer.SerializeToUtf8Bytes(sbomDocument.RootElement, JsonOptions);

        await using var stream = new MemoryStream(bytes, writable: false);
        var stored = await _artifactStorage.StoreArtifactAsync(
            ArtifactDocumentType.ImageBom,
            documentFormat,
            mediaType,
            stream,
            immutable: true,
            ttlClass: "default",
            expiresAtUtc: null,
            cancellationToken: cancellationToken)
            .ConfigureAwait(false);

        if (!string.IsNullOrWhiteSpace(contentDigest)
            && !string.Equals(contentDigest.Trim(), stored.BytesSha256, StringComparison.OrdinalIgnoreCase))
        {
            _logger.LogDebug(
                "SBOM Content-Digest header did not match stored digest header={HeaderDigest} stored={StoredDigest}",
                contentDigest.Trim(),
                stored.BytesSha256);
        }

        var componentCount = CountComponents(sbomDocument, format);

        _logger.LogInformation(
            "Ingested sbom scan={ScanId} format={Format} components={Components} digest={Digest} id={SbomId}",
            scanId.Value,
            format,
            componentCount,
            stored.BytesSha256,
            stored.Id);

        return new SbomIngestionResult(
            SbomId: stored.Id,
            Format: format,
            ComponentCount: componentCount,
            Digest: stored.BytesSha256);
    }

    private static (ArtifactDocumentFormat Format, string MediaType) ResolveStorageFormat(string format)
    {
        if (string.Equals(format, SbomFormats.CycloneDx, StringComparison.OrdinalIgnoreCase))
        {
            return (ArtifactDocumentFormat.CycloneDxJson, "application/vnd.cyclonedx+json");
        }

        if (string.Equals(format, SbomFormats.Spdx, StringComparison.OrdinalIgnoreCase))
        {
            return (ArtifactDocumentFormat.SpdxJson, "application/spdx+json");
        }

        return (ArtifactDocumentFormat.CycloneDxJson, "application/json");
    }

    private static int CountComponents(JsonDocument document, string format)
    {
        if (document.RootElement.ValueKind != JsonValueKind.Object)
        {
            return 0;
        }

        var root = document.RootElement;

        if (string.Equals(format, SbomFormats.CycloneDx, StringComparison.OrdinalIgnoreCase))
        {
            if (root.TryGetProperty("components", out var components) && components.ValueKind == JsonValueKind.Array)
            {
                return components.GetArrayLength();
            }

            return 0;
        }

        if (string.Equals(format, SbomFormats.Spdx, StringComparison.OrdinalIgnoreCase))
        {
            if (root.TryGetProperty("packages", out var packages) && packages.ValueKind == JsonValueKind.Array)
            {
                return packages.GetArrayLength();
            }

            return 0;
        }

        return 0;
    }
}

@@ -175,6 +175,7 @@ public interface IScanManifestRepository
{
    Task<SignedScanManifest?> GetManifestAsync(string scanId, string? manifestHash = null, CancellationToken cancellationToken = default);
    Task SaveManifestAsync(SignedScanManifest manifest, CancellationToken cancellationToken = default);
    Task<List<string>> FindAffectedScansAsync(AffectedScansQuery query, CancellationToken cancellationToken = default);
}

/// <summary>

@@ -20,9 +20,11 @@
    <ProjectReference Include="../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
    <ProjectReference Include="../../Authority/__Libraries/StellaOps.Authority.Storage.Postgres/StellaOps.Authority.Storage.Postgres.csproj" />
    <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" />
    <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" />
    <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj" />
    <ProjectReference Include="../../AirGap/StellaOps.AirGap.Importer/StellaOps.AirGap.Importer.csproj" />
    <ProjectReference Include="../../Policy/__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Cryptography.DependencyInjection/StellaOps.Cryptography.DependencyInjection.csproj" />
@@ -43,4 +45,8 @@
    <ProjectReference Include="../../Concelier/__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Messaging/StellaOps.Messaging.csproj" />
  </ItemGroup>

  <ItemGroup>
    <Compile Remove="Endpoints\UnknownsEndpoints.cs" />
  </ItemGroup>
</Project>

@@ -5,3 +5,4 @@
| `SCAN-API-3101-001` | `docs/implplan/SPRINT_3101_0001_0001_scanner_api_standardization.md` | DOING | Align Scanner OpenAPI spec with current endpoints and include ProofSpine routes; compose into `src/Api/StellaOps.Api.OpenApi/stella.yaml`. |
| `PROOFSPINE-3100-API` | `docs/implplan/SPRINT_3100_0001_0001_proof_spine_system.md` | DOING | Implement and test `/api/v1/spines/*` endpoints and wire verification output. |
| `SCAN-AIRGAP-0340-001` | `docs/implplan/SPRINT_0340_0001_0001_scanner_offline_config.md` | BLOCKED | Offline kit verification wiring is blocked on an import pipeline + offline Rekor verifier. |
| `SCAN-API-3103-001` | `docs/implplan/SPRINT_3103_0001_0001_scanner_api_ingestion_completion.md` | DOING | Implement missing ingestion services + DI for callgraph/SBOM endpoints and add deterministic integration tests. |