save progress
This commit is contained in:
@@ -0,0 +1,232 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Npgsql;
|
||||
using NpgsqlTypes;
|
||||
using StellaOps.Scanner.Storage.Postgres;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using StellaOps.Scanner.WebService.Domain;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
/// <summary>
/// Validates and persists "stella.callgraph.v1" call-graph payloads into the
/// <c>callgraph_ingestions</c> Postgres table, keyed by (tenant, scan, content digest).
/// Ingestion is idempotent: re-submitting the same digest for the same scan is a no-op
/// (INSERT ... ON CONFLICT DO NOTHING) and returns the previously persisted row.
/// </summary>
internal sealed class CallGraphIngestionService : ICallGraphIngestionService
{
    // NOTE(review): tenant is hard-coded to a single well-known GUID — presumably a
    // single-tenant placeholder until tenant resolution is wired in; confirm before reuse.
    private const string TenantContext = "00000000-0000-0000-0000-000000000001";
    private static readonly Guid TenantId = Guid.Parse(TenantContext);

    // Web defaults (camelCase) and null-suppression keep the stored JSON compact.
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    private readonly ScannerDataSource _dataSource;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<CallGraphIngestionService> _logger;

    // Schema is resolved per call so a data-source reconfiguration is picked up.
    private string SchemaName => _dataSource.SchemaName ?? ScannerDataSource.DefaultSchema;
    private string CallGraphIngestionsTable => $"{SchemaName}.callgraph_ingestions";

    public CallGraphIngestionService(
        ScannerDataSource dataSource,
        TimeProvider timeProvider,
        ILogger<CallGraphIngestionService> logger)
    {
        _dataSource = dataSource ?? throw new ArgumentNullException(nameof(dataSource));
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Structural validation of an incoming payload. Collects all failures rather than
    /// stopping at the first. Requires the exact schema id "stella.callgraph.v1", a
    /// non-blank scan key and language, and at least one node and one edge.
    /// </summary>
    public CallGraphValidationResult Validate(CallGraphV1Dto callGraph)
    {
        ArgumentNullException.ThrowIfNull(callGraph);

        var errors = new List<string>();

        if (string.IsNullOrWhiteSpace(callGraph.Schema))
        {
            errors.Add("Schema is required.");
        }
        else if (!string.Equals(callGraph.Schema, "stella.callgraph.v1", StringComparison.Ordinal))
        {
            errors.Add($"Unsupported schema '{callGraph.Schema}'. Expected 'stella.callgraph.v1'.");
        }

        if (string.IsNullOrWhiteSpace(callGraph.ScanKey))
        {
            errors.Add("ScanKey is required.");
        }

        if (string.IsNullOrWhiteSpace(callGraph.Language))
        {
            errors.Add("Language is required.");
        }

        if (callGraph.Nodes is null || callGraph.Nodes.Count == 0)
        {
            errors.Add("At least one node is required.");
        }

        if (callGraph.Edges is null || callGraph.Edges.Count == 0)
        {
            errors.Add("At least one edge is required.");
        }

        return errors.Count == 0
            ? CallGraphValidationResult.Success()
            : CallGraphValidationResult.Failure(errors.ToArray());
    }

    /// <summary>
    /// Looks up a previously ingested call graph by (scan, content digest).
    /// Returns null on blank inputs or when no row matches; used by callers to
    /// detect duplicates before re-ingesting.
    /// </summary>
    public async Task<ExistingCallGraphDto?> FindByDigestAsync(
        ScanId scanId,
        string contentDigest,
        CancellationToken cancellationToken = default)
    {
        if (string.IsNullOrWhiteSpace(scanId.Value))
        {
            return null;
        }

        if (string.IsNullOrWhiteSpace(contentDigest))
        {
            return null;
        }

        var sql = $"""
            SELECT id, content_digest, created_at_utc
            FROM {CallGraphIngestionsTable}
            WHERE tenant_id = @tenant_id
            AND scan_id = @scan_id
            AND content_digest = @content_digest
            LIMIT 1
            """;

        // Read-only lookup — open with the "reader" role.
        await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, "reader", cancellationToken)
            .ConfigureAwait(false);
        await using var command = new NpgsqlCommand(sql, connection);
        command.Parameters.AddWithValue("tenant_id", TenantId);
        command.Parameters.AddWithValue("scan_id", scanId.Value.Trim());
        command.Parameters.AddWithValue("content_digest", contentDigest.Trim());

        await using var reader = await command.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            return null;
        }

        return new ExistingCallGraphDto(
            Id: reader.GetString(0),
            Digest: reader.GetString(1),
            CreatedAt: reader.GetFieldValue<DateTimeOffset>(2));
    }

    /// <summary>
    /// Persists a call graph and returns the row that ended up in the table.
    /// Idempotent per (tenant, scan, digest): the INSERT uses ON CONFLICT DO NOTHING,
    /// and the follow-up SELECT returns whichever row won — so concurrent or repeated
    /// ingests of the same digest converge on one result.
    /// </summary>
    /// <exception cref="InvalidOperationException">
    /// If the row can neither be inserted nor found afterwards (should not happen).
    /// </exception>
    public async Task<CallGraphIngestionResult> IngestAsync(
        ScanId scanId,
        CallGraphV1Dto callGraph,
        string contentDigest,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(callGraph);
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId.Value);
        ArgumentException.ThrowIfNullOrWhiteSpace(contentDigest);

        var normalizedDigest = contentDigest.Trim();
        // Deterministic id derived from scan + digest, so retries produce the same id.
        var callgraphId = CreateCallGraphId(scanId, normalizedDigest);
        var now = _timeProvider.GetUtcNow();
        var nodeCount = callGraph.Nodes?.Count ?? 0;
        var edgeCount = callGraph.Edges?.Count ?? 0;
        var language = callGraph.Language?.Trim() ?? string.Empty;
        var payload = JsonSerializer.Serialize(callGraph, JsonOptions);

        var insertSql = $"""
            INSERT INTO {CallGraphIngestionsTable} (
            id,
            tenant_id,
            scan_id,
            content_digest,
            language,
            node_count,
            edge_count,
            created_at_utc,
            callgraph_json
            ) VALUES (
            @id,
            @tenant_id,
            @scan_id,
            @content_digest,
            @language,
            @node_count,
            @edge_count,
            @created_at_utc,
            @callgraph_json::jsonb
            )
            ON CONFLICT (tenant_id, scan_id, content_digest) DO NOTHING
            """;

        var selectSql = $"""
            SELECT id, content_digest, node_count, edge_count
            FROM {CallGraphIngestionsTable}
            WHERE tenant_id = @tenant_id
            AND scan_id = @scan_id
            AND content_digest = @content_digest
            LIMIT 1
            """;

        await using var connection = await _dataSource.OpenConnectionAsync(TenantContext, "writer", cancellationToken)
            .ConfigureAwait(false);

        await using (var insert = new NpgsqlCommand(insertSql, connection))
        {
            insert.Parameters.AddWithValue("id", callgraphId);
            insert.Parameters.AddWithValue("tenant_id", TenantId);
            insert.Parameters.AddWithValue("scan_id", scanId.Value.Trim());
            insert.Parameters.AddWithValue("content_digest", normalizedDigest);
            insert.Parameters.AddWithValue("language", language);
            insert.Parameters.AddWithValue("node_count", nodeCount);
            insert.Parameters.AddWithValue("edge_count", edgeCount);
            insert.Parameters.AddWithValue("created_at_utc", now.UtcDateTime);
            insert.Parameters.Add(new NpgsqlParameter<string>("callgraph_json", NpgsqlDbType.Jsonb) { TypedValue = payload });

            await insert.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
        }

        // Read back the canonical row (ours or a prior winner of the conflict).
        await using var select = new NpgsqlCommand(selectSql, connection);
        select.Parameters.AddWithValue("tenant_id", TenantId);
        select.Parameters.AddWithValue("scan_id", scanId.Value.Trim());
        select.Parameters.AddWithValue("content_digest", normalizedDigest);

        await using var reader = await select.ExecuteReaderAsync(cancellationToken).ConfigureAwait(false);
        if (!await reader.ReadAsync(cancellationToken).ConfigureAwait(false))
        {
            throw new InvalidOperationException("Call graph ingestion row was not persisted.");
        }

        var persistedId = reader.GetString(0);
        var persistedDigest = reader.GetString(1);
        var persistedNodeCount = reader.GetInt32(2);
        var persistedEdgeCount = reader.GetInt32(3);

        _logger.LogInformation(
            "Ingested callgraph scan={ScanId} lang={Language} nodes={Nodes} edges={Edges} digest={Digest}",
            scanId.Value,
            language,
            persistedNodeCount,
            persistedEdgeCount,
            persistedDigest);

        return new CallGraphIngestionResult(
            CallgraphId: persistedId,
            NodeCount: persistedNodeCount,
            EdgeCount: persistedEdgeCount,
            Digest: persistedDigest);
    }

    /// <summary>
    /// Deterministic identifier: "cg_" + lowercase hex SHA-256 of "scanId:digest".
    /// </summary>
    private static string CreateCallGraphId(ScanId scanId, string contentDigest)
    {
        var bytes = Encoding.UTF8.GetBytes($"{scanId.Value.Trim()}:{contentDigest.Trim()}");
        var hash = SHA256.HashData(bytes);
        return $"cg_{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}
|
||||
|
||||
@@ -306,17 +306,6 @@ public interface IFeedSnapshotTracker
|
||||
Task<FeedSnapshots> GetCurrentSnapshotsAsync(CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
/// <summary>
/// Repository abstraction over scan manifests, used to answer
/// "which scans does a feed change invalidate?" queries.
/// </summary>
public interface IScanManifestRepository
{
    /// <summary>
    /// Find the identifiers of scans affected by the feed changes described by
    /// <paramref name="query"/>.
    /// </summary>
    /// <param name="query">Describes the feed delta to match scans against.</param>
    /// <param name="cancellationToken">Cancels the lookup.</param>
    /// <returns>Scan identifiers needing rescore; empty when none match.</returns>
    Task<List<string>> FindAffectedScansAsync(AffectedScansQuery query, CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
|
||||
/// Metrics for feed change rescore operations.
|
||||
/// </summary>
|
||||
|
||||
@@ -0,0 +1,9 @@
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
/// <summary>
/// Read-side access to per-scan metadata (image digests and scan timestamp).
/// </summary>
public interface IScanMetadataRepository
{
    /// <summary>
    /// Fetch metadata for a scan, or null when the scan is unknown.
    /// </summary>
    Task<ScanMetadata?> GetScanMetadataAsync(string scanId, CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Immutable scan metadata snapshot. BaseDigest/TargetDigest may be null when the
/// corresponding image digest was not recorded; ScanTime is when the scan ran.
/// </summary>
public sealed record ScanMetadata(string? BaseDigest, string? TargetDigest, DateTimeOffset ScanTime);
|
||||
|
||||
@@ -0,0 +1,11 @@
|
||||
using StellaOps.Authority.Storage.Postgres.Models;
|
||||
using StellaOps.Authority.Storage.Postgres.Repositories;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
/// <summary>
/// No-op audit sink: accepts every offline-kit audit entry and discards it.
/// Registered when no real audit pipeline is configured.
/// </summary>
internal sealed class NullOfflineKitAuditEmitter : IOfflineKitAuditEmitter
{
    public Task RecordAsync(OfflineKitAuditEntity entity, CancellationToken cancellationToken = default)
    {
        // Intentionally discard the entry; nothing is persisted or forwarded.
        return Task.CompletedTask;
    }
}
|
||||
|
||||
@@ -0,0 +1,68 @@
|
||||
using StellaOps.Scanner.WebService.Domain;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
/// <summary>
/// Placeholder compute service: pretends to schedule a reachability job and returns
/// a synthetic job handle without doing any work.
/// </summary>
internal sealed class NullReachabilityComputeService : IReachabilityComputeService
{
    public Task<ComputeJobResult> TriggerComputeAsync(
        ScanId scanId,
        bool forceRecompute,
        IReadOnlyList<string>? entrypoints,
        IReadOnlyList<string>? targets,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId.Value);

        // Job id is deterministic per scan so repeated triggers look idempotent.
        var result = new ComputeJobResult(
            JobId: $"reachability_{scanId.Value}",
            Status: "scheduled",
            AlreadyInProgress: false,
            EstimatedDuration: null);

        return Task.FromResult(result);
    }
}
|
||||
|
||||
/// <summary>
/// Placeholder query service: every reachability query returns an empty result set.
/// </summary>
internal sealed class NullReachabilityQueryService : IReachabilityQueryService
{
    public Task<IReadOnlyList<ComponentReachability>> GetComponentsAsync(
        ScanId scanId,
        string? purlFilter,
        string? statusFilter,
        CancellationToken cancellationToken = default)
    {
        IReadOnlyList<ComponentReachability> none = Array.Empty<ComponentReachability>();
        return Task.FromResult(none);
    }

    public Task<IReadOnlyList<ReachabilityFinding>> GetFindingsAsync(
        ScanId scanId,
        string? cveFilter,
        string? statusFilter,
        CancellationToken cancellationToken = default)
    {
        IReadOnlyList<ReachabilityFinding> none = Array.Empty<ReachabilityFinding>();
        return Task.FromResult(none);
    }
}
|
||||
|
||||
/// <summary>
/// Placeholder explain service: no explanation is ever available.
/// </summary>
internal sealed class NullReachabilityExplainService : IReachabilityExplainService
{
    public Task<ReachabilityExplanation?> ExplainAsync(
        ScanId scanId,
        string cveId,
        string purl,
        CancellationToken cancellationToken = default)
    {
        return Task.FromResult<ReachabilityExplanation?>(null);
    }
}
|
||||
|
||||
/// <summary>
/// Placeholder SARIF exporter: always reports "nothing to export".
/// </summary>
internal sealed class NullSarifExportService : ISarifExportService
{
    public Task<object?> ExportAsync(ScanId scanId, CancellationToken cancellationToken = default)
    {
        return Task.FromResult<object?>(null);
    }
}
|
||||
|
||||
/// <summary>
/// Placeholder CycloneDX exporter: always reports "nothing to export".
/// </summary>
internal sealed class NullCycloneDxExportService : ICycloneDxExportService
{
    public Task<object?> ExportWithReachabilityAsync(ScanId scanId, CancellationToken cancellationToken = default)
    {
        return Task.FromResult<object?>(null);
    }
}
|
||||
|
||||
/// <summary>
/// Placeholder OpenVEX exporter: always reports "nothing to export".
/// </summary>
internal sealed class NullOpenVexExportService : IOpenVexExportService
{
    public Task<object?> ExportAsync(ScanId scanId, CancellationToken cancellationToken = default)
    {
        return Task.FromResult<object?>(null);
    }
}
|
||||
@@ -0,0 +1,78 @@
|
||||
using Microsoft.AspNetCore.Http;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
/// <summary>
/// Inbound offline-kit import: the uploaded bundle plus optional manifest and
/// detached signature files, with caller-supplied metadata.
/// Manifest and signature parts are optional — the import service degrades
/// gracefully when they are absent (subject to configuration).
/// </summary>
internal sealed record OfflineKitImportRequest(
    string TenantId,
    string Actor,
    OfflineKitImportMetadata Metadata,
    IFormFile Bundle,
    IFormFile? Manifest,
    IFormFile? BundleSignature,
    IFormFile? ManifestSignature);
|
||||
|
||||
/// <summary>
/// Import failure carrying the HTTP status code and a machine-readable reason code
/// for the response, plus optional free-form notes (e.g. parser diagnostics).
/// </summary>
internal sealed class OfflineKitImportException : Exception
{
    /// <summary>HTTP status code to return to the caller.</summary>
    public int StatusCode { get; }

    /// <summary>Stable machine-readable failure code (e.g. "HASH_MISMATCH").</summary>
    public string ReasonCode { get; }

    /// <summary>Optional extra diagnostic detail; null when none was captured.</summary>
    public string? Notes { get; }

    public OfflineKitImportException(int statusCode, string reasonCode, string message, string? notes = null)
        : base(message)
    {
        StatusCode = statusCode;
        ReasonCode = reasonCode;
        Notes = notes;
    }
}
|
||||
|
||||
/// <summary>
/// Caller-supplied metadata accompanying an offline-kit upload. BundleSha256 is the
/// only field the import service hard-requires; the rest are optional descriptors.
/// </summary>
internal sealed class OfflineKitImportMetadata
{
    // Optional; when absent the service derives an id from the bundle digest.
    public string? BundleId { get; set; }
    // Required: expected SHA-256 of the uploaded bundle (verified on import).
    public string BundleSha256 { get; set; } = string.Empty;
    public long BundleSize { get; set; }
    public DateTimeOffset? CapturedAt { get; set; }
    public string? Channel { get; set; }
    public string? Kind { get; set; }
    // True when this bundle is a delta on top of BaseBundleId.
    public bool? IsDelta { get; set; }
    public string? BaseBundleId { get; set; }
    // Optional expected SHA-256 of the manifest file, checked when a manifest is uploaded.
    public string? ManifestSha256 { get; set; }
    public long? ManifestSize { get; set; }
}
|
||||
|
||||
/// <summary>
/// Persisted offline-kit status: the currently active bundle and its components.
/// Serialized by the state store; all members are nullable for forward compatibility.
/// </summary>
internal sealed class OfflineKitStatusTransport
{
    public OfflineKitStatusBundleTransport? Current { get; set; }
    public List<OfflineKitComponentStatusTransport>? Components { get; set; }
}
|
||||
|
||||
/// <summary>
/// Snapshot of the active bundle as recorded at import time. Mirrors the
/// import metadata plus the verified digest/size and the import timestamp.
/// </summary>
internal sealed class OfflineKitStatusBundleTransport
{
    public string? BundleId { get; set; }
    public string? Channel { get; set; }
    public string? Kind { get; set; }
    public bool? IsDelta { get; set; }
    public string? BaseBundleId { get; set; }
    // Digest actually computed from the uploaded bytes (not the caller's claim).
    public string? BundleSha256 { get; set; }
    public long? BundleSize { get; set; }
    public DateTimeOffset? CapturedAt { get; set; }
    public DateTimeOffset? ImportedAt { get; set; }
}
|
||||
|
||||
/// <summary>
/// One component entry parsed from the bundle manifest (name/version/digest and
/// optional capture time and size).
/// </summary>
internal sealed class OfflineKitComponentStatusTransport
{
    public string? Name { get; set; }
    public string? Version { get; set; }
    public string? Digest { get; set; }
    public DateTimeOffset? CapturedAt { get; set; }
    public long? SizeBytes { get; set; }
}
|
||||
|
||||
/// <summary>
/// Wire response for an import request: the generated import id, an overall status
/// ("accepted" or "accepted_with_warnings"), submission time, and a human message.
/// </summary>
internal sealed class OfflineKitImportResponseTransport
{
    public string? ImportId { get; set; }
    public string? Status { get; set; }
    public DateTimeOffset? SubmittedAt { get; set; }
    public string? Message { get; set; }
}
|
||||
|
||||
@@ -0,0 +1,698 @@
|
||||
using System.Diagnostics;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.AspNetCore.Http;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using Microsoft.Extensions.Options;
|
||||
using StellaOps.AirGap.Importer.Contracts;
|
||||
using StellaOps.AirGap.Importer.Validation;
|
||||
using StellaOps.Authority.Storage.Postgres.Models;
|
||||
using StellaOps.Authority.Storage.Postgres.Repositories;
|
||||
using StellaOps.Scanner.Core.Configuration;
|
||||
using StellaOps.Scanner.Core.TrustAnchors;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
internal sealed class OfflineKitImportService
|
||||
{
|
||||
// Collaborators: live options (hot-reloadable), trust-anchor lookup for DSSE keys,
// metrics/state stores, audit sink, and an injectable clock for testability.
private readonly IOptionsMonitor<OfflineKitOptions> _options;
private readonly ITrustAnchorRegistry _trustAnchorRegistry;
private readonly OfflineKitMetricsStore _metrics;
private readonly OfflineKitStateStore _stateStore;
private readonly IOfflineKitAuditEmitter _auditEmitter;
private readonly TimeProvider _timeProvider;
private readonly ILogger<OfflineKitImportService> _logger;

/// <summary>
/// All dependencies are required; construction fails fast on any null.
/// </summary>
public OfflineKitImportService(
    IOptionsMonitor<OfflineKitOptions> options,
    ITrustAnchorRegistry trustAnchorRegistry,
    OfflineKitMetricsStore metrics,
    OfflineKitStateStore stateStore,
    IOfflineKitAuditEmitter auditEmitter,
    TimeProvider timeProvider,
    ILogger<OfflineKitImportService> logger)
{
    _options = options ?? throw new ArgumentNullException(nameof(options));
    _trustAnchorRegistry = trustAnchorRegistry ?? throw new ArgumentNullException(nameof(trustAnchorRegistry));
    _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics));
    _stateStore = stateStore ?? throw new ArgumentNullException(nameof(stateStore));
    _auditEmitter = auditEmitter ?? throw new ArgumentNullException(nameof(auditEmitter));
    _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
    _logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
|
||||
|
||||
/// <summary>
/// Full offline-kit import pipeline: persist the uploaded files, verify the bundle
/// digest, optionally verify manifest digest, DSSE envelope, and Rekor receipt,
/// then save status, record metrics, and emit an audit entry.
/// Throws <see cref="OfflineKitImportException"/> with an HTTP status + reason code on
/// any policy failure; unexpected errors are wrapped as INTERNAL_ERROR (500).
/// When options.RequireDsse is false, DSSE/Rekor failures downgrade to warnings and
/// the import is still accepted ("accepted_with_warnings").
/// </summary>
public async Task<OfflineKitImportResponseTransport> ImportAsync(OfflineKitImportRequest request, CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(request);

    var options = _options.CurrentValue;
    if (!options.Enabled)
    {
        throw new OfflineKitImportException(StatusCodes.Status404NotFound, "OFFLINE_KIT_DISABLED", "Offline kit operations are not enabled.");
    }

    // Normalize identity inputs; blank values fall back to fixed defaults.
    var tenantId = string.IsNullOrWhiteSpace(request.TenantId) ? "default" : request.TenantId.Trim();
    var actor = string.IsNullOrWhiteSpace(request.Actor) ? "anonymous" : request.Actor.Trim();
    var now = _timeProvider.GetUtcNow();

    // ComputeImportId / NormalizeSha256 are defined elsewhere in this file —
    // presumably a deterministic id and lowercase-hex normalization; confirm there.
    var importId = ComputeImportId(tenantId, request.Metadata.BundleSha256, now);
    var expectedBundleSha = NormalizeSha256(request.Metadata.BundleSha256);
    if (string.IsNullOrWhiteSpace(expectedBundleSha))
    {
        throw new OfflineKitImportException(StatusCodes.Status400BadRequest, "MANIFEST_INVALID", "metadata.bundleSha256 is required.");
    }

    // Derive a bundle id from the digest prefix when the caller did not supply one.
    var bundleId = string.IsNullOrWhiteSpace(request.Metadata.BundleId)
        ? $"sha256-{expectedBundleSha[..Math.Min(12, expectedBundleSha.Length)]}"
        : request.Metadata.BundleId.Trim();

    var bundleDirectory = _stateStore.GetBundleDirectory(tenantId, bundleId);
    Directory.CreateDirectory(bundleDirectory);

    // Fixed on-disk layout inside the per-bundle directory.
    var bundlePath = Path.Combine(bundleDirectory, "bundle.tgz");
    var manifestPath = Path.Combine(bundleDirectory, "manifest.json");
    var bundleSignaturePath = Path.Combine(bundleDirectory, "bundle-signature.bin");
    var manifestSignaturePath = Path.Combine(bundleDirectory, "manifest-signature.bin");

    // Mutated along the way so the catch blocks report the step that failed.
    var statusForMetrics = "success";
    var reasonCode = "SUCCESS";

    bool dsseVerified = false;
    bool rekorVerified = false;

    try
    {
        // 1) Persist the bundle and verify its digest against the caller's claim.
        var (bundleSha, bundleSize) = await SaveWithSha256Async(request.Bundle, bundlePath, cancellationToken).ConfigureAwait(false);
        if (!DigestsEqual(bundleSha, expectedBundleSha))
        {
            statusForMetrics = "failed_hash";
            reasonCode = "HASH_MISMATCH";
            throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, reasonCode, "Bundle digest does not match metadata.");
        }

        // 2) Optional manifest: verify digest (if claimed) and parse its component list.
        var components = new List<OfflineKitComponentStatusTransport>();
        if (request.Manifest is not null)
        {
            var (manifestSha, _) = await SaveWithSha256Async(request.Manifest, manifestPath, cancellationToken).ConfigureAwait(false);
            if (!string.IsNullOrWhiteSpace(request.Metadata.ManifestSha256)
                && !DigestsEqual(manifestSha, NormalizeSha256(request.Metadata.ManifestSha256)))
            {
                statusForMetrics = "failed_manifest";
                reasonCode = "SIG_FAIL_MANIFEST";
                throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, reasonCode, "Manifest digest does not match metadata.");
            }

            // Component parsing is best-effort: IO/JSON trouble logs a warning only.
            try
            {
                var manifestJson = await File.ReadAllTextAsync(manifestPath, cancellationToken).ConfigureAwait(false);
                components.AddRange(ParseManifestComponents(manifestJson));
            }
            catch (Exception ex) when (ex is IOException or JsonException)
            {
                _logger.LogWarning(ex, "offlinekit.import failed to parse manifest components bundle_id={bundle_id}", bundleId);
            }
        }

        // 3) Optional DSSE envelope over the bundle.
        byte[]? dsseBytes = null;
        DsseEnvelope? envelope = null;
        string? dsseNotes = null;

        if (request.BundleSignature is not null)
        {
            dsseBytes = await SaveRawAsync(request.BundleSignature, bundleSignaturePath, cancellationToken).ConfigureAwait(false);
            try
            {
                envelope = DsseEnvelope.Parse(Encoding.UTF8.GetString(dsseBytes));
            }
            catch (Exception ex)
            {
                // Keep the parse failure as a note; whether it is fatal depends on RequireDsse.
                dsseNotes = $"dsse:parse-failed {ex.GetType().Name}";
            }
        }

        if (options.RequireDsse && envelope is null)
        {
            statusForMetrics = "failed_dsse";
            reasonCode = "DSSE_VERIFY_FAIL";
            throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, reasonCode, "DSSE envelope is missing.", notes: dsseNotes);
        }

        if (envelope is not null)
        {
            // Verification latency is recorded whether or not verification succeeds.
            var sw = Stopwatch.StartNew();
            try
            {
                dsseVerified = VerifyDsse(bundleSha, request.Metadata, envelope, options);
            }
            catch (OfflineKitImportException) when (!options.RequireDsse)
            {
                // Optional mode: swallow the verification error and continue with a warning.
                dsseVerified = false;
            }
            finally
            {
                sw.Stop();
                _metrics.RecordAttestationVerifyLatency("dsse", sw.Elapsed.TotalSeconds, dsseVerified);
            }

            if (!dsseVerified)
            {
                statusForMetrics = "failed_dsse";
                reasonCode = "DSSE_VERIFY_FAIL";
                if (options.RequireDsse)
                {
                    throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, reasonCode, "DSSE verification failed.", notes: dsseNotes);
                }
            }
        }

        // 4) Optional offline Rekor receipt; only attempted when a DSSE envelope was uploaded.
        if (options.RekorOfflineMode && request.ManifestSignature is not null && dsseBytes is not null)
        {
            var receiptBytes = await SaveRawAsync(request.ManifestSignature, manifestSignaturePath, cancellationToken).ConfigureAwait(false);
            if (LooksLikeRekorReceipt(receiptBytes))
            {
                var sw = Stopwatch.StartNew();
                try
                {
                    rekorVerified = await VerifyRekorAsync(manifestSignaturePath, dsseBytes, options, cancellationToken).ConfigureAwait(false);
                }
                catch (OfflineKitImportException) when (!options.RequireDsse)
                {
                    rekorVerified = false;
                }
                finally
                {
                    sw.Stop();
                    _metrics.RecordRekorInclusionLatency(sw.Elapsed.TotalSeconds, rekorVerified);
                }

                if (!rekorVerified)
                {
                    statusForMetrics = "failed_rekor";
                    reasonCode = "REKOR_VERIFY_FAIL";
                    if (options.RequireDsse)
                    {
                        throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, reasonCode, "Rekor receipt verification failed.");
                    }
                }
                else
                {
                    _metrics.RecordRekorSuccess("offline");
                }
            }
        }

        // 5) Persist the resulting status snapshot (verified digest, not the claimed one).
        var status = new OfflineKitStatusTransport
        {
            Current = new OfflineKitStatusBundleTransport
            {
                BundleId = bundleId,
                Channel = request.Metadata.Channel?.Trim(),
                Kind = request.Metadata.Kind?.Trim(),
                IsDelta = request.Metadata.IsDelta ?? false,
                BaseBundleId = request.Metadata.BaseBundleId?.Trim(),
                BundleSha256 = NormalizeSha256(bundleSha),
                BundleSize = bundleSize,
                CapturedAt = request.Metadata.CapturedAt?.ToUniversalTime(),
                ImportedAt = now
            },
            // Deterministic component ordering for stable serialized output.
            Components = components.OrderBy(c => c.Name ?? string.Empty, StringComparer.Ordinal).ToList()
        };

        await _stateStore.SaveStatusAsync(tenantId, status, cancellationToken).ConfigureAwait(false);

        // 6) Metrics + audit, then respond. Non-fatal verification failures surface
        // as "accepted_with_warnings" rather than an error.
        _metrics.RecordImport(statusForMetrics, tenantId);
        await EmitAuditAsync(tenantId, actor, now, importId, bundleId, result: "accepted", reasonCode, cancellationToken).ConfigureAwait(false);

        return new OfflineKitImportResponseTransport
        {
            ImportId = importId,
            Status = statusForMetrics == "success" ? "accepted" : "accepted_with_warnings",
            SubmittedAt = now,
            Message = statusForMetrics == "success" ? "Accepted." : "Accepted with warnings."
        };
    }
    catch (OfflineKitImportException)
    {
        // Policy failure: record under the step-specific status set above, audit, rethrow.
        _metrics.RecordImport(statusForMetrics, tenantId);
        await EmitAuditAsync(tenantId, actor, now, importId, bundleId, result: "failed", reasonCode, cancellationToken).ConfigureAwait(false);
        throw;
    }
    catch (Exception ex)
    {
        // Unexpected failure: log with context and re-surface as a generic 500.
        _logger.LogError(ex, "offlinekit.import failed tenant_id={tenant_id} import_id={import_id}", tenantId, importId);
        _metrics.RecordImport("failed_unknown", tenantId);
        await EmitAuditAsync(tenantId, actor, now, importId, bundleId, result: "failed", "INTERNAL_ERROR", cancellationToken).ConfigureAwait(false);
        throw new OfflineKitImportException(StatusCodes.Status500InternalServerError, "INTERNAL_ERROR", "Offline kit import failed.");
    }
}
|
||||
|
||||
private bool VerifyDsse(string bundleSha256Hex, OfflineKitImportMetadata metadata, DsseEnvelope envelope, OfflineKitOptions options)
|
||||
{
|
||||
var purl = ResolvePurl(metadata);
|
||||
var resolution = _trustAnchorRegistry.ResolveForPurl(purl);
|
||||
if (resolution is null)
|
||||
{
|
||||
throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, "TRUST_ROOT_MISSING", $"No trust anchor matches '{purl}'.");
|
||||
}
|
||||
|
||||
var trustRoots = BuildTrustRoots(resolution, options.TrustRootDirectory ?? string.Empty);
|
||||
var pae = BuildPreAuthEncoding(envelope.PayloadType, envelope.Payload);
|
||||
|
||||
var verified = 0;
|
||||
foreach (var signature in envelope.Signatures)
|
||||
{
|
||||
if (TryVerifySignature(trustRoots, signature, pae))
|
||||
{
|
||||
verified++;
|
||||
}
|
||||
}
|
||||
|
||||
if (verified < Math.Max(1, resolution.MinSignatures))
|
||||
{
|
||||
throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, "DSSE_VERIFY_FAIL", "DSSE signature verification failed.");
|
||||
}
|
||||
|
||||
var subjectSha = TryExtractDsseSubjectSha256(envelope);
|
||||
if (!string.IsNullOrWhiteSpace(subjectSha) && !DigestsEqual(bundleSha256Hex, subjectSha))
|
||||
{
|
||||
throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, "DSSE_VERIFY_FAIL", "DSSE subject digest does not match bundle digest.");
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
/// <summary>
/// Maps the kit's declared kind onto a package URL used for trust-anchor lookup,
/// defaulting to "offline-kit" when no kind was supplied.
/// </summary>
private static string ResolvePurl(OfflineKitImportMetadata metadata)
{
    var rawKind = metadata.Kind;
    var kind = string.IsNullOrWhiteSpace(rawKind)
        ? "offline-kit"
        : rawKind.Trim().ToLowerInvariant();

    return $"pkg:stellaops/{kind}";
}
|
||||
|
||||
/// <summary>
/// Builds the trust-root configuration for signature checks: the anchor's public
/// keys (keyed case-insensitively by key id) plus the de-duplicated fingerprints of
/// those keys. Only RSASSA-PSS/SHA-256 is allowed; no validity window is applied.
/// </summary>
private static TrustRootConfig BuildTrustRoots(TrustAnchorResolution resolution, string rootBundlePath)
{
    var keysById = new Dictionary<string, byte[]>(StringComparer.OrdinalIgnoreCase);
    foreach (var (keyId, keyBytes) in resolution.PublicKeys)
    {
        // Later duplicates (case-insensitively) overwrite earlier ones.
        keysById[keyId] = keyBytes;
    }

    var fingerprints = keysById.Values
        .Select(ComputeFingerprint)
        .Distinct(StringComparer.Ordinal)
        .ToArray();

    return new TrustRootConfig(
        RootBundlePath: rootBundlePath,
        TrustedKeyFingerprints: fingerprints,
        AllowedSignatureAlgorithms: new[] { "rsassa-pss-sha256" },
        NotBeforeUtc: null,
        NotAfterUtc: null,
        PublicKeys: keysById);
}
|
||||
|
||||
/// <summary>
/// Builds the DSSE v1 Pre-Authentication Encoding for a signature check:
///   PAE(type, body) = "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body
/// where LEN is the length in BYTES and body is the raw (base64-decoded) payload.
/// </summary>
/// <remarks>
/// The previous implementation emitted a "PAE:&lt;count&gt; ..." framing that counted
/// "DSSEv1" as a payload part, used UTF-16 char counts instead of byte lengths, and
/// round-tripped the payload bytes through a string (lossy for non-UTF-8 payloads).
/// No signature produced by a spec-compliant DSSE signer could verify against that
/// encoding, so every envelope signed by standard tooling was rejected.
/// </remarks>
/// <param name="payloadType">The envelope's payloadType (e.g. an in-toto media type).</param>
/// <param name="payloadBase64">The envelope's base64-encoded payload.</param>
/// <returns>The exact byte sequence that DSSE signatures are computed over.</returns>
private static byte[] BuildPreAuthEncoding(string payloadType, string payloadBase64)
{
    var payloadBytes = Convert.FromBase64String(payloadBase64);
    var typeBytes = Encoding.UTF8.GetBytes(payloadType);

    using var buffer = new MemoryStream();
    var header = Encoding.UTF8.GetBytes($"DSSEv1 {typeBytes.Length} ");
    buffer.Write(header, 0, header.Length);
    buffer.Write(typeBytes, 0, typeBytes.Length);
    var separator = Encoding.UTF8.GetBytes($" {payloadBytes.Length} ");
    buffer.Write(separator, 0, separator.Length);
    // Append the raw payload bytes directly — never re-encoded through a string.
    buffer.Write(payloadBytes, 0, payloadBytes.Length);
    return buffer.ToArray();
}
|
||||
|
||||
/// <summary>
/// Attempts to verify one DSSE signature over the PAE bytes. The signing key must be
/// present in the trust roots by key id AND its fingerprint must be trusted.
/// Verification is RSASSA-PSS with SHA-256 over a SubjectPublicKeyInfo-encoded key.
/// Returns false (never throws) on unknown key, untrusted fingerprint, or any
/// decode/crypto failure.
/// </summary>
private static bool TryVerifySignature(TrustRootConfig trustRoots, DsseSignature signature, byte[] pae)
{
    if (!trustRoots.PublicKeys.TryGetValue(signature.KeyId, out var keyBytes))
    {
        return false;
    }

    // Double-check the key's fingerprint is on the trusted list, not just keyed by id.
    var fingerprint = ComputeFingerprint(keyBytes);
    if (!trustRoots.TrustedKeyFingerprints.Contains(fingerprint, StringComparer.Ordinal))
    {
        return false;
    }

    try
    {
        using var rsa = RSA.Create();
        rsa.ImportSubjectPublicKeyInfo(keyBytes, out _);
        var sig = Convert.FromBase64String(signature.Signature);
        return rsa.VerifyData(pae, sig, HashAlgorithmName.SHA256, RSASignaturePadding.Pss);
    }
    catch
    {
        // Malformed key/signature or crypto failure — treat as "not verified".
        return false;
    }
}
|
||||
|
||||
/// <summary>
/// Best-effort extraction of the first sha256 subject digest from an in-toto-style
/// DSSE payload (root "subject" array of objects with a "digest" map). Returns the
/// normalized digest, or null when the payload has no such subject or cannot be
/// parsed — extraction failure is never fatal.
/// </summary>
private static string? TryExtractDsseSubjectSha256(DsseEnvelope envelope)
{
    try
    {
        var payloadBytes = Convert.FromBase64String(envelope.Payload);
        using var doc = JsonDocument.Parse(payloadBytes);
        if (!doc.RootElement.TryGetProperty("subject", out var subject) || subject.ValueKind != JsonValueKind.Array)
        {
            return null;
        }

        // First subject entry with a string "digest.sha256" wins.
        foreach (var entry in subject.EnumerateArray())
        {
            if (entry.ValueKind != JsonValueKind.Object)
            {
                continue;
            }

            if (!entry.TryGetProperty("digest", out var digestObj) || digestObj.ValueKind != JsonValueKind.Object)
            {
                continue;
            }

            if (digestObj.TryGetProperty("sha256", out var shaProp) && shaProp.ValueKind == JsonValueKind.String)
            {
                return NormalizeSha256(shaProp.GetString());
            }
        }

        return null;
    }
    catch
    {
        // Invalid base64 or JSON — treat as "no subject digest available".
        return null;
    }
}
|
||||
|
||||
/// <summary>
/// Verifies an offline Rekor inclusion receipt against the SHA-256 of the DSSE
/// envelope bytes, using the Rekor public key found in the configured snapshot
/// directory. Throws <see cref="OfflineKitImportException"/> (422) when the snapshot
/// directory or public key is missing; otherwise returns the verifier's outcome.
/// </summary>
private static async Task<bool> VerifyRekorAsync(string receiptPath, byte[] dsseBytes, OfflineKitOptions options, CancellationToken cancellationToken)
{
    if (string.IsNullOrWhiteSpace(options.RekorSnapshotDirectory))
    {
        throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, "REKOR_VERIFY_FAIL", "Rekor snapshot directory is not configured.");
    }

    var publicKeyPath = ResolveRekorPublicKeyPath(options.RekorSnapshotDirectory);
    if (publicKeyPath is null)
    {
        throw new OfflineKitImportException(StatusCodes.Status422UnprocessableEntity, "REKOR_VERIFY_FAIL", "Rekor public key was not found in the snapshot directory.");
    }

    // The receipt is expected to commit to the digest of the raw DSSE envelope bytes.
    var dsseSha = SHA256.HashData(dsseBytes);
    var result = await RekorOfflineReceiptVerifier.VerifyAsync(receiptPath, dsseSha, publicKeyPath, cancellationToken).ConfigureAwait(false);
    return result.Verified;
}
|
||||
|
||||
/// <summary>
/// Locates the Rekor log public key inside an offline snapshot directory,
/// probing well-known file names in priority order. Returns null when none exist.
/// </summary>
private static string? ResolveRekorPublicKeyPath(string snapshotDirectory)
{
    // Probe order matters: prefer the canonical rekor-pub.pem at the root,
    // then legacy names, then the tlog/ subdirectory.
    string[] knownNames =
    {
        Path.Combine(snapshotDirectory, "rekor-pub.pem"),
        Path.Combine(snapshotDirectory, "rekor.pub"),
        Path.Combine(snapshotDirectory, "tlog-root.pub"),
        Path.Combine(snapshotDirectory, "tlog-root.pem"),
        Path.Combine(snapshotDirectory, "tlog", "rekor-pub.pem"),
        Path.Combine(snapshotDirectory, "tlog", "rekor.pub")
    };

    for (var i = 0; i < knownNames.Length; i++)
    {
        if (File.Exists(knownNames[i]))
        {
            return knownNames[i];
        }
    }

    return null;
}
|
||||
|
||||
/// <summary>
/// Heuristically decides whether a payload is a Rekor inclusion receipt by
/// checking for the receipt's characteristic top-level JSON properties.
/// </summary>
private static bool LooksLikeRekorReceipt(byte[] payload)
{
    JsonDocument doc;
    try
    {
        doc = JsonDocument.Parse(payload);
    }
    catch (JsonException)
    {
        // Not JSON at all.
        return false;
    }

    using (doc)
    {
        var root = doc.RootElement;
        if (root.ValueKind != JsonValueKind.Object)
        {
            return false;
        }

        // All five marker properties must be present (values are not inspected).
        string[] markers = { "uuid", "logIndex", "rootHash", "hashes", "checkpoint" };
        foreach (var marker in markers)
        {
            if (!root.TryGetProperty(marker, out _))
            {
                return false;
            }
        }

        return true;
    }
}
|
||||
|
||||
/// <summary>
/// Records an offline-kit import audit event. Failures are logged and swallowed:
/// audit emission must never fail the import pipeline itself.
/// </summary>
private async Task EmitAuditAsync(
    string tenantId,
    string actor,
    DateTimeOffset timestamp,
    string importId,
    string bundleId,
    string result,
    string reasonCode,
    CancellationToken cancellationToken)
{
    try
    {
        var detailsJson = JsonSerializer.Serialize(new { importId, bundleId, reasonCode }, new JsonSerializerOptions(JsonSerializerDefaults.Web));
        var auditEvent = new OfflineKitAuditEntity
        {
            // Deterministic id makes repeated emissions of the same import idempotent.
            EventId = ComputeDeterministicEventId(tenantId, importId),
            TenantId = tenantId,
            EventType = "offlinekit.import",
            Timestamp = timestamp,
            Actor = actor,
            Details = detailsJson,
            Result = result
        };

        await _auditEmitter.RecordAsync(auditEvent, cancellationToken).ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        // Best effort: audit sink outages should not abort the import.
        _logger.LogWarning(ex, "offlinekit.audit.emit failed tenant_id={tenant_id} import_id={import_id}", tenantId, importId);
    }
}
|
||||
|
||||
/// <summary>
/// Derives a stable event id from (tenant, import) so repeated emissions of the
/// same import map to the same audit record. Built from the first 16 bytes of
/// SHA-256 over the lower-cased "tenant|import" pair.
/// </summary>
private static Guid ComputeDeterministicEventId(string tenantId, string importId)
{
    var seed = $"{tenantId}|{importId}".ToLowerInvariant();
    Span<byte> idBytes = stackalloc byte[16];
    SHA256.HashData(Encoding.UTF8.GetBytes(seed)).AsSpan(0, 16).CopyTo(idBytes);
    return new Guid(idBytes);
}
|
||||
|
||||
/// <summary>
/// Builds a deterministic import identifier ("sha256:&lt;hex&gt;") from the tenant,
/// the normalized bundle digest, and the submission time truncated to seconds.
/// </summary>
private static string ComputeImportId(string tenantId, string bundleSha256, DateTimeOffset submittedAt)
{
    var normalizedDigest = NormalizeSha256(bundleSha256);
    var epochSeconds = submittedAt.ToUnixTimeSeconds();
    var seed = $"{tenantId}|{normalizedDigest}|{epochSeconds}".ToLowerInvariant();
    var digest = SHA256.HashData(Encoding.UTF8.GetBytes(seed));
    return $"sha256:{Convert.ToHexString(digest).ToLowerInvariant()}";
}
|
||||
|
||||
/// <summary>Compares two sha256 digests after normalization (prefix stripped, lower-cased).</summary>
private static bool DigestsEqual(string computedHex, string expectedHex)
{
    var left = NormalizeSha256(computedHex);
    var right = NormalizeSha256(expectedHex);
    return string.Equals(left, right, StringComparison.OrdinalIgnoreCase);
}
|
||||
|
||||
/// <summary>
/// Canonicalizes a sha256 digest: trims whitespace, strips an optional
/// "sha256:" prefix (any case), and lower-cases the hex. Null/blank input yields "".
/// </summary>
private static string NormalizeSha256(string? digest)
{
    if (string.IsNullOrWhiteSpace(digest))
    {
        return string.Empty;
    }

    const string prefix = "sha256:";
    var normalized = digest.Trim();
    if (normalized.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
    {
        normalized = normalized[prefix.Length..];
    }

    return normalized.ToLowerInvariant();
}
|
||||
|
||||
/// <summary>Returns the lower-case hex SHA-256 fingerprint of a public key blob.</summary>
private static string ComputeFingerprint(byte[] publicKey)
    => Convert.ToHexString(SHA256.HashData(publicKey)).ToLowerInvariant();
|
||||
|
||||
/// <summary>
/// Streams an uploaded file to disk while computing its SHA-256, writing through
/// a ".tmp" sibling and moving it into place so a partially written file is never
/// visible at the final path.
/// </summary>
/// <returns>The lower-case hex digest and the number of bytes written.</returns>
private static async Task<(string Sha256Hex, long SizeBytes)> SaveWithSha256Async(IFormFile file, string path, CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(file);
    ArgumentException.ThrowIfNullOrWhiteSpace(path);

    var directory = Path.GetDirectoryName(path);
    if (!string.IsNullOrWhiteSpace(directory))
    {
        Directory.CreateDirectory(directory);
    }

    var temp = path + ".tmp";
    long size = 0;

    using var hasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);

    // Scope the streams so the temp file handle is closed (and flushed) BEFORE
    // File.Move: moving a file while our own FileShare.None write handle is
    // still open fails on Windows.
    await using (var output = File.Create(temp))
    await using (var input = file.OpenReadStream())
    {
        var buffer = new byte[128 * 1024];
        int read;
        while ((read = await input.ReadAsync(buffer, cancellationToken).ConfigureAwait(false)) > 0)
        {
            hasher.AppendData(buffer, 0, read);
            await output.WriteAsync(buffer.AsMemory(0, read), cancellationToken).ConfigureAwait(false);
            size += read;
        }
    }

    var hex = Convert.ToHexString(hasher.GetHashAndReset()).ToLowerInvariant();
    File.Move(temp, path, overwrite: true);

    return (hex, size);
}
|
||||
|
||||
/// <summary>
/// Persists an uploaded file to <paramref name="path"/> and returns its raw bytes.
/// The upload is buffered once in memory; that buffer is both written to disk and
/// returned. The previous version re-read the destination file while its own
/// FileShare.None write handle was still open, which fails on Windows and could
/// observe unflushed data.
/// </summary>
private static async Task<byte[]> SaveRawAsync(IFormFile file, string path, CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(file);
    ArgumentException.ThrowIfNullOrWhiteSpace(path);

    var directory = Path.GetDirectoryName(path);
    if (!string.IsNullOrWhiteSpace(directory))
    {
        Directory.CreateDirectory(directory);
    }

    byte[] bytes;
    await using (var input = file.OpenReadStream())
    await using (var buffer = new MemoryStream())
    {
        await input.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false);
        bytes = buffer.ToArray();
    }

    await File.WriteAllBytesAsync(path, bytes, cancellationToken).ConfigureAwait(false);
    return bytes;
}
|
||||
|
||||
/// <summary>
/// Parses offline-kit manifest JSON into component statuses. Accepts either a
/// JSON object with an "entries" array, a bare JSON array, or (as a fallback)
/// NDJSON with one component object per line. Unparseable input yields an empty list.
/// </summary>
private static IReadOnlyList<OfflineKitComponentStatusTransport> ParseManifestComponents(string manifestJson)
{
    if (string.IsNullOrWhiteSpace(manifestJson))
    {
        return Array.Empty<OfflineKitComponentStatusTransport>();
    }

    try
    {
        using var manifest = JsonDocument.Parse(manifestJson);
        var root = manifest.RootElement;

        if (root.ValueKind == JsonValueKind.Array)
        {
            return ParseEntries(root);
        }

        if (root.ValueKind == JsonValueKind.Object
            && root.TryGetProperty("entries", out var entries)
            && entries.ValueKind == JsonValueKind.Array)
        {
            return ParseEntries(entries);
        }
    }
    catch (JsonException)
    {
        // Fall through to line-oriented (NDJSON) parsing below.
    }

    var parsed = new List<OfflineKitComponentStatusTransport>();
    var lines = manifestJson.Split(['\r', '\n'], StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
    foreach (var line in lines)
    {
        try
        {
            using var lineDoc = JsonDocument.Parse(line);
            if (TryParseComponent(lineDoc.RootElement, out var component))
            {
                parsed.Add(component);
            }
        }
        catch (JsonException)
        {
            // Skip lines that are not standalone JSON documents.
        }
    }

    return parsed;
}
|
||||
|
||||
/// <summary>Converts a JSON array of manifest entries into component statuses, skipping malformed entries.</summary>
private static IReadOnlyList<OfflineKitComponentStatusTransport> ParseEntries(JsonElement entries)
{
    var parsed = new List<OfflineKitComponentStatusTransport>(entries.GetArrayLength());
    foreach (var element in entries.EnumerateArray())
    {
        if (TryParseComponent(element, out var parsedComponent))
        {
            parsed.Add(parsedComponent);
        }
    }

    return parsed;
}
|
||||
|
||||
/// <summary>
/// Attempts to read one manifest entry into a component status. Only "name" is
/// required; "sha256", "size", and "capturedAt" are optional and ignored when malformed.
/// </summary>
private static bool TryParseComponent(JsonElement entry, out OfflineKitComponentStatusTransport component)
{
    component = new OfflineKitComponentStatusTransport();

    if (entry.ValueKind != JsonValueKind.Object
        || !entry.TryGetProperty("name", out var nameElement)
        || nameElement.ValueKind != JsonValueKind.String)
    {
        return false;
    }

    var componentName = nameElement.GetString();
    if (string.IsNullOrWhiteSpace(componentName))
    {
        return false;
    }

    var digest = entry.TryGetProperty("sha256", out var digestElement) && digestElement.ValueKind == JsonValueKind.String
        ? NormalizeSha256(digestElement.GetString())
        : null;

    long? sizeBytes = null;
    if (entry.TryGetProperty("size", out var sizeElement)
        && sizeElement.ValueKind == JsonValueKind.Number
        && sizeElement.TryGetInt64(out var parsedSize))
    {
        sizeBytes = parsedSize;
    }

    DateTimeOffset? capturedAtUtc = null;
    if (entry.TryGetProperty("capturedAt", out var capturedElement)
        && capturedElement.ValueKind == JsonValueKind.String
        && DateTimeOffset.TryParse(capturedElement.GetString(), out var capturedValue))
    {
        // Normalize to UTC so downstream comparisons are offset-independent.
        capturedAtUtc = capturedValue.ToUniversalTime();
    }

    component = new OfflineKitComponentStatusTransport
    {
        Name = componentName.Trim(),
        Digest = digest,
        SizeBytes = sizeBytes,
        CapturedAt = capturedAtUtc
    };

    return true;
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,294 @@
|
||||
using System.Collections.Concurrent;
|
||||
using System.Globalization;
|
||||
using System.Text;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
/// <summary>
/// In-process metrics accumulator for offline-kit import telemetry, rendered on
/// demand in the Prometheus text exposition format. Counters use lock-free
/// <see cref="ConcurrentDictionary{TKey,TValue}"/> updates; histograms take a
/// short lock per observation.
/// </summary>
internal sealed class OfflineKitMetricsStore
{
    // Histogram bucket upper bounds in seconds (mirrors common Prometheus client defaults).
    private static readonly double[] DefaultLatencyBucketsSeconds =
    {
        0.001,
        0.0025,
        0.005,
        0.01,
        0.025,
        0.05,
        0.1,
        0.25,
        0.5,
        1,
        2.5,
        5,
        10
    };

    private readonly ConcurrentDictionary<ImportCounterKey, long> _imports = new();
    private readonly ConcurrentDictionary<TwoLabelKey, Histogram> _attestationVerifyLatency = new();
    private readonly ConcurrentDictionary<string, Histogram> _rekorInclusionLatency = new(StringComparer.Ordinal);
    private readonly ConcurrentDictionary<string, long> _rekorSuccess = new(StringComparer.Ordinal);
    private readonly ConcurrentDictionary<string, long> _rekorRetry = new(StringComparer.Ordinal);

    /// <summary>Increments the import counter for the (tenant, status) pair.</summary>
    public void RecordImport(string status, string tenantId)
    {
        status = NormalizeLabelValue(status, "unknown");
        tenantId = NormalizeLabelValue(tenantId, "unknown");
        _imports.AddOrUpdate(new ImportCounterKey(tenantId, status), 1, static (_, current) => current + 1);
    }

    /// <summary>Observes one attestation verification duration, labelled by type and outcome.</summary>
    public void RecordAttestationVerifyLatency(string attestationType, double seconds, bool success)
    {
        attestationType = NormalizeLabelValue(attestationType, "unknown");
        seconds = ClampSeconds(seconds);
        var key = new TwoLabelKey(attestationType, success ? "true" : "false");
        var histogram = _attestationVerifyLatency.GetOrAdd(key, _ => new Histogram(DefaultLatencyBucketsSeconds));
        histogram.Record(seconds);
    }

    /// <summary>Counts one successful Rekor verification for the given mode.</summary>
    public void RecordRekorSuccess(string mode)
    {
        mode = NormalizeLabelValue(mode, "unknown");
        _rekorSuccess.AddOrUpdate(mode, 1, static (_, current) => current + 1);
    }

    /// <summary>Counts one Rekor verification retry for the given reason.</summary>
    public void RecordRekorRetry(string reason)
    {
        reason = NormalizeLabelValue(reason, "unknown");
        _rekorRetry.AddOrUpdate(reason, 1, static (_, current) => current + 1);
    }

    /// <summary>Observes one Rekor inclusion-proof verification duration.</summary>
    public void RecordRekorInclusionLatency(double seconds, bool success)
    {
        seconds = ClampSeconds(seconds);
        var key = success ? "true" : "false";
        var histogram = _rekorInclusionLatency.GetOrAdd(key, _ => new Histogram(DefaultLatencyBucketsSeconds));
        histogram.Record(seconds);
    }

    /// <summary>
    /// Renders all accumulated series as Prometheus text exposition output.
    /// Series are emitted in deterministic (ordinal label) order so scrapes are stable.
    /// </summary>
    public string RenderPrometheus()
    {
        var builder = new StringBuilder(capacity: 4096);

        AppendCounterHeader(builder, "offlinekit_import_total", "Total number of offline kit import attempts");
        foreach (var (key, value) in _imports.OrderBy(kv => kv.Key.TenantId, StringComparer.Ordinal)
                     .ThenBy(kv => kv.Key.Status, StringComparer.Ordinal))
        {
            builder.Append("offlinekit_import_total{tenant_id=\"");
            builder.Append(EscapeLabelValue(key.TenantId));
            builder.Append("\",status=\"");
            builder.Append(EscapeLabelValue(key.Status));
            builder.Append("\"} ");
            builder.Append(value.ToString(CultureInfo.InvariantCulture));
            builder.Append('\n');
        }

        AppendHistogramTwoLabels(
            builder,
            name: "offlinekit_attestation_verify_latency_seconds",
            help: "Time taken to verify attestations during import",
            labelA: "attestation_type",
            labelB: "success",
            histograms: _attestationVerifyLatency);

        AppendCounterHeader(builder, "attestor_rekor_success_total", "Successful Rekor verification count");
        foreach (var (key, value) in _rekorSuccess.OrderBy(kv => kv.Key, StringComparer.Ordinal))
        {
            builder.Append("attestor_rekor_success_total{mode=\"");
            builder.Append(EscapeLabelValue(key));
            builder.Append("\"} ");
            builder.Append(value.ToString(CultureInfo.InvariantCulture));
            builder.Append('\n');
        }

        AppendCounterHeader(builder, "attestor_rekor_retry_total", "Rekor verification retry count");
        foreach (var (key, value) in _rekorRetry.OrderBy(kv => kv.Key, StringComparer.Ordinal))
        {
            builder.Append("attestor_rekor_retry_total{reason=\"");
            builder.Append(EscapeLabelValue(key));
            builder.Append("\"} ");
            builder.Append(value.ToString(CultureInfo.InvariantCulture));
            builder.Append('\n');
        }

        AppendHistogramOneLabel(
            builder,
            name: "rekor_inclusion_latency",
            help: "Time to verify Rekor inclusion proof",
            label: "success",
            histograms: _rekorInclusionLatency);

        return builder.ToString();
    }

    // Emits the "# HELP" / "# TYPE <name> counter" preamble for a counter family.
    private static void AppendCounterHeader(StringBuilder builder, string name, string help)
    {
        builder.Append("# HELP ");
        builder.Append(name);
        builder.Append(' ');
        builder.Append(help);
        builder.Append('\n');
        builder.Append("# TYPE ");
        builder.Append(name);
        builder.Append(" counter\n");
    }

    // Emits a histogram family whose series carry two labels, in deterministic order.
    private static void AppendHistogramTwoLabels(
        StringBuilder builder,
        string name,
        string help,
        string labelA,
        string labelB,
        ConcurrentDictionary<TwoLabelKey, Histogram> histograms)
    {
        builder.Append("# HELP ");
        builder.Append(name);
        builder.Append(' ');
        builder.Append(help);
        builder.Append('\n');
        builder.Append("# TYPE ");
        builder.Append(name);
        builder.Append(" histogram\n");

        foreach (var grouping in histograms.OrderBy(kv => kv.Key.LabelA, StringComparer.Ordinal)
                     .ThenBy(kv => kv.Key.LabelB, StringComparer.Ordinal))
        {
            var labels = $"{labelA}=\"{EscapeLabelValue(grouping.Key.LabelA)}\",{labelB}=\"{EscapeLabelValue(grouping.Key.LabelB)}\"";
            AppendHistogramSeries(builder, name, labels, grouping.Value.Snapshot());
        }
    }

    // Emits a histogram family whose series carry a single label, in deterministic order.
    private static void AppendHistogramOneLabel(
        StringBuilder builder,
        string name,
        string help,
        string label,
        ConcurrentDictionary<string, Histogram> histograms)
    {
        builder.Append("# HELP ");
        builder.Append(name);
        builder.Append(' ');
        builder.Append(help);
        builder.Append('\n');
        builder.Append("# TYPE ");
        builder.Append(name);
        builder.Append(" histogram\n");

        foreach (var grouping in histograms.OrderBy(kv => kv.Key, StringComparer.Ordinal))
        {
            var labels = $"{label}=\"{EscapeLabelValue(grouping.Key)}\"";
            AppendHistogramSeries(builder, name, labels, grouping.Value.Snapshot());
        }
    }

    // Emits _bucket (cumulative, including +Inf), _sum, and _count lines for one series.
    private static void AppendHistogramSeries(
        StringBuilder builder,
        string name,
        string labels,
        HistogramSnapshot snapshot)
    {
        long cumulative = 0;

        for (var i = 0; i < snapshot.BucketUpperBounds.Length; i++)
        {
            cumulative += snapshot.BucketCounts[i];
            builder.Append(name);
            builder.Append("_bucket{");
            builder.Append(labels);
            builder.Append(",le=\"");
            builder.Append(snapshot.BucketUpperBounds[i].ToString("G", CultureInfo.InvariantCulture));
            builder.Append("\"} ");
            builder.Append(cumulative.ToString(CultureInfo.InvariantCulture));
            builder.Append('\n');
        }

        // Overflow bucket (observations above the largest bound) folds into +Inf.
        cumulative += snapshot.BucketCounts[^1];
        builder.Append(name);
        builder.Append("_bucket{");
        builder.Append(labels);
        builder.Append(",le=\"+Inf\"} ");
        builder.Append(cumulative.ToString(CultureInfo.InvariantCulture));
        builder.Append('\n');

        builder.Append(name);
        builder.Append("_sum{");
        builder.Append(labels);
        builder.Append("} ");
        builder.Append(snapshot.SumSeconds.ToString("G", CultureInfo.InvariantCulture));
        builder.Append('\n');

        builder.Append(name);
        builder.Append("_count{");
        builder.Append(labels);
        builder.Append("} ");
        builder.Append(snapshot.Count.ToString(CultureInfo.InvariantCulture));
        builder.Append('\n');
    }

    // NaN, infinity, and negative observations are folded to 0 rather than rejected.
    private static double ClampSeconds(double seconds)
        => double.IsNaN(seconds) || double.IsInfinity(seconds) || seconds < 0 ? 0 : seconds;

    private static string NormalizeLabelValue(string? value, string fallback)
        => string.IsNullOrWhiteSpace(value) ? fallback : value.Trim();

    /// <summary>
    /// Escapes a label value per the Prometheus text exposition format: backslash,
    /// line feed, and double quote must be backslash-escaped. (The previous version
    /// missed the line-feed case, which corrupts the scrape output.)
    /// </summary>
    private static string EscapeLabelValue(string value)
        => value
            .Replace("\\", "\\\\", StringComparison.Ordinal)
            .Replace("\n", "\\n", StringComparison.Ordinal)
            .Replace("\"", "\\\"", StringComparison.Ordinal);

    // Fixed-bucket latency histogram; Record/Snapshot are synchronized on _lock.
    private sealed class Histogram
    {
        private readonly double[] _bucketUpperBounds;
        private readonly long[] _bucketCounts;    // length = bounds + 1 (last slot is overflow)
        private long _count;
        private double _sumSeconds;
        private readonly object _lock = new();

        public Histogram(double[] bucketUpperBounds)
        {
            _bucketUpperBounds = bucketUpperBounds ?? throw new ArgumentNullException(nameof(bucketUpperBounds));
            _bucketCounts = new long[_bucketUpperBounds.Length + 1];
        }

        public void Record(double seconds)
        {
            lock (_lock)
            {
                _count++;
                _sumSeconds += seconds;

                // First bucket whose bound covers the observation; overflow otherwise.
                var bucketIndex = _bucketUpperBounds.Length;
                for (var i = 0; i < _bucketUpperBounds.Length; i++)
                {
                    if (seconds <= _bucketUpperBounds[i])
                    {
                        bucketIndex = i;
                        break;
                    }
                }

                _bucketCounts[bucketIndex]++;
            }
        }

        // Returns a consistent copy so rendering never races with Record.
        public HistogramSnapshot Snapshot()
        {
            lock (_lock)
            {
                return new HistogramSnapshot(
                    (double[])_bucketUpperBounds.Clone(),
                    (long[])_bucketCounts.Clone(),
                    _count,
                    _sumSeconds);
            }
        }
    }

    private sealed record HistogramSnapshot(
        double[] BucketUpperBounds,
        long[] BucketCounts,
        long Count,
        double SumSeconds);

    private sealed record ImportCounterKey(string TenantId, string Status);

    private sealed record TwoLabelKey(string LabelA, string LabelB);
}
|
||||
|
||||
@@ -0,0 +1,89 @@
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Hosting;
|
||||
using Microsoft.Extensions.Logging;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
/// <summary>
/// Persists offline-kit bundle and per-tenant status state under the host
/// content root (&lt;content-root&gt;/data/offline-kit) so it survives restarts.
/// </summary>
internal sealed class OfflineKitStateStore
{
    // Indented, web-cased JSON so state files are readable when inspected by hand.
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true
    };

    private readonly string _rootDirectory;
    private readonly ILogger<OfflineKitStateStore> _logger;

    public OfflineKitStateStore(IHostEnvironment environment, ILogger<OfflineKitStateStore> logger)
    {
        ArgumentNullException.ThrowIfNull(environment);
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _rootDirectory = Path.Combine(environment.ContentRootPath, "data", "offline-kit");
    }

    /// <summary>Returns the on-disk directory for a tenant's bundle, with both path segments sanitized.</summary>
    public string GetBundleDirectory(string tenantId, string bundleId)
    {
        var safeTenant = SanitizePathSegment(tenantId);
        var safeBundle = SanitizePathSegment(bundleId);
        return Path.Combine(_rootDirectory, "bundles", safeTenant, safeBundle);
    }

    /// <summary>
    /// Atomically replaces the tenant's active-kit status document: the JSON is
    /// written to a ".tmp" sibling and renamed into place, so readers never
    /// observe a partially written file.
    /// </summary>
    public async Task SaveStatusAsync(string tenantId, OfflineKitStatusTransport status, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentNullException.ThrowIfNull(status);

        var stateDirectory = Path.Combine(_rootDirectory, ".state");
        Directory.CreateDirectory(stateDirectory);

        var path = GetStatusPath(tenantId);
        var temp = path + ".tmp";

        await using (var stream = File.Create(temp))
        {
            await JsonSerializer.SerializeAsync(stream, status, JsonOptions, cancellationToken).ConfigureAwait(false);
        }

        // Move (rename) is atomic on the same volume; the previous Copy+Delete
        // could expose a torn file to readers and leaked the .tmp on failure.
        File.Move(temp, path, overwrite: true);
    }

    /// <summary>Loads the tenant's status document; returns null when missing or unreadable.</summary>
    public async Task<OfflineKitStatusTransport?> LoadStatusAsync(string tenantId, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);

        var path = GetStatusPath(tenantId);
        if (!File.Exists(path))
        {
            return null;
        }

        try
        {
            await using var stream = File.OpenRead(path);
            return await JsonSerializer.DeserializeAsync<OfflineKitStatusTransport>(stream, JsonOptions, cancellationToken).ConfigureAwait(false);
        }
        catch (Exception ex) when (ex is IOException or JsonException)
        {
            // A corrupt or locked state file is treated as "no state" rather than failing the caller.
            _logger.LogWarning(ex, "Failed to read offline kit state from {Path}", path);
            return null;
        }
    }

    private string GetStatusPath(string tenantId)
    {
        var safeTenant = SanitizePathSegment(tenantId);
        return Path.Combine(_rootDirectory, ".state", $"offline-kit-active__{safeTenant}.json");
    }

    /// <summary>
    /// Lower-cases the value and replaces path-hostile characters (invalid filename
    /// chars, separators, whitespace) with '_' so tenant/bundle ids cannot escape
    /// the store root.
    /// </summary>
    private static string SanitizePathSegment(string value)
    {
        var trimmed = value.Trim().ToLowerInvariant();
        var invalid = Path.GetInvalidFileNameChars();
        var chars = trimmed
            .Select(c => invalid.Contains(c) || c == '/' || c == '\\' || char.IsWhiteSpace(c) ? '_' : c)
            .ToArray();
        return new string(chars);
    }
}
|
||||
|
||||
@@ -0,0 +1,192 @@
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.Scanner.Storage.Catalog;
|
||||
using StellaOps.Scanner.Storage.Services;
|
||||
using StellaOps.Scanner.WebService.Contracts;
|
||||
using StellaOps.Scanner.WebService.Domain;
|
||||
|
||||
namespace StellaOps.Scanner.WebService.Services;
|
||||
|
||||
/// <summary>
/// Detects, validates, and persists SBOM documents (CycloneDX or SPDX JSON)
/// as immutable artifacts for a scan.
/// </summary>
internal sealed class SbomIngestionService : ISbomIngestionService
{
    // Compact serialization: stored artifacts are machine-consumed.
    private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = false
    };

    private readonly ArtifactStorageService _artifactStorage;
    private readonly ILogger<SbomIngestionService> _logger;

    public SbomIngestionService(ArtifactStorageService artifactStorage, ILogger<SbomIngestionService> logger)
    {
        _artifactStorage = artifactStorage ?? throw new ArgumentNullException(nameof(artifactStorage));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <summary>
    /// Sniffs the SBOM format from its marker property: "bomFormat" == "CycloneDX"
    /// or a non-empty "spdxVersion". Returns null when neither marker is present.
    /// </summary>
    public string? DetectFormat(JsonDocument sbomDocument)
    {
        ArgumentNullException.ThrowIfNull(sbomDocument);

        var root = sbomDocument.RootElement;
        if (root.ValueKind != JsonValueKind.Object)
        {
            return null;
        }

        if (HasCycloneDxMarker(root))
        {
            return SbomFormats.CycloneDx;
        }

        if (HasSpdxMarker(root))
        {
            return SbomFormats.Spdx;
        }

        return null;
    }

    /// <summary>Validates that the document carries the marker required by the declared format.</summary>
    public SbomValidationResult Validate(JsonDocument sbomDocument, string format)
    {
        ArgumentNullException.ThrowIfNull(sbomDocument);
        ArgumentException.ThrowIfNullOrWhiteSpace(format);

        if (sbomDocument.RootElement.ValueKind != JsonValueKind.Object)
        {
            return SbomValidationResult.Failure("SBOM root must be a JSON object.");
        }

        var root = sbomDocument.RootElement;

        if (string.Equals(format, SbomFormats.CycloneDx, StringComparison.OrdinalIgnoreCase))
        {
            return HasCycloneDxMarker(root)
                ? SbomValidationResult.Success()
                : SbomValidationResult.Failure("CycloneDX SBOM must include bomFormat == 'CycloneDX'.");
        }

        if (string.Equals(format, SbomFormats.Spdx, StringComparison.OrdinalIgnoreCase))
        {
            return HasSpdxMarker(root)
                ? SbomValidationResult.Success()
                : SbomValidationResult.Failure("SPDX SBOM must include spdxVersion.");
        }

        return SbomValidationResult.Failure($"Unsupported SBOM format '{format}'.");
    }

    /// <summary>
    /// Serializes the SBOM and stores it as an immutable image-BOM artifact,
    /// logging (but not rejecting) a mismatched Content-Digest header.
    /// </summary>
    public async Task<SbomIngestionResult> IngestAsync(
        ScanId scanId,
        JsonDocument sbomDocument,
        string format,
        string? contentDigest,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(sbomDocument);
        ArgumentException.ThrowIfNullOrWhiteSpace(scanId.Value);
        ArgumentException.ThrowIfNullOrWhiteSpace(format);

        var (documentFormat, mediaType) = ResolveStorageFormat(format);
        var payload = JsonSerializer.SerializeToUtf8Bytes(sbomDocument.RootElement, JsonOptions);

        await using var payloadStream = new MemoryStream(payload, writable: false);
        var stored = await _artifactStorage.StoreArtifactAsync(
            ArtifactDocumentType.ImageBom,
            documentFormat,
            mediaType,
            payloadStream,
            immutable: true,
            ttlClass: "default",
            expiresAtUtc: null,
            cancellationToken: cancellationToken)
            .ConfigureAwait(false);

        var headerDigest = contentDigest?.Trim();
        if (!string.IsNullOrWhiteSpace(headerDigest)
            && !string.Equals(headerDigest, stored.BytesSha256, StringComparison.OrdinalIgnoreCase))
        {
            // Advisory only: the digest computed at storage time is authoritative.
            _logger.LogDebug(
                "SBOM Content-Digest header did not match stored digest header={HeaderDigest} stored={StoredDigest}",
                headerDigest,
                stored.BytesSha256);
        }

        var componentCount = CountComponents(sbomDocument, format);

        _logger.LogInformation(
            "Ingested sbom scan={ScanId} format={Format} components={Components} digest={Digest} id={SbomId}",
            scanId.Value,
            format,
            componentCount,
            stored.BytesSha256,
            stored.Id);

        return new SbomIngestionResult(
            SbomId: stored.Id,
            Format: format,
            ComponentCount: componentCount,
            Digest: stored.BytesSha256);
    }

    // True when the document declares bomFormat == "CycloneDX" (case-insensitive).
    private static bool HasCycloneDxMarker(JsonElement root)
        => root.TryGetProperty("bomFormat", out var bomFormat)
            && bomFormat.ValueKind == JsonValueKind.String
            && string.Equals(bomFormat.GetString(), "CycloneDX", StringComparison.OrdinalIgnoreCase);

    // True when the document declares a non-empty spdxVersion string.
    private static bool HasSpdxMarker(JsonElement root)
        => root.TryGetProperty("spdxVersion", out var spdxVersion)
            && spdxVersion.ValueKind == JsonValueKind.String
            && !string.IsNullOrWhiteSpace(spdxVersion.GetString());

    /// <summary>Maps the SBOM format name to the storage document format and media type.</summary>
    private static (ArtifactDocumentFormat Format, string MediaType) ResolveStorageFormat(string format)
    {
        if (string.Equals(format, SbomFormats.CycloneDx, StringComparison.OrdinalIgnoreCase))
        {
            return (ArtifactDocumentFormat.CycloneDxJson, "application/vnd.cyclonedx+json");
        }

        if (string.Equals(format, SbomFormats.Spdx, StringComparison.OrdinalIgnoreCase))
        {
            return (ArtifactDocumentFormat.SpdxJson, "application/spdx+json");
        }

        // Unknown formats fall back to CycloneDX storage with a generic media type.
        return (ArtifactDocumentFormat.CycloneDxJson, "application/json");
    }

    /// <summary>Counts CycloneDX "components" or SPDX "packages"; 0 for anything else.</summary>
    private static int CountComponents(JsonDocument document, string format)
    {
        var root = document.RootElement;
        if (root.ValueKind != JsonValueKind.Object)
        {
            return 0;
        }

        string? collectionName = null;
        if (string.Equals(format, SbomFormats.CycloneDx, StringComparison.OrdinalIgnoreCase))
        {
            collectionName = "components";
        }
        else if (string.Equals(format, SbomFormats.Spdx, StringComparison.OrdinalIgnoreCase))
        {
            collectionName = "packages";
        }

        if (collectionName is null)
        {
            return 0;
        }

        return root.TryGetProperty(collectionName, out var items) && items.ValueKind == JsonValueKind.Array
            ? items.GetArrayLength()
            : 0;
    }
}
|
||||
|
||||
@@ -175,6 +175,7 @@ public interface IScanManifestRepository
|
||||
{
|
||||
Task<SignedScanManifest?> GetManifestAsync(string scanId, string? manifestHash = null, CancellationToken cancellationToken = default);
|
||||
Task SaveManifestAsync(SignedScanManifest manifest, CancellationToken cancellationToken = default);
|
||||
Task<List<string>> FindAffectedScansAsync(AffectedScansQuery query, CancellationToken cancellationToken = default);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
Reference in New Issue
Block a user