up
Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
devportal-offline / build-offline (push) Has been cancelled
Mirror Thin Bundle Sign & Verify / mirror-sign (push) Has been cancelled

This commit is contained in:
StellaOps Bot
2025-11-28 00:45:16 +02:00
parent 3b96b2e3ea
commit 1c6730a1d2
95 changed files with 14504 additions and 463 deletions

View File

@@ -0,0 +1,179 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Excititor.WebService.Contracts;
/// <summary>
/// Response listing registered mirror bundles.
/// </summary>
/// <param name="Bundles">Page of bundle summaries.</param>
/// <param name="TotalCount">Total number of bundles matching the query, across all pages.</param>
/// <param name="Limit">Page size that was applied to the query (after clamping).</param>
/// <param name="Offset">Zero-based offset that was applied to the query.</param>
/// <param name="QueriedAt">Server timestamp at which the listing was produced.</param>
public sealed record MirrorBundleListResponse(
    [property: JsonPropertyName("bundles")] IReadOnlyList<MirrorBundleSummary> Bundles,
    [property: JsonPropertyName("totalCount")] int TotalCount,
    [property: JsonPropertyName("limit")] int Limit,
    [property: JsonPropertyName("offset")] int Offset,
    [property: JsonPropertyName("queriedAt")] DateTimeOffset QueriedAt);
/// <summary>
/// Summary of a registered mirror bundle.
/// </summary>
/// <param name="BundleId">Identifier of the bundle.</param>
/// <param name="MirrorGeneration">Generation label of the mirror snapshot.</param>
/// <param name="Publisher">Publisher that produced the bundle.</param>
/// <param name="SignedAt">When the bundle was signed.</param>
/// <param name="ImportedAt">When the bundle was imported.</param>
/// <param name="PayloadHash">Hash of the bundle payload.</param>
/// <param name="StalenessSeconds">Age in seconds since <paramref name="SignedAt"/>, measured at query time.</param>
/// <param name="Status">Derived import status (e.g. completed/failed/in_progress/unknown).</param>
public sealed record MirrorBundleSummary(
    [property: JsonPropertyName("bundleId")] string BundleId,
    [property: JsonPropertyName("mirrorGeneration")] string MirrorGeneration,
    [property: JsonPropertyName("publisher")] string Publisher,
    [property: JsonPropertyName("signedAt")] DateTimeOffset SignedAt,
    [property: JsonPropertyName("importedAt")] DateTimeOffset ImportedAt,
    [property: JsonPropertyName("payloadHash")] string PayloadHash,
    [property: JsonPropertyName("stalenessSeconds")] long StalenessSeconds,
    [property: JsonPropertyName("status")] string Status);
/// <summary>
/// Detailed response for a registered mirror bundle with provenance.
/// </summary>
/// <param name="BundleId">Identifier of the bundle.</param>
/// <param name="MirrorGeneration">Generation label of the mirror snapshot.</param>
/// <param name="TenantId">Tenant that owns the registration.</param>
/// <param name="Publisher">Publisher that produced the bundle.</param>
/// <param name="SignedAt">When the bundle was signed.</param>
/// <param name="ImportedAt">When the bundle was imported.</param>
/// <param name="Provenance">Signature and hash provenance metadata.</param>
/// <param name="Staleness">Staleness metrics relative to query time.</param>
/// <param name="Paths">Storage locations associated with the bundle.</param>
/// <param name="Timeline">Audit-trail events for the bundle.</param>
/// <param name="QueriedAt">Server timestamp at which the detail was produced.</param>
public sealed record MirrorBundleDetailResponse(
    [property: JsonPropertyName("bundleId")] string BundleId,
    [property: JsonPropertyName("mirrorGeneration")] string MirrorGeneration,
    [property: JsonPropertyName("tenantId")] string TenantId,
    [property: JsonPropertyName("publisher")] string Publisher,
    [property: JsonPropertyName("signedAt")] DateTimeOffset SignedAt,
    [property: JsonPropertyName("importedAt")] DateTimeOffset ImportedAt,
    [property: JsonPropertyName("provenance")] MirrorBundleProvenance Provenance,
    [property: JsonPropertyName("staleness")] MirrorBundleStaleness Staleness,
    [property: JsonPropertyName("paths")] MirrorBundlePaths Paths,
    [property: JsonPropertyName("timeline")] IReadOnlyList<MirrorBundleTimelineEntry> Timeline,
    [property: JsonPropertyName("queriedAt")] DateTimeOffset QueriedAt);
/// <summary>
/// Provenance metadata for a mirror bundle.
/// </summary>
/// <param name="PayloadHash">Hash of the bundle payload.</param>
/// <param name="Signature">Signature over the payload.</param>
/// <param name="PayloadUrl">Optional location of the payload.</param>
/// <param name="TransparencyLog">Optional transparency-log reference.</param>
/// <param name="ManifestHash">Hash of the portable manifest.</param>
public sealed record MirrorBundleProvenance(
    [property: JsonPropertyName("payloadHash")] string PayloadHash,
    [property: JsonPropertyName("signature")] string Signature,
    [property: JsonPropertyName("payloadUrl")] string? PayloadUrl,
    [property: JsonPropertyName("transparencyLog")] string? TransparencyLog,
    [property: JsonPropertyName("manifestHash")] string ManifestHash);
/// <summary>
/// Staleness metrics for a mirror bundle.
/// </summary>
/// <param name="SinceSignedSeconds">Seconds elapsed since the bundle was signed.</param>
/// <param name="SinceImportedSeconds">Seconds elapsed since the bundle was imported.</param>
/// <param name="SignedAgeCategory">Coarse age bucket for the signed age (fresh/recent/stale/old/very_old).</param>
/// <param name="ImportedAgeCategory">Coarse age bucket for the imported age.</param>
public sealed record MirrorBundleStaleness(
    [property: JsonPropertyName("sinceSignedSeconds")] long SinceSignedSeconds,
    [property: JsonPropertyName("sinceImportedSeconds")] long SinceImportedSeconds,
    [property: JsonPropertyName("signedAgeCategory")] string SignedAgeCategory,
    [property: JsonPropertyName("importedAgeCategory")] string ImportedAgeCategory);
/// <summary>
/// Storage paths for a mirror bundle.
/// </summary>
/// <param name="PortableManifestPath">Location of the portable manifest.</param>
/// <param name="EvidenceLockerPath">Location of the bundle in the evidence locker.</param>
public sealed record MirrorBundlePaths(
    [property: JsonPropertyName("portableManifestPath")] string PortableManifestPath,
    [property: JsonPropertyName("evidenceLockerPath")] string EvidenceLockerPath);
/// <summary>
/// Timeline entry for audit trail.
/// </summary>
/// <param name="EventType">Event type identifier for the timeline entry.</param>
/// <param name="CreatedAt">When the event was recorded.</param>
/// <param name="StalenessSeconds">Optional staleness recorded with the event.
/// NOTE(review): typed <c>int?</c> here while <see cref="MirrorBundleSummary"/> uses
/// <c>long</c> for staleness — confirm whether these should agree.</param>
/// <param name="ErrorCode">Optional error code for failure events.</param>
/// <param name="Message">Optional human-readable message.</param>
public sealed record MirrorBundleTimelineEntry(
    [property: JsonPropertyName("eventType")] string EventType,
    [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
    [property: JsonPropertyName("stalenessSeconds")] int? StalenessSeconds,
    [property: JsonPropertyName("errorCode")] string? ErrorCode,
    [property: JsonPropertyName("message")] string? Message);
/// <summary>
/// Response for timeline-only query.
/// </summary>
/// <param name="BundleId">Identifier of the bundle.</param>
/// <param name="MirrorGeneration">Generation label of the mirror snapshot.</param>
/// <param name="Timeline">Audit-trail events for the bundle.</param>
/// <param name="QueriedAt">Server timestamp at which the timeline was produced.</param>
public sealed record MirrorBundleTimelineResponse(
    [property: JsonPropertyName("bundleId")] string BundleId,
    [property: JsonPropertyName("mirrorGeneration")] string MirrorGeneration,
    [property: JsonPropertyName("timeline")] IReadOnlyList<MirrorBundleTimelineEntry> Timeline,
    [property: JsonPropertyName("queriedAt")] DateTimeOffset QueriedAt);
/// <summary>
/// Structured error response for sealed-mode and airgap errors.
/// </summary>
/// <param name="ErrorCode">Machine-readable error code (e.g. AIRGAP_* constants).</param>
/// <param name="Message">Human-readable description of the failure.</param>
/// <param name="Category">Error category (see <c>AirgapErrorMapping.Category*</c> constants).</param>
/// <param name="Retryable">Whether the caller may retry the operation.</param>
/// <param name="Details">Optional additional key/value context.</param>
public sealed record AirgapErrorResponse(
    [property: JsonPropertyName("errorCode")] string ErrorCode,
    [property: JsonPropertyName("message")] string Message,
    [property: JsonPropertyName("category")] string Category,
    [property: JsonPropertyName("retryable")] bool Retryable,
    [property: JsonPropertyName("details")] IReadOnlyDictionary<string, string>? Details);
/// <summary>
/// Maps sealed-mode error codes to structured error responses.
/// </summary>
public static class AirgapErrorMapping
{
    public const string CategoryValidation = "validation";
    public const string CategorySealedMode = "sealed_mode";
    public const string CategoryTrust = "trust";
    public const string CategoryDuplicate = "duplicate";
    public const string CategoryNotFound = "not_found";

    /// <summary>
    /// Builds a structured <see cref="AirgapErrorResponse"/> for the given error code,
    /// deriving the category and retryability from the code.
    /// </summary>
    /// <param name="errorCode">Machine-readable error code; must not be null.</param>
    /// <param name="message">Human-readable description.</param>
    /// <param name="details">Optional additional key/value context.</param>
    /// <exception cref="ArgumentNullException"><paramref name="errorCode"/> is null.</exception>
    public static AirgapErrorResponse FromErrorCode(string errorCode, string message, IReadOnlyDictionary<string, string>? details = null)
    {
        ArgumentNullException.ThrowIfNull(errorCode);

        var (category, retryable) = errorCode switch
        {
            "AIRGAP_EGRESS_BLOCKED" => (CategorySealedMode, false),
            "AIRGAP_SOURCE_UNTRUSTED" => (CategoryTrust, false),
            "AIRGAP_SIGNATURE_MISSING" => (CategoryValidation, false),
            "AIRGAP_SIGNATURE_INVALID" => (CategoryValidation, false),
            // Only stale payloads are worth retrying (a fresher payload may arrive).
            "AIRGAP_PAYLOAD_STALE" => (CategoryValidation, true),
            "AIRGAP_PAYLOAD_MISMATCH" => (CategoryTrust, false),
            "AIRGAP_DUPLICATE_IMPORT" => (CategoryDuplicate, false),
            "AIRGAP_BUNDLE_NOT_FOUND" => (CategoryNotFound, false),
            // Everything else — including the bundle_/mirror_/publisher_/payload_/signed_
            // prefixed field-validation codes — maps to non-retryable validation.
            _ => (CategoryValidation, false),
        };

        return new AirgapErrorResponse(errorCode, message, category, retryable, details);
    }

    /// <summary>
    /// Error response for a bundle/generation pair that has already been imported.
    /// </summary>
    public static AirgapErrorResponse DuplicateImport(string bundleId, string mirrorGeneration)
        => new(
            "AIRGAP_DUPLICATE_IMPORT",
            $"Bundle '{bundleId}' generation '{mirrorGeneration}' has already been imported.",
            CategoryDuplicate,
            Retryable: false,
            new Dictionary<string, string>
            {
                ["bundleId"] = bundleId,
                ["mirrorGeneration"] = mirrorGeneration,
            });

    /// <summary>
    /// Error response for a missing bundle, optionally scoped to a generation.
    /// </summary>
    public static AirgapErrorResponse BundleNotFound(string bundleId, string? mirrorGeneration)
        => new(
            "AIRGAP_BUNDLE_NOT_FOUND",
            mirrorGeneration is null
                ? $"Bundle '{bundleId}' not found."
                : $"Bundle '{bundleId}' generation '{mirrorGeneration}' not found.",
            CategoryNotFound,
            Retryable: false,
            new Dictionary<string, string>
            {
                ["bundleId"] = bundleId,
                // Details values are non-nullable strings, so a missing generation is
                // represented as an empty string.
                ["mirrorGeneration"] = mirrorGeneration ?? string.Empty,
            });
}
/// <summary>
/// Utility for computing staleness ages and bucketing them into coarse categories.
/// </summary>
public static class StalenessCalculator
{
    // Bucket boundaries, in seconds.
    private const long OneHour = 3600;
    private const long OneDay = 86400;
    private const long OneWeek = 604800;
    private const long ThirtyDays = 2592000;

    /// <summary>
    /// Whole seconds elapsed from <paramref name="then"/> to <paramref name="now"/>,
    /// rounded up; clamped to zero when <paramref name="then"/> is not in the past.
    /// </summary>
    public static long ComputeSeconds(DateTimeOffset then, DateTimeOffset now)
    {
        var elapsed = (now - then).TotalSeconds;
        return elapsed <= 0 ? 0L : (long)Math.Ceiling(elapsed);
    }

    /// <summary>
    /// Buckets an age in seconds into one of: fresh (&lt; 1 hour), recent (&lt; 1 day),
    /// stale (&lt; 1 week), old (&lt; 30 days), very_old (otherwise).
    /// </summary>
    public static string CategorizeAge(long seconds)
    {
        if (seconds < OneHour)
        {
            return "fresh";
        }

        if (seconds < OneDay)
        {
            return "recent";
        }

        if (seconds < OneWeek)
        {
            return "stale";
        }

        if (seconds < ThirtyDays)
        {
            return "old";
        }

        return "very_old";
    }
}

View File

@@ -1,6 +1,7 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using Microsoft.AspNetCore.Builder;
@@ -15,6 +16,7 @@ using StellaOps.Excititor.Core.Observations;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.WebService.Contracts;
using StellaOps.Excititor.WebService.Services;
using StellaOps.Excititor.WebService.Telemetry;
namespace StellaOps.Excititor.WebService.Endpoints;
@@ -245,6 +247,195 @@ public static class EvidenceEndpoints
return Results.Ok(response);
}).WithName("LookupVexEvidence");
// GET /vuln/evidence/vex/{advisory_key} - Get evidence by advisory key (EXCITITOR-VULN-29-002)
app.MapGet("/vuln/evidence/vex/{advisory_key}", async (
    HttpContext context,
    string advisory_key,
    IOptions<VexMongoStorageOptions> storageOptions,
    [FromServices] IMongoDatabase database,
    TimeProvider timeProvider,
    [FromQuery] int? limit,
    [FromQuery] string? cursor,
    CancellationToken cancellationToken) =>
{
    // Authorization: caller must hold the vex.read scope; RequireScope returns a
    // ready-made failure result when it does not.
    var scopeResult = ScopeAuthorization.RequireScope(context, "vex.read");
    if (scopeResult is not null)
    {
        return scopeResult;
    }
    if (!TryResolveTenant(context, storageOptions.Value, out var tenant, out var tenantError))
    {
        return tenantError;
    }
    if (string.IsNullOrWhiteSpace(advisory_key))
    {
        NormalizationTelemetry.RecordAdvisoryKeyCanonicalizeError(tenant, "empty_key");
        return Results.BadRequest(new { error = new { code = "ERR_ADVISORY_KEY", message = "advisory_key is required" } });
    }
    // Latency measurement starts after validation and covers canonicalization,
    // the Mongo query, and response assembly.
    var stopwatch = Stopwatch.StartNew();
    // Canonicalize the advisory key using VexAdvisoryKeyCanonicalizer
    var canonicalizer = new VexAdvisoryKeyCanonicalizer();
    VexCanonicalAdvisoryKey canonicalKey;
    try
    {
        canonicalKey = canonicalizer.Canonicalize(advisory_key.Trim());
        NormalizationTelemetry.RecordAdvisoryKeyCanonicalization(tenant, canonicalKey);
    }
    catch (ArgumentException ex)
    {
        // Canonicalizer rejects malformed keys with ArgumentException; surface as 400.
        NormalizationTelemetry.RecordAdvisoryKeyCanonicalizeError(tenant, "invalid_format", advisory_key);
        return Results.BadRequest(new { error = new { code = "ERR_INVALID_ADVISORY_KEY", message = ex.Message } });
    }
    // Page size: default 100, clamped to [1, 500].
    var take = Math.Clamp(limit.GetValueOrDefault(100), 1, 500);
    var collection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Statements);
    var builder = Builders<BsonDocument>.Filter;
    // Build filter to match by vulnerability ID (case-insensitive)
    // Try original key, canonical key, and all aliases
    // Each candidate identifier becomes an anchored, regex-escaped, case-insensitive match.
    var vulnerabilityFilters = new List<FilterDefinition<BsonDocument>>
    {
        builder.Regex("VulnerabilityId", new BsonRegularExpression($"^{EscapeRegex(advisory_key.Trim())}$", "i"))
    };
    // Add canonical key if different
    if (!string.Equals(canonicalKey.AdvisoryKey, advisory_key.Trim(), StringComparison.OrdinalIgnoreCase))
    {
        vulnerabilityFilters.Add(builder.Regex("VulnerabilityId", new BsonRegularExpression($"^{EscapeRegex(canonicalKey.AdvisoryKey)}$", "i")));
    }
    // Add original ID if available
    if (canonicalKey.OriginalId is { } originalId &&
        !string.Equals(originalId, advisory_key.Trim(), StringComparison.OrdinalIgnoreCase))
    {
        vulnerabilityFilters.Add(builder.Regex("VulnerabilityId", new BsonRegularExpression($"^{EscapeRegex(originalId)}$", "i")));
    }
    var filter = builder.Or(vulnerabilityFilters);
    // Apply cursor-based pagination if provided
    // Keyset pagination over (InsertedAt desc, _id desc): keep documents strictly
    // older than the cursor position, breaking InsertedAt ties by _id.
    if (!string.IsNullOrWhiteSpace(cursor) && TryDecodeCursor(cursor, out var cursorTime, out var cursorId))
    {
        var ltTime = builder.Lt("InsertedAt", cursorTime);
        var eqTimeLtId = builder.And(
            builder.Eq("InsertedAt", cursorTime),
            builder.Lt("_id", ObjectId.Parse(cursorId)));
        filter = builder.And(filter, builder.Or(ltTime, eqTimeLtId));
    }
    var sort = Builders<BsonDocument>.Sort.Descending("InsertedAt").Descending("_id");
    var documents = await collection
        .Find(filter)
        .Sort(sort)
        .Limit(take)
        .ToListAsync(cancellationToken)
        .ConfigureAwait(false);
    var now = timeProvider.GetUtcNow();
    var statements = new List<VexAdvisoryStatementResponse>();
    foreach (var doc in documents)
    {
        // NOTE(review): BsonValue.AsString throws InvalidCastException when the
        // underlying value is BsonNull, so the `?? string.Empty` fallbacks below
        // likely never apply — confirm stored statements always carry these fields
        // as strings (or harden with IsString checks).
        var provenance = new VexAdvisoryProvenanceResponse(
            DocumentDigest: doc.GetValue("Document", BsonNull.Value).IsBsonDocument
                ? doc["Document"].AsBsonDocument.GetValue("Digest", BsonNull.Value).AsString ?? string.Empty
                : string.Empty,
            DocumentFormat: doc.GetValue("Document", BsonNull.Value).IsBsonDocument
                ? doc["Document"].AsBsonDocument.GetValue("Format", BsonNull.Value).AsString ?? "unknown"
                : "unknown",
            SourceUri: doc.GetValue("Document", BsonNull.Value).IsBsonDocument
                ? doc["Document"].AsBsonDocument.GetValue("SourceUri", BsonNull.Value).AsString ?? string.Empty
                : string.Empty,
            Revision: doc.GetValue("Document", BsonNull.Value).IsBsonDocument
                ? doc["Document"].AsBsonDocument.GetValue("Revision", BsonNull.Value).AsString
                : null,
            InsertedAt: doc.GetValue("InsertedAt", BsonNull.Value).IsBsonDateTime
                ? new DateTimeOffset(doc["InsertedAt"].ToUniversalTime(), TimeSpan.Zero)
                : now);
        // Attestation is only emitted when a Document.Signature subdocument with a
        // non-empty Type is present.
        VexAdvisoryAttestationResponse? attestation = null;
        if (doc.GetValue("Document", BsonNull.Value).IsBsonDocument)
        {
            var docSection = doc["Document"].AsBsonDocument;
            if (docSection.Contains("Signature") && !docSection["Signature"].IsBsonNull)
            {
                var sig = docSection["Signature"].AsBsonDocument;
                var sigType = sig.GetValue("Type", BsonNull.Value).AsString;
                if (!string.IsNullOrWhiteSpace(sigType))
                {
                    attestation = new VexAdvisoryAttestationResponse(
                        SignatureType: sigType,
                        Issuer: sig.GetValue("Issuer", BsonNull.Value).AsString,
                        Subject: sig.GetValue("Subject", BsonNull.Value).AsString,
                        KeyId: sig.GetValue("KeyId", BsonNull.Value).AsString,
                        VerifiedAt: sig.Contains("VerifiedAt") && !sig["VerifiedAt"].IsBsonNull
                            ? new DateTimeOffset(sig["VerifiedAt"].ToUniversalTime(), TimeSpan.Zero)
                            : null,
                        TransparencyLogRef: sig.GetValue("TransparencyLogReference", BsonNull.Value).AsString,
                        TrustWeight: sig.Contains("TrustWeight") && !sig["TrustWeight"].IsBsonNull
                            ? (decimal)sig["TrustWeight"].ToDouble()
                            : null,
                        TrustTier: DeriveTrustTier(sig.GetValue("TrustIssuerId", BsonNull.Value).AsString));
                }
            }
        }
        var productDoc = doc.GetValue("Product", BsonNull.Value).IsBsonDocument
            ? doc["Product"].AsBsonDocument
            : null;
        var product = new VexAdvisoryProductResponse(
            Key: productDoc?.GetValue("Key", BsonNull.Value).AsString ?? string.Empty,
            Name: productDoc?.GetValue("Name", BsonNull.Value).AsString,
            Version: productDoc?.GetValue("Version", BsonNull.Value).AsString,
            Purl: productDoc?.GetValue("Purl", BsonNull.Value).AsString,
            Cpe: productDoc?.GetValue("Cpe", BsonNull.Value).AsString);
        statements.Add(new VexAdvisoryStatementResponse(
            StatementId: doc.GetValue("_id", BsonNull.Value).ToString() ?? string.Empty,
            ProviderId: doc.GetValue("ProviderId", BsonNull.Value).AsString ?? string.Empty,
            Product: product,
            Status: doc.GetValue("Status", BsonNull.Value).AsString ?? "unknown",
            Justification: doc.GetValue("Justification", BsonNull.Value).AsString,
            Detail: doc.GetValue("Detail", BsonNull.Value).AsString,
            FirstSeen: doc.GetValue("FirstSeen", BsonNull.Value).IsBsonDateTime
                ? new DateTimeOffset(doc["FirstSeen"].ToUniversalTime(), TimeSpan.Zero)
                : now,
            LastSeen: doc.GetValue("LastSeen", BsonNull.Value).IsBsonDateTime
                ? new DateTimeOffset(doc["LastSeen"].ToUniversalTime(), TimeSpan.Zero)
                : now,
            Provenance: provenance,
            Attestation: attestation));
    }
    // Aliases come from the canonicalizer's link graph, not from the query results.
    var aliases = canonicalKey.Links
        .Select(link => new VexAdvisoryLinkResponse(link.Identifier, link.Type, link.IsOriginal))
        .ToList();
    stopwatch.Stop();
    NormalizationTelemetry.RecordEvidenceRetrieval(
        tenant,
        "success",
        statements.Count,
        stopwatch.Elapsed.TotalSeconds);
    // NOTE(review): TotalCount reflects only the returned page (post-limit), not the
    // full match count — confirm this is the intended contract for the response field.
    var response = new VexAdvisoryEvidenceResponse(
        AdvisoryKey: advisory_key.Trim(),
        CanonicalKey: canonicalKey.AdvisoryKey,
        Scope: canonicalKey.Scope.ToString().ToLowerInvariant(),
        Aliases: aliases,
        Statements: statements,
        QueriedAt: now,
        TotalCount: statements.Count);
    return Results.Ok(response);
}).WithName("GetVexAdvisoryEvidence");
}
private static bool TryResolveTenant(HttpContext context, VexMongoStorageOptions options, out string tenant, out IResult? problem)
@@ -308,4 +499,37 @@ public static class EvidenceEndpoints
var payload = FormattableString.Invariant($"{timestamp:O}|{id}");
return Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(payload));
}
/// <summary>
/// Backslash-escapes regex metacharacters so a user-supplied identifier can be
/// embedded safely inside an anchored MongoDB regular expression.
/// </summary>
private static string EscapeRegex(string input)
    => System.Text.RegularExpressions.Regex.Escape(input);
/// <summary>
/// Derives a coarse trust tier label from a trust issuer identifier by keyword
/// matching. Returns null when no issuer id is available.
/// </summary>
/// <param name="issuerId">Issuer identifier from the statement's signature metadata.</param>
/// <returns>"vendor", "distro-trusted", "community", "other", or null.</returns>
private static string? DeriveTrustTier(string? issuerId)
{
    if (string.IsNullOrWhiteSpace(issuerId))
    {
        return null;
    }

    // Ordinal case-insensitive matching avoids allocating a lowered copy of the id;
    // all keywords are ASCII so results match the previous ToLowerInvariant approach.
    static bool Has(string value, string token)
        => value.Contains(token, StringComparison.OrdinalIgnoreCase);

    // Order matters: vendor/upstream outranks distro, which outranks community.
    if (Has(issuerId, "vendor") || Has(issuerId, "upstream"))
    {
        return "vendor";
    }

    if (Has(issuerId, "distro") || Has(issuerId, "rhel") ||
        Has(issuerId, "ubuntu") || Has(issuerId, "debian"))
    {
        return "distro-trusted";
    }

    if (Has(issuerId, "community") || Has(issuerId, "oss"))
    {
        return "community";
    }

    return "other";
}
}

View File

@@ -0,0 +1,264 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Storage.Mongo;
using StellaOps.Excititor.WebService.Contracts;
namespace StellaOps.Excititor.WebService.Endpoints;
/// <summary>
/// Endpoints for mirror bundle registration, provenance exposure, and timeline queries (EXCITITOR-AIRGAP-56-001).
/// </summary>
internal static class MirrorRegistrationEndpoints
{
    /// <summary>
    /// Registers the /airgap/v1/mirror/bundles endpoint group.
    /// </summary>
    public static void MapMirrorRegistrationEndpoints(WebApplication app)
    {
        var group = app.MapGroup("/airgap/v1/mirror/bundles");
        group.MapGet("/", HandleListBundlesAsync)
            .WithName("ListMirrorBundles")
            .WithDescription("List registered mirror bundles with pagination and optional filters.");
        group.MapGet("/{bundleId}", HandleGetBundleAsync)
            .WithName("GetMirrorBundle")
            .WithDescription("Get mirror bundle details with provenance and staleness metrics.");
        group.MapGet("/{bundleId}/timeline", HandleGetBundleTimelineAsync)
            .WithName("GetMirrorBundleTimeline")
            .WithDescription("Get timeline events for a mirror bundle.");
    }

    /// <summary>
    /// GET /airgap/v1/mirror/bundles — paginated bundle listing with optional
    /// publisher and imported-after filters.
    /// </summary>
    private static async Task<IResult> HandleListBundlesAsync(
        HttpContext httpContext,
        IAirgapImportStore importStore,
        TimeProvider timeProvider,
        ILogger<MirrorRegistrationEndpointsMarker> logger,
        [FromQuery] string? publisher = null,
        [FromQuery] string? importedAfter = null,
        [FromQuery] int limit = 50,
        [FromQuery] int offset = 0,
        CancellationToken cancellationToken = default)
    {
        var tenantId = ResolveTenantId(httpContext);
        var now = timeProvider.GetUtcNow();

        // Parse the optional importedAfter value with the invariant culture so the
        // accepted format does not depend on server locale (CA1305). An unparsable
        // value is silently ignored rather than rejected, matching prior behavior.
        DateTimeOffset? afterFilter = null;
        if (!string.IsNullOrWhiteSpace(importedAfter) &&
            DateTimeOffset.TryParse(importedAfter, CultureInfo.InvariantCulture, DateTimeStyles.None, out var parsed))
        {
            afterFilter = parsed;
        }

        // Clamp paging inputs to safe bounds before hitting the store.
        var clampedLimit = Math.Clamp(limit, 1, 100);
        var clampedOffset = Math.Max(0, offset);

        var records = await importStore.ListAsync(
            tenantId,
            publisher,
            afterFilter,
            clampedLimit,
            clampedOffset,
            cancellationToken).ConfigureAwait(false);

        var totalCount = await importStore.CountAsync(
            tenantId,
            publisher,
            afterFilter,
            cancellationToken).ConfigureAwait(false);

        var summaries = records.Select(record =>
        {
            // Listing staleness is measured from the signing time.
            var stalenessSeconds = StalenessCalculator.ComputeSeconds(record.SignedAt, now);
            var status = DetermineStatus(record.Timeline);
            return new MirrorBundleSummary(
                record.BundleId,
                record.MirrorGeneration,
                record.Publisher,
                record.SignedAt,
                record.ImportedAt,
                record.PayloadHash,
                stalenessSeconds,
                status);
        }).ToList();

        var response = new MirrorBundleListResponse(
            summaries,
            totalCount,
            clampedLimit,
            clampedOffset,
            now);

        await WriteJsonAsync(httpContext, response, StatusCodes.Status200OK, cancellationToken).ConfigureAwait(false);
        return Results.Empty;
    }

    /// <summary>
    /// GET /airgap/v1/mirror/bundles/{bundleId} — bundle detail with provenance,
    /// staleness metrics, storage paths, and full timeline. 404 when unknown.
    /// </summary>
    private static async Task<IResult> HandleGetBundleAsync(
        string bundleId,
        HttpContext httpContext,
        IAirgapImportStore importStore,
        TimeProvider timeProvider,
        ILogger<MirrorRegistrationEndpointsMarker> logger,
        [FromQuery] string? generation = null,
        CancellationToken cancellationToken = default)
    {
        var tenantId = ResolveTenantId(httpContext);
        var now = timeProvider.GetUtcNow();

        var record = await importStore.FindByBundleIdAsync(
            tenantId,
            bundleId,
            generation,
            cancellationToken).ConfigureAwait(false);

        if (record is null)
        {
            var errorResponse = AirgapErrorMapping.BundleNotFound(bundleId, generation);
            await WriteJsonAsync(httpContext, errorResponse, StatusCodes.Status404NotFound, cancellationToken).ConfigureAwait(false);
            return Results.Empty;
        }

        var sinceSignedSeconds = StalenessCalculator.ComputeSeconds(record.SignedAt, now);
        var sinceImportedSeconds = StalenessCalculator.ComputeSeconds(record.ImportedAt, now);
        var staleness = new MirrorBundleStaleness(
            sinceSignedSeconds,
            sinceImportedSeconds,
            StalenessCalculator.CategorizeAge(sinceSignedSeconds),
            StalenessCalculator.CategorizeAge(sinceImportedSeconds));

        var provenance = new MirrorBundleProvenance(
            record.PayloadHash,
            record.Signature,
            record.PayloadUrl,
            record.TransparencyLog,
            record.PortableManifestHash);

        var paths = new MirrorBundlePaths(
            record.PortableManifestPath,
            record.EvidenceLockerPath);

        // Timeline entries are returned newest-first.
        var timeline = record.Timeline
            .OrderByDescending(e => e.CreatedAt)
            .Select(e => new MirrorBundleTimelineEntry(
                e.EventType,
                e.CreatedAt,
                e.StalenessSeconds,
                e.ErrorCode,
                e.Message))
            .ToList();

        var response = new MirrorBundleDetailResponse(
            record.BundleId,
            record.MirrorGeneration,
            record.TenantId,
            record.Publisher,
            record.SignedAt,
            record.ImportedAt,
            provenance,
            staleness,
            paths,
            timeline,
            now);

        await WriteJsonAsync(httpContext, response, StatusCodes.Status200OK, cancellationToken).ConfigureAwait(false);
        return Results.Empty;
    }

    /// <summary>
    /// GET /airgap/v1/mirror/bundles/{bundleId}/timeline — timeline-only view,
    /// newest-first. 404 when the bundle is unknown.
    /// </summary>
    private static async Task<IResult> HandleGetBundleTimelineAsync(
        string bundleId,
        HttpContext httpContext,
        IAirgapImportStore importStore,
        TimeProvider timeProvider,
        ILogger<MirrorRegistrationEndpointsMarker> logger,
        [FromQuery] string? generation = null,
        CancellationToken cancellationToken = default)
    {
        var tenantId = ResolveTenantId(httpContext);
        var now = timeProvider.GetUtcNow();

        var record = await importStore.FindByBundleIdAsync(
            tenantId,
            bundleId,
            generation,
            cancellationToken).ConfigureAwait(false);

        if (record is null)
        {
            var errorResponse = AirgapErrorMapping.BundleNotFound(bundleId, generation);
            await WriteJsonAsync(httpContext, errorResponse, StatusCodes.Status404NotFound, cancellationToken).ConfigureAwait(false);
            return Results.Empty;
        }

        var timeline = record.Timeline
            .OrderByDescending(e => e.CreatedAt)
            .Select(e => new MirrorBundleTimelineEntry(
                e.EventType,
                e.CreatedAt,
                e.StalenessSeconds,
                e.ErrorCode,
                e.Message))
            .ToList();

        var response = new MirrorBundleTimelineResponse(
            record.BundleId,
            record.MirrorGeneration,
            timeline,
            now);

        await WriteJsonAsync(httpContext, response, StatusCodes.Status200OK, cancellationToken).ConfigureAwait(false);
        return Results.Empty;
    }

    /// <summary>
    /// Resolves the tenant from the X-Tenant-Id header, falling back to "default".
    /// NOTE(review): the header is client-supplied input — confirm upstream
    /// authentication enforces tenant isolation before trusting it here.
    /// </summary>
    private static string ResolveTenantId(HttpContext httpContext)
    {
        if (httpContext.Request.Headers.TryGetValue("X-Tenant-Id", out var tenantHeader)
            && !string.IsNullOrWhiteSpace(tenantHeader.ToString()))
        {
            return tenantHeader.ToString();
        }

        return "default";
    }

    /// <summary>
    /// Maps the most recent timeline event to a coarse import status.
    /// </summary>
    private static string DetermineStatus(IEnumerable<AirgapTimelineEntry> timeline)
    {
        var entries = timeline.ToList();
        if (entries.Count == 0)
        {
            return "unknown";
        }

        var latestEvent = entries.MaxBy(e => e.CreatedAt);
        if (latestEvent is null)
        {
            return "unknown";
        }

        return latestEvent.EventType switch
        {
            "airgap.import.completed" => "completed",
            "airgap.import.failed" => "failed",
            "airgap.import.started" => "in_progress",
            _ => "unknown",
        };
    }

    /// <summary>
    /// Serializes the payload with the canonical JSON serializer (deterministic
    /// output) and writes it directly to the response with the given status code.
    /// </summary>
    private static async Task WriteJsonAsync<T>(HttpContext context, T payload, int statusCode, CancellationToken cancellationToken)
    {
        context.Response.StatusCode = statusCode;
        context.Response.ContentType = "application/json";
        var json = VexCanonicalJsonSerializer.Serialize(payload);
        // ConfigureAwait(false) for consistency with every other await in this class.
        await context.Response.WriteAsync(json, cancellationToken).ConfigureAwait(false);
    }
}
/// <summary>
/// Marker type for logger category resolution. Static classes cannot be used as
/// <c>ILogger&lt;T&gt;</c> type arguments, so the endpoint handlers use this type
/// to name their logger category.
/// </summary>
internal sealed class MirrorRegistrationEndpointsMarker { }

View File

@@ -67,6 +67,7 @@ internal static class TelemetryExtensions
.AddMeter(IngestionTelemetry.MeterName)
.AddMeter(EvidenceTelemetry.MeterName)
.AddMeter(LinksetTelemetry.MeterName)
.AddMeter(NormalizationTelemetry.MeterName)
.AddAspNetCoreInstrumentation()
.AddHttpClientInstrumentation()
.AddRuntimeInstrumentation();

View File

@@ -72,6 +72,8 @@ services.Configure<VexAttestationVerificationOptions>(configuration.GetSection("
services.AddVexPolicy();
services.AddSingleton<IVexEvidenceChunkService, VexEvidenceChunkService>();
services.AddSingleton<ChunkTelemetry>();
// EXCITITOR-VULN-29-004: Normalization observability for Vuln Explorer + Advisory AI dashboards
services.AddSingleton<IVexNormalizationTelemetryRecorder, VexNormalizationTelemetryRecorder>();
services.AddRedHatCsafConnector();
services.Configure<MirrorDistributionOptions>(configuration.GetSection(MirrorDistributionOptions.SectionName));
services.AddSingleton<MirrorRateLimiter>();
@@ -2275,6 +2277,7 @@ app.MapGet("/obs/excititor/timeline", async (
IngestEndpoints.MapIngestEndpoints(app);
ResolveEndpoint.MapResolveEndpoint(app);
MirrorEndpoints.MapMirrorEndpoints(app);
MirrorRegistrationEndpoints.MapMirrorRegistrationEndpoints(app);
// Evidence and Attestation APIs (WEB-OBS-53-001, WEB-OBS-54-001)
EvidenceEndpoints.MapEvidenceEndpoints(app);

View File

@@ -0,0 +1,318 @@
using System;
using System.Collections.Generic;
using System.Diagnostics.Metrics;
using StellaOps.Excititor.Core.Canonicalization;
namespace StellaOps.Excititor.WebService.Telemetry;
/// <summary>
/// Telemetry metrics for VEX normalization and canonicalization operations (EXCITITOR-VULN-29-004).
/// Tracks advisory/product key canonicalization, normalization errors, suppression scopes,
/// and withdrawn statement handling for Vuln Explorer and Advisory AI dashboards.
/// </summary>
internal static class NormalizationTelemetry
{
public const string MeterName = "StellaOps.Excititor.WebService.Normalization";

// Process-lifetime meter; every instrument below is created once from it.
private static readonly Meter Meter = new(MeterName);

// Advisory key canonicalization metrics
private static readonly Counter<long> AdvisoryKeyCanonicalizeCounter =
    Meter.CreateCounter<long>(
        "excititor.vex.canonicalize.advisory_key_total",
        unit: "operations",
        description: "Total advisory key canonicalization operations by outcome.");
private static readonly Counter<long> AdvisoryKeyCanonicalizeErrorCounter =
    Meter.CreateCounter<long>(
        "excititor.vex.canonicalize.advisory_key_errors",
        unit: "errors",
        description: "Advisory key canonicalization errors by error type.");
private static readonly Counter<long> AdvisoryKeyScopeCounter =
    Meter.CreateCounter<long>(
        "excititor.vex.canonicalize.advisory_key_scope",
        unit: "keys",
        description: "Advisory keys processed by scope (global, ecosystem, vendor, distribution, unknown).");

// Product key canonicalization metrics
private static readonly Counter<long> ProductKeyCanonicalizeCounter =
    Meter.CreateCounter<long>(
        "excititor.vex.canonicalize.product_key_total",
        unit: "operations",
        description: "Total product key canonicalization operations by outcome.");
private static readonly Counter<long> ProductKeyCanonicalizeErrorCounter =
    Meter.CreateCounter<long>(
        "excititor.vex.canonicalize.product_key_errors",
        unit: "errors",
        description: "Product key canonicalization errors by error type.");
private static readonly Counter<long> ProductKeyScopeCounter =
    Meter.CreateCounter<long>(
        "excititor.vex.canonicalize.product_key_scope",
        unit: "keys",
        description: "Product keys processed by scope (package, component, ospackage, container, platform, unknown).");
private static readonly Counter<long> ProductKeyTypeCounter =
    Meter.CreateCounter<long>(
        "excititor.vex.canonicalize.product_key_type",
        unit: "keys",
        description: "Product keys processed by type (purl, cpe, rpm, deb, oci, platform, other).");

// Evidence retrieval metrics
private static readonly Counter<long> EvidenceRetrievalCounter =
    Meter.CreateCounter<long>(
        "excititor.vex.evidence.retrieval_total",
        unit: "requests",
        description: "Total evidence retrieval requests by outcome.");
private static readonly Histogram<int> EvidenceStatementCountHistogram =
    Meter.CreateHistogram<int>(
        "excititor.vex.evidence.statement_count",
        unit: "statements",
        description: "Distribution of statements returned per evidence retrieval request.");
private static readonly Histogram<double> EvidenceRetrievalLatencyHistogram =
    Meter.CreateHistogram<double>(
        "excititor.vex.evidence.retrieval_latency_seconds",
        unit: "s",
        description: "Latency distribution for evidence retrieval operations.");

// Normalization error metrics
private static readonly Counter<long> NormalizationErrorCounter =
    Meter.CreateCounter<long>(
        "excititor.vex.normalize.errors_total",
        unit: "errors",
        description: "Total normalization errors by type and provider.");

// Suppression scope metrics
private static readonly Counter<long> SuppressionScopeCounter =
    Meter.CreateCounter<long>(
        "excititor.vex.suppression.scope_total",
        unit: "suppressions",
        description: "Suppression scope applications by scope type.");
private static readonly Counter<long> SuppressionAppliedCounter =
    Meter.CreateCounter<long>(
        "excititor.vex.suppression.applied_total",
        unit: "statements",
        description: "Statements affected by suppression scopes.");

// Withdrawn statement metrics
private static readonly Counter<long> WithdrawnStatementCounter =
    Meter.CreateCounter<long>(
        "excititor.vex.withdrawn.statements_total",
        unit: "statements",
        description: "Total withdrawn statement detections by provider.");
private static readonly Counter<long> WithdrawnReplacementCounter =
    Meter.CreateCounter<long>(
        "excititor.vex.withdrawn.replacements_total",
        unit: "replacements",
        description: "Withdrawn statement replacements processed.");
/// <summary>
/// Records a successful advisory key canonicalization: one success outcome plus
/// one increment of the scope bucket derived from the canonical result.
/// </summary>
public static void RecordAdvisoryKeyCanonicalization(
    string? tenant,
    VexCanonicalAdvisoryKey result)
{
    var tenantTag = NormalizeTenant(tenant);
    AdvisoryKeyCanonicalizeCounter.Add(1, BuildOutcomeTags(tenantTag, "success"));
    AdvisoryKeyScopeCounter.Add(1, BuildScopeTags(tenantTag, result.Scope.ToString().ToLowerInvariant()));
}
/// <summary>
/// Records an advisory key canonicalization error: one error outcome plus one
/// increment of the per-error-type counter.
/// </summary>
public static void RecordAdvisoryKeyCanonicalizeError(
    string? tenant,
    string errorType,
    string? advisoryKey = null)
{
    // advisoryKey is accepted for call-site symmetry but deliberately not emitted
    // as a metric tag — presumably to keep tag cardinality bounded; confirm before
    // adding it to the tag set.
    var tenantTag = NormalizeTenant(tenant);
    AdvisoryKeyCanonicalizeCounter.Add(1, BuildOutcomeTags(tenantTag, "error"));
    AdvisoryKeyCanonicalizeErrorCounter.Add(
        1,
        new KeyValuePair<string, object?>("tenant", tenantTag),
        new KeyValuePair<string, object?>("error_type", errorType));
}
/// <summary>
/// Records a successful product key canonicalization: one success outcome plus
/// scope and key-type bucket increments derived from the canonical result.
/// </summary>
public static void RecordProductKeyCanonicalization(
    string? tenant,
    VexCanonicalProductKey result)
{
    var tenantTag = NormalizeTenant(tenant);
    ProductKeyCanonicalizeCounter.Add(1, BuildOutcomeTags(tenantTag, "success"));
    ProductKeyScopeCounter.Add(1, BuildScopeTags(tenantTag, result.Scope.ToString().ToLowerInvariant()));
    ProductKeyTypeCounter.Add(
        1,
        new KeyValuePair<string, object?>("tenant", tenantTag),
        new KeyValuePair<string, object?>("key_type", result.KeyType.ToString().ToLowerInvariant()));
}
/// <summary>
/// Records a product key canonicalization error: one error outcome plus one
/// increment of the per-error-type counter.
/// </summary>
public static void RecordProductKeyCanonicalizeError(
    string? tenant,
    string errorType,
    string? productKey = null)
{
    // productKey is accepted for call-site symmetry but deliberately not emitted
    // as a metric tag — presumably a cardinality guard; confirm before using it.
    var tenantTag = NormalizeTenant(tenant);
    ProductKeyCanonicalizeCounter.Add(1, BuildOutcomeTags(tenantTag, "error"));
    ProductKeyCanonicalizeErrorCounter.Add(
        1,
        new KeyValuePair<string, object?>("tenant", tenantTag),
        new KeyValuePair<string, object?>("error_type", errorType));
}
/// <summary>
/// Records an evidence retrieval operation: one count per call, latency always,
/// and the statement-count histogram only for successful retrievals.
/// </summary>
public static void RecordEvidenceRetrieval(
    string? tenant,
    string outcome,
    int statementCount,
    double latencySeconds)
{
    var outcomeTags = BuildOutcomeTags(NormalizeTenant(tenant), outcome);
    EvidenceRetrievalCounter.Add(1, outcomeTags);
    var succeeded = string.Equals(outcome, "success", StringComparison.OrdinalIgnoreCase);
    if (succeeded)
    {
        // Statement counts are only meaningful when the retrieval completed.
        EvidenceStatementCountHistogram.Record(statementCount, outcomeTags);
    }
    EvidenceRetrievalLatencyHistogram.Record(latencySeconds, outcomeTags);
}
/// <summary>
/// Records a normalization error tagged by tenant, provider, and error type.
/// </summary>
public static void RecordNormalizationError(
    string? tenant,
    string provider,
    string errorType,
    string? detail = null)
{
    // NOTE(review): detail is accepted for interface parity but deliberately not
    // emitted as a metric tag (unbounded cardinality) - confirm callers log it.
    NormalizationErrorCounter.Add(
        1,
        new[]
        {
            new KeyValuePair<string, object?>("tenant", NormalizeTenant(tenant)),
            new KeyValuePair<string, object?>("provider", string.IsNullOrWhiteSpace(provider) ? "unknown" : provider),
            new KeyValuePair<string, object?>("error_type", errorType),
        });
}
/// <summary>
/// Records a suppression scope evaluation; the applied counter is only bumped
/// when the scope actually affected statements.
/// </summary>
public static void RecordSuppressionScope(
    string? tenant,
    string scopeType,
    int affectedStatements)
{
    var tags = new[]
    {
        new KeyValuePair<string, object?>("tenant", NormalizeTenant(tenant)),
        new KeyValuePair<string, object?>("scope_type", scopeType),
    };
    SuppressionScopeCounter.Add(1, tags);
    if (affectedStatements > 0)
    {
        SuppressionAppliedCounter.Add(affectedStatements, tags);
    }
}
/// <summary>
/// Records a single withdrawn-statement detection; a replacement id additionally
/// bumps the superseded counter.
/// </summary>
public static void RecordWithdrawnStatement(
    string? tenant,
    string provider,
    string? replacementId = null)
{
    var tags = new[]
    {
        new KeyValuePair<string, object?>("tenant", NormalizeTenant(tenant)),
        new KeyValuePair<string, object?>("provider", string.IsNullOrWhiteSpace(provider) ? "unknown" : provider),
    };
    WithdrawnStatementCounter.Add(1, tags);
    if (!string.IsNullOrWhiteSpace(replacementId))
    {
        WithdrawnReplacementCounter.Add(1, tags);
    }
}
/// <summary>
/// Records batch withdrawn-statement processing; zero counts are skipped so
/// no-op batches do not emit samples.
/// </summary>
public static void RecordWithdrawnStatements(
    string? tenant,
    string provider,
    int totalWithdrawn,
    int replacements)
{
    if (totalWithdrawn <= 0 && replacements <= 0)
    {
        return;
    }

    var tags = new[]
    {
        new KeyValuePair<string, object?>("tenant", NormalizeTenant(tenant)),
        new KeyValuePair<string, object?>("provider", string.IsNullOrWhiteSpace(provider) ? "unknown" : provider),
    };
    if (totalWithdrawn > 0)
    {
        WithdrawnStatementCounter.Add(totalWithdrawn, tags);
    }
    if (replacements > 0)
    {
        WithdrawnReplacementCounter.Add(replacements, tags);
    }
}
// Null or whitespace-only tenants collapse to the shared "default" label;
// any other value passes through untrimmed.
private static string NormalizeTenant(string? tenant)
{
    return string.IsNullOrWhiteSpace(tenant) ? "default" : tenant;
}
// Builds the standard tenant/outcome tag pair used by the outcome counters.
private static KeyValuePair<string, object?>[] BuildOutcomeTags(string tenant, string outcome)
{
    return new KeyValuePair<string, object?>[]
    {
        new("tenant", tenant),
        new("outcome", outcome),
    };
}
// Builds the standard tenant/scope tag pair used by the scope counters.
private static KeyValuePair<string, object?>[] BuildScopeTags(string tenant, string scope)
{
    return new KeyValuePair<string, object?>[]
    {
        new("tenant", tenant),
        new("scope", scope),
    };
}
}

View File

@@ -0,0 +1,87 @@
using System;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Core;
namespace StellaOps.Excititor.WebService.Telemetry;
/// <summary>
/// Implementation of <see cref="IVexNormalizationTelemetryRecorder"/> that bridges to
/// <see cref="NormalizationTelemetry"/> static metrics and structured logging (EXCITITOR-VULN-29-004).
/// Every call forwards to the metric layer first, then emits a structured log line.
/// </summary>
internal sealed class VexNormalizationTelemetryRecorder : IVexNormalizationTelemetryRecorder
{
    private readonly ILogger<VexNormalizationTelemetryRecorder> _logger;

    public VexNormalizationTelemetryRecorder(ILogger<VexNormalizationTelemetryRecorder> logger)
        => _logger = logger ?? throw new ArgumentNullException(nameof(logger));

    public void RecordNormalizationError(string? tenant, string provider, string errorType, string? detail = null)
    {
        NormalizationTelemetry.RecordNormalizationError(tenant, provider, errorType, detail);
        var tenantLabel = tenant ?? "default";
        _logger.LogWarning(
            "VEX normalization error: tenant={Tenant} provider={Provider} errorType={ErrorType} detail={Detail}",
            tenantLabel,
            provider,
            errorType,
            detail ?? "(none)");
    }

    public void RecordSuppressionScope(string? tenant, string scopeType, int affectedStatements)
    {
        NormalizationTelemetry.RecordSuppressionScope(tenant, scopeType, affectedStatements);
        var tenantLabel = tenant ?? "default";
        if (affectedStatements <= 0)
        {
            // No statements matched - log at debug to keep the noise floor low.
            _logger.LogDebug(
                "VEX suppression scope checked (no statements affected): tenant={Tenant} scopeType={ScopeType}",
                tenantLabel,
                scopeType);
            return;
        }

        _logger.LogInformation(
            "VEX suppression scope applied: tenant={Tenant} scopeType={ScopeType} affectedStatements={AffectedStatements}",
            tenantLabel,
            scopeType,
            affectedStatements);
    }

    public void RecordWithdrawnStatement(string? tenant, string provider, string? replacementId = null)
    {
        NormalizationTelemetry.RecordWithdrawnStatement(tenant, provider, replacementId);
        var tenantLabel = tenant ?? "default";
        if (!string.IsNullOrWhiteSpace(replacementId))
        {
            _logger.LogInformation(
                "VEX withdrawn statement superseded: tenant={Tenant} provider={Provider} replacementId={ReplacementId}",
                tenantLabel,
                provider,
                replacementId);
            return;
        }

        _logger.LogInformation(
            "VEX withdrawn statement detected: tenant={Tenant} provider={Provider}",
            tenantLabel,
            provider);
    }

    public void RecordWithdrawnStatements(string? tenant, string provider, int totalWithdrawn, int replacements)
    {
        NormalizationTelemetry.RecordWithdrawnStatements(tenant, provider, totalWithdrawn, replacements);
        if (totalWithdrawn <= 0)
        {
            return;
        }

        _logger.LogInformation(
            "VEX withdrawn statements batch: tenant={Tenant} provider={Provider} totalWithdrawn={TotalWithdrawn} replacements={Replacements}",
            tenant ?? "default",
            provider,
            totalWithdrawn,
            replacements);
    }
}

View File

@@ -0,0 +1,487 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Excititor.Core.Evidence;
/// <summary>
/// Builds portable evidence bundles for sealed deployments with timeline and attestation metadata (EXCITITOR-AIRGAP-58-001).
/// </summary>
public interface IPortableEvidenceBundleBuilder
{
    /// <summary>
    /// Assembles a zip bundle for <paramref name="request"/> and returns its
    /// identity, digests, and verification data. Implementations are expected
    /// to be deterministic given the same request content.
    /// </summary>
    Task<PortableEvidenceBundleResult> BuildAsync(
        PortableEvidenceBundleRequest request,
        CancellationToken cancellationToken);
}
/// <summary>
/// Request for building a portable evidence bundle.
/// </summary>
/// <param name="Tenant">Tenant the bundle is scoped to; sanitized into the bundle id and file name.</param>
/// <param name="Manifest">Locker manifest whose Merkle root anchors bundle verification.</param>
/// <param name="EvidenceItems">Raw evidence payloads to embed under <c>evidence/</c>.</param>
/// <param name="Attestation">Optional DSSE attestation metadata; embedded only when the envelope JSON is present.</param>
/// <param name="Timeline">Audit timeline events; omitted from the archive when empty.</param>
/// <param name="AdditionalMetadata">Optional free-form metadata copied into the bundle manifest.</param>
public sealed record PortableEvidenceBundleRequest(
    string Tenant,
    VexLockerManifest Manifest,
    IReadOnlyList<PortableEvidenceItem> EvidenceItems,
    PortableEvidenceAttestationMetadata? Attestation,
    IReadOnlyList<PortableEvidenceTimelineEntry> Timeline,
    ImmutableDictionary<string, string>? AdditionalMetadata = null);
/// <summary>
/// Individual evidence item to include in the bundle.
/// </summary>
/// <param name="ObservationId">Identifier of the observation this evidence belongs to.</param>
/// <param name="ProviderId">Source provider; sanitized into the archive directory name.</param>
/// <param name="ContentHash">Content digest (e.g. <c>sha256:...</c>); used as the archive file name.</param>
/// <param name="Content">Raw payload bytes written verbatim into the archive.</param>
/// <param name="Format">Optional format hint ("json", "jsonl", "openvex", ...) mapped to a file extension; defaults to binary.</param>
public sealed record PortableEvidenceItem(
    string ObservationId,
    string ProviderId,
    string ContentHash,
    ReadOnlyMemory<byte> Content,
    string? Format);
/// <summary>
/// Attestation metadata for the bundle. The attestation file is only written
/// when <see cref="DsseEnvelopeJson"/> is non-null.
/// </summary>
public sealed record PortableEvidenceAttestationMetadata(
    string? DsseEnvelopeJson,
    string? EnvelopeDigest,
    string? PredicateType,
    string? SignatureType,
    string? KeyId,
    string? Issuer,
    string? Subject,
    DateTimeOffset? SignedAt,
    string? TransparencyLogRef);
/// <summary>
/// Timeline entry for the audit trail in the bundle; serialized to
/// <c>timeline.json</c> in chronological order.
/// </summary>
public sealed record PortableEvidenceTimelineEntry(
    string EventType,
    DateTimeOffset CreatedAt,
    string? TenantId,
    string? BundleId,
    string? MirrorGeneration,
    int? StalenessSeconds,
    string? ErrorCode,
    string? Message);
/// <summary>
/// Result of building a portable evidence bundle.
/// </summary>
/// <remarks>
/// NOTE(review): <see cref="BundlePath"/> is a suggested file name derived from
/// the tenant and timestamp - the builder does not write anything to disk;
/// confirm callers persist the archive themselves.
/// </remarks>
public sealed record PortableEvidenceBundleResult(
    string BundleId,
    string BundlePath,
    string ManifestDigest,
    string BundleDigest,
    long BundleSizeBytes,
    int ItemCount,
    DateTimeOffset CreatedAt,
    PortableEvidenceBundleVerification Verification);
/// <summary>
/// Verification data for the bundle: the manifest Merkle root plus SHA-256
/// digests of the manifest, the whole archive, and the attestation (if any).
/// </summary>
public sealed record PortableEvidenceBundleVerification(
    string MerkleRoot,
    string ManifestDigest,
    string BundleDigest,
    bool HasAttestation,
    string? AttestationDigest);
/// <summary>
/// Default implementation of portable evidence bundle builder. Builds the zip
/// archive entirely in memory, computes SHA-256 digests for the manifest,
/// attestation, and whole archive, and returns verification metadata. Nothing
/// is written to disk; the returned path is a suggested file name.
/// </summary>
public sealed class PortableEvidenceBundleBuilder : IPortableEvidenceBundleBuilder
{
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    };

    /// <summary>
    /// Builds the bundle archive for <paramref name="request"/>.
    /// </summary>
    /// <param name="request">Bundle content: manifest, evidence items, attestation, timeline.</param>
    /// <param name="cancellationToken">Observed before building and between evidence items.</param>
    /// <returns>Bundle identity, digests, size, and verification data.</returns>
    /// <exception cref="ArgumentNullException">When the request or its manifest is null.</exception>
    /// <exception cref="OperationCanceledException">When cancellation is requested.</exception>
    public Task<PortableEvidenceBundleResult> BuildAsync(
        PortableEvidenceBundleRequest request,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentNullException.ThrowIfNull(request.Manifest);
        // Fix: the token was previously accepted but never observed.
        cancellationToken.ThrowIfCancellationRequested();
        var createdAt = DateTimeOffset.UtcNow;
        var bundleId = GenerateBundleId(request.Tenant, createdAt);
        using var memoryStream = new MemoryStream();
        string manifestDigest;
        string? attestationDigest = null;
        using (var archive = new ZipArchive(memoryStream, ZipArchiveMode.Create, leaveOpen: true))
        {
            // 1. Write the locker manifest
            manifestDigest = WriteManifest(archive, request.Manifest);
            // 2. Write attestation if present
            if (request.Attestation?.DsseEnvelopeJson is not null)
            {
                attestationDigest = WriteAttestation(archive, request.Attestation);
            }
            // 3. Write evidence items (cancellable per item)
            WriteEvidenceItems(archive, request.EvidenceItems, cancellationToken);
            // 4. Write timeline
            WriteTimeline(archive, request.Timeline);
            // 5. Write bundle manifest (index of all contents)
            WriteBundleManifest(archive, request, bundleId, createdAt, manifestDigest, attestationDigest);
            // 6. Write verifier instructions
            WriteVerifierInstructions(archive);
        }
        memoryStream.Position = 0;
        var bundleDigest = ComputeDigest(memoryStream.ToArray());
        var bundlePath = $"evidence-bundle-{SanitizeForPath(request.Tenant)}-{createdAt:yyyyMMdd-HHmmss}.zip";
        var verification = new PortableEvidenceBundleVerification(
            request.Manifest.MerkleRoot,
            manifestDigest,
            bundleDigest,
            request.Attestation?.DsseEnvelopeJson is not null,
            attestationDigest);
        return Task.FromResult(new PortableEvidenceBundleResult(
            bundleId,
            bundlePath,
            manifestDigest,
            bundleDigest,
            memoryStream.Length,
            request.EvidenceItems.Count,
            createdAt,
            verification));
    }

    // Bundle id: "portable-evidence:{tenant}:{utc timestamp}:{8 random hex chars}".
    // The random suffix makes ids unique for bundles built in the same second.
    private static string GenerateBundleId(string tenant, DateTimeOffset timestamp)
    {
        var normalizedTenant = string.IsNullOrWhiteSpace(tenant) ? "default" : tenant.Trim().ToLowerInvariant();
        var date = timestamp.ToString("yyyyMMdd-HHmmss", CultureInfo.InvariantCulture);
        var randomSuffix = Guid.NewGuid().ToString("N")[..8];
        return $"portable-evidence:{normalizedTenant}:{date}:{randomSuffix}";
    }

    // Writes manifest.json using the canonical serializer (stable digests) and
    // returns the SHA-256 digest of the serialized bytes.
    private static string WriteManifest(ZipArchive archive, VexLockerManifest manifest)
    {
        var entry = archive.CreateEntry("manifest.json", CompressionLevel.Optimal);
        var json = VexCanonicalJsonSerializer.Serialize(manifest);
        var bytes = Encoding.UTF8.GetBytes(json);
        using (var stream = entry.Open())
        {
            stream.Write(bytes);
        }
        return ComputeDigest(bytes);
    }

    // Writes attestation.json and returns the SHA-256 digest of the serialized bytes.
    private static string WriteAttestation(ZipArchive archive, PortableEvidenceAttestationMetadata attestation)
    {
        var entry = archive.CreateEntry("attestation.json", CompressionLevel.Optimal);
        var attestationDoc = new PortableAttestationDocument(
            attestation.DsseEnvelopeJson,
            attestation.EnvelopeDigest,
            attestation.PredicateType,
            attestation.SignatureType,
            attestation.KeyId,
            attestation.Issuer,
            attestation.Subject,
            attestation.SignedAt?.ToString("O", CultureInfo.InvariantCulture),
            attestation.TransparencyLogRef);
        var json = JsonSerializer.Serialize(attestationDoc, SerializerOptions);
        var bytes = Encoding.UTF8.GetBytes(json);
        using (var stream = entry.Open())
        {
            stream.Write(bytes);
        }
        return ComputeDigest(bytes);
    }

    // Writes each item under evidence/{provider}/{digest}{ext}; the content hash
    // doubles as the file name so verifiers can re-hash files in place.
    private static void WriteEvidenceItems(
        ZipArchive archive,
        IReadOnlyList<PortableEvidenceItem> items,
        CancellationToken cancellationToken)
    {
        foreach (var item in items)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var extension = GetExtension(item.Format);
            var entryPath = $"evidence/{SanitizeForPath(item.ProviderId)}/{SanitizeDigest(item.ContentHash)}{extension}";
            var entry = archive.CreateEntry(entryPath, CompressionLevel.Optimal);
            using var stream = entry.Open();
            stream.Write(item.Content.Span);
        }
    }

    // Writes timeline.json sorted chronologically; omitted entirely when empty.
    private static void WriteTimeline(ZipArchive archive, IReadOnlyList<PortableEvidenceTimelineEntry> timeline)
    {
        if (timeline.Count == 0)
        {
            return;
        }
        var entry = archive.CreateEntry("timeline.json", CompressionLevel.Optimal);
        var sortedTimeline = timeline
            .OrderBy(e => e.CreatedAt)
            .Select(e => new PortableTimelineEntryDocument(
                e.EventType,
                e.CreatedAt.ToString("O", CultureInfo.InvariantCulture),
                e.TenantId,
                e.BundleId,
                e.MirrorGeneration,
                e.StalenessSeconds,
                e.ErrorCode,
                e.Message))
            .ToList();
        var json = JsonSerializer.Serialize(sortedTimeline, SerializerOptions);
        using var stream = entry.Open();
        stream.Write(Encoding.UTF8.GetBytes(json));
    }

    // Writes bundle-manifest.json: a deterministic index (sorted by observation
    // then provider) of everything in the archive plus digests and metadata.
    private static void WriteBundleManifest(
        ZipArchive archive,
        PortableEvidenceBundleRequest request,
        string bundleId,
        DateTimeOffset createdAt,
        string manifestDigest,
        string? attestationDigest)
    {
        var entry = archive.CreateEntry("bundle-manifest.json", CompressionLevel.Optimal);
        var evidenceIndex = request.EvidenceItems
            .Select(item => new PortableBundleEvidenceEntry(
                item.ObservationId,
                item.ProviderId,
                item.ContentHash,
                item.Format ?? "json",
                item.Content.Length))
            .OrderBy(e => e.ObservationId, StringComparer.Ordinal)
            .ThenBy(e => e.ProviderId, StringComparer.OrdinalIgnoreCase)
            .ToList();
        var bundleManifest = new PortableBundleManifestDocument(
            SchemaVersion: 1,
            BundleId: bundleId,
            Tenant: request.Tenant,
            CreatedAt: createdAt.ToString("O", CultureInfo.InvariantCulture),
            ManifestDigest: manifestDigest,
            MerkleRoot: request.Manifest.MerkleRoot,
            ItemCount: request.EvidenceItems.Count,
            TimelineEventCount: request.Timeline.Count,
            HasAttestation: attestationDigest is not null,
            AttestationDigest: attestationDigest,
            Evidence: evidenceIndex,
            Metadata: request.AdditionalMetadata ?? ImmutableDictionary<string, string>.Empty);
        var json = JsonSerializer.Serialize(bundleManifest, SerializerOptions);
        using var stream = entry.Open();
        stream.Write(Encoding.UTF8.GetBytes(json));
    }

    // Writes the human-readable VERIFY.md verification guide.
    private static void WriteVerifierInstructions(ZipArchive archive)
    {
        var entry = archive.CreateEntry("VERIFY.md", CompressionLevel.Optimal);
        var instructions = GetVerifierInstructions();
        using var stream = entry.Open();
        stream.Write(Encoding.UTF8.GetBytes(instructions));
    }

    private static string GetVerifierInstructions() => """
        # Portable Evidence Bundle Verification Guide
        This document describes how to verify the integrity and authenticity of this
        portable evidence bundle for Advisory AI teams.
        ## Bundle Contents
        - `manifest.json` - Evidence locker manifest with Merkle root
        - `attestation.json` - DSSE attestation envelope (if signed)
        - `evidence/` - Raw evidence items organized by provider
        - `timeline.json` - Audit timeline events
        - `bundle-manifest.json` - Index of all bundle contents
        ## Verification Steps
        ### Step 1: Verify Bundle Integrity
        1. Extract the bundle to a temporary directory
        2. Compute SHA-256 hash of each evidence file
        3. Compare against `contentHash` values in `manifest.json`
        ```bash
        # Example: Verify a single evidence file
        sha256sum evidence/provider-name/sha256_abc123.json
        ```
        ### Step 2: Verify Merkle Root
        1. Collect all `contentHash` values from `manifest.json` items
        2. Sort them by `observationId` then `providerId`
        3. Compute Merkle root using binary tree with SHA-256
        4. Compare against `merkleRoot` in `manifest.json`
        ```python
        # Pseudocode for Merkle root verification
        import hashlib
        def compute_merkle_root(hashes):
            if len(hashes) == 0:
                return hashlib.sha256(b'').hexdigest()
            if len(hashes) == 1:
                return hashes[0]
            if len(hashes) % 2 != 0:
                hashes.append(hashes[-1]) # Pad to even
            next_level = []
            for i in range(0, len(hashes), 2):
                combined = bytes.fromhex(hashes[i] + hashes[i+1])
                next_level.append(hashlib.sha256(combined).hexdigest())
            return compute_merkle_root(next_level)
        ```
        ### Step 3: Verify Attestation (if present)
        If `attestation.json` exists:
        1. Parse the DSSE envelope from `dsseEnvelope` field
        2. Verify the signature using the public key identified by `keyId`
        3. Optionally check transparency log reference at `transparencyLogRef`
        ```bash
        # Example: Verify with cosign (if Sigstore attestation)
        cosign verify-blob --signature attestation.sig --certificate attestation.crt manifest.json
        ```
        ### Step 4: Validate Timeline Consistency
        1. Parse `timeline.json`
        2. Verify events are in chronological order
        3. Check for any `airgap.import.failed` events with error codes
        4. Verify staleness values are within acceptable bounds
        ## Error Codes Reference
        | Code | Description |
        |------|-------------|
        | AIRGAP_EGRESS_BLOCKED | External URL blocked in sealed mode |
        | AIRGAP_SOURCE_UNTRUSTED | Publisher not in allowlist |
        | AIRGAP_SIGNATURE_MISSING | Required signature not provided |
        | AIRGAP_SIGNATURE_INVALID | Signature validation failed |
        | AIRGAP_PAYLOAD_STALE | Bundle timestamp exceeds skew tolerance |
        | AIRGAP_PAYLOAD_MISMATCH | Payload hash doesn't match metadata |
        ## Advisory AI Integration
        For automated verification in Advisory AI pipelines:
        1. Extract `bundle-manifest.json` for quick integrity check
        2. Use `merkleRoot` as the canonical bundle identifier
        3. Reference `attestationDigest` for cryptographic proof
        4. Parse `timeline.json` for provenance audit trail
        ## Support
        For questions about bundle verification, contact your StellaOps administrator
        or refer to the StellaOps documentation.
        ---
        Generated by StellaOps Excititor - Portable Evidence Bundle Builder
        """;

    // Lowercase-hex SHA-256 with a "sha256:" prefix.
    private static string ComputeDigest(byte[] data)
    {
        var hash = SHA256.HashData(data);
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }

    // Lowercases and replaces anything outside [a-z0-9-_] with '_' so the value
    // is safe as a zip entry path segment.
    private static string SanitizeForPath(string value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return "unknown";
        }
        var builder = new StringBuilder(value.Length);
        foreach (var ch in value.ToLowerInvariant())
        {
            builder.Append(char.IsLetterOrDigit(ch) || ch == '-' || ch == '_' ? ch : '_');
        }
        return builder.ToString();
    }

    // "sha256:abc" -> "sha256_abc" (':' is not safe in file names everywhere).
    private static string SanitizeDigest(string digest)
    {
        return digest.Replace(":", "_");
    }

    // Maps a format hint to a file extension; unknown formats fall back to .bin.
    private static string GetExtension(string? format)
        => format?.ToLowerInvariant() switch
        {
            "json" => ".json",
            "jsonlines" or "jsonl" => ".jsonl",
            "openvex" => ".json",
            "csaf" => ".json",
            "cyclonedx" => ".json",
            _ => ".bin",
        };
}
// Internal document types for serialization

// JSON shape of attestation.json; SignedAt is pre-formatted as an ISO-8601 string.
internal sealed record PortableAttestationDocument(
    [property: JsonPropertyName("dsseEnvelope")] string? DsseEnvelope,
    [property: JsonPropertyName("envelopeDigest")] string? EnvelopeDigest,
    [property: JsonPropertyName("predicateType")] string? PredicateType,
    [property: JsonPropertyName("signatureType")] string? SignatureType,
    [property: JsonPropertyName("keyId")] string? KeyId,
    [property: JsonPropertyName("issuer")] string? Issuer,
    [property: JsonPropertyName("subject")] string? Subject,
    [property: JsonPropertyName("signedAt")] string? SignedAt,
    [property: JsonPropertyName("transparencyLogRef")] string? TransparencyLogRef);
// JSON shape of timeline.json entries; CreatedAt is pre-formatted as ISO-8601.
internal sealed record PortableTimelineEntryDocument(
    [property: JsonPropertyName("eventType")] string EventType,
    [property: JsonPropertyName("createdAt")] string CreatedAt,
    [property: JsonPropertyName("tenantId")] string? TenantId,
    [property: JsonPropertyName("bundleId")] string? BundleId,
    [property: JsonPropertyName("mirrorGeneration")] string? MirrorGeneration,
    [property: JsonPropertyName("stalenessSeconds")] int? StalenessSeconds,
    [property: JsonPropertyName("errorCode")] string? ErrorCode,
    [property: JsonPropertyName("message")] string? Message);
// JSON shape of bundle-manifest.json: digests, counts, and a sorted evidence index.
internal sealed record PortableBundleManifestDocument(
    [property: JsonPropertyName("schemaVersion")] int SchemaVersion,
    [property: JsonPropertyName("bundleId")] string BundleId,
    [property: JsonPropertyName("tenant")] string Tenant,
    [property: JsonPropertyName("createdAt")] string CreatedAt,
    [property: JsonPropertyName("manifestDigest")] string ManifestDigest,
    [property: JsonPropertyName("merkleRoot")] string MerkleRoot,
    [property: JsonPropertyName("itemCount")] int ItemCount,
    [property: JsonPropertyName("timelineEventCount")] int TimelineEventCount,
    [property: JsonPropertyName("hasAttestation")] bool HasAttestation,
    [property: JsonPropertyName("attestationDigest")] string? AttestationDigest,
    [property: JsonPropertyName("evidence")] IReadOnlyList<PortableBundleEvidenceEntry> Evidence,
    [property: JsonPropertyName("metadata")] IReadOnlyDictionary<string, string> Metadata);
// One row of the bundle-manifest evidence index.
internal sealed record PortableBundleEvidenceEntry(
    [property: JsonPropertyName("observationId")] string ObservationId,
    [property: JsonPropertyName("providerId")] string ProviderId,
    [property: JsonPropertyName("contentHash")] string ContentHash,
    [property: JsonPropertyName("format")] string Format,
    [property: JsonPropertyName("sizeBytes")] int SizeBytes);

View File

@@ -0,0 +1,250 @@
using System.Collections.Immutable;
namespace StellaOps.Excititor.Core;
/// <summary>
/// Portable evidence bundle for sealed deployments (EXCITITOR-AIRGAP-58-001).
/// Contains evidence content, timeline events, and attestation metadata
/// for offline verification by Advisory AI teams.
/// </summary>
public sealed record PortableEvidenceBundle
{
    // Bump when the serialized shape changes incompatibly.
    public const int SchemaVersion = 1;

    /// <summary>
    /// Creates a bundle. Trims id/tenant; a default (uninitialized) timeline
    /// array is normalized to empty. Attestation is optional.
    /// </summary>
    /// <exception cref="ArgumentException">When bundleId or tenantId is null/whitespace.</exception>
    /// <exception cref="ArgumentNullException">When content or provenance is null.</exception>
    public PortableEvidenceBundle(
        string bundleId,
        DateTimeOffset generatedAt,
        string tenantId,
        PortableEvidenceBundleContent content,
        ImmutableArray<PortableTimelineEntry> timeline,
        PortableBundleAttestation? attestation,
        PortableBundleProvenance provenance)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(bundleId);
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentNullException.ThrowIfNull(content);
        ArgumentNullException.ThrowIfNull(provenance);
        BundleId = bundleId.Trim();
        GeneratedAt = generatedAt;
        TenantId = tenantId.Trim();
        Content = content;
        Timeline = timeline.IsDefault ? ImmutableArray<PortableTimelineEntry>.Empty : timeline;
        Attestation = attestation;
        Provenance = provenance;
    }

    public string BundleId { get; }
    public DateTimeOffset GeneratedAt { get; }
    public string TenantId { get; }
    public PortableEvidenceBundleContent Content { get; }
    public ImmutableArray<PortableTimelineEntry> Timeline { get; }
    public PortableBundleAttestation? Attestation { get; }
    public PortableBundleProvenance Provenance { get; }
}
/// <summary>
/// Evidence content within a portable bundle: the claims (and optional
/// consensus) for one vulnerability, optionally scoped to one product.
/// </summary>
public sealed record PortableEvidenceBundleContent
{
    /// <summary>
    /// Creates the content block. Default (uninitialized) immutable arrays are
    /// normalized to empty; a blank product key becomes null (unscoped).
    /// </summary>
    /// <exception cref="ArgumentException">When vulnerabilityId is null/whitespace.</exception>
    public PortableEvidenceBundleContent(
        string vulnerabilityId,
        string? productKey,
        ImmutableArray<VexClaim> claims,
        VexConsensus? consensus,
        ImmutableArray<VexQuietProvenance> quietProvenance)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId);
        VulnerabilityId = vulnerabilityId.Trim();
        ProductKey = string.IsNullOrWhiteSpace(productKey) ? null : productKey.Trim();
        Claims = claims.IsDefault ? ImmutableArray<VexClaim>.Empty : claims;
        Consensus = consensus;
        QuietProvenance = quietProvenance.IsDefault ? ImmutableArray<VexQuietProvenance>.Empty : quietProvenance;
    }

    public string VulnerabilityId { get; }
    public string? ProductKey { get; }
    public ImmutableArray<VexClaim> Claims { get; }
    public VexConsensus? Consensus { get; }
    public ImmutableArray<VexQuietProvenance> QuietProvenance { get; }
}
/// <summary>
/// Timeline entry in a portable evidence bundle. Identifiers are required and
/// trimmed; the justification summary tolerates null, the evidence hash and
/// attributes are optional.
/// </summary>
public sealed record PortableTimelineEntry
{
    /// <exception cref="ArgumentException">When eventId, eventType, providerId, or traceId is null/whitespace.</exception>
    public PortableTimelineEntry(
        string eventId,
        string eventType,
        string providerId,
        string traceId,
        string justificationSummary,
        string? evidenceHash,
        DateTimeOffset createdAt,
        ImmutableDictionary<string, string>? attributes)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(eventId);
        ArgumentException.ThrowIfNullOrWhiteSpace(eventType);
        ArgumentException.ThrowIfNullOrWhiteSpace(providerId);
        ArgumentException.ThrowIfNullOrWhiteSpace(traceId);
        EventId = eventId.Trim();
        EventType = eventType.Trim();
        ProviderId = providerId.Trim();
        TraceId = traceId.Trim();
        // Summary is declared non-nullable but tolerated as null upstream.
        JustificationSummary = justificationSummary?.Trim() ?? string.Empty;
        EvidenceHash = TrimToNull(evidenceHash);
        CreatedAt = createdAt;
        Attributes = attributes ?? ImmutableDictionary<string, string>.Empty;
    }

    public string EventId { get; }
    public string EventType { get; }
    public string ProviderId { get; }
    public string TraceId { get; }
    public string JustificationSummary { get; }
    public string? EvidenceHash { get; }
    public DateTimeOffset CreatedAt { get; }
    public ImmutableDictionary<string, string> Attributes { get; }

    // Null/whitespace collapses to null; otherwise the trimmed value.
    private static string? TrimToNull(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}
/// <summary>
/// Attestation metadata in a portable evidence bundle. Only the predicate type
/// is mandatory; digest, timestamp, Rekor reference, and signer are optional.
/// </summary>
public sealed record PortableBundleAttestation
{
    /// <exception cref="ArgumentException">When predicateType is null/whitespace.</exception>
    public PortableBundleAttestation(
        string predicateType,
        string? envelopeDigest,
        DateTimeOffset? signedAt,
        PortableRekorReference? rekor,
        PortableSignerInfo? signer)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(predicateType);
        PredicateType = predicateType.Trim();
        EnvelopeDigest = TrimToNull(envelopeDigest);
        SignedAt = signedAt;
        Rekor = rekor;
        Signer = signer;
    }

    public string PredicateType { get; }
    public string? EnvelopeDigest { get; }
    public DateTimeOffset? SignedAt { get; }
    public PortableRekorReference? Rekor { get; }
    public PortableSignerInfo? Signer { get; }

    // Null/whitespace collapses to null; otherwise the trimmed value.
    private static string? TrimToNull(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}
/// <summary>
/// Sigstore Rekor transparency log reference. API version and location are
/// mandatory; log index and inclusion-proof URI are optional.
/// </summary>
public sealed record PortableRekorReference
{
    /// <exception cref="ArgumentException">When apiVersion or location is null/whitespace.</exception>
    public PortableRekorReference(
        string apiVersion,
        string location,
        string? logIndex,
        string? inclusionProofUri)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(apiVersion);
        ArgumentException.ThrowIfNullOrWhiteSpace(location);
        ApiVersion = apiVersion.Trim();
        Location = location.Trim();
        LogIndex = TrimToNull(logIndex);
        InclusionProofUri = TrimToNull(inclusionProofUri);
    }

    public string ApiVersion { get; }
    public string Location { get; }
    public string? LogIndex { get; }
    public string? InclusionProofUri { get; }

    // Null/whitespace collapses to null; otherwise the trimmed value.
    private static string? TrimToNull(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}
/// <summary>
/// Signer information for attestations. Key id and algorithm are mandatory;
/// issuer and subject are optional.
/// </summary>
public sealed record PortableSignerInfo
{
    /// <exception cref="ArgumentException">When keyId or algorithm is null/whitespace.</exception>
    public PortableSignerInfo(
        string keyId,
        string algorithm,
        string? issuer,
        string? subject)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);
        ArgumentException.ThrowIfNullOrWhiteSpace(algorithm);
        KeyId = keyId.Trim();
        Algorithm = algorithm.Trim();
        Issuer = TrimToNull(issuer);
        Subject = TrimToNull(subject);
    }

    public string KeyId { get; }
    public string Algorithm { get; }
    public string? Issuer { get; }
    public string? Subject { get; }

    // Null/whitespace collapses to null; otherwise the trimmed value.
    private static string? TrimToNull(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}
/// <summary>
/// Provenance information for a portable evidence bundle: content digest,
/// publisher, source providers, and free-form metadata.
/// </summary>
public sealed record PortableBundleProvenance
{
    /// <exception cref="ArgumentException">When contentDigest or publisher is null/whitespace.</exception>
    public PortableBundleProvenance(
        string contentDigest,
        string publisher,
        ImmutableArray<string> sourceProviders,
        ImmutableDictionary<string, string>? metadata)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(contentDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(publisher);
        ContentDigest = contentDigest.Trim();
        Publisher = publisher.Trim();
        // A default (uninitialized) array would throw on use; normalize to empty.
        if (sourceProviders.IsDefault)
        {
            sourceProviders = ImmutableArray<string>.Empty;
        }
        SourceProviders = sourceProviders;
        Metadata = metadata ?? ImmutableDictionary<string, string>.Empty;
    }

    public string ContentDigest { get; }
    public string Publisher { get; }
    public ImmutableArray<string> SourceProviders { get; }
    public ImmutableDictionary<string, string> Metadata { get; }
}
/// <summary>
/// Request to build a portable evidence bundle. The timeline limit defaults to
/// 100 (for null or non-positive input) and is capped at 1000.
/// </summary>
/// <remarks>
/// NOTE(review): a record with the same name exists in
/// StellaOps.Excititor.Core.Evidence - confirm both namespaces are never
/// imported together without an alias.
/// </remarks>
public sealed record PortableEvidenceBundleRequest
{
    /// <exception cref="ArgumentException">When vulnerabilityId or tenantId is null/whitespace.</exception>
    public PortableEvidenceBundleRequest(
        string vulnerabilityId,
        string? productKey,
        string tenantId,
        bool includeTimeline,
        bool includeConsensus,
        int? timelineLimit)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId);
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        VulnerabilityId = vulnerabilityId.Trim();
        ProductKey = string.IsNullOrWhiteSpace(productKey) ? null : productKey.Trim();
        TenantId = tenantId.Trim();
        IncludeTimeline = includeTimeline;
        IncludeConsensus = includeConsensus;
        // null -> 0 via GetValueOrDefault, so null and <= 0 both take the default.
        var limit = timelineLimit.GetValueOrDefault();
        TimelineLimit = limit <= 0 ? 100 : Math.Min(limit, 1000);
    }

    public string VulnerabilityId { get; }
    public string? ProductKey { get; }
    public string TenantId { get; }
    public bool IncludeTimeline { get; }
    public bool IncludeConsensus { get; }
    public int TimelineLimit { get; }
}

View File

@@ -0,0 +1,43 @@
namespace StellaOps.Excititor.Core;
/// <summary>
/// Interface for recording VEX normalization telemetry (EXCITITOR-VULN-29-004).
/// Implementations wire metrics and structured logs to observability backends
/// for Vuln Explorer and Advisory AI dashboards. Implementations are expected
/// to be cheap and non-throwing so callers can record from hot paths.
/// </summary>
public interface IVexNormalizationTelemetryRecorder
{
    /// <summary>
    /// Records a normalization error that occurred during claim extraction.
    /// </summary>
    /// <param name="tenant">Tenant identifier (null for default tenant).</param>
    /// <param name="provider">Provider ID that sourced the document.</param>
    /// <param name="errorType">Error classification (e.g., "unsupported_format", "normalization_exception", "validation_error").</param>
    /// <param name="detail">Optional error detail message.</param>
    void RecordNormalizationError(string? tenant, string provider, string errorType, string? detail = null);

    /// <summary>
    /// Records a suppression scope application affecting VEX statements.
    /// </summary>
    /// <param name="tenant">Tenant identifier (null for default tenant).</param>
    /// <param name="scopeType">Type of suppression scope (e.g., "provider", "product", "vulnerability").</param>
    /// <param name="affectedStatements">Number of statements affected by the suppression.</param>
    void RecordSuppressionScope(string? tenant, string scopeType, int affectedStatements);

    /// <summary>
    /// Records detection of a withdrawn VEX statement.
    /// </summary>
    /// <param name="tenant">Tenant identifier (null for default tenant).</param>
    /// <param name="provider">Provider ID that issued the withdrawal.</param>
    /// <param name="replacementId">Optional replacement statement ID if superseded.</param>
    void RecordWithdrawnStatement(string? tenant, string provider, string? replacementId = null);

    /// <summary>
    /// Records batch withdrawn statement processing.
    /// </summary>
    /// <param name="tenant">Tenant identifier (null for default tenant).</param>
    /// <param name="provider">Provider ID that issued the withdrawals.</param>
    /// <param name="totalWithdrawn">Total number of withdrawn statements.</param>
    /// <param name="replacements">Number of statements with replacements.</param>
    void RecordWithdrawnStatements(string? tenant, string provider, int totalWithdrawn, int replacements);
}

View File

@@ -0,0 +1,276 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Observations;
namespace StellaOps.Excititor.Export;
/// <summary>
/// Service for building portable evidence bundles with timeline and attestation metadata (EXCITITOR-AIRGAP-58-001).
/// Bundles can be exported for sealed deployments and verified by Advisory AI teams.
/// </summary>
/// <remarks>
/// NOTE(review): an interface with the same name exists in
/// StellaOps.Excititor.Core.Evidence with a different shape - confirm the two
/// namespaces are never imported together without an alias.
/// </remarks>
public interface IPortableEvidenceBundleBuilder
{
    /// <summary>
    /// Builds a portable evidence bundle from claims and optional timeline events.
    /// </summary>
    ValueTask<PortableEvidenceBundle> BuildAsync(
        PortableEvidenceBundleRequest request,
        IReadOnlyCollection<VexClaim> claims,
        VexConsensus? consensus,
        IReadOnlyCollection<TimelineEvent>? timelineEvents,
        VexAttestationMetadata? attestation,
        CancellationToken cancellationToken);

    /// <summary>
    /// Serializes a portable evidence bundle to canonical JSON.
    /// </summary>
    string Serialize(PortableEvidenceBundle bundle);

    /// <summary>
    /// Computes the content digest of a portable evidence bundle.
    /// </summary>
    string ComputeDigest(PortableEvidenceBundle bundle);
}
/// <summary>
/// Default implementation of <see cref="IPortableEvidenceBundleBuilder"/>.
/// Output is deterministic for a given input set: claims, source providers, and
/// quiet-provenance groups are ordered with ordinal comparisons, digests are
/// computed over canonical JSON, and all numeric metadata is formatted with the
/// invariant culture.
/// </summary>
public sealed class PortableEvidenceBundleBuilder : IPortableEvidenceBundleBuilder
{
    private const string PublisherName = "StellaOps.Excititor";

    private readonly TimeProvider _timeProvider;
    private readonly ILogger<PortableEvidenceBundleBuilder> _logger;

    public PortableEvidenceBundleBuilder(
        TimeProvider timeProvider,
        ILogger<PortableEvidenceBundleBuilder> logger)
    {
        _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public ValueTask<PortableEvidenceBundle> BuildAsync(
        PortableEvidenceBundleRequest request,
        IReadOnlyCollection<VexClaim> claims,
        VexConsensus? consensus,
        IReadOnlyCollection<TimelineEvent>? timelineEvents,
        VexAttestationMetadata? attestation,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentNullException.ThrowIfNull(claims);
        cancellationToken.ThrowIfCancellationRequested();

        var generatedAt = _timeProvider.GetUtcNow();
        var bundleId = GenerateBundleId(request, generatedAt);

        // Order claims deterministically so repeated builds over the same inputs
        // produce identical payloads (and therefore identical content digests).
        var orderedClaims = claims
            .OrderBy(c => c.VulnerabilityId, StringComparer.Ordinal)
            .ThenBy(c => c.Product.Key, StringComparer.Ordinal)
            .ThenBy(c => c.ProviderId, StringComparer.Ordinal)
            .ThenBy(c => c.Document.Digest, StringComparer.Ordinal)
            .ToImmutableArray();

        var quietProvenance = ExtractQuietProvenance(orderedClaims);
        var content = new PortableEvidenceBundleContent(
            request.VulnerabilityId,
            request.ProductKey,
            orderedClaims,
            request.IncludeConsensus ? consensus : null,
            quietProvenance);

        var timeline = MapTimelineEvents(timelineEvents, request.TimelineLimit);
        var bundleAttestation = MapAttestation(attestation);

        // Distinct provider IDs (case-insensitive), ordinally sorted for determinism.
        var sourceProviders = orderedClaims
            .Select(c => c.ProviderId)
            .Distinct(StringComparer.OrdinalIgnoreCase)
            .OrderBy(p => p, StringComparer.Ordinal)
            .ToImmutableArray();

        // The digest covers the content only; provenance is layered on afterwards
        // so it can embed the digest without altering it.
        var contentDigest = ComputeContentDigest(content);

        // NOTE(review): "hasConsensus"/"hasAttestation" reflect the *inputs*, not
        // the bundle content — hasConsensus can read "true" even when
        // IncludeConsensus is false and the consensus was omitted from the payload.
        // Confirm this is intended before relying on the flag downstream.
        var provenance = new PortableBundleProvenance(
            contentDigest,
            PublisherName,
            sourceProviders,
            ImmutableDictionary<string, string>.Empty
                .Add("schemaVersion", PortableEvidenceBundle.SchemaVersion.ToString())
                .Add("claimCount", orderedClaims.Length.ToString(CultureInfo.InvariantCulture))
                .Add("hasConsensus", (consensus is not null).ToString().ToLowerInvariant())
                .Add("hasAttestation", (attestation is not null).ToString().ToLowerInvariant())
                .Add("timelineCount", timeline.Length.ToString(CultureInfo.InvariantCulture)));

        var bundle = new PortableEvidenceBundle(
            bundleId,
            generatedAt,
            request.TenantId,
            content,
            timeline,
            bundleAttestation,
            provenance);

        _logger.LogInformation(
            "Built portable evidence bundle {BundleId} for {VulnerabilityId}/{ProductKey}: claims={ClaimCount} timeline={TimelineCount}",
            bundleId,
            request.VulnerabilityId,
            request.ProductKey ?? "(all)",
            orderedClaims.Length,
            timeline.Length);

        return ValueTask.FromResult(bundle);
    }

    /// <inheritdoc />
    public string Serialize(PortableEvidenceBundle bundle)
    {
        ArgumentNullException.ThrowIfNull(bundle);
        return VexCanonicalJsonSerializer.Serialize(bundle);
    }

    /// <inheritdoc />
    public string ComputeDigest(PortableEvidenceBundle bundle)
    {
        ArgumentNullException.ThrowIfNull(bundle);
        var json = Serialize(bundle);
        var bytes = Encoding.UTF8.GetBytes(json);
        return ComputeSha256Digest(bytes);
    }

    /// <summary>
    /// Derives a short bundle ID ("peb-" + 16 lowercase hex chars) from the request
    /// coordinates and the generation timestamp. Because the millisecond timestamp
    /// is part of the hash input, IDs are unique per build, not per request.
    /// </summary>
    private static string GenerateBundleId(PortableEvidenceBundleRequest request, DateTimeOffset generatedAt)
    {
        var components = new[]
        {
            request.VulnerabilityId,
            request.ProductKey ?? "_",
            request.TenantId,
            generatedAt.ToUnixTimeMilliseconds().ToString(CultureInfo.InvariantCulture),
        };

        var input = string.Join(":", components);
        var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));

        // First 8 bytes of the SHA-256 -> 16 hex characters.
        var shortHash = Convert.ToHexString(hashBytes[..8]).ToLowerInvariant();
        return $"peb-{shortHash}";
    }

    /// <summary>
    /// Groups signed claims by (vulnerability, product) and projects each group into
    /// a quiet-provenance entry. Claims without a document signature are excluded.
    /// Result is ordinally sorted by vulnerability then product key.
    /// </summary>
    private static ImmutableArray<VexQuietProvenance> ExtractQuietProvenance(ImmutableArray<VexClaim> claims)
    {
        var grouped = claims
            .Where(c => c.Document.Signature is not null)
            .GroupBy(c => (c.VulnerabilityId, c.Product.Key))
            .ToList();

        if (grouped.Count == 0)
        {
            return ImmutableArray<VexQuietProvenance>.Empty;
        }

        var provenance = new List<VexQuietProvenance>();
        foreach (var group in grouped)
        {
            var statements = group
                .Select(claim => new VexQuietStatement(
                    claim.ProviderId,
                    claim.Document.Digest,
                    claim.Justification,
                    claim.Document.Signature))
                .ToList();

            provenance.Add(new VexQuietProvenance(
                group.Key.VulnerabilityId,
                group.Key.Key,
                statements));
        }

        return provenance
            .OrderBy(p => p.VulnerabilityId, StringComparer.Ordinal)
            .ThenBy(p => p.ProductKey, StringComparer.Ordinal)
            .ToImmutableArray();
    }

    /// <summary>
    /// Maps at most <paramref name="limit"/> timeline events (newest first; ties
    /// broken ordinally by event ID) into portable timeline entries.
    /// </summary>
    private static ImmutableArray<PortableTimelineEntry> MapTimelineEvents(
        IReadOnlyCollection<TimelineEvent>? events,
        int limit)
    {
        if (events is null || events.Count == 0)
        {
            return ImmutableArray<PortableTimelineEntry>.Empty;
        }

        return events
            .OrderByDescending(e => e.CreatedAt)
            .ThenBy(e => e.EventId, StringComparer.Ordinal)
            .Take(limit)
            .Select(e => new PortableTimelineEntry(
                e.EventId,
                e.EventType,
                e.ProviderId,
                e.TraceId,
                e.JustificationSummary,
                e.EvidenceHash,
                e.CreatedAt,
                e.Attributes))
            .ToImmutableArray();
    }

    /// <summary>
    /// Projects attestation metadata (and its optional Rekor reference) into the
    /// portable representation. Returns null when no attestation was supplied.
    /// </summary>
    private static PortableBundleAttestation? MapAttestation(VexAttestationMetadata? attestation)
    {
        if (attestation is null)
        {
            return null;
        }

        PortableRekorReference? rekor = null;
        if (attestation.Rekor is { } rekorRef)
        {
            rekor = new PortableRekorReference(
                rekorRef.ApiVersion,
                rekorRef.Location,
                rekorRef.LogIndex,
                rekorRef.InclusionProofUri?.ToString());
        }

        return new PortableBundleAttestation(
            attestation.PredicateType,
            attestation.EnvelopeDigest,
            attestation.SignedAt,
            rekor,
            signer: null); // Signer info not available in attestation metadata
    }

    /// <summary>Digest of the bundle content section alone (canonical JSON, SHA-256).</summary>
    private static string ComputeContentDigest(PortableEvidenceBundleContent content)
    {
        var json = VexCanonicalJsonSerializer.Serialize(content);
        var bytes = Encoding.UTF8.GetBytes(json);
        return ComputeSha256Digest(bytes);
    }

    /// <summary>Formats a SHA-256 over <paramref name="content"/> as "sha256:&lt;lowercase hex&gt;".</summary>
    private static string ComputeSha256Digest(ReadOnlySpan<byte> content)
    {
        Span<byte> hash = stackalloc byte[SHA256.HashSizeInBytes];
        SHA256.HashData(content, hash);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
@@ -8,6 +9,26 @@ namespace StellaOps.Excititor.Storage.Mongo;
public interface IAirgapImportStore
{
    /// <summary>
    /// Persists an air-gap import record. Implementations may throw
    /// <see cref="DuplicateAirgapImportException"/> when the same bundle/generation
    /// pair was already recorded.
    /// </summary>
    Task SaveAsync(AirgapImportRecord record, CancellationToken cancellationToken);

    /// <summary>
    /// Finds an import record for a tenant by bundle ID. When
    /// <paramref name="mirrorGeneration"/> is null or blank, the record with the
    /// highest mirror generation is returned; otherwise the exact generation match.
    /// Returns null when no record matches.
    /// </summary>
    Task<AirgapImportRecord?> FindByBundleIdAsync(
        string tenantId,
        string bundleId,
        string? mirrorGeneration,
        CancellationToken cancellationToken);

    /// <summary>
    /// Lists import records for a tenant, newest import first, with optional
    /// publisher and imported-after filters plus limit/offset paging.
    /// </summary>
    Task<IReadOnlyList<AirgapImportRecord>> ListAsync(
        string tenantId,
        string? publisherFilter,
        DateTimeOffset? importedAfter,
        int limit,
        int offset,
        CancellationToken cancellationToken);

    /// <summary>
    /// Counts import records matching the same filters as
    /// <see cref="ListAsync"/> (paging parameters excluded).
    /// </summary>
    Task<int> CountAsync(
        string tenantId,
        string? publisherFilter,
        DateTimeOffset? importedAfter,
        CancellationToken cancellationToken);
}
public sealed class DuplicateAirgapImportException : Exception
@@ -58,4 +79,95 @@ internal sealed class MongoAirgapImportStore : IAirgapImportStore
throw new DuplicateAirgapImportException(record.BundleId, record.MirrorGeneration, ex);
}
}
/// <summary>
/// Finds an import record by bundle ID for a tenant. With no generation filter,
/// the record with the highest mirror generation (descending sort) wins;
/// returns null when nothing matches.
/// </summary>
public async Task<AirgapImportRecord?> FindByBundleIdAsync(
    string tenantId,
    string bundleId,
    string? mirrorGeneration,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(tenantId);
    ArgumentNullException.ThrowIfNull(bundleId);

    var builder = Builders<AirgapImportRecord>.Filter;
    var clauses = new List<FilterDefinition<AirgapImportRecord>>
    {
        builder.Eq(x => x.TenantId, tenantId),
        builder.Eq(x => x.BundleId, bundleId),
    };

    // Narrow to an exact generation only when one was actually supplied.
    if (!string.IsNullOrWhiteSpace(mirrorGeneration))
    {
        clauses.Add(builder.Eq(x => x.MirrorGeneration, mirrorGeneration));
    }

    return await _collection
        .Find(builder.And(clauses))
        .Sort(Builders<AirgapImportRecord>.Sort.Descending(x => x.MirrorGeneration))
        .FirstOrDefaultAsync(cancellationToken)
        .ConfigureAwait(false);
}
/// <summary>
/// Lists import records for a tenant, newest import first, with optional publisher
/// and imported-after filters. <paramref name="limit"/> is clamped to [1, 1000];
/// a negative <paramref name="offset"/> is treated as 0 so the driver never
/// receives an invalid skip value.
/// </summary>
public async Task<IReadOnlyList<AirgapImportRecord>> ListAsync(
    string tenantId,
    string? publisherFilter,
    DateTimeOffset? importedAfter,
    int limit,
    int offset,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(tenantId);

    var filter = BuildListFilter(tenantId, publisherFilter, importedAfter);
    var sort = Builders<AirgapImportRecord>.Sort.Descending(x => x.ImportedAt);

    return await _collection
        .Find(filter)
        .Sort(sort)
        .Skip(Math.Max(offset, 0))
        .Limit(Math.Clamp(limit, 1, 1000))
        .ToListAsync(cancellationToken)
        .ConfigureAwait(false);
}
/// <summary>
/// Counts import records matching the same tenant/publisher/imported-after filter
/// used by ListAsync. Mongo returns a 64-bit count, so the result is capped at
/// <see cref="int.MaxValue"/>.
/// </summary>
public async Task<int> CountAsync(
    string tenantId,
    string? publisherFilter,
    DateTimeOffset? importedAfter,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(tenantId);

    var matching = await _collection
        .CountDocumentsAsync(
            BuildListFilter(tenantId, publisherFilter, importedAfter),
            cancellationToken: cancellationToken)
        .ConfigureAwait(false);

    return matching > int.MaxValue ? int.MaxValue : (int)matching;
}
/// <summary>
/// Builds the filter shared by list and count queries: tenant equality plus an
/// optional publisher equality and an inclusive lower bound on the import time.
/// </summary>
private static FilterDefinition<AirgapImportRecord> BuildListFilter(
    string tenantId,
    string? publisherFilter,
    DateTimeOffset? importedAfter)
{
    var builder = Builders<AirgapImportRecord>.Filter;
    var clauses = new List<FilterDefinition<AirgapImportRecord>>
    {
        builder.Eq(x => x.TenantId, tenantId),
    };

    if (!string.IsNullOrWhiteSpace(publisherFilter))
    {
        clauses.Add(builder.Eq(x => x.Publisher, publisherFilter));
    }

    if (importedAfter.HasValue)
    {
        clauses.Add(builder.Gte(x => x.ImportedAt, importedAfter.Value));
    }

    return builder.And(clauses);
}
}

View File

@@ -34,6 +34,11 @@ public interface IVexClaimStore
ValueTask AppendAsync(IEnumerable<VexClaim> claims, DateTimeOffset observedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null);
ValueTask<IReadOnlyCollection<VexClaim>> FindAsync(string vulnerabilityId, string productKey, DateTimeOffset? since, CancellationToken cancellationToken, IClientSessionHandle? session = null);
/// <summary>
/// Retrieves all claims for a specific vulnerability ID (EXCITITOR-VULN-29-002).
/// </summary>
ValueTask<IReadOnlyCollection<VexClaim>> FindByVulnerabilityAsync(string vulnerabilityId, int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null);
}
public sealed record VexConnectorState(

View File

@@ -64,4 +64,23 @@ public sealed class MongoVexClaimStore : IVexClaimStore
return records.ConvertAll(static record => record.ToDomain());
}
/// <summary>
/// Retrieves up to <paramref name="limit"/> claims for a vulnerability ID
/// (EXCITITOR-VULN-29-002), most recently inserted first.
/// </summary>
/// <param name="vulnerabilityId">Vulnerability identifier; trimmed before matching.</param>
/// <param name="limit">Maximum number of claims to return; must be positive.</param>
/// <param name="cancellationToken">Token used to cancel the query.</param>
/// <param name="session">Optional Mongo session for causal consistency / transactions.</param>
/// <exception cref="ArgumentException">When <paramref name="vulnerabilityId"/> is null or whitespace.</exception>
/// <exception cref="ArgumentOutOfRangeException">When <paramref name="limit"/> is zero or negative.</exception>
public async ValueTask<IReadOnlyCollection<VexClaim>> FindByVulnerabilityAsync(string vulnerabilityId, int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId);
    // Guard: the Mongo driver treats Limit(0) as "no limit", so an unvalidated
    // limit of 0 would silently return every matching record instead of none.
    ArgumentOutOfRangeException.ThrowIfNegativeOrZero(limit);

    var filter = Builders<VexStatementRecord>.Filter.Eq(x => x.VulnerabilityId, vulnerabilityId.Trim());
    var find = session is null
        ? _collection.Find(filter)
        : _collection.Find(session, filter);

    var records = await find
        .SortByDescending(x => x.InsertedAt)
        .Limit(limit)
        .ToListAsync(cancellationToken)
        .ConfigureAwait(false);

    return records.ConvertAll(static record => record.ToDomain());
}
}

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
@@ -9,6 +10,7 @@ namespace StellaOps.Excititor.Storage.Mongo;
/// <summary>
/// Normalizer router that resolves providers from Mongo storage before invoking the format-specific normalizer.
/// Records telemetry for normalization operations (EXCITITOR-VULN-29-004).
/// </summary>
public sealed class StorageBackedVexNormalizerRouter : IVexNormalizerRouter
{
@@ -16,17 +18,20 @@ public sealed class StorageBackedVexNormalizerRouter : IVexNormalizerRouter
private readonly IVexProviderStore _providerStore;
private readonly IVexMongoSessionProvider _sessionProvider;
private readonly ILogger<StorageBackedVexNormalizerRouter> _logger;
private readonly IVexNormalizationTelemetryRecorder? _telemetryRecorder;
/// <summary>
/// Creates the router. The telemetry recorder is optional; when omitted,
/// normalization telemetry is simply not recorded.
/// </summary>
public StorageBackedVexNormalizerRouter(
    IEnumerable<IVexNormalizer> normalizers,
    IVexProviderStore providerStore,
    IVexMongoSessionProvider sessionProvider,
    ILogger<StorageBackedVexNormalizerRouter> logger,
    IVexNormalizationTelemetryRecorder? telemetryRecorder = null)
{
    ArgumentNullException.ThrowIfNull(normalizers);
    ArgumentNullException.ThrowIfNull(providerStore);
    ArgumentNullException.ThrowIfNull(sessionProvider);
    ArgumentNullException.ThrowIfNull(logger);

    _providerStore = providerStore;
    _sessionProvider = sessionProvider;
    _logger = logger;
    _telemetryRecorder = telemetryRecorder;
    _registry = new VexNormalizerRegistry(normalizers.ToImmutableArray());
}
@@ -35,10 +40,23 @@ public sealed class StorageBackedVexNormalizerRouter : IVexNormalizerRouter
{
ArgumentNullException.ThrowIfNull(document);
var stopwatch = Stopwatch.StartNew();
var normalizer = _registry.Resolve(document);
if (normalizer is null)
{
_logger.LogWarning("No normalizer registered for VEX document format {Format}. Skipping normalization for {Digest}.", document.Format, document.Digest);
stopwatch.Stop();
_logger.LogWarning(
"No normalizer registered for VEX document format {Format}. Skipping normalization for {Digest} from provider {ProviderId}.",
document.Format,
document.Digest,
document.ProviderId);
_telemetryRecorder?.RecordNormalizationError(
tenant: null,
document.ProviderId,
"unsupported_format",
$"No normalizer for format {document.Format}");
return new VexClaimBatch(
document,
ImmutableArray<VexClaim>.Empty,
@@ -49,6 +67,48 @@ public sealed class StorageBackedVexNormalizerRouter : IVexNormalizerRouter
var provider = await _providerStore.FindAsync(document.ProviderId, cancellationToken, session).ConfigureAwait(false)
?? new VexProvider(document.ProviderId, document.ProviderId, VexProviderKind.Vendor);
return await normalizer.NormalizeAsync(document, provider, cancellationToken).ConfigureAwait(false);
try
{
var batch = await normalizer.NormalizeAsync(document, provider, cancellationToken).ConfigureAwait(false);
stopwatch.Stop();
if (batch.Claims.IsDefaultOrEmpty || batch.Claims.Length == 0)
{
_logger.LogDebug(
"Normalization produced no claims for document {Digest} from provider {ProviderId}.",
document.Digest,
document.ProviderId);
}
else
{
_logger.LogDebug(
"Normalization produced {ClaimCount} claims for document {Digest} from provider {ProviderId} in {Duration}ms.",
batch.Claims.Length,
document.Digest,
document.ProviderId,
stopwatch.Elapsed.TotalMilliseconds);
}
return batch;
}
catch (Exception ex) when (ex is not OperationCanceledException)
{
stopwatch.Stop();
_logger.LogError(
ex,
"Normalization failed for document {Digest} from provider {ProviderId} after {Duration}ms: {Message}",
document.Digest,
document.ProviderId,
stopwatch.Elapsed.TotalMilliseconds,
ex.Message);
_telemetryRecorder?.RecordNormalizationError(
tenant: null,
document.ProviderId,
"normalization_exception",
ex.Message);
throw;
}
}
}