up
Some checks failed
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Docs CI / lint-and-preview (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
devportal-offline / build-offline (push) Has been cancelled
Mirror Thin Bundle Sign & Verify / mirror-sign (push) Has been cancelled

This commit is contained in:
StellaOps Bot
2025-11-28 00:45:16 +02:00
parent 3b96b2e3ea
commit 1c6730a1d2
95 changed files with 14504 additions and 463 deletions

View File

@@ -0,0 +1,487 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Globalization;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Excititor.Core.Evidence;
/// <summary>
/// Builds portable evidence bundles for sealed deployments with timeline and attestation metadata (EXCITITOR-AIRGAP-58-001).
/// </summary>
public interface IPortableEvidenceBundleBuilder
{
    /// <summary>
    /// Builds a zip-based evidence bundle from <paramref name="request"/> and returns
    /// the digests and verification metadata of the produced archive.
    /// </summary>
    /// <param name="request">Bundle inputs: tenant, locker manifest, evidence items, timeline, optional attestation.</param>
    /// <param name="cancellationToken">Token used to cancel the build.</param>
    /// <returns>Result describing the bundle id, suggested file name, digests, and verification data.</returns>
    Task<PortableEvidenceBundleResult> BuildAsync(
        PortableEvidenceBundleRequest request,
        CancellationToken cancellationToken);
}
/// <summary>
/// Request for building a portable evidence bundle.
/// </summary>
/// <param name="Tenant">Tenant identifier; sanitized for use in the bundle id and archive file name.</param>
/// <param name="Manifest">Evidence locker manifest written to <c>manifest.json</c>; supplies the Merkle root.</param>
/// <param name="EvidenceItems">Raw evidence payloads embedded under <c>evidence/</c> in the archive.</param>
/// <param name="Attestation">Optional DSSE attestation metadata; written only when an envelope is present.</param>
/// <param name="Timeline">Audit timeline events written to <c>timeline.json</c> (entry omitted when empty).</param>
/// <param name="AdditionalMetadata">Optional extra key/value pairs copied into the bundle manifest's metadata.</param>
public sealed record PortableEvidenceBundleRequest(
    string Tenant,
    VexLockerManifest Manifest,
    IReadOnlyList<PortableEvidenceItem> EvidenceItems,
    PortableEvidenceAttestationMetadata? Attestation,
    IReadOnlyList<PortableEvidenceTimelineEntry> Timeline,
    ImmutableDictionary<string, string>? AdditionalMetadata = null);
/// <summary>
/// Individual evidence item to include in the bundle.
/// </summary>
/// <param name="ObservationId">Identifier of the observation this evidence belongs to; primary sort key in the bundle index.</param>
/// <param name="ProviderId">Provider that sourced the evidence; becomes a sanitized directory segment under <c>evidence/</c>.</param>
/// <param name="ContentHash">Content digest (e.g. <c>sha256:...</c>); colons are replaced to form the archive file name.</param>
/// <param name="Content">Raw evidence bytes written verbatim into the archive.</param>
/// <param name="Format">Optional format hint ("json", "jsonl", "openvex", "csaf", "cyclonedx"); drives the file extension, unknown formats fall back to <c>.bin</c>.</param>
public sealed record PortableEvidenceItem(
    string ObservationId,
    string ProviderId,
    string ContentHash,
    ReadOnlyMemory<byte> Content,
    string? Format);
/// <summary>
/// Attestation metadata for the bundle.
/// </summary>
/// <param name="DsseEnvelopeJson">Serialized DSSE envelope; the attestation entry is only written when this is non-null.</param>
/// <param name="EnvelopeDigest">Optional digest of the envelope.</param>
/// <param name="PredicateType">Optional in-toto predicate type.</param>
/// <param name="SignatureType">Optional signature scheme identifier.</param>
/// <param name="KeyId">Optional identifier of the signing key.</param>
/// <param name="Issuer">Optional issuer of the signing identity.</param>
/// <param name="Subject">Optional subject of the signing identity.</param>
/// <param name="SignedAt">Optional signing timestamp; serialized in round-trip ("O") format.</param>
/// <param name="TransparencyLogRef">Optional transparency log reference (e.g. Rekor entry).</param>
public sealed record PortableEvidenceAttestationMetadata(
    string? DsseEnvelopeJson,
    string? EnvelopeDigest,
    string? PredicateType,
    string? SignatureType,
    string? KeyId,
    string? Issuer,
    string? Subject,
    DateTimeOffset? SignedAt,
    string? TransparencyLogRef);
/// <summary>
/// Timeline entry for audit trail in the bundle.
/// </summary>
/// <param name="EventType">Event classification (e.g. <c>airgap.import.failed</c>; see the VERIFY.md error-code table).</param>
/// <param name="CreatedAt">Event timestamp; entries are serialized in ascending chronological order.</param>
/// <param name="TenantId">Optional tenant the event applies to.</param>
/// <param name="BundleId">Optional bundle the event references.</param>
/// <param name="MirrorGeneration">Optional mirror generation marker.</param>
/// <param name="StalenessSeconds">Optional staleness measurement in seconds.</param>
/// <param name="ErrorCode">Optional machine-readable error code.</param>
/// <param name="Message">Optional human-readable message.</param>
public sealed record PortableEvidenceTimelineEntry(
    string EventType,
    DateTimeOffset CreatedAt,
    string? TenantId,
    string? BundleId,
    string? MirrorGeneration,
    int? StalenessSeconds,
    string? ErrorCode,
    string? Message);
/// <summary>
/// Result of building a portable evidence bundle.
/// </summary>
/// <param name="BundleId">Generated identifier of form <c>portable-evidence:&lt;tenant&gt;:&lt;timestamp&gt;:&lt;suffix&gt;</c>.</param>
/// <param name="BundlePath">Suggested zip file name for the bundle (a file name, not an absolute path).</param>
/// <param name="ManifestDigest">SHA-256 digest of the serialized locker manifest.</param>
/// <param name="BundleDigest">SHA-256 digest of the complete zip archive.</param>
/// <param name="BundleSizeBytes">Size of the archive in bytes.</param>
/// <param name="ItemCount">Number of evidence items included.</param>
/// <param name="CreatedAt">UTC timestamp the bundle was generated.</param>
/// <param name="Verification">Digest and attestation data for offline verification.</param>
public sealed record PortableEvidenceBundleResult(
    string BundleId,
    string BundlePath,
    string ManifestDigest,
    string BundleDigest,
    long BundleSizeBytes,
    int ItemCount,
    DateTimeOffset CreatedAt,
    PortableEvidenceBundleVerification Verification);
/// <summary>
/// Verification data for the bundle.
/// </summary>
/// <param name="MerkleRoot">Merkle root copied from the locker manifest.</param>
/// <param name="ManifestDigest">Digest of the <c>manifest.json</c> entry.</param>
/// <param name="BundleDigest">Digest of the whole archive.</param>
/// <param name="HasAttestation">Whether a DSSE envelope was embedded.</param>
/// <param name="AttestationDigest">Digest of the <c>attestation.json</c> entry when present.</param>
public sealed record PortableEvidenceBundleVerification(
    string MerkleRoot,
    string ManifestDigest,
    string BundleDigest,
    bool HasAttestation,
    string? AttestationDigest);
/// <summary>
/// Default implementation of portable evidence bundle builder.
/// </summary>
public sealed class PortableEvidenceBundleBuilder : IPortableEvidenceBundleBuilder
{
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    };

    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates the builder. Pass a custom <see cref="TimeProvider"/> for deterministic
    /// timestamps/bundle ids in tests; defaults to the system clock. Mirrors the
    /// TimeProvider injection used by the export-layer bundle builder.
    /// </summary>
    public PortableEvidenceBundleBuilder(TimeProvider? timeProvider = null)
        => _timeProvider = timeProvider ?? TimeProvider.System;

    /// <summary>
    /// Assembles the zip archive in memory, writing the locker manifest, optional
    /// attestation, evidence items, timeline, bundle index, and verifier guide,
    /// then computes digests over the result.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when the request or its required collections are null.</exception>
    /// <exception cref="OperationCanceledException">Thrown when <paramref name="cancellationToken"/> is cancelled.</exception>
    public Task<PortableEvidenceBundleResult> BuildAsync(
        PortableEvidenceBundleRequest request,
        CancellationToken cancellationToken)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentNullException.ThrowIfNull(request.Manifest);
        ArgumentNullException.ThrowIfNull(request.EvidenceItems);
        ArgumentNullException.ThrowIfNull(request.Timeline);
        cancellationToken.ThrowIfCancellationRequested();

        var createdAt = _timeProvider.GetUtcNow();
        var bundleId = GenerateBundleId(request.Tenant, createdAt);

        using var memoryStream = new MemoryStream();
        string manifestDigest;
        string? attestationDigest = null;

        using (var archive = new ZipArchive(memoryStream, ZipArchiveMode.Create, leaveOpen: true))
        {
            // 1. Write the locker manifest
            manifestDigest = WriteManifest(archive, request.Manifest);

            // 2. Write attestation if present
            if (request.Attestation?.DsseEnvelopeJson is not null)
            {
                attestationDigest = WriteAttestation(archive, request.Attestation);
            }

            // 3. Write evidence items
            WriteEvidenceItems(archive, request.EvidenceItems, cancellationToken);

            // 4. Write timeline
            WriteTimeline(archive, request.Timeline);

            // 5. Write bundle manifest (index of all contents)
            WriteBundleManifest(archive, request, bundleId, createdAt, manifestDigest, attestationDigest);

            // 6. Write verifier instructions
            WriteVerifierInstructions(archive);
        }

        memoryStream.Position = 0;

        // Hash the backing buffer directly; ToArray() would copy the whole archive.
        var bundleDigest = ComputeDigest(
            new ReadOnlySpan<byte>(memoryStream.GetBuffer(), 0, (int)memoryStream.Length));
        var fileTimestamp = createdAt.ToString("yyyyMMdd-HHmmss", CultureInfo.InvariantCulture);
        var bundlePath = $"evidence-bundle-{SanitizeForPath(request.Tenant)}-{fileTimestamp}.zip";

        var verification = new PortableEvidenceBundleVerification(
            request.Manifest.MerkleRoot,
            manifestDigest,
            bundleDigest,
            request.Attestation?.DsseEnvelopeJson is not null,
            attestationDigest);

        return Task.FromResult(new PortableEvidenceBundleResult(
            bundleId,
            bundlePath,
            manifestDigest,
            bundleDigest,
            memoryStream.Length,
            request.EvidenceItems.Count,
            createdAt,
            verification));
    }

    /// <summary>Builds a stable-format bundle id: tenant + timestamp + random 8-hex suffix.</summary>
    private static string GenerateBundleId(string tenant, DateTimeOffset timestamp)
    {
        var normalizedTenant = string.IsNullOrWhiteSpace(tenant) ? "default" : tenant.Trim().ToLowerInvariant();
        var date = timestamp.ToString("yyyyMMdd-HHmmss", CultureInfo.InvariantCulture);
        var randomSuffix = Guid.NewGuid().ToString("N")[..8];
        return $"portable-evidence:{normalizedTenant}:{date}:{randomSuffix}";
    }

    /// <summary>Writes the locker manifest as canonical JSON and returns its digest.</summary>
    private static string WriteManifest(ZipArchive archive, VexLockerManifest manifest)
    {
        var entry = archive.CreateEntry("manifest.json", CompressionLevel.Optimal);
        var json = VexCanonicalJsonSerializer.Serialize(manifest);
        var bytes = Encoding.UTF8.GetBytes(json);
        using (var stream = entry.Open())
        {
            stream.Write(bytes);
        }

        return ComputeDigest(bytes);
    }

    /// <summary>Writes attestation metadata as <c>attestation.json</c> and returns its digest.</summary>
    private static string WriteAttestation(ZipArchive archive, PortableEvidenceAttestationMetadata attestation)
    {
        var entry = archive.CreateEntry("attestation.json", CompressionLevel.Optimal);
        var attestationDoc = new PortableAttestationDocument(
            attestation.DsseEnvelopeJson,
            attestation.EnvelopeDigest,
            attestation.PredicateType,
            attestation.SignatureType,
            attestation.KeyId,
            attestation.Issuer,
            attestation.Subject,
            attestation.SignedAt?.ToString("O", CultureInfo.InvariantCulture),
            attestation.TransparencyLogRef);
        var json = JsonSerializer.Serialize(attestationDoc, SerializerOptions);
        var bytes = Encoding.UTF8.GetBytes(json);
        using (var stream = entry.Open())
        {
            stream.Write(bytes);
        }

        return ComputeDigest(bytes);
    }

    /// <summary>Writes each evidence payload under <c>evidence/&lt;provider&gt;/&lt;digest&gt;&lt;ext&gt;</c>.</summary>
    private static void WriteEvidenceItems(
        ZipArchive archive,
        IReadOnlyList<PortableEvidenceItem> items,
        CancellationToken cancellationToken)
    {
        foreach (var item in items)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var extension = GetExtension(item.Format);
            var entryPath = $"evidence/{SanitizeForPath(item.ProviderId)}/{SanitizeDigest(item.ContentHash)}{extension}";
            var entry = archive.CreateEntry(entryPath, CompressionLevel.Optimal);
            using var stream = entry.Open();
            stream.Write(item.Content.Span);
        }
    }

    /// <summary>Writes timeline events sorted chronologically; skipped when there are none.</summary>
    private static void WriteTimeline(ZipArchive archive, IReadOnlyList<PortableEvidenceTimelineEntry> timeline)
    {
        if (timeline.Count == 0)
        {
            return;
        }

        var entry = archive.CreateEntry("timeline.json", CompressionLevel.Optimal);
        var sortedTimeline = timeline
            .OrderBy(e => e.CreatedAt)
            .Select(e => new PortableTimelineEntryDocument(
                e.EventType,
                e.CreatedAt.ToString("O", CultureInfo.InvariantCulture),
                e.TenantId,
                e.BundleId,
                e.MirrorGeneration,
                e.StalenessSeconds,
                e.ErrorCode,
                e.Message))
            .ToList();
        var json = JsonSerializer.Serialize(sortedTimeline, SerializerOptions);
        using var stream = entry.Open();
        stream.Write(Encoding.UTF8.GetBytes(json));
    }

    /// <summary>Writes the <c>bundle-manifest.json</c> index describing all bundle contents.</summary>
    private static void WriteBundleManifest(
        ZipArchive archive,
        PortableEvidenceBundleRequest request,
        string bundleId,
        DateTimeOffset createdAt,
        string manifestDigest,
        string? attestationDigest)
    {
        var entry = archive.CreateEntry("bundle-manifest.json", CompressionLevel.Optimal);

        // Deterministic index ordering: observation id (ordinal), then provider id.
        var evidenceIndex = request.EvidenceItems
            .Select(item => new PortableBundleEvidenceEntry(
                item.ObservationId,
                item.ProviderId,
                item.ContentHash,
                item.Format ?? "json",
                item.Content.Length))
            .OrderBy(e => e.ObservationId, StringComparer.Ordinal)
            .ThenBy(e => e.ProviderId, StringComparer.OrdinalIgnoreCase)
            .ToList();

        var bundleManifest = new PortableBundleManifestDocument(
            SchemaVersion: 1,
            BundleId: bundleId,
            Tenant: request.Tenant,
            CreatedAt: createdAt.ToString("O", CultureInfo.InvariantCulture),
            ManifestDigest: manifestDigest,
            MerkleRoot: request.Manifest.MerkleRoot,
            ItemCount: request.EvidenceItems.Count,
            TimelineEventCount: request.Timeline.Count,
            HasAttestation: attestationDigest is not null,
            AttestationDigest: attestationDigest,
            Evidence: evidenceIndex,
            Metadata: request.AdditionalMetadata ?? ImmutableDictionary<string, string>.Empty);
        var json = JsonSerializer.Serialize(bundleManifest, SerializerOptions);
        using var stream = entry.Open();
        stream.Write(Encoding.UTF8.GetBytes(json));
    }

    /// <summary>Writes the human-readable VERIFY.md guide into the archive.</summary>
    private static void WriteVerifierInstructions(ZipArchive archive)
    {
        var entry = archive.CreateEntry("VERIFY.md", CompressionLevel.Optimal);
        var instructions = GetVerifierInstructions();
        using var stream = entry.Open();
        stream.Write(Encoding.UTF8.GetBytes(instructions));
    }

    private static string GetVerifierInstructions() => """
        # Portable Evidence Bundle Verification Guide
        This document describes how to verify the integrity and authenticity of this
        portable evidence bundle for Advisory AI teams.
        ## Bundle Contents
        - `manifest.json` - Evidence locker manifest with Merkle root
        - `attestation.json` - DSSE attestation envelope (if signed)
        - `evidence/` - Raw evidence items organized by provider
        - `timeline.json` - Audit timeline events
        - `bundle-manifest.json` - Index of all bundle contents
        ## Verification Steps
        ### Step 1: Verify Bundle Integrity
        1. Extract the bundle to a temporary directory
        2. Compute SHA-256 hash of each evidence file
        3. Compare against `contentHash` values in `manifest.json`
        ```bash
        # Example: Verify a single evidence file
        sha256sum evidence/provider-name/sha256_abc123.json
        ```
        ### Step 2: Verify Merkle Root
        1. Collect all `contentHash` values from `manifest.json` items
        2. Sort them by `observationId` then `providerId`
        3. Compute Merkle root using binary tree with SHA-256
        4. Compare against `merkleRoot` in `manifest.json`
        ```python
        # Pseudocode for Merkle root verification
        import hashlib
        def compute_merkle_root(hashes):
        if len(hashes) == 0:
        return hashlib.sha256(b'').hexdigest()
        if len(hashes) == 1:
        return hashes[0]
        if len(hashes) % 2 != 0:
        hashes.append(hashes[-1]) # Pad to even
        next_level = []
        for i in range(0, len(hashes), 2):
        combined = bytes.fromhex(hashes[i] + hashes[i+1])
        next_level.append(hashlib.sha256(combined).hexdigest())
        return compute_merkle_root(next_level)
        ```
        ### Step 3: Verify Attestation (if present)
        If `attestation.json` exists:
        1. Parse the DSSE envelope from `dsseEnvelope` field
        2. Verify the signature using the public key identified by `keyId`
        3. Optionally check transparency log reference at `transparencyLogRef`
        ```bash
        # Example: Verify with cosign (if Sigstore attestation)
        cosign verify-blob --signature attestation.sig --certificate attestation.crt manifest.json
        ```
        ### Step 4: Validate Timeline Consistency
        1. Parse `timeline.json`
        2. Verify events are in chronological order
        3. Check for any `airgap.import.failed` events with error codes
        4. Verify staleness values are within acceptable bounds
        ## Error Codes Reference
        | Code | Description |
        |------|-------------|
        | AIRGAP_EGRESS_BLOCKED | External URL blocked in sealed mode |
        | AIRGAP_SOURCE_UNTRUSTED | Publisher not in allowlist |
        | AIRGAP_SIGNATURE_MISSING | Required signature not provided |
        | AIRGAP_SIGNATURE_INVALID | Signature validation failed |
        | AIRGAP_PAYLOAD_STALE | Bundle timestamp exceeds skew tolerance |
        | AIRGAP_PAYLOAD_MISMATCH | Payload hash doesn't match metadata |
        ## Advisory AI Integration
        For automated verification in Advisory AI pipelines:
        1. Extract `bundle-manifest.json` for quick integrity check
        2. Use `merkleRoot` as the canonical bundle identifier
        3. Reference `attestationDigest` for cryptographic proof
        4. Parse `timeline.json` for provenance audit trail
        ## Support
        For questions about bundle verification, contact your StellaOps administrator
        or refer to the StellaOps documentation.
        ---
        Generated by StellaOps Excititor - Portable Evidence Bundle Builder
        """;

    /// <summary>Returns a lowercase "sha256:&lt;hex&gt;" digest of the data.</summary>
    private static string ComputeDigest(ReadOnlySpan<byte> data)
    {
        var hash = SHA256.HashData(data);
        return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant();
    }

    /// <summary>Lowercases and replaces any non [a-z0-9-_] character with '_' for safe archive paths.</summary>
    private static string SanitizeForPath(string value)
    {
        if (string.IsNullOrWhiteSpace(value))
        {
            return "unknown";
        }

        var builder = new StringBuilder(value.Length);
        foreach (var ch in value.ToLowerInvariant())
        {
            builder.Append(char.IsLetterOrDigit(ch) || ch == '-' || ch == '_' ? ch : '_');
        }

        return builder.ToString();
    }

    /// <summary>Makes a digest filename-safe ("sha256:abc" -> "sha256_abc").</summary>
    private static string SanitizeDigest(string digest)
    {
        return digest.Replace(":", "_");
    }

    /// <summary>Maps a format hint to a file extension; unknown formats become ".bin".</summary>
    private static string GetExtension(string? format)
        => format?.ToLowerInvariant() switch
        {
            "json" => ".json",
            "jsonlines" or "jsonl" => ".jsonl",
            "openvex" => ".json",
            "csaf" => ".json",
            "cyclonedx" => ".json",
            _ => ".bin",
        };
}
// Internal document types for serialization
/// <summary>Serialization shape for the <c>attestation.json</c> archive entry.</summary>
internal sealed record PortableAttestationDocument(
    [property: JsonPropertyName("dsseEnvelope")] string? DsseEnvelope,
    [property: JsonPropertyName("envelopeDigest")] string? EnvelopeDigest,
    [property: JsonPropertyName("predicateType")] string? PredicateType,
    [property: JsonPropertyName("signatureType")] string? SignatureType,
    [property: JsonPropertyName("keyId")] string? KeyId,
    [property: JsonPropertyName("issuer")] string? Issuer,
    [property: JsonPropertyName("subject")] string? Subject,
    [property: JsonPropertyName("signedAt")] string? SignedAt,
    [property: JsonPropertyName("transparencyLogRef")] string? TransparencyLogRef);
/// <summary>Serialization shape for a single event in the <c>timeline.json</c> archive entry.</summary>
internal sealed record PortableTimelineEntryDocument(
    [property: JsonPropertyName("eventType")] string EventType,
    [property: JsonPropertyName("createdAt")] string CreatedAt,
    [property: JsonPropertyName("tenantId")] string? TenantId,
    [property: JsonPropertyName("bundleId")] string? BundleId,
    [property: JsonPropertyName("mirrorGeneration")] string? MirrorGeneration,
    [property: JsonPropertyName("stalenessSeconds")] int? StalenessSeconds,
    [property: JsonPropertyName("errorCode")] string? ErrorCode,
    [property: JsonPropertyName("message")] string? Message);
/// <summary>Serialization shape for the <c>bundle-manifest.json</c> index of all bundle contents.</summary>
internal sealed record PortableBundleManifestDocument(
    [property: JsonPropertyName("schemaVersion")] int SchemaVersion,
    [property: JsonPropertyName("bundleId")] string BundleId,
    [property: JsonPropertyName("tenant")] string Tenant,
    [property: JsonPropertyName("createdAt")] string CreatedAt,
    [property: JsonPropertyName("manifestDigest")] string ManifestDigest,
    [property: JsonPropertyName("merkleRoot")] string MerkleRoot,
    [property: JsonPropertyName("itemCount")] int ItemCount,
    [property: JsonPropertyName("timelineEventCount")] int TimelineEventCount,
    [property: JsonPropertyName("hasAttestation")] bool HasAttestation,
    [property: JsonPropertyName("attestationDigest")] string? AttestationDigest,
    [property: JsonPropertyName("evidence")] IReadOnlyList<PortableBundleEvidenceEntry> Evidence,
    [property: JsonPropertyName("metadata")] IReadOnlyDictionary<string, string> Metadata);
/// <summary>Serialization shape for one evidence item row inside the bundle manifest index.</summary>
internal sealed record PortableBundleEvidenceEntry(
    [property: JsonPropertyName("observationId")] string ObservationId,
    [property: JsonPropertyName("providerId")] string ProviderId,
    [property: JsonPropertyName("contentHash")] string ContentHash,
    [property: JsonPropertyName("format")] string Format,
    [property: JsonPropertyName("sizeBytes")] int SizeBytes);

View File

@@ -0,0 +1,250 @@
using System.Collections.Immutable;
namespace StellaOps.Excititor.Core;
/// <summary>
/// Portable evidence bundle for sealed deployments (EXCITITOR-AIRGAP-58-001).
/// Contains evidence content, timeline events, and attestation metadata
/// for offline verification by Advisory AI teams.
/// </summary>
public sealed record PortableEvidenceBundle
{
    /// <summary>Schema version stamped into bundle provenance metadata.</summary>
    public const int SchemaVersion = 1;

    /// <summary>
    /// Creates a validated bundle. Required identifiers are trimmed; an uninitialized
    /// (default) <paramref name="timeline"/> array is normalized to empty.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="bundleId"/> or <paramref name="tenantId"/> is null or whitespace.</exception>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="content"/> or <paramref name="provenance"/> is null.</exception>
    public PortableEvidenceBundle(
        string bundleId,
        DateTimeOffset generatedAt,
        string tenantId,
        PortableEvidenceBundleContent content,
        ImmutableArray<PortableTimelineEntry> timeline,
        PortableBundleAttestation? attestation,
        PortableBundleProvenance provenance)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(bundleId);
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
        ArgumentNullException.ThrowIfNull(content);
        ArgumentNullException.ThrowIfNull(provenance);

        BundleId = bundleId.Trim();
        GeneratedAt = generatedAt;
        TenantId = tenantId.Trim();
        Content = content;
        // ImmutableArray is a struct: default means "never assigned", not "empty".
        Timeline = timeline.IsDefault ? ImmutableArray<PortableTimelineEntry>.Empty : timeline;
        Attestation = attestation;
        Provenance = provenance;
    }

    /// <summary>Unique bundle identifier.</summary>
    public string BundleId { get; }

    /// <summary>UTC timestamp the bundle was generated.</summary>
    public DateTimeOffset GeneratedAt { get; }

    /// <summary>Owning tenant identifier.</summary>
    public string TenantId { get; }

    /// <summary>Evidence payload: claims, optional consensus, quiet provenance.</summary>
    public PortableEvidenceBundleContent Content { get; }

    /// <summary>Audit timeline entries; empty when none were included.</summary>
    public ImmutableArray<PortableTimelineEntry> Timeline { get; }

    /// <summary>Attestation metadata when the bundle was signed; otherwise null.</summary>
    public PortableBundleAttestation? Attestation { get; }

    /// <summary>Digest and publisher provenance for offline verification.</summary>
    public PortableBundleProvenance Provenance { get; }
}
/// <summary>
/// Evidence content within a portable bundle.
/// </summary>
public sealed record PortableEvidenceBundleContent
{
    /// <summary>
    /// Creates validated bundle content. Uninitialized (default) immutable arrays are
    /// normalized to empty; a blank <paramref name="productKey"/> becomes null.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="vulnerabilityId"/> is null or whitespace.</exception>
    public PortableEvidenceBundleContent(
        string vulnerabilityId,
        string? productKey,
        ImmutableArray<VexClaim> claims,
        VexConsensus? consensus,
        ImmutableArray<VexQuietProvenance> quietProvenance)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId);
        VulnerabilityId = vulnerabilityId.Trim();
        ProductKey = string.IsNullOrWhiteSpace(productKey) ? null : productKey.Trim();
        Claims = claims.IsDefault ? ImmutableArray<VexClaim>.Empty : claims;
        Consensus = consensus;
        QuietProvenance = quietProvenance.IsDefault ? ImmutableArray<VexQuietProvenance>.Empty : quietProvenance;
    }

    /// <summary>Vulnerability the evidence pertains to.</summary>
    public string VulnerabilityId { get; }

    /// <summary>Optional product scope; null means all products.</summary>
    public string? ProductKey { get; }

    /// <summary>VEX claims included in the bundle.</summary>
    public ImmutableArray<VexClaim> Claims { get; }

    /// <summary>Optional consensus snapshot, included when requested.</summary>
    public VexConsensus? Consensus { get; }

    /// <summary>Quiet-provenance records derived from signed claims.</summary>
    public ImmutableArray<VexQuietProvenance> QuietProvenance { get; }
}
/// <summary>
/// Timeline entry in a portable evidence bundle.
/// </summary>
public sealed record PortableTimelineEntry
{
    /// <summary>
    /// Creates a validated timeline entry. Required identifiers must be non-blank and
    /// are trimmed; optional fields are normalized (blank evidence hash becomes null,
    /// missing attributes become an empty dictionary).
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when a required identifier is null or whitespace.</exception>
    public PortableTimelineEntry(
        string eventId,
        string eventType,
        string providerId,
        string traceId,
        string justificationSummary,
        string? evidenceHash,
        DateTimeOffset createdAt,
        ImmutableDictionary<string, string>? attributes)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(eventId);
        ArgumentException.ThrowIfNullOrWhiteSpace(eventType);
        ArgumentException.ThrowIfNullOrWhiteSpace(providerId);
        ArgumentException.ThrowIfNullOrWhiteSpace(traceId);

        EventId = eventId.Trim();
        EventType = eventType.Trim();
        ProviderId = providerId.Trim();
        TraceId = traceId.Trim();
        JustificationSummary = justificationSummary is null ? string.Empty : justificationSummary.Trim();
        EvidenceHash = NormalizeOptional(evidenceHash);
        CreatedAt = createdAt;
        Attributes = attributes ?? ImmutableDictionary<string, string>.Empty;
    }

    /// <summary>Unique event identifier.</summary>
    public string EventId { get; }

    /// <summary>Event classification.</summary>
    public string EventType { get; }

    /// <summary>Provider that produced the event.</summary>
    public string ProviderId { get; }

    /// <summary>Correlation/trace identifier.</summary>
    public string TraceId { get; }

    /// <summary>Summary of the justification; empty when none was supplied.</summary>
    public string JustificationSummary { get; }

    /// <summary>Optional evidence digest; null when absent or blank.</summary>
    public string? EvidenceHash { get; }

    /// <summary>Event timestamp.</summary>
    public DateTimeOffset CreatedAt { get; }

    /// <summary>Additional key/value attributes; never null.</summary>
    public ImmutableDictionary<string, string> Attributes { get; }

    // Whitespace-only optional values collapse to null.
    private static string? NormalizeOptional(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}
/// <summary>
/// Attestation metadata in a portable evidence bundle.
/// </summary>
public sealed record PortableBundleAttestation
{
    /// <summary>
    /// Creates attestation metadata. Only <paramref name="predicateType"/> is required;
    /// a blank envelope digest is normalized to null.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="predicateType"/> is null or whitespace.</exception>
    public PortableBundleAttestation(
        string predicateType,
        string? envelopeDigest,
        DateTimeOffset? signedAt,
        PortableRekorReference? rekor,
        PortableSignerInfo? signer)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(predicateType);

        PredicateType = predicateType.Trim();
        EnvelopeDigest = Normalize(envelopeDigest);
        SignedAt = signedAt;
        Rekor = rekor;
        Signer = signer;
    }

    /// <summary>In-toto predicate type of the attestation.</summary>
    public string PredicateType { get; }

    /// <summary>Digest of the DSSE envelope; null when not recorded.</summary>
    public string? EnvelopeDigest { get; }

    /// <summary>Signing timestamp when known.</summary>
    public DateTimeOffset? SignedAt { get; }

    /// <summary>Optional Rekor transparency-log reference.</summary>
    public PortableRekorReference? Rekor { get; }

    /// <summary>Optional signer details.</summary>
    public PortableSignerInfo? Signer { get; }

    // Blank/whitespace optional strings collapse to null.
    private static string? Normalize(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}
/// <summary>
/// Sigstore Rekor transparency log reference.
/// </summary>
public sealed record PortableRekorReference
{
    /// <summary>
    /// Creates a Rekor reference. <paramref name="apiVersion"/> and
    /// <paramref name="location"/> are required; optional fields are blank-normalized to null.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when a required value is null or whitespace.</exception>
    public PortableRekorReference(
        string apiVersion,
        string location,
        string? logIndex,
        string? inclusionProofUri)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(apiVersion);
        ArgumentException.ThrowIfNullOrWhiteSpace(location);

        ApiVersion = apiVersion.Trim();
        Location = location.Trim();
        LogIndex = Clean(logIndex);
        InclusionProofUri = Clean(inclusionProofUri);
    }

    /// <summary>Rekor API version.</summary>
    public string ApiVersion { get; }

    /// <summary>Location (URL) of the log entry.</summary>
    public string Location { get; }

    /// <summary>Optional log index within the transparency log.</summary>
    public string? LogIndex { get; }

    /// <summary>Optional URI of the inclusion proof.</summary>
    public string? InclusionProofUri { get; }

    // Blank optional strings collapse to null; otherwise trimmed.
    private static string? Clean(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}
/// <summary>
/// Signer information for attestations.
/// </summary>
public sealed record PortableSignerInfo
{
    /// <summary>
    /// Creates signer metadata. <paramref name="keyId"/> and <paramref name="algorithm"/>
    /// are required; issuer/subject are blank-normalized to null.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when a required value is null or whitespace.</exception>
    public PortableSignerInfo(
        string keyId,
        string algorithm,
        string? issuer,
        string? subject)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(keyId);
        ArgumentException.ThrowIfNullOrWhiteSpace(algorithm);

        KeyId = keyId.Trim();
        Algorithm = algorithm.Trim();
        Issuer = TrimOrNull(issuer);
        Subject = TrimOrNull(subject);
    }

    /// <summary>Identifier of the signing key.</summary>
    public string KeyId { get; }

    /// <summary>Signature algorithm name.</summary>
    public string Algorithm { get; }

    /// <summary>Optional identity issuer.</summary>
    public string? Issuer { get; }

    /// <summary>Optional identity subject.</summary>
    public string? Subject { get; }

    // Blank optional strings collapse to null; otherwise trimmed.
    private static string? TrimOrNull(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim();
}
/// <summary>
/// Provenance information for a portable evidence bundle.
/// </summary>
public sealed record PortableBundleProvenance
{
    /// <summary>
    /// Creates provenance metadata. Required strings are trimmed; an uninitialized
    /// (default) provider array and a null metadata dictionary become empty.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="contentDigest"/> or <paramref name="publisher"/> is null or whitespace.</exception>
    public PortableBundleProvenance(
        string contentDigest,
        string publisher,
        ImmutableArray<string> sourceProviders,
        ImmutableDictionary<string, string>? metadata)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(contentDigest);
        ArgumentException.ThrowIfNullOrWhiteSpace(publisher);

        ContentDigest = contentDigest.Trim();
        Publisher = publisher.Trim();

        // ImmutableArray is a struct: guard against the default (unassigned) value.
        var providers = sourceProviders;
        if (providers.IsDefault)
        {
            providers = ImmutableArray<string>.Empty;
        }

        SourceProviders = providers;
        Metadata = metadata ?? ImmutableDictionary<string, string>.Empty;
    }

    /// <summary>Digest of the bundle content.</summary>
    public string ContentDigest { get; }

    /// <summary>Publishing component name.</summary>
    public string Publisher { get; }

    /// <summary>Distinct providers that contributed evidence; never default.</summary>
    public ImmutableArray<string> SourceProviders { get; }

    /// <summary>Additional provenance metadata; never null.</summary>
    public ImmutableDictionary<string, string> Metadata { get; }
}
/// <summary>
/// Request to build a portable evidence bundle.
/// </summary>
public sealed record PortableEvidenceBundleRequest
{
    /// <summary>
    /// Creates a validated request. Identifiers are trimmed; a blank product key becomes
    /// null; the timeline limit defaults to 100 when unset/non-positive and is capped at 1000.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="vulnerabilityId"/> or <paramref name="tenantId"/> is null or whitespace.</exception>
    public PortableEvidenceBundleRequest(
        string vulnerabilityId,
        string? productKey,
        string tenantId,
        bool includeTimeline,
        bool includeConsensus,
        int? timelineLimit)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId);
        ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);

        VulnerabilityId = vulnerabilityId.Trim();
        ProductKey = string.IsNullOrWhiteSpace(productKey) ? null : productKey.Trim();
        TenantId = tenantId.Trim();
        IncludeTimeline = includeTimeline;
        IncludeConsensus = includeConsensus;

        // Default to 100 when unset or non-positive; cap requested values at 1000.
        if (timelineLimit is int requested && requested > 0)
        {
            TimelineLimit = Math.Min(requested, 1000);
        }
        else
        {
            TimelineLimit = 100;
        }
    }

    /// <summary>Vulnerability to bundle evidence for.</summary>
    public string VulnerabilityId { get; }

    /// <summary>Optional product scope; null means all products.</summary>
    public string? ProductKey { get; }

    /// <summary>Requesting tenant identifier.</summary>
    public string TenantId { get; }

    /// <summary>Whether timeline events should be included.</summary>
    public bool IncludeTimeline { get; }

    /// <summary>Whether the consensus snapshot should be included.</summary>
    public bool IncludeConsensus { get; }

    /// <summary>Effective timeline entry limit (1..1000, default 100).</summary>
    public int TimelineLimit { get; }
}

View File

@@ -0,0 +1,43 @@
namespace StellaOps.Excititor.Core;
/// <summary>
/// Interface for recording VEX normalization telemetry (EXCITITOR-VULN-29-004).
/// Implementations wire metrics and structured logs to observability backends
/// for Vuln Explorer and Advisory AI dashboards.
/// </summary>
public interface IVexNormalizationTelemetryRecorder
{
    /// <summary>
    /// Records a normalization error that occurred during claim extraction.
    /// </summary>
    /// <param name="tenant">Tenant identifier (null for default tenant).</param>
    /// <param name="provider">Provider ID that sourced the document.</param>
    /// <param name="errorType">Error classification (e.g. "unsupported_format", "normalization_exception", "validation_error").</param>
    /// <param name="detail">Optional error detail message.</param>
    void RecordNormalizationError(string? tenant, string provider, string errorType, string? detail = null);

    /// <summary>
    /// Records a suppression scope application affecting VEX statements.
    /// </summary>
    /// <param name="tenant">Tenant identifier (null for default tenant).</param>
    /// <param name="scopeType">Type of suppression scope (e.g. "provider", "product", "vulnerability").</param>
    /// <param name="affectedStatements">Number of statements affected by the suppression.</param>
    void RecordSuppressionScope(string? tenant, string scopeType, int affectedStatements);

    /// <summary>
    /// Records detection of a single withdrawn VEX statement.
    /// </summary>
    /// <param name="tenant">Tenant identifier (null for default tenant).</param>
    /// <param name="provider">Provider ID that issued the withdrawal.</param>
    /// <param name="replacementId">Optional replacement statement ID if superseded.</param>
    void RecordWithdrawnStatement(string? tenant, string provider, string? replacementId = null);

    /// <summary>
    /// Records batch withdrawn-statement processing.
    /// </summary>
    /// <param name="tenant">Tenant identifier (null for default tenant).</param>
    /// <param name="provider">Provider ID that issued the withdrawals.</param>
    /// <param name="totalWithdrawn">Total number of withdrawn statements.</param>
    /// <param name="replacements">Number of withdrawn statements that have replacements.</param>
    void RecordWithdrawnStatements(string? tenant, string provider, int totalWithdrawn, int replacements);
}

View File

@@ -0,0 +1,276 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using StellaOps.Excititor.Core;
using StellaOps.Excititor.Core.Observations;
namespace StellaOps.Excititor.Export;
/// <summary>
/// Service for building portable evidence bundles with timeline and attestation metadata (EXCITITOR-AIRGAP-58-001).
/// Bundles can be exported for sealed deployments and verified by Advisory AI teams.
/// </summary>
public interface IPortableEvidenceBundleBuilder
{
    /// <summary>
    /// Builds a portable evidence bundle from claims and optional timeline events.
    /// </summary>
    /// <param name="request">Scope of the bundle: vulnerability, optional product key, tenant, and inclusion flags.</param>
    /// <param name="claims">VEX claims to embed; ordered deterministically by the implementation.</param>
    /// <param name="consensus">Optional consensus snapshot; included only when the request asks for it.</param>
    /// <param name="timelineEvents">Optional timeline events; truncated to the request's timeline limit.</param>
    /// <param name="attestation">Optional attestation metadata mapped into the bundle.</param>
    /// <param name="cancellationToken">Token used to cancel the build.</param>
    ValueTask<PortableEvidenceBundle> BuildAsync(
        PortableEvidenceBundleRequest request,
        IReadOnlyCollection<VexClaim> claims,
        VexConsensus? consensus,
        IReadOnlyCollection<TimelineEvent>? timelineEvents,
        VexAttestationMetadata? attestation,
        CancellationToken cancellationToken);

    /// <summary>
    /// Serializes a portable evidence bundle to canonical JSON.
    /// </summary>
    /// <param name="bundle">Bundle to serialize.</param>
    string Serialize(PortableEvidenceBundle bundle);

    /// <summary>
    /// Computes the content digest ("sha256:&lt;hex&gt;") of a portable evidence bundle's canonical JSON.
    /// </summary>
    /// <param name="bundle">Bundle to digest.</param>
    string ComputeDigest(PortableEvidenceBundle bundle);
}
/// <summary>
/// Default implementation of <see cref="IPortableEvidenceBundleBuilder"/>.
/// </summary>
public sealed class PortableEvidenceBundleBuilder : IPortableEvidenceBundleBuilder
{
private const string PublisherName = "StellaOps.Excititor";
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.General)
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = false,
};
private readonly TimeProvider _timeProvider;
private readonly ILogger<PortableEvidenceBundleBuilder> _logger;
public PortableEvidenceBundleBuilder(
TimeProvider timeProvider,
ILogger<PortableEvidenceBundleBuilder> logger)
{
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public ValueTask<PortableEvidenceBundle> BuildAsync(
PortableEvidenceBundleRequest request,
IReadOnlyCollection<VexClaim> claims,
VexConsensus? consensus,
IReadOnlyCollection<TimelineEvent>? timelineEvents,
VexAttestationMetadata? attestation,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(claims);
cancellationToken.ThrowIfCancellationRequested();
var generatedAt = _timeProvider.GetUtcNow();
var bundleId = GenerateBundleId(request, generatedAt);
// Order claims deterministically
var orderedClaims = claims
.OrderBy(c => c.VulnerabilityId, StringComparer.Ordinal)
.ThenBy(c => c.Product.Key, StringComparer.Ordinal)
.ThenBy(c => c.ProviderId, StringComparer.Ordinal)
.ThenBy(c => c.Document.Digest, StringComparer.Ordinal)
.ToImmutableArray();
// Build content
var quietProvenance = ExtractQuietProvenance(orderedClaims);
var content = new PortableEvidenceBundleContent(
request.VulnerabilityId,
request.ProductKey,
orderedClaims,
request.IncludeConsensus ? consensus : null,
quietProvenance);
// Map timeline events
var timeline = MapTimelineEvents(timelineEvents, request.TimelineLimit);
// Map attestation
var bundleAttestation = MapAttestation(attestation);
// Extract source providers
var sourceProviders = orderedClaims
.Select(c => c.ProviderId)
.Distinct(StringComparer.OrdinalIgnoreCase)
.OrderBy(p => p, StringComparer.Ordinal)
.ToImmutableArray();
// Compute content digest (before provenance is set)
var contentDigest = ComputeContentDigest(content);
var provenance = new PortableBundleProvenance(
contentDigest,
PublisherName,
sourceProviders,
ImmutableDictionary<string, string>.Empty
.Add("schemaVersion", PortableEvidenceBundle.SchemaVersion.ToString())
.Add("claimCount", orderedClaims.Length.ToString())
.Add("hasConsensus", (consensus is not null).ToString().ToLowerInvariant())
.Add("hasAttestation", (attestation is not null).ToString().ToLowerInvariant())
.Add("timelineCount", timeline.Length.ToString()));
var bundle = new PortableEvidenceBundle(
bundleId,
generatedAt,
request.TenantId,
content,
timeline,
bundleAttestation,
provenance);
_logger.LogInformation(
"Built portable evidence bundle {BundleId} for {VulnerabilityId}/{ProductKey}: claims={ClaimCount} timeline={TimelineCount}",
bundleId,
request.VulnerabilityId,
request.ProductKey ?? "(all)",
orderedClaims.Length,
timeline.Length);
return ValueTask.FromResult(bundle);
}
public string Serialize(PortableEvidenceBundle bundle)
{
ArgumentNullException.ThrowIfNull(bundle);
return VexCanonicalJsonSerializer.Serialize(bundle);
}
public string ComputeDigest(PortableEvidenceBundle bundle)
{
ArgumentNullException.ThrowIfNull(bundle);
var json = Serialize(bundle);
var bytes = Encoding.UTF8.GetBytes(json);
return ComputeSha256Digest(bytes);
}
private static string GenerateBundleId(PortableEvidenceBundleRequest request, DateTimeOffset generatedAt)
{
var components = new[]
{
request.VulnerabilityId,
request.ProductKey ?? "_",
request.TenantId,
generatedAt.ToUnixTimeMilliseconds().ToString(),
};
var input = string.Join(":", components);
var inputBytes = Encoding.UTF8.GetBytes(input);
var hashBytes = SHA256.HashData(inputBytes);
var shortHash = Convert.ToHexString(hashBytes[..8]).ToLowerInvariant();
return $"peb-{shortHash}";
}
private static ImmutableArray<VexQuietProvenance> ExtractQuietProvenance(ImmutableArray<VexClaim> claims)
{
// Group claims by vulnerability/product to build quiet provenance
var grouped = claims
.Where(c => c.Document.Signature is not null)
.GroupBy(c => (c.VulnerabilityId, c.Product.Key))
.ToList();
if (grouped.Count == 0)
{
return ImmutableArray<VexQuietProvenance>.Empty;
}
var provenance = new List<VexQuietProvenance>();
foreach (var group in grouped)
{
var statements = group
.Select(claim => new VexQuietStatement(
claim.ProviderId,
claim.Document.Digest,
claim.Justification,
claim.Document.Signature))
.ToList();
provenance.Add(new VexQuietProvenance(
group.Key.VulnerabilityId,
group.Key.Key,
statements));
}
return provenance
.OrderBy(p => p.VulnerabilityId, StringComparer.Ordinal)
.ThenBy(p => p.ProductKey, StringComparer.Ordinal)
.ToImmutableArray();
}
/// <summary>
/// Maps up to <paramref name="limit"/> timeline events (newest first, ties
/// broken by ordinal event id) into portable timeline entries.
/// </summary>
private static ImmutableArray<PortableTimelineEntry> MapTimelineEvents(
    IReadOnlyCollection<TimelineEvent>? events,
    int limit)
{
    // No timeline supplied (or an empty one) maps to an empty result.
    if (events is not { Count: > 0 })
    {
        return ImmutableArray<PortableTimelineEntry>.Empty;
    }

    var ordered = events
        .OrderByDescending(e => e.CreatedAt)
        .ThenBy(e => e.EventId, StringComparer.Ordinal)
        .Take(limit);

    var builder = ImmutableArray.CreateBuilder<PortableTimelineEntry>();
    foreach (var evt in ordered)
    {
        builder.Add(new PortableTimelineEntry(
            evt.EventId,
            evt.EventType,
            evt.ProviderId,
            evt.TraceId,
            evt.JustificationSummary,
            evt.EvidenceHash,
            evt.CreatedAt,
            evt.Attributes));
    }

    return builder.ToImmutable();
}
/// <summary>
/// Converts attestation metadata (including an optional Rekor transparency-log
/// reference) into its portable bundle representation; null in, null out.
/// </summary>
private static PortableBundleAttestation? MapAttestation(VexAttestationMetadata? attestation)
{
    if (attestation is null)
    {
        return null;
    }

    // Translate the Rekor reference when one is present; the inclusion-proof
    // URI is flattened to a string for portability.
    var rekor = attestation.Rekor is { } rekorRef
        ? new PortableRekorReference(
            rekorRef.ApiVersion,
            rekorRef.Location,
            rekorRef.LogIndex,
            rekorRef.InclusionProofUri?.ToString())
        : null;

    // Signer info is not available on attestation metadata, hence null.
    return new PortableBundleAttestation(
        attestation.PredicateType,
        attestation.EnvelopeDigest,
        attestation.SignedAt,
        rekor,
        signer: null);
}
/// <summary>
/// Computes the SHA-256 digest of the content's canonical JSON form.
/// </summary>
private static string ComputeContentDigest(PortableEvidenceBundleContent content)
    => ComputeSha256Digest(
        Encoding.UTF8.GetBytes(VexCanonicalJsonSerializer.Serialize(content)));
/// <summary>
/// Hashes the given bytes with SHA-256 and renders the digest as
/// lowercase hex with an explicit algorithm prefix, e.g. "sha256:ab12…".
/// </summary>
private static string ComputeSha256Digest(ReadOnlySpan<byte> content)
{
    var hash = SHA256.HashData(content);
    return string.Concat("sha256:", Convert.ToHexString(hash).ToLowerInvariant());
}
}

View File

@@ -1,4 +1,5 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Driver;
@@ -8,6 +9,26 @@ namespace StellaOps.Excititor.Storage.Mongo;
public interface IAirgapImportStore
{
    /// <summary>
    /// Persists the supplied air-gap import record. The Mongo implementation
    /// throws <see cref="DuplicateAirgapImportException"/> when a record with
    /// the same bundle id / mirror generation already exists.
    /// </summary>
    Task SaveAsync(AirgapImportRecord record, CancellationToken cancellationToken);

    /// <summary>
    /// Finds an import record by bundle id within a tenant, optionally narrowed
    /// to a specific mirror generation. Returns <c>null</c> when nothing matches;
    /// when <paramref name="mirrorGeneration"/> is omitted, the implementation
    /// picks the record with the highest mirror generation.
    /// </summary>
    Task<AirgapImportRecord?> FindByBundleIdAsync(
        string tenantId,
        string bundleId,
        string? mirrorGeneration,
        CancellationToken cancellationToken);

    /// <summary>
    /// Lists import records for a tenant, optionally filtered by publisher and
    /// minimum import time, paged via <paramref name="limit"/> and
    /// <paramref name="offset"/>, newest first.
    /// </summary>
    Task<IReadOnlyList<AirgapImportRecord>> ListAsync(
        string tenantId,
        string? publisherFilter,
        DateTimeOffset? importedAfter,
        int limit,
        int offset,
        CancellationToken cancellationToken);

    /// <summary>
    /// Counts the records that <see cref="ListAsync"/> would return for the same
    /// filters, ignoring paging.
    /// </summary>
    Task<int> CountAsync(
        string tenantId,
        string? publisherFilter,
        DateTimeOffset? importedAfter,
        CancellationToken cancellationToken);
}
public sealed class DuplicateAirgapImportException : Exception
@@ -58,4 +79,95 @@ internal sealed class MongoAirgapImportStore : IAirgapImportStore
throw new DuplicateAirgapImportException(record.BundleId, record.MirrorGeneration, ex);
}
}
/// <summary>
/// Finds an import record by bundle id within a tenant, optionally narrowed to
/// a specific mirror generation; returns null when no record matches.
/// </summary>
/// <exception cref="ArgumentException">Thrown when tenant or bundle id is null, empty, or whitespace.</exception>
public async Task<AirgapImportRecord?> FindByBundleIdAsync(
    string tenantId,
    string bundleId,
    string? mirrorGeneration,
    CancellationToken cancellationToken)
{
    // Blank identifiers can never match a record; reject them up front instead
    // of issuing a query that silently returns null (consistent with the
    // validation style used by the claim store).
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);
    ArgumentException.ThrowIfNullOrWhiteSpace(bundleId);

    var filter = Builders<AirgapImportRecord>.Filter.And(
        Builders<AirgapImportRecord>.Filter.Eq(x => x.TenantId, tenantId),
        Builders<AirgapImportRecord>.Filter.Eq(x => x.BundleId, bundleId));

    if (!string.IsNullOrWhiteSpace(mirrorGeneration))
    {
        filter = Builders<AirgapImportRecord>.Filter.And(
            filter,
            Builders<AirgapImportRecord>.Filter.Eq(x => x.MirrorGeneration, mirrorGeneration));
    }

    // When several generations of the same bundle exist, prefer the highest
    // one. NOTE(review): this is a lexicographic sort — assumes generations
    // compare correctly as strings; confirm the generation format.
    var sort = Builders<AirgapImportRecord>.Sort.Descending(x => x.MirrorGeneration);

    return await _collection
        .Find(filter)
        .Sort(sort)
        .FirstOrDefaultAsync(cancellationToken)
        .ConfigureAwait(false);
}
/// <summary>
/// Lists import records for a tenant, optionally filtered by publisher and
/// minimum import time, newest first, paged by offset/limit.
/// </summary>
/// <exception cref="ArgumentException">Thrown when <paramref name="tenantId"/> is null, empty, or whitespace.</exception>
public async Task<IReadOnlyList<AirgapImportRecord>> ListAsync(
    string tenantId,
    string? publisherFilter,
    DateTimeOffset? importedAfter,
    int limit,
    int offset,
    CancellationToken cancellationToken)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(tenantId);

    var filter = BuildListFilter(tenantId, publisherFilter, importedAfter);

    // Newest imports first.
    var sort = Builders<AirgapImportRecord>.Sort.Descending(x => x.ImportedAt);

    return await _collection
        .Find(filter)
        .Sort(sort)
        // MongoDB rejects a negative skip at runtime; clamp to 0 so a bad
        // offset degrades to "first page" instead of a server error. The limit
        // stays bounded to [1, 1000] to protect the server from huge pages.
        .Skip(Math.Max(offset, 0))
        .Limit(Math.Clamp(limit, 1, 1000))
        .ToListAsync(cancellationToken)
        .ConfigureAwait(false);
}
/// <summary>
/// Counts the import records matching the same filters as ListAsync,
/// ignoring paging.
/// </summary>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="tenantId"/> is null.</exception>
public async Task<int> CountAsync(
    string tenantId,
    string? publisherFilter,
    DateTimeOffset? importedAfter,
    CancellationToken cancellationToken)
{
    ArgumentNullException.ThrowIfNull(tenantId);

    var total = await _collection
        .CountDocumentsAsync(
            BuildListFilter(tenantId, publisherFilter, importedAfter),
            cancellationToken: cancellationToken)
        .ConfigureAwait(false);

    // Mongo reports counts as long; saturate instead of overflowing the cast.
    return total > int.MaxValue ? int.MaxValue : (int)total;
}
/// <summary>
/// Builds the shared list/count filter: tenant scope plus optional publisher
/// and minimum-import-time predicates.
/// </summary>
private static FilterDefinition<AirgapImportRecord> BuildListFilter(
    string tenantId,
    string? publisherFilter,
    DateTimeOffset? importedAfter)
{
    var builder = Builders<AirgapImportRecord>.Filter;

    // Tenant scoping is always applied; the remaining predicates are AND-ed
    // in only when the caller supplied them.
    var filter = builder.Eq(x => x.TenantId, tenantId);

    if (!string.IsNullOrWhiteSpace(publisherFilter))
    {
        filter &= builder.Eq(x => x.Publisher, publisherFilter);
    }

    if (importedAfter is { } after)
    {
        filter &= builder.Gte(x => x.ImportedAt, after);
    }

    return filter;
}
}

View File

@@ -34,6 +34,11 @@ public interface IVexClaimStore
ValueTask AppendAsync(IEnumerable<VexClaim> claims, DateTimeOffset observedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null);
ValueTask<IReadOnlyCollection<VexClaim>> FindAsync(string vulnerabilityId, string productKey, DateTimeOffset? since, CancellationToken cancellationToken, IClientSessionHandle? session = null);
/// <summary>
/// Retrieves all claims for a specific vulnerability ID (EXCITITOR-VULN-29-002).
/// </summary>
ValueTask<IReadOnlyCollection<VexClaim>> FindByVulnerabilityAsync(string vulnerabilityId, int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null);
}
public sealed record VexConnectorState(

View File

@@ -64,4 +64,23 @@ public sealed class MongoVexClaimStore : IVexClaimStore
return records.ConvertAll(static record => record.ToDomain());
}
/// <summary>
/// Retrieves the most recently inserted claims for a vulnerability id, capped
/// at <paramref name="limit"/> results (EXCITITOR-VULN-29-002).
/// </summary>
/// <exception cref="ArgumentException">Thrown when <paramref name="vulnerabilityId"/> is null, empty, or whitespace.</exception>
public async ValueTask<IReadOnlyCollection<VexClaim>> FindByVulnerabilityAsync(string vulnerabilityId, int limit, CancellationToken cancellationToken, IClientSessionHandle? session = null)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId);

    var filter = Builders<VexStatementRecord>.Filter.Eq(x => x.VulnerabilityId, vulnerabilityId.Trim());

    // Reuse the caller's session when one is supplied so reads stay inside
    // the caller's transaction/causal-consistency scope.
    var find = session is null
        ? _collection.Find(filter)
        : _collection.Find(session, filter);

    var records = await find
        .SortByDescending(x => x.InsertedAt)
        // Guard against non-positive limits: the Mongo driver treats Limit(0)
        // as "no limit", which would let a caller bug pull the whole collection.
        .Limit(Math.Max(limit, 1))
        .ToListAsync(cancellationToken)
        .ConfigureAwait(false);

    return records.ConvertAll(static record => record.ToDomain());
}
}

View File

@@ -1,5 +1,6 @@
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
@@ -9,6 +10,7 @@ namespace StellaOps.Excititor.Storage.Mongo;
/// <summary>
/// Normalizer router that resolves providers from Mongo storage before invoking the format-specific normalizer.
/// Records telemetry for normalization operations (EXCITITOR-VULN-29-004).
/// </summary>
public sealed class StorageBackedVexNormalizerRouter : IVexNormalizerRouter
{
@@ -16,17 +18,20 @@ public sealed class StorageBackedVexNormalizerRouter : IVexNormalizerRouter
private readonly IVexProviderStore _providerStore;
private readonly IVexMongoSessionProvider _sessionProvider;
private readonly ILogger<StorageBackedVexNormalizerRouter> _logger;
private readonly IVexNormalizationTelemetryRecorder? _telemetryRecorder;
public StorageBackedVexNormalizerRouter(
IEnumerable<IVexNormalizer> normalizers,
IVexProviderStore providerStore,
IVexMongoSessionProvider sessionProvider,
ILogger<StorageBackedVexNormalizerRouter> logger)
ILogger<StorageBackedVexNormalizerRouter> logger,
IVexNormalizationTelemetryRecorder? telemetryRecorder = null)
{
ArgumentNullException.ThrowIfNull(normalizers);
_providerStore = providerStore ?? throw new ArgumentNullException(nameof(providerStore));
_sessionProvider = sessionProvider ?? throw new ArgumentNullException(nameof(sessionProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_telemetryRecorder = telemetryRecorder;
_registry = new VexNormalizerRegistry(normalizers.ToImmutableArray());
}
@@ -35,10 +40,23 @@ public sealed class StorageBackedVexNormalizerRouter : IVexNormalizerRouter
{
ArgumentNullException.ThrowIfNull(document);
var stopwatch = Stopwatch.StartNew();
var normalizer = _registry.Resolve(document);
if (normalizer is null)
{
_logger.LogWarning("No normalizer registered for VEX document format {Format}. Skipping normalization for {Digest}.", document.Format, document.Digest);
stopwatch.Stop();
_logger.LogWarning(
"No normalizer registered for VEX document format {Format}. Skipping normalization for {Digest} from provider {ProviderId}.",
document.Format,
document.Digest,
document.ProviderId);
_telemetryRecorder?.RecordNormalizationError(
tenant: null,
document.ProviderId,
"unsupported_format",
$"No normalizer for format {document.Format}");
return new VexClaimBatch(
document,
ImmutableArray<VexClaim>.Empty,
@@ -49,6 +67,48 @@ public sealed class StorageBackedVexNormalizerRouter : IVexNormalizerRouter
var provider = await _providerStore.FindAsync(document.ProviderId, cancellationToken, session).ConfigureAwait(false)
?? new VexProvider(document.ProviderId, document.ProviderId, VexProviderKind.Vendor);
return await normalizer.NormalizeAsync(document, provider, cancellationToken).ConfigureAwait(false);
try
{
var batch = await normalizer.NormalizeAsync(document, provider, cancellationToken).ConfigureAwait(false);
stopwatch.Stop();
if (batch.Claims.IsDefaultOrEmpty || batch.Claims.Length == 0)
{
_logger.LogDebug(
"Normalization produced no claims for document {Digest} from provider {ProviderId}.",
document.Digest,
document.ProviderId);
}
else
{
_logger.LogDebug(
"Normalization produced {ClaimCount} claims for document {Digest} from provider {ProviderId} in {Duration}ms.",
batch.Claims.Length,
document.Digest,
document.ProviderId,
stopwatch.Elapsed.TotalMilliseconds);
}
return batch;
}
catch (Exception ex) when (ex is not OperationCanceledException)
{
stopwatch.Stop();
_logger.LogError(
ex,
"Normalization failed for document {Digest} from provider {ProviderId} after {Duration}ms: {Message}",
document.Digest,
document.ProviderId,
stopwatch.Elapsed.TotalMilliseconds,
ex.Message);
_telemetryRecorder?.RecordNormalizationError(
tenant: null,
document.ProviderId,
"normalization_exception",
ex.Message);
throw;
}
}
}