StellaOps Bot
2025-12-07 22:49:53 +02:00
parent 11597679ed
commit 7c24ed96ee
204 changed files with 23313 additions and 1430 deletions

View File

@@ -235,6 +235,10 @@ internal sealed class AttestorSigningKeyRegistry : IDisposable
}
var privateKeyBytes = LoadSm2KeyBytes(key);
var metadata = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase)
{
["source"] = "config"
};
var signingKey = new CryptoSigningKey(
new CryptoKeyReference(providerKeyId, providerName),
normalizedAlgorithm,

View File

@@ -120,6 +120,95 @@ public sealed class AttestorVerificationServiceTests
Assert.Equal("missing", verifyResult.Report.Transparency.WitnessStatus);
}
[Fact]
public async Task VerifyAsync_KmsBundle_Passes_WhenTwoSignaturesRequired()
{
var options = Options.Create(new AttestorOptions
{
Redis = new AttestorOptions.RedisOptions { Url = string.Empty },
Rekor = new AttestorOptions.RekorOptions
{
Primary = new AttestorOptions.RekorBackendOptions
{
Url = "https://rekor.stellaops.test",
ProofTimeoutMs = 1000,
PollIntervalMs = 50,
MaxAttempts = 2
}
},
Security = new AttestorOptions.SecurityOptions
{
SignerIdentity = new AttestorOptions.SignerIdentityOptions
{
Mode = { "kms" },
KmsKeys = { HmacSecretBase64 }
}
},
Verification = new AttestorOptions.VerificationOptions
{
MinimumSignatures = 2,
PolicyId = "policy/dual-sign"
}
});
using var metrics = new AttestorMetrics();
using var activitySource = new AttestorActivitySource();
var canonicalizer = new DefaultDsseCanonicalizer();
var engine = new AttestorVerificationEngine(canonicalizer, new TestCryptoHash(), options, NullLogger<AttestorVerificationEngine>.Instance);
var repository = new InMemoryAttestorEntryRepository();
var dedupeStore = new InMemoryAttestorDedupeStore();
var rekorClient = new StubRekorClient(new NullLogger<StubRekorClient>());
var archiveStore = new NullAttestorArchiveStore(new NullLogger<NullAttestorArchiveStore>());
var auditSink = new InMemoryAttestorAuditSink();
var submissionService = new AttestorSubmissionService(
new AttestorSubmissionValidator(canonicalizer),
repository,
dedupeStore,
rekorClient,
new NullTransparencyWitnessClient(),
archiveStore,
auditSink,
new NullVerificationCache(),
options,
new NullLogger<AttestorSubmissionService>(),
TimeProvider.System,
metrics);
var submission = CreateSubmissionRequestWithTwoSignatures(canonicalizer, HmacSecret);
var context = new SubmissionContext
{
CallerSubject = "urn:stellaops:signer",
CallerAudience = "attestor",
CallerClientId = "signer-service",
CallerTenant = "default"
};
var response = await submissionService.SubmitAsync(submission, context);
var verificationService = new AttestorVerificationService(
repository,
canonicalizer,
rekorClient,
new NullTransparencyWitnessClient(),
engine,
options,
new NullLogger<AttestorVerificationService>(),
metrics,
activitySource,
TimeProvider.System);
var verifyResult = await verificationService.VerifyAsync(new AttestorVerificationRequest
{
Uuid = response.Uuid,
Bundle = submission.Bundle
});
Assert.True(verifyResult.Ok);
Assert.Equal(VerificationSectionStatus.Pass, verifyResult.Report!.Signatures.Status);
Assert.Equal(2, verifyResult.Report.Signatures.VerifiedSignatures);
Assert.Equal(2, verifyResult.Report.Signatures.RequiredSignatures);
}
[Fact]
public async Task VerifyAsync_FlagsTamperedBundle()
{
@@ -262,6 +351,32 @@ public sealed class AttestorVerificationServiceTests
return request;
}
private static AttestorSubmissionRequest CreateSubmissionRequestWithTwoSignatures(DefaultDsseCanonicalizer canonicalizer, byte[] hmacSecret)
{
var request = CreateSubmissionRequest(canonicalizer, hmacSecret);
// Recompute signature and append a second copy to satisfy multi-signature verification
if (!TryDecodeBase64(request.Bundle.Dsse.PayloadBase64, out var payload))
{
throw new InvalidOperationException("Test payload failed to decode.");
}
var preAuth = ComputePreAuthEncodingForTests(request.Bundle.Dsse.PayloadType, payload);
using (var hmac = new HMACSHA256(hmacSecret))
{
var signature = hmac.ComputeHash(preAuth);
request.Bundle.Dsse.Signatures.Add(new AttestorSubmissionRequest.DsseSignature
{
KeyId = "kms-test-2",
Signature = Convert.ToBase64String(signature)
});
}
var canonical = canonicalizer.CanonicalizeAsync(request).GetAwaiter().GetResult();
request.Meta.BundleSha256 = Convert.ToHexString(SHA256.HashData(canonical)).ToLowerInvariant();
return request;
}
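The helper ComputePreAuthEncodingForTests referenced above is not part of this hunk; as a hedged sketch of what such a helper typically does per the DSSE v1 spec (the name and exact shape are assumptions), it could look like:
private static byte[] ComputePreAuthEncodingForTests(string payloadType, byte[] payload)
{
// Sketch only: DSSE v1 PAE = "DSSEv1" SP LEN(type) SP type SP LEN(payload) SP payload,
// with lengths written as ASCII decimal byte counts.
var header = System.Text.Encoding.UTF8.GetBytes(
$"DSSEv1 {System.Text.Encoding.UTF8.GetByteCount(payloadType)} {payloadType} {payload.Length} ");
var buffer = new byte[header.Length + payload.Length];
Buffer.BlockCopy(header, 0, buffer, 0, header.Length);
Buffer.BlockCopy(payload, 0, buffer, header.Length, payload.Length);
return buffer;
}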
private static AttestorSubmissionRequest.SubmissionBundle CloneBundle(AttestorSubmissionRequest.SubmissionBundle source)
{
var clone = new AttestorSubmissionRequest.SubmissionBundle

View File

@@ -77,6 +77,7 @@ internal static class CommandFactory
root.Add(BuildSdkCommand(services, verboseOption, cancellationToken));
root.Add(BuildMirrorCommand(services, verboseOption, cancellationToken));
root.Add(BuildAirgapCommand(services, verboseOption, cancellationToken));
root.Add(BuildDevPortalCommand(services, verboseOption, cancellationToken));
root.Add(SystemCommandBuilder.BuildSystemCommand(services, verboseOption, cancellationToken));
var pluginLogger = loggerFactory.CreateLogger<CliCommandModuleLoader>();
@@ -10632,5 +10633,53 @@ internal static class CommandFactory
return airgap;
}
private static Command BuildDevPortalCommand(IServiceProvider services, Option<bool> verboseOption, CancellationToken cancellationToken)
{
var devportal = new Command("devportal", "Manage DevPortal offline operations.");
// devportal verify (DVOFF-64-002)
var verify = new Command("verify", "Verify integrity of a DevPortal/evidence bundle before import.");
var bundleOption = new Option<string>("--bundle", new[] { "-b" })
{
Description = "Path to the bundle .tgz file.",
Required = true
};
var offlineOption = new Option<bool>("--offline")
{
Description = "Skip TSA verification and online checks."
};
var jsonOption = new Option<bool>("--json")
{
Description = "Output results in JSON format."
};
verify.Add(bundleOption);
verify.Add(offlineOption);
verify.Add(jsonOption);
verify.SetAction((parseResult, _) =>
{
var bundlePath = parseResult.GetValue(bundleOption)!;
var offline = parseResult.GetValue(offlineOption);
var json = parseResult.GetValue(jsonOption);
var verbose = parseResult.GetValue(verboseOption);
return CommandHandlers.HandleDevPortalVerifyAsync(
services,
bundlePath,
offline,
json,
verbose,
cancellationToken);
});
devportal.Add(verify);
return devportal;
}
}
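For reference, the wiring above yields an invocation of the shape stella devportal verify --bundle ./evidence-bundle.tgz --offline --json (bundle path hypothetical), which routes to HandleDevPortalVerifyAsync shown later in this commit.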

View File

@@ -27,17 +27,17 @@ using StellaOps.Cli.Configuration;
using StellaOps.Cli.Output;
using StellaOps.Cli.Prompts;
using StellaOps.Cli.Services;
using StellaOps.Cli.Services.Models;
using StellaOps.Cli.Services.Models.AdvisoryAi;
using StellaOps.Cli.Services.Models.Bun;
using StellaOps.Cli.Services.Models.Ruby;
using StellaOps.Cli.Telemetry;
using StellaOps.Cryptography;
using StellaOps.Cryptography.DependencyInjection;
using StellaOps.Cryptography.Kms;
using StellaOps.Policy.Scoring;
using StellaOps.Policy.Scoring.Engine;
using StellaOps.Policy.Scoring.Policies;
using StellaOps.Scanner.Analyzers.Lang;
using StellaOps.Scanner.Analyzers.Lang.Java;
using StellaOps.Scanner.Analyzers.Lang.Node;
@@ -70,17 +70,17 @@ internal static class CommandHandlers
/// <summary>
/// JSON serializer options for output (alias for JsonOptions).
/// </summary>
private static readonly JsonSerializerOptions JsonOutputOptions = JsonOptions;
private static readonly JsonSerializerOptions CompactJson = new(JsonSerializerDefaults.Web)
{
WriteIndented = true
};
/// <summary>
/// Sets the verbosity level for logging.
/// </summary>
private static void SetVerbosity(IServiceProvider services, bool verbose)
{
// Configure logging level based on verbose flag
var loggerFactory = services.GetService<ILoggerFactory>();
@@ -90,215 +90,215 @@ internal static class CommandHandlers
var logger = loggerFactory.CreateLogger("StellaOps.Cli.Commands.CommandHandlers");
logger.LogDebug("Verbose logging enabled");
}
}
public static async Task HandleCvssScoreAsync(
IServiceProvider services,
string vulnerabilityId,
string policyPath,
string vector,
bool json,
bool verbose,
CancellationToken cancellationToken)
{
await using var scope = services.CreateAsyncScope();
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-score");
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
try
{
var policyJson = await File.ReadAllTextAsync(policyPath, cancellationToken).ConfigureAwait(false);
var loader = new CvssPolicyLoader();
var policyResult = loader.Load(policyJson, cancellationToken);
if (!policyResult.IsValid || policyResult.Policy is null || string.IsNullOrWhiteSpace(policyResult.Hash))
{
var errors = string.Join("; ", policyResult.Errors.Select(e => $"{e.Path}: {e.Message}"));
throw new InvalidOperationException($"Policy invalid: {errors}");
}
var policy = policyResult.Policy with { Hash = policyResult.Hash };
var engine = scope.ServiceProvider.GetRequiredService<ICvssV4Engine>();
var parsed = engine.ParseVector(vector);
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
var request = new CreateCvssReceipt(
vulnerabilityId,
policy,
parsed.BaseMetrics,
parsed.ThreatMetrics,
parsed.EnvironmentalMetrics,
parsed.SupplementalMetrics,
Array.Empty<CvssEvidenceItem>(),
SigningKey: null,
CreatedBy: "cli",
CreatedAt: DateTimeOffset.UtcNow);
var receipt = await client.CreateReceiptAsync(request, cancellationToken).ConfigureAwait(false)
?? throw new InvalidOperationException("CVSS receipt creation failed.");
if (json)
{
Console.WriteLine(JsonSerializer.Serialize(receipt, CompactJson));
}
else
{
Console.WriteLine($"✔ CVSS receipt {receipt.ReceiptId} created | Severity {receipt.Severity} | Effective {receipt.Scores.EffectiveScore:0.0}");
Console.WriteLine($"Vector: {receipt.VectorString}");
Console.WriteLine($"Policy: {receipt.PolicyRef.PolicyId} v{receipt.PolicyRef.Version} ({receipt.PolicyRef.Hash})");
}
Environment.ExitCode = 0;
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to create CVSS receipt");
Environment.ExitCode = 1;
if (json)
{
var problem = new { error = "cvss_score_failed", message = ex.Message };
Console.WriteLine(JsonSerializer.Serialize(problem, CompactJson));
}
}
}
public static async Task HandleCvssShowAsync(
IServiceProvider services,
string receiptId,
bool json,
bool verbose,
CancellationToken cancellationToken)
{
await using var scope = services.CreateAsyncScope();
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-show");
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
try
{
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
var receipt = await client.GetReceiptAsync(receiptId, cancellationToken).ConfigureAwait(false);
if (receipt is null)
{
Environment.ExitCode = 5;
Console.WriteLine(json
? JsonSerializer.Serialize(new { error = "not_found", receiptId }, CompactJson)
: $"✖ Receipt {receiptId} not found");
return;
}
if (json)
{
Console.WriteLine(JsonSerializer.Serialize(receipt, CompactJson));
}
else
{
Console.WriteLine($"Receipt {receipt.ReceiptId} | Severity {receipt.Severity} | Effective {receipt.Scores.EffectiveScore:0.0}");
Console.WriteLine($"Created {receipt.CreatedAt:u} by {receipt.CreatedBy}");
Console.WriteLine($"Vector: {receipt.VectorString}");
}
Environment.ExitCode = 0;
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to fetch CVSS receipt {ReceiptId}", receiptId);
Environment.ExitCode = 1;
}
}
public static async Task HandleCvssHistoryAsync(
IServiceProvider services,
string receiptId,
bool json,
bool verbose,
CancellationToken cancellationToken)
{
await using var scope = services.CreateAsyncScope();
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-history");
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
try
{
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
var history = await client.GetHistoryAsync(receiptId, cancellationToken).ConfigureAwait(false);
if (json)
{
Console.WriteLine(JsonSerializer.Serialize(history, CompactJson));
}
else
{
if (history.Count == 0)
{
Console.WriteLine("(no history)");
}
else
{
foreach (var entry in history.OrderBy(h => h.Timestamp))
{
Console.WriteLine($"{entry.Timestamp:u} | {entry.Actor} | {entry.ChangeType} {entry.Field} => {entry.NewValue ?? ""} ({entry.Reason})");
}
}
}
Environment.ExitCode = 0;
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to fetch CVSS receipt history {ReceiptId}", receiptId);
Environment.ExitCode = 1;
}
}
public static async Task HandleCvssExportAsync(
IServiceProvider services,
string receiptId,
string format,
string? output,
bool verbose,
CancellationToken cancellationToken)
{
await using var scope = services.CreateAsyncScope();
var logger = scope.ServiceProvider.GetRequiredService<ILoggerFactory>().CreateLogger("cvss-export");
var verbosity = scope.ServiceProvider.GetRequiredService<VerbosityState>();
verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
try
{
var client = scope.ServiceProvider.GetRequiredService<ICvssClient>();
var receipt = await client.GetReceiptAsync(receiptId, cancellationToken).ConfigureAwait(false);
if (receipt is null)
{
Environment.ExitCode = 5;
Console.WriteLine($"✖ Receipt {receiptId} not found");
return;
}
if (!string.Equals(format, "json", StringComparison.OrdinalIgnoreCase))
{
Environment.ExitCode = 9;
Console.WriteLine("Only json export is supported at this time.");
return;
}
var targetPath = string.IsNullOrWhiteSpace(output)
? $"cvss-receipt-{receipt.ReceiptId}.json"
: output!;
var jsonPayload = JsonSerializer.Serialize(receipt, CompactJson);
await File.WriteAllTextAsync(targetPath, jsonPayload, cancellationToken).ConfigureAwait(false);
Console.WriteLine($"✔ Exported receipt to {targetPath}");
Environment.ExitCode = 0;
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to export CVSS receipt {ReceiptId}", receiptId);
Environment.ExitCode = 1;
}
}
private static async Task VerifyBundleAsync(string path, ILogger logger, CancellationToken cancellationToken)
{
@@ -29676,4 +29676,105 @@ stella policy test {policyName}.stella
}
#endregion
#region DevPortal Commands
/// <summary>
/// Handler for 'stella devportal verify' command (DVOFF-64-002).
/// Verifies integrity of a DevPortal/evidence bundle before import.
/// Exit codes: 0 success, 2 checksum mismatch, 3 signature failure, 4 TSA missing, 5 unexpected.
/// </summary>
public static async Task<int> HandleDevPortalVerifyAsync(
IServiceProvider services,
string bundlePath,
bool offline,
bool emitJson,
bool verbose,
CancellationToken cancellationToken)
{
await using var scope = services.CreateAsyncScope();
var loggerFactory = scope.ServiceProvider.GetRequiredService<ILoggerFactory>();
var logger = loggerFactory.CreateLogger<DevPortalBundleVerifier>();
var verifier = new DevPortalBundleVerifier(logger);
using var activity = CliActivitySource.Instance.StartActivity("cli.devportal.verify", System.Diagnostics.ActivityKind.Client);
activity?.SetTag("stellaops.cli.command", "devportal verify");
activity?.SetTag("stellaops.cli.devportal.offline", offline);
using var duration = CliMetrics.MeasureCommandDuration("devportal verify");
try
{
var resolvedPath = Path.GetFullPath(bundlePath);
if (verbose)
{
AnsiConsole.MarkupLine($"[grey]Verifying bundle: {Markup.Escape(resolvedPath)}[/]");
if (offline)
{
AnsiConsole.MarkupLine("[grey]Mode: offline (TSA verification skipped)[/]");
}
}
var result = await verifier.VerifyBundleAsync(resolvedPath, offline, cancellationToken)
.ConfigureAwait(false);
activity?.SetTag("stellaops.cli.devportal.status", result.Status);
activity?.SetTag("stellaops.cli.devportal.exit_code", (int)result.ExitCode);
if (emitJson)
{
Console.WriteLine(result.ToJson());
}
else
{
if (result.ExitCode == DevPortalVerifyExitCode.Success)
{
AnsiConsole.MarkupLine("[green]Bundle verification successful.[/]");
AnsiConsole.MarkupLine($" Bundle ID: {Markup.Escape(result.BundleId ?? "unknown")}");
AnsiConsole.MarkupLine($" Root Hash: {Markup.Escape(result.RootHash ?? "unknown")}");
AnsiConsole.MarkupLine($" Entries: {result.Entries}");
AnsiConsole.MarkupLine($" Created: {result.CreatedAt?.ToString("O") ?? "unknown"}");
AnsiConsole.MarkupLine($" Portable: {(result.Portable ? "yes" : "no")}");
}
else
{
AnsiConsole.MarkupLine($"[red]Bundle verification failed:[/] {Markup.Escape(result.ErrorMessage ?? "Unknown error")}");
if (!string.IsNullOrEmpty(result.ErrorDetail))
{
AnsiConsole.MarkupLine($" [grey]{Markup.Escape(result.ErrorDetail)}[/]");
}
}
}
return (int)result.ExitCode;
}
catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
{
if (!emitJson)
{
AnsiConsole.MarkupLine("[yellow]Operation cancelled.[/]");
}
return 130;
}
catch (Exception ex)
{
logger.LogError(ex, "Failed to verify bundle");
if (emitJson)
{
var errorResult = DevPortalBundleVerificationResult.Failed(
DevPortalVerifyExitCode.Unexpected,
ex.Message);
Console.WriteLine(errorResult.ToJson());
}
else
{
AnsiConsole.MarkupLine($"[red]Error:[/] {Markup.Escape(ex.Message)}");
}
return (int)DevPortalVerifyExitCode.Unexpected;
}
}
#endregion
}

View File

@@ -0,0 +1,533 @@
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Cli.Services.Models;
namespace StellaOps.Cli.Services;
/// <summary>
/// Verifier for attestation bundles exported from the Export Center.
/// Per EXPORT-ATTEST-75-001.
/// </summary>
internal sealed class AttestationBundleVerifier : IAttestationBundleVerifier
{
private const string DsseEnvelopeFileName = "attestation.dsse.json";
private const string StatementFileName = "statement.json";
private const string TransparencyFileName = "transparency.ndjson";
private const string MetadataFileName = "metadata.json";
private const string ChecksumsFileName = "checksums.txt";
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true,
WriteIndented = true,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
private readonly ILogger<AttestationBundleVerifier> _logger;
public AttestationBundleVerifier(ILogger<AttestationBundleVerifier> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<AttestationBundleVerifyResult> VerifyAsync(
AttestationBundleVerifyOptions options,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(options);
ArgumentException.ThrowIfNullOrWhiteSpace(options.FilePath);
_logger.LogDebug("Verifying attestation bundle at {FilePath}, offline={Offline}",
options.FilePath, options.Offline);
// Step 1: Check bundle exists
if (!File.Exists(options.FilePath))
{
return CreateFailedResult(
AttestationBundleExitCodes.FileNotFound,
"Bundle file not found",
options.FilePath);
}
// Step 2: Verify SHA-256 against .sha256 file if present
var sha256Path = options.FilePath + ".sha256";
if (File.Exists(sha256Path))
{
var checksumResult = await VerifyBundleChecksumAsync(options.FilePath, sha256Path, cancellationToken)
.ConfigureAwait(false);
if (!checksumResult.IsValid)
{
return CreateFailedResult(
AttestationBundleExitCodes.ChecksumMismatch,
"SHA-256 checksum mismatch",
options.FilePath,
$"Expected: {checksumResult.ExpectedHash}, Computed: {checksumResult.ActualHash}");
}
}
else
{
_logger.LogDebug("No co-located .sha256 file found for external checksum verification");
}
// Step 3: Extract and parse bundle contents
BundleContents contents;
try
{
contents = await ExtractBundleContentsAsync(options.FilePath, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex) when (ex is InvalidDataException or JsonException or IOException)
{
_logger.LogError(ex, "Failed to extract bundle contents");
return CreateFailedResult(
AttestationBundleExitCodes.FormatError,
"Failed to extract bundle contents",
options.FilePath,
ex.Message);
}
// Step 4: Verify internal checksums from checksums.txt
if (contents.ChecksumsText is not null)
{
var internalCheckResult = VerifyInternalChecksums(contents);
if (!internalCheckResult.Success)
{
return CreateFailedResult(
AttestationBundleExitCodes.ChecksumMismatch,
"Internal checksum verification failed",
options.FilePath,
internalCheckResult.ErrorMessage);
}
}
// Step 5: Verify DSSE signature
var signatureValid = VerifyDsseSignature(contents, options.Offline, out var signatureError);
if (!signatureValid && !string.IsNullOrEmpty(signatureError))
{
return CreateFailedResult(
AttestationBundleExitCodes.SignatureFailure,
"DSSE signature verification failed",
options.FilePath,
signatureError);
}
// Step 6: Check transparency entries (only if not offline and verifyTransparency is true)
if (!options.Offline && options.VerifyTransparency)
{
if (string.IsNullOrWhiteSpace(contents.TransparencyNdjson))
{
return CreateFailedResult(
AttestationBundleExitCodes.MissingTransparency,
"Transparency log entry missing",
options.FilePath,
"Bundle requires transparency.ndjson when not in offline mode");
}
}
// Step 7: Build success result
var metadata = contents.Metadata;
var subjects = ExtractSubjects(contents);
return new AttestationBundleVerifyResult(
Success: true,
Status: "verified",
ExportId: metadata?.ExportId,
AttestationId: metadata?.AttestationId,
RootHash: FormatRootHash(metadata?.RootHash),
Subjects: subjects,
PredicateType: ExtractPredicateType(contents),
StatementVersion: metadata?.StatementVersion,
BundlePath: options.FilePath,
ExitCode: AttestationBundleExitCodes.Success);
}
public async Task<AttestationBundleImportResult> ImportAsync(
AttestationBundleImportOptions options,
CancellationToken cancellationToken)
{
ArgumentNullException.ThrowIfNull(options);
ArgumentException.ThrowIfNullOrWhiteSpace(options.FilePath);
_logger.LogDebug("Importing attestation bundle from {FilePath}", options.FilePath);
// First verify the bundle
var verifyOptions = new AttestationBundleVerifyOptions(
options.FilePath,
options.Offline,
options.VerifyTransparency,
options.TrustRootPath);
var verifyResult = await VerifyAsync(verifyOptions, cancellationToken).ConfigureAwait(false);
if (!verifyResult.Success)
{
return new AttestationBundleImportResult(
Success: false,
Status: "verification_failed",
AttestationId: verifyResult.AttestationId,
TenantId: null,
Namespace: options.Namespace,
RootHash: verifyResult.RootHash,
ErrorMessage: verifyResult.ErrorMessage,
ExitCode: verifyResult.ExitCode);
}
// Extract metadata for import
BundleContents contents;
try
{
contents = await ExtractBundleContentsAsync(options.FilePath, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
return new AttestationBundleImportResult(
Success: false,
Status: "extraction_failed",
AttestationId: null,
TenantId: null,
Namespace: options.Namespace,
RootHash: null,
ErrorMessage: ex.Message,
ExitCode: AttestationBundleExitCodes.ImportFailed);
}
var metadata = contents.Metadata;
var tenantId = options.Tenant ?? metadata?.TenantId;
// Import is a local-only operation for air-gap scenarios
// The actual import to backend would happen via separate API call
_logger.LogInformation("Attestation bundle imported: {AttestationId} for tenant {TenantId}",
metadata?.AttestationId, tenantId);
return new AttestationBundleImportResult(
Success: true,
Status: "imported",
AttestationId: metadata?.AttestationId,
TenantId: tenantId,
Namespace: options.Namespace,
RootHash: FormatRootHash(metadata?.RootHash),
ExitCode: AttestationBundleExitCodes.Success);
}
private async Task<(bool IsValid, string? ExpectedHash, string? ActualHash)> VerifyBundleChecksumAsync(
string bundlePath,
string sha256Path,
CancellationToken cancellationToken)
{
// Read expected hash from .sha256 file
var content = await File.ReadAllTextAsync(sha256Path, cancellationToken).ConfigureAwait(false);
var expectedHash = content.Split(' ', StringSplitOptions.RemoveEmptyEntries).FirstOrDefault()?.Trim()?.ToLowerInvariant();
if (string.IsNullOrEmpty(expectedHash))
{
return (false, null, null);
}
// Compute actual hash
await using var stream = File.OpenRead(bundlePath);
var hashBytes = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
var actualHash = Convert.ToHexString(hashBytes).ToLowerInvariant();
return (string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase), expectedHash, actualHash);
}
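For context, the co-located .sha256 sidecar this step consumes follows the common sha256sum layout (lowercase hex digest, whitespace, file name). A minimal sketch of producing one, with a hypothetical path:
// Sketch: write the sha256sum-style sidecar that VerifyBundleChecksumAsync accepts.
var bundlePath = "./attestation-bundle.tgz"; // hypothetical path
var digest = Convert.ToHexString(SHA256.HashData(File.ReadAllBytes(bundlePath))).ToLowerInvariant();
File.WriteAllText(bundlePath + ".sha256", $"{digest}  {Path.GetFileName(bundlePath)}");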
private async Task<BundleContents> ExtractBundleContentsAsync(
string bundlePath,
CancellationToken cancellationToken)
{
var contents = new BundleContents();
await using var fileStream = File.OpenRead(bundlePath);
await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
using var tarReader = new TarReader(gzipStream);
TarEntry? entry;
while ((entry = await tarReader.GetNextEntryAsync(cancellationToken: cancellationToken).ConfigureAwait(false)) is not null)
{
if (entry.EntryType != TarEntryType.RegularFile || entry.DataStream is null)
{
continue;
}
using var memoryStream = new MemoryStream();
await entry.DataStream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false);
var data = memoryStream.ToArray();
var text = System.Text.Encoding.UTF8.GetString(data);
switch (entry.Name)
{
case DsseEnvelopeFileName:
contents.DsseEnvelopeJson = text;
contents.DsseEnvelopeBytes = data;
contents.DsseEnvelope = JsonSerializer.Deserialize<DsseEnvelope>(text, SerializerOptions);
break;
case StatementFileName:
contents.StatementJson = text;
contents.StatementBytes = data;
contents.Statement = JsonSerializer.Deserialize<InTotoStatement>(text, SerializerOptions);
break;
case TransparencyFileName:
contents.TransparencyNdjson = text;
contents.TransparencyBytes = data;
break;
case MetadataFileName:
contents.MetadataJson = text;
contents.MetadataBytes = data;
contents.Metadata = JsonSerializer.Deserialize<AttestationBundleMetadata>(text, SerializerOptions);
break;
case ChecksumsFileName:
contents.ChecksumsText = text;
break;
}
}
return contents;
}
private (bool Success, string? ErrorMessage) VerifyInternalChecksums(BundleContents contents)
{
if (string.IsNullOrWhiteSpace(contents.ChecksumsText))
{
return (true, null);
}
var lines = contents.ChecksumsText.Split('\n', StringSplitOptions.RemoveEmptyEntries);
foreach (var line in lines)
{
// Skip comments
if (line.TrimStart().StartsWith('#'))
{
continue;
}
// Parse "hash filename" format
var parts = line.Split(new[] { ' ' }, 2, StringSplitOptions.RemoveEmptyEntries);
if (parts.Length != 2)
{
continue;
}
var expectedHash = parts[0].Trim().ToLowerInvariant();
var fileName = parts[1].Trim();
byte[]? fileBytes = fileName switch
{
DsseEnvelopeFileName => contents.DsseEnvelopeBytes,
StatementFileName => contents.StatementBytes,
TransparencyFileName => contents.TransparencyBytes,
MetadataFileName => contents.MetadataBytes,
_ => null
};
if (fileBytes is null)
{
// File not found in bundle - could be optional
if (fileName == TransparencyFileName)
{
continue; // transparency.ndjson is optional
}
return (false, $"File '{fileName}' referenced in checksums but not found in bundle");
}
var actualHash = Convert.ToHexString(SHA256.HashData(fileBytes)).ToLowerInvariant();
if (!string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase))
{
return (false, $"Checksum mismatch for '{fileName}': expected {expectedHash}, got {actualHash}");
}
}
return (true, null);
}
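For context, the checksums.txt format accepted by this parser is one "sha256-hex file-name" pair per line, with '#' comment lines skipped; the digests below are placeholders:
# sha256 of each bundle member (hex, lowercase)
<64-hex-digest>  attestation.dsse.json
<64-hex-digest>  statement.json
<64-hex-digest>  metadata.json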
private bool VerifyDsseSignature(BundleContents contents, bool offline, out string? error)
{
error = null;
if (contents.DsseEnvelope is null || string.IsNullOrEmpty(contents.DsseEnvelope.Payload))
{
error = "DSSE envelope not found or has no payload";
return false;
}
// Verify payload matches statement
if (contents.StatementJson is not null)
{
try
{
var payloadBytes = Convert.FromBase64String(contents.DsseEnvelope.Payload);
var payloadJson = System.Text.Encoding.UTF8.GetString(payloadBytes);
// Compare parsed JSON to handle whitespace differences
using var statementDoc = JsonDocument.Parse(contents.StatementJson);
using var payloadDoc = JsonDocument.Parse(payloadJson);
// Check _type field matches
var statementType = statementDoc.RootElement.TryGetProperty("_type", out var sType)
? sType.GetString()
: null;
var payloadType = payloadDoc.RootElement.TryGetProperty("_type", out var pType)
? pType.GetString()
: null;
if (!string.Equals(statementType, payloadType, StringComparison.Ordinal))
{
error = "DSSE payload does not match statement _type";
return false;
}
}
catch (FormatException ex)
{
error = $"Invalid DSSE payload encoding: {ex.Message}";
return false;
}
catch (JsonException ex)
{
error = $"Invalid DSSE payload JSON: {ex.Message}";
return false;
}
}
// In offline mode, we don't verify the actual cryptographic signature
// (would require access to signing keys/certificates)
if (offline)
{
_logger.LogDebug("Offline mode: skipping cryptographic signature verification");
return true;
}
// Check that signatures exist
if (contents.DsseEnvelope.Signatures is null || contents.DsseEnvelope.Signatures.Count == 0)
{
error = "DSSE envelope has no signatures";
return false;
}
// Online signature verification would require access to trust roots
// For now, we trust the signature if payload matches and signatures exist
return true;
}
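The comment above notes that real cryptographic verification is deferred until trust roots are wired in. A minimal sketch of what that step could look like for an ECDSA P-256 key, verifying over pre-computed DSSE PAE bytes (key material, key selection, and DER signature encoding are all assumptions, not part of this change):
// Sketch only: returns true when at least one DSSE signature verifies over the PAE bytes.
private static bool VerifySignatures(byte[] pae, DsseEnvelope envelope, byte[] subjectPublicKeyInfo)
{
using var ecdsa = ECDsa.Create();
ecdsa.ImportSubjectPublicKeyInfo(subjectPublicKeyInfo, out _);
// DER-encoded signatures assumed; use a different DSASignatureFormat for raw r||s signers.
return envelope.Signatures?.Any(s =>
s.Sig is not null &&
ecdsa.VerifyData(pae, Convert.FromBase64String(s.Sig), HashAlgorithmName.SHA256,
DSASignatureFormat.Rfc3279DerSequence)) == true;
}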
private static IReadOnlyList<string>? ExtractSubjects(BundleContents contents)
{
if (contents.Statement?.Subject is null || contents.Statement.Subject.Count == 0)
{
// Fall back to metadata subjects
if (contents.Metadata?.SubjectDigests is not null)
{
return contents.Metadata.SubjectDigests
.Select(s => $"{s.Name}@{s.Algorithm}:{s.Digest}")
.ToList();
}
return null;
}
return contents.Statement.Subject
.Select(s =>
{
var digest = s.Digest?.FirstOrDefault();
return digest.HasValue
? $"{s.Name}@{digest.Value.Key}:{digest.Value.Value}"
: s.Name ?? "unknown";
})
.ToList();
}
private static string? ExtractPredicateType(BundleContents contents)
{
return contents.Statement?.PredicateType ?? contents.DsseEnvelope?.PayloadType;
}
private static string? FormatRootHash(string? rootHash)
{
if (string.IsNullOrWhiteSpace(rootHash))
{
return null;
}
return rootHash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
? rootHash
: $"sha256:{rootHash}";
}
private static AttestationBundleVerifyResult CreateFailedResult(
int exitCode,
string message,
string bundlePath,
string? detail = null)
=> new(
Success: false,
Status: "failed",
ExportId: null,
AttestationId: null,
RootHash: null,
Subjects: null,
PredicateType: null,
StatementVersion: null,
BundlePath: bundlePath,
ErrorMessage: detail ?? message,
ExitCode: exitCode);
private sealed class BundleContents
{
public string? DsseEnvelopeJson { get; set; }
public byte[]? DsseEnvelopeBytes { get; set; }
public DsseEnvelope? DsseEnvelope { get; set; }
public string? StatementJson { get; set; }
public byte[]? StatementBytes { get; set; }
public InTotoStatement? Statement { get; set; }
public string? TransparencyNdjson { get; set; }
public byte[]? TransparencyBytes { get; set; }
public string? MetadataJson { get; set; }
public byte[]? MetadataBytes { get; set; }
public AttestationBundleMetadata? Metadata { get; set; }
public string? ChecksumsText { get; set; }
}
private sealed class DsseEnvelope
{
public string? PayloadType { get; set; }
public string? Payload { get; set; }
public IReadOnlyList<DsseSignature>? Signatures { get; set; }
}
private sealed class DsseSignature
{
public string? KeyId { get; set; }
public string? Sig { get; set; }
}
private sealed class InTotoStatement
{
public string? Type { get; set; }
public string? PredicateType { get; set; }
public IReadOnlyList<InTotoSubject>? Subject { get; set; }
}
private sealed class InTotoSubject
{
public string? Name { get; set; }
public Dictionary<string, string>? Digest { get; set; }
}
private sealed record AttestationBundleMetadata(
string? Version,
string? ExportId,
string? AttestationId,
string? TenantId,
DateTimeOffset? CreatedAtUtc,
string? RootHash,
string? SourceUri,
string? StatementVersion,
IReadOnlyList<AttestationSubjectDigest>? SubjectDigests);
private sealed record AttestationSubjectDigest(
string? Name,
string? Digest,
string? Algorithm);
}

View File

@@ -0,0 +1,380 @@
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
namespace StellaOps.Cli.Services;
/// <summary>
/// Verifier for EvidenceLocker sealed bundles used in DevPortal offline verification.
/// Per DVOFF-64-002.
/// </summary>
internal sealed class DevPortalBundleVerifier : IDevPortalBundleVerifier
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true,
WriteIndented = true,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
private readonly ILogger<DevPortalBundleVerifier> _logger;
public DevPortalBundleVerifier(ILogger<DevPortalBundleVerifier> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<DevPortalBundleVerificationResult> VerifyBundleAsync(
string bundlePath,
bool offline,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(bundlePath);
_logger.LogDebug("Verifying DevPortal bundle at {BundlePath}, offline={Offline}", bundlePath, offline);
// Step 1: Check bundle exists
if (!File.Exists(bundlePath))
{
return DevPortalBundleVerificationResult.Failed(
DevPortalVerifyExitCode.Unexpected,
"Bundle file not found",
bundlePath);
}
// Step 2: Validate SHA-256 against .sha256 file if present
var sha256Path = bundlePath + ".sha256";
if (File.Exists(sha256Path))
{
var checksumResult = await VerifyBundleChecksumAsync(bundlePath, sha256Path, cancellationToken)
.ConfigureAwait(false);
if (!checksumResult.IsValid)
{
return DevPortalBundleVerificationResult.Failed(
DevPortalVerifyExitCode.ChecksumMismatch,
"SHA-256 checksum mismatch",
$"Expected: {checksumResult.ExpectedHash}, Computed: {checksumResult.ActualHash}");
}
}
else
{
_logger.LogDebug("No .sha256 file found, skipping checksum verification");
}
// Step 3: Extract and parse bundle contents
BundleContents contents;
try
{
contents = await ExtractBundleContentsAsync(bundlePath, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex) when (ex is InvalidDataException or JsonException or IOException)
{
_logger.LogError(ex, "Failed to extract bundle contents");
return DevPortalBundleVerificationResult.Failed(
DevPortalVerifyExitCode.Unexpected,
"Failed to extract bundle contents",
ex.Message);
}
// Step 4: Verify DSSE signature
var signatureValid = VerifyDsseSignature(contents, offline, out var signatureError);
if (!signatureValid && !string.IsNullOrEmpty(signatureError))
{
return DevPortalBundleVerificationResult.Failed(
DevPortalVerifyExitCode.SignatureFailure,
"DSSE signature verification failed",
signatureError);
}
// Step 5: Verify TSA (only if not offline)
if (!offline && contents.Signature is not null)
{
if (string.IsNullOrEmpty(contents.Signature.TimestampAuthority) ||
string.IsNullOrEmpty(contents.Signature.TimestampToken))
{
return DevPortalBundleVerificationResult.Failed(
DevPortalVerifyExitCode.TsaMissing,
"RFC3161 timestamp missing",
"Bundle requires timestamping when not in offline mode");
}
}
// Step 6: Build success result
return new DevPortalBundleVerificationResult
{
Status = "verified",
BundleId = contents.Manifest?.BundleId ?? contents.BundleMetadata?.BundleId,
RootHash = contents.BundleMetadata?.RootHash is not null
? $"sha256:{contents.BundleMetadata.RootHash}"
: null,
Entries = contents.Manifest?.Entries?.Count ?? 0,
CreatedAt = contents.Manifest?.CreatedAt ?? contents.BundleMetadata?.CreatedAt,
Portable = contents.BundleMetadata?.PortableGeneratedAt is not null,
ExitCode = DevPortalVerifyExitCode.Success
};
}
private async Task<(bool IsValid, string? ExpectedHash, string? ActualHash)> VerifyBundleChecksumAsync(
string bundlePath,
string sha256Path,
CancellationToken cancellationToken)
{
// Read expected hash from .sha256 file
var content = await File.ReadAllTextAsync(sha256Path, cancellationToken).ConfigureAwait(false);
var expectedHash = content.Split(' ', StringSplitOptions.RemoveEmptyEntries).FirstOrDefault()?.Trim()?.ToLowerInvariant();
if (string.IsNullOrEmpty(expectedHash))
{
return (false, null, null);
}
// Compute actual hash
await using var stream = File.OpenRead(bundlePath);
var hashBytes = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
var actualHash = Convert.ToHexString(hashBytes).ToLowerInvariant();
return (string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase), expectedHash, actualHash);
}
private async Task<BundleContents> ExtractBundleContentsAsync(
string bundlePath,
CancellationToken cancellationToken)
{
var contents = new BundleContents();
await using var fileStream = File.OpenRead(bundlePath);
await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
using var tarReader = new TarReader(gzipStream);
TarEntry? entry;
while ((entry = await tarReader.GetNextEntryAsync(cancellationToken: cancellationToken).ConfigureAwait(false)) is not null)
{
if (entry.EntryType != TarEntryType.RegularFile || entry.DataStream is null)
{
continue;
}
using var memoryStream = new MemoryStream();
await entry.DataStream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false);
var json = System.Text.Encoding.UTF8.GetString(memoryStream.ToArray());
switch (entry.Name)
{
case "manifest.json":
contents.ManifestJson = json;
contents.Manifest = JsonSerializer.Deserialize<BundleManifest>(json, SerializerOptions);
break;
case "signature.json":
contents.SignatureJson = json;
contents.Signature = JsonSerializer.Deserialize<BundleSignature>(json, SerializerOptions);
break;
case "bundle.json":
contents.BundleMetadataJson = json;
contents.BundleMetadata = JsonSerializer.Deserialize<BundleMetadataDocument>(json, SerializerOptions);
break;
case "checksums.txt":
contents.ChecksumsText = json;
break;
}
}
return contents;
}
private bool VerifyDsseSignature(BundleContents contents, bool offline, out string? error)
{
error = null;
if (contents.Signature is null || string.IsNullOrEmpty(contents.Signature.Payload))
{
error = "Signature not found in bundle";
return false;
}
// Verify payload matches manifest
if (contents.ManifestJson is not null)
{
try
{
var payloadBytes = Convert.FromBase64String(contents.Signature.Payload);
var payloadJson = System.Text.Encoding.UTF8.GetString(payloadBytes);
// Compare parsed JSON to handle whitespace differences
using var manifestDoc = JsonDocument.Parse(contents.ManifestJson);
using var payloadDoc = JsonDocument.Parse(payloadJson);
var manifestBundleId = manifestDoc.RootElement.TryGetProperty("bundleId", out var mId)
? mId.GetString()
: null;
var payloadBundleId = payloadDoc.RootElement.TryGetProperty("bundleId", out var pId)
? pId.GetString()
: null;
if (!string.Equals(manifestBundleId, payloadBundleId, StringComparison.OrdinalIgnoreCase))
{
error = "Signature payload does not match manifest bundleId";
return false;
}
}
catch (FormatException ex)
{
error = $"Invalid signature payload encoding: {ex.Message}";
return false;
}
catch (JsonException ex)
{
error = $"Invalid signature payload JSON: {ex.Message}";
return false;
}
}
// In offline mode, we don't verify the actual cryptographic signature
// (would require access to signing keys/certificates)
if (offline)
{
_logger.LogDebug("Offline mode: skipping cryptographic signature verification");
return true;
}
// Online signature verification would go here
// For now, we trust the signature if payload matches
return true;
}
private sealed class BundleContents
{
public string? ManifestJson { get; set; }
public BundleManifest? Manifest { get; set; }
public string? SignatureJson { get; set; }
public BundleSignature? Signature { get; set; }
public string? BundleMetadataJson { get; set; }
public BundleMetadataDocument? BundleMetadata { get; set; }
public string? ChecksumsText { get; set; }
}
private sealed class BundleManifest
{
public string? BundleId { get; set; }
public string? TenantId { get; set; }
public int Kind { get; set; }
public DateTimeOffset? CreatedAt { get; set; }
public Dictionary<string, string>? Metadata { get; set; }
public List<BundleManifestEntry>? Entries { get; set; }
}
private sealed class BundleManifestEntry
{
public string? Section { get; set; }
public string? CanonicalPath { get; set; }
public string? Sha256 { get; set; }
public long SizeBytes { get; set; }
public string? MediaType { get; set; }
}
private sealed class BundleSignature
{
public string? PayloadType { get; set; }
public string? Payload { get; set; }
public string? Signature { get; set; }
public string? KeyId { get; set; }
public string? Algorithm { get; set; }
public string? Provider { get; set; }
public DateTimeOffset? SignedAt { get; set; }
public DateTimeOffset? TimestampedAt { get; set; }
public string? TimestampAuthority { get; set; }
public string? TimestampToken { get; set; }
}
private sealed class BundleMetadataDocument
{
public string? BundleId { get; set; }
public string? TenantId { get; set; }
public int Kind { get; set; }
public int Status { get; set; }
public string? RootHash { get; set; }
public string? StorageKey { get; set; }
public DateTimeOffset? CreatedAt { get; set; }
public DateTimeOffset? SealedAt { get; set; }
public DateTimeOffset? PortableGeneratedAt { get; set; }
}
}
/// <summary>
/// Exit codes for DevPortal bundle verification per DVOFF-64-002.
/// </summary>
public enum DevPortalVerifyExitCode
{
/// <summary>Verification successful.</summary>
Success = 0,
/// <summary>SHA-256 checksum mismatch.</summary>
ChecksumMismatch = 2,
/// <summary>DSSE signature verification failed.</summary>
SignatureFailure = 3,
/// <summary>RFC3161 timestamp missing (when not offline).</summary>
TsaMissing = 4,
/// <summary>Unexpected error.</summary>
Unexpected = 5
}
/// <summary>
/// Result of DevPortal bundle verification.
/// </summary>
public sealed class DevPortalBundleVerificationResult
{
public string Status { get; set; } = "failed";
public string? BundleId { get; set; }
public string? RootHash { get; set; }
public int Entries { get; set; }
public DateTimeOffset? CreatedAt { get; set; }
public bool Portable { get; set; }
public DevPortalVerifyExitCode ExitCode { get; set; } = DevPortalVerifyExitCode.Unexpected;
public string? ErrorMessage { get; set; }
public string? ErrorDetail { get; set; }
public static DevPortalBundleVerificationResult Failed(
DevPortalVerifyExitCode exitCode,
string message,
string? detail = null)
=> new()
{
Status = "failed",
ExitCode = exitCode,
ErrorMessage = message,
ErrorDetail = detail
};
public string ToJson()
{
var options = new JsonSerializerOptions
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
};
// Build output with sorted keys
var output = new SortedDictionary<string, object?>(StringComparer.Ordinal);
if (BundleId is not null)
output["bundleId"] = BundleId;
if (CreatedAt.HasValue)
output["createdAt"] = CreatedAt.Value.ToString("O");
output["entries"] = Entries;
if (ErrorDetail is not null)
output["errorDetail"] = ErrorDetail;
if (ErrorMessage is not null)
output["errorMessage"] = ErrorMessage;
output["portable"] = Portable;
if (RootHash is not null)
output["rootHash"] = RootHash;
output["status"] = Status;
return JsonSerializer.Serialize(output, options);
}
}
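For reference, a successful --json run serialized by ToJson (keys ordinal-sorted, nulls omitted, compact output) would look like the following; all values are placeholders:
{"bundleId":"<bundle-id>","createdAt":"2025-01-01T00:00:00.0000000+00:00","entries":3,"portable":false,"rootHash":"sha256:<hex>","status":"verified"}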

View File

@@ -0,0 +1,29 @@
using StellaOps.Cli.Services.Models;
namespace StellaOps.Cli.Services;
/// <summary>
/// Interface for attestation bundle verification.
/// </summary>
public interface IAttestationBundleVerifier
{
/// <summary>
/// Verifies an attestation bundle exported from the Export Center.
/// </summary>
/// <param name="options">Verification options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Verification result with status and exit code.</returns>
Task<AttestationBundleVerifyResult> VerifyAsync(
AttestationBundleVerifyOptions options,
CancellationToken cancellationToken);
/// <summary>
/// Imports an attestation bundle into the local system.
/// </summary>
/// <param name="options">Import options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Import result with status and exit code.</returns>
Task<AttestationBundleImportResult> ImportAsync(
AttestationBundleImportOptions options,
CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,19 @@
namespace StellaOps.Cli.Services;
/// <summary>
/// Interface for DevPortal bundle verification.
/// </summary>
public interface IDevPortalBundleVerifier
{
/// <summary>
/// Verifies a DevPortal/EvidenceLocker sealed bundle.
/// </summary>
/// <param name="bundlePath">Path to the bundle .tgz file.</param>
/// <param name="offline">If true, skip TSA verification and online checks.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Verification result with status and exit code.</returns>
Task<DevPortalBundleVerificationResult> VerifyBundleAsync(
string bundlePath,
bool offline,
CancellationToken cancellationToken);
}

View File

@@ -0,0 +1,126 @@
using System.Text.Json.Serialization;
namespace StellaOps.Cli.Services.Models;
/// <summary>
/// Options for attestation bundle verification.
/// </summary>
public sealed record AttestationBundleVerifyOptions(
string FilePath,
bool Offline = false,
bool VerifyTransparency = true,
string? TrustRootPath = null);
/// <summary>
/// Options for attestation bundle import.
/// </summary>
public sealed record AttestationBundleImportOptions(
string FilePath,
string? Tenant = null,
string? Namespace = null,
bool Offline = false,
bool VerifyTransparency = true,
string? TrustRootPath = null);
/// <summary>
/// Result of attestation bundle verification.
/// </summary>
public sealed record AttestationBundleVerifyResult(
bool Success,
string Status,
string? ExportId,
string? AttestationId,
string? RootHash,
IReadOnlyList<string>? Subjects,
string? PredicateType,
string? StatementVersion,
string BundlePath,
string? ErrorMessage = null,
int ExitCode = 0);
/// <summary>
/// Result of attestation bundle import.
/// </summary>
public sealed record AttestationBundleImportResult(
bool Success,
string Status,
string? AttestationId,
string? TenantId,
string? Namespace,
string? RootHash,
string? ErrorMessage = null,
int ExitCode = 0);
/// <summary>
/// JSON output for attestation bundle verify command.
/// </summary>
public sealed record AttestationBundleVerifyJson(
[property: JsonPropertyName("status")] string Status,
[property: JsonPropertyName("exportId")] string? ExportId,
[property: JsonPropertyName("attestationId")] string? AttestationId,
[property: JsonPropertyName("rootHash")] string? RootHash,
[property: JsonPropertyName("subjects")] IReadOnlyList<string>? Subjects,
[property: JsonPropertyName("predicateType")] string? PredicateType,
[property: JsonPropertyName("bundlePath")] string BundlePath);
/// <summary>
/// JSON output for attestation bundle import command.
/// </summary>
public sealed record AttestationBundleImportJson(
[property: JsonPropertyName("status")] string Status,
[property: JsonPropertyName("attestationId")] string? AttestationId,
[property: JsonPropertyName("tenantId")] string? TenantId,
[property: JsonPropertyName("namespace")] string? Namespace,
[property: JsonPropertyName("rootHash")] string? RootHash);
/// <summary>
/// Exit codes for attestation bundle commands.
/// </summary>
public static class AttestationBundleExitCodes
{
/// <summary>Success.</summary>
public const int Success = 0;
/// <summary>General failure.</summary>
public const int GeneralFailure = 1;
/// <summary>Checksum mismatch.</summary>
public const int ChecksumMismatch = 2;
/// <summary>DSSE signature verification failure.</summary>
public const int SignatureFailure = 3;
/// <summary>Missing required TSA/CT log entry.</summary>
public const int MissingTransparency = 4;
/// <summary>Archive or file format error.</summary>
public const int FormatError = 5;
/// <summary>File not found.</summary>
public const int FileNotFound = 6;
/// <summary>Import failed.</summary>
public const int ImportFailed = 7;
}
/// <summary>
/// Metadata parsed from an attestation bundle.
/// </summary>
internal sealed record AttestationBundleMetadata(
string? Version,
string? ExportId,
string? AttestationId,
string? TenantId,
DateTimeOffset? CreatedAtUtc,
string? RootHash,
string? SourceUri,
string? StatementVersion,
IReadOnlyList<AttestationBundleSubjectDigest>? SubjectDigests);
/// <summary>
/// Subject digest from attestation bundle metadata.
/// </summary>
internal sealed record AttestationBundleSubjectDigest(
string? Name,
string? Digest,
string? Algorithm);

View File

@@ -0,0 +1,406 @@
using System.Formats.Tar;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Cli.Services;
using StellaOps.Cli.Services.Models;
namespace StellaOps.Cli.Tests;
public sealed class AttestationBundleVerifierTests : IDisposable
{
private readonly string _tempDir;
private readonly AttestationBundleVerifier _verifier;
public AttestationBundleVerifierTests()
{
_tempDir = Path.Combine(Path.GetTempPath(), $"attest-bundle-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
_verifier = new AttestationBundleVerifier(NullLogger<AttestationBundleVerifier>.Instance);
}
public void Dispose()
{
if (Directory.Exists(_tempDir))
{
Directory.Delete(_tempDir, recursive: true);
}
}
[Fact]
public async Task VerifyAsync_FileNotFound_ReturnsFileNotFoundCode()
{
var options = new AttestationBundleVerifyOptions(
Path.Combine(_tempDir, "nonexistent.tgz"),
Offline: true);
var result = await _verifier.VerifyAsync(options, CancellationToken.None);
Assert.False(result.Success);
Assert.Equal(AttestationBundleExitCodes.FileNotFound, result.ExitCode);
}
[Fact]
public async Task VerifyAsync_ValidBundle_ReturnsSuccess()
{
var bundlePath = await CreateValidBundleAsync();
var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);
var result = await _verifier.VerifyAsync(options, CancellationToken.None);
Assert.True(result.Success);
Assert.Equal(AttestationBundleExitCodes.Success, result.ExitCode);
Assert.Equal("verified", result.Status);
}
[Fact]
public async Task VerifyAsync_ValidBundle_ReturnsMetadata()
{
var bundlePath = await CreateValidBundleAsync();
var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);
var result = await _verifier.VerifyAsync(options, CancellationToken.None);
Assert.True(result.Success);
Assert.NotNull(result.ExportId);
Assert.NotNull(result.AttestationId);
Assert.NotNull(result.RootHash);
Assert.StartsWith("sha256:", result.RootHash);
}
[Fact]
public async Task VerifyAsync_CorruptedArchive_ReturnsFormatError()
{
var bundlePath = Path.Combine(_tempDir, "corrupted.tgz");
await File.WriteAllBytesAsync(bundlePath, Encoding.UTF8.GetBytes("not a valid tgz"));
var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);
var result = await _verifier.VerifyAsync(options, CancellationToken.None);
Assert.False(result.Success);
Assert.Equal(AttestationBundleExitCodes.FormatError, result.ExitCode);
}
[Fact]
public async Task VerifyAsync_ChecksumMismatch_ReturnsChecksumMismatchCode()
{
var bundlePath = await CreateBundleWithBadChecksumAsync();
var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);
var result = await _verifier.VerifyAsync(options, CancellationToken.None);
Assert.False(result.Success);
Assert.Equal(AttestationBundleExitCodes.ChecksumMismatch, result.ExitCode);
}
[Fact]
public async Task VerifyAsync_ExternalChecksumMismatch_ReturnsChecksumMismatchCode()
{
var bundlePath = await CreateValidBundleAsync();
var checksumPath = bundlePath + ".sha256";
await File.WriteAllTextAsync(checksumPath, "0000000000000000000000000000000000000000000000000000000000000000 " + Path.GetFileName(bundlePath));
var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);
var result = await _verifier.VerifyAsync(options, CancellationToken.None);
Assert.False(result.Success);
Assert.Equal(AttestationBundleExitCodes.ChecksumMismatch, result.ExitCode);
}
[Fact]
public async Task VerifyAsync_MissingTransparency_WhenNotOffline_ReturnsMissingTransparencyCode()
{
var bundlePath = await CreateBundleWithoutTransparencyAsync();
var options = new AttestationBundleVerifyOptions(
bundlePath,
Offline: false,
VerifyTransparency: true);
var result = await _verifier.VerifyAsync(options, CancellationToken.None);
Assert.False(result.Success);
Assert.Equal(AttestationBundleExitCodes.MissingTransparency, result.ExitCode);
}
[Fact]
public async Task VerifyAsync_MissingTransparency_WhenOffline_ReturnsSuccess()
{
var bundlePath = await CreateBundleWithoutTransparencyAsync();
var options = new AttestationBundleVerifyOptions(
bundlePath,
Offline: true,
VerifyTransparency: true);
var result = await _verifier.VerifyAsync(options, CancellationToken.None);
Assert.True(result.Success);
Assert.Equal(AttestationBundleExitCodes.Success, result.ExitCode);
}
[Fact]
public async Task VerifyAsync_MissingDssePayload_ReturnsSignatureFailure()
{
var bundlePath = await CreateBundleWithMissingDssePayloadAsync();
var options = new AttestationBundleVerifyOptions(bundlePath, Offline: true);
var result = await _verifier.VerifyAsync(options, CancellationToken.None);
Assert.False(result.Success);
Assert.Equal(AttestationBundleExitCodes.SignatureFailure, result.ExitCode);
}
[Fact]
public async Task ImportAsync_ValidBundle_ReturnsSuccess()
{
var bundlePath = await CreateValidBundleAsync();
var options = new AttestationBundleImportOptions(
bundlePath,
Tenant: "test-tenant",
Namespace: "test-namespace",
Offline: true);
var result = await _verifier.ImportAsync(options, CancellationToken.None);
Assert.True(result.Success);
Assert.Equal(AttestationBundleExitCodes.Success, result.ExitCode);
Assert.Equal("imported", result.Status);
}
[Fact]
public async Task ImportAsync_InvalidBundle_ReturnsVerificationFailed()
{
var bundlePath = Path.Combine(_tempDir, "invalid.tgz");
await File.WriteAllBytesAsync(bundlePath, Encoding.UTF8.GetBytes("not valid"));
var options = new AttestationBundleImportOptions(
bundlePath,
Tenant: "test-tenant",
Offline: true);
var result = await _verifier.ImportAsync(options, CancellationToken.None);
Assert.False(result.Success);
Assert.Equal("verification_failed", result.Status);
}
[Fact]
public async Task ImportAsync_InheritsTenantFromMetadata()
{
var bundlePath = await CreateValidBundleAsync();
var options = new AttestationBundleImportOptions(
bundlePath,
Tenant: null, // Not specified
Offline: true);
var result = await _verifier.ImportAsync(options, CancellationToken.None);
Assert.True(result.Success);
Assert.NotNull(result.TenantId); // Should come from bundle metadata
}
private async Task<string> CreateValidBundleAsync()
{
var bundlePath = Path.Combine(_tempDir, $"valid-bundle-{Guid.NewGuid():N}.tgz");
var exportId = Guid.NewGuid().ToString("D");
var attestationId = Guid.NewGuid().ToString("D");
var tenantId = Guid.NewGuid().ToString("D");
// Create statement JSON
var statement = new
{
_type = "https://in-toto.io/Statement/v1",
predicateType = "https://stellaops.io/attestations/vuln-scan/v1",
subject = new[]
{
new { name = "test-image:latest", digest = new Dictionary<string, string> { ["sha256"] = "abc123" } }
},
predicate = new { }
};
var statementJson = JsonSerializer.Serialize(statement);
var statementBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(statementJson));
// Create DSSE envelope
var dsse = new
{
payloadType = "application/vnd.in-toto+json",
payload = statementBase64,
signatures = new[]
{
new { keyid = "key-001", sig = "fake-signature-for-test" }
}
};
var dsseJson = JsonSerializer.Serialize(dsse);
// Create metadata
var metadata = new
{
version = "attestation-bundle/v1",
exportId,
attestationId,
tenantId,
createdAtUtc = DateTimeOffset.UtcNow.ToString("O"),
rootHash = "abc123def456",
statementVersion = "v1"
};
var metadataJson = JsonSerializer.Serialize(metadata);
// Create transparency entries
var transparencyNdjson = "{\"logIndex\":1,\"logId\":\"test\"}\n";
// Calculate checksums
var dsseHash = ComputeHash(dsseJson);
var statementHash = ComputeHash(statementJson);
var metadataHash = ComputeHash(metadataJson);
var transparencyHash = ComputeHash(transparencyNdjson);
var checksums = new StringBuilder();
checksums.AppendLine("# Attestation bundle checksums (sha256)");
checksums.AppendLine($"{dsseHash} attestation.dsse.json");
checksums.AppendLine($"{metadataHash} metadata.json");
checksums.AppendLine($"{statementHash} statement.json");
checksums.AppendLine($"{transparencyHash} transparency.ndjson");
var checksumsText = checksums.ToString();
// Create archive
await using var fileStream = File.Create(bundlePath);
await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize);
await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);
await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson);
await WriteEntryAsync(tarWriter, "checksums.txt", checksumsText);
await WriteEntryAsync(tarWriter, "metadata.json", metadataJson);
await WriteEntryAsync(tarWriter, "statement.json", statementJson);
await WriteEntryAsync(tarWriter, "transparency.ndjson", transparencyNdjson);
return bundlePath;
}
private async Task<string> CreateBundleWithoutTransparencyAsync()
{
var bundlePath = Path.Combine(_tempDir, $"no-transparency-{Guid.NewGuid():N}.tgz");
var statement = new
{
_type = "https://in-toto.io/Statement/v1",
predicateType = "https://stellaops.io/attestations/vuln-scan/v1",
subject = new[] { new { name = "test", digest = new Dictionary<string, string> { ["sha256"] = "abc" } } }
};
var statementJson = JsonSerializer.Serialize(statement);
var statementBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(statementJson));
var dsse = new
{
payloadType = "application/vnd.in-toto+json",
payload = statementBase64,
signatures = new[] { new { keyid = "key-001", sig = "fake-sig" } }
};
var dsseJson = JsonSerializer.Serialize(dsse);
var metadata = new
{
version = "attestation-bundle/v1",
exportId = Guid.NewGuid().ToString("D"),
attestationId = Guid.NewGuid().ToString("D"),
tenantId = Guid.NewGuid().ToString("D"),
rootHash = "abc123"
};
var metadataJson = JsonSerializer.Serialize(metadata);
var dsseHash = ComputeHash(dsseJson);
var statementHash = ComputeHash(statementJson);
var metadataHash = ComputeHash(metadataJson);
var checksums = $"# Checksums\n{dsseHash} attestation.dsse.json\n{metadataHash} metadata.json\n{statementHash} statement.json\n";
await using var fileStream = File.Create(bundlePath);
await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize);
await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);
await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson);
await WriteEntryAsync(tarWriter, "checksums.txt", checksums);
await WriteEntryAsync(tarWriter, "metadata.json", metadataJson);
await WriteEntryAsync(tarWriter, "statement.json", statementJson);
// No transparency.ndjson
return bundlePath;
}
private async Task<string> CreateBundleWithBadChecksumAsync()
{
var bundlePath = Path.Combine(_tempDir, $"bad-checksum-{Guid.NewGuid():N}.tgz");
var dsseJson = "{\"payloadType\":\"test\",\"payload\":\"dGVzdA==\",\"signatures\":[{\"keyid\":\"k\",\"sig\":\"s\"}]}";
var statementJson = "{\"_type\":\"test\"}";
var metadataJson = "{\"version\":\"v1\"}";
// Intentionally wrong checksum
var checksums = "0000000000000000000000000000000000000000000000000000000000000000 attestation.dsse.json\n";
await using var fileStream = File.Create(bundlePath);
await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize);
await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);
await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson);
await WriteEntryAsync(tarWriter, "checksums.txt", checksums);
await WriteEntryAsync(tarWriter, "metadata.json", metadataJson);
await WriteEntryAsync(tarWriter, "statement.json", statementJson);
return bundlePath;
}
private async Task<string> CreateBundleWithMissingDssePayloadAsync()
{
var bundlePath = Path.Combine(_tempDir, $"no-dsse-payload-{Guid.NewGuid():N}.tgz");
// DSSE without payload
var dsseJson = "{\"payloadType\":\"test\",\"signatures\":[]}";
var statementJson = "{\"_type\":\"test\"}";
var metadataJson = "{\"version\":\"v1\"}";
var dsseHash = ComputeHash(dsseJson);
var statementHash = ComputeHash(statementJson);
var metadataHash = ComputeHash(metadataJson);
var checksums = $"{dsseHash} attestation.dsse.json\n{metadataHash} metadata.json\n{statementHash} statement.json\n";
await using var fileStream = File.Create(bundlePath);
await using var gzipStream = new GZipStream(fileStream, CompressionLevel.SmallestSize);
await using var tarWriter = new TarWriter(gzipStream, TarEntryFormat.Pax);
await WriteEntryAsync(tarWriter, "attestation.dsse.json", dsseJson);
await WriteEntryAsync(tarWriter, "checksums.txt", checksums);
await WriteEntryAsync(tarWriter, "metadata.json", metadataJson);
await WriteEntryAsync(tarWriter, "statement.json", statementJson);
return bundlePath;
}
private static async Task WriteEntryAsync(TarWriter writer, string name, string content)
{
var bytes = Encoding.UTF8.GetBytes(content);
using var dataStream = new MemoryStream(bytes);
var entry = new PaxTarEntry(TarEntryType.RegularFile, name)
{
DataStream = dataStream
};
await writer.WriteEntryAsync(entry);
}
private static string ComputeHash(string content)
{
var bytes = Encoding.UTF8.GetBytes(content);
var hash = System.Security.Cryptography.SHA256.HashData(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
}
}
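
The fixtures above write checksums.txt entries as a lowercase hex SHA-256 followed by a file name, with '#' lines treated as comments. Below is a minimal sketch of re-verifying a single entry against an extracted bundle directory under that assumed layout; the whitespace handling and the helper name are assumptions, not code from this commit.

using System;
using System.IO;
using System.Security.Cryptography;

// Sketch only: recompute one checksums.txt entry the way the fixtures build it
// (SHA-256 of the file bytes, rendered as lowercase hex). Hypothetical helper.
internal static class ChecksumLineCheck
{
    public static bool VerifyLine(string checksumLine, string extractedBundleDirectory)
    {
        if (string.IsNullOrWhiteSpace(checksumLine) || checksumLine.StartsWith("#", StringComparison.Ordinal))
        {
            return true; // blank lines and comments carry no checksum
        }

        var parts = checksumLine.Split(' ', 2, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
        if (parts.Length != 2)
        {
            return false;
        }

        var expectedHash = parts[0];
        var fileBytes = File.ReadAllBytes(Path.Combine(extractedBundleDirectory, parts[1]));
        var actualHash = Convert.ToHexString(SHA256.HashData(fileBytes)).ToLowerInvariant();

        return string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase);
    }
}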

View File

@@ -0,0 +1,316 @@
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Cli.Services;
using Xunit;
namespace StellaOps.Cli.Tests.Services;
public sealed class DevPortalBundleVerifierTests : IDisposable
{
private readonly string _tempDir;
private readonly DevPortalBundleVerifier _verifier;
public DevPortalBundleVerifierTests()
{
_tempDir = Path.Combine(Path.GetTempPath(), $"devportal-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
_verifier = new DevPortalBundleVerifier(NullLogger<DevPortalBundleVerifier>.Instance);
}
public void Dispose()
{
if (Directory.Exists(_tempDir))
{
Directory.Delete(_tempDir, recursive: true);
}
}
[Fact]
public async Task VerifyBundleAsync_ReturnsSuccess_ForValidBundle()
{
var bundlePath = CreateValidBundle();
var result = await _verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None);
Assert.Equal("verified", result.Status);
Assert.Equal(DevPortalVerifyExitCode.Success, result.ExitCode);
Assert.Equal("a1b2c3d4-e5f6-7890-abcd-ef1234567890", result.BundleId);
Assert.NotNull(result.RootHash);
Assert.True(result.RootHash!.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase));
Assert.Equal(1, result.Entries);
}
[Fact]
public async Task VerifyBundleAsync_ReturnsUnexpected_WhenBundleNotFound()
{
var nonExistentPath = Path.Combine(_tempDir, "nonexistent.tgz");
var result = await _verifier.VerifyBundleAsync(nonExistentPath, offline: true, CancellationToken.None);
Assert.Equal("failed", result.Status);
Assert.Equal(DevPortalVerifyExitCode.Unexpected, result.ExitCode);
Assert.Contains("not found", result.ErrorMessage, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public async Task VerifyBundleAsync_ReturnsChecksumMismatch_WhenSha256DoesNotMatch()
{
var bundlePath = CreateValidBundle();
var sha256Path = bundlePath + ".sha256";
// Write incorrect hash
await File.WriteAllTextAsync(sha256Path, "0000000000000000000000000000000000000000000000000000000000000000 bundle.tgz");
var result = await _verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None);
Assert.Equal("failed", result.Status);
Assert.Equal(DevPortalVerifyExitCode.ChecksumMismatch, result.ExitCode);
}
[Fact]
public async Task VerifyBundleAsync_SucceedsWithoutSha256File()
{
var bundlePath = CreateValidBundle();
// Remove .sha256 file if exists
var sha256Path = bundlePath + ".sha256";
if (File.Exists(sha256Path))
{
File.Delete(sha256Path);
}
var result = await _verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None);
Assert.Equal("verified", result.Status);
Assert.Equal(DevPortalVerifyExitCode.Success, result.ExitCode);
}
[Fact]
public async Task VerifyBundleAsync_ReturnsTsaMissing_WhenOnlineAndNoTimestamp()
{
var bundlePath = CreateBundleWithoutTimestamp();
var result = await _verifier.VerifyBundleAsync(bundlePath, offline: false, CancellationToken.None);
Assert.Equal("failed", result.Status);
Assert.Equal(DevPortalVerifyExitCode.TsaMissing, result.ExitCode);
}
[Fact]
public async Task VerifyBundleAsync_DetectsPortableBundle()
{
var bundlePath = CreatePortableBundle();
var result = await _verifier.VerifyBundleAsync(bundlePath, offline: true, CancellationToken.None);
Assert.Equal("verified", result.Status);
Assert.True(result.Portable);
}
[Fact]
public void ToJson_OutputsKeysSortedAlphabetically()
{
var result = new DevPortalBundleVerificationResult
{
Status = "verified",
BundleId = "test-id",
RootHash = "sha256:abc123",
Entries = 3,
CreatedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero),
Portable = false,
ExitCode = DevPortalVerifyExitCode.Success
};
var json = result.ToJson();
// Keys should be in alphabetical order
var keys = JsonDocument.Parse(json).RootElement.EnumerateObject()
.Select(p => p.Name)
.ToList();
var sortedKeys = keys.OrderBy(k => k, StringComparer.Ordinal).ToList();
Assert.Equal(sortedKeys, keys);
}
private string CreateValidBundle()
{
var bundlePath = Path.Combine(_tempDir, $"bundle-{Guid.NewGuid():N}.tgz");
var manifest = new
{
bundleId = "a1b2c3d4-e5f6-7890-abcd-ef1234567890",
tenantId = "00000000-0000-0000-0000-000000000001",
kind = 2,
createdAt = "2025-12-07T10:30:00Z",
metadata = new Dictionary<string, string> { ["source"] = "test" },
entries = new[]
{
new
{
section = "sbom",
canonicalPath = "sbom/cyclonedx.json",
sha256 = new string('a', 64),
sizeBytes = 1024,
mediaType = "application/vnd.cyclonedx+json"
}
}
};
var manifestJson = JsonSerializer.Serialize(manifest, new JsonSerializerOptions { WriteIndented = false });
var manifestPayload = Convert.ToBase64String(Encoding.UTF8.GetBytes(manifestJson));
var signature = new
{
payloadType = "application/vnd.stella.evidence.manifest+json",
payload = manifestPayload,
signature = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature")),
keyId = "key-1",
algorithm = "ES256",
provider = "StellaOps",
signedAt = "2025-12-07T10:30:05Z",
timestampedAt = "2025-12-07T10:30:06Z",
timestampAuthority = "https://freetsa.org/tsr",
timestampToken = Convert.ToBase64String(Encoding.UTF8.GetBytes("tsa-token"))
};
var bundleMetadata = new
{
bundleId = "a1b2c3d4-e5f6-7890-abcd-ef1234567890",
tenantId = "00000000-0000-0000-0000-000000000001",
kind = 2,
status = 3,
rootHash = new string('f', 64),
storageKey = "evidence/bundle.tgz",
createdAt = "2025-12-07T10:30:00Z",
sealedAt = "2025-12-07T10:30:05Z"
};
CreateTgzBundle(bundlePath, manifestJson, signature, bundleMetadata);
return bundlePath;
}
private string CreateBundleWithoutTimestamp()
{
var bundlePath = Path.Combine(_tempDir, $"bundle-no-tsa-{Guid.NewGuid():N}.tgz");
var manifest = new
{
bundleId = "b2c3d4e5-f6a7-8901-bcde-f23456789012",
tenantId = "00000000-0000-0000-0000-000000000001",
kind = 2,
createdAt = "2025-12-07T10:30:00Z",
entries = Array.Empty<object>()
};
var manifestJson = JsonSerializer.Serialize(manifest);
var manifestPayload = Convert.ToBase64String(Encoding.UTF8.GetBytes(manifestJson));
var signature = new
{
payloadType = "application/vnd.stella.evidence.manifest+json",
payload = manifestPayload,
signature = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature")),
keyId = "key-1",
algorithm = "ES256",
provider = "StellaOps",
signedAt = "2025-12-07T10:30:05Z"
// No timestampedAt, timestampAuthority, timestampToken
};
var bundleMetadata = new
{
bundleId = "b2c3d4e5-f6a7-8901-bcde-f23456789012",
tenantId = "00000000-0000-0000-0000-000000000001",
kind = 2,
status = 3,
rootHash = new string('e', 64),
storageKey = "evidence/bundle.tgz",
createdAt = "2025-12-07T10:30:00Z",
sealedAt = "2025-12-07T10:30:05Z"
};
CreateTgzBundle(bundlePath, manifestJson, signature, bundleMetadata);
return bundlePath;
}
private string CreatePortableBundle()
{
var bundlePath = Path.Combine(_tempDir, $"portable-{Guid.NewGuid():N}.tgz");
var manifest = new
{
bundleId = "c3d4e5f6-a7b8-9012-cdef-345678901234",
kind = 1,
createdAt = "2025-12-07T10:30:00Z",
entries = Array.Empty<object>()
};
var manifestJson = JsonSerializer.Serialize(manifest);
var manifestPayload = Convert.ToBase64String(Encoding.UTF8.GetBytes(manifestJson));
var signature = new
{
payloadType = "application/vnd.stella.evidence.manifest+json",
payload = manifestPayload,
signature = Convert.ToBase64String(Encoding.UTF8.GetBytes("test-signature")),
keyId = "key-1",
algorithm = "ES256",
provider = "StellaOps",
signedAt = "2025-12-07T10:30:05Z",
timestampedAt = "2025-12-07T10:30:06Z",
timestampAuthority = "tsa.default",
timestampToken = Convert.ToBase64String(Encoding.UTF8.GetBytes("tsa-token"))
};
var bundleMetadata = new
{
bundleId = "c3d4e5f6-a7b8-9012-cdef-345678901234",
kind = 1,
status = 3,
rootHash = new string('d', 64),
createdAt = "2025-12-07T10:30:00Z",
sealedAt = "2025-12-07T10:30:05Z",
portableGeneratedAt = "2025-12-07T10:35:00Z" // Indicates portable bundle
};
CreateTgzBundle(bundlePath, manifestJson, signature, bundleMetadata);
return bundlePath;
}
private static void CreateTgzBundle(string bundlePath, string manifestJson, object signature, object bundleMetadata)
{
using var memoryStream = new MemoryStream();
using (var gzipStream = new GZipStream(memoryStream, CompressionLevel.Optimal, leaveOpen: true))
using (var tarWriter = new TarWriter(gzipStream))
{
AddTarEntry(tarWriter, "manifest.json", manifestJson);
AddTarEntry(tarWriter, "signature.json", JsonSerializer.Serialize(signature));
AddTarEntry(tarWriter, "bundle.json", JsonSerializer.Serialize(bundleMetadata));
AddTarEntry(tarWriter, "checksums.txt", $"# checksums\n{new string('f', 64)} sbom/cyclonedx.json\n");
}
memoryStream.Position = 0;
using var fileStream = File.Create(bundlePath);
memoryStream.CopyTo(fileStream);
}
private static void AddTarEntry(TarWriter writer, string name, string content)
{
var entry = new PaxTarEntry(TarEntryType.RegularFile, name)
{
Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead,
ModificationTime = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero)
};
var bytes = Encoding.UTF8.GetBytes(content);
entry.DataStream = new MemoryStream(bytes);
writer.WriteEntry(entry);
}
}
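
ToJson_OutputsKeysSortedAlphabetically pins the output to ordinal-sorted keys. The sketch below shows one way to get that determinism; it assumes nothing about the real DevPortalBundleVerificationResult.ToJson implementation, which is not shown in this diff.

using System;
using System.Collections.Generic;
using System.Text.Json;

// Sketch only: serialize through a SortedDictionary so System.Text.Json emits keys in
// ordinal order, matching what the test asserts. The real ToJson may be built differently.
internal static class DeterministicJson
{
    public static string Serialize(IReadOnlyDictionary<string, object?> fields)
    {
        var ordered = new SortedDictionary<string, object?>(StringComparer.Ordinal);
        foreach (var pair in fields)
        {
            ordered[pair.Key] = pair.Value;
        }

        // JsonSerializer writes dictionary entries in enumeration order, which for a
        // SortedDictionary is the comparer order.
        return JsonSerializer.Serialize(ordered);
    }
}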

View File

@@ -0,0 +1,32 @@
<Project>
<PropertyGroup>
<!-- Keep Concelier test harness active while trimming Mongo dependencies. Allow opt-out per project. -->
<UseConcelierTestInfra Condition="'$(UseConcelierTestInfra)'==''">true</UseConcelierTestInfra>
</PropertyGroup>
<ItemGroup>
<!-- Concelier is migrating off MongoDB; strip implicit Mongo2Go/Mongo driver packages inherited from the repo root. -->
<PackageReference Remove="Mongo2Go" />
<PackageReference Remove="MongoDB.Driver" />
</ItemGroup>
<ItemGroup Condition="$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests')) and '$(UseConcelierTestInfra)'=='true'">
<PackageReference Include="coverlet.collector" Version="6.0.4" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0-rc.2.25502.107" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
<PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" />
<ProjectReference Include="$(MSBuildThisFileDirectory)__Libraries\StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj"
Condition="Exists('$(MSBuildThisFileDirectory)__Libraries\StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj')" />
<Using Include="StellaOps.Concelier.Testing"
Condition="Exists('$(MSBuildThisFileDirectory)__Libraries\StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj')" />
<Using Include="Xunit" />
</ItemGroup>
<!-- Keep OpenSSL shim sources available to Mongo2Go-free test harnesses if needed. -->
<ItemGroup Condition="$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests')) and '$(UseConcelierTestInfra)'=='true'">
<None Include="$(MSBuildThisFileDirectory)..\..\tests\native\openssl-1.1\linux-x64\*.so.1.1"
Link="native/linux-x64/%(Filename)%(Extension)"
CopyToOutputDirectory="PreserveNewest" />
<Compile Include="$(MSBuildThisFileDirectory)..\..\tests\shared\OpenSslLegacyShim.cs" Link="Shared/OpenSslLegacyShim.cs" />
<Compile Include="$(MSBuildThisFileDirectory)..\..\tests\shared\OpenSslAutoInit.cs" Link="Shared/OpenSslAutoInit.cs" />
</ItemGroup>
</Project>

View File

@@ -30,7 +30,7 @@ public sealed class RawDocumentStorage
string uri,
byte[] content,
string? contentType,
DateTimeOffset? expiresAt,
DateTimeOffset? ExpiresAt,
CancellationToken cancellationToken,
Guid? documentId = null)
{

View File

@@ -418,7 +418,7 @@ public sealed class UbuntuConnector : IFeedConnector
await _stateRepository.UpdateCursorAsync(SourceName, doc, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false);
}
private static string ComputeNoticeHash(BsonDocument document)
private string ComputeNoticeHash(BsonDocument document)
{
var bytes = document.ToBson();
var hash = _hash.ComputeHash(bytes, HashAlgorithms.Sha256);

View File

@@ -0,0 +1,38 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace StellaOps.Concelier.Core.Linksets
{
public static class PolicyAuthSignalFactory
{
public static PolicyAuthSignal ToPolicyAuthSignal(AdvisoryLinkset linkset)
{
if (linkset is null) throw new ArgumentNullException(nameof(linkset));
var subject = linkset.Normalized?.Purls?.FirstOrDefault() ?? linkset.AdvisoryId;
var evidenceUri = $"urn:linkset:{linkset.AdvisoryId}";
return new PolicyAuthSignal(
Id: linkset.AdvisoryId,
Tenant: linkset.TenantId,
Subject: subject ?? string.Empty,
Source: linkset.Source,
SignalType: "reachability",
Evidence: new[]
{
new PolicyAuthEvidence(evidenceUri)
});
}
}
public sealed record PolicyAuthSignal(
string Id,
string Tenant,
string Subject,
string Source,
string SignalType,
IReadOnlyList<PolicyAuthEvidence> Evidence);
public sealed record PolicyAuthEvidence(string Uri);
}
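
A minimal consumer-side sketch built only on the records declared above: batching the produced signals per tenant before they are handed to whatever evaluates them. The batching helper is hypothetical; this commit does not include a consumer.

using System;
using System.Collections.Generic;
using System.Linq;
using StellaOps.Concelier.Core.Linksets;

// Sketch only: a hypothetical caller that groups PolicyAuthSignal instances by tenant.
internal static class PolicyAuthSignalBatcher
{
    public static IReadOnlyDictionary<string, IReadOnlyList<PolicyAuthSignal>> GroupByTenant(
        IEnumerable<PolicyAuthSignal> signals)
    {
        return signals
            .GroupBy(signal => signal.Tenant, StringComparer.Ordinal)
            .ToDictionary(
                group => group.Key,
                group => (IReadOnlyList<PolicyAuthSignal>)group.ToList(),
                StringComparer.Ordinal);
    }
}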

View File

@@ -1,248 +1,276 @@
using System;
using System.Collections;
using System.Text;
using System.Globalization;
using System.Text.Json;
namespace MongoDB.Bson
{
public readonly struct ObjectId : IEquatable<ObjectId>
public class BsonValue : IEquatable<BsonValue?>
{
public Guid Value { get; }
public ObjectId(Guid value) => Value = value;
public ObjectId(string value) => Value = Guid.TryParse(value, out var g) ? g : Guid.Empty;
public static ObjectId GenerateNewId() => new(Guid.NewGuid());
public static ObjectId Empty => new(Guid.Empty);
public bool Equals(ObjectId other) => Value.Equals(other.Value);
public override bool Equals(object? obj) => obj is ObjectId other && Equals(other);
public override int GetHashCode() => Value.GetHashCode();
public override string ToString() => Value.ToString("N");
public static bool operator ==(ObjectId left, ObjectId right) => left.Equals(right);
public static bool operator !=(ObjectId left, ObjectId right) => !left.Equals(right);
}
protected object? RawValue;
public enum BsonType { Document, Array, String, Boolean, Int32, Int64, Double, DateTime, Guid, Null }
public class BsonValue
{
protected readonly object? _value;
public BsonValue(object? value) => _value = value;
internal object? RawValue => _value;
public static implicit operator BsonValue(string value) => new BsonString(value ?? string.Empty);
public static implicit operator BsonValue(bool value) => new BsonBoolean(value);
public static implicit operator BsonValue(int value) => new BsonInt32(value);
public static implicit operator BsonValue(long value) => new BsonInt64(value);
public static implicit operator BsonValue(double value) => new BsonDouble(value);
public static implicit operator BsonValue(DateTime value) => new BsonDateTime(DateTime.SpecifyKind(value, DateTimeKind.Utc));
public static implicit operator BsonValue(DateTimeOffset value) => new BsonDateTime(value.UtcDateTime);
public static implicit operator BsonValue(Guid value) => new BsonString(value.ToString("D"));
public static BsonValue Create(object? value) => BsonDocument.WrapExternal(value);
public virtual BsonType BsonType => _value switch
public BsonValue(object? value = null)
{
null => BsonType.Null,
BsonDocument => BsonType.Document,
BsonArray => BsonType.Array,
string => BsonType.String,
bool => BsonType.Boolean,
int => BsonType.Int32,
long => BsonType.Int64,
double => BsonType.Double,
DateTime => BsonType.DateTime,
DateTimeOffset => BsonType.DateTime,
Guid => BsonType.Guid,
_ => BsonType.Null
};
public bool IsString => _value is string;
public bool IsBsonDocument => _value is BsonDocument;
public bool IsBsonArray => _value is BsonArray;
public bool IsBsonNull => _value is null;
public string AsString => _value?.ToString() ?? string.Empty;
public BsonDocument AsBsonDocument => _value as BsonDocument ?? throw new InvalidCastException();
public BsonArray AsBsonArray => _value as BsonArray ?? throw new InvalidCastException();
public Guid AsGuid => _value is Guid g ? g : Guid.Empty;
public DateTime AsDateTime => _value switch
{
DateTimeOffset dto => dto.UtcDateTime,
DateTime dt => dt,
_ => DateTime.MinValue
};
public int AsInt32 => _value is int i ? i : 0;
public long AsInt64 => _value is long l ? l : 0;
public double AsDouble => _value is double d ? d : 0d;
public bool AsBoolean => _value is bool b && b;
public bool IsInt32 => _value is int;
public DateTime ToUniversalTime() => _value switch
{
DateTimeOffset dto => dto.UtcDateTime,
DateTime dt => dt.Kind == DateTimeKind.Utc ? dt : dt.ToUniversalTime(),
string s when DateTimeOffset.TryParse(s, out var parsed) => parsed.UtcDateTime,
_ => DateTime.MinValue
};
public override string ToString() => _value?.ToString() ?? string.Empty;
}
public class BsonString : BsonValue { public BsonString(string value) : base(value) { } }
public class BsonBoolean : BsonValue { public BsonBoolean(bool value) : base(value) { } }
public class BsonInt32 : BsonValue { public BsonInt32(int value) : base(value) { } }
public class BsonInt64 : BsonValue { public BsonInt64(long value) : base(value) { } }
public class BsonDouble : BsonValue { public BsonDouble(double value) : base(value) { } }
public class BsonDateTime : BsonValue { public BsonDateTime(DateTime value) : base(value) { } }
public class BsonNull : BsonValue
{
private BsonNull() : base(null) { }
public static BsonNull Value { get; } = new();
}
public sealed class BsonElement
{
public BsonElement(string name, BsonValue value)
{
Name = name;
Value = value;
RawValue = value;
}
public string Name { get; }
public BsonValue Value { get; }
}
public bool IsString => RawValue is string;
public bool IsBoolean => RawValue is bool;
public bool IsBsonDocument => RawValue is BsonDocument;
public bool IsBsonArray => RawValue is BsonArray;
public class BsonBinaryData : BsonValue
{
private readonly byte[] _bytes;
public BsonBinaryData(byte[] bytes) : base(null) => _bytes = bytes ?? Array.Empty<byte>();
public BsonBinaryData(Guid guid) : this(guid.ToByteArray()) { }
public byte[] AsByteArray => _bytes;
public Guid ToGuid() => new(_bytes);
}
public class BsonArray : BsonValue, IEnumerable<BsonValue>
{
private readonly List<BsonValue> _items = new();
public BsonArray() : base(null) { }
public BsonArray(IEnumerable<BsonValue> values) : this() => _items.AddRange(values);
public BsonArray(IEnumerable<object?> values) : this()
public string AsString => RawValue switch
{
foreach (var value in values)
{
_items.Add(BsonDocument.WrapExternal(value));
}
}
public void Add(BsonValue value) => _items.Add(value);
public IEnumerator<BsonValue> GetEnumerator() => _items.GetEnumerator();
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
public BsonValue this[int index] { get => _items[index]; set => _items[index] = value; }
public int Count => _items.Count;
null => string.Empty,
string s => s,
Guid g => g.ToString(),
_ => Convert.ToString(RawValue, CultureInfo.InvariantCulture) ?? string.Empty
};
public bool AsBoolean => RawValue switch
{
bool b => b,
string s when bool.TryParse(s, out var b) => b,
int i => i != 0,
long l => l != 0,
_ => false
};
public int ToInt32() => RawValue switch
{
int i => i,
long l => (int)l,
double d => (int)d,
string s when int.TryParse(s, NumberStyles.Any, CultureInfo.InvariantCulture, out var i) => i,
_ => 0
};
public Guid AsGuid => RawValue switch
{
Guid g => g,
string s when Guid.TryParse(s, out var g) => g,
_ => Guid.Empty
};
public ObjectId AsObjectId => RawValue switch
{
ObjectId o => o,
string s => ObjectId.Parse(s),
_ => ObjectId.Empty
};
public BsonDocument AsBsonDocument => RawValue as BsonDocument ?? (this as BsonDocument ?? new BsonDocument());
public BsonArray AsBsonArray => RawValue as BsonArray ?? (this as BsonArray ?? new BsonArray());
public override string ToString() => AsString;
internal virtual BsonValue Clone() => new BsonValue(RawValue);
public bool Equals(BsonValue? other) => other is not null && Equals(RawValue, other.RawValue);
public override bool Equals(object? obj) => obj is BsonValue other && Equals(other);
public override int GetHashCode() => RawValue?.GetHashCode() ?? 0;
public static implicit operator BsonValue(string value) => new(value);
public static implicit operator BsonValue(Guid value) => new(value);
public static implicit operator BsonValue(int value) => new(value);
public static implicit operator BsonValue(long value) => new(value);
public static implicit operator BsonValue(bool value) => new(value);
public static implicit operator BsonValue(double value) => new(value);
public static implicit operator BsonValue(DateTimeOffset value) => new(value);
}
public class BsonDocument : BsonValue, IEnumerable<KeyValuePair<string, BsonValue>>
public sealed class BsonDocument : BsonValue, IDictionary<string, BsonValue>
{
private readonly Dictionary<string, BsonValue> _values = new(StringComparer.Ordinal);
public BsonDocument() : base(null) { }
public BsonDocument(string key, object? value) : this() => _values[key] = Wrap(value);
public BsonDocument(IEnumerable<KeyValuePair<string, object?>> pairs) : this()
public BsonDocument()
: base(null)
{
foreach (var kvp in pairs)
RawValue = this;
}
public BsonDocument(IDictionary<string, object?> values)
: this()
{
foreach (var kvp in values)
{
_values[kvp.Key] = Wrap(kvp.Value);
_values[kvp.Key] = ToBsonValue(kvp.Value);
}
}
private static BsonValue Wrap(object? value) => value switch
{
BsonValue v => v,
IEnumerable<BsonValue> enumerable => new BsonArray(enumerable),
IEnumerable<object?> objEnum => new BsonArray(objEnum.Select(Wrap)),
_ => new BsonValue(value)
};
internal static BsonValue WrapExternal(object? value) => Wrap(value);
public int ElementCount => _values.Count;
public BsonValue this[string key]
{
get => _values[key];
set => _values[key] = Wrap(value);
set => _values[key] = value ?? new BsonValue();
}
public int ElementCount => _values.Count;
public IEnumerable<BsonElement> Elements => _values.Select(kvp => new BsonElement(kvp.Key, kvp.Value));
public ICollection<string> Keys => _values.Keys;
public ICollection<BsonValue> Values => _values.Values;
public int Count => _values.Count;
public bool IsReadOnly => false;
public bool Contains(string key) => _values.ContainsKey(key);
public void Add(string key, BsonValue value) => _values[key] = value ?? new BsonValue();
public void Add(string key, object? value) => _values[key] = ToBsonValue(value);
public void Add(KeyValuePair<string, BsonValue> item) => Add(item.Key, item.Value);
public void Clear() => _values.Clear();
public bool Contains(KeyValuePair<string, BsonValue> item) => _values.Contains(item);
public bool ContainsKey(string key) => _values.ContainsKey(key);
public void CopyTo(KeyValuePair<string, BsonValue>[] array, int arrayIndex) => ((IDictionary<string, BsonValue>)_values).CopyTo(array, arrayIndex);
public IEnumerator<KeyValuePair<string, BsonValue>> GetEnumerator() => _values.GetEnumerator();
IEnumerator IEnumerable.GetEnumerator() => _values.GetEnumerator();
public bool Remove(string key) => _values.Remove(key);
public bool Remove(KeyValuePair<string, BsonValue> item) => _values.Remove(item.Key);
public bool TryGetValue(string key, out BsonValue value) => _values.TryGetValue(key, out value!);
public BsonValue GetValue(string key, BsonValue? defaultValue = null)
{
return _values.TryGetValue(key, out var value)
? value
: defaultValue ?? new BsonValue(null);
}
public bool Remove(string key) => _values.Remove(key);
public void Add(string key, BsonValue value) => _values[key] = value;
public void Add(string key, object? value) => _values[key] = Wrap(value);
public IEnumerator<KeyValuePair<string, BsonValue>> GetEnumerator() => _values.GetEnumerator();
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
public BsonValue GetValue(string key) => _values[key];
public BsonDocument DeepClone()
{
var clone = new BsonDocument();
var copy = new BsonDocument();
foreach (var kvp in _values)
{
clone[kvp.Key] = kvp.Value;
copy._values[kvp.Key] = kvp.Value?.Clone() ?? new BsonValue();
}
return clone;
return copy;
}
public static BsonDocument Parse(string json)
{
using var doc = JsonDocument.Parse(json);
return FromElement(doc.RootElement);
return FromElement(doc.RootElement).AsBsonDocument;
}
private static BsonDocument FromElement(JsonElement element)
private static BsonValue FromElement(JsonElement element)
{
return element.ValueKind switch
{
JsonValueKind.Object => FromObject(element),
JsonValueKind.Array => FromArray(element),
JsonValueKind.String => new BsonValue(element.GetString()),
JsonValueKind.Number => element.TryGetInt64(out var l) ? new BsonValue(l) : new BsonValue(element.GetDouble()),
JsonValueKind.True => new BsonValue(true),
JsonValueKind.False => new BsonValue(false),
JsonValueKind.Null or JsonValueKind.Undefined => new BsonValue(null),
_ => new BsonValue(element.ToString())
};
}
private static BsonDocument FromObject(JsonElement element)
{
var doc = new BsonDocument();
foreach (var prop in element.EnumerateObject())
foreach (var property in element.EnumerateObject())
{
doc[prop.Name] = FromJsonValue(prop.Value);
doc[property.Name] = FromElement(property.Value);
}
return doc;
}
private static BsonValue FromJsonValue(JsonElement element) => element.ValueKind switch
private static BsonArray FromArray(JsonElement element)
{
JsonValueKind.Object => FromElement(element),
JsonValueKind.Array => new BsonArray(element.EnumerateArray().Select(FromJsonValue)),
JsonValueKind.String => new BsonString(element.GetString() ?? string.Empty),
JsonValueKind.Number => element.TryGetInt64(out var l) ? new BsonInt64(l) : new BsonDouble(element.GetDouble()),
JsonValueKind.True => new BsonBoolean(true),
JsonValueKind.False => new BsonBoolean(false),
JsonValueKind.Null or JsonValueKind.Undefined => new BsonValue(null),
_ => new BsonValue(null)
};
public string ToJson(MongoDB.Bson.IO.JsonWriterSettings? settings = null)
{
var dict = _values.ToDictionary(kvp => kvp.Key, kvp => Unwrap(kvp.Value));
return JsonSerializer.Serialize(dict, new JsonSerializerOptions(JsonSerializerDefaults.Web));
var array = new BsonArray();
foreach (var item in element.EnumerateArray())
{
array.Add(FromElement(item));
}
return array;
}
public byte[] ToBson() => Encoding.UTF8.GetBytes(ToJson());
private static object? Unwrap(BsonValue value) => value switch
internal static BsonValue ToBsonValue(object? value)
{
BsonDocument doc => doc._values.ToDictionary(kvp => kvp.Key, kvp => Unwrap(kvp.Value)),
BsonArray array => array.Select(Unwrap).ToArray(),
_ => value.RawValue
};
return value switch
{
null => new BsonValue(null),
BsonValue bson => bson,
string s => new BsonValue(s),
Guid g => new BsonValue(g),
int i => new BsonValue(i),
long l => new BsonValue(l),
bool b => new BsonValue(b),
double d => new BsonValue(d),
float f => new BsonValue(f),
DateTime dt => new BsonValue(dt),
DateTimeOffset dto => new BsonValue(dto),
IEnumerable<object?> enumerable => new BsonArray(enumerable.Select(ToBsonValue)),
_ => new BsonValue(value)
};
}
internal override BsonValue Clone() => DeepClone();
}
public sealed class BsonArray : BsonValue, IList<BsonValue>
{
private readonly List<BsonValue> _items = new();
public BsonArray()
: base(null)
{
RawValue = this;
}
public BsonArray(IEnumerable<BsonValue> items)
: this()
{
_items.AddRange(items);
}
public BsonValue this[int index]
{
get => _items[index];
set => _items[index] = value ?? new BsonValue();
}
public int Count => _items.Count;
public bool IsReadOnly => false;
public void Add(BsonValue item) => _items.Add(item ?? new BsonValue());
public void Add(object? item) => _items.Add(BsonDocument.ToBsonValue(item));
public void Clear() => _items.Clear();
public bool Contains(BsonValue item) => _items.Contains(item);
public void CopyTo(BsonValue[] array, int arrayIndex) => _items.CopyTo(array, arrayIndex);
public IEnumerator<BsonValue> GetEnumerator() => _items.GetEnumerator();
IEnumerator IEnumerable.GetEnumerator() => _items.GetEnumerator();
public int IndexOf(BsonValue item) => _items.IndexOf(item);
public void Insert(int index, BsonValue item) => _items.Insert(index, item ?? new BsonValue());
public bool Remove(BsonValue item) => _items.Remove(item);
public void RemoveAt(int index) => _items.RemoveAt(index);
internal override BsonValue Clone() => new BsonArray(_items.Select(i => i.Clone()));
}
public readonly struct ObjectId : IEquatable<ObjectId>
{
private readonly string _value;
public ObjectId(string value)
{
_value = value;
}
public static ObjectId Empty { get; } = new(string.Empty);
public override string ToString() => _value;
public static ObjectId Parse(string value) => new(value ?? string.Empty);
public bool Equals(ObjectId other) => string.Equals(_value, other._value, StringComparison.Ordinal);
public override bool Equals(object? obj) => obj is ObjectId other && Equals(other);
public override int GetHashCode() => _value?.GetHashCode(StringComparison.Ordinal) ?? 0;
}
}
namespace MongoDB.Bson.IO
namespace MongoDB.Bson.Serialization.Attributes
{
public enum JsonOutputMode { Strict, RelaxedExtendedJson }
public class JsonWriterSettings
[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field | AttributeTargets.Class | AttributeTargets.Struct)]
public sealed class BsonElementAttribute : Attribute
{
public JsonOutputMode OutputMode { get; set; } = JsonOutputMode.Strict;
public BsonElementAttribute(string elementName)
{
ElementName = elementName;
}
public string ElementName { get; }
}
}
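
Because removed and added lines of this shim are interleaved in this view, here is a quick usage sketch against the members the hunk appears to add (indexer, Add, GetValue with a default, DeepClone, Parse). Treat the exact member set as an assumption rather than the authoritative final surface.

using System;
using MongoDB.Bson;

// Sketch only, exercising the shim members introduced in this hunk. Member names are taken
// from the added lines above; the final surface may differ once the diff is applied cleanly.
internal static class BsonShimUsage
{
    public static void Demo()
    {
        var doc = new BsonDocument();
        doc["sourceName"] = "ubuntu";                 // implicit string -> BsonValue conversion
        doc.Add("fetchedAt", DateTimeOffset.UtcNow);  // object overload routes through ToBsonValue

        // GetValue falls back to the supplied default when the key is absent.
        var etag = doc.GetValue("etag", new BsonValue("none"));

        // DeepClone copies values so edits to the copy leave the original untouched.
        var copy = doc.DeepClone();
        copy["sourceName"] = "debian";

        var parsed = BsonDocument.Parse("{\"cursor\":{\"page\":3},\"ids\":[\"a\",\"b\"]}");

        Console.WriteLine($"{doc["sourceName"].AsString} {copy["sourceName"].AsString} {etag.AsString} {parsed["ids"].AsBsonArray.Count}");
    }
}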

View File

@@ -4,6 +4,7 @@ using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using MongoDB.Bson;
namespace MongoDB.Driver
{
@@ -31,6 +32,7 @@ namespace MongoDB.Driver
public interface IMongoClient
{
IMongoDatabase GetDatabase(string name, MongoDatabaseSettings? settings = null);
Task DropDatabaseAsync(string name, CancellationToken cancellationToken = default);
}
public class MongoClient : IMongoClient
@@ -38,20 +40,47 @@ namespace MongoDB.Driver
public MongoClient(string connectionString) { }
public MongoClient(MongoClientSettings settings) { }
public IMongoDatabase GetDatabase(string name, MongoDatabaseSettings? settings = null) => new MongoDatabase(name);
public Task DropDatabaseAsync(string name, CancellationToken cancellationToken = default) => Task.CompletedTask;
}
public class MongoDatabaseSettings { }
public sealed class DatabaseNamespace
{
public DatabaseNamespace(string databaseName) => DatabaseName = databaseName;
public string DatabaseName { get; }
}
public interface IMongoDatabase
{
IMongoCollection<TDocument> GetCollection<TDocument>(string name, MongoCollectionSettings? settings = null);
DatabaseNamespace DatabaseNamespace { get; }
Task DropCollectionAsync(string name, CancellationToken cancellationToken = default);
BsonDocument RunCommand(BsonDocument command, CancellationToken cancellationToken = default);
T RunCommand<T>(BsonDocument command, CancellationToken cancellationToken = default);
Task<T> RunCommandAsync<T>(BsonDocument command, CancellationToken cancellationToken = default);
BsonDocument RunCommand(string command, CancellationToken cancellationToken = default);
T RunCommand<T>(string command, CancellationToken cancellationToken = default);
Task<T> RunCommandAsync<T>(string command, CancellationToken cancellationToken = default);
}
public class MongoDatabase : IMongoDatabase
{
public MongoDatabase(string name) => Name = name;
public MongoDatabase(string name)
{
Name = name;
DatabaseNamespace = new DatabaseNamespace(name);
}
public string Name { get; }
public DatabaseNamespace DatabaseNamespace { get; }
public IMongoCollection<TDocument> GetCollection<TDocument>(string name, MongoCollectionSettings? settings = null) => new MongoCollection<TDocument>(name);
public Task DropCollectionAsync(string name, CancellationToken cancellationToken = default) => Task.CompletedTask;
public BsonDocument RunCommand(BsonDocument command, CancellationToken cancellationToken = default) => new();
public T RunCommand<T>(BsonDocument command, CancellationToken cancellationToken = default) => default!;
public Task<T> RunCommandAsync<T>(BsonDocument command, CancellationToken cancellationToken = default) => Task.FromResult(default(T)!);
public BsonDocument RunCommand(string command, CancellationToken cancellationToken = default) => new();
public T RunCommand<T>(string command, CancellationToken cancellationToken = default) => default!;
public Task<T> RunCommandAsync<T>(string command, CancellationToken cancellationToken = default) => Task.FromResult(default(T)!);
}
public class MongoCollectionSettings { }
@@ -59,8 +88,10 @@ namespace MongoDB.Driver
public interface IMongoCollection<TDocument>
{
Task InsertOneAsync(TDocument document, InsertOneOptions? options = null, CancellationToken cancellationToken = default);
Task InsertManyAsync(IEnumerable<TDocument> documents, InsertManyOptions? options = null, CancellationToken cancellationToken = default);
Task<ReplaceOneResult> ReplaceOneAsync(FilterDefinition<TDocument> filter, TDocument replacement, ReplaceOptions? options = null, CancellationToken cancellationToken = default);
Task<DeleteResult> DeleteOneAsync(FilterDefinition<TDocument> filter, CancellationToken cancellationToken = default);
Task<DeleteResult> DeleteManyAsync(FilterDefinition<TDocument> filter, CancellationToken cancellationToken = default);
Task<IAsyncCursor<TDocument>> FindAsync(FilterDefinition<TDocument> filter, FindOptions<TDocument, TDocument>? options = null, CancellationToken cancellationToken = default);
IFindFluent<TDocument, TDocument> Find(FilterDefinition<TDocument> filter, FindOptions<TDocument, TDocument>? options = null);
Task<long> CountDocumentsAsync(FilterDefinition<TDocument> filter, CountOptions? options = null, CancellationToken cancellationToken = default);
@@ -88,6 +119,12 @@ namespace MongoDB.Driver
return Task.CompletedTask;
}
public Task InsertManyAsync(IEnumerable<TDocument> documents, InsertManyOptions? options = null, CancellationToken cancellationToken = default)
{
_docs.AddRange(documents);
return Task.CompletedTask;
}
public Task<ReplaceOneResult> ReplaceOneAsync(FilterDefinition<TDocument> filter, TDocument replacement, ReplaceOptions? options = null, CancellationToken cancellationToken = default)
{
_docs.Clear();
@@ -102,6 +139,13 @@ namespace MongoDB.Driver
return Task.FromResult(new DeleteResult(removed ? 1 : 0));
}
public Task<DeleteResult> DeleteManyAsync(FilterDefinition<TDocument> filter, CancellationToken cancellationToken = default)
{
var removed = _docs.Count;
_docs.Clear();
return Task.FromResult(new DeleteResult(removed));
}
public Task<IAsyncCursor<TDocument>> FindAsync(FilterDefinition<TDocument> filter, FindOptions<TDocument, TDocument>? options = null, CancellationToken cancellationToken = default)
=> Task.FromResult<IAsyncCursor<TDocument>>(new AsyncCursor<TDocument>(_docs));
@@ -212,7 +256,10 @@ namespace MongoDB.Driver
=> new FindFluentProjected<TDocument, TNewProjection>(Enumerable.Empty<TNewProjection>());
}
public class FilterDefinition<TDocument> { }
public class FilterDefinition<TDocument>
{
public static FilterDefinition<TDocument> Empty { get; } = new();
}
public class UpdateDefinition<TDocument> { }
public class ProjectionDefinition<TDocument, TProjection> { }
public class SortDefinition<TDocument> { }
@@ -222,6 +269,7 @@ namespace MongoDB.Driver
public class FindOneAndReplaceOptions<TDocument, TProjection> { public bool IsUpsert { get; set; } }
public class FindOneAndUpdateOptions<TDocument, TProjection> { public bool IsUpsert { get; set; } }
public class InsertOneOptions { }
public class InsertManyOptions { }
public class CreateIndexOptions { }
public class IndexKeysDefinition<TDocument> { }
@@ -284,7 +332,7 @@ namespace Mongo2Go
private MongoDbRunner(string connectionString) => ConnectionString = connectionString;
public static MongoDbRunner Start() => new("mongodb://localhost:27017/fake");
public static MongoDbRunner Start(bool singleNodeReplSet = false) => new("mongodb://localhost:27017/fake");
public void Dispose()
{

View File

@@ -1,19 +1,27 @@
using System.Collections.Concurrent;
using System.IO;
using System.Linq;
using MongoDB.Bson;
using StellaOps.Concelier.Models;
namespace StellaOps.Concelier.Storage.Mongo
{
public static class MongoStorageDefaults
{
public const string DefaultDatabaseName = "concelier";
public static class Collections
{
public const string AdvisoryStatements = "advisory_statements";
public const string AdvisoryRaw = "advisory_raw";
public const string Advisory = "advisory";
public const string AdvisoryObservations = "advisory_observations";
public const string AdvisoryLinksets = "advisory_linksets";
public const string Alias = "aliases";
public const string Dto = "dto";
public const string MergeEvent = "merge_events";
public const string Document = "documents";
public const string PsirtFlags = "psirt_flags";
}
}
@@ -64,13 +72,32 @@ namespace StellaOps.Concelier.Storage.Mongo
this.FetchedAt = FetchedAt ?? CreatedAt;
}
public DocumentRecord(
Guid Id,
string SourceName,
string Uri,
string Sha256,
string Status = "pending_parse",
string? ContentType = null,
IReadOnlyDictionary<string, string>? Headers = null,
IReadOnlyDictionary<string, string>? Metadata = null,
string? Etag = null,
DateTimeOffset? LastModified = null,
Guid? PayloadId = null,
DateTimeOffset? ExpiresAt = null,
byte[]? Payload = null,
DateTimeOffset? FetchedAt = null)
: this(Id, SourceName, Uri, DateTimeOffset.UtcNow, Sha256, Status, ContentType, Headers, Metadata, Etag, LastModified, PayloadId, ExpiresAt, Payload, FetchedAt)
{
}
public Guid Id { get; init; }
public string SourceName { get; init; }
public string Uri { get; init; }
public string SourceName { get; init; } = string.Empty;
public string Uri { get; init; } = string.Empty;
public DateTimeOffset CreatedAt { get; init; }
public DateTimeOffset FetchedAt { get; init; }
public string Sha256 { get; init; }
public string Status { get; init; }
public string Sha256 { get; init; } = string.Empty;
public string Status { get; init; } = string.Empty;
public string? ContentType { get; init; }
public IReadOnlyDictionary<string, string>? Headers { get; init; }
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
@@ -81,37 +108,37 @@ namespace StellaOps.Concelier.Storage.Mongo
public byte[]? Payload { get; init; }
}
public interface IDocumentStore
{
Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken);
Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken);
Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken);
Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken);
}
public interface IDocumentStore
{
Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken);
Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken);
Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken);
Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken);
}
public class InMemoryDocumentStore : IDocumentStore
{
private readonly ConcurrentDictionary<(string Source, string Uri), DocumentRecord> _records = new();
private readonly ConcurrentDictionary<Guid, DocumentRecord> _byId = new();
public Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken)
{
_records.TryGetValue((sourceName, uri), out var record);
return Task.FromResult<DocumentRecord?>(record);
}
public Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken)
{
_records.TryGetValue((sourceName, uri), out var record);
return Task.FromResult<DocumentRecord?>(record);
}
public Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken)
{
_byId.TryGetValue(id, out var record);
return Task.FromResult<DocumentRecord?>(record);
}
public Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken)
{
_byId.TryGetValue(id, out var record);
return Task.FromResult<DocumentRecord?>(record);
}
public Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken)
{
_records[(record.SourceName, record.Uri)] = record;
_byId[record.Id] = record;
return Task.FromResult(record);
}
public Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken)
{
_records[(record.SourceName, record.Uri)] = record;
_byId[record.Id] = record;
return Task.FromResult(record);
}
public Task UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken)
{
@@ -129,6 +156,22 @@ public interface IDocumentStore
{
private readonly InMemoryDocumentStore _inner = new();
public DocumentStore()
{
}
public DocumentStore(object? database, MongoStorageOptions? options)
{
}
public DocumentStore(object? database, object? logger)
{
}
public DocumentStore(object? database, MongoStorageOptions? options, object? logger)
{
}
public Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken)
=> _inner.FindBySourceAndUriAsync(sourceName, uri, cancellationToken);
@@ -142,47 +185,70 @@ public interface IDocumentStore
=> _inner.UpdateStatusAsync(id, status, cancellationToken);
}
public record DtoRecord(
Guid Id,
Guid DocumentId,
string SourceName,
string Format,
MongoDB.Bson.BsonDocument Payload,
DateTimeOffset CreatedAt)
public record DtoRecord
{
public DtoRecord(
Guid Id,
Guid DocumentId,
string SourceName,
string Format,
MongoDB.Bson.BsonDocument Payload,
DateTimeOffset CreatedAt,
string? SchemaVersion = null,
DateTimeOffset? ValidatedAt = null)
{
this.Id = Id;
this.DocumentId = DocumentId;
this.SourceName = SourceName;
this.Format = Format;
this.Payload = Payload;
this.CreatedAt = CreatedAt;
this.SchemaVersion = SchemaVersion ?? string.Empty;
this.ValidatedAt = ValidatedAt ?? CreatedAt;
}
public Guid Id { get; init; }
public Guid DocumentId { get; init; }
public string SourceName { get; init; } = string.Empty;
public string Format { get; init; } = string.Empty;
public MongoDB.Bson.BsonDocument Payload { get; init; } = new();
public DateTimeOffset CreatedAt { get; init; }
public string SchemaVersion { get; init; } = string.Empty;
public DateTimeOffset ValidatedAt { get; init; } = CreatedAt;
public DateTimeOffset ValidatedAt { get; init; }
}
public interface IDtoStore
{
Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken);
Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken);
Task<IReadOnlyList<DtoRecord>> GetBySourceAsync(string sourceName, CancellationToken cancellationToken);
}
public class InMemoryDtoStore : IDtoStore
{
private readonly ConcurrentDictionary<Guid, DtoRecord> _records = new();
public Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken)
public interface IDtoStore
{
_records[record.DocumentId] = record;
return Task.FromResult(record);
Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken);
Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken);
Task<IReadOnlyList<DtoRecord>> GetBySourceAsync(string sourceName, int limit, CancellationToken cancellationToken);
}
public Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken)
public class InMemoryDtoStore : IDtoStore
{
_records.TryGetValue(documentId, out var record);
return Task.FromResult<DtoRecord?>(record);
}
private readonly ConcurrentDictionary<Guid, DtoRecord> _records = new();
public Task<IReadOnlyList<DtoRecord>> GetBySourceAsync(string sourceName, CancellationToken cancellationToken)
{
var matches = _records.Values.Where(r => string.Equals(r.SourceName, sourceName, StringComparison.OrdinalIgnoreCase)).ToArray();
return Task.FromResult<IReadOnlyList<DtoRecord>>(matches);
public Task<DtoRecord> UpsertAsync(DtoRecord record, CancellationToken cancellationToken)
{
_records[record.DocumentId] = record;
return Task.FromResult(record);
}
public Task<DtoRecord?> FindByDocumentIdAsync(Guid documentId, CancellationToken cancellationToken)
{
_records.TryGetValue(documentId, out var record);
return Task.FromResult<DtoRecord?>(record);
}
public Task<IReadOnlyList<DtoRecord>> GetBySourceAsync(string sourceName, int limit, CancellationToken cancellationToken)
{
var matches = _records.Values
.Where(r => string.Equals(r.SourceName, sourceName, StringComparison.OrdinalIgnoreCase))
.Take(limit)
.ToArray();
return Task.FromResult<IReadOnlyList<DtoRecord>>(matches);
}
}
}
internal sealed class RawDocumentStorage
{
@@ -251,7 +317,7 @@ public sealed record SourceStateRecord(
sourceName,
Enabled: current?.Enabled ?? true,
Paused: current?.Paused ?? false,
Cursor: cursor.DeepClone(),
Cursor: cursor.DeepClone().AsBsonDocument,
LastSuccess: completedAt,
LastFailure: current?.LastFailure,
FailCount: current?.FailCount ?? 0,
@@ -288,6 +354,18 @@ public sealed record SourceStateRecord(
{
private readonly InMemorySourceStateRepository _inner = new();
public MongoSourceStateRepository()
{
}
public MongoSourceStateRepository(object? database, MongoStorageOptions? options)
{
}
public MongoSourceStateRepository(object? database, object? logger)
{
}
public Task<SourceStateRecord?> TryGetAsync(string sourceName, CancellationToken cancellationToken)
=> _inner.TryGetAsync(sourceName, cancellationToken);
@@ -304,6 +382,15 @@ public sealed record SourceStateRecord(
namespace StellaOps.Concelier.Storage.Mongo.Advisories
{
public sealed class AdvisoryDocument
{
public string AdvisoryKey { get; set; } = string.Empty;
public MongoDB.Bson.BsonDocument Payload { get; set; } = new();
public DateTime? Modified { get; set; }
public DateTime? Published { get; set; }
public DateTime? CreatedAt { get; set; }
}
public interface IAdvisoryStore
{
Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken);
@@ -360,18 +447,49 @@ namespace StellaOps.Concelier.Storage.Mongo.Aliases
public sealed record AliasEntry(string Scheme, string Value);
public sealed record AliasRecord(string AdvisoryKey, string Scheme, string Value, DateTimeOffset? UpdatedAt = null);
public sealed record AliasCollision(string Scheme, string Value, IReadOnlyList<string> AdvisoryKeys);
public sealed record AliasUpsertResult(string AdvisoryKey, IReadOnlyList<AliasCollision> Collisions);
public interface IAliasStore
{
Task<IReadOnlyList<AliasRecord>> GetByAdvisoryAsync(string advisoryKey, CancellationToken cancellationToken);
Task<IReadOnlyList<AliasRecord>> GetByAliasAsync(string scheme, string value, CancellationToken cancellationToken);
Task<AliasUpsertResult> ReplaceAsync(string advisoryKey, IEnumerable<AliasEntry> entries, DateTimeOffset updatedAt, CancellationToken cancellationToken);
}
public sealed class AliasStore : InMemoryAliasStore
{
public AliasStore()
{
}
public AliasStore(object? database, object? options)
{
}
}
public class InMemoryAliasStore : IAliasStore
{
private readonly ConcurrentDictionary<string, List<AliasRecord>> _byAdvisory = new(StringComparer.OrdinalIgnoreCase);
private readonly ConcurrentDictionary<(string Scheme, string Value), List<AliasRecord>> _byAlias = new();
public Task<AliasUpsertResult> ReplaceAsync(string advisoryKey, IEnumerable<AliasEntry> entries, DateTimeOffset updatedAt, CancellationToken cancellationToken)
{
var records = entries.Select(e => new AliasRecord(advisoryKey, e.Scheme, e.Value, updatedAt)).ToList();
_byAdvisory[advisoryKey] = records;
foreach (var record in records)
{
var list = _byAlias.GetOrAdd((record.Scheme, record.Value), _ => new List<AliasRecord>());
list.RemoveAll(r => string.Equals(r.AdvisoryKey, advisoryKey, StringComparison.OrdinalIgnoreCase));
list.Add(record);
}
var collisions = _byAlias.Values
.Where(list => list.Count > 1)
.Select(list => new AliasCollision(list[0].Scheme, list[0].Value, list.Select(r => r.AdvisoryKey).Distinct(StringComparer.OrdinalIgnoreCase).ToArray()))
.ToArray();
return Task.FromResult(new AliasUpsertResult(advisoryKey, collisions));
}
public Task<IReadOnlyList<AliasRecord>> GetByAdvisoryAsync(string advisoryKey, CancellationToken cancellationToken)
{
_byAdvisory.TryGetValue(advisoryKey, out var records);
@@ -400,11 +518,16 @@ namespace StellaOps.Concelier.Storage.Mongo.ChangeHistory
string Snapshot,
string PreviousSnapshot,
IReadOnlyList<ChangeHistoryFieldChange> Changes,
DateTimeOffset CreatedAt)
{
public string? PreviousHash => PreviousSnapshotHash;
public string? CurrentHash => SnapshotHash;
}
public interface IChangeHistoryStore
{
Task AddAsync(ChangeHistoryRecord record, CancellationToken cancellationToken);
Task<IReadOnlyList<ChangeHistoryRecord>> GetRecentAsync(string sourceName, string advisoryKey, int limit, CancellationToken cancellationToken);
}
public sealed class InMemoryChangeHistoryStore : IChangeHistoryStore
@@ -415,6 +538,18 @@ namespace StellaOps.Concelier.Storage.Mongo.ChangeHistory
_records.Add(record);
return Task.CompletedTask;
}
public Task<IReadOnlyList<ChangeHistoryRecord>> GetRecentAsync(string sourceName, string advisoryKey, int limit, CancellationToken cancellationToken)
{
var matches = _records
.Where(r =>
string.Equals(r.SourceName, sourceName, StringComparison.OrdinalIgnoreCase) &&
string.Equals(r.AdvisoryKey, advisoryKey, StringComparison.OrdinalIgnoreCase))
.OrderByDescending(r => r.CreatedAt)
.Take(limit)
.ToArray();
return Task.FromResult<IReadOnlyList<ChangeHistoryRecord>>(matches);
}
}
}
@@ -597,6 +732,25 @@ namespace StellaOps.Concelier.Storage.Mongo.MergeEvents
return Task.FromResult<IReadOnlyList<MergeEventRecord>>(records);
}
}
public sealed class MergeEventStore : IMergeEventStore
{
private readonly InMemoryMergeEventStore _inner = new();
public MergeEventStore()
{
}
public MergeEventStore(object? database, object? logger)
{
}
public Task AppendAsync(MergeEventRecord record, CancellationToken cancellationToken)
=> _inner.AppendAsync(record, cancellationToken);
public Task<IReadOnlyList<MergeEventRecord>> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken)
=> _inner.GetRecentAsync(advisoryKey, limit, cancellationToken);
}
}
namespace StellaOps.Concelier.Storage.Mongo.Documents
@@ -617,12 +771,16 @@ namespace StellaOps.Concelier.Storage.Mongo.Dtos
namespace StellaOps.Concelier.Storage.Mongo.PsirtFlags
{
public sealed record PsirtFlagRecord(string AdvisoryId, string Vendor, string SourceName, string? ExternalId, DateTimeOffset RecordedAt)
{
public string AdvisoryKey => AdvisoryId;
}
public interface IPsirtFlagStore
{
Task UpsertAsync(PsirtFlagRecord flag, CancellationToken cancellationToken);
Task<IReadOnlyList<PsirtFlagRecord>> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken);
Task<PsirtFlagRecord?> FindAsync(string advisoryKey, CancellationToken cancellationToken);
}
public sealed class InMemoryPsirtFlagStore : IPsirtFlagStore
@@ -645,6 +803,94 @@ namespace StellaOps.Concelier.Storage.Mongo.PsirtFlags
return Task.FromResult<IReadOnlyList<PsirtFlagRecord>>(records);
}
public Task<PsirtFlagRecord?> FindAsync(string advisoryKey, CancellationToken cancellationToken)
{
_records.TryGetValue(advisoryKey, out var flag);
return Task.FromResult<PsirtFlagRecord?>(flag);
}
}
}
namespace StellaOps.Concelier.Storage.Mongo.Observations
{
public sealed class AdvisoryObservationDocument
{
public string Id { get; set; } = string.Empty;
public string Tenant { get; set; } = string.Empty;
public DateTime CreatedAt { get; set; }
public AdvisoryObservationSourceDocument Source { get; set; } = new();
public AdvisoryObservationUpstreamDocument Upstream { get; set; } = new();
public AdvisoryObservationContentDocument Content { get; set; } = new();
public AdvisoryObservationLinksetDocument Linkset { get; set; } = new();
public IDictionary<string, string> Attributes { get; set; } = new Dictionary<string, string>(StringComparer.Ordinal);
}
public sealed class AdvisoryObservationSourceDocument
{
public string Vendor { get; set; } = string.Empty;
public string Stream { get; set; } = string.Empty;
public string Api { get; set; } = string.Empty;
}
public sealed class AdvisoryObservationUpstreamDocument
{
public string UpstreamId { get; set; } = string.Empty;
public string? DocumentVersion { get; set; }
public DateTime FetchedAt { get; set; }
public DateTime ReceivedAt { get; set; }
public string ContentHash { get; set; } = string.Empty;
public AdvisoryObservationSignatureDocument Signature { get; set; } = new();
public IDictionary<string, string> Metadata { get; set; } = new Dictionary<string, string>(StringComparer.Ordinal);
}
public sealed class AdvisoryObservationSignatureDocument
{
public bool Present { get; set; }
public string? Format { get; set; }
public string? KeyId { get; set; }
public string? Signature { get; set; }
}
public sealed class AdvisoryObservationContentDocument
{
public string Format { get; set; } = string.Empty;
public string SpecVersion { get; set; } = string.Empty;
public BsonDocument Raw { get; set; } = new();
public IDictionary<string, string> Metadata { get; set; } = new Dictionary<string, string>(StringComparer.Ordinal);
}
public sealed class AdvisoryObservationLinksetDocument
{
public List<string>? Aliases { get; set; }
public List<string>? Purls { get; set; }
public List<string>? Cpes { get; set; }
public List<AdvisoryObservationReferenceDocument> References { get; set; } = new();
}
public sealed class AdvisoryObservationReferenceDocument
{
public string Type { get; set; } = string.Empty;
public string Url { get; set; } = string.Empty;
}
}
namespace StellaOps.Concelier.Storage.Mongo.Linksets
{
public sealed class AdvisoryLinksetDocument
{
public string TenantId { get; set; } = string.Empty;
public string Source { get; set; } = string.Empty;
public string AdvisoryId { get; set; } = string.Empty;
public IReadOnlyList<string> Observations { get; set; } = Array.Empty<string>();
public DateTime CreatedAt { get; set; }
public AdvisoryLinksetNormalizedDocument Normalized { get; set; } = new();
}
public sealed class AdvisoryLinksetNormalizedDocument
{
public IReadOnlyList<string> Purls { get; set; } = Array.Empty<string>();
public IReadOnlyList<string> Versions { get; set; } = Array.Empty<string>();
}
}
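A hypothetical construction showing how the observation and linkset documents above fit together; every identifier and value below is invented for the example, and the real linkset derivation happens elsewhere in Concelier.

// Hypothetical sample data; only the document shapes come from the classes above.
using System;
using System.Collections.Generic;
using MongoDB.Bson;
using StellaOps.Concelier.Storage.Mongo.Linksets;
using StellaOps.Concelier.Storage.Mongo.Observations;

internal static class ObservationLinksetSketch
{
    public static AdvisoryLinksetDocument Build()
    {
        var observation = new AdvisoryObservationDocument
        {
            Id = "tenant-a:osv:OSV-2025-0001",
            Tenant = "tenant-a",
            CreatedAt = DateTime.UtcNow,
            Content = new AdvisoryObservationContentDocument
            {
                Format = "osv",
                SpecVersion = "1.6",
                Raw = new BsonDocument("id", "OSV-2025-0001")
            },
            Linkset = new AdvisoryObservationLinksetDocument
            {
                Aliases = new List<string> { "CVE-2025-0001" },
                Purls = new List<string> { "pkg:npm/example@1.0.0" }
            }
        };

        // A linkset groups observation ids for one advisory within a tenant.
        return new AdvisoryLinksetDocument
        {
            TenantId = observation.Tenant,
            Source = "osv",
            AdvisoryId = "CVE-2025-0001",
            Observations = new[] { observation.Id },
            CreatedAt = observation.CreatedAt,
            Normalized = new AdvisoryLinksetNormalizedDocument
            {
                Purls = new[] { "pkg:npm/example@1.0.0" },
                Versions = new[] { "1.0.0" }
            }
        };
    }
}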

View File

@@ -88,8 +88,10 @@ public sealed class CertCcMapperTests
Id: Guid.NewGuid(),
DocumentId: document.Id,
SourceName: "cert-cc",
Format: "certcc.vince.note.v1",
SchemaVersion: "certcc.vince.note.v1",
Payload: new BsonDocument(),
CreatedAt: PublishedAt,
ValidatedAt: PublishedAt.AddMinutes(1));
var advisory = CertCcMapper.Map(dto, document, dtoRecord, "cert-cc");

View File

@@ -190,11 +190,11 @@ public sealed class SourceStateSeedProcessorTests : IAsyncLifetime
Assert.NotNull(refreshedRecord);
Assert.Equal(documentId, refreshedRecord!.Id);
Assert.NotNull(refreshedRecord.PayloadId);
Assert.NotEqual(previousGridId?.ToString(), refreshedRecord.PayloadId?.ToString());
var files = await filesCollection.Find(FilterDefinition<BsonDocument>.Empty).ToListAsync();
Assert.Single(files);
Assert.NotEqual(previousGridId?.ToString(), files[0]["_id"].AsObjectId.ToString());
}
private SourceStateSeedProcessor CreateProcessor()

View File

@@ -34,7 +34,7 @@ public sealed class SuseMapperTests
},
Etag: "adv-1",
LastModified: DateTimeOffset.UtcNow,
PayloadId: Guid.Empty);
var mapped = SuseMapper.Map(dto, document, DateTimeOffset.UtcNow);

View File

@@ -97,8 +97,10 @@ public sealed class OsvConflictFixtureTests
Id: Guid.Parse("6f7d5ce7-cb47-40a5-8b41-8ad022b5fd5c"),
DocumentId: document.Id,
SourceName: OsvConnectorPlugin.SourceName,
Format: "osv.v1",
SchemaVersion: "osv.v1",
Payload: new BsonDocument("id", dto.Id),
CreatedAt: new DateTimeOffset(2025, 3, 6, 12, 0, 0, TimeSpan.Zero),
ValidatedAt: new DateTimeOffset(2025, 3, 6, 12, 5, 0, TimeSpan.Zero));
var advisory = OsvMapper.Map(dto, document, dtoRecord, "npm");

View File

@@ -65,7 +65,7 @@ public sealed class RuBduMapperTests
null,
null,
dto.IdentifyDate,
PayloadId: Guid.NewGuid());
var advisory = RuBduMapper.Map(dto, document, dto.IdentifyDate!.Value);

View File

@@ -56,7 +56,7 @@ public sealed class RuNkckiMapperTests
null,
null,
dto.DateUpdated,
PayloadId: Guid.NewGuid());
Assert.Equal("КРИТИЧЕСКИЙ", dto.CvssRating);
var normalizeSeverity = typeof(RuNkckiMapper).GetMethod("NormalizeSeverity", BindingFlags.NonPublic | BindingFlags.Static)!;

View File

@@ -1,12 +1,11 @@
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Threading.Tasks;
using System.Collections.Immutable;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.Concelier.Core.Jobs;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Exporter.Json;
@@ -15,15 +14,16 @@ using StellaOps.Concelier.Storage.Mongo.Exporting;
using StellaOps.Concelier.Models;
using StellaOps.Cryptography;
using StellaOps.Cryptography.DependencyInjection;
using StellaOps.Provenance.Mongo;
namespace StellaOps.Concelier.Exporter.Json.Tests;
public sealed class JsonExporterDependencyInjectionRoutineTests
{
[Fact]
public void Register_AddsJobDefinitionAndServices()
{
var services = new ServiceCollection();
services.AddLogging();
services.AddSingleton<IAdvisoryStore, StubAdvisoryStore>();
services.AddSingleton<IExportStateStore, StubExportStateStore>();
@@ -32,64 +32,60 @@ public sealed class JsonExporterDependencyInjectionRoutineTests
services.AddOptions<JobSchedulerOptions>();
services.Configure<CryptoHashOptions>(_ => { });
services.AddStellaOpsCrypto();
var configuration = new ConfigurationBuilder()
.AddInMemoryCollection(new Dictionary<string, string?>())
.Build();
var routine = new JsonExporterDependencyInjectionRoutine();
routine.Register(services, configuration);
using var provider = services.BuildServiceProvider();
var optionsAccessor = provider.GetRequiredService<IOptions<JobSchedulerOptions>>();
var options = optionsAccessor.Value;
Assert.True(options.Definitions.TryGetValue(JsonExportJob.JobKind, out var definition));
Assert.Equal(typeof(JsonExportJob), definition.JobType);
Assert.True(definition.Enabled);
var exporter = provider.GetRequiredService<JsonFeedExporter>();
Assert.NotNull(exporter);
}
private sealed class StubAdvisoryStore : IAdvisoryStore
{
public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken)
{
return Task.FromResult<IReadOnlyList<Advisory>>(Array.Empty<Advisory>());
}
public Task<Advisory?> FindAsync(string advisoryKey, CancellationToken cancellationToken)
{
return Task.FromResult<Advisory?>(null);
}
public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken)
{
return Task.CompletedTask;
}
public IAsyncEnumerable<Advisory> StreamAsync(CancellationToken cancellationToken)
{
return Enumerate(cancellationToken);
static async IAsyncEnumerable<Advisory> Enumerate([EnumeratorCancellation] CancellationToken ct)
{
ct.ThrowIfCancellationRequested();
await Task.Yield();
yield break;
}
}
}
private sealed class StubExportStateStore : IExportStateStore
{
private ExportStateRecord? _record;
public Task<ExportStateRecord?> FindAsync(string id, CancellationToken cancellationToken)
{
return Task.FromResult(_record);
@@ -107,6 +103,9 @@ public sealed class JsonExporterDependencyInjectionRoutineTests
public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken)
=> throw new NotSupportedException();
public ValueTask AttachStatementProvenanceAsync(Guid statementId, DsseProvenance provenance, TrustInfo trust, CancellationToken cancellationToken)
=> ValueTask.CompletedTask;
public ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken)
{
return ValueTask.FromResult(new AdvisoryReplay(

View File

@@ -11,7 +11,6 @@ using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using MongoDB.Driver;
using StellaOps.Concelier.Exporter.Json;
using StellaOps.Concelier.Exporter.TrivyDb;
using StellaOps.Concelier.Models;
@@ -883,27 +882,23 @@ public sealed class TrivyDbFeedExporterTests : IDisposable
_advisories = advisories;
}
public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken)
{
return Task.FromResult(_advisories);
}
public Task<Advisory?> FindAsync(string advisoryKey, CancellationToken cancellationToken)
{
return Task.FromResult<Advisory?>(_advisories.FirstOrDefault(a => a.AdvisoryKey == advisoryKey));
}
public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken)
{
return Task.CompletedTask;
}
public IAsyncEnumerable<Advisory> StreamAsync(CancellationToken cancellationToken)
{
return EnumerateAsync(cancellationToken);
async IAsyncEnumerable<Advisory> EnumerateAsync([EnumeratorCancellation] CancellationToken ct)

View File

@@ -2,109 +2,109 @@ using System.Collections.Concurrent;
using System.Collections.Immutable;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Concelier.Core;
using StellaOps.Concelier.Core.Events;
using StellaOps.Concelier.Merge.Services;
using StellaOps.Concelier.Models;
using StellaOps.Concelier.Storage.Mongo.Advisories;
using StellaOps.Concelier.Storage.Mongo.Aliases;
using StellaOps.Concelier.Storage.Mongo.MergeEvents;
using StellaOps.Provenance.Mongo;
namespace StellaOps.Concelier.Merge.Tests;
public sealed class AdvisoryMergeServiceTests
{
[Fact]
public async Task MergeAsync_AppliesCanonicalRulesAndPersistsDecisions()
{
var aliasStore = new FakeAliasStore();
aliasStore.Register("GHSA-aaaa-bbbb-cccc",
(AliasSchemes.Ghsa, "GHSA-aaaa-bbbb-cccc"),
(AliasSchemes.Cve, "CVE-2025-4242"));
aliasStore.Register("CVE-2025-4242",
(AliasSchemes.Cve, "CVE-2025-4242"));
aliasStore.Register("OSV-2025-xyz",
(AliasSchemes.OsV, "OSV-2025-xyz"),
(AliasSchemes.Cve, "CVE-2025-4242"));
var advisoryStore = new FakeAdvisoryStore();
advisoryStore.Seed(CreateGhsaAdvisory(), CreateNvdAdvisory(), CreateOsvAdvisory());
var mergeEventStore = new InMemoryMergeEventStore();
var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 4, 1, 0, 0, 0, TimeSpan.Zero));
var writer = new MergeEventWriter(mergeEventStore, new CanonicalHashCalculator(), timeProvider, NullLogger<MergeEventWriter>.Instance);
var precedenceMerger = new AdvisoryPrecedenceMerger(new AffectedPackagePrecedenceResolver(), timeProvider);
var aliasResolver = new AliasGraphResolver(aliasStore);
var canonicalMerger = new CanonicalMerger(timeProvider);
var eventLog = new RecordingAdvisoryEventLog();
var service = new AdvisoryMergeService(aliasResolver, advisoryStore, precedenceMerger, writer, canonicalMerger, eventLog, timeProvider, NullLogger<AdvisoryMergeService>.Instance);
var result = await service.MergeAsync("GHSA-aaaa-bbbb-cccc", CancellationToken.None);
Assert.NotNull(result.Merged);
Assert.Equal("OSV summary overrides", result.Merged!.Summary);
Assert.Empty(result.Conflicts);
var upserted = advisoryStore.LastUpserted;
Assert.NotNull(upserted);
Assert.Equal("CVE-2025-4242", upserted!.AdvisoryKey);
Assert.Equal("OSV summary overrides", upserted.Summary);
var mergeRecord = mergeEventStore.LastRecord;
Assert.NotNull(mergeRecord);
var summaryDecision = Assert.Single(mergeRecord!.FieldDecisions, decision => decision.Field == "summary");
Assert.Equal("osv", summaryDecision.SelectedSource);
Assert.Equal("freshness_override", summaryDecision.DecisionReason);
var appendRequest = eventLog.LastRequest;
Assert.NotNull(appendRequest);
Assert.Contains(appendRequest!.Statements, statement => string.Equals(statement.Advisory.AdvisoryKey, "CVE-2025-4242", StringComparison.OrdinalIgnoreCase));
Assert.True(appendRequest.Conflicts is null || appendRequest.Conflicts.Count == 0);
}
private static Advisory CreateGhsaAdvisory()
{
var recorded = DateTimeOffset.Parse("2025-03-01T00:00:00Z");
var provenance = new AdvisoryProvenance("ghsa", "map", "GHSA-aaaa-bbbb-cccc", recorded, new[] { ProvenanceFieldMasks.Advisory });
return new Advisory(
"GHSA-aaaa-bbbb-cccc",
"Container escape",
"Initial GHSA summary.",
"en",
recorded,
recorded,
"medium",
exploitKnown: false,
aliases: new[] { "CVE-2025-4242", "GHSA-aaaa-bbbb-cccc" },
references: Array.Empty<AdvisoryReference>(),
affectedPackages: Array.Empty<AffectedPackage>(),
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[] { provenance });
}
private static Advisory CreateNvdAdvisory()
{
var recorded = DateTimeOffset.Parse("2025-03-02T00:00:00Z");
var provenance = new AdvisoryProvenance("nvd", "map", "CVE-2025-4242", recorded, new[] { ProvenanceFieldMasks.Advisory });
return new Advisory(
"CVE-2025-4242",
"CVE-2025-4242",
"Baseline NVD summary.",
"en",
recorded,
recorded,
"high",
exploitKnown: false,
aliases: new[] { "CVE-2025-4242" },
references: Array.Empty<AdvisoryReference>(),
affectedPackages: Array.Empty<AffectedPackage>(),
cvssMetrics: Array.Empty<CvssMetric>(),
provenance: new[] { provenance });
}
private static Advisory CreateOsvAdvisory()
{
var recorded = DateTimeOffset.Parse("2025-03-05T12:00:00Z");
@@ -207,120 +207,119 @@ public sealed class AdvisoryMergeServiceTests
Assert.Equal(conflict.ConflictId, appendedConflict.ConflictId);
Assert.Equal(conflict.StatementIds, appendedConflict.StatementIds.ToImmutableArray());
}
private sealed class RecordingAdvisoryEventLog : IAdvisoryEventLog
{
public AdvisoryEventAppendRequest? LastRequest { get; private set; }
public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken)
{
LastRequest = request;
return ValueTask.CompletedTask;
}
public ValueTask AttachStatementProvenanceAsync(Guid statementId, DsseProvenance provenance, TrustInfo trust, CancellationToken cancellationToken)
=> ValueTask.CompletedTask;
public ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken)
{
throw new NotSupportedException();
}
}
private sealed class FakeAliasStore : IAliasStore
{
private readonly ConcurrentDictionary<string, List<AliasRecord>> _records = new(StringComparer.OrdinalIgnoreCase);
public void Register(string advisoryKey, params (string Scheme, string Value)[] aliases)
{
var list = new List<AliasRecord>();
foreach (var (scheme, value) in aliases)
{
list.Add(new AliasRecord(advisoryKey, scheme, value, DateTimeOffset.UtcNow));
}
_records[advisoryKey] = list;
}
public Task<AliasUpsertResult> ReplaceAsync(string advisoryKey, IEnumerable<AliasEntry> aliases, DateTimeOffset updatedAt, CancellationToken cancellationToken)
{
return Task.FromResult(new AliasUpsertResult(advisoryKey, Array.Empty<AliasCollision>()));
}
public Task<IReadOnlyList<AliasRecord>> GetByAliasAsync(string scheme, string value, CancellationToken cancellationToken)
{
var matches = _records.Values
.SelectMany(static records => records)
.Where(record => string.Equals(record.Scheme, scheme, StringComparison.OrdinalIgnoreCase) && string.Equals(record.Value, value, StringComparison.OrdinalIgnoreCase))
.ToList();
return Task.FromResult<IReadOnlyList<AliasRecord>>(matches);
}
public Task<IReadOnlyList<AliasRecord>> GetByAdvisoryAsync(string advisoryKey, CancellationToken cancellationToken)
{
if (_records.TryGetValue(advisoryKey, out var records))
{
return Task.FromResult<IReadOnlyList<AliasRecord>>(records);
}
return Task.FromResult<IReadOnlyList<AliasRecord>>(Array.Empty<AliasRecord>());
}
}
private sealed class FakeAdvisoryStore : IAdvisoryStore
{
private readonly ConcurrentDictionary<string, Advisory> _advisories = new(StringComparer.OrdinalIgnoreCase);
public Advisory? LastUpserted { get; private set; }
public void Seed(params Advisory[] advisories)
{
foreach (var advisory in advisories)
{
_advisories[advisory.AdvisoryKey] = advisory;
}
}
public Task<Advisory?> FindAsync(string advisoryKey, CancellationToken cancellationToken)
{
_advisories.TryGetValue(advisoryKey, out var advisory);
return Task.FromResult(advisory);
}
public Task<IReadOnlyList<Advisory>> GetRecentAsync(int limit, CancellationToken cancellationToken)
{
return Task.FromResult<IReadOnlyList<Advisory>>(Array.Empty<Advisory>());
}
public Task UpsertAsync(Advisory advisory, CancellationToken cancellationToken)
{
_advisories[advisory.AdvisoryKey] = advisory;
LastUpserted = advisory;
return Task.CompletedTask;
}
public IAsyncEnumerable<Advisory> StreamAsync(CancellationToken cancellationToken)
{
return AsyncEnumerable.Empty<Advisory>();
}
}
private sealed class InMemoryMergeEventStore : IMergeEventStore
{
public MergeEventRecord? LastRecord { get; private set; }
public Task AppendAsync(MergeEventRecord record, CancellationToken cancellationToken)
{
LastRecord = record;
return Task.CompletedTask;
}
public Task<IReadOnlyList<MergeEventRecord>> GetRecentAsync(string advisoryKey, int limit, CancellationToken cancellationToken)
{
return Task.FromResult<IReadOnlyList<MergeEventRecord>>(Array.Empty<MergeEventRecord>());
}
}
}

View File

@@ -12,8 +12,8 @@
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="xunit.v3" Version="3.0.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.3" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" />
</ItemGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />
@@ -24,4 +24,4 @@
<ItemGroup>
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj" />
</ItemGroup>
</Project>

View File

@@ -1,5 +1,6 @@
using System.Text.Json;
using Microsoft.Extensions.Options;
using OptionsFactory = Microsoft.Extensions.Options.Options;
using StellaOps.Aoc;
using StellaOps.Concelier.Core.Aoc;
@@ -194,7 +195,7 @@ public sealed class AocVerifyRegressionTests
public void Verify_MapperGuardParity_ValidationResultsMatch()
{
var guard = new AocWriteGuard();
var validator = new AdvisorySchemaValidator(guard, OptionsFactory.Create(GuardOptions));
// Create document with forbidden field
var json = CreateJsonWithForbiddenField("severity", "high");

View File

@@ -1,6 +1,7 @@
using System.Collections.Immutable;
using System.Text.Json;
using Microsoft.Extensions.Options;
using OptionsFactory = Microsoft.Extensions.Options.Options;
using StellaOps.Aoc;
using StellaOps.Concelier.Core.Aoc;
using StellaOps.Concelier.RawModels;
@@ -43,7 +44,7 @@ public sealed class LargeBatchIngestTests
for (int i = 0; i < results1.Count; i++)
{
Assert.Equal(results1[i].IsValid, results2[i].IsValid);
Assert.Equal(results1[i].Violations.Length, results2[i].Violations.Length);
}
}
@@ -63,8 +64,8 @@ public sealed class LargeBatchIngestTests
var violations1 = results1[i].Violations;
var violations2 = results2[i].Violations;
Assert.Equal(violations1.Length, violations2.Length);
for (int j = 0; j < violations1.Length; j++)
{
Assert.Equal(violations1[j].ErrorCode, violations2[j].ErrorCode);
Assert.Equal(violations1[j].Path, violations2[j].Path);
@@ -150,15 +151,15 @@ public sealed class LargeBatchIngestTests
// Same generation should produce same violation counts
var validCount1 = results1.Count(r => r.IsValid);
var validCount2 = results2.Count(r => r.IsValid);
var violationCount1 = results1.Sum(r => r.Violations.Length);
var violationCount2 = results2.Sum(r => r.Violations.Length);
Assert.Equal(validCount1, validCount2);
Assert.Equal(violationCount1, violationCount2);
}
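The Count-to-Length switches above are consistent with Violations now being exposed as an ImmutableArray<T> (note the System.Collections.Immutable using added at the top of this file): ImmutableArray<T> surfaces Length directly and only exposes Count through its collection interfaces. A tiny standalone illustration, unrelated to the validator types:

// Standalone illustration of ImmutableArray<T>.Length vs. interface Count.
using System;
using System.Collections.Generic;
using System.Collections.Immutable;

internal static class ImmutableArrayLengthDemo
{
    public static void Main()
    {
        ImmutableArray<string> violations = ImmutableArray.Create("ERR_AOC_001", "ERR_AOC_004");

        Console.WriteLine(violations.Length);                                // 2 (direct property)
        Console.WriteLine(((IReadOnlyCollection<string>)violations).Count);  // 2 (only via the interface)
    }
}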
private static AdvisorySchemaValidator CreateValidator()
=> new(new AocWriteGuard(), OptionsFactory.Create(GuardOptions));
private static List<AdvisoryRawDocument> GenerateValidDocuments(int count)
{

View File

@@ -171,5 +171,27 @@ public sealed class AdvisoryChunkBuilderTests
var bytes = await ComputeHashAsync(stream, algorithmId, cancellationToken).ConfigureAwait(false);
return Convert.ToHexString(bytes).ToLowerInvariant();
}
public byte[] ComputeHashForPurpose(ReadOnlySpan<byte> data, string purpose) => ComputeHash(data, purpose);
public string ComputeHashHexForPurpose(ReadOnlySpan<byte> data, string purpose) => ComputeHashHex(data, purpose);
public string ComputeHashBase64ForPurpose(ReadOnlySpan<byte> data, string purpose) => ComputeHashBase64(data, purpose);
public ValueTask<byte[]> ComputeHashForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
=> ComputeHashAsync(stream, purpose, cancellationToken);
public ValueTask<string> ComputeHashHexForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
=> ComputeHashHexAsync(stream, purpose, cancellationToken);
public string GetAlgorithmForPurpose(string purpose) => purpose ?? "sha256";
public string GetHashPrefix(string purpose) => $"{(purpose ?? "sha256").ToLowerInvariant()}:";
public string ComputePrefixedHashForPurpose(ReadOnlySpan<byte> data, string purpose)
{
var hash = ComputeHashHexForPurpose(data, purpose);
return $"{GetHashPrefix(purpose)}{hash}";
}
}
}
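A quick sketch of the prefixed-hash shape the helpers above produce. The stub's enclosing type is outside this hunk, so the sketch computes SHA-256 directly; treat it as an approximation of ComputePrefixedHashForPurpose, not the real code.

// Sketch: reproduces the "<purpose>:<lower-case hex>" shape of ComputePrefixedHashForPurpose.
using System;
using System.Security.Cryptography;
using System.Text;

internal static class PrefixedHashSketch
{
    public static string Compute(ReadOnlySpan<byte> data, string purpose = "sha256")
    {
        var hex = Convert.ToHexString(SHA256.HashData(data)).ToLowerInvariant();
        return $"{purpose.ToLowerInvariant()}:{hex}";
    }

    public static void Main()
        => Console.WriteLine(Compute(Encoding.UTF8.GetBytes("advisory-chunk"))); // sha256:<64 hex chars>
}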

View File

@@ -159,7 +159,12 @@ public sealed class EvidenceBundlePackagingService
var entry = new PaxTarEntry(TarEntryType.RegularFile, path)
{
Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead,
ModificationTime = FixedTimestamp,
// Determinism: fixed uid/gid/owner/group per bundle-packaging.md
Uid = 0,
Gid = 0,
UserName = string.Empty,
GroupName = string.Empty
};
var bytes = Encoding.UTF8.GetBytes(content);
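A compact helper capturing the same normalization the initializer above applies, shown only as an illustration; the timestamp value mirrors what the tests below assert, while the service's real FixedTimestamp constant is defined outside this hunk.

// Sketch: normalizes PAX tar entry metadata the same way the initializer above does,
// so archives rebuilt from identical inputs stay byte-identical.
using System;
using System.Formats.Tar;
using System.IO;

internal static class DeterministicTarSketch
{
    // Assumed value for illustration; the real constant lives outside this hunk.
    private static readonly DateTimeOffset FixedTimestamp = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);

    public static PaxTarEntry CreateEntry(string path)
        => new(TarEntryType.RegularFile, path)
        {
            Mode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead,
            ModificationTime = FixedTimestamp,
            Uid = 0,
            Gid = 0,
            UserName = string.Empty,
            GroupName = string.Empty
        };
}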

View File

@@ -345,7 +345,12 @@ public sealed class EvidencePortableBundleService
var entry = new PaxTarEntry(TarEntryType.RegularFile, path)
{
Mode = mode == default ? DefaultFileMode : mode,
ModificationTime = FixedTimestamp,
// Determinism: fixed uid/gid/owner/group per bundle-packaging.md
Uid = 0,
Gid = 0,
UserName = string.Empty,
GroupName = string.Empty
};
var bytes = Encoding.UTF8.GetBytes(content);

View File

@@ -18,6 +18,11 @@ public sealed class EvidenceBundlePackagingServiceTests
private static readonly EvidenceBundleId BundleId = EvidenceBundleId.FromGuid(Guid.NewGuid());
private static readonly DateTimeOffset CreatedAt = new(2025, 11, 3, 12, 30, 0, TimeSpan.Zero);
// Fixed IDs for determinism tests (must be constant across runs)
private static readonly EvidenceBundleId BundleIdForDeterminism = EvidenceBundleId.FromGuid(
new Guid("11111111-2222-3333-4444-555555555555"));
private static readonly DateTimeOffset CreatedAtForDeterminism = new(2025, 11, 10, 8, 0, 0, TimeSpan.Zero);
[Fact]
public async Task EnsurePackageAsync_ReturnsCached_WhenPackageExists()
{
@@ -105,6 +110,59 @@ public sealed class EvidenceBundlePackagingServiceTests
Assert.Equal(expectedSeconds, mtime);
}
[Fact]
public async Task EnsurePackageAsync_ProducesDeterministicTarEntryMetadata()
{
var repository = new FakeRepository(CreateSealedBundle(), CreateSignature());
var objectStore = new FakeObjectStore(exists: false);
var service = new EvidenceBundlePackagingService(repository, objectStore, NullLogger<EvidenceBundlePackagingService>.Instance);
await service.EnsurePackageAsync(TenantId, BundleId, CancellationToken.None);
Assert.True(objectStore.Stored);
var entryMetadata = ReadArchiveEntryMetadata(objectStore.StoredBytes!);
// Verify all entries have deterministic uid/gid/username/groupname per bundle-packaging.md
foreach (var (name, meta) in entryMetadata)
{
Assert.Equal(0, meta.Uid);
Assert.Equal(0, meta.Gid);
Assert.True(
string.IsNullOrEmpty(meta.UserName),
$"Entry '{name}' should have empty username but was '{meta.UserName}'");
Assert.True(
string.IsNullOrEmpty(meta.GroupName),
$"Entry '{name}' should have empty groupname but was '{meta.GroupName}'");
Assert.Equal(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero), meta.ModificationTime);
}
}
[Fact]
public async Task EnsurePackageAsync_ProducesIdenticalBytesForSameInput()
{
// First run
var signature1 = CreateSignatureForDeterminism();
var bundle1 = CreateSealedBundleForDeterminism();
var repository1 = new FakeRepository(bundle1, signature1);
var objectStore1 = new FakeObjectStore(exists: false);
var service1 = new EvidenceBundlePackagingService(repository1, objectStore1, NullLogger<EvidenceBundlePackagingService>.Instance);
await service1.EnsurePackageAsync(TenantId, BundleIdForDeterminism, CancellationToken.None);
// Second run (same data)
var signature2 = CreateSignatureForDeterminism();
var bundle2 = CreateSealedBundleForDeterminism();
var repository2 = new FakeRepository(bundle2, signature2);
var objectStore2 = new FakeObjectStore(exists: false);
var service2 = new EvidenceBundlePackagingService(repository2, objectStore2, NullLogger<EvidenceBundlePackagingService>.Instance);
await service2.EnsurePackageAsync(TenantId, BundleIdForDeterminism, CancellationToken.None);
Assert.True(objectStore1.Stored);
Assert.True(objectStore2.Stored);
Assert.Equal(objectStore1.StoredBytes, objectStore2.StoredBytes);
}
[Fact]
public async Task EnsurePackageAsync_Throws_WhenManifestPayloadInvalid()
{
@@ -185,6 +243,62 @@ public sealed class EvidenceBundlePackagingServiceTests
TimestampToken: includeTimestamp ? Encoding.UTF8.GetBytes("tsa-token") : null);
}
// Determinism test helpers: fixed data for reproducible packaging
private static EvidenceBundle CreateSealedBundleForDeterminism()
=> new(
BundleIdForDeterminism,
TenantId,
EvidenceBundleKind.Job,
EvidenceBundleStatus.Sealed,
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
$"tenants/{TenantId.Value:N}/bundles/{BundleIdForDeterminism.Value:N}/bundle.tgz",
CreatedAtForDeterminism,
CreatedAtForDeterminism,
Description: "determinism test",
SealedAt: CreatedAtForDeterminism.AddMinutes(1),
ExpiresAt: null);
private static EvidenceBundleSignature CreateSignatureForDeterminism()
{
var manifest = new
{
bundleId = BundleIdForDeterminism.Value.ToString("D"),
tenantId = TenantId.Value.ToString("D"),
kind = (int)EvidenceBundleKind.Job,
createdAt = CreatedAtForDeterminism.ToString("O"),
metadata = new Dictionary<string, string> { ["run"] = "determinism" },
entries = new[]
{
new
{
section = "inputs",
canonicalPath = "inputs/config.json",
sha256 = "bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",
sizeBytes = 128,
mediaType = "application/json",
attributes = new Dictionary<string, string>()
}
}
};
var manifestJson = JsonSerializer.Serialize(manifest, new JsonSerializerOptions(JsonSerializerDefaults.Web));
var payload = Convert.ToBase64String(Encoding.UTF8.GetBytes(manifestJson));
return new EvidenceBundleSignature(
BundleIdForDeterminism,
TenantId,
"application/vnd.stella.evidence.manifest+json",
payload,
Convert.ToBase64String(Encoding.UTF8.GetBytes("fixed-signature")),
"key-determinism",
"ES256",
"default",
CreatedAtForDeterminism.AddMinutes(1),
TimestampedAt: null,
TimestampAuthority: null,
TimestampToken: null);
}
private static Dictionary<string, string> ReadArchiveEntries(byte[] archiveBytes)
{
using var memory = new MemoryStream(archiveBytes);
@@ -209,6 +323,39 @@ public sealed class EvidenceBundlePackagingServiceTests
return entries;
}
private static Dictionary<string, TarEntryMetadata> ReadArchiveEntryMetadata(byte[] archiveBytes)
{
using var memory = new MemoryStream(archiveBytes);
using var gzip = new GZipStream(memory, CompressionMode.Decompress, leaveOpen: true);
using var reader = new TarReader(gzip);
var entries = new Dictionary<string, TarEntryMetadata>(StringComparer.Ordinal);
TarEntry? entry;
while ((entry = reader.GetNextEntry()) is not null)
{
if (entry.EntryType != TarEntryType.RegularFile)
{
continue;
}
entries[entry.Name] = new TarEntryMetadata(
entry.Uid,
entry.Gid,
entry.UserName ?? string.Empty,
entry.GroupName ?? string.Empty,
entry.ModificationTime);
}
return entries;
}
private sealed record TarEntryMetadata(
int Uid,
int Gid,
string UserName,
string GroupName,
DateTimeOffset ModificationTime);
private sealed class FakeRepository : IEvidenceBundleRepository
{
private EvidenceBundle _bundle;

View File

@@ -94,6 +94,33 @@ public sealed class EvidencePortableBundleServiceTests
await Assert.ThrowsAsync<InvalidOperationException>(() => service.EnsurePortablePackageAsync(TenantId, BundleId, CancellationToken.None));
}
[Fact]
public async Task EnsurePortablePackageAsync_ProducesDeterministicTarEntryMetadata()
{
var repository = new FakeRepository(CreateSealedBundle(), CreateSignature(includeTimestamp: true));
var objectStore = new FakeObjectStore(exists: false);
var service = CreateService(repository, objectStore);
await service.EnsurePortablePackageAsync(TenantId, BundleId, CancellationToken.None);
Assert.True(objectStore.Stored);
var entryMetadata = ReadArchiveEntryMetadata(objectStore.StoredBytes!);
// Verify all entries have deterministic uid/gid/username/groupname per bundle-packaging.md
foreach (var (name, meta) in entryMetadata)
{
Assert.Equal(0, meta.Uid);
Assert.Equal(0, meta.Gid);
Assert.True(
string.IsNullOrEmpty(meta.UserName),
$"Entry '{name}' should have empty username but was '{meta.UserName}'");
Assert.True(
string.IsNullOrEmpty(meta.GroupName),
$"Entry '{name}' should have empty groupname but was '{meta.GroupName}'");
Assert.Equal(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero), meta.ModificationTime);
}
}
private static EvidencePortableBundleService CreateService(FakeRepository repository, IEvidenceObjectStore objectStore)
{
var options = Options.Create(new EvidenceLockerOptions
@@ -200,6 +227,39 @@ public sealed class EvidencePortableBundleServiceTests
return entries;
}
private static Dictionary<string, TarEntryMetadata> ReadArchiveEntryMetadata(byte[] archive)
{
using var memory = new MemoryStream(archive);
using var gzip = new GZipStream(memory, CompressionMode.Decompress);
using var tarReader = new TarReader(gzip);
var entries = new Dictionary<string, TarEntryMetadata>(StringComparer.Ordinal);
TarEntry? entry;
while ((entry = tarReader.GetNextEntry()) is not null)
{
if (entry.EntryType != TarEntryType.RegularFile)
{
continue;
}
entries[entry.Name] = new TarEntryMetadata(
entry.Uid,
entry.Gid,
entry.UserName ?? string.Empty,
entry.GroupName ?? string.Empty,
entry.ModificationTime);
}
return entries;
}
private sealed record TarEntryMetadata(
int Uid,
int Gid,
string UserName,
string GroupName,
DateTimeOffset ModificationTime);
private sealed class FakeRepository : IEvidenceBundleRepository
{
private EvidenceBundle _bundle;

View File

@@ -0,0 +1,299 @@
using System.Net;
using System.Net.Http.Json;
using System.Text;
using System.Text.Json;
using StellaOps.ExportCenter.Client.Models;
using Xunit;
namespace StellaOps.ExportCenter.Client.Tests;
/// <summary>
/// Smoke tests for ExportCenterClient with mock HTTP responses.
/// </summary>
public sealed class ExportCenterClientTests
{
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web);
[Fact]
public async Task GetDiscoveryMetadataAsync_ReturnsMetadata()
{
var expectedMetadata = new OpenApiDiscoveryMetadata(
Service: "export-center",
Version: "1.0.0",
SpecVersion: "3.0.3",
Format: "application/yaml",
Url: "/openapi/export-center.yaml",
JsonUrl: "/openapi/export-center.json",
ErrorEnvelopeSchema: "#/components/schemas/ErrorEnvelope",
GeneratedAt: new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero),
ProfilesSupported: new[] { "attestation", "mirror" },
ChecksumSha256: null);
var handler = new MockHttpMessageHandler(request =>
{
Assert.Equal("/.well-known/openapi", request.RequestUri!.AbsolutePath);
return CreateJsonResponse(expectedMetadata);
});
var client = CreateClient(handler);
var result = await client.GetDiscoveryMetadataAsync();
Assert.Equal("export-center", result.Service);
Assert.Equal("1.0.0", result.Version);
Assert.Equal("3.0.3", result.SpecVersion);
}
[Fact]
public async Task ListProfilesAsync_ReturnsProfiles()
{
var expectedResponse = new ExportProfileListResponse(
Profiles: new[]
{
new ExportProfile(
ProfileId: "profile-1",
Name: "Test Profile",
Description: "Test",
Adapter: "evidence",
Selectors: new Dictionary<string, string> { ["org"] = "test" },
OutputFormat: "tar.gz",
SigningEnabled: true,
CreatedAt: DateTimeOffset.UtcNow,
UpdatedAt: null)
},
ContinuationToken: null,
HasMore: false);
var handler = new MockHttpMessageHandler(request =>
{
Assert.Equal("/v1/exports/profiles", request.RequestUri!.AbsolutePath);
return CreateJsonResponse(expectedResponse);
});
var client = CreateClient(handler);
var result = await client.ListProfilesAsync();
Assert.Single(result.Profiles);
Assert.Equal("profile-1", result.Profiles[0].ProfileId);
Assert.False(result.HasMore);
}
[Fact]
public async Task ListProfilesAsync_WithPagination_IncludesParameters()
{
var expectedResponse = new ExportProfileListResponse([], null, false);
var handler = new MockHttpMessageHandler(request =>
{
var query = request.RequestUri!.Query;
Assert.Contains("limit=10", query);
Assert.Contains("continuationToken=abc123", query);
return CreateJsonResponse(expectedResponse);
});
var client = CreateClient(handler);
await client.ListProfilesAsync(continuationToken: "abc123", limit: 10);
}
[Fact]
public async Task GetProfileAsync_WhenNotFound_ReturnsNull()
{
var handler = new MockHttpMessageHandler(request =>
{
return new HttpResponseMessage(HttpStatusCode.NotFound);
});
var client = CreateClient(handler);
var result = await client.GetProfileAsync("nonexistent");
Assert.Null(result);
}
[Fact]
public async Task CreateEvidenceExportAsync_ReturnsResponse()
{
var expectedResponse = new CreateEvidenceExportResponse(
RunId: "run-123",
Status: "pending",
StatusUrl: "/v1/exports/evidence/run-123/status",
EstimatedCompletionSeconds: 60);
var handler = new MockHttpMessageHandler(request =>
{
Assert.Equal(HttpMethod.Post, request.Method);
Assert.Equal("/v1/exports/evidence", request.RequestUri!.AbsolutePath);
return CreateJsonResponse(expectedResponse, HttpStatusCode.Accepted);
});
var client = CreateClient(handler);
var request = new CreateEvidenceExportRequest("profile-1");
var result = await client.CreateEvidenceExportAsync(request);
Assert.Equal("run-123", result.RunId);
Assert.Equal("pending", result.Status);
}
[Fact]
public async Task GetEvidenceExportStatusAsync_ReturnsStatus()
{
var expectedStatus = new EvidenceExportStatus(
RunId: "run-123",
ProfileId: "profile-1",
Status: "completed",
Progress: 100,
StartedAt: DateTimeOffset.UtcNow.AddMinutes(-5),
CompletedAt: DateTimeOffset.UtcNow,
BundleHash: "sha256:abc123",
DownloadUrl: "/v1/exports/evidence/run-123/download",
ErrorCode: null,
ErrorMessage: null);
var handler = new MockHttpMessageHandler(request =>
{
Assert.Equal("/v1/exports/evidence/run-123/status", request.RequestUri!.AbsolutePath);
return CreateJsonResponse(expectedStatus);
});
var client = CreateClient(handler);
var result = await client.GetEvidenceExportStatusAsync("run-123");
Assert.NotNull(result);
Assert.Equal("completed", result.Status);
Assert.Equal(100, result.Progress);
}
[Fact]
public async Task DownloadEvidenceExportAsync_ReturnsStream()
{
var bundleContent = "test bundle content"u8.ToArray();
var handler = new MockHttpMessageHandler(request =>
{
Assert.Equal("/v1/exports/evidence/run-123/download", request.RequestUri!.AbsolutePath);
return new HttpResponseMessage(HttpStatusCode.OK)
{
Content = new ByteArrayContent(bundleContent)
};
});
var client = CreateClient(handler);
var stream = await client.DownloadEvidenceExportAsync("run-123");
Assert.NotNull(stream);
using var ms = new MemoryStream();
await stream.CopyToAsync(ms);
Assert.Equal(bundleContent, ms.ToArray());
}
[Fact]
public async Task DownloadEvidenceExportAsync_WhenNotReady_ReturnsNull()
{
var handler = new MockHttpMessageHandler(request =>
{
return new HttpResponseMessage(HttpStatusCode.Conflict);
});
var client = CreateClient(handler);
var result = await client.DownloadEvidenceExportAsync("run-123");
Assert.Null(result);
}
[Fact]
public async Task CreateAttestationExportAsync_ReturnsResponse()
{
var expectedResponse = new CreateAttestationExportResponse(
RunId: "att-run-123",
Status: "pending",
StatusUrl: "/v1/exports/attestations/att-run-123/status",
EstimatedCompletionSeconds: 30);
var handler = new MockHttpMessageHandler(request =>
{
Assert.Equal(HttpMethod.Post, request.Method);
Assert.Equal("/v1/exports/attestations", request.RequestUri!.AbsolutePath);
return CreateJsonResponse(expectedResponse, HttpStatusCode.Accepted);
});
var client = CreateClient(handler);
var request = new CreateAttestationExportRequest("profile-1", IncludeTransparencyLog: true);
var result = await client.CreateAttestationExportAsync(request);
Assert.Equal("att-run-123", result.RunId);
}
[Fact]
public async Task GetAttestationExportStatusAsync_IncludesTransparencyLogField()
{
var expectedStatus = new AttestationExportStatus(
RunId: "att-run-123",
ProfileId: "profile-1",
Status: "completed",
Progress: 100,
StartedAt: DateTimeOffset.UtcNow.AddMinutes(-2),
CompletedAt: DateTimeOffset.UtcNow,
BundleHash: "sha256:def456",
DownloadUrl: "/v1/exports/attestations/att-run-123/download",
TransparencyLogIncluded: true,
ErrorCode: null,
ErrorMessage: null);
var handler = new MockHttpMessageHandler(request =>
{
return CreateJsonResponse(expectedStatus);
});
var client = CreateClient(handler);
var result = await client.GetAttestationExportStatusAsync("att-run-123");
Assert.NotNull(result);
Assert.True(result.TransparencyLogIncluded);
}
private static ExportCenterClient CreateClient(MockHttpMessageHandler handler)
{
var httpClient = new HttpClient(handler)
{
BaseAddress = new Uri("https://localhost:5001")
};
return new ExportCenterClient(httpClient);
}
private static HttpResponseMessage CreateJsonResponse<T>(T content, HttpStatusCode statusCode = HttpStatusCode.OK)
{
var json = JsonSerializer.Serialize(content, JsonOptions);
return new HttpResponseMessage(statusCode)
{
Content = new StringContent(json, Encoding.UTF8, "application/json")
};
}
}
/// <summary>
/// Mock HTTP message handler for testing.
/// </summary>
internal sealed class MockHttpMessageHandler : HttpMessageHandler
{
private readonly Func<HttpRequestMessage, HttpResponseMessage> _handler;
public MockHttpMessageHandler(Func<HttpRequestMessage, HttpResponseMessage> handler)
{
_handler = handler;
}
protected override Task<HttpResponseMessage> SendAsync(
HttpRequestMessage request,
CancellationToken cancellationToken)
{
return Task.FromResult(_handler(request));
}
}

View File

@@ -0,0 +1,170 @@
using StellaOps.ExportCenter.Client.Streaming;
using Xunit;
namespace StellaOps.ExportCenter.Client.Tests;
public sealed class ExportDownloadHelperTests : IDisposable
{
private readonly string _tempDir;
public ExportDownloadHelperTests()
{
_tempDir = Path.Combine(Path.GetTempPath(), $"export-download-tests-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
}
public void Dispose()
{
if (Directory.Exists(_tempDir))
{
Directory.Delete(_tempDir, recursive: true);
}
}
[Fact]
public async Task DownloadToFileAsync_WritesContentToFile()
{
var content = "test content"u8.ToArray();
using var stream = new MemoryStream(content);
var outputPath = Path.Combine(_tempDir, "output.bin");
var bytesWritten = await ExportDownloadHelper.DownloadToFileAsync(stream, outputPath);
Assert.Equal(content.Length, bytesWritten);
Assert.True(File.Exists(outputPath));
Assert.Equal(content, await File.ReadAllBytesAsync(outputPath));
}
[Fact]
public async Task DownloadToFileAsync_ReportsProgress()
{
var content = new byte[10000];
Random.Shared.NextBytes(content);
using var stream = new MemoryStream(content);
var outputPath = Path.Combine(_tempDir, "progress.bin");
var progressReports = new List<(long bytes, long? total)>();
await ExportDownloadHelper.DownloadToFileAsync(
stream, outputPath, content.Length, (b, t) => progressReports.Add((b, t)));
Assert.NotEmpty(progressReports);
Assert.Equal(content.Length, progressReports[^1].bytes);
}
[Fact]
public async Task ComputeSha256Async_ReturnsCorrectHash()
{
var content = "test content for hashing"u8.ToArray();
using var stream = new MemoryStream(content);
var hash = await ExportDownloadHelper.ComputeSha256Async(stream);
// Verify it's a valid lowercase hex string
Assert.Equal(64, hash.Length); // SHA-256 produces 32 bytes = 64 hex chars
Assert.All(hash, c => Assert.True(char.IsAsciiHexDigitLower(c)));
}
[Fact]
public async Task DownloadAndVerifyAsync_SucceedsWithCorrectHash()
{
var content = "deterministic content"u8.ToArray();
using var hashStream = new MemoryStream(content);
var expectedHash = await ExportDownloadHelper.ComputeSha256Async(hashStream);
using var downloadStream = new MemoryStream(content);
var outputPath = Path.Combine(_tempDir, "verified.bin");
var actualHash = await ExportDownloadHelper.DownloadAndVerifyAsync(
downloadStream, outputPath, expectedHash);
Assert.Equal(expectedHash, actualHash);
Assert.True(File.Exists(outputPath));
}
[Fact]
public async Task DownloadAndVerifyAsync_ThrowsOnHashMismatch()
{
var content = "actual content"u8.ToArray();
using var stream = new MemoryStream(content);
var outputPath = Path.Combine(_tempDir, "mismatch.bin");
var wrongHash = "0000000000000000000000000000000000000000000000000000000000000000";
await Assert.ThrowsAsync<InvalidOperationException>(() =>
ExportDownloadHelper.DownloadAndVerifyAsync(stream, outputPath, wrongHash));
// Verify file was deleted
Assert.False(File.Exists(outputPath));
}
[Fact]
public async Task DownloadAndVerifyAsync_HandlesSha256Prefix()
{
var content = "prefixed hash test"u8.ToArray();
using var hashStream = new MemoryStream(content);
var hash = await ExportDownloadHelper.ComputeSha256Async(hashStream);
var prefixedHash = "sha256:" + hash;
using var downloadStream = new MemoryStream(content);
var outputPath = Path.Combine(_tempDir, "prefixed.bin");
var actualHash = await ExportDownloadHelper.DownloadAndVerifyAsync(
downloadStream, outputPath, prefixedHash);
Assert.Equal(hash, actualHash);
}
[Fact]
public async Task CopyWithProgressAsync_CopiesCorrectly()
{
var content = new byte[5000];
Random.Shared.NextBytes(content);
using var source = new MemoryStream(content);
using var destination = new MemoryStream();
var bytesCopied = await ExportDownloadHelper.CopyWithProgressAsync(source, destination);
Assert.Equal(content.Length, bytesCopied);
Assert.Equal(content, destination.ToArray());
}
[Fact]
public void CreateProgressLogger_ReturnsWorkingCallback()
{
var messages = new List<string>();
var callback = ExportDownloadHelper.CreateProgressLogger(msg => messages.Add(msg), 100);
// Simulate progress
callback(50, 1000); // Should not log (below threshold)
callback(150, 1000); // Should log
callback(200, 1000); // Should not log (too close to last)
callback(300, 1000); // Should log
Assert.Equal(2, messages.Count);
Assert.Contains("150", messages[0]);
Assert.Contains("300", messages[1]);
}
[Fact]
public void CreateProgressLogger_FormatsWithoutTotalBytes()
{
var messages = new List<string>();
var callback = ExportDownloadHelper.CreateProgressLogger(msg => messages.Add(msg), 100);
callback(200, null);
Assert.Single(messages);
Assert.DoesNotContain("%", messages[0]);
}
[Fact]
public void CreateProgressLogger_FormatsWithTotalBytes()
{
var messages = new List<string>();
var callback = ExportDownloadHelper.CreateProgressLogger(msg => messages.Add(msg), 100);
callback(500, 1000);
Assert.Single(messages);
Assert.Contains("%", messages[0]);
}
}

View File

@@ -0,0 +1,182 @@
using StellaOps.ExportCenter.Client.Lifecycle;
using StellaOps.ExportCenter.Client.Models;
using Xunit;
namespace StellaOps.ExportCenter.Client.Tests;
public sealed class ExportJobLifecycleHelperTests
{
[Theory]
[InlineData("completed", true)]
[InlineData("failed", true)]
[InlineData("cancelled", true)]
[InlineData("pending", false)]
[InlineData("running", false)]
[InlineData("COMPLETED", true)]
public void IsTerminalStatus_ReturnsCorrectValue(string status, bool expected)
{
var result = ExportJobLifecycleHelper.IsTerminalStatus(status);
Assert.Equal(expected, result);
}
[Fact]
public async Task WaitForEvidenceExportCompletionAsync_ReturnsOnTerminalStatus()
{
var callCount = 0;
var mockClient = new MockExportCenterClient
{
GetEvidenceExportStatusHandler = runId =>
{
callCount++;
var status = callCount < 3 ? "running" : "completed";
return new EvidenceExportStatus(
RunId: runId,
ProfileId: "profile-1",
Status: status,
Progress: callCount < 3 ? 50 : 100,
StartedAt: DateTimeOffset.UtcNow,
CompletedAt: callCount >= 3 ? DateTimeOffset.UtcNow : null,
BundleHash: callCount >= 3 ? "sha256:abc" : null,
DownloadUrl: null,
ErrorCode: null,
ErrorMessage: null);
}
};
var result = await ExportJobLifecycleHelper.WaitForEvidenceExportCompletionAsync(
mockClient, "run-1", TimeSpan.FromMilliseconds(10), TimeSpan.FromSeconds(10));
Assert.Equal("completed", result.Status);
Assert.Equal(100, result.Progress);
Assert.Equal(3, callCount);
}
[Fact]
public async Task WaitForEvidenceExportCompletionAsync_ThrowsOnNotFound()
{
var mockClient = new MockExportCenterClient
{
GetEvidenceExportStatusHandler = _ => null
};
await Assert.ThrowsAsync<InvalidOperationException>(() =>
ExportJobLifecycleHelper.WaitForEvidenceExportCompletionAsync(
mockClient, "nonexistent", TimeSpan.FromMilliseconds(10), TimeSpan.FromSeconds(1)));
}
[Fact]
public async Task WaitForAttestationExportCompletionAsync_ReturnsOnTerminalStatus()
{
var callCount = 0;
var mockClient = new MockExportCenterClient
{
GetAttestationExportStatusHandler = runId =>
{
callCount++;
var status = callCount < 2 ? "running" : "completed";
return new AttestationExportStatus(
RunId: runId,
ProfileId: "profile-1",
Status: status,
Progress: callCount < 2 ? 50 : 100,
StartedAt: DateTimeOffset.UtcNow,
CompletedAt: callCount >= 2 ? DateTimeOffset.UtcNow : null,
BundleHash: callCount >= 2 ? "sha256:abc" : null,
DownloadUrl: null,
TransparencyLogIncluded: true,
ErrorCode: null,
ErrorMessage: null);
}
};
var result = await ExportJobLifecycleHelper.WaitForAttestationExportCompletionAsync(
mockClient, "run-1", TimeSpan.FromMilliseconds(10), TimeSpan.FromSeconds(10));
Assert.Equal("completed", result.Status);
Assert.True(result.TransparencyLogIncluded);
}
[Fact]
public async Task CreateEvidenceExportAndWaitAsync_CreatesAndWaits()
{
var createCalled = false;
var mockClient = new MockExportCenterClient
{
CreateEvidenceExportHandler = request =>
{
createCalled = true;
return new CreateEvidenceExportResponse("run-1", "pending", "/status", 10);
},
GetEvidenceExportStatusHandler = runId =>
{
return new EvidenceExportStatus(
runId, "profile-1", "completed", 100,
DateTimeOffset.UtcNow, DateTimeOffset.UtcNow,
"sha256:abc", "/download", null, null);
}
};
var result = await ExportJobLifecycleHelper.CreateEvidenceExportAndWaitAsync(
mockClient,
new CreateEvidenceExportRequest("profile-1"),
TimeSpan.FromMilliseconds(10),
TimeSpan.FromSeconds(10));
Assert.True(createCalled);
Assert.Equal("completed", result.Status);
}
[Fact]
public void TerminalStatuses_ContainsExpectedValues()
{
Assert.Contains("completed", ExportJobLifecycleHelper.TerminalStatuses);
Assert.Contains("failed", ExportJobLifecycleHelper.TerminalStatuses);
Assert.Contains("cancelled", ExportJobLifecycleHelper.TerminalStatuses);
Assert.DoesNotContain("pending", ExportJobLifecycleHelper.TerminalStatuses);
Assert.DoesNotContain("running", ExportJobLifecycleHelper.TerminalStatuses);
}
}
/// <summary>
/// Mock implementation of IExportCenterClient for testing.
/// </summary>
internal sealed class MockExportCenterClient : IExportCenterClient
{
public Func<string, EvidenceExportStatus?>? GetEvidenceExportStatusHandler { get; set; }
public Func<string, AttestationExportStatus?>? GetAttestationExportStatusHandler { get; set; }
public Func<CreateEvidenceExportRequest, CreateEvidenceExportResponse>? CreateEvidenceExportHandler { get; set; }
public Func<CreateAttestationExportRequest, CreateAttestationExportResponse>? CreateAttestationExportHandler { get; set; }
public Task<OpenApiDiscoveryMetadata> GetDiscoveryMetadataAsync(CancellationToken cancellationToken = default)
=> throw new NotImplementedException();
public Task<ExportProfileListResponse> ListProfilesAsync(string? continuationToken = null, int? limit = null, CancellationToken cancellationToken = default)
=> throw new NotImplementedException();
public Task<ExportProfile?> GetProfileAsync(string profileId, CancellationToken cancellationToken = default)
=> throw new NotImplementedException();
public Task<ExportRunListResponse> ListRunsAsync(string? profileId = null, string? continuationToken = null, int? limit = null, CancellationToken cancellationToken = default)
=> throw new NotImplementedException();
public Task<ExportRun?> GetRunAsync(string runId, CancellationToken cancellationToken = default)
=> throw new NotImplementedException();
public Task<CreateEvidenceExportResponse> CreateEvidenceExportAsync(CreateEvidenceExportRequest request, CancellationToken cancellationToken = default)
=> Task.FromResult(CreateEvidenceExportHandler?.Invoke(request) ?? throw new NotImplementedException());
public Task<EvidenceExportStatus?> GetEvidenceExportStatusAsync(string runId, CancellationToken cancellationToken = default)
=> Task.FromResult(GetEvidenceExportStatusHandler?.Invoke(runId));
public Task<Stream?> DownloadEvidenceExportAsync(string runId, CancellationToken cancellationToken = default)
=> throw new NotImplementedException();
public Task<CreateAttestationExportResponse> CreateAttestationExportAsync(CreateAttestationExportRequest request, CancellationToken cancellationToken = default)
=> Task.FromResult(CreateAttestationExportHandler?.Invoke(request) ?? throw new NotImplementedException());
public Task<AttestationExportStatus?> GetAttestationExportStatusAsync(string runId, CancellationToken cancellationToken = default)
=> Task.FromResult(GetAttestationExportStatusHandler?.Invoke(runId));
public Task<Stream?> DownloadAttestationExportAsync(string runId, CancellationToken cancellationToken = default)
=> throw new NotImplementedException();
}

View File

@@ -0,0 +1,33 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<IsPackable>false</IsPackable>
<OutputType>Exe</OutputType>
<UseConcelierTestInfra>false</UseConcelierTestInfra>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="xunit.v3" Version="3.0.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="3.1.3" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="10.0.0" />
</ItemGroup>
<ItemGroup>
<Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" />
</ItemGroup>
<ItemGroup>
<Using Include="Xunit" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.ExportCenter.Client\StellaOps.ExportCenter.Client.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,4 @@
{
"$schema": "https://xunit.net/schema/current/xunit.runner.schema.json",
"methodDisplay": "classAndMethod"
}

View File

@@ -0,0 +1,310 @@
using System.Net;
using System.Net.Http.Json;
using System.Text.Json;
using Microsoft.Extensions.Options;
using StellaOps.ExportCenter.Client.Models;
namespace StellaOps.ExportCenter.Client;
/// <summary>
/// HTTP client implementation for the ExportCenter WebService API.
/// </summary>
public sealed class ExportCenterClient : IExportCenterClient
{
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web);
private readonly HttpClient _httpClient;
/// <summary>
/// Creates a new ExportCenterClient with the specified HttpClient.
/// </summary>
/// <param name="httpClient">HTTP client instance.</param>
public ExportCenterClient(HttpClient httpClient)
{
_httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
}
/// <summary>
/// Creates a new ExportCenterClient with the specified options.
/// </summary>
/// <param name="httpClient">HTTP client instance.</param>
/// <param name="options">Client options.</param>
public ExportCenterClient(HttpClient httpClient, IOptions<ExportCenterClientOptions> options)
: this(httpClient)
{
ArgumentNullException.ThrowIfNull(options);
var opts = options.Value;
_httpClient.BaseAddress = new Uri(opts.BaseUrl);
_httpClient.Timeout = opts.Timeout;
}
#region Discovery
/// <inheritdoc />
public async Task<OpenApiDiscoveryMetadata> GetDiscoveryMetadataAsync(
CancellationToken cancellationToken = default)
{
var response = await _httpClient.GetAsync("/.well-known/openapi", cancellationToken)
.ConfigureAwait(false);
response.EnsureSuccessStatusCode();
var metadata = await response.Content.ReadFromJsonAsync<OpenApiDiscoveryMetadata>(JsonOptions, cancellationToken)
.ConfigureAwait(false);
return metadata ?? throw new InvalidOperationException("Invalid discovery metadata response.");
}
#endregion
#region Profiles
/// <inheritdoc />
public async Task<ExportProfileListResponse> ListProfilesAsync(
string? continuationToken = null,
int? limit = null,
CancellationToken cancellationToken = default)
{
var url = "/v1/exports/profiles";
var queryParams = new List<string>();
if (!string.IsNullOrEmpty(continuationToken))
{
queryParams.Add($"continuationToken={Uri.EscapeDataString(continuationToken)}");
}
if (limit.HasValue)
{
queryParams.Add($"limit={limit.Value}");
}
if (queryParams.Count > 0)
{
url += "?" + string.Join("&", queryParams);
}
var response = await _httpClient.GetAsync(url, cancellationToken).ConfigureAwait(false);
response.EnsureSuccessStatusCode();
var result = await response.Content.ReadFromJsonAsync<ExportProfileListResponse>(JsonOptions, cancellationToken)
.ConfigureAwait(false);
return result ?? new ExportProfileListResponse([], null, false);
}
/// <inheritdoc />
public async Task<ExportProfile?> GetProfileAsync(
string profileId,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(profileId);
var response = await _httpClient.GetAsync($"/v1/exports/profiles/{Uri.EscapeDataString(profileId)}", cancellationToken)
.ConfigureAwait(false);
if (response.StatusCode == HttpStatusCode.NotFound)
{
return null;
}
response.EnsureSuccessStatusCode();
return await response.Content.ReadFromJsonAsync<ExportProfile>(JsonOptions, cancellationToken)
.ConfigureAwait(false);
}
#endregion
#region Runs
/// <inheritdoc />
public async Task<ExportRunListResponse> ListRunsAsync(
string? profileId = null,
string? continuationToken = null,
int? limit = null,
CancellationToken cancellationToken = default)
{
var url = "/v1/exports/runs";
var queryParams = new List<string>();
if (!string.IsNullOrEmpty(profileId))
{
queryParams.Add($"profileId={Uri.EscapeDataString(profileId)}");
}
if (!string.IsNullOrEmpty(continuationToken))
{
queryParams.Add($"continuationToken={Uri.EscapeDataString(continuationToken)}");
}
if (limit.HasValue)
{
queryParams.Add($"limit={limit.Value}");
}
if (queryParams.Count > 0)
{
url += "?" + string.Join("&", queryParams);
}
var response = await _httpClient.GetAsync(url, cancellationToken).ConfigureAwait(false);
response.EnsureSuccessStatusCode();
var result = await response.Content.ReadFromJsonAsync<ExportRunListResponse>(JsonOptions, cancellationToken)
.ConfigureAwait(false);
return result ?? new ExportRunListResponse([], null, false);
}
/// <inheritdoc />
public async Task<ExportRun?> GetRunAsync(
string runId,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(runId);
var response = await _httpClient.GetAsync($"/v1/exports/runs/{Uri.EscapeDataString(runId)}", cancellationToken)
.ConfigureAwait(false);
if (response.StatusCode == HttpStatusCode.NotFound)
{
return null;
}
response.EnsureSuccessStatusCode();
return await response.Content.ReadFromJsonAsync<ExportRun>(JsonOptions, cancellationToken)
.ConfigureAwait(false);
}
#endregion
#region Evidence Exports
/// <inheritdoc />
public async Task<CreateEvidenceExportResponse> CreateEvidenceExportAsync(
CreateEvidenceExportRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var response = await _httpClient.PostAsJsonAsync("/v1/exports/evidence", request, JsonOptions, cancellationToken)
.ConfigureAwait(false);
response.EnsureSuccessStatusCode();
var result = await response.Content.ReadFromJsonAsync<CreateEvidenceExportResponse>(JsonOptions, cancellationToken)
.ConfigureAwait(false);
return result ?? throw new InvalidOperationException("Invalid evidence export response.");
}
/// <inheritdoc />
public async Task<EvidenceExportStatus?> GetEvidenceExportStatusAsync(
string runId,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(runId);
var response = await _httpClient.GetAsync($"/v1/exports/evidence/{Uri.EscapeDataString(runId)}/status", cancellationToken)
.ConfigureAwait(false);
if (response.StatusCode == HttpStatusCode.NotFound)
{
return null;
}
response.EnsureSuccessStatusCode();
return await response.Content.ReadFromJsonAsync<EvidenceExportStatus>(JsonOptions, cancellationToken)
.ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<Stream?> DownloadEvidenceExportAsync(
string runId,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(runId);
var response = await _httpClient.GetAsync(
$"/v1/exports/evidence/{Uri.EscapeDataString(runId)}/download",
HttpCompletionOption.ResponseHeadersRead,
cancellationToken).ConfigureAwait(false);
if (response.StatusCode == HttpStatusCode.NotFound ||
response.StatusCode == HttpStatusCode.Conflict)
{
return null;
}
response.EnsureSuccessStatusCode();
return await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
}
#endregion
#region Attestation Exports
/// <inheritdoc />
public async Task<CreateAttestationExportResponse> CreateAttestationExportAsync(
CreateAttestationExportRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var response = await _httpClient.PostAsJsonAsync("/v1/exports/attestations", request, JsonOptions, cancellationToken)
.ConfigureAwait(false);
response.EnsureSuccessStatusCode();
var result = await response.Content.ReadFromJsonAsync<CreateAttestationExportResponse>(JsonOptions, cancellationToken)
.ConfigureAwait(false);
return result ?? throw new InvalidOperationException("Invalid attestation export response.");
}
/// <inheritdoc />
public async Task<AttestationExportStatus?> GetAttestationExportStatusAsync(
string runId,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(runId);
var response = await _httpClient.GetAsync($"/v1/exports/attestations/{Uri.EscapeDataString(runId)}/status", cancellationToken)
.ConfigureAwait(false);
if (response.StatusCode == HttpStatusCode.NotFound)
{
return null;
}
response.EnsureSuccessStatusCode();
return await response.Content.ReadFromJsonAsync<AttestationExportStatus>(JsonOptions, cancellationToken)
.ConfigureAwait(false);
}
/// <inheritdoc />
public async Task<Stream?> DownloadAttestationExportAsync(
string runId,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(runId);
var response = await _httpClient.GetAsync(
$"/v1/exports/attestations/{Uri.EscapeDataString(runId)}/download",
HttpCompletionOption.ResponseHeadersRead,
cancellationToken).ConfigureAwait(false);
if (response.StatusCode == HttpStatusCode.NotFound ||
response.StatusCode == HttpStatusCode.Conflict)
{
return null;
}
response.EnsureSuccessStatusCode();
return await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false);
}
#endregion
}
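
A minimal construction sketch (commentary, not part of the committed file) for callers that do not use dependency injection; the base URL and profile id are placeholder assumptions:

using StellaOps.ExportCenter.Client;
using StellaOps.ExportCenter.Client.Models;

// Assumed endpoint; substitute the real ExportCenter base URL.
using var httpClient = new HttpClient
{
    BaseAddress = new Uri("https://exportcenter.example.internal"),
    Timeout = TimeSpan.FromSeconds(30)
};

IExportCenterClient client = new ExportCenterClient(httpClient);

// Start an evidence export for an assumed profile id.
var created = await client.CreateEvidenceExportAsync(new CreateEvidenceExportRequest("profile-1"));
Console.WriteLine($"Evidence export {created.RunId} accepted ({created.Status}).");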

View File

@@ -0,0 +1,22 @@
namespace StellaOps.ExportCenter.Client;
/// <summary>
/// Configuration options for the ExportCenter client.
/// </summary>
public sealed class ExportCenterClientOptions
{
/// <summary>
/// Base URL for the ExportCenter API.
/// </summary>
public string BaseUrl { get; set; } = "https://localhost:5001";
/// <summary>
/// Timeout for HTTP requests.
/// </summary>
public TimeSpan Timeout { get; set; } = TimeSpan.FromSeconds(30);
/// <summary>
/// Timeout for streaming downloads.
/// </summary>
public TimeSpan DownloadTimeout { get; set; } = TimeSpan.FromMinutes(10);
}

View File

@@ -0,0 +1,93 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
namespace StellaOps.ExportCenter.Client.Extensions;
/// <summary>
/// Extension methods for configuring ExportCenter client services.
/// </summary>
public static class ServiceCollectionExtensions
{
/// <summary>
/// Adds the ExportCenter client to the service collection.
/// </summary>
/// <param name="services">Service collection.</param>
/// <param name="configureOptions">Action to configure client options.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddExportCenterClient(
this IServiceCollection services,
Action<ExportCenterClientOptions> configureOptions)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configureOptions);
services.Configure(configureOptions);
services.AddHttpClient<IExportCenterClient, ExportCenterClient>((sp, client) =>
{
var options = sp.GetRequiredService<IOptions<ExportCenterClientOptions>>().Value;
client.BaseAddress = new Uri(options.BaseUrl);
client.Timeout = options.Timeout;
});
return services;
}
/// <summary>
/// Adds the ExportCenter client to the service collection with a named HttpClient.
/// </summary>
/// <param name="services">Service collection.</param>
/// <param name="name">HttpClient name.</param>
/// <param name="configureOptions">Action to configure client options.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddExportCenterClient(
this IServiceCollection services,
string name,
Action<ExportCenterClientOptions> configureOptions)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentException.ThrowIfNullOrWhiteSpace(name);
ArgumentNullException.ThrowIfNull(configureOptions);
services.Configure(name, configureOptions);
services.AddHttpClient<IExportCenterClient, ExportCenterClient>(name, (sp, client) =>
{
var optionsMonitor = sp.GetRequiredService<IOptionsMonitor<ExportCenterClientOptions>>();
var options = optionsMonitor.Get(name);
client.BaseAddress = new Uri(options.BaseUrl);
client.Timeout = options.Timeout;
});
return services;
}
/// <summary>
/// Adds the ExportCenter client with custom HttpClient configuration.
/// </summary>
/// <param name="services">Service collection.</param>
/// <param name="configureOptions">Action to configure client options.</param>
/// <param name="configureClient">Additional HttpClient configuration.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddExportCenterClient(
this IServiceCollection services,
Action<ExportCenterClientOptions> configureOptions,
Action<HttpClient> configureClient)
{
ArgumentNullException.ThrowIfNull(services);
ArgumentNullException.ThrowIfNull(configureOptions);
ArgumentNullException.ThrowIfNull(configureClient);
services.Configure(configureOptions);
services.AddHttpClient<IExportCenterClient, ExportCenterClient>((sp, client) =>
{
var options = sp.GetRequiredService<IOptions<ExportCenterClientOptions>>().Value;
client.BaseAddress = new Uri(options.BaseUrl);
client.Timeout = options.Timeout;
configureClient(client);
});
return services;
}
}
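
A registration sketch for a generic-host consumer; the host bootstrap, base URL, and timeout are illustrative assumptions, and Microsoft.Extensions.Hosting is a consumer-side dependency rather than something referenced by this project:

using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using StellaOps.ExportCenter.Client;
using StellaOps.ExportCenter.Client.Extensions;

var builder = Host.CreateApplicationBuilder(args);

builder.Services.AddExportCenterClient(options =>
{
    options.BaseUrl = "https://exportcenter.example.internal"; // assumed endpoint
    options.Timeout = TimeSpan.FromSeconds(60);
});

using var host = builder.Build();

// The typed HttpClient registration makes IExportCenterClient resolvable anywhere in the graph.
var client = host.Services.GetRequiredService<IExportCenterClient>();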

View File

@@ -0,0 +1,143 @@
using StellaOps.ExportCenter.Client.Models;
namespace StellaOps.ExportCenter.Client;
/// <summary>
/// Client interface for the ExportCenter WebService API.
/// </summary>
public interface IExportCenterClient
{
#region Discovery
/// <summary>
/// Gets OpenAPI discovery metadata.
/// </summary>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>OpenAPI discovery metadata.</returns>
Task<OpenApiDiscoveryMetadata> GetDiscoveryMetadataAsync(
CancellationToken cancellationToken = default);
#endregion
#region Profiles
/// <summary>
/// Lists export profiles.
/// </summary>
/// <param name="continuationToken">Continuation token for pagination.</param>
/// <param name="limit">Maximum number of profiles to return.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Paginated list of export profiles.</returns>
Task<ExportProfileListResponse> ListProfilesAsync(
string? continuationToken = null,
int? limit = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets a specific export profile by ID.
/// </summary>
/// <param name="profileId">Profile identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Export profile or null if not found.</returns>
Task<ExportProfile?> GetProfileAsync(
string profileId,
CancellationToken cancellationToken = default);
#endregion
#region Runs
/// <summary>
/// Lists export runs, optionally filtered by profile.
/// </summary>
/// <param name="profileId">Optional profile ID filter.</param>
/// <param name="continuationToken">Continuation token for pagination.</param>
/// <param name="limit">Maximum number of runs to return.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Paginated list of export runs.</returns>
Task<ExportRunListResponse> ListRunsAsync(
string? profileId = null,
string? continuationToken = null,
int? limit = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets a specific export run by ID.
/// </summary>
/// <param name="runId">Run identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Export run or null if not found.</returns>
Task<ExportRun?> GetRunAsync(
string runId,
CancellationToken cancellationToken = default);
#endregion
#region Evidence Exports
/// <summary>
/// Creates a new evidence export job.
/// </summary>
/// <param name="request">Export creation request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Export creation response.</returns>
Task<CreateEvidenceExportResponse> CreateEvidenceExportAsync(
CreateEvidenceExportRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets the status of an evidence export job.
/// </summary>
/// <param name="runId">Run identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Evidence export status or null if not found.</returns>
Task<EvidenceExportStatus?> GetEvidenceExportStatusAsync(
string runId,
CancellationToken cancellationToken = default);
/// <summary>
/// Downloads an evidence export bundle as a stream.
/// </summary>
/// <param name="runId">Run identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Stream containing the bundle, or null if not ready/found.</returns>
Task<Stream?> DownloadEvidenceExportAsync(
string runId,
CancellationToken cancellationToken = default);
#endregion
#region Attestation Exports
/// <summary>
/// Creates a new attestation export job.
/// </summary>
/// <param name="request">Export creation request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Export creation response.</returns>
Task<CreateAttestationExportResponse> CreateAttestationExportAsync(
CreateAttestationExportRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets the status of an attestation export job.
/// </summary>
/// <param name="runId">Run identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Attestation export status or null if not found.</returns>
Task<AttestationExportStatus?> GetAttestationExportStatusAsync(
string runId,
CancellationToken cancellationToken = default);
/// <summary>
/// Downloads an attestation export bundle as a stream.
/// </summary>
/// <param name="runId">Run identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Stream containing the bundle, or null if not ready/found.</returns>
Task<Stream?> DownloadAttestationExportAsync(
string runId,
CancellationToken cancellationToken = default);
#endregion
}
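
A paging sketch against the interface above; `client` is assumed to be an already-resolved IExportCenterClient and the page size of 50 is arbitrary:

string? token = null;
while (true)
{
    var page = await client.ListProfilesAsync(continuationToken: token, limit: 50);
    foreach (var profile in page.Profiles)
    {
        Console.WriteLine($"{profile.ProfileId}: {profile.Name} ({profile.Adapter})");
    }

    if (!page.HasMore)
    {
        break;
    }

    token = page.ContinuationToken;
}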

View File

@@ -0,0 +1,257 @@
using StellaOps.ExportCenter.Client.Models;
namespace StellaOps.ExportCenter.Client.Lifecycle;
/// <summary>
/// Helper methods for export job lifecycle operations.
/// </summary>
public static class ExportJobLifecycleHelper
{
/// <summary>
/// Terminal statuses for export jobs.
/// </summary>
public static readonly IReadOnlySet<string> TerminalStatuses = new HashSet<string>(StringComparer.OrdinalIgnoreCase)
{
"completed",
"failed",
"cancelled"
};
/// <summary>
/// Determines if a status is terminal (export job has finished).
/// </summary>
/// <param name="status">Status to check.</param>
/// <returns>True if terminal status.</returns>
public static bool IsTerminalStatus(string status)
=> TerminalStatuses.Contains(status);
/// <summary>
/// Creates an evidence export and waits for completion.
/// </summary>
/// <param name="client">ExportCenter client.</param>
/// <param name="request">Export creation request.</param>
/// <param name="pollInterval">Interval between status checks (default: 2 seconds).</param>
/// <param name="timeout">Maximum time to wait (default: 30 minutes).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Final evidence export status.</returns>
public static async Task<EvidenceExportStatus> CreateEvidenceExportAndWaitAsync(
IExportCenterClient client,
CreateEvidenceExportRequest request,
TimeSpan? pollInterval = null,
TimeSpan? timeout = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(client);
ArgumentNullException.ThrowIfNull(request);
var createResponse = await client.CreateEvidenceExportAsync(request, cancellationToken)
.ConfigureAwait(false);
return await WaitForEvidenceExportCompletionAsync(
client, createResponse.RunId, pollInterval, timeout, cancellationToken)
.ConfigureAwait(false);
}
/// <summary>
/// Waits for an evidence export to complete.
/// </summary>
/// <param name="client">ExportCenter client.</param>
/// <param name="runId">Run identifier.</param>
/// <param name="pollInterval">Interval between status checks (default: 2 seconds).</param>
/// <param name="timeout">Maximum time to wait (default: 30 minutes).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Final evidence export status.</returns>
public static async Task<EvidenceExportStatus> WaitForEvidenceExportCompletionAsync(
IExportCenterClient client,
string runId,
TimeSpan? pollInterval = null,
TimeSpan? timeout = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(client);
ArgumentException.ThrowIfNullOrWhiteSpace(runId);
var interval = pollInterval ?? TimeSpan.FromSeconds(2);
var maxWait = timeout ?? TimeSpan.FromMinutes(30);
using var cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
cts.CancelAfter(maxWait);
while (true)
{
var status = await client.GetEvidenceExportStatusAsync(runId, cts.Token)
.ConfigureAwait(false);
if (status is null)
{
throw new InvalidOperationException($"Evidence export '{runId}' not found.");
}
if (IsTerminalStatus(status.Status))
{
return status;
}
await Task.Delay(interval, cts.Token).ConfigureAwait(false);
}
}
/// <summary>
/// Creates an attestation export and waits for completion.
/// </summary>
/// <param name="client">ExportCenter client.</param>
/// <param name="request">Export creation request.</param>
/// <param name="pollInterval">Interval between status checks (default: 2 seconds).</param>
/// <param name="timeout">Maximum time to wait (default: 30 minutes).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Final attestation export status.</returns>
public static async Task<AttestationExportStatus> CreateAttestationExportAndWaitAsync(
IExportCenterClient client,
CreateAttestationExportRequest request,
TimeSpan? pollInterval = null,
TimeSpan? timeout = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(client);
ArgumentNullException.ThrowIfNull(request);
var createResponse = await client.CreateAttestationExportAsync(request, cancellationToken)
.ConfigureAwait(false);
return await WaitForAttestationExportCompletionAsync(
client, createResponse.RunId, pollInterval, timeout, cancellationToken)
.ConfigureAwait(false);
}
/// <summary>
/// Waits for an attestation export to complete.
/// </summary>
/// <param name="client">ExportCenter client.</param>
/// <param name="runId">Run identifier.</param>
/// <param name="pollInterval">Interval between status checks (default: 2 seconds).</param>
/// <param name="timeout">Maximum time to wait (default: 30 minutes).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Final attestation export status.</returns>
public static async Task<AttestationExportStatus> WaitForAttestationExportCompletionAsync(
IExportCenterClient client,
string runId,
TimeSpan? pollInterval = null,
TimeSpan? timeout = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(client);
ArgumentException.ThrowIfNullOrWhiteSpace(runId);
var interval = pollInterval ?? TimeSpan.FromSeconds(2);
var maxWait = timeout ?? TimeSpan.FromMinutes(30);
using var cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
cts.CancelAfter(maxWait);
while (true)
{
var status = await client.GetAttestationExportStatusAsync(runId, cts.Token)
.ConfigureAwait(false);
if (status is null)
{
throw new InvalidOperationException($"Attestation export '{runId}' not found.");
}
if (IsTerminalStatus(status.Status))
{
return status;
}
await Task.Delay(interval, cts.Token).ConfigureAwait(false);
}
}
/// <summary>
/// Creates an evidence export, waits for completion, and downloads the bundle.
/// </summary>
/// <param name="client">ExportCenter client.</param>
/// <param name="request">Export creation request.</param>
/// <param name="outputPath">Path to save the downloaded bundle.</param>
/// <param name="pollInterval">Interval between status checks.</param>
/// <param name="timeout">Maximum time to wait.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Final evidence export status.</returns>
public static async Task<EvidenceExportStatus> CreateEvidenceExportAndDownloadAsync(
IExportCenterClient client,
CreateEvidenceExportRequest request,
string outputPath,
TimeSpan? pollInterval = null,
TimeSpan? timeout = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(client);
ArgumentNullException.ThrowIfNull(request);
ArgumentException.ThrowIfNullOrWhiteSpace(outputPath);
var status = await CreateEvidenceExportAndWaitAsync(client, request, pollInterval, timeout, cancellationToken)
.ConfigureAwait(false);
if (status.Status != "completed")
{
throw new InvalidOperationException($"Evidence export failed: {status.ErrorCode} - {status.ErrorMessage}");
}
await using var stream = await client.DownloadEvidenceExportAsync(status.RunId, cancellationToken)
.ConfigureAwait(false);
if (stream is null)
{
throw new InvalidOperationException($"Evidence export bundle not available for download.");
}
await using var fileStream = File.Create(outputPath);
await stream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false);
return status;
}
/// <summary>
/// Creates an attestation export, waits for completion, and downloads the bundle.
/// </summary>
/// <param name="client">ExportCenter client.</param>
/// <param name="request">Export creation request.</param>
/// <param name="outputPath">Path to save the downloaded bundle.</param>
/// <param name="pollInterval">Interval between status checks.</param>
/// <param name="timeout">Maximum time to wait.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Final attestation export status.</returns>
public static async Task<AttestationExportStatus> CreateAttestationExportAndDownloadAsync(
IExportCenterClient client,
CreateAttestationExportRequest request,
string outputPath,
TimeSpan? pollInterval = null,
TimeSpan? timeout = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(client);
ArgumentNullException.ThrowIfNull(request);
ArgumentException.ThrowIfNullOrWhiteSpace(outputPath);
var status = await CreateAttestationExportAndWaitAsync(client, request, pollInterval, timeout, cancellationToken)
.ConfigureAwait(false);
if (status.Status != "completed")
{
throw new InvalidOperationException($"Attestation export failed: {status.ErrorCode} - {status.ErrorMessage}");
}
await using var stream = await client.DownloadAttestationExportAsync(status.RunId, cancellationToken)
.ConfigureAwait(false);
if (stream is null)
{
throw new InvalidOperationException($"Attestation export bundle not available for download.");
}
await using var fileStream = File.Create(outputPath);
await stream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false);
return status;
}
}
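
A usage sketch for the helper above; `client` is an already-resolved IExportCenterClient, and the profile id, output path, and intervals are illustrative:

using StellaOps.ExportCenter.Client.Lifecycle;
using StellaOps.ExportCenter.Client.Models;

var status = await ExportJobLifecycleHelper.CreateEvidenceExportAndDownloadAsync(
    client,
    new CreateEvidenceExportRequest("profile-1"),
    outputPath: "evidence-bundle.tar.gz",
    pollInterval: TimeSpan.FromSeconds(5),
    timeout: TimeSpan.FromMinutes(15));

Console.WriteLine($"Export {status.RunId} completed; bundle hash {status.BundleHash}.");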

View File

@@ -0,0 +1,152 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Client.Models;
/// <summary>
/// Export profile metadata.
/// </summary>
public sealed record ExportProfile(
[property: JsonPropertyName("profileId")] string ProfileId,
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("description")] string? Description,
[property: JsonPropertyName("adapter")] string Adapter,
[property: JsonPropertyName("selectors")] IReadOnlyDictionary<string, string>? Selectors,
[property: JsonPropertyName("outputFormat")] string OutputFormat,
[property: JsonPropertyName("signingEnabled")] bool SigningEnabled,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
[property: JsonPropertyName("updatedAt")] DateTimeOffset? UpdatedAt);
/// <summary>
/// Paginated list of export profiles.
/// </summary>
public sealed record ExportProfileListResponse(
[property: JsonPropertyName("profiles")] IReadOnlyList<ExportProfile> Profiles,
[property: JsonPropertyName("continuationToken")] string? ContinuationToken,
[property: JsonPropertyName("hasMore")] bool HasMore);
/// <summary>
/// Export run representing a single export job execution.
/// </summary>
public sealed record ExportRun(
[property: JsonPropertyName("runId")] string RunId,
[property: JsonPropertyName("profileId")] string ProfileId,
[property: JsonPropertyName("status")] string Status,
[property: JsonPropertyName("progress")] int? Progress,
[property: JsonPropertyName("startedAt")] DateTimeOffset? StartedAt,
[property: JsonPropertyName("completedAt")] DateTimeOffset? CompletedAt,
[property: JsonPropertyName("bundleHash")] string? BundleHash,
[property: JsonPropertyName("bundleUrl")] string? BundleUrl,
[property: JsonPropertyName("errorCode")] string? ErrorCode,
[property: JsonPropertyName("errorMessage")] string? ErrorMessage,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt);
/// <summary>
/// Paginated list of export runs.
/// </summary>
public sealed record ExportRunListResponse(
[property: JsonPropertyName("runs")] IReadOnlyList<ExportRun> Runs,
[property: JsonPropertyName("continuationToken")] string? ContinuationToken,
[property: JsonPropertyName("hasMore")] bool HasMore);
/// <summary>
/// Request to create a new evidence export.
/// </summary>
public sealed record CreateEvidenceExportRequest(
[property: JsonPropertyName("profileId")] string ProfileId,
[property: JsonPropertyName("selectors")] IReadOnlyDictionary<string, string>? Selectors = null,
[property: JsonPropertyName("callbackUrl")] string? CallbackUrl = null);
/// <summary>
/// Response from creating an evidence export.
/// </summary>
public sealed record CreateEvidenceExportResponse(
[property: JsonPropertyName("runId")] string RunId,
[property: JsonPropertyName("status")] string Status,
[property: JsonPropertyName("statusUrl")] string StatusUrl,
[property: JsonPropertyName("estimatedCompletionSeconds")] int? EstimatedCompletionSeconds);
/// <summary>
/// Status of an evidence export.
/// </summary>
public sealed record EvidenceExportStatus(
[property: JsonPropertyName("runId")] string RunId,
[property: JsonPropertyName("profileId")] string ProfileId,
[property: JsonPropertyName("status")] string Status,
[property: JsonPropertyName("progress")] int Progress,
[property: JsonPropertyName("startedAt")] DateTimeOffset? StartedAt,
[property: JsonPropertyName("completedAt")] DateTimeOffset? CompletedAt,
[property: JsonPropertyName("bundleHash")] string? BundleHash,
[property: JsonPropertyName("downloadUrl")] string? DownloadUrl,
[property: JsonPropertyName("errorCode")] string? ErrorCode,
[property: JsonPropertyName("errorMessage")] string? ErrorMessage);
/// <summary>
/// Request to create a new attestation export.
/// </summary>
public sealed record CreateAttestationExportRequest(
[property: JsonPropertyName("profileId")] string ProfileId,
[property: JsonPropertyName("selectors")] IReadOnlyDictionary<string, string>? Selectors = null,
[property: JsonPropertyName("includeTransparencyLog")] bool IncludeTransparencyLog = true,
[property: JsonPropertyName("callbackUrl")] string? CallbackUrl = null);
/// <summary>
/// Response from creating an attestation export.
/// </summary>
public sealed record CreateAttestationExportResponse(
[property: JsonPropertyName("runId")] string RunId,
[property: JsonPropertyName("status")] string Status,
[property: JsonPropertyName("statusUrl")] string StatusUrl,
[property: JsonPropertyName("estimatedCompletionSeconds")] int? EstimatedCompletionSeconds);
/// <summary>
/// Status of an attestation export.
/// </summary>
public sealed record AttestationExportStatus(
[property: JsonPropertyName("runId")] string RunId,
[property: JsonPropertyName("profileId")] string ProfileId,
[property: JsonPropertyName("status")] string Status,
[property: JsonPropertyName("progress")] int Progress,
[property: JsonPropertyName("startedAt")] DateTimeOffset? StartedAt,
[property: JsonPropertyName("completedAt")] DateTimeOffset? CompletedAt,
[property: JsonPropertyName("bundleHash")] string? BundleHash,
[property: JsonPropertyName("downloadUrl")] string? DownloadUrl,
[property: JsonPropertyName("transparencyLogIncluded")] bool TransparencyLogIncluded,
[property: JsonPropertyName("errorCode")] string? ErrorCode,
[property: JsonPropertyName("errorMessage")] string? ErrorMessage);
/// <summary>
/// OpenAPI discovery metadata.
/// </summary>
public sealed record OpenApiDiscoveryMetadata(
[property: JsonPropertyName("service")] string Service,
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("specVersion")] string SpecVersion,
[property: JsonPropertyName("format")] string Format,
[property: JsonPropertyName("url")] string Url,
[property: JsonPropertyName("jsonUrl")] string? JsonUrl,
[property: JsonPropertyName("errorEnvelopeSchema")] string ErrorEnvelopeSchema,
[property: JsonPropertyName("generatedAt")] DateTimeOffset GeneratedAt,
[property: JsonPropertyName("profilesSupported")] IReadOnlyList<string>? ProfilesSupported,
[property: JsonPropertyName("checksumSha256")] string? ChecksumSha256);
/// <summary>
/// Standard error envelope.
/// </summary>
public sealed record ErrorEnvelope(
[property: JsonPropertyName("error")] ErrorDetail Error);
/// <summary>
/// Error detail within an error envelope.
/// </summary>
public sealed record ErrorDetail(
[property: JsonPropertyName("code")] string Code,
[property: JsonPropertyName("message")] string Message,
[property: JsonPropertyName("correlationId")] string? CorrelationId = null,
[property: JsonPropertyName("details")] IReadOnlyList<ErrorDetailItem>? Details = null);
/// <summary>
/// Individual error detail item.
/// </summary>
public sealed record ErrorDetailItem(
[property: JsonPropertyName("field")] string? Field,
[property: JsonPropertyName("reason")] string Reason);

View File

@@ -0,0 +1,17 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<Description>SDK client for StellaOps ExportCenter WebService API</Description>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,175 @@
using System.Security.Cryptography;
namespace StellaOps.ExportCenter.Client.Streaming;
/// <summary>
/// Helper methods for streaming export bundle downloads.
/// </summary>
public static class ExportDownloadHelper
{
private const int DefaultBufferSize = 81920; // 80 KB
/// <summary>
/// Downloads a stream to a file with progress reporting.
/// </summary>
/// <param name="stream">Source stream.</param>
/// <param name="outputPath">Destination file path.</param>
/// <param name="expectedLength">Expected content length (if known).</param>
/// <param name="progressCallback">Progress callback (bytes downloaded, total bytes or null).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Total bytes downloaded.</returns>
public static async Task<long> DownloadToFileAsync(
Stream stream,
string outputPath,
long? expectedLength = null,
Action<long, long?>? progressCallback = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(stream);
ArgumentException.ThrowIfNullOrWhiteSpace(outputPath);
await using var fileStream = File.Create(outputPath);
return await CopyWithProgressAsync(stream, fileStream, expectedLength, progressCallback, cancellationToken)
.ConfigureAwait(false);
}
/// <summary>
/// Downloads a stream to a file and verifies SHA-256 checksum.
/// </summary>
/// <param name="stream">Source stream.</param>
/// <param name="outputPath">Destination file path.</param>
/// <param name="expectedSha256">Expected SHA-256 hash (hex string).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Actual SHA-256 hash of the downloaded file.</returns>
/// <exception cref="InvalidOperationException">Thrown if checksum doesn't match.</exception>
public static async Task<string> DownloadAndVerifyAsync(
Stream stream,
string outputPath,
string expectedSha256,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(stream);
ArgumentException.ThrowIfNullOrWhiteSpace(outputPath);
ArgumentException.ThrowIfNullOrWhiteSpace(expectedSha256);
using var sha256 = SHA256.Create();
await using var fileStream = File.Create(outputPath);
await using var cryptoStream = new CryptoStream(fileStream, sha256, CryptoStreamMode.Write);
var buffer = new byte[DefaultBufferSize];
int bytesRead;
while ((bytesRead = await stream.ReadAsync(buffer, cancellationToken).ConfigureAwait(false)) > 0)
{
await cryptoStream.WriteAsync(buffer.AsMemory(0, bytesRead), cancellationToken).ConfigureAwait(false);
}
await cryptoStream.FlushFinalBlockAsync(cancellationToken).ConfigureAwait(false);
var actualHash = Convert.ToHexString(sha256.Hash!).ToLowerInvariant();
var expectedNormalized = expectedSha256.ToLowerInvariant().Replace("sha256:", "");
if (!string.Equals(actualHash, expectedNormalized, StringComparison.Ordinal))
{
// Delete the corrupted file
File.Delete(outputPath);
throw new InvalidOperationException(
$"Checksum verification failed. Expected: {expectedNormalized}, Actual: {actualHash}");
}
return actualHash;
}
/// <summary>
/// Computes SHA-256 hash of a stream.
/// </summary>
/// <param name="stream">Source stream.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>SHA-256 hash as hex string.</returns>
public static async Task<string> ComputeSha256Async(
Stream stream,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(stream);
using var sha256 = SHA256.Create();
var hash = await sha256.ComputeHashAsync(stream, cancellationToken).ConfigureAwait(false);
return Convert.ToHexString(hash).ToLowerInvariant();
}
/// <summary>
/// Copies a stream with progress reporting.
/// </summary>
/// <param name="source">Source stream.</param>
/// <param name="destination">Destination stream.</param>
/// <param name="expectedLength">Expected content length (if known).</param>
/// <param name="progressCallback">Progress callback (bytes copied, total bytes or null).</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Total bytes copied.</returns>
public static async Task<long> CopyWithProgressAsync(
Stream source,
Stream destination,
long? expectedLength = null,
Action<long, long?>? progressCallback = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(source);
ArgumentNullException.ThrowIfNull(destination);
var buffer = new byte[DefaultBufferSize];
long totalBytes = 0;
int bytesRead;
while ((bytesRead = await source.ReadAsync(buffer, cancellationToken).ConfigureAwait(false)) > 0)
{
await destination.WriteAsync(buffer.AsMemory(0, bytesRead), cancellationToken).ConfigureAwait(false);
totalBytes += bytesRead;
progressCallback?.Invoke(totalBytes, expectedLength);
}
return totalBytes;
}
/// <summary>
/// Creates a progress callback that logs progress at specified intervals.
/// </summary>
/// <param name="logAction">Action to invoke with progress message.</param>
/// <param name="reportIntervalBytes">Minimum bytes between progress reports (default: 1 MB).</param>
/// <returns>Progress callback action.</returns>
public static Action<long, long?> CreateProgressLogger(
Action<string> logAction,
long reportIntervalBytes = 1_048_576)
{
ArgumentNullException.ThrowIfNull(logAction);
long lastReportedBytes = 0;
return (bytesDownloaded, totalBytes) =>
{
if (bytesDownloaded - lastReportedBytes >= reportIntervalBytes)
{
lastReportedBytes = bytesDownloaded;
var message = totalBytes.HasValue
? $"Downloaded {FormatBytes(bytesDownloaded)} of {FormatBytes(totalBytes.Value)} ({bytesDownloaded * 100 / totalBytes.Value}%)"
: $"Downloaded {FormatBytes(bytesDownloaded)}";
logAction(message);
}
};
}
private static string FormatBytes(long bytes)
{
string[] sizes = ["B", "KB", "MB", "GB", "TB"];
var order = 0;
double len = bytes;
while (len >= 1024 && order < sizes.Length - 1)
{
order++;
len /= 1024;
}
return $"{len:0.##} {sizes[order]}";
}
}
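
A download sketch combining the client with the helper above; `client` and `status` are assumed to come from an earlier status poll, and `status.BundleHash` is expected in the `sha256:<hex>` form used by the status models:

using StellaOps.ExportCenter.Client.Streaming;

await using var stream = await client.DownloadEvidenceExportAsync(status.RunId);
if (stream is not null)
{
    // DownloadAndVerifyAsync deletes the file and throws if the checksum does not match.
    var actualHash = await ExportDownloadHelper.DownloadAndVerifyAsync(
        stream, "evidence-bundle.tar.gz", status.BundleHash!);
    Console.WriteLine($"Verified bundle sha256:{actualHash}");
}
else
{
    Console.WriteLine("Bundle not ready yet; retry after the export reaches a terminal status.");
}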

View File

@@ -0,0 +1,299 @@
using System.Buffers.Binary;
using System.Formats.Tar;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.AttestationBundle;
/// <summary>
/// Builds deterministic attestation bundle exports for air-gap/offline delivery.
/// </summary>
public sealed class AttestationBundleBuilder
{
private const string BundleVersion = "attestation-bundle/v1";
private const string DefaultStatementVersion = "v1";
private const string DsseEnvelopeFileName = "attestation.dsse.json";
private const string StatementFileName = "statement.json";
private const string TransparencyFileName = "transparency.ndjson";
private const string MetadataFileName = "metadata.json";
private const string ChecksumsFileName = "checksums.txt";
private const string VerifyScriptFileName = "verify-attestation.sh";
private static readonly DateTimeOffset FixedTimestamp = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
private static readonly UnixFileMode DefaultFileMode =
UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead;
private static readonly UnixFileMode ExecutableFileMode =
UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute |
UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
UnixFileMode.OtherRead | UnixFileMode.OtherExecute;
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
private readonly ICryptoHash _cryptoHash;
private readonly TimeProvider _timeProvider;
public AttestationBundleBuilder(ICryptoHash cryptoHash, TimeProvider? timeProvider = null)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Builds an attestation bundle export from the provided request.
/// </summary>
public AttestationBundleExportResult Build(AttestationBundleExportRequest request, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (request.ExportId == Guid.Empty)
{
throw new ArgumentException("Export identifier must be provided.", nameof(request));
}
if (request.AttestationId == Guid.Empty)
{
throw new ArgumentException("Attestation identifier must be provided.", nameof(request));
}
if (request.TenantId == Guid.Empty)
{
throw new ArgumentException("Tenant identifier must be provided.", nameof(request));
}
if (string.IsNullOrWhiteSpace(request.DsseEnvelopeJson))
{
throw new ArgumentException("DSSE envelope JSON must be provided.", nameof(request));
}
if (string.IsNullOrWhiteSpace(request.StatementJson))
{
throw new ArgumentException("Statement JSON must be provided.", nameof(request));
}
cancellationToken.ThrowIfCancellationRequested();
// Compute hashes for each component
var dsseBytes = Encoding.UTF8.GetBytes(request.DsseEnvelopeJson);
var dsseSha256 = _cryptoHash.ComputeHashHexForPurpose(dsseBytes, HashPurpose.Content);
var statementBytes = Encoding.UTF8.GetBytes(request.StatementJson);
var statementSha256 = _cryptoHash.ComputeHashHexForPurpose(statementBytes, HashPurpose.Content);
// Build transparency NDJSON if entries exist
string? transparencyNdjson = null;
string? transparencySha256 = null;
if (request.TransparencyEntries is { Count: > 0 })
{
var transparencyBuilder = new StringBuilder();
foreach (var entry in request.TransparencyEntries.OrderBy(e => e, StringComparer.Ordinal))
{
transparencyBuilder.AppendLine(entry);
}
transparencyNdjson = transparencyBuilder.ToString();
transparencySha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(transparencyNdjson), HashPurpose.Content);
}
// Build initial metadata (rootHash computed later)
var metadata = new AttestationBundleMetadata(
BundleVersion,
request.ExportId.ToString("D"),
request.AttestationId.ToString("D"),
request.TenantId.ToString("D"),
_timeProvider.GetUtcNow(),
string.Empty, // Placeholder; the root hash is computed below and re-applied
request.SourceUri,
request.StatementVersion ?? DefaultStatementVersion,
request.SubjectDigests);
var metadataJson = JsonSerializer.Serialize(metadata, SerializerOptions);
var metadataSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(metadataJson), HashPurpose.Content);
// Build verification script
var verifyScript = BuildVerificationScript(request.AttestationId);
var verifyScriptSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(verifyScript), HashPurpose.Content);
// Build checksums (without root hash line yet)
var checksums = BuildChecksums(dsseSha256, statementSha256, transparencySha256, metadataSha256);
var checksumsSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(checksums), HashPurpose.Content);
// Compute root hash from all component hashes
var hashList = new List<string> { dsseSha256, statementSha256, metadataSha256, checksumsSha256, verifyScriptSha256 };
if (transparencySha256 is not null)
{
hashList.Add(transparencySha256);
}
var rootHash = ComputeRootHash(hashList);
// Rebuild metadata with root hash
var finalMetadata = metadata with { RootHash = rootHash };
var finalMetadataJson = JsonSerializer.Serialize(finalMetadata, SerializerOptions);
var finalMetadataSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(finalMetadataJson), HashPurpose.Content);
// Rebuild checksums with final metadata hash
var finalChecksums = BuildChecksums(dsseSha256, statementSha256, transparencySha256, finalMetadataSha256);
// Create the export archive
var exportStream = CreateExportArchive(
request.DsseEnvelopeJson,
request.StatementJson,
transparencyNdjson,
finalMetadataJson,
finalChecksums,
verifyScript);
exportStream.Position = 0;
return new AttestationBundleExportResult(
finalMetadata,
finalMetadataJson,
rootHash,
exportStream);
}
private string ComputeRootHash(IEnumerable<string> hashes)
{
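// Order-independent root: component hashes are sorted lexically, NUL-joined, then hashed once.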
var builder = new StringBuilder();
foreach (var hash in hashes.OrderBy(h => h, StringComparer.Ordinal))
{
builder.Append(hash).Append('\0');
}
var bytes = Encoding.UTF8.GetBytes(builder.ToString());
return _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
}
private static string BuildChecksums(string dsseSha256, string statementSha256, string? transparencySha256, string metadataSha256)
{
var builder = new StringBuilder();
builder.AppendLine("# Attestation bundle checksums (sha256)");
// Lexical order
builder.Append(dsseSha256).Append(" ").AppendLine(DsseEnvelopeFileName);
builder.Append(metadataSha256).Append(" ").AppendLine(MetadataFileName);
builder.Append(statementSha256).Append(" ").AppendLine(StatementFileName);
if (transparencySha256 is not null)
{
builder.Append(transparencySha256).Append(" ").AppendLine(TransparencyFileName);
}
return builder.ToString();
}
private static string BuildVerificationScript(Guid attestationId)
{
var builder = new StringBuilder();
builder.AppendLine("#!/usr/bin/env sh");
builder.AppendLine("# Attestation Bundle Verification Script");
builder.AppendLine("# No network access required");
builder.AppendLine();
builder.AppendLine("set -eu");
builder.AppendLine();
builder.AppendLine("# Verify checksums");
builder.AppendLine("echo \"Verifying checksums...\"");
builder.AppendLine("if command -v sha256sum >/dev/null 2>&1; then");
builder.AppendLine(" sha256sum --check checksums.txt");
builder.AppendLine("elif command -v shasum >/dev/null 2>&1; then");
builder.AppendLine(" shasum -a 256 --check checksums.txt");
builder.AppendLine("else");
builder.AppendLine(" echo \"Error: sha256sum or shasum required\" >&2");
builder.AppendLine(" exit 1");
builder.AppendLine("fi");
builder.AppendLine();
builder.AppendLine("echo \"\"");
builder.AppendLine("echo \"Checksums verified successfully.\"");
builder.AppendLine("echo \"\"");
builder.AppendLine();
builder.AppendLine("# Verify DSSE envelope");
builder.Append("ATTESTATION_ID=\"").Append(attestationId.ToString("D")).AppendLine("\"");
builder.AppendLine("DSSE_FILE=\"attestation.dsse.json\"");
builder.AppendLine();
builder.AppendLine("if command -v stella >/dev/null 2>&1; then");
builder.AppendLine(" echo \"Verifying DSSE envelope with stella CLI...\"");
builder.AppendLine(" stella attest verify --envelope \"$DSSE_FILE\" --attestation-id \"$ATTESTATION_ID\"");
builder.AppendLine("else");
builder.AppendLine(" echo \"Note: stella CLI not found. Manual DSSE verification recommended.\"");
builder.AppendLine(" echo \"Install stella CLI and run: stella attest verify --envelope $DSSE_FILE\"");
builder.AppendLine("fi");
builder.AppendLine();
builder.AppendLine("echo \"\"");
builder.AppendLine("echo \"Verification complete.\"");
return builder.ToString();
}
private MemoryStream CreateExportArchive(
string dsseEnvelopeJson,
string statementJson,
string? transparencyNdjson,
string metadataJson,
string checksums,
string verifyScript)
{
var stream = new MemoryStream();
using (var gzip = new GZipStream(stream, CompressionLevel.SmallestSize, leaveOpen: true))
using (var tar = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: true))
{
// Write files in lexical order for determinism
WriteTextEntry(tar, DsseEnvelopeFileName, dsseEnvelopeJson, DefaultFileMode);
WriteTextEntry(tar, ChecksumsFileName, checksums, DefaultFileMode);
WriteTextEntry(tar, MetadataFileName, metadataJson, DefaultFileMode);
WriteTextEntry(tar, StatementFileName, statementJson, DefaultFileMode);
if (transparencyNdjson is not null)
{
WriteTextEntry(tar, TransparencyFileName, transparencyNdjson, DefaultFileMode);
}
WriteTextEntry(tar, VerifyScriptFileName, verifyScript, ExecutableFileMode);
}
ApplyDeterministicGzipHeader(stream);
return stream;
}
private static void WriteTextEntry(TarWriter writer, string path, string content, UnixFileMode mode)
{
var bytes = Encoding.UTF8.GetBytes(content);
using var dataStream = new MemoryStream(bytes);
var entry = new PaxTarEntry(TarEntryType.RegularFile, path)
{
Mode = mode,
ModificationTime = FixedTimestamp,
Uid = 0,
Gid = 0,
UserName = string.Empty,
GroupName = string.Empty,
DataStream = dataStream
};
writer.WriteEntry(entry);
}
private static void ApplyDeterministicGzipHeader(MemoryStream stream)
{
if (stream.Length < 10)
{
throw new InvalidOperationException("GZip header not fully written for attestation bundle export.");
}
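// Per RFC 1952 the gzip header is ID1 ID2 CM FLG MTIME(4) XFL OS, so the MTIME field occupies
// bytes 4-7 (little-endian). Patching it in place pins the header timestamp to the fixed,
// deterministic value so repeated exports stay byte-for-byte identical.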
var seconds = checked((int)(FixedTimestamp - DateTimeOffset.UnixEpoch).TotalSeconds);
Span<byte> buffer = stackalloc byte[4];
BinaryPrimitives.WriteInt32LittleEndian(buffer, seconds);
var originalPosition = stream.Position;
stream.Position = 4;
stream.Write(buffer);
stream.Position = originalPosition;
}
}

View File

@@ -0,0 +1,71 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.AttestationBundle;
/// <summary>
/// Request to create an attestation bundle export.
/// </summary>
public sealed record AttestationBundleExportRequest(
Guid ExportId,
Guid AttestationId,
Guid TenantId,
string DsseEnvelopeJson,
string StatementJson,
IReadOnlyList<string>? TransparencyEntries = null,
IReadOnlyList<AttestationSubjectDigest>? SubjectDigests = null,
string? SourceUri = null,
string? StatementVersion = null);
/// <summary>
/// Result of building an attestation bundle export.
/// </summary>
public sealed record AttestationBundleExportResult(
AttestationBundleMetadata Metadata,
string MetadataJson,
string RootHash,
MemoryStream ExportStream);
/// <summary>
/// Metadata document for attestation bundle exports.
/// </summary>
public sealed record AttestationBundleMetadata(
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("exportId")] string ExportId,
[property: JsonPropertyName("attestationId")] string AttestationId,
[property: JsonPropertyName("tenantId")] string TenantId,
[property: JsonPropertyName("createdAtUtc")] DateTimeOffset CreatedAtUtc,
[property: JsonPropertyName("rootHash")] string RootHash,
[property: JsonPropertyName("sourceUri")] string? SourceUri,
[property: JsonPropertyName("statementVersion")] string StatementVersion,
[property: JsonPropertyName("subjectDigests")] IReadOnlyList<AttestationSubjectDigest>? SubjectDigests);
/// <summary>
/// Subject digest entry for attestation bundles.
/// </summary>
public sealed record AttestationSubjectDigest(
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("digest")] string Digest,
[property: JsonPropertyName("algorithm")] string Algorithm);
/// <summary>
/// Export status for attestation bundles.
/// </summary>
public enum AttestationBundleExportStatus
{
Pending = 1,
Packaging = 2,
Ready = 3,
Failed = 4
}
/// <summary>
/// Status response for attestation bundle export.
/// </summary>
public sealed record AttestationBundleExportStatusResponse(
[property: JsonPropertyName("exportId")] string ExportId,
[property: JsonPropertyName("attestationId")] string AttestationId,
[property: JsonPropertyName("status")] string Status,
[property: JsonPropertyName("rootHash")] string? RootHash,
[property: JsonPropertyName("downloadUri")] string? DownloadUri,
[property: JsonPropertyName("attestationDigests")] IReadOnlyList<AttestationSubjectDigest>? AttestationDigests,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt);

View File

@@ -0,0 +1,550 @@
using System.Buffers.Binary;
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.BootstrapPack;
/// <summary>
/// Builds deterministic bootstrap packs for air-gap deployment containing Helm charts and container images.
/// </summary>
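/// <remarks>
/// A minimal usage sketch; <c>cryptoHash</c> stands for whichever <c>ICryptoHash</c> implementation the host
/// registers, and the chart/blob paths and digest below are illustrative placeholders, not values this library supplies:
/// <code>
/// var builder = new BootstrapPackBuilder(cryptoHash);
/// var request = new BootstrapPackBuildRequest(
/// ExportId: Guid.NewGuid(),
/// TenantId: Guid.NewGuid(),
/// Charts: new[] { new BootstrapPackChartSource("stellaops", "1.2.3", "./charts/stellaops-1.2.3.tgz") },
/// Images: new[] { new BootstrapPackImageSource("registry/app", "1.2.3", "sha256:...", "./blobs/app.tar") });
/// var result = builder.Build(request);
/// File.WriteAllBytes("bootstrap-pack.tgz", result.PackStream.ToArray());
/// </code>
/// </remarks>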
public sealed class BootstrapPackBuilder
{
private const string ManifestVersion = "bootstrap/v1";
private const int OciSchemaVersion = 2;
private const string OciImageIndexMediaType = "application/vnd.oci.image.index.v1+json";
private const string OciImageLayoutVersion = "1.0.0";
private static readonly DateTimeOffset FixedTimestamp = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
private static readonly UnixFileMode DefaultFileMode =
UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead;
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
private readonly ICryptoHash _cryptoHash;
private readonly TimeProvider _timeProvider;
public BootstrapPackBuilder(ICryptoHash cryptoHash, TimeProvider? timeProvider = null)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Builds a bootstrap pack from the provided request.
/// </summary>
public BootstrapPackBuildResult Build(BootstrapPackBuildRequest request, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (request.ExportId == Guid.Empty)
{
throw new ArgumentException("Export identifier must be provided.", nameof(request));
}
if (request.TenantId == Guid.Empty)
{
throw new ArgumentException("Tenant identifier must be provided.", nameof(request));
}
if ((request.Charts is null || request.Charts.Count == 0) &&
(request.Images is null || request.Images.Count == 0))
{
throw new ArgumentException("At least one chart or image must be provided.", nameof(request));
}
cancellationToken.ThrowIfCancellationRequested();
// Collect and validate charts
var chartEntries = CollectCharts(request.Charts, cancellationToken);
// Collect and validate images
var imageEntries = CollectImages(request.Images, cancellationToken);
// Build manifest
var rootHash = ComputeRootHash(chartEntries, imageEntries);
var manifest = BuildManifest(request, chartEntries, imageEntries, rootHash);
var manifestJson = JsonSerializer.Serialize(manifest, SerializerOptions);
// Build OCI index
var ociIndex = BuildOciIndex(imageEntries);
var ociIndexJson = JsonSerializer.Serialize(ociIndex, SerializerOptions);
// Build OCI layout marker
var ociLayout = new OciImageLayout(OciImageLayoutVersion);
var ociLayoutJson = JsonSerializer.Serialize(ociLayout, SerializerOptions);
// Build checksums
var checksums = BuildChecksums(chartEntries, imageEntries, rootHash);
// Create the pack archive
var packStream = CreatePackArchive(
request,
chartEntries,
imageEntries,
manifestJson,
ociIndexJson,
ociLayoutJson,
checksums);
// Compute final artifact SHA-256
packStream.Position = 0;
var artifactSha256 = ComputeStreamHash(packStream);
packStream.Position = 0;
return new BootstrapPackBuildResult(
manifest,
manifestJson,
rootHash,
artifactSha256,
packStream);
}
private List<CollectedChart> CollectCharts(
IReadOnlyList<BootstrapPackChartSource>? charts,
CancellationToken cancellationToken)
{
var entries = new List<CollectedChart>();
if (charts is null || charts.Count == 0)
{
return entries;
}
foreach (var chart in charts)
{
cancellationToken.ThrowIfCancellationRequested();
if (chart is null)
{
throw new ArgumentException("Chart sources cannot contain null entries.");
}
if (string.IsNullOrWhiteSpace(chart.ChartPath))
{
throw new ArgumentException($"Chart path cannot be empty for chart '{chart.Name}'.");
}
var fullPath = Path.GetFullPath(chart.ChartPath);
if (!File.Exists(fullPath) && !Directory.Exists(fullPath))
{
throw new FileNotFoundException($"Chart path '{fullPath}' not found.", fullPath);
}
string sha256;
long size;
if (Directory.Exists(fullPath))
{
// For directories, compute combined hash
sha256 = ComputeDirectoryHash(fullPath, cancellationToken);
size = GetDirectorySize(fullPath);
}
else
{
var fileBytes = File.ReadAllBytes(fullPath);
sha256 = _cryptoHash.ComputeHashHexForPurpose(fileBytes, HashPurpose.Content);
size = fileBytes.LongLength;
}
var bundlePath = $"charts/{SanitizeSegment(chart.Name)}-{SanitizeSegment(chart.Version)}";
entries.Add(new CollectedChart(
chart.Name,
chart.Version,
bundlePath,
fullPath,
sha256,
size));
}
// Sort for deterministic ordering
entries.Sort((a, b) => StringComparer.Ordinal.Compare(a.BundlePath, b.BundlePath));
return entries;
}
private List<CollectedImage> CollectImages(
IReadOnlyList<BootstrapPackImageSource>? images,
CancellationToken cancellationToken)
{
var entries = new List<CollectedImage>();
if (images is null || images.Count == 0)
{
return entries;
}
foreach (var image in images)
{
cancellationToken.ThrowIfCancellationRequested();
if (image is null)
{
throw new ArgumentException("Image sources cannot contain null entries.");
}
if (string.IsNullOrWhiteSpace(image.BlobPath))
{
throw new ArgumentException($"Blob path cannot be empty for image '{image.Repository}:{image.Tag}'.");
}
var fullPath = Path.GetFullPath(image.BlobPath);
if (!File.Exists(fullPath))
{
throw new FileNotFoundException($"Image blob path '{fullPath}' not found.", fullPath);
}
var fileBytes = File.ReadAllBytes(fullPath);
var sha256 = _cryptoHash.ComputeHashHexForPurpose(fileBytes, HashPurpose.Content);
var bundlePath = $"images/blobs/sha256/{sha256}";
entries.Add(new CollectedImage(
image.Repository,
image.Tag,
image.Digest,
bundlePath,
fullPath,
sha256,
fileBytes.LongLength));
}
// Sort for deterministic ordering
entries.Sort((a, b) => StringComparer.Ordinal.Compare(a.BundlePath, b.BundlePath));
return entries;
}
private string ComputeRootHash(
IReadOnlyList<CollectedChart> charts,
IReadOnlyList<CollectedImage> images)
{
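// Bind each bundle path to its content hash (path\0sha256\0 pairs in lexical path order) and hash the concatenation once.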
var builder = new StringBuilder();
foreach (var chart in charts.OrderBy(c => c.BundlePath, StringComparer.Ordinal))
{
builder.Append(chart.BundlePath)
.Append('\0')
.Append(chart.Sha256)
.Append('\0');
}
foreach (var image in images.OrderBy(i => i.BundlePath, StringComparer.Ordinal))
{
builder.Append(image.BundlePath)
.Append('\0')
.Append(image.Sha256)
.Append('\0');
}
var bytes = Encoding.UTF8.GetBytes(builder.ToString());
return _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
}
private BootstrapPackManifest BuildManifest(
BootstrapPackBuildRequest request,
IReadOnlyList<CollectedChart> charts,
IReadOnlyList<CollectedImage> images,
string rootHash)
{
var chartEntries = charts.Select(c => new BootstrapPackChartEntry(
c.Name,
c.Version,
c.BundlePath,
c.Sha256)).ToList();
var imageEntries = images.Select(i => new BootstrapPackImageEntry(
i.Repository,
i.Tag,
i.Digest,
i.BundlePath,
i.Sha256)).ToList();
BootstrapPackSignatureEntry? sigEntry = null;
if (request.Signatures is not null)
{
sigEntry = new BootstrapPackSignatureEntry(
request.Signatures.MirrorBundleDigest,
request.Signatures.SignaturePath);
}
return new BootstrapPackManifest(
ManifestVersion,
request.ExportId.ToString("D"),
request.TenantId.ToString("D"),
_timeProvider.GetUtcNow(),
chartEntries,
imageEntries,
sigEntry,
rootHash);
}
private static OciImageIndex BuildOciIndex(IReadOnlyList<CollectedImage> images)
{
var manifests = images.Select(i => new OciImageIndexManifest(
"application/vnd.oci.image.manifest.v1+json",
i.Size,
i.Digest,
new Dictionary<string, string>
{
["org.opencontainers.image.ref.name"] = $"{i.Repository}:{i.Tag}"
})).ToList();
return new OciImageIndex(OciSchemaVersion, OciImageIndexMediaType, manifests);
}
private static string BuildChecksums(
IReadOnlyList<CollectedChart> charts,
IReadOnlyList<CollectedImage> images,
string rootHash)
{
var builder = new StringBuilder();
builder.AppendLine("# Bootstrap pack checksums (sha256)");
builder.Append("root ").AppendLine(rootHash);
foreach (var chart in charts)
{
builder.Append(chart.Sha256).Append(" ").AppendLine(chart.BundlePath);
}
foreach (var image in images)
{
builder.Append(image.Sha256).Append(" ").AppendLine(image.BundlePath);
}
return builder.ToString();
}
private MemoryStream CreatePackArchive(
BootstrapPackBuildRequest request,
IReadOnlyList<CollectedChart> charts,
IReadOnlyList<CollectedImage> images,
string manifestJson,
string ociIndexJson,
string ociLayoutJson,
string checksums)
{
var stream = new MemoryStream();
using (var gzip = new GZipStream(stream, CompressionLevel.SmallestSize, leaveOpen: true))
using (var tar = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: true))
{
// Write metadata files
WriteTextEntry(tar, "manifest.json", manifestJson);
WriteTextEntry(tar, "checksums.txt", checksums);
// Write OCI layout files for images directory
if (images.Count > 0)
{
WriteTextEntry(tar, "images/oci-layout", ociLayoutJson);
WriteTextEntry(tar, "images/index.json", ociIndexJson);
}
// Write chart files
foreach (var chart in charts)
{
if (Directory.Exists(chart.SourcePath))
{
WriteDirectoryEntries(tar, chart.BundlePath, chart.SourcePath);
}
else
{
WriteFileEntry(tar, chart.BundlePath, chart.SourcePath);
}
}
// Write image blobs
foreach (var image in images)
{
WriteFileEntry(tar, image.BundlePath, image.SourcePath);
}
// Write signature reference if provided
if (request.Signatures?.SignaturePath is not null && File.Exists(request.Signatures.SignaturePath))
{
WriteFileEntry(tar, "signatures/mirror-bundle.sig", request.Signatures.SignaturePath);
}
}
ApplyDeterministicGzipHeader(stream);
return stream;
}
private static void WriteTextEntry(TarWriter writer, string path, string content)
{
var bytes = Encoding.UTF8.GetBytes(content);
using var dataStream = new MemoryStream(bytes);
var entry = new PaxTarEntry(TarEntryType.RegularFile, path)
{
Mode = DefaultFileMode,
ModificationTime = FixedTimestamp,
Uid = 0,
Gid = 0,
UserName = string.Empty,
GroupName = string.Empty,
DataStream = dataStream
};
writer.WriteEntry(entry);
}
private static void WriteFileEntry(TarWriter writer, string bundlePath, string sourcePath)
{
using var dataStream = new FileStream(sourcePath, FileMode.Open, FileAccess.Read, FileShare.Read, 128 * 1024, FileOptions.SequentialScan);
var entry = new PaxTarEntry(TarEntryType.RegularFile, bundlePath)
{
Mode = DefaultFileMode,
ModificationTime = FixedTimestamp,
Uid = 0,
Gid = 0,
UserName = string.Empty,
GroupName = string.Empty,
DataStream = dataStream
};
writer.WriteEntry(entry);
}
private static void WriteDirectoryEntries(TarWriter writer, string bundlePrefix, string sourceDir)
{
var files = Directory.GetFiles(sourceDir, "*", SearchOption.AllDirectories);
Array.Sort(files, StringComparer.Ordinal);
foreach (var file in files)
{
var relative = Path.GetRelativePath(sourceDir, file).Replace('\\', '/');
var bundlePath = $"{bundlePrefix}/{relative}";
using var dataStream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 128 * 1024, FileOptions.SequentialScan);
var entry = new PaxTarEntry(TarEntryType.RegularFile, bundlePath)
{
Mode = DefaultFileMode,
ModificationTime = FixedTimestamp,
Uid = 0,
Gid = 0,
UserName = string.Empty,
GroupName = string.Empty,
DataStream = dataStream
};
writer.WriteEntry(entry);
}
}
private static void ApplyDeterministicGzipHeader(MemoryStream stream)
{
if (stream.Length < 10)
{
throw new InvalidOperationException("GZip header not fully written for bootstrap pack.");
}
var seconds = checked((int)(FixedTimestamp - DateTimeOffset.UnixEpoch).TotalSeconds);
Span<byte> buffer = stackalloc byte[4];
BinaryPrimitives.WriteInt32LittleEndian(buffer, seconds);
var originalPosition = stream.Position;
stream.Position = 4;
stream.Write(buffer);
stream.Position = originalPosition;
}
private string ComputeStreamHash(Stream stream)
{
stream.Position = 0;
using var sha = SHA256.Create();
var hash = sha.ComputeHash(stream);
return ToHex(hash);
}
private string ComputeDirectoryHash(string directory, CancellationToken cancellationToken)
{
var builder = new StringBuilder();
var files = Directory.GetFiles(directory, "*", SearchOption.AllDirectories);
Array.Sort(files, StringComparer.Ordinal);
foreach (var file in files)
{
cancellationToken.ThrowIfCancellationRequested();
var relative = Path.GetRelativePath(directory, file).Replace('\\', '/');
var fileBytes = File.ReadAllBytes(file);
var fileHash = _cryptoHash.ComputeHashHexForPurpose(fileBytes, HashPurpose.Content);
builder.Append(relative).Append('\0').Append(fileHash).Append('\0');
}
var bytes = Encoding.UTF8.GetBytes(builder.ToString());
return _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
}
private static long GetDirectorySize(string directory)
{
return Directory.GetFiles(directory, "*", SearchOption.AllDirectories)
.Sum(file => new FileInfo(file).Length);
}
private static string ToHex(ReadOnlySpan<byte> bytes)
{
Span<byte> hex = stackalloc byte[bytes.Length * 2];
for (var i = 0; i < bytes.Length; i++)
{
var b = bytes[i];
hex[i * 2] = GetHexValue(b / 16);
hex[i * 2 + 1] = GetHexValue(b % 16);
}
return Encoding.ASCII.GetString(hex);
}
private static byte GetHexValue(int i) => (byte)(i < 10 ? i + 48 : i - 10 + 97);
private static string SanitizeSegment(string value)
{
if (string.IsNullOrWhiteSpace(value))
{
return "unknown";
}
var span = value.Trim();
var builder = new StringBuilder(span.Length);
foreach (var ch in span)
{
if (char.IsLetterOrDigit(ch))
{
builder.Append(char.ToLowerInvariant(ch));
}
else if (ch is '-' or '_' or '.')
{
builder.Append(ch);
}
else
{
builder.Append('-');
}
}
return builder.Length == 0 ? "unknown" : builder.ToString();
}
private sealed record CollectedChart(
string Name,
string Version,
string BundlePath,
string SourcePath,
string Sha256,
long Size);
private sealed record CollectedImage(
string Repository,
string Tag,
string Digest,
string BundlePath,
string SourcePath,
string Sha256,
long Size);
}

View File

@@ -0,0 +1,110 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.BootstrapPack;
/// <summary>
/// Request to build a bootstrap pack for air-gap deployment.
/// </summary>
public sealed record BootstrapPackBuildRequest(
Guid ExportId,
Guid TenantId,
IReadOnlyList<BootstrapPackChartSource> Charts,
IReadOnlyList<BootstrapPackImageSource> Images,
BootstrapPackSignatureSource? Signatures = null,
IReadOnlyDictionary<string, string>? Metadata = null);
/// <summary>
/// Helm chart source for the bootstrap pack.
/// </summary>
public sealed record BootstrapPackChartSource(
string Name,
string Version,
string ChartPath);
/// <summary>
/// Container image source for the bootstrap pack.
/// </summary>
public sealed record BootstrapPackImageSource(
string Repository,
string Tag,
string Digest,
string BlobPath);
/// <summary>
/// Optional DSSE/TUF signature source from upstream builds.
/// </summary>
public sealed record BootstrapPackSignatureSource(
string MirrorBundleDigest,
string? SignaturePath);
/// <summary>
/// Result of building a bootstrap pack.
/// </summary>
public sealed record BootstrapPackBuildResult(
BootstrapPackManifest Manifest,
string ManifestJson,
string RootHash,
string ArtifactSha256,
MemoryStream PackStream);
/// <summary>
/// Manifest for the bootstrap pack.
/// </summary>
public sealed record BootstrapPackManifest(
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("exportId")] string ExportId,
[property: JsonPropertyName("tenantId")] string TenantId,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
[property: JsonPropertyName("charts")] IReadOnlyList<BootstrapPackChartEntry> Charts,
[property: JsonPropertyName("images")] IReadOnlyList<BootstrapPackImageEntry> Images,
[property: JsonPropertyName("signatures")] BootstrapPackSignatureEntry? Signatures,
[property: JsonPropertyName("rootHash")] string RootHash);
/// <summary>
/// Chart entry in the manifest.
/// </summary>
public sealed record BootstrapPackChartEntry(
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("path")] string Path,
[property: JsonPropertyName("sha256")] string Sha256);
/// <summary>
/// Image entry in the manifest.
/// </summary>
public sealed record BootstrapPackImageEntry(
[property: JsonPropertyName("repository")] string Repository,
[property: JsonPropertyName("tag")] string Tag,
[property: JsonPropertyName("digest")] string Digest,
[property: JsonPropertyName("path")] string Path,
[property: JsonPropertyName("sha256")] string Sha256);
/// <summary>
/// Signature metadata entry.
/// </summary>
public sealed record BootstrapPackSignatureEntry(
[property: JsonPropertyName("mirrorBundleDigest")] string MirrorBundleDigest,
[property: JsonPropertyName("signaturePath")] string? SignaturePath);
/// <summary>
/// OCI image index (index.json) structure.
/// </summary>
public sealed record OciImageIndex(
[property: JsonPropertyName("schemaVersion")] int SchemaVersion,
[property: JsonPropertyName("mediaType")] string MediaType,
[property: JsonPropertyName("manifests")] IReadOnlyList<OciImageIndexManifest> Manifests);
/// <summary>
/// Manifest entry within the OCI image index.
/// </summary>
public sealed record OciImageIndexManifest(
[property: JsonPropertyName("mediaType")] string MediaType,
[property: JsonPropertyName("size")] long Size,
[property: JsonPropertyName("digest")] string Digest,
[property: JsonPropertyName("annotations")] IReadOnlyDictionary<string, string>? Annotations);
/// <summary>
/// OCI image layout marker (oci-layout).
/// </summary>
public sealed record OciImageLayout(
[property: JsonPropertyName("imageLayoutVersion")] string ImageLayoutVersion);

View File

@@ -0,0 +1,611 @@
using System.Buffers.Binary;
using System.Formats.Tar;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.MirrorBundle;
/// <summary>
/// Builds deterministic mirror bundles for air-gapped export with DSSE/TUF metadata.
/// </summary>
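/// <remarks>
/// A minimal usage sketch; <c>cryptoHash</c> stands for whichever <c>ICryptoHash</c> implementation the host
/// registers, and the staging path is an illustrative placeholder:
/// <code>
/// var builder = new MirrorBundleBuilder(cryptoHash);
/// var request = new MirrorBundleBuildRequest(
/// Guid.NewGuid(),
/// Guid.NewGuid(),
/// MirrorBundleVariant.Full,
/// new MirrorBundleSelectors(new[] { "stellaops" }, null, null),
/// new[] { new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, "./staging/advisories.json", IsNormalized: true) });
/// var result = builder.Build(request);
/// File.WriteAllBytes("mirror-full.tgz", result.BundleStream.ToArray());
/// </code>
/// </remarks>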
public sealed class MirrorBundleBuilder
{
private const string ManifestVersion = "mirror/v1";
private const string ExporterVersion = "1.0.0";
private const string AdapterVersion = "mirror-adapter/1.0.0";
private static readonly DateTimeOffset FixedTimestamp = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
private static readonly UnixFileMode DefaultFileMode =
UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead;
private static readonly UnixFileMode ExecutableFileMode =
UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute |
UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
UnixFileMode.OtherRead | UnixFileMode.OtherExecute;
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
private readonly ICryptoHash _cryptoHash;
private readonly TimeProvider _timeProvider;
public MirrorBundleBuilder(ICryptoHash cryptoHash, TimeProvider? timeProvider = null)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Builds a mirror bundle from the provided request.
/// </summary>
public MirrorBundleBuildResult Build(MirrorBundleBuildRequest request, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (request.RunId == Guid.Empty)
{
throw new ArgumentException("Run identifier must be provided.", nameof(request));
}
if (request.TenantId == Guid.Empty)
{
throw new ArgumentException("Tenant identifier must be provided.", nameof(request));
}
if (request.Variant == MirrorBundleVariant.Delta && request.DeltaOptions is null)
{
throw new ArgumentException("Delta options must be provided for delta bundles.", nameof(request));
}
cancellationToken.ThrowIfCancellationRequested();
// Collect and validate data sources
var collectedFiles = CollectDataSources(request.DataSources, cancellationToken);
if (collectedFiles.Count == 0)
{
throw new InvalidOperationException("Mirror bundle does not contain any data files. Provide at least one data source.");
}
// Build artifact entries
var artifacts = BuildArtifactEntries(collectedFiles);
// Compute counts
var counts = ComputeCounts(collectedFiles);
// Build manifest
var manifest = BuildManifest(request, artifacts, counts);
var manifestYaml = SerializeManifestToYaml(manifest);
var manifestDigest = ComputeHash(manifestYaml);
// Build export document
var exportDoc = BuildExportDocument(request, manifest, manifestDigest);
var exportJson = JsonSerializer.Serialize(exportDoc, SerializerOptions);
// Build provenance document
var provenanceDoc = BuildProvenanceDocument(request, manifest, manifestDigest, collectedFiles);
var provenanceJson = JsonSerializer.Serialize(provenanceDoc, SerializerOptions);
// Compute root hash from export document
var rootHash = ComputeHash(exportJson);
// Build checksums file
var checksums = BuildChecksums(rootHash, collectedFiles, manifestDigest);
// Build README
var readme = BuildReadme(manifest);
// Build verification script
var verifyScript = BuildVerificationScript();
// Create the bundle archive
var bundleStream = CreateBundleArchive(
collectedFiles,
manifestYaml,
exportJson,
provenanceJson,
checksums,
readme,
verifyScript,
request.Variant);
bundleStream.Position = 0;
return new MirrorBundleBuildResult(
manifest,
manifestYaml,
exportDoc,
exportJson,
provenanceDoc,
provenanceJson,
rootHash,
bundleStream);
}
private List<CollectedFile> CollectDataSources(
IReadOnlyList<MirrorBundleDataSource> dataSources,
CancellationToken cancellationToken)
{
var files = new List<CollectedFile>();
foreach (var source in dataSources)
{
cancellationToken.ThrowIfCancellationRequested();
if (source is null)
{
throw new ArgumentException("Data sources cannot contain null entries.");
}
if (string.IsNullOrWhiteSpace(source.SourcePath))
{
throw new ArgumentException("Source path cannot be empty.");
}
var fullPath = Path.GetFullPath(source.SourcePath);
if (!File.Exists(fullPath))
{
throw new FileNotFoundException($"Data source file '{fullPath}' not found.", fullPath);
}
var bundlePath = ComputeBundlePath(source);
var fileBytes = File.ReadAllBytes(fullPath);
var sha256 = _cryptoHash.ComputeHashHexForPurpose(fileBytes, HashPurpose.Content);
files.Add(new CollectedFile(
source.Category,
bundlePath,
fullPath,
fileBytes.LongLength,
sha256,
source.IsNormalized,
source.SubjectId));
}
// Sort for deterministic ordering
files.Sort((a, b) => StringComparer.Ordinal.Compare(a.BundlePath, b.BundlePath));
return files;
}
private static string ComputeBundlePath(MirrorBundleDataSource source)
{
var fileName = Path.GetFileName(source.SourcePath);
var prefix = source.IsNormalized ? "data/normalized" : "data/raw";
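// Examples: a normalized advisory file "osv.json" lands at data/normalized/advisories/osv.json;
// an SBOM with SubjectId "Registry/App" lands at data/raw/sboms/registry-app/<file>.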
return source.Category switch
{
MirrorBundleDataCategory.Advisories => $"{prefix}/advisories/{fileName}",
MirrorBundleDataCategory.Vex => $"{prefix}/vex/{fileName}",
MirrorBundleDataCategory.Sbom when !string.IsNullOrEmpty(source.SubjectId) =>
$"data/raw/sboms/{SanitizeSegment(source.SubjectId)}/{fileName}",
MirrorBundleDataCategory.Sbom => $"data/raw/sboms/{fileName}",
MirrorBundleDataCategory.PolicySnapshot => "data/policy/snapshot.json",
MirrorBundleDataCategory.PolicyEvaluations => $"data/policy/{fileName}",
MirrorBundleDataCategory.VexConsensus => $"data/consensus/{fileName}",
MirrorBundleDataCategory.Findings => $"data/findings/{fileName}",
_ => throw new ArgumentOutOfRangeException(nameof(source), $"Unknown data category: {source.Category}")
};
}
private static IReadOnlyList<MirrorBundleArtifactEntry> BuildArtifactEntries(IReadOnlyList<CollectedFile> files)
{
return files.Select(f => new MirrorBundleArtifactEntry(
f.BundlePath,
f.Sha256,
f.SizeBytes,
f.Category.ToString().ToLowerInvariant())).ToList();
}
private static MirrorBundleManifestCounts ComputeCounts(IReadOnlyList<CollectedFile> files)
{
var advisories = files.Count(f => f.Category == MirrorBundleDataCategory.Advisories);
var vex = files.Count(f => f.Category is MirrorBundleDataCategory.Vex or MirrorBundleDataCategory.VexConsensus);
var sboms = files.Count(f => f.Category == MirrorBundleDataCategory.Sbom);
var policyEvals = files.Count(f => f.Category == MirrorBundleDataCategory.PolicyEvaluations);
return new MirrorBundleManifestCounts(advisories, vex, sboms, policyEvals);
}
private MirrorBundleManifest BuildManifest(
MirrorBundleBuildRequest request,
IReadOnlyList<MirrorBundleArtifactEntry> artifacts,
MirrorBundleManifestCounts counts)
{
var profile = request.Variant == MirrorBundleVariant.Full ? "mirror:full" : "mirror:delta";
var selectors = new MirrorBundleManifestSelectors(
request.Selectors.Products,
request.Selectors.TimeWindowFrom.HasValue && request.Selectors.TimeWindowTo.HasValue
? new MirrorBundleTimeWindow(request.Selectors.TimeWindowFrom.Value, request.Selectors.TimeWindowTo.Value)
: null,
request.Selectors.Ecosystems);
MirrorBundleManifestEncryption? encryption = null;
if (request.Encryption is not null && request.Encryption.Mode != MirrorBundleEncryptionMode.None)
{
encryption = new MirrorBundleManifestEncryption(
request.Encryption.Mode.ToString().ToLowerInvariant(),
request.Encryption.Strict,
request.Encryption.RecipientKeys);
}
MirrorBundleManifestDelta? delta = null;
if (request.Variant == MirrorBundleVariant.Delta && request.DeltaOptions is not null)
{
delta = new MirrorBundleManifestDelta(
request.DeltaOptions.BaseExportId,
request.DeltaOptions.BaseManifestDigest,
request.DeltaOptions.ResetBaseline,
new MirrorBundleDeltaCounts(0, 0, 0), // TODO: Compute actual delta counts
new MirrorBundleDeltaCounts(0, 0, 0),
new MirrorBundleDeltaCounts(0, 0, 0));
}
return new MirrorBundleManifest(
profile,
request.RunId.ToString("D"),
request.TenantId.ToString("D"),
selectors,
counts,
artifacts,
encryption,
delta);
}
private MirrorBundleExportDocument BuildExportDocument(
MirrorBundleBuildRequest request,
MirrorBundleManifest manifest,
string manifestDigest)
{
return new MirrorBundleExportDocument(
ManifestVersion,
manifest.RunId,
manifest.Tenant,
new MirrorBundleExportProfile("mirror", request.Variant.ToString().ToLowerInvariant()),
manifest.Selectors,
manifest.Counts,
manifest.Artifacts,
_timeProvider.GetUtcNow(),
$"sha256:{manifestDigest}");
}
private MirrorBundleProvenanceDocument BuildProvenanceDocument(
MirrorBundleBuildRequest request,
MirrorBundleManifest manifest,
string manifestDigest,
IReadOnlyList<CollectedFile> files)
{
var subjects = new List<MirrorBundleProvenanceSubject>
{
new("manifest.yaml", new Dictionary<string, string> { ["sha256"] = manifestDigest })
};
foreach (var file in files)
{
subjects.Add(new MirrorBundleProvenanceSubject(
file.BundlePath,
new Dictionary<string, string> { ["sha256"] = file.Sha256 }));
}
var sbomIds = files
.Where(f => f.Category == MirrorBundleDataCategory.Sbom && !string.IsNullOrEmpty(f.SubjectId))
.Select(f => f.SubjectId!)
.Distinct()
.OrderBy(id => id, StringComparer.Ordinal)
.ToList();
var inputs = new MirrorBundleProvenanceInputs(
new[] { $"tenant:{manifest.Tenant}" },
files.Any(f => f.Category == MirrorBundleDataCategory.PolicySnapshot)
? $"snapshot:{manifest.RunId}"
: null,
sbomIds);
return new MirrorBundleProvenanceDocument(
ManifestVersion,
manifest.RunId,
manifest.Tenant,
subjects,
inputs,
new MirrorBundleProvenanceBuilder(ExporterVersion, AdapterVersion),
_timeProvider.GetUtcNow());
}
private string ComputeHash(string content)
{
var bytes = Encoding.UTF8.GetBytes(content);
return _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
}
private static string BuildChecksums(string rootHash, IReadOnlyList<CollectedFile> files, string manifestDigest)
{
var builder = new StringBuilder();
builder.AppendLine("# Mirror bundle checksums (sha256)");
builder.Append("root ").AppendLine(rootHash);
builder.Append(manifestDigest).AppendLine(" manifest.yaml");
foreach (var file in files)
{
builder.Append(file.Sha256).Append(" ").AppendLine(file.BundlePath);
}
return builder.ToString();
}
private static string BuildReadme(MirrorBundleManifest manifest)
{
var builder = new StringBuilder();
builder.AppendLine("Mirror Bundle");
builder.AppendLine("=============");
builder.Append("Profile: ").AppendLine(manifest.Profile);
builder.Append("Run ID: ").AppendLine(manifest.RunId);
builder.Append("Tenant: ").AppendLine(manifest.Tenant);
builder.AppendLine();
builder.AppendLine("Contents:");
builder.Append("- Advisories: ").AppendLine(manifest.Counts.Advisories.ToString());
builder.Append("- VEX statements: ").AppendLine(manifest.Counts.Vex.ToString());
builder.Append("- SBOMs: ").AppendLine(manifest.Counts.Sboms.ToString());
builder.Append("- Policy evaluations: ").AppendLine(manifest.Counts.PolicyEvaluations.ToString());
builder.AppendLine();
if (manifest.Delta is not null)
{
builder.AppendLine("Delta Information:");
builder.Append("- Base export: ").AppendLine(manifest.Delta.BaseExportId);
builder.Append("- Reset baseline: ").AppendLine(manifest.Delta.ResetBaseline ? "yes" : "no");
builder.AppendLine();
}
if (manifest.Encryption is not null)
{
builder.AppendLine("Encryption:");
builder.Append("- Mode: ").AppendLine(manifest.Encryption.Mode);
builder.Append("- Strict: ").AppendLine(manifest.Encryption.Strict ? "yes" : "no");
builder.AppendLine();
}
builder.AppendLine("Verification:");
builder.AppendLine("1. Transfer the archive to the target environment.");
builder.AppendLine("2. Run `./verify-mirror.sh <bundle.tgz>` to validate checksums.");
builder.AppendLine("3. Use `stella export verify <runId>` to verify DSSE signatures.");
builder.AppendLine("4. Apply using `stella export mirror-import <bundle.tgz>`.");
return builder.ToString();
}
private static string BuildVerificationScript()
{
var builder = new StringBuilder();
builder.AppendLine("#!/usr/bin/env sh");
builder.AppendLine("set -euo pipefail");
builder.AppendLine();
builder.AppendLine("ARCHIVE=\"${1:-mirror-bundle.tgz}\"");
builder.AppendLine("if [ ! -f \"$ARCHIVE\" ]; then");
builder.AppendLine(" echo \"Usage: $0 <mirror-bundle.tgz>\" >&2");
builder.AppendLine(" exit 1");
builder.AppendLine("fi");
builder.AppendLine();
builder.AppendLine("WORKDIR=\"$(mktemp -d)\"");
builder.AppendLine("cleanup() { rm -rf \"$WORKDIR\"; }");
builder.AppendLine("trap cleanup EXIT INT TERM");
builder.AppendLine();
builder.AppendLine("tar -xzf \"$ARCHIVE\" -C \"$WORKDIR\"");
builder.AppendLine("echo \"Mirror bundle extracted to $WORKDIR\"");
builder.AppendLine();
builder.AppendLine("cd \"$WORKDIR\"");
builder.AppendLine("if command -v sha256sum >/dev/null 2>&1; then");
builder.AppendLine(" sha256sum --check checksums.txt");
builder.AppendLine("else");
builder.AppendLine(" shasum -a 256 --check checksums.txt");
builder.AppendLine("fi");
builder.AppendLine();
builder.AppendLine("echo \"Checksums verified successfully.\"");
builder.AppendLine("echo \"Run 'stella export verify' for signature validation.\"");
return builder.ToString();
}
private static string SerializeManifestToYaml(MirrorBundleManifest manifest)
{
// Emit a minimal YAML representation of the manifest by hand for manifest.yaml
// (no external YAML serializer is used).
var builder = new StringBuilder();
builder.Append("profile: ").AppendLine(manifest.Profile);
builder.Append("runId: ").AppendLine(manifest.RunId);
builder.Append("tenant: ").AppendLine(manifest.Tenant);
builder.AppendLine("selectors:");
builder.AppendLine(" products:");
foreach (var product in manifest.Selectors.Products)
{
builder.Append(" - ").AppendLine(product);
}
if (manifest.Selectors.TimeWindow is not null)
{
builder.AppendLine(" timeWindow:");
builder.Append(" from: ").AppendLine(manifest.Selectors.TimeWindow.From.ToString("O"));
builder.Append(" to: ").AppendLine(manifest.Selectors.TimeWindow.To.ToString("O"));
}
builder.AppendLine("counts:");
builder.Append(" advisories: ").AppendLine(manifest.Counts.Advisories.ToString());
builder.Append(" vex: ").AppendLine(manifest.Counts.Vex.ToString());
builder.Append(" sboms: ").AppendLine(manifest.Counts.Sboms.ToString());
builder.Append(" policyEvaluations: ").AppendLine(manifest.Counts.PolicyEvaluations.ToString());
builder.AppendLine("artifacts:");
foreach (var artifact in manifest.Artifacts)
{
builder.Append(" - path: ").AppendLine(artifact.Path);
builder.Append(" sha256: ").AppendLine(artifact.Sha256);
builder.Append(" bytes: ").AppendLine(artifact.Bytes.ToString());
}
if (manifest.Encryption is not null)
{
builder.AppendLine("encryption:");
builder.Append(" mode: ").AppendLine(manifest.Encryption.Mode);
builder.Append(" strict: ").AppendLine(manifest.Encryption.Strict.ToString().ToLowerInvariant());
builder.AppendLine(" recipients:");
foreach (var recipient in manifest.Encryption.Recipients)
{
builder.Append(" - ").AppendLine(recipient);
}
}
if (manifest.Delta is not null)
{
builder.AppendLine("delta:");
builder.Append(" baseExportId: ").AppendLine(manifest.Delta.BaseExportId);
builder.Append(" baseManifestDigest: ").AppendLine(manifest.Delta.BaseManifestDigest);
builder.Append(" resetBaseline: ").AppendLine(manifest.Delta.ResetBaseline.ToString().ToLowerInvariant());
}
return builder.ToString();
}
private MemoryStream CreateBundleArchive(
IReadOnlyList<CollectedFile> files,
string manifestYaml,
string exportJson,
string provenanceJson,
string checksums,
string readme,
string verifyScript,
MirrorBundleVariant variant)
{
var stream = new MemoryStream();
using (var gzip = new GZipStream(stream, CompressionLevel.SmallestSize, leaveOpen: true))
using (var tar = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: true))
{
// Write metadata files first
WriteTextEntry(tar, "manifest.yaml", manifestYaml, DefaultFileMode);
WriteTextEntry(tar, "export.json", exportJson, DefaultFileMode);
WriteTextEntry(tar, "provenance.json", provenanceJson, DefaultFileMode);
WriteTextEntry(tar, "checksums.txt", checksums, DefaultFileMode);
WriteTextEntry(tar, "README.md", readme, DefaultFileMode);
WriteTextEntry(tar, "verify-mirror.sh", verifyScript, ExecutableFileMode);
// Write index placeholder files
WriteTextEntry(tar, "indexes/advisories.index.json", "[]", DefaultFileMode);
WriteTextEntry(tar, "indexes/vex.index.json", "[]", DefaultFileMode);
WriteTextEntry(tar, "indexes/sbom.index.json", "[]", DefaultFileMode);
WriteTextEntry(tar, "indexes/findings.index.json", "[]", DefaultFileMode);
// Write data files
foreach (var file in files)
{
WriteFileEntry(tar, file.BundlePath, file.SourcePath);
}
// For delta bundles, write removed list placeholders
if (variant == MirrorBundleVariant.Delta)
{
WriteTextEntry(tar, "delta/removed/advisories.jsonl", "", DefaultFileMode);
WriteTextEntry(tar, "delta/removed/vex.jsonl", "", DefaultFileMode);
WriteTextEntry(tar, "delta/removed/sboms.jsonl", "", DefaultFileMode);
}
}
ApplyDeterministicGzipHeader(stream);
return stream;
}
private static void WriteTextEntry(TarWriter writer, string path, string content, UnixFileMode mode)
{
var bytes = Encoding.UTF8.GetBytes(content);
using var dataStream = new MemoryStream(bytes);
var entry = new PaxTarEntry(TarEntryType.RegularFile, path)
{
Mode = mode,
ModificationTime = FixedTimestamp,
Uid = 0,
Gid = 0,
UserName = string.Empty,
GroupName = string.Empty,
DataStream = dataStream
};
writer.WriteEntry(entry);
}
private static void WriteFileEntry(TarWriter writer, string bundlePath, string sourcePath)
{
using var dataStream = new FileStream(sourcePath, FileMode.Open, FileAccess.Read, FileShare.Read, 128 * 1024, FileOptions.SequentialScan);
var mode = bundlePath.EndsWith(".sh", StringComparison.Ordinal) ? ExecutableFileMode : DefaultFileMode;
var entry = new PaxTarEntry(TarEntryType.RegularFile, bundlePath)
{
Mode = mode,
ModificationTime = FixedTimestamp,
Uid = 0,
Gid = 0,
UserName = string.Empty,
GroupName = string.Empty,
DataStream = dataStream
};
writer.WriteEntry(entry);
}
private static void ApplyDeterministicGzipHeader(MemoryStream stream)
{
if (stream.Length < 10)
{
throw new InvalidOperationException("GZip header not fully written for mirror bundle.");
}
var seconds = checked((int)(FixedTimestamp - DateTimeOffset.UnixEpoch).TotalSeconds);
Span<byte> buffer = stackalloc byte[4];
BinaryPrimitives.WriteInt32LittleEndian(buffer, seconds);
var originalPosition = stream.Position;
stream.Position = 4;
stream.Write(buffer);
stream.Position = originalPosition;
}
private static string SanitizeSegment(string value)
{
if (string.IsNullOrWhiteSpace(value))
{
return "subject";
}
var span = value.Trim();
var builder = new StringBuilder(span.Length);
foreach (var ch in span)
{
if (char.IsLetterOrDigit(ch))
{
builder.Append(char.ToLowerInvariant(ch));
}
else if (ch is '-' or '_' or '.')
{
builder.Append(ch);
}
else
{
builder.Append('-');
}
}
return builder.Length == 0 ? "subject" : builder.ToString();
}
private sealed record CollectedFile(
MirrorBundleDataCategory Category,
string BundlePath,
string SourcePath,
long SizeBytes,
string Sha256,
bool IsNormalized,
string? SubjectId);
}

View File

@@ -0,0 +1,246 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.MirrorBundle;
/// <summary>
/// Mirror bundle profile variant.
/// </summary>
public enum MirrorBundleVariant
{
/// <summary>
/// Full mirror bundle containing complete snapshot.
/// </summary>
Full = 1,
/// <summary>
/// Delta mirror bundle with changes since a base export.
/// </summary>
Delta = 2
}
/// <summary>
/// Request to build a mirror bundle.
/// </summary>
public sealed record MirrorBundleBuildRequest(
Guid RunId,
Guid TenantId,
MirrorBundleVariant Variant,
MirrorBundleSelectors Selectors,
IReadOnlyList<MirrorBundleDataSource> DataSources,
MirrorBundleEncryptionOptions? Encryption = null,
MirrorBundleDeltaOptions? DeltaOptions = null,
IReadOnlyDictionary<string, string>? Metadata = null);
/// <summary>
/// Selectors that define the scope of data to include in the bundle.
/// </summary>
public sealed record MirrorBundleSelectors(
IReadOnlyList<string> Products,
DateTimeOffset? TimeWindowFrom,
DateTimeOffset? TimeWindowTo,
IReadOnlyList<string>? Ecosystems = null);
/// <summary>
/// Data source input for the mirror bundle.
/// </summary>
public sealed record MirrorBundleDataSource(
MirrorBundleDataCategory Category,
string SourcePath,
bool IsNormalized = false,
string? SubjectId = null);
/// <summary>
/// Category of data in a mirror bundle.
/// </summary>
public enum MirrorBundleDataCategory
{
Advisories = 1,
Vex = 2,
Sbom = 3,
PolicySnapshot = 4,
PolicyEvaluations = 5,
VexConsensus = 6,
Findings = 7
}
/// <summary>
/// Encryption options for mirror bundles.
/// </summary>
public sealed record MirrorBundleEncryptionOptions(
MirrorBundleEncryptionMode Mode,
IReadOnlyList<string> RecipientKeys,
bool Strict = false);
/// <summary>
/// Encryption mode for mirror bundles.
/// </summary>
public enum MirrorBundleEncryptionMode
{
None = 0,
Age = 1,
AesGcm = 2
}
/// <summary>
/// Delta-specific options when building a delta mirror bundle.
/// </summary>
public sealed record MirrorBundleDeltaOptions(
string BaseExportId,
string BaseManifestDigest,
bool ResetBaseline = false);
/// <summary>
/// Result of building a mirror bundle.
/// </summary>
public sealed record MirrorBundleBuildResult(
MirrorBundleManifest Manifest,
string ManifestJson,
MirrorBundleExportDocument ExportDocument,
string ExportDocumentJson,
MirrorBundleProvenanceDocument ProvenanceDocument,
string ProvenanceDocumentJson,
string RootHash,
MemoryStream BundleStream);
/// <summary>
/// The manifest.yaml content as a structured object.
/// </summary>
public sealed record MirrorBundleManifest(
[property: JsonPropertyName("profile")] string Profile,
[property: JsonPropertyName("runId")] string RunId,
[property: JsonPropertyName("tenant")] string Tenant,
[property: JsonPropertyName("selectors")] MirrorBundleManifestSelectors Selectors,
[property: JsonPropertyName("counts")] MirrorBundleManifestCounts Counts,
[property: JsonPropertyName("artifacts")] IReadOnlyList<MirrorBundleArtifactEntry> Artifacts,
[property: JsonPropertyName("encryption")] MirrorBundleManifestEncryption? Encryption,
[property: JsonPropertyName("delta")] MirrorBundleManifestDelta? Delta);
/// <summary>
/// Selector metadata in the manifest.
/// </summary>
public sealed record MirrorBundleManifestSelectors(
[property: JsonPropertyName("products")] IReadOnlyList<string> Products,
[property: JsonPropertyName("timeWindow")] MirrorBundleTimeWindow? TimeWindow,
[property: JsonPropertyName("ecosystems")] IReadOnlyList<string>? Ecosystems);
/// <summary>
/// Time window for selectors.
/// </summary>
public sealed record MirrorBundleTimeWindow(
[property: JsonPropertyName("from")] DateTimeOffset From,
[property: JsonPropertyName("to")] DateTimeOffset To);
/// <summary>
/// Counts of various record types in the bundle.
/// </summary>
public sealed record MirrorBundleManifestCounts(
[property: JsonPropertyName("advisories")] int Advisories,
[property: JsonPropertyName("vex")] int Vex,
[property: JsonPropertyName("sboms")] int Sboms,
[property: JsonPropertyName("policyEvaluations")] int PolicyEvaluations);
/// <summary>
/// Artifact entry in the manifest.
/// </summary>
public sealed record MirrorBundleArtifactEntry(
[property: JsonPropertyName("path")] string Path,
[property: JsonPropertyName("sha256")] string Sha256,
[property: JsonPropertyName("bytes")] long Bytes,
[property: JsonPropertyName("category")] string Category);
/// <summary>
/// Encryption metadata in the manifest.
/// </summary>
public sealed record MirrorBundleManifestEncryption(
[property: JsonPropertyName("mode")] string Mode,
[property: JsonPropertyName("strict")] bool Strict,
[property: JsonPropertyName("recipients")] IReadOnlyList<string> Recipients);
/// <summary>
/// Delta metadata in the manifest.
/// </summary>
public sealed record MirrorBundleManifestDelta(
[property: JsonPropertyName("baseExportId")] string BaseExportId,
[property: JsonPropertyName("baseManifestDigest")] string BaseManifestDigest,
[property: JsonPropertyName("resetBaseline")] bool ResetBaseline,
[property: JsonPropertyName("added")] MirrorBundleDeltaCounts Added,
[property: JsonPropertyName("changed")] MirrorBundleDeltaCounts Changed,
[property: JsonPropertyName("removed")] MirrorBundleDeltaCounts Removed);
/// <summary>
/// Delta change counts.
/// </summary>
public sealed record MirrorBundleDeltaCounts(
[property: JsonPropertyName("advisories")] int Advisories,
[property: JsonPropertyName("vex")] int Vex,
[property: JsonPropertyName("sboms")] int Sboms);
/// <summary>
/// The export.json document for the bundle.
/// </summary>
public sealed record MirrorBundleExportDocument(
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("runId")] string RunId,
[property: JsonPropertyName("tenantId")] string TenantId,
[property: JsonPropertyName("profile")] MirrorBundleExportProfile Profile,
[property: JsonPropertyName("selectors")] MirrorBundleManifestSelectors Selectors,
[property: JsonPropertyName("counts")] MirrorBundleManifestCounts Counts,
[property: JsonPropertyName("artifacts")] IReadOnlyList<MirrorBundleArtifactEntry> Artifacts,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
[property: JsonPropertyName("manifestDigest")] string ManifestDigest);
/// <summary>
/// Export profile metadata.
/// </summary>
public sealed record MirrorBundleExportProfile(
[property: JsonPropertyName("kind")] string Kind,
[property: JsonPropertyName("variant")] string Variant);
/// <summary>
/// The provenance.json document for the bundle.
/// </summary>
public sealed record MirrorBundleProvenanceDocument(
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("runId")] string RunId,
[property: JsonPropertyName("tenantId")] string TenantId,
[property: JsonPropertyName("subjects")] IReadOnlyList<MirrorBundleProvenanceSubject> Subjects,
[property: JsonPropertyName("inputs")] MirrorBundleProvenanceInputs Inputs,
[property: JsonPropertyName("builder")] MirrorBundleProvenanceBuilder Builder,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt);
/// <summary>
/// Subject entry in provenance.
/// </summary>
public sealed record MirrorBundleProvenanceSubject(
[property: JsonPropertyName("name")] string Name,
[property: JsonPropertyName("digest")] IReadOnlyDictionary<string, string> Digest);
/// <summary>
/// Input references in provenance.
/// </summary>
public sealed record MirrorBundleProvenanceInputs(
[property: JsonPropertyName("findingsLedgerQueries")] IReadOnlyList<string> FindingsLedgerQueries,
[property: JsonPropertyName("policySnapshotId")] string? PolicySnapshotId,
[property: JsonPropertyName("sbomIdentifiers")] IReadOnlyList<string> SbomIdentifiers);
/// <summary>
/// Builder metadata in provenance.
/// </summary>
public sealed record MirrorBundleProvenanceBuilder(
[property: JsonPropertyName("exporterVersion")] string ExporterVersion,
[property: JsonPropertyName("adapterVersion")] string AdapterVersion);
/// <summary>
/// DSSE signature document for mirror bundles.
/// </summary>
public sealed record MirrorBundleDsseSignature(
[property: JsonPropertyName("payloadType")] string PayloadType,
[property: JsonPropertyName("payload")] string Payload,
[property: JsonPropertyName("signatures")] IReadOnlyList<MirrorBundleDsseSignatureEntry> Signatures);
/// <summary>
/// Signature entry within a DSSE document.
/// </summary>
public sealed record MirrorBundleDsseSignatureEntry(
[property: JsonPropertyName("sig")] string Signature,
[property: JsonPropertyName("keyid")] string KeyId);

View File

@@ -0,0 +1,188 @@
using System.Globalization;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.MirrorBundle;
/// <summary>
/// Interface for signing mirror bundle manifests using DSSE.
/// </summary>
public interface IMirrorBundleManifestSigner
{
/// <summary>
/// Signs the export.json content and returns a DSSE envelope.
/// </summary>
Task<MirrorBundleDsseSignature> SignExportDocumentAsync(string exportJson, CancellationToken cancellationToken = default);
/// <summary>
/// Signs the manifest.yaml content and returns a DSSE envelope.
/// </summary>
Task<MirrorBundleDsseSignature> SignManifestAsync(string manifestYaml, CancellationToken cancellationToken = default);
}
/// <summary>
/// Interface for signing mirror bundle archives.
/// </summary>
public interface IMirrorBundleArchiveSigner
{
/// <summary>
/// Signs the bundle archive stream and returns a base64 signature.
/// </summary>
Task<string> SignArchiveAsync(Stream archiveStream, CancellationToken cancellationToken = default);
}
/// <summary>
/// HMAC-based signer for mirror bundle manifests implementing DSSE (Dead Simple Signing Envelope).
/// </summary>
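/// <remarks>
/// A minimal usage sketch; <c>cryptoHmac</c> stands for whichever <c>ICryptoHmac</c> implementation the host
/// registers, and <c>buildResult</c> is assumed to be an existing <c>MirrorBundleBuildResult</c>:
/// <code>
/// var signer = new HmacMirrorBundleManifestSigner(cryptoHmac, key: "shared-secret", keyId: "mirror-hmac-1");
/// var envelope = await signer.SignManifestAsync(buildResult.ManifestJson);
/// File.WriteAllText("manifest.yaml.dsse.json", envelope.ToJson());
/// </code>
/// </remarks>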
public sealed class HmacMirrorBundleManifestSigner : IMirrorBundleManifestSigner, IMirrorBundleArchiveSigner
{
private const string ExportPayloadType = "application/vnd.stellaops.mirror-bundle.export+json";
private const string ManifestPayloadType = "application/vnd.stellaops.mirror-bundle.manifest+yaml";
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = false,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
};
private readonly ICryptoHmac _cryptoHmac;
private readonly byte[] _key;
private readonly string _keyId;
public HmacMirrorBundleManifestSigner(ICryptoHmac cryptoHmac, string key, string keyId)
{
_cryptoHmac = cryptoHmac ?? throw new ArgumentNullException(nameof(cryptoHmac));
if (string.IsNullOrWhiteSpace(key))
{
throw new ArgumentException("Signing key cannot be empty.", nameof(key));
}
_key = Encoding.UTF8.GetBytes(key);
_keyId = string.IsNullOrWhiteSpace(keyId) ? "mirror-bundle-hmac" : keyId;
}
/// <inheritdoc/>
public Task<MirrorBundleDsseSignature> SignExportDocumentAsync(string exportJson, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(exportJson);
cancellationToken.ThrowIfCancellationRequested();
return Task.FromResult(CreateDsseEnvelope(ExportPayloadType, exportJson));
}
/// <inheritdoc/>
public Task<MirrorBundleDsseSignature> SignManifestAsync(string manifestYaml, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(manifestYaml);
cancellationToken.ThrowIfCancellationRequested();
return Task.FromResult(CreateDsseEnvelope(ManifestPayloadType, manifestYaml));
}
/// <inheritdoc/>
public async Task<string> SignArchiveAsync(Stream archiveStream, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(archiveStream);
cancellationToken.ThrowIfCancellationRequested();
if (!archiveStream.CanSeek)
{
throw new ArgumentException("Archive stream must support seeking for signing.", nameof(archiveStream));
}
archiveStream.Position = 0;
var signature = await _cryptoHmac.ComputeHmacForPurposeAsync(_key, archiveStream, HmacPurpose.Signing, cancellationToken);
archiveStream.Position = 0;
return Convert.ToBase64String(signature);
}
private MirrorBundleDsseSignature CreateDsseEnvelope(string payloadType, string payload)
{
var pae = CreatePreAuthenticationEncoding(payloadType, payload);
var signature = _cryptoHmac.ComputeHmacBase64ForPurpose(_key, pae, HmacPurpose.Signing);
return new MirrorBundleDsseSignature(
payloadType,
Convert.ToBase64String(Encoding.UTF8.GetBytes(payload)),
new[] { new MirrorBundleDsseSignatureEntry(signature, _keyId) });
}
/// <summary>
/// Creates the DSSE Pre-Authentication Encoding (PAE) for signing.
/// PAE format: "DSSEv1" + SP + length(payloadType) + SP + payloadType + SP + length(payload) + SP + payload
/// </summary>
private static byte[] CreatePreAuthenticationEncoding(string payloadType, string payload)
{
var typeBytes = Encoding.UTF8.GetBytes(payloadType);
var payloadBytes = Encoding.UTF8.GetBytes(payload);
var preamble = Encoding.UTF8.GetBytes("DSSEv1 ");
var typeLenStr = typeBytes.Length.ToString(CultureInfo.InvariantCulture);
var payloadLenStr = payloadBytes.Length.ToString(CultureInfo.InvariantCulture);
var result = new List<byte>(preamble.Length + typeLenStr.Length + 1 + typeBytes.Length + 1 + payloadLenStr.Length + 1 + payloadBytes.Length);
result.AddRange(preamble);
result.AddRange(Encoding.UTF8.GetBytes(typeLenStr));
result.Add(0x20); // space
result.AddRange(typeBytes);
result.Add(0x20); // space
result.AddRange(Encoding.UTF8.GetBytes(payloadLenStr));
result.Add(0x20); // space
result.AddRange(payloadBytes);
return result.ToArray();
}
}
/// <summary>
/// Result of signing a mirror bundle.
/// </summary>
public sealed record MirrorBundleSigningResult(
MirrorBundleDsseSignature ExportSignature,
MirrorBundleDsseSignature ManifestSignature,
string ArchiveSignature);
/// <summary>
/// Extension methods for mirror bundle signing.
/// </summary>
public static class MirrorBundleSigningExtensions
{
/// <summary>
/// Signs all components of a mirror bundle build result.
/// </summary>
public static async Task<MirrorBundleSigningResult> SignBundleAsync(
this MirrorBundleBuildResult buildResult,
IMirrorBundleManifestSigner manifestSigner,
IMirrorBundleArchiveSigner archiveSigner,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(buildResult);
ArgumentNullException.ThrowIfNull(manifestSigner);
ArgumentNullException.ThrowIfNull(archiveSigner);
var exportSigTask = manifestSigner.SignExportDocumentAsync(buildResult.ExportDocumentJson, cancellationToken);
var manifestSigTask = manifestSigner.SignManifestAsync(buildResult.ManifestJson, cancellationToken);
var archiveSigTask = archiveSigner.SignArchiveAsync(buildResult.BundleStream, cancellationToken);
await Task.WhenAll(exportSigTask, manifestSigTask, archiveSigTask);
return new MirrorBundleSigningResult(
await exportSigTask,
await manifestSigTask,
await archiveSigTask);
}
/// <summary>
/// Serializes a DSSE signature to JSON.
/// </summary>
public static string ToJson(this MirrorBundleDsseSignature signature)
{
return JsonSerializer.Serialize(signature, new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
});
}
}
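
For illustration, a minimal usage sketch of the signer above, assuming an ICryptoHmac implementation is available from the host's StellaOps.Cryptography registration and that the DSSE records live in the same namespace; the key material and key id are placeholders, not real configuration.

using System.Threading.Tasks;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.MirrorBundle;

public static class MirrorBundleSigningSketch
{
    public static async Task<string> SignExportAsync(ICryptoHmac cryptoHmac, string exportJson)
    {
        // Key and key id are placeholders; the constructor UTF-8 encodes the key string.
        var signer = new HmacMirrorBundleManifestSigner(cryptoHmac, key: "placeholder-signing-key", keyId: "mirror-bundle-hmac");

        // The HMAC covers the DSSE PAE:
        // "DSSEv1" SP len(payloadType) SP payloadType SP len(payload) SP payload
        var envelope = await signer.SignExportDocumentAsync(exportJson);

        // Serialize the envelope (payloadType, base64 payload, signatures[]) for storage alongside the bundle.
        return envelope.ToJson();
    }
}

The resulting JSON carries the payload type, the base64-encoded payload, and one signature entry keyed by the configured key id, matching the records at the top of this file.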

View File

@@ -0,0 +1,477 @@
using System.Net.Http.Headers;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.Notifications;
/// <summary>
/// Interface for emitting export notifications when bundles are ready.
/// </summary>
public interface IExportNotificationEmitter
{
/// <summary>
/// Emits an airgap-ready notification.
/// </summary>
Task<ExportNotificationResult> EmitAirgapReadyAsync(
ExportAirgapReadyNotification notification,
CancellationToken cancellationToken = default);
/// <summary>
/// Emits the notification to the timeline event sink for audit purposes.
/// </summary>
Task<ExportNotificationResult> EmitToTimelineAsync(
ExportAirgapReadyNotification notification,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Emitter for export notifications supporting NATS sink and webhook delivery.
/// Retries transient failures with exponential backoff and routes exhausted deliveries to the dead-letter queue (DLQ).
/// </summary>
public sealed class ExportNotificationEmitter : IExportNotificationEmitter
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = false
};
private static readonly TimeSpan[] RetryDelays =
[
TimeSpan.FromSeconds(1),
TimeSpan.FromSeconds(2),
TimeSpan.FromSeconds(4),
TimeSpan.FromSeconds(8),
TimeSpan.FromSeconds(16)
];
private readonly IExportNotificationSink _sink;
private readonly IExportWebhookClient? _webhookClient;
private readonly IExportNotificationDlq _dlq;
private readonly TimeProvider _timeProvider;
private readonly ILogger<ExportNotificationEmitter> _logger;
private readonly ExportNotificationEmitterOptions _options;
public ExportNotificationEmitter(
IExportNotificationSink sink,
IExportNotificationDlq dlq,
TimeProvider timeProvider,
ILogger<ExportNotificationEmitter> logger,
ExportNotificationEmitterOptions? options = null,
IExportWebhookClient? webhookClient = null)
{
_sink = sink ?? throw new ArgumentNullException(nameof(sink));
_dlq = dlq ?? throw new ArgumentNullException(nameof(dlq));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options ?? ExportNotificationEmitterOptions.Default;
_webhookClient = webhookClient;
}
public async Task<ExportNotificationResult> EmitAirgapReadyAsync(
ExportAirgapReadyNotification notification,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(notification);
var payload = JsonSerializer.Serialize(notification, SerializerOptions);
// Try NATS sink first
var sinkResult = await EmitToSinkWithRetryAsync(
ExportNotificationTypes.AirgapReady,
payload,
notification.ExportId,
notification.BundleId,
notification.TenantId,
cancellationToken);
if (!sinkResult.Success)
{
_logger.LogWarning(
"Failed to emit airgap ready notification to sink for export {ExportId}: {Error}",
notification.ExportId, sinkResult.ErrorMessage);
await RouteToDlqAsync(notification, sinkResult, cancellationToken);
return sinkResult;
}
// Try webhook delivery if configured
if (_webhookClient is not null && _options.WebhookEnabled)
{
var webhookResult = await EmitToWebhookWithRetryAsync(
notification,
payload,
cancellationToken);
if (!webhookResult.Success)
{
_logger.LogWarning(
"Failed to deliver airgap ready notification to webhook for export {ExportId}: {Error}",
notification.ExportId, webhookResult.ErrorMessage);
await RouteToDlqAsync(notification, webhookResult, cancellationToken);
return webhookResult;
}
}
_logger.LogInformation(
"Emitted airgap ready notification for export {ExportId} bundle {BundleId}",
notification.ExportId, notification.BundleId);
return sinkResult;
}
public async Task<ExportNotificationResult> EmitToTimelineAsync(
ExportAirgapReadyNotification notification,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(notification);
var payload = JsonSerializer.Serialize(notification, SerializerOptions);
var result = await EmitToSinkWithRetryAsync(
ExportNotificationTypes.TimelineAirgapReady,
payload,
notification.ExportId,
notification.BundleId,
notification.TenantId,
cancellationToken);
if (result.Success)
{
_logger.LogDebug(
"Emitted timeline notification for export {ExportId}",
notification.ExportId);
}
return result;
}
private async Task<ExportNotificationResult> EmitToSinkWithRetryAsync(
string channel,
string payload,
string exportId,
string bundleId,
string tenantId,
CancellationToken cancellationToken)
{
var attempt = 0;
string? lastError = null;
while (attempt < _options.MaxRetries)
{
try
{
await _sink.PublishAsync(channel, payload, cancellationToken);
return new ExportNotificationResult(Success: true, AttemptCount: attempt + 1);
}
catch (Exception ex) when (IsTransient(ex) && attempt < _options.MaxRetries - 1)
{
lastError = ex.Message;
attempt++;
var delay = attempt <= RetryDelays.Length
? RetryDelays[attempt - 1]
: RetryDelays[^1];
_logger.LogWarning(ex,
"Transient failure emitting notification for export {ExportId}, attempt {Attempt}/{MaxRetries}",
exportId, attempt, _options.MaxRetries);
await Task.Delay(delay, cancellationToken);
}
catch (Exception ex)
{
_logger.LogError(ex,
"Non-transient failure emitting notification for export {ExportId}",
exportId);
return new ExportNotificationResult(
Success: false,
ErrorMessage: ex.Message,
AttemptCount: attempt + 1);
}
}
return new ExportNotificationResult(
Success: false,
ErrorMessage: lastError ?? "Max retries exceeded",
AttemptCount: attempt);
}
private async Task<ExportNotificationResult> EmitToWebhookWithRetryAsync(
ExportAirgapReadyNotification notification,
string payload,
CancellationToken cancellationToken)
{
var attempt = 0;
int? lastStatus = null;
string? lastError = null;
while (attempt < _options.MaxRetries)
{
try
{
var result = await _webhookClient!.DeliverAsync(
ExportNotificationTypes.AirgapReady,
payload,
_timeProvider.GetUtcNow(),
cancellationToken);
if (result.Success)
{
return new ExportNotificationResult(
Success: true,
AttemptCount: attempt + 1,
LastResponseStatus: result.StatusCode);
}
lastStatus = result.StatusCode;
lastError = result.ErrorMessage;
if (!result.ShouldRetry)
{
return new ExportNotificationResult(
Success: false,
ErrorMessage: result.ErrorMessage,
AttemptCount: attempt + 1,
LastResponseStatus: result.StatusCode);
}
attempt++;
var delay = attempt <= RetryDelays.Length
? RetryDelays[attempt - 1]
: RetryDelays[^1];
_logger.LogWarning(
"Webhook delivery failed for export {ExportId} with status {StatusCode}, attempt {Attempt}/{MaxRetries}",
notification.ExportId, result.StatusCode, attempt, _options.MaxRetries);
await Task.Delay(delay, cancellationToken);
}
catch (Exception ex) when (IsTransient(ex) && attempt < _options.MaxRetries - 1)
{
lastError = ex.Message;
attempt++;
var delay = attempt <= RetryDelays.Length
? RetryDelays[attempt - 1]
: RetryDelays[^1];
await Task.Delay(delay, cancellationToken);
}
catch (Exception ex)
{
return new ExportNotificationResult(
Success: false,
ErrorMessage: ex.Message,
AttemptCount: attempt + 1,
LastResponseStatus: lastStatus);
}
}
return new ExportNotificationResult(
Success: false,
ErrorMessage: lastError ?? "Max retries exceeded",
AttemptCount: attempt,
LastResponseStatus: lastStatus);
}
private async Task RouteToDlqAsync(
ExportAirgapReadyNotification notification,
ExportNotificationResult result,
CancellationToken cancellationToken)
{
var payload = JsonSerializer.Serialize(notification, SerializerOptions);
var dlqEntry = new ExportNotificationDlqEntry
{
EventType = ExportNotificationTypes.AirgapReady,
ExportId = notification.ExportId,
BundleId = notification.BundleId,
TenantId = notification.TenantId,
FailureReason = result.ErrorMessage ?? "Unknown failure",
LastResponseStatus = result.LastResponseStatus,
AttemptCount = result.AttemptCount,
LastAttemptAt = _timeProvider.GetUtcNow(),
OriginalPayload = payload
};
try
{
await _dlq.EnqueueAsync(dlqEntry, cancellationToken);
_logger.LogInformation(
"Routed failed notification for export {ExportId} to DLQ after {AttemptCount} attempts",
notification.ExportId, result.AttemptCount);
}
catch (Exception ex)
{
_logger.LogError(ex,
"Failed to route notification for export {ExportId} to DLQ",
notification.ExportId);
}
}
private static bool IsTransient(Exception ex)
{
return ex is TimeoutException or
TaskCanceledException or
HttpRequestException or
IOException;
}
}
/// <summary>
/// Options for export notification emitter.
/// </summary>
public sealed record ExportNotificationEmitterOptions(
int MaxRetries,
bool WebhookEnabled,
TimeSpan WebhookTimeout)
{
public static ExportNotificationEmitterOptions Default => new(
MaxRetries: 5,
WebhookEnabled: true,
WebhookTimeout: TimeSpan.FromSeconds(30));
}
/// <summary>
/// Sink for publishing export notifications (NATS, etc.).
/// </summary>
public interface IExportNotificationSink
{
Task PublishAsync(string channel, string message, CancellationToken cancellationToken = default);
}
/// <summary>
/// Dead letter queue for failed notifications.
/// </summary>
public interface IExportNotificationDlq
{
Task EnqueueAsync(ExportNotificationDlqEntry entry, CancellationToken cancellationToken = default);
Task<IReadOnlyList<ExportNotificationDlqEntry>> GetPendingAsync(
string? tenantId = null,
int limit = 100,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Client for webhook delivery.
/// </summary>
public interface IExportWebhookClient
{
Task<WebhookDeliveryResult> DeliverAsync(
string eventType,
string payload,
DateTimeOffset sentAt,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of webhook delivery attempt.
/// </summary>
public sealed record WebhookDeliveryResult(
bool Success,
int? StatusCode,
string? ErrorMessage,
bool ShouldRetry);
/// <summary>
/// In-memory implementation of notification sink for testing.
/// </summary>
public sealed class InMemoryExportNotificationSink : IExportNotificationSink
{
private readonly List<(string Channel, string Message, DateTimeOffset ReceivedAt)> _messages = new();
private readonly object _lock = new();
private readonly TimeProvider _timeProvider;
public InMemoryExportNotificationSink(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
public Task PublishAsync(string channel, string message, CancellationToken cancellationToken = default)
{
lock (_lock)
{
_messages.Add((channel, message, _timeProvider.GetUtcNow()));
}
return Task.CompletedTask;
}
public IReadOnlyList<(string Channel, string Message, DateTimeOffset ReceivedAt)> GetMessages()
{
lock (_lock) { return _messages.ToList(); }
}
public IReadOnlyList<string> GetMessages(string channel)
{
lock (_lock) { return _messages.Where(m => m.Channel == channel).Select(m => m.Message).ToList(); }
}
public int Count
{
get { lock (_lock) { return _messages.Count; } }
}
public void Clear()
{
lock (_lock) { _messages.Clear(); }
}
}
/// <summary>
/// In-memory implementation of DLQ for testing.
/// </summary>
public sealed class InMemoryExportNotificationDlq : IExportNotificationDlq
{
private readonly List<ExportNotificationDlqEntry> _entries = new();
private readonly object _lock = new();
public Task EnqueueAsync(ExportNotificationDlqEntry entry, CancellationToken cancellationToken = default)
{
lock (_lock)
{
_entries.Add(entry);
}
return Task.CompletedTask;
}
public Task<IReadOnlyList<ExportNotificationDlqEntry>> GetPendingAsync(
string? tenantId = null,
int limit = 100,
CancellationToken cancellationToken = default)
{
lock (_lock)
{
var query = tenantId is not null
? _entries.Where(e => e.TenantId == tenantId)
: _entries.AsEnumerable();
return Task.FromResult<IReadOnlyList<ExportNotificationDlqEntry>>(
query.Take(limit).ToList());
}
}
public IReadOnlyList<ExportNotificationDlqEntry> GetAll()
{
lock (_lock) { return _entries.ToList(); }
}
public int Count
{
get { lock (_lock) { return _entries.Count; } }
}
public void Clear()
{
lock (_lock) { _entries.Clear(); }
}
}
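
Because the in-memory sink and DLQ above live in this file, the retry/DLQ flow can be sketched end-to-end without external infrastructure. The sketch below assumes NullLogger from Microsoft.Extensions.Logging.Abstractions is available; all identifiers, URIs, and hashes are placeholders.

using System;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Notifications;

public static class ExportNotificationEmitterSketch
{
    public static async Task EmitAirgapReadyOnceAsync()
    {
        var sink = new InMemoryExportNotificationSink();
        var dlq = new InMemoryExportNotificationDlq();

        var emitter = new ExportNotificationEmitter(
            sink,
            dlq,
            TimeProvider.System,
            NullLogger<ExportNotificationEmitter>.Instance,
            ExportNotificationEmitterOptions.Default with { WebhookEnabled = false });

        var notification = new ExportAirgapReadyNotification
        {
            ArtifactSha256 = "0000000000000000000000000000000000000000000000000000000000000000", // placeholder
            ArtifactUri = "oci://mirror.example/export/bundle:v1",                               // placeholder
            BundleId = "bundle-0001",
            CreatedAt = DateTimeOffset.UtcNow,
            ExportId = "export-0001",
            PortableVersion = "portable-evidence/v1",
            ProfileId = "mirror:full",
            RootHash = "sha256:placeholder",
            TenantId = "tenant-a"
        };

        // Publishes to "export.airgap.ready.v1"; after exhausting retries the payload is routed to the DLQ instead.
        var result = await emitter.EmitAirgapReadyAsync(notification);
        Console.WriteLine($"success={result.Success} sinkMessages={sink.GetMessages(ExportNotificationTypes.AirgapReady).Count} dlq={dlq.Count}");
    }
}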

View File

@@ -0,0 +1,133 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.Notifications;
/// <summary>
/// Export notification event types.
/// </summary>
public static class ExportNotificationTypes
{
public const string AirgapReady = "export.airgap.ready.v1";
public const string AirgapReadyDlq = "export.airgap.ready.dlq";
public const string TimelineAirgapReady = "timeline.export.airgap.ready";
}
/// <summary>
/// Payload for export airgap ready notification.
/// Properties are declared in alphabetical key order so the serialized payload is deterministic.
/// </summary>
public sealed record ExportAirgapReadyNotification
{
[JsonPropertyName("artifact_sha256")]
public required string ArtifactSha256 { get; init; }
[JsonPropertyName("artifact_uri")]
public required string ArtifactUri { get; init; }
[JsonPropertyName("bundle_id")]
public required string BundleId { get; init; }
[JsonPropertyName("created_at")]
public required DateTimeOffset CreatedAt { get; init; }
[JsonPropertyName("expires_at")]
public DateTimeOffset? ExpiresAt { get; init; }
[JsonPropertyName("export_id")]
public required string ExportId { get; init; }
[JsonPropertyName("metadata")]
public ExportAirgapReadyMetadata? Metadata { get; init; }
[JsonPropertyName("portable_version")]
public required string PortableVersion { get; init; }
[JsonPropertyName("profile_id")]
public required string ProfileId { get; init; }
[JsonPropertyName("root_hash")]
public required string RootHash { get; init; }
[JsonPropertyName("tenant_id")]
public required string TenantId { get; init; }
[JsonPropertyName("type")]
public string Type => ExportNotificationTypes.AirgapReady;
}
/// <summary>
/// Metadata fields for the airgap ready notification.
/// </summary>
public sealed record ExportAirgapReadyMetadata
{
[JsonPropertyName("export_size_bytes")]
public long? ExportSizeBytes { get; init; }
[JsonPropertyName("portable_size_bytes")]
public long? PortableSizeBytes { get; init; }
[JsonPropertyName("source_uri")]
public string? SourceUri { get; init; }
}
/// <summary>
/// DLQ entry for failed notification delivery.
/// </summary>
public sealed record ExportNotificationDlqEntry
{
[JsonPropertyName("event_type")]
public required string EventType { get; init; }
[JsonPropertyName("export_id")]
public required string ExportId { get; init; }
[JsonPropertyName("bundle_id")]
public required string BundleId { get; init; }
[JsonPropertyName("tenant_id")]
public required string TenantId { get; init; }
[JsonPropertyName("failure_reason")]
public required string FailureReason { get; init; }
[JsonPropertyName("last_response_status")]
public int? LastResponseStatus { get; init; }
[JsonPropertyName("attempt_count")]
public required int AttemptCount { get; init; }
[JsonPropertyName("last_attempt_at")]
public required DateTimeOffset LastAttemptAt { get; init; }
[JsonPropertyName("original_payload")]
public required string OriginalPayload { get; init; }
}
/// <summary>
/// Webhook delivery headers.
/// </summary>
public static class ExportNotificationHeaders
{
public const string EventType = "X-Stella-Event-Type";
public const string Signature = "X-Stella-Signature";
public const string SentAt = "X-Stella-Sent-At";
}
/// <summary>
/// Configuration for notification delivery.
/// </summary>
public sealed record ExportNotificationConfig(
bool Enabled,
string? WebhookUrl,
string? SigningKey,
int MaxRetries = 5,
TimeSpan? RetentionPeriod = null);
/// <summary>
/// Result of attempting to send a notification.
/// </summary>
public sealed record ExportNotificationResult(
bool Success,
string? ErrorMessage = null,
int AttemptCount = 1,
int? LastResponseStatus = null);
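
A small serialization sketch for the payload contract above; it relies only on System.Text.Json and the JsonPropertyName attributes already declared, with the example output in the comment abbreviated.

using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.ExportCenter.Core.Notifications;

public static class ExportNotificationWireFormatSketch
{
    private static readonly JsonSerializerOptions Options = new(JsonSerializerDefaults.Web)
    {
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    public static string ToWirePayload(ExportAirgapReadyNotification notification)
    {
        // The JsonPropertyName attributes pin snake_case keys, and the declaration order is
        // alphabetical, so the output shape is stable, e.g. (values abbreviated):
        // {"artifact_sha256":"...","artifact_uri":"...","bundle_id":"...","created_at":"...",
        //  "export_id":"...","portable_version":"...","profile_id":"...","root_hash":"...",
        //  "tenant_id":"...","type":"export.airgap.ready.v1"}
        return JsonSerializer.Serialize(notification, Options);
    }
}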

View File

@@ -0,0 +1,207 @@
using System.Net;
using System.Net.Http.Headers;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.Core.Notifications;
/// <summary>
/// HTTP webhook client for export notifications with HMAC-SHA256 signing.
/// </summary>
public sealed class ExportWebhookClient : IExportWebhookClient
{
private readonly HttpClient _httpClient;
private readonly ExportWebhookOptions _options;
private readonly ILogger<ExportWebhookClient> _logger;
public ExportWebhookClient(
HttpClient httpClient,
ExportWebhookOptions options,
ILogger<ExportWebhookClient> logger)
{
_httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
_options = options ?? throw new ArgumentNullException(nameof(options));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public async Task<WebhookDeliveryResult> DeliverAsync(
string eventType,
string payload,
DateTimeOffset sentAt,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(_options.WebhookUrl))
{
return new WebhookDeliveryResult(
Success: false,
StatusCode: null,
ErrorMessage: "Webhook URL not configured",
ShouldRetry: false);
}
try
{
using var request = new HttpRequestMessage(HttpMethod.Post, _options.WebhookUrl);
request.Content = new StringContent(payload, Encoding.UTF8, "application/json");
// Add standard headers
request.Headers.Add(ExportNotificationHeaders.EventType, eventType);
request.Headers.Add(ExportNotificationHeaders.SentAt, sentAt.ToString("O"));
// Add signature if signing key is configured
if (!string.IsNullOrWhiteSpace(_options.SigningKey))
{
var signature = ComputeSignature(payload, sentAt, _options.SigningKey);
request.Headers.Add(ExportNotificationHeaders.Signature, signature);
}
var response = await _httpClient.SendAsync(request, cancellationToken);
var statusCode = (int)response.StatusCode;
if (response.IsSuccessStatusCode)
{
_logger.LogDebug(
"Webhook delivery succeeded with status {StatusCode}",
statusCode);
return new WebhookDeliveryResult(
Success: true,
StatusCode: statusCode,
ErrorMessage: null,
ShouldRetry: false);
}
var shouldRetry = ShouldRetryStatusCode(response.StatusCode);
var errorMessage = $"HTTP {statusCode}: {response.ReasonPhrase}";
_logger.LogWarning(
"Webhook delivery failed with status {StatusCode}, shouldRetry={ShouldRetry}",
statusCode, shouldRetry);
return new WebhookDeliveryResult(
Success: false,
StatusCode: statusCode,
ErrorMessage: errorMessage,
ShouldRetry: shouldRetry);
}
catch (TaskCanceledException) when (cancellationToken.IsCancellationRequested)
{
throw;
}
catch (HttpRequestException ex)
{
_logger.LogWarning(ex, "Webhook delivery failed with HTTP error");
return new WebhookDeliveryResult(
Success: false,
StatusCode: null,
ErrorMessage: ex.Message,
ShouldRetry: true);
}
catch (Exception ex)
{
_logger.LogError(ex, "Webhook delivery failed with unexpected error");
return new WebhookDeliveryResult(
Success: false,
StatusCode: null,
ErrorMessage: ex.Message,
ShouldRetry: false);
}
}
/// <summary>
/// Computes HMAC-SHA256 signature for webhook payload.
/// Format: sha256=&lt;hex-encoded-hmac&gt;
/// </summary>
public static string ComputeSignature(string payload, DateTimeOffset sentAt, string signingKey)
{
// PAE (Pre-Authentication Encoding) style: timestamp.payload
var signatureInput = $"{sentAt:O}.{payload}";
var inputBytes = Encoding.UTF8.GetBytes(signatureInput);
byte[] keyBytes;
try
{
keyBytes = Convert.FromBase64String(signingKey);
}
catch (FormatException)
{
try
{
keyBytes = Convert.FromHexString(signingKey);
}
catch (FormatException)
{
keyBytes = Encoding.UTF8.GetBytes(signingKey);
}
}
using var hmac = new HMACSHA256(keyBytes);
var hash = hmac.ComputeHash(inputBytes);
return "sha256=" + Convert.ToHexString(hash).ToLowerInvariant();
}
/// <summary>
/// Verifies a webhook signature.
/// </summary>
public static bool VerifySignature(string payload, DateTimeOffset sentAt, string signingKey, string providedSignature)
{
var expectedSignature = ComputeSignature(payload, sentAt, signingKey);
return CryptographicOperations.FixedTimeEquals(
Encoding.UTF8.GetBytes(expectedSignature),
Encoding.UTF8.GetBytes(providedSignature.Trim()));
}
private static bool ShouldRetryStatusCode(HttpStatusCode statusCode)
{
return statusCode switch
{
HttpStatusCode.RequestTimeout => true,
HttpStatusCode.TooManyRequests => true,
HttpStatusCode.InternalServerError => true,
HttpStatusCode.BadGateway => true,
HttpStatusCode.ServiceUnavailable => true,
HttpStatusCode.GatewayTimeout => true,
_ => false
};
}
}
/// <summary>
/// Options for export webhook client.
/// </summary>
public sealed record ExportWebhookOptions(
string? WebhookUrl,
string? SigningKey,
TimeSpan Timeout)
{
public static ExportWebhookOptions Default => new(
WebhookUrl: null,
SigningKey: null,
Timeout: TimeSpan.FromSeconds(30));
}
/// <summary>
/// No-op webhook client used when webhook delivery is disabled.
/// </summary>
public sealed class NullExportWebhookClient : IExportWebhookClient
{
public static NullExportWebhookClient Instance { get; } = new();
private NullExportWebhookClient() { }
public Task<WebhookDeliveryResult> DeliverAsync(
string eventType,
string payload,
DateTimeOffset sentAt,
CancellationToken cancellationToken = default)
{
return Task.FromResult(new WebhookDeliveryResult(
Success: true,
StatusCode: 200,
ErrorMessage: null,
ShouldRetry: false));
}
}
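
On the receiving side, the static ComputeSignature/VerifySignature helpers above are enough to authenticate a delivery. A hedged receiver sketch, assuming the caller has already read the X-Stella-Sent-At and X-Stella-Signature header values and holds the shared signing key:

using System;
using System.Globalization;
using StellaOps.ExportCenter.Core.Notifications;

public static class WebhookReceiverSketch
{
    // 'payload' is the raw request body; the header values are assumed to have been read
    // from ExportNotificationHeaders.SentAt and ExportNotificationHeaders.Signature.
    public static bool IsAuthentic(string payload, string sentAtHeader, string signatureHeader, string signingKey)
    {
        var sentAt = DateTimeOffset.Parse(sentAtHeader, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind);

        // Recomputes sha256=<hex> over "<sentAt:O>.<payload>" and compares in constant time.
        return ExportWebhookClient.VerifySignature(payload, sentAt, signingKey, signatureHeader);
    }
}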

View File

@@ -0,0 +1,289 @@
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.OfflineKit;
/// <summary>
/// Distributes offline kits to mirror locations for air-gap deployment.
/// Implements EXPORT-ATTEST-75-002: bit-for-bit copy with manifest publication.
/// </summary>
public sealed class OfflineKitDistributor
{
private const string ManifestOfflineFileName = "manifest-offline.json";
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
private readonly ICryptoHash _cryptoHash;
private readonly TimeProvider _timeProvider;
public OfflineKitDistributor(ICryptoHash cryptoHash, TimeProvider? timeProvider = null)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Distributes an offline kit to a mirror location.
/// </summary>
public OfflineKitDistributionResult DistributeToMirror(
string sourceKitPath,
string mirrorBasePath,
string kitVersion,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(sourceKitPath);
ArgumentException.ThrowIfNullOrWhiteSpace(mirrorBasePath);
ArgumentException.ThrowIfNullOrWhiteSpace(kitVersion);
cancellationToken.ThrowIfCancellationRequested();
if (!Directory.Exists(sourceKitPath))
{
return OfflineKitDistributionResult.Failed($"Source kit directory not found: {sourceKitPath}");
}
try
{
// Target path: mirror/export/attestations/{kitVersion}/
var targetPath = Path.Combine(mirrorBasePath, "export", "attestations", kitVersion);
// Ensure target directory exists
Directory.CreateDirectory(targetPath);
// Copy all files bit-for-bit
var copiedFiles = CopyKitFilesRecursively(sourceKitPath, targetPath);
// Build manifest entries
var entries = new List<OfflineKitManifestEntry>();
// Check for attestation bundle
var attestationBundlePath = Path.Combine(targetPath, "attestations", "export-attestation-bundle-v1.tgz");
if (File.Exists(attestationBundlePath))
{
var bundleBytes = File.ReadAllBytes(attestationBundlePath);
var bundleHash = _cryptoHash.ComputeHashHexForPurpose(bundleBytes, HashPurpose.Content);
entries.Add(new OfflineKitManifestEntry(
Kind: "attestation-kit",
KitVersion: kitVersion,
Artifact: "attestations/export-attestation-bundle-v1.tgz",
Checksum: "checksums/attestations/export-attestation-bundle-v1.tgz.sha256",
CliExample: "stella attest bundle verify --file attestations/export-attestation-bundle-v1.tgz",
ImportExample: "stella attest bundle import --file attestations/export-attestation-bundle-v1.tgz --offline",
RootHash: $"sha256:{bundleHash}",
CreatedAt: _timeProvider.GetUtcNow()));
}
// Check for mirror bundle
var mirrorBundlePath = Path.Combine(targetPath, "mirrors", "export-mirror-bundle-v1.tgz");
if (File.Exists(mirrorBundlePath))
{
var bundleBytes = File.ReadAllBytes(mirrorBundlePath);
var bundleHash = _cryptoHash.ComputeHashHexForPurpose(bundleBytes, HashPurpose.Content);
entries.Add(new OfflineKitManifestEntry(
Kind: "mirror-bundle",
KitVersion: kitVersion,
Artifact: "mirrors/export-mirror-bundle-v1.tgz",
Checksum: "checksums/mirrors/export-mirror-bundle-v1.tgz.sha256",
CliExample: "stella mirror verify --file mirrors/export-mirror-bundle-v1.tgz",
ImportExample: "stella mirror import --file mirrors/export-mirror-bundle-v1.tgz --offline",
RootHash: $"sha256:{bundleHash}",
CreatedAt: _timeProvider.GetUtcNow()));
}
// Check for bootstrap pack
var bootstrapPackPath = Path.Combine(targetPath, "bootstrap", "export-bootstrap-pack-v1.tgz");
if (File.Exists(bootstrapPackPath))
{
var bundleBytes = File.ReadAllBytes(bootstrapPackPath);
var bundleHash = _cryptoHash.ComputeHashHexForPurpose(bundleBytes, HashPurpose.Content);
entries.Add(new OfflineKitManifestEntry(
Kind: "bootstrap-pack",
KitVersion: kitVersion,
Artifact: "bootstrap/export-bootstrap-pack-v1.tgz",
Checksum: "checksums/bootstrap/export-bootstrap-pack-v1.tgz.sha256",
CliExample: "stella bootstrap verify --file bootstrap/export-bootstrap-pack-v1.tgz",
ImportExample: "stella bootstrap import --file bootstrap/export-bootstrap-pack-v1.tgz --offline",
RootHash: $"sha256:{bundleHash}",
CreatedAt: _timeProvider.GetUtcNow()));
}
// Write manifest-offline.json
var manifest = new OfflineKitOfflineManifest(
Version: "offline-kit/v1",
KitVersion: kitVersion,
CreatedAt: _timeProvider.GetUtcNow(),
Entries: entries);
var manifestJson = JsonSerializer.Serialize(manifest, SerializerOptions);
var manifestPath = Path.Combine(targetPath, ManifestOfflineFileName);
File.WriteAllText(manifestPath, manifestJson, Encoding.UTF8);
// Write manifest checksum
var manifestHash = _cryptoHash.ComputeHashHexForPurpose(
Encoding.UTF8.GetBytes(manifestJson), HashPurpose.Content);
var manifestChecksumPath = manifestPath + ".sha256";
File.WriteAllText(manifestChecksumPath, $"{manifestHash} {ManifestOfflineFileName}", Encoding.UTF8);
return new OfflineKitDistributionResult(
Success: true,
TargetPath: targetPath,
ManifestPath: manifestPath,
CopiedFileCount: copiedFiles,
EntryCount: entries.Count);
}
catch (Exception ex)
{
return OfflineKitDistributionResult.Failed($"Distribution failed: {ex.Message}");
}
}
/// <summary>
/// Verifies that a distributed kit matches its source.
/// </summary>
public OfflineKitVerificationResult VerifyDistribution(
string sourceKitPath,
string targetKitPath,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(sourceKitPath);
ArgumentException.ThrowIfNullOrWhiteSpace(targetKitPath);
if (!Directory.Exists(sourceKitPath))
{
return OfflineKitVerificationResult.Failed($"Source kit not found: {sourceKitPath}");
}
if (!Directory.Exists(targetKitPath))
{
return OfflineKitVerificationResult.Failed($"Target kit not found: {targetKitPath}");
}
var mismatches = new List<string>();
// Get all files in source
var sourceFiles = Directory.GetFiles(sourceKitPath, "*", SearchOption.AllDirectories)
.Select(f => Path.GetRelativePath(sourceKitPath, f))
.OrderBy(f => f, StringComparer.Ordinal)
.ToList();
foreach (var relativePath in sourceFiles)
{
cancellationToken.ThrowIfCancellationRequested();
var sourceFilePath = Path.Combine(sourceKitPath, relativePath);
var targetFilePath = Path.Combine(targetKitPath, relativePath);
if (!File.Exists(targetFilePath))
{
mismatches.Add($"Missing: {relativePath}");
continue;
}
// Compare hashes
var sourceBytes = File.ReadAllBytes(sourceFilePath);
var targetBytes = File.ReadAllBytes(targetFilePath);
var sourceHash = _cryptoHash.ComputeHashHexForPurpose(sourceBytes, HashPurpose.Content);
var targetHash = _cryptoHash.ComputeHashHexForPurpose(targetBytes, HashPurpose.Content);
if (!string.Equals(sourceHash, targetHash, StringComparison.OrdinalIgnoreCase))
{
mismatches.Add($"Hash mismatch: {relativePath}");
}
}
if (mismatches.Count > 0)
{
return new OfflineKitVerificationResult(
Success: false,
Mismatches: mismatches,
ErrorMessage: $"Found {mismatches.Count} mismatches");
}
return new OfflineKitVerificationResult(
Success: true,
Mismatches: Array.Empty<string>());
}
private static int CopyKitFilesRecursively(string sourceDir, string targetDir)
{
var count = 0;
foreach (var sourceFilePath in Directory.GetFiles(sourceDir, "*", SearchOption.AllDirectories))
{
var relativePath = Path.GetRelativePath(sourceDir, sourceFilePath);
var targetFilePath = Path.Combine(targetDir, relativePath);
var targetFileDir = Path.GetDirectoryName(targetFilePath);
if (!string.IsNullOrEmpty(targetFileDir))
{
Directory.CreateDirectory(targetFileDir);
}
// Bit-for-bit copy
File.Copy(sourceFilePath, targetFilePath, overwrite: true);
count++;
}
return count;
}
}
/// <summary>
/// Manifest entry for offline kit distribution.
/// </summary>
public sealed record OfflineKitManifestEntry(
[property: JsonPropertyName("kind")] string Kind,
[property: JsonPropertyName("kitVersion")] string KitVersion,
[property: JsonPropertyName("artifact")] string Artifact,
[property: JsonPropertyName("checksum")] string Checksum,
[property: JsonPropertyName("cliExample")] string CliExample,
[property: JsonPropertyName("importExample")] string ImportExample,
[property: JsonPropertyName("rootHash")] string RootHash,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt);
/// <summary>
/// Offline manifest for air-gap deployment.
/// </summary>
public sealed record OfflineKitOfflineManifest(
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("kitVersion")] string KitVersion,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
[property: JsonPropertyName("entries")] IReadOnlyList<OfflineKitManifestEntry> Entries);
/// <summary>
/// Result of offline kit distribution.
/// </summary>
public sealed record OfflineKitDistributionResult(
bool Success,
string? TargetPath = null,
string? ManifestPath = null,
int CopiedFileCount = 0,
int EntryCount = 0,
string? ErrorMessage = null)
{
public static OfflineKitDistributionResult Failed(string errorMessage)
=> new(Success: false, ErrorMessage: errorMessage);
}
/// <summary>
/// Result of offline kit verification.
/// </summary>
public sealed record OfflineKitVerificationResult(
bool Success,
IReadOnlyList<string> Mismatches,
string? ErrorMessage = null)
{
public static OfflineKitVerificationResult Failed(string errorMessage)
=> new(Success: false, Mismatches: Array.Empty<string>(), ErrorMessage: errorMessage);
}
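
A distribution sketch using the types above; the paths are placeholders and the ICryptoHash instance is assumed to come from the host's crypto registration.

using System;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.OfflineKit;

public static class OfflineKitDistributionSketch
{
    public static void DistributeAndVerify(ICryptoHash cryptoHash)
    {
        var distributor = new OfflineKitDistributor(cryptoHash);

        // Copies into {mirrorBasePath}/export/attestations/{kitVersion}/ and writes manifest-offline.json.
        var result = distributor.DistributeToMirror(
            sourceKitPath: "/staging/offline-kit/2025.12",   // placeholder paths
            mirrorBasePath: "/mnt/mirror",
            kitVersion: "2025.12");

        if (!result.Success)
        {
            throw new InvalidOperationException(result.ErrorMessage);
        }

        // Re-hashes every copied file against the source to confirm the bit-for-bit guarantee.
        var verification = distributor.VerifyDistribution("/staging/offline-kit/2025.12", result.TargetPath!);
        Console.WriteLine(verification.Success
            ? $"Verified {result.CopiedFileCount} files; manifest at {result.ManifestPath}"
            : string.Join(Environment.NewLine, verification.Mismatches));
    }
}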

View File

@@ -0,0 +1,120 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.OfflineKit;
/// <summary>
/// Manifest entry for an attestation bundle in an offline kit.
/// </summary>
public sealed record OfflineKitAttestationEntry(
[property: JsonPropertyName("kind")] string Kind,
[property: JsonPropertyName("exportId")] string ExportId,
[property: JsonPropertyName("attestationId")] string AttestationId,
[property: JsonPropertyName("rootHash")] string RootHash,
[property: JsonPropertyName("artifact")] string Artifact,
[property: JsonPropertyName("checksum")] string Checksum,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt)
{
public const string KindValue = "attestation-export";
}
/// <summary>
/// Manifest entry for a mirror bundle in an offline kit.
/// </summary>
public sealed record OfflineKitMirrorEntry(
[property: JsonPropertyName("kind")] string Kind,
[property: JsonPropertyName("exportId")] string ExportId,
[property: JsonPropertyName("bundleId")] string BundleId,
[property: JsonPropertyName("profile")] string Profile,
[property: JsonPropertyName("rootHash")] string RootHash,
[property: JsonPropertyName("artifact")] string Artifact,
[property: JsonPropertyName("checksum")] string Checksum,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt)
{
public const string KindValue = "mirror-bundle";
}
/// <summary>
/// Manifest entry for a bootstrap pack in an offline kit.
/// </summary>
public sealed record OfflineKitBootstrapEntry(
[property: JsonPropertyName("kind")] string Kind,
[property: JsonPropertyName("exportId")] string ExportId,
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("rootHash")] string RootHash,
[property: JsonPropertyName("artifact")] string Artifact,
[property: JsonPropertyName("checksum")] string Checksum,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt)
{
public const string KindValue = "bootstrap-pack";
}
/// <summary>
/// Manifest entry for a portable evidence bundle in an offline kit.
/// </summary>
public sealed record OfflineKitPortableEvidenceEntry(
[property: JsonPropertyName("kind")] string Kind,
[property: JsonPropertyName("exportId")] string ExportId,
[property: JsonPropertyName("bundleId")] string BundleId,
[property: JsonPropertyName("rootHash")] string RootHash,
[property: JsonPropertyName("artifact")] string Artifact,
[property: JsonPropertyName("checksum")] string Checksum,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt)
{
public const string KindValue = "portable-evidence";
}
/// <summary>
/// Root manifest for an offline kit.
/// </summary>
public sealed record OfflineKitManifest(
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("kitId")] string KitId,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt,
[property: JsonPropertyName("entries")] IReadOnlyList<object> Entries)
{
public const string CurrentVersion = "offline-kit/v1";
}
/// <summary>
/// Request to add an attestation bundle to an offline kit.
/// </summary>
public sealed record OfflineKitAttestationRequest(
string KitId,
string ExportId,
string AttestationId,
string RootHash,
byte[] BundleBytes,
DateTimeOffset CreatedAt);
/// <summary>
/// Request to add a mirror bundle to an offline kit.
/// </summary>
public sealed record OfflineKitMirrorRequest(
string KitId,
string ExportId,
string BundleId,
string Profile,
string RootHash,
byte[] BundleBytes,
DateTimeOffset CreatedAt);
/// <summary>
/// Request to add a bootstrap pack to an offline kit.
/// </summary>
public sealed record OfflineKitBootstrapRequest(
string KitId,
string ExportId,
string Version,
string RootHash,
byte[] BundleBytes,
DateTimeOffset CreatedAt);
/// <summary>
/// Result of adding an entry to an offline kit.
/// </summary>
public sealed record OfflineKitAddResult(
bool Success,
string ArtifactPath,
string ChecksumPath,
string Sha256Hash,
string? ErrorMessage = null);

View File

@@ -0,0 +1,282 @@
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.OfflineKit;
/// <summary>
/// Packager for assembling offline kits with attestation bundles, mirror bundles, and bootstrap packs.
/// Ensures immutable, deterministic artefact placement with checksum publication.
/// </summary>
public sealed class OfflineKitPackager
{
private const string AttestationsDir = "attestations";
private const string MirrorsDir = "mirrors";
private const string BootstrapDir = "bootstrap";
private const string EvidenceDir = "evidence";
private const string ChecksumsDir = "checksums";
private const string ManifestFileName = "manifest.json";
private const string AttestationBundleFileName = "export-attestation-bundle-v1.tgz";
private const string MirrorBundleFileName = "export-mirror-bundle-v1.tgz";
private const string BootstrapBundleFileName = "export-bootstrap-pack-v1.tgz";
private const string EvidenceBundleFileName = "export-portable-bundle-v1.tgz";
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
private readonly ICryptoHash _cryptoHash;
private readonly TimeProvider _timeProvider;
public OfflineKitPackager(ICryptoHash cryptoHash, TimeProvider? timeProvider = null)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Adds an attestation bundle to the offline kit.
/// </summary>
public OfflineKitAddResult AddAttestationBundle(
string outputDirectory,
OfflineKitAttestationRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (string.IsNullOrWhiteSpace(outputDirectory))
{
throw new ArgumentException("Output directory must be provided.", nameof(outputDirectory));
}
cancellationToken.ThrowIfCancellationRequested();
var artifactRelativePath = Path.Combine(AttestationsDir, AttestationBundleFileName);
var checksumRelativePath = Path.Combine(ChecksumsDir, AttestationsDir, $"{AttestationBundleFileName}.sha256");
return WriteBundle(
outputDirectory,
request.BundleBytes,
artifactRelativePath,
checksumRelativePath,
AttestationBundleFileName);
}
/// <summary>
/// Adds a mirror bundle to the offline kit.
/// </summary>
public OfflineKitAddResult AddMirrorBundle(
string outputDirectory,
OfflineKitMirrorRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (string.IsNullOrWhiteSpace(outputDirectory))
{
throw new ArgumentException("Output directory must be provided.", nameof(outputDirectory));
}
cancellationToken.ThrowIfCancellationRequested();
var artifactRelativePath = Path.Combine(MirrorsDir, MirrorBundleFileName);
var checksumRelativePath = Path.Combine(ChecksumsDir, MirrorsDir, $"{MirrorBundleFileName}.sha256");
return WriteBundle(
outputDirectory,
request.BundleBytes,
artifactRelativePath,
checksumRelativePath,
MirrorBundleFileName);
}
/// <summary>
/// Adds a bootstrap pack to the offline kit.
/// </summary>
public OfflineKitAddResult AddBootstrapPack(
string outputDirectory,
OfflineKitBootstrapRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (string.IsNullOrWhiteSpace(outputDirectory))
{
throw new ArgumentException("Output directory must be provided.", nameof(outputDirectory));
}
cancellationToken.ThrowIfCancellationRequested();
var artifactRelativePath = Path.Combine(BootstrapDir, BootstrapBundleFileName);
var checksumRelativePath = Path.Combine(ChecksumsDir, BootstrapDir, $"{BootstrapBundleFileName}.sha256");
return WriteBundle(
outputDirectory,
request.BundleBytes,
artifactRelativePath,
checksumRelativePath,
BootstrapBundleFileName);
}
/// <summary>
/// Creates a manifest entry for an attestation bundle.
/// </summary>
public OfflineKitAttestationEntry CreateAttestationEntry(OfflineKitAttestationRequest request, string sha256Hash)
{
return new OfflineKitAttestationEntry(
Kind: OfflineKitAttestationEntry.KindValue,
ExportId: request.ExportId,
AttestationId: request.AttestationId,
RootHash: $"sha256:{request.RootHash}",
Artifact: Path.Combine(AttestationsDir, AttestationBundleFileName).Replace('\\', '/'),
Checksum: Path.Combine(ChecksumsDir, AttestationsDir, $"{AttestationBundleFileName}.sha256").Replace('\\', '/'),
CreatedAt: request.CreatedAt);
}
/// <summary>
/// Creates a manifest entry for a mirror bundle.
/// </summary>
public OfflineKitMirrorEntry CreateMirrorEntry(OfflineKitMirrorRequest request, string sha256Hash)
{
return new OfflineKitMirrorEntry(
Kind: OfflineKitMirrorEntry.KindValue,
ExportId: request.ExportId,
BundleId: request.BundleId,
Profile: request.Profile,
RootHash: $"sha256:{request.RootHash}",
Artifact: Path.Combine(MirrorsDir, MirrorBundleFileName).Replace('\\', '/'),
Checksum: Path.Combine(ChecksumsDir, MirrorsDir, $"{MirrorBundleFileName}.sha256").Replace('\\', '/'),
CreatedAt: request.CreatedAt);
}
/// <summary>
/// Creates a manifest entry for a bootstrap pack.
/// </summary>
public OfflineKitBootstrapEntry CreateBootstrapEntry(OfflineKitBootstrapRequest request, string sha256Hash)
{
return new OfflineKitBootstrapEntry(
Kind: OfflineKitBootstrapEntry.KindValue,
ExportId: request.ExportId,
Version: request.Version,
RootHash: $"sha256:{request.RootHash}",
Artifact: Path.Combine(BootstrapDir, BootstrapBundleFileName).Replace('\\', '/'),
Checksum: Path.Combine(ChecksumsDir, BootstrapDir, $"{BootstrapBundleFileName}.sha256").Replace('\\', '/'),
CreatedAt: request.CreatedAt);
}
/// <summary>
/// Writes the offline kit manifest. Fails if a manifest already exists, because kit artefacts are immutable.
/// </summary>
public void WriteManifest(
string outputDirectory,
string kitId,
IEnumerable<object> entries,
CancellationToken cancellationToken = default)
{
var manifestPath = Path.Combine(outputDirectory, ManifestFileName);
// Check for existing manifest (immutability check)
if (File.Exists(manifestPath))
{
throw new InvalidOperationException($"Manifest already exists at '{manifestPath}'. Offline kit artefacts are immutable.");
}
var manifest = new OfflineKitManifest(
Version: OfflineKitManifest.CurrentVersion,
KitId: kitId,
CreatedAt: _timeProvider.GetUtcNow(),
Entries: entries.ToList());
var manifestJson = JsonSerializer.Serialize(manifest, SerializerOptions);
Directory.CreateDirectory(outputDirectory);
File.WriteAllText(manifestPath, manifestJson, Encoding.UTF8);
}
/// <summary>
/// Generates the contents of a checksum file in the standard format.
/// </summary>
public static string GenerateChecksumFileContent(string sha256Hash, string fileName)
{
return $"{sha256Hash} {fileName}";
}
/// <summary>
/// Verifies that a bundle matches its expected SHA256 hash.
/// </summary>
public bool VerifyBundleHash(byte[] bundleBytes, string expectedSha256)
{
var actualHash = _cryptoHash.ComputeHashHexForPurpose(bundleBytes, HashPurpose.Content);
return string.Equals(actualHash, expectedSha256, StringComparison.OrdinalIgnoreCase);
}
private OfflineKitAddResult WriteBundle(
string outputDirectory,
byte[] bundleBytes,
string artifactRelativePath,
string checksumRelativePath,
string fileName)
{
try
{
// Compute SHA256
var sha256Hash = _cryptoHash.ComputeHashHexForPurpose(bundleBytes, HashPurpose.Content);
// Determine full paths
var artifactFullPath = Path.Combine(outputDirectory, artifactRelativePath);
var checksumFullPath = Path.Combine(outputDirectory, checksumRelativePath);
// Check for existing artifact (immutability)
if (File.Exists(artifactFullPath))
{
return new OfflineKitAddResult(
Success: false,
ArtifactPath: artifactRelativePath,
ChecksumPath: checksumRelativePath,
Sha256Hash: sha256Hash,
ErrorMessage: $"Artifact already exists at '{artifactFullPath}'. Offline kit artefacts are immutable.");
}
// Create directories
var artifactDir = Path.GetDirectoryName(artifactFullPath);
var checksumDir = Path.GetDirectoryName(checksumFullPath);
if (!string.IsNullOrEmpty(artifactDir))
{
Directory.CreateDirectory(artifactDir);
}
if (!string.IsNullOrEmpty(checksumDir))
{
Directory.CreateDirectory(checksumDir);
}
// Write bundle (bit-for-bit copy)
File.WriteAllBytes(artifactFullPath, bundleBytes);
// Write checksum file
var checksumContent = GenerateChecksumFileContent(sha256Hash, fileName);
File.WriteAllText(checksumFullPath, checksumContent, Encoding.UTF8);
return new OfflineKitAddResult(
Success: true,
ArtifactPath: artifactRelativePath,
ChecksumPath: checksumRelativePath,
Sha256Hash: sha256Hash);
}
catch (Exception ex)
{
return new OfflineKitAddResult(
Success: false,
ArtifactPath: artifactRelativePath,
ChecksumPath: checksumRelativePath,
Sha256Hash: string.Empty,
ErrorMessage: ex.Message);
}
}
}
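
An assembly sketch for a single attestation bundle, assuming the bundle bytes were produced elsewhere and an ICryptoHash instance is supplied by the host; identifiers, hashes, and paths are placeholders.

using System;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.OfflineKit;

public static class OfflineKitPackagingSketch
{
    public static void PackageAttestationKit(ICryptoHash cryptoHash, byte[] attestationBundleBytes)
    {
        var packager = new OfflineKitPackager(cryptoHash);
        var outputDirectory = "/staging/offline-kit/2025.12"; // placeholder

        var request = new OfflineKitAttestationRequest(
            KitId: "kit-2025.12",                      // placeholder identifiers
            ExportId: "export-0001",
            AttestationId: "attest-0001",
            RootHash: "placeholder-root-hash-hex",     // hex digest; the "sha256:" prefix is added by the entry
            BundleBytes: attestationBundleBytes,
            CreatedAt: DateTimeOffset.UtcNow);

        var added = packager.AddAttestationBundle(outputDirectory, request);
        if (!added.Success)
        {
            throw new InvalidOperationException(added.ErrorMessage);
        }

        // The manifest can be written only once; a second call throws because kit artefacts are immutable.
        var entry = packager.CreateAttestationEntry(request, added.Sha256Hash);
        packager.WriteManifest(outputDirectory, "kit-2025.12", new object[] { entry });
    }
}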

View File

@@ -0,0 +1,338 @@
using System.Buffers.Binary;
using System.Formats.Tar;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Cryptography;
namespace StellaOps.ExportCenter.Core.PortableEvidence;
/// <summary>
/// Builds portable evidence export archives that wrap EvidenceLocker portable bundles for air-gap delivery.
/// </summary>
public sealed class PortableEvidenceExportBuilder
{
private const string ExportVersion = "portable-evidence/v1";
private const string PortableBundleVersion = "v1";
private const string InnerBundleFileName = "portable-bundle-v1.tgz";
private const string ExportArchiveFileName = "export-portable-bundle-v1.tgz";
private static readonly DateTimeOffset FixedTimestamp = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
private static readonly UnixFileMode DefaultFileMode =
UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead;
private static readonly UnixFileMode ExecutableFileMode =
UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute |
UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
UnixFileMode.OtherRead | UnixFileMode.OtherExecute;
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
private readonly ICryptoHash _cryptoHash;
private readonly TimeProvider _timeProvider;
public PortableEvidenceExportBuilder(ICryptoHash cryptoHash, TimeProvider? timeProvider = null)
{
_cryptoHash = cryptoHash ?? throw new ArgumentNullException(nameof(cryptoHash));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Builds a portable evidence export archive from the provided request.
/// </summary>
public PortableEvidenceExportResult Build(PortableEvidenceExportRequest request, CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (request.ExportId == Guid.Empty)
{
throw new ArgumentException("Export identifier must be provided.", nameof(request));
}
if (request.BundleId == Guid.Empty)
{
throw new ArgumentException("Bundle identifier must be provided.", nameof(request));
}
if (request.TenantId == Guid.Empty)
{
throw new ArgumentException("Tenant identifier must be provided.", nameof(request));
}
if (string.IsNullOrWhiteSpace(request.PortableBundlePath))
{
throw new ArgumentException("Portable bundle path must be provided.", nameof(request));
}
var fullPath = Path.GetFullPath(request.PortableBundlePath);
if (!File.Exists(fullPath))
{
throw new FileNotFoundException($"Portable bundle file '{fullPath}' not found.", fullPath);
}
cancellationToken.ThrowIfCancellationRequested();
// Read and hash the portable bundle
var portableBundleBytes = File.ReadAllBytes(fullPath);
var portableBundleSha256 = _cryptoHash.ComputeHashHexForPurpose(portableBundleBytes, HashPurpose.Content);
// Build export document
var exportDoc = new PortableEvidenceExportDocument(
ExportVersion,
request.ExportId.ToString("D"),
request.BundleId.ToString("D"),
request.TenantId.ToString("D"),
_timeProvider.GetUtcNow(),
string.Empty, // Root hash placeholder; computed below from the component hashes
request.SourceUri,
PortableBundleVersion,
portableBundleSha256,
request.Metadata);
var exportJson = JsonSerializer.Serialize(exportDoc, SerializerOptions);
var exportJsonSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(exportJson), HashPurpose.Content);
// Build checksums
var checksums = BuildChecksums(exportJsonSha256, portableBundleSha256);
var checksumsSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(checksums), HashPurpose.Content);
// Build README
var readme = BuildReadme(request, portableBundleSha256);
var readmeSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(readme), HashPurpose.Content);
// Build verification script
var verifyScript = BuildVerificationScript();
var verifyScriptSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(verifyScript), HashPurpose.Content);
// Compute root hash
var rootHash = ComputeRootHash(exportJsonSha256, portableBundleSha256, checksumsSha256, readmeSha256, verifyScriptSha256);
// Update export document with root hash
var finalExportDoc = exportDoc with { RootHash = rootHash };
var finalExportJson = JsonSerializer.Serialize(finalExportDoc, SerializerOptions);
// Rebuild checksums with final export.json hash
var finalExportJsonSha256 = _cryptoHash.ComputeHashHexForPurpose(Encoding.UTF8.GetBytes(finalExportJson), HashPurpose.Content);
var finalChecksums = BuildChecksums(finalExportJsonSha256, portableBundleSha256);
// Create the export archive
var exportStream = CreateExportArchive(
finalExportJson,
portableBundleBytes,
finalChecksums,
readme,
verifyScript);
exportStream.Position = 0;
return new PortableEvidenceExportResult(
finalExportDoc,
finalExportJson,
rootHash,
portableBundleSha256,
exportStream);
}
private string ComputeRootHash(params string[] hashes)
{
var builder = new StringBuilder();
foreach (var hash in hashes.OrderBy(h => h, StringComparer.Ordinal))
{
builder.Append(hash).Append('\0');
}
var bytes = Encoding.UTF8.GetBytes(builder.ToString());
return _cryptoHash.ComputeHashHexForPurpose(bytes, HashPurpose.Content);
}
private static string BuildChecksums(string exportJsonSha256, string portableBundleSha256)
{
var builder = new StringBuilder();
builder.AppendLine("# Portable evidence export checksums (sha256)");
builder.Append(exportJsonSha256).AppendLine(" export.json");
builder.Append(portableBundleSha256).Append(" ").AppendLine(InnerBundleFileName);
return builder.ToString();
}
private static string BuildReadme(PortableEvidenceExportRequest request, string portableBundleSha256)
{
var builder = new StringBuilder();
builder.AppendLine("# Portable Evidence Export");
builder.AppendLine();
builder.AppendLine("## Overview");
builder.Append("Export ID: ").AppendLine(request.ExportId.ToString("D"));
builder.Append("Bundle ID: ").AppendLine(request.BundleId.ToString("D"));
builder.Append("Tenant ID: ").AppendLine(request.TenantId.ToString("D"));
builder.AppendLine();
builder.AppendLine("## Contents");
builder.AppendLine("- `export.json` - Export metadata with bundle references and hashes");
builder.Append("- `").Append(InnerBundleFileName).AppendLine("` - Original EvidenceLocker portable bundle (unmodified)");
builder.AppendLine("- `checksums.txt` - SHA-256 checksums for verification");
builder.AppendLine("- `verify-export.sh` - Verification script for offline use");
builder.AppendLine("- `README.md` - This file");
builder.AppendLine();
builder.AppendLine("## Verification Steps");
builder.AppendLine();
builder.AppendLine("### 1. Extract the archive");
builder.AppendLine("```sh");
builder.Append("tar -xzf ").AppendLine(ExportArchiveFileName);
builder.AppendLine("```");
builder.AppendLine();
builder.AppendLine("### 2. Verify checksums");
builder.AppendLine("```sh");
builder.AppendLine("./verify-export.sh");
builder.AppendLine("# Or manually:");
builder.AppendLine("sha256sum --check checksums.txt");
builder.AppendLine("```");
builder.AppendLine();
builder.AppendLine("### 3. Verify the inner evidence bundle");
builder.AppendLine("```sh");
builder.Append("stella evidence verify --bundle ").AppendLine(InnerBundleFileName);
builder.AppendLine("```");
builder.AppendLine();
builder.AppendLine("## Expected Headers");
builder.AppendLine("When downloading this export, expect the following response headers:");
builder.AppendLine("- `Content-Type: application/gzip`");
builder.AppendLine("- `ETag: \"<sha256-of-archive>\"`");
builder.AppendLine("- `Last-Modified: <creation-timestamp>`");
builder.AppendLine("- `X-Stella-Bundle-Id: <bundle-id>`");
builder.AppendLine("- `X-Stella-Export-Id: <export-id>`");
builder.AppendLine();
builder.AppendLine("## Schema Links");
builder.AppendLine("- Evidence bundle: `docs/modules/evidence-locker/bundle-packaging.schema.json`");
builder.AppendLine("- Export schema: `docs/modules/export-center/portable-export.schema.json`");
builder.AppendLine();
builder.AppendLine("## Portable Bundle Hash");
builder.Append("SHA-256: `").Append(portableBundleSha256).AppendLine("`");
return builder.ToString();
}
private static string BuildVerificationScript()
{
var builder = new StringBuilder();
builder.AppendLine("#!/usr/bin/env sh");
builder.AppendLine("# Portable Evidence Export Verification Script");
builder.AppendLine("# No network access required");
builder.AppendLine();
builder.AppendLine("set -eu");
builder.AppendLine();
builder.AppendLine("# Verify checksums");
builder.AppendLine("echo \"Verifying checksums...\"");
builder.AppendLine("if command -v sha256sum >/dev/null 2>&1; then");
builder.AppendLine(" sha256sum --check checksums.txt");
builder.AppendLine("elif command -v shasum >/dev/null 2>&1; then");
builder.AppendLine(" shasum -a 256 --check checksums.txt");
builder.AppendLine("else");
builder.AppendLine(" echo \"Error: sha256sum or shasum required\" >&2");
builder.AppendLine(" exit 1");
builder.AppendLine("fi");
builder.AppendLine();
builder.AppendLine("echo \"\"");
builder.AppendLine("echo \"Checksums verified successfully.\"");
builder.AppendLine("echo \"\"");
builder.AppendLine();
builder.AppendLine("# Check for stella CLI");
builder.Append("PORTABLE_BUNDLE=\"").Append(InnerBundleFileName).AppendLine("\"");
builder.AppendLine("if command -v stella >/dev/null 2>&1; then");
builder.AppendLine(" echo \"Verifying evidence bundle with stella CLI...\"");
builder.AppendLine(" stella evidence verify --bundle \"$PORTABLE_BUNDLE\"");
builder.AppendLine("else");
builder.AppendLine(" echo \"Note: stella CLI not found. Manual verification of $PORTABLE_BUNDLE recommended.\"");
builder.AppendLine(" echo \"Install stella CLI and run: stella evidence verify --bundle $PORTABLE_BUNDLE\"");
builder.AppendLine("fi");
builder.AppendLine();
builder.AppendLine("echo \"\"");
builder.AppendLine("echo \"Verification complete.\"");
return builder.ToString();
}
private MemoryStream CreateExportArchive(
string exportJson,
byte[] portableBundleBytes,
string checksums,
string readme,
string verifyScript)
{
var stream = new MemoryStream();
using (var gzip = new GZipStream(stream, CompressionLevel.SmallestSize, leaveOpen: true))
using (var tar = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: true))
{
// Write files in lexical order for determinism
WriteTextEntry(tar, "README.md", readme, DefaultFileMode);
WriteTextEntry(tar, "checksums.txt", checksums, DefaultFileMode);
WriteTextEntry(tar, "export.json", exportJson, DefaultFileMode);
WriteBytesEntry(tar, InnerBundleFileName, portableBundleBytes, DefaultFileMode);
WriteTextEntry(tar, "verify-export.sh", verifyScript, ExecutableFileMode);
}
ApplyDeterministicGzipHeader(stream);
return stream;
}
private static void WriteTextEntry(TarWriter writer, string path, string content, UnixFileMode mode)
{
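// Zeroed uid/gid/user names plus the fixed modification time keep every tar entry byte-stable across builds.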
var bytes = Encoding.UTF8.GetBytes(content);
using var dataStream = new MemoryStream(bytes);
var entry = new PaxTarEntry(TarEntryType.RegularFile, path)
{
Mode = mode,
ModificationTime = FixedTimestamp,
Uid = 0,
Gid = 0,
UserName = string.Empty,
GroupName = string.Empty,
DataStream = dataStream
};
writer.WriteEntry(entry);
}
private static void WriteBytesEntry(TarWriter writer, string path, byte[] content, UnixFileMode mode)
{
using var dataStream = new MemoryStream(content);
var entry = new PaxTarEntry(TarEntryType.RegularFile, path)
{
Mode = mode,
ModificationTime = FixedTimestamp,
Uid = 0,
Gid = 0,
UserName = string.Empty,
GroupName = string.Empty,
DataStream = dataStream
};
writer.WriteEntry(entry);
}
private static void ApplyDeterministicGzipHeader(MemoryStream stream)
{
if (stream.Length < 10)
{
throw new InvalidOperationException("GZip header not fully written for portable evidence export.");
}
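// The gzip MTIME field occupies bytes 4-7 of the header; stamping it with the fixed timestamp keeps repeated exports byte-identical.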
var seconds = checked((int)(FixedTimestamp - DateTimeOffset.UnixEpoch).TotalSeconds);
Span<byte> buffer = stackalloc byte[4];
BinaryPrimitives.WriteInt32LittleEndian(buffer, seconds);
var originalPosition = stream.Position;
stream.Position = 4;
stream.Write(buffer);
stream.Position = originalPosition;
}
}

View File

@@ -0,0 +1,63 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.Core.PortableEvidence;
/// <summary>
/// Request to create a portable evidence export.
/// </summary>
public sealed record PortableEvidenceExportRequest(
Guid ExportId,
Guid BundleId,
Guid TenantId,
string PortableBundlePath,
string? SourceUri = null,
IReadOnlyDictionary<string, string>? Metadata = null);
/// <summary>
/// Result of building a portable evidence export.
/// </summary>
public sealed record PortableEvidenceExportResult(
PortableEvidenceExportDocument ExportDocument,
string ExportDocumentJson,
string RootHash,
string PortableBundleSha256,
MemoryStream ExportStream);
/// <summary>
/// The export.json document for portable evidence exports.
/// </summary>
public sealed record PortableEvidenceExportDocument(
[property: JsonPropertyName("version")] string Version,
[property: JsonPropertyName("exportId")] string ExportId,
[property: JsonPropertyName("bundleId")] string BundleId,
[property: JsonPropertyName("tenantId")] string TenantId,
[property: JsonPropertyName("createdAtUtc")] DateTimeOffset CreatedAtUtc,
[property: JsonPropertyName("rootHash")] string RootHash,
[property: JsonPropertyName("sourceUri")] string? SourceUri,
[property: JsonPropertyName("portableVersion")] string PortableVersion,
[property: JsonPropertyName("portableBundleSha256")] string PortableBundleSha256,
[property: JsonPropertyName("metadata")] IReadOnlyDictionary<string, string>? Metadata);
/// <summary>
/// Export status for portable evidence.
/// </summary>
public enum PortableEvidenceExportStatus
{
Pending = 1,
Materialising = 2,
Ready = 3,
Failed = 4
}
/// <summary>
/// Status response for portable evidence export.
/// </summary>
public sealed record PortableEvidenceExportStatusResponse(
[property: JsonPropertyName("exportId")] string ExportId,
[property: JsonPropertyName("bundleId")] string BundleId,
[property: JsonPropertyName("status")] string Status,
[property: JsonPropertyName("rootHash")] string? RootHash,
[property: JsonPropertyName("portableVersion")] string? PortableVersion,
[property: JsonPropertyName("downloadUri")] string? DownloadUri,
[property: JsonPropertyName("pendingReason")] string? PendingReason,
[property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt);

View File

@@ -0,0 +1,559 @@
using System.Formats.Tar;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using StellaOps.ExportCenter.Core.AttestationBundle;
using Xunit;
namespace StellaOps.ExportCenter.Tests;
public sealed class AttestationBundleBuilderTests : IDisposable
{
private readonly FakeTimeProvider _timeProvider;
private readonly FakeCryptoHash _cryptoHash;
private readonly AttestationBundleBuilder _builder;
public AttestationBundleBuilderTests()
{
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero));
_cryptoHash = new FakeCryptoHash();
_builder = new AttestationBundleBuilder(_cryptoHash, _timeProvider);
}
public void Dispose()
{
// No cleanup needed
}
[Fact]
public void Build_ProducesValidExport()
{
var request = CreateTestRequest();
var result = _builder.Build(request);
Assert.NotNull(result);
Assert.NotNull(result.Metadata);
Assert.NotEmpty(result.MetadataJson);
Assert.NotEmpty(result.RootHash);
Assert.True(result.ExportStream.Length > 0);
}
[Fact]
public void Build_MetadataContainsCorrectValues()
{
var exportId = Guid.NewGuid();
var attestationId = Guid.NewGuid();
var tenantId = Guid.NewGuid();
var sourceUri = "https://attestor.example.com/v1/statements/abc123";
var request = new AttestationBundleExportRequest(
exportId,
attestationId,
tenantId,
CreateTestDsseEnvelope(),
CreateTestStatement(),
SourceUri: sourceUri,
StatementVersion: "v2");
var result = _builder.Build(request);
Assert.Equal(exportId.ToString("D"), result.Metadata.ExportId);
Assert.Equal(attestationId.ToString("D"), result.Metadata.AttestationId);
Assert.Equal(tenantId.ToString("D"), result.Metadata.TenantId);
Assert.Equal(sourceUri, result.Metadata.SourceUri);
Assert.Equal("v2", result.Metadata.StatementVersion);
Assert.Equal("attestation-bundle/v1", result.Metadata.Version);
}
[Fact]
public void Build_ProducesDeterministicOutput()
{
var exportId = new Guid("11111111-2222-3333-4444-555555555555");
var attestationId = new Guid("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee");
var tenantId = new Guid("ffffffff-1111-2222-3333-444444444444");
var request = new AttestationBundleExportRequest(
exportId,
attestationId,
tenantId,
CreateTestDsseEnvelope(),
CreateTestStatement());
var result1 = _builder.Build(request);
var result2 = _builder.Build(request);
Assert.Equal(result1.RootHash, result2.RootHash);
var bytes1 = result1.ExportStream.ToArray();
var bytes2 = result2.ExportStream.ToArray();
Assert.Equal(bytes1, bytes2);
}
[Fact]
public void Build_ArchiveContainsExpectedFiles()
{
var request = CreateTestRequest();
var result = _builder.Build(request);
var fileNames = ExtractFileNames(result.ExportStream);
Assert.Contains("attestation.dsse.json", fileNames);
Assert.Contains("statement.json", fileNames);
Assert.Contains("metadata.json", fileNames);
Assert.Contains("checksums.txt", fileNames);
Assert.Contains("verify-attestation.sh", fileNames);
}
[Fact]
public void Build_WithTransparencyEntries_IncludesTransparencyFile()
{
var entries = new List<string>
{
"{\"logIndex\":1,\"logId\":\"rekor1\"}",
"{\"logIndex\":2,\"logId\":\"rekor2\"}"
};
var request = new AttestationBundleExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
CreateTestDsseEnvelope(),
CreateTestStatement(),
TransparencyEntries: entries);
var result = _builder.Build(request);
var fileNames = ExtractFileNames(result.ExportStream);
Assert.Contains("transparency.ndjson", fileNames);
}
[Fact]
public void Build_WithoutTransparencyEntries_OmitsTransparencyFile()
{
var request = CreateTestRequest();
var result = _builder.Build(request);
var fileNames = ExtractFileNames(result.ExportStream);
Assert.DoesNotContain("transparency.ndjson", fileNames);
}
[Fact]
public void Build_TransparencyEntriesSortedLexically()
{
var entries = new List<string>
{
"{\"logIndex\":3,\"logId\":\"z-log\"}",
"{\"logIndex\":1,\"logId\":\"a-log\"}",
"{\"logIndex\":2,\"logId\":\"m-log\"}"
};
var request = new AttestationBundleExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
CreateTestDsseEnvelope(),
CreateTestStatement(),
TransparencyEntries: entries);
var result = _builder.Build(request);
var content = ExtractFileContent(result.ExportStream, "transparency.ndjson");
var lines = content.Split('\n', StringSplitOptions.RemoveEmptyEntries);
// Should be sorted lexically
Assert.Equal(3, lines.Length);
Assert.Contains("a-log", lines[0]);
Assert.Contains("m-log", lines[1]);
Assert.Contains("z-log", lines[2]);
}
[Fact]
public void Build_DsseEnvelopeIsUnmodified()
{
var originalDsse = CreateTestDsseEnvelope();
var request = new AttestationBundleExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
originalDsse,
CreateTestStatement());
var result = _builder.Build(request);
var extractedDsse = ExtractFileContent(result.ExportStream, "attestation.dsse.json");
Assert.Equal(originalDsse, extractedDsse);
}
[Fact]
public void Build_StatementIsUnmodified()
{
var originalStatement = CreateTestStatement();
var request = new AttestationBundleExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
CreateTestDsseEnvelope(),
originalStatement);
var result = _builder.Build(request);
var extractedStatement = ExtractFileContent(result.ExportStream, "statement.json");
Assert.Equal(originalStatement, extractedStatement);
}
[Fact]
public void Build_TarEntriesHaveDeterministicMetadata()
{
var request = CreateTestRequest();
var result = _builder.Build(request);
var entries = ExtractTarEntryMetadata(result.ExportStream);
var expectedTimestamp = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
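// Entry timestamps are pinned to a fixed epoch rather than the injected clock (2025-01-15 in this fixture), so archives stay reproducible.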
foreach (var entry in entries)
{
Assert.Equal(0, entry.Uid);
Assert.Equal(0, entry.Gid);
Assert.Equal(expectedTimestamp, entry.ModificationTime);
}
}
[Fact]
public void Build_VerifyScriptHasExecutePermission()
{
var request = CreateTestRequest();
var result = _builder.Build(request);
var entries = ExtractTarEntryMetadata(result.ExportStream);
var scriptEntry = entries.FirstOrDefault(e => e.Name == "verify-attestation.sh");
Assert.NotNull(scriptEntry);
Assert.True(scriptEntry.Mode.HasFlag(UnixFileMode.UserExecute));
}
[Fact]
public void Build_VerifyScriptIsPosixCompliant()
{
var request = CreateTestRequest();
var result = _builder.Build(request);
var script = ExtractFileContent(result.ExportStream, "verify-attestation.sh");
Assert.StartsWith("#!/usr/bin/env sh", script);
Assert.Contains("sha256sum", script);
Assert.Contains("shasum", script);
Assert.Contains("stella attest verify", script);
Assert.DoesNotContain("curl", script);
Assert.DoesNotContain("wget", script);
}
[Fact]
public void Build_VerifyScriptContainsAttestationId()
{
var attestationId = Guid.NewGuid();
var request = new AttestationBundleExportRequest(
Guid.NewGuid(),
attestationId,
Guid.NewGuid(),
CreateTestDsseEnvelope(),
CreateTestStatement());
var result = _builder.Build(request);
var script = ExtractFileContent(result.ExportStream, "verify-attestation.sh");
Assert.Contains(attestationId.ToString("D"), script);
}
[Fact]
public void Build_ChecksumsContainsAllFiles()
{
var request = CreateTestRequest();
var result = _builder.Build(request);
var checksums = ExtractFileContent(result.ExportStream, "checksums.txt");
Assert.Contains("attestation.dsse.json", checksums);
Assert.Contains("statement.json", checksums);
Assert.Contains("metadata.json", checksums);
}
[Fact]
public void Build_WithSubjectDigests_IncludesInMetadata()
{
var digests = new List<AttestationSubjectDigest>
{
new("image1", "sha256:abc123", "sha256"),
new("image2", "sha256:def456", "sha256")
};
var request = new AttestationBundleExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
CreateTestDsseEnvelope(),
CreateTestStatement(),
SubjectDigests: digests);
var result = _builder.Build(request);
Assert.NotNull(result.Metadata.SubjectDigests);
Assert.Equal(2, result.Metadata.SubjectDigests.Count);
Assert.Equal("image1", result.Metadata.SubjectDigests[0].Name);
Assert.Equal("sha256:abc123", result.Metadata.SubjectDigests[0].Digest);
}
[Fact]
public void Build_ThrowsForEmptyExportId()
{
var request = new AttestationBundleExportRequest(
Guid.Empty,
Guid.NewGuid(),
Guid.NewGuid(),
CreateTestDsseEnvelope(),
CreateTestStatement());
Assert.Throws<ArgumentException>(() => _builder.Build(request));
}
[Fact]
public void Build_ThrowsForEmptyAttestationId()
{
var request = new AttestationBundleExportRequest(
Guid.NewGuid(),
Guid.Empty,
Guid.NewGuid(),
CreateTestDsseEnvelope(),
CreateTestStatement());
Assert.Throws<ArgumentException>(() => _builder.Build(request));
}
[Fact]
public void Build_ThrowsForEmptyTenantId()
{
var request = new AttestationBundleExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.Empty,
CreateTestDsseEnvelope(),
CreateTestStatement());
Assert.Throws<ArgumentException>(() => _builder.Build(request));
}
[Fact]
public void Build_ThrowsForEmptyDsseEnvelope()
{
var request = new AttestationBundleExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
string.Empty,
CreateTestStatement());
Assert.Throws<ArgumentException>(() => _builder.Build(request));
}
[Fact]
public void Build_ThrowsForEmptyStatement()
{
var request = new AttestationBundleExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
CreateTestDsseEnvelope(),
string.Empty);
Assert.Throws<ArgumentException>(() => _builder.Build(request));
}
[Fact]
public void Build_ThrowsForNullRequest()
{
Assert.Throws<ArgumentNullException>(() => _builder.Build(null!));
}
[Fact]
public void Build_DefaultStatementVersionIsV1()
{
var request = new AttestationBundleExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
CreateTestDsseEnvelope(),
CreateTestStatement());
var result = _builder.Build(request);
Assert.Equal("v1", result.Metadata.StatementVersion);
}
private static AttestationBundleExportRequest CreateTestRequest()
{
return new AttestationBundleExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
CreateTestDsseEnvelope(),
CreateTestStatement());
}
private static string CreateTestDsseEnvelope()
{
return JsonSerializer.Serialize(new
{
payloadType = "application/vnd.in-toto+json",
payload = "eyJ0eXBlIjoiaHR0cHM6Ly9pbi10b3RvLmlvL1N0YXRlbWVudC92MSJ9",
signatures = new[]
{
new { keyid = "key-1", sig = "signature-data-here" }
}
});
}
private static string CreateTestStatement()
{
return JsonSerializer.Serialize(new
{
_type = "https://in-toto.io/Statement/v1",
subject = new[]
{
new { name = "test-image", digest = new { sha256 = "abc123" } }
},
predicateType = "https://slsa.dev/provenance/v1",
predicate = new { buildType = "test" }
});
}
private static List<string> ExtractFileNames(MemoryStream exportStream)
{
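// Decompresses the gzip+tar stream in place and rewinds it so later helpers can reuse the same MemoryStream.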
exportStream.Position = 0;
var fileNames = new List<string>();
using var gzip = new GZipStream(exportStream, CompressionMode.Decompress, leaveOpen: true);
using var tar = new TarReader(gzip, leaveOpen: true);
TarEntry? entry;
while ((entry = tar.GetNextEntry()) is not null)
{
fileNames.Add(entry.Name);
}
exportStream.Position = 0;
return fileNames;
}
private static string ExtractFileContent(MemoryStream exportStream, string fileName)
{
exportStream.Position = 0;
using var gzip = new GZipStream(exportStream, CompressionMode.Decompress, leaveOpen: true);
using var tar = new TarReader(gzip, leaveOpen: true);
TarEntry? entry;
while ((entry = tar.GetNextEntry()) is not null)
{
if (entry.Name == fileName && entry.DataStream is not null)
{
using var reader = new StreamReader(entry.DataStream);
var content = reader.ReadToEnd();
exportStream.Position = 0;
return content;
}
}
exportStream.Position = 0;
throw new FileNotFoundException($"File '{fileName}' not found in archive.");
}
private static List<TarEntryMetadataInfo> ExtractTarEntryMetadata(MemoryStream exportStream)
{
exportStream.Position = 0;
var entries = new List<TarEntryMetadataInfo>();
using var gzip = new GZipStream(exportStream, CompressionMode.Decompress, leaveOpen: true);
using var tar = new TarReader(gzip, leaveOpen: true);
TarEntry? entry;
while ((entry = tar.GetNextEntry()) is not null)
{
entries.Add(new TarEntryMetadataInfo(
entry.Name,
entry.Uid,
entry.Gid,
entry.ModificationTime,
entry.Mode));
}
exportStream.Position = 0;
return entries;
}
private sealed record TarEntryMetadataInfo(
string Name,
int Uid,
int Gid,
DateTimeOffset ModificationTime,
UnixFileMode Mode);
}
/// <summary>
/// Fake crypto hash for testing.
/// </summary>
internal sealed class FakeCryptoHash : StellaOps.Cryptography.ICryptoHash
{
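// Always hashes with SHA-256 and ignores the requested algorithm/purpose; deterministic output is all the tests need.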
public byte[] ComputeHash(ReadOnlySpan<byte> data, string? algorithmId = null)
{
using var sha256 = System.Security.Cryptography.SHA256.Create();
return sha256.ComputeHash(data.ToArray());
}
public string ComputeHashHex(ReadOnlySpan<byte> data, string? algorithmId = null)
{
var hash = ComputeHash(data, algorithmId);
return Convert.ToHexString(hash).ToLowerInvariant();
}
public string ComputeHashBase64(ReadOnlySpan<byte> data, string? algorithmId = null)
{
var hash = ComputeHash(data, algorithmId);
return Convert.ToBase64String(hash);
}
public ValueTask<byte[]> ComputeHashAsync(Stream stream, string? algorithmId = null, CancellationToken cancellationToken = default)
{
using var sha256 = System.Security.Cryptography.SHA256.Create();
var hash = sha256.ComputeHash(stream);
return new ValueTask<byte[]>(hash);
}
public async ValueTask<string> ComputeHashHexAsync(Stream stream, string? algorithmId = null, CancellationToken cancellationToken = default)
{
var hash = await ComputeHashAsync(stream, algorithmId, cancellationToken);
return Convert.ToHexString(hash).ToLowerInvariant();
}
public byte[] ComputeHashForPurpose(ReadOnlySpan<byte> data, string purpose)
=> ComputeHash(data, null);
public string ComputeHashHexForPurpose(ReadOnlySpan<byte> data, string purpose)
=> ComputeHashHex(data, null);
public string ComputeHashBase64ForPurpose(ReadOnlySpan<byte> data, string purpose)
=> ComputeHashBase64(data, null);
public ValueTask<byte[]> ComputeHashForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
=> ComputeHashAsync(stream, null, cancellationToken);
public ValueTask<string> ComputeHashHexForPurposeAsync(Stream stream, string purpose, CancellationToken cancellationToken = default)
=> ComputeHashHexAsync(stream, null, cancellationToken);
public string GetAlgorithmForPurpose(string purpose) => "sha256";
public string GetHashPrefix(string purpose) => "sha256:";
public string ComputePrefixedHashForPurpose(ReadOnlySpan<byte> data, string purpose)
=> GetHashPrefix(purpose) + ComputeHashHexForPurpose(data, purpose);
}

View File

@@ -0,0 +1,359 @@
using System.Formats.Tar;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.BootstrapPack;
using Xunit;
namespace StellaOps.ExportCenter.Tests;
public sealed class BootstrapPackBuilderTests : IDisposable
{
private readonly string _tempDir;
private readonly BootstrapPackBuilder _builder;
private readonly ICryptoHash _cryptoHash;
public BootstrapPackBuilderTests()
{
_tempDir = Path.Combine(Path.GetTempPath(), $"bootstrap-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
_cryptoHash = new DefaultCryptoHash();
_builder = new BootstrapPackBuilder(_cryptoHash);
}
public void Dispose()
{
if (Directory.Exists(_tempDir))
{
Directory.Delete(_tempDir, recursive: true);
}
}
[Fact]
public void Build_WithCharts_ProducesValidPack()
{
var chartPath = CreateTestFile("Chart.yaml", "apiVersion: v2\nname: test-chart\nversion: 1.0.0");
var request = new BootstrapPackBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Charts: new[] { new BootstrapPackChartSource("test-chart", "1.0.0", chartPath) },
Images: Array.Empty<BootstrapPackImageSource>());
var result = _builder.Build(request);
Assert.NotNull(result);
Assert.NotNull(result.Manifest);
Assert.NotEmpty(result.ManifestJson);
Assert.NotEmpty(result.RootHash);
Assert.NotEmpty(result.ArtifactSha256);
Assert.True(result.PackStream.Length > 0);
Assert.Single(result.Manifest.Charts);
}
[Fact]
public void Build_WithImages_ProducesValidPack()
{
var blobPath = CreateTestFile("blob", "image layer content");
var request = new BootstrapPackBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Charts: Array.Empty<BootstrapPackChartSource>(),
Images: new[]
{
new BootstrapPackImageSource(
"registry.example.com/app",
"v1.0.0",
"sha256:abc123",
blobPath)
});
var result = _builder.Build(request);
Assert.NotNull(result);
Assert.Single(result.Manifest.Images);
Assert.Equal("registry.example.com/app", result.Manifest.Images[0].Repository);
}
[Fact]
public void Build_WithChartsAndImages_IncludesAll()
{
var chartPath = CreateTestFile("Chart.yaml", "apiVersion: v2\nname: stellaops\nversion: 2.0.0");
var blobPath = CreateTestFile("blob", "container layer");
var request = new BootstrapPackBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Charts: new[] { new BootstrapPackChartSource("stellaops", "2.0.0", chartPath) },
Images: new[]
{
new BootstrapPackImageSource("ghcr.io/stellaops/scanner", "v3.0.0", "sha256:def456", blobPath)
});
var result = _builder.Build(request);
Assert.Single(result.Manifest.Charts);
Assert.Single(result.Manifest.Images);
}
[Fact]
public void Build_ProducesDeterministicOutput()
{
var chartPath = CreateTestFile("Chart-determ.yaml", "apiVersion: v2\nname: determ\nversion: 1.0.0");
var exportId = new Guid("11111111-2222-3333-4444-555555555555");
var tenantId = new Guid("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee");
var request = new BootstrapPackBuildRequest(
exportId,
tenantId,
Charts: new[] { new BootstrapPackChartSource("determ", "1.0.0", chartPath) },
Images: Array.Empty<BootstrapPackImageSource>());
var result1 = _builder.Build(request);
var result2 = _builder.Build(request);
Assert.Equal(result1.RootHash, result2.RootHash);
Assert.Equal(result1.ArtifactSha256, result2.ArtifactSha256);
var bytes1 = result1.PackStream.ToArray();
var bytes2 = result2.PackStream.ToArray();
Assert.Equal(bytes1, bytes2);
}
[Fact]
public void Build_ArchiveContainsExpectedFiles()
{
var chartPath = CreateTestFile("Chart.yaml", "apiVersion: v2\nname: archive-test\nversion: 1.0.0");
var blobPath = CreateTestFile("layer.tar", "layer content");
var request = new BootstrapPackBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Charts: new[] { new BootstrapPackChartSource("archive-test", "1.0.0", chartPath) },
Images: new[]
{
new BootstrapPackImageSource("test/image", "latest", "sha256:xyz789", blobPath)
});
var result = _builder.Build(request);
var fileNames = ExtractFileNames(result.PackStream);
Assert.Contains("manifest.json", fileNames);
Assert.Contains("checksums.txt", fileNames);
Assert.Contains("images/oci-layout", fileNames);
Assert.Contains("images/index.json", fileNames);
Assert.Contains(fileNames, f => f.StartsWith("charts/"));
Assert.Contains(fileNames, f => f.StartsWith("images/blobs/"));
}
[Fact]
public void Build_TarEntriesHaveDeterministicMetadata()
{
var chartPath = CreateTestFile("Chart.yaml", "apiVersion: v2\nname: metadata-test\nversion: 1.0.0");
var request = new BootstrapPackBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Charts: new[] { new BootstrapPackChartSource("metadata-test", "1.0.0", chartPath) },
Images: Array.Empty<BootstrapPackImageSource>());
var result = _builder.Build(request);
var entries = ExtractTarEntryMetadata(result.PackStream);
var expectedTimestamp = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
foreach (var entry in entries)
{
Assert.Equal(0, entry.Uid);
Assert.Equal(0, entry.Gid);
Assert.Equal(string.Empty, entry.UserName);
Assert.Equal(string.Empty, entry.GroupName);
Assert.Equal(expectedTimestamp, entry.ModificationTime);
}
}
[Fact]
public void Build_WithChartDirectory_IncludesAllFiles()
{
var chartDir = Path.Combine(_tempDir, "test-chart");
Directory.CreateDirectory(chartDir);
Directory.CreateDirectory(Path.Combine(chartDir, "templates"));
File.WriteAllText(Path.Combine(chartDir, "Chart.yaml"), "apiVersion: v2\nname: dir-chart\nversion: 1.0.0");
File.WriteAllText(Path.Combine(chartDir, "values.yaml"), "replicaCount: 1");
File.WriteAllText(Path.Combine(chartDir, "templates", "deployment.yaml"), "kind: Deployment");
var request = new BootstrapPackBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Charts: new[] { new BootstrapPackChartSource("dir-chart", "1.0.0", chartDir) },
Images: Array.Empty<BootstrapPackImageSource>());
var result = _builder.Build(request);
var fileNames = ExtractFileNames(result.PackStream);
Assert.Contains("charts/dir-chart-1.0.0/Chart.yaml", fileNames);
Assert.Contains("charts/dir-chart-1.0.0/values.yaml", fileNames);
Assert.Contains("charts/dir-chart-1.0.0/templates/deployment.yaml", fileNames);
}
[Fact]
public void Build_WithSignatures_IncludesSignatureEntry()
{
var chartPath = CreateTestFile("Chart.yaml", "apiVersion: v2\nname: sig-test\nversion: 1.0.0");
var sigPath = CreateTestFile("mirror-bundle.sig", "signature-content");
var request = new BootstrapPackBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Charts: new[] { new BootstrapPackChartSource("sig-test", "1.0.0", chartPath) },
Images: Array.Empty<BootstrapPackImageSource>(),
Signatures: new BootstrapPackSignatureSource("sha256:mirror123", sigPath));
var result = _builder.Build(request);
var fileNames = ExtractFileNames(result.PackStream);
Assert.NotNull(result.Manifest.Signatures);
Assert.Equal("sha256:mirror123", result.Manifest.Signatures.MirrorBundleDigest);
Assert.Contains("signatures/mirror-bundle.sig", fileNames);
}
[Fact]
public void Build_OciIndexContainsImageReferences()
{
var blobPath = CreateTestFile("layer", "image content");
var request = new BootstrapPackBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Charts: Array.Empty<BootstrapPackChartSource>(),
Images: new[]
{
new BootstrapPackImageSource("myregistry.io/app", "v1.2.3", "sha256:img123", blobPath)
});
var result = _builder.Build(request);
var indexJson = ExtractFileContent(result.PackStream, "images/index.json");
var index = JsonSerializer.Deserialize<OciImageIndex>(indexJson);
Assert.NotNull(index);
Assert.Equal(2, index.SchemaVersion);
Assert.Single(index.Manifests);
Assert.Equal("sha256:img123", index.Manifests[0].Digest);
}
[Fact]
public void Build_ThrowsForEmptyInputs()
{
var request = new BootstrapPackBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Charts: Array.Empty<BootstrapPackChartSource>(),
Images: Array.Empty<BootstrapPackImageSource>());
Assert.Throws<ArgumentException>(() => _builder.Build(request));
}
[Fact]
public void Build_ThrowsForMissingChartPath()
{
var request = new BootstrapPackBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Charts: new[] { new BootstrapPackChartSource("missing", "1.0.0", "/nonexistent/Chart.yaml") },
Images: Array.Empty<BootstrapPackImageSource>());
Assert.Throws<FileNotFoundException>(() => _builder.Build(request));
}
[Fact]
public void Build_ManifestVersionIsCorrect()
{
var chartPath = CreateTestFile("Chart.yaml", "apiVersion: v2\nname: version-test\nversion: 1.0.0");
var request = new BootstrapPackBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Charts: new[] { new BootstrapPackChartSource("version-test", "1.0.0", chartPath) },
Images: Array.Empty<BootstrapPackImageSource>());
var result = _builder.Build(request);
Assert.Equal("bootstrap/v1", result.Manifest.Version);
}
private string CreateTestFile(string fileName, string content)
{
var path = Path.Combine(_tempDir, fileName);
File.WriteAllText(path, content);
return path;
}
private static List<string> ExtractFileNames(MemoryStream packStream)
{
packStream.Position = 0;
var fileNames = new List<string>();
using var gzip = new GZipStream(packStream, CompressionMode.Decompress, leaveOpen: true);
using var tar = new TarReader(gzip, leaveOpen: true);
TarEntry? entry;
while ((entry = tar.GetNextEntry()) is not null)
{
fileNames.Add(entry.Name);
}
packStream.Position = 0;
return fileNames;
}
private static string ExtractFileContent(MemoryStream packStream, string fileName)
{
packStream.Position = 0;
using var gzip = new GZipStream(packStream, CompressionMode.Decompress, leaveOpen: true);
using var tar = new TarReader(gzip, leaveOpen: true);
TarEntry? entry;
while ((entry = tar.GetNextEntry()) is not null)
{
if (entry.Name == fileName && entry.DataStream is not null)
{
using var reader = new StreamReader(entry.DataStream);
var content = reader.ReadToEnd();
packStream.Position = 0;
return content;
}
}
packStream.Position = 0;
throw new FileNotFoundException($"File '{fileName}' not found in archive.");
}
private static List<TarEntryMetadata> ExtractTarEntryMetadata(MemoryStream packStream)
{
packStream.Position = 0;
var entries = new List<TarEntryMetadata>();
using var gzip = new GZipStream(packStream, CompressionMode.Decompress, leaveOpen: true);
using var tar = new TarReader(gzip, leaveOpen: true);
TarEntry? entry;
while ((entry = tar.GetNextEntry()) is not null)
{
entries.Add(new TarEntryMetadata(
entry.Uid,
entry.Gid,
entry.UserName ?? string.Empty,
entry.GroupName ?? string.Empty,
entry.ModificationTime));
}
packStream.Position = 0;
return entries;
}
private sealed record TarEntryMetadata(
int Uid,
int Gid,
string UserName,
string GroupName,
DateTimeOffset ModificationTime);
}

View File

@@ -0,0 +1,95 @@
using StellaOps.ExportCenter.WebService.Deprecation;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Deprecation;
public sealed class DeprecatedEndpointsRegistryTests
{
[Fact]
public void ListExports_HasCorrectSuccessorPath()
{
Assert.Equal("/v1/exports/profiles", DeprecatedEndpointsRegistry.ListExports.SuccessorPath);
}
[Fact]
public void CreateExport_HasCorrectSuccessorPath()
{
Assert.Equal("/v1/exports/evidence", DeprecatedEndpointsRegistry.CreateExport.SuccessorPath);
}
[Fact]
public void DeleteExport_HasCorrectSuccessorPath()
{
Assert.Equal("/v1/exports/runs/{id}/cancel", DeprecatedEndpointsRegistry.DeleteExport.SuccessorPath);
}
[Fact]
public void AllEndpoints_HaveDocumentationUrl()
{
Assert.NotNull(DeprecatedEndpointsRegistry.ListExports.DocumentationUrl);
Assert.NotNull(DeprecatedEndpointsRegistry.CreateExport.DocumentationUrl);
Assert.NotNull(DeprecatedEndpointsRegistry.DeleteExport.DocumentationUrl);
}
[Fact]
public void AllEndpoints_HaveReason()
{
Assert.NotNull(DeprecatedEndpointsRegistry.ListExports.Reason);
Assert.NotNull(DeprecatedEndpointsRegistry.CreateExport.Reason);
Assert.NotNull(DeprecatedEndpointsRegistry.DeleteExport.Reason);
}
[Fact]
public void GetAll_ReturnsThreeEndpoints()
{
var endpoints = DeprecatedEndpointsRegistry.GetAll();
Assert.Equal(3, endpoints.Count);
}
[Fact]
public void GetAll_ContainsGetExports()
{
var endpoints = DeprecatedEndpointsRegistry.GetAll();
Assert.Contains(endpoints, e => e.Method == "GET" && e.Pattern == "/exports");
}
[Fact]
public void GetAll_ContainsPostExports()
{
var endpoints = DeprecatedEndpointsRegistry.GetAll();
Assert.Contains(endpoints, e => e.Method == "POST" && e.Pattern == "/exports");
}
[Fact]
public void GetAll_ContainsDeleteExports()
{
var endpoints = DeprecatedEndpointsRegistry.GetAll();
Assert.Contains(endpoints, e => e.Method == "DELETE" && e.Pattern == "/exports/{id}");
}
[Fact]
public void LegacyExportsDeprecationDate_IsJanuary2025()
{
Assert.Equal(2025, DeprecatedEndpointsRegistry.LegacyExportsDeprecationDate.Year);
Assert.Equal(1, DeprecatedEndpointsRegistry.LegacyExportsDeprecationDate.Month);
}
[Fact]
public void LegacyExportsSunsetDate_IsJuly2025()
{
Assert.Equal(2025, DeprecatedEndpointsRegistry.LegacyExportsSunsetDate.Year);
Assert.Equal(7, DeprecatedEndpointsRegistry.LegacyExportsSunsetDate.Month);
}
[Fact]
public void SunsetDate_IsAfterDeprecationDate()
{
Assert.True(
DeprecatedEndpointsRegistry.LegacyExportsSunsetDate >
DeprecatedEndpointsRegistry.LegacyExportsDeprecationDate);
}
}

View File

@@ -0,0 +1,130 @@
using Microsoft.AspNetCore.Http;
using StellaOps.ExportCenter.WebService.Deprecation;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Deprecation;
public sealed class DeprecationHeaderExtensionsTests
{
[Fact]
public void AddDeprecationHeaders_SetsDeprecationHeader()
{
var context = CreateHttpContext();
var info = CreateSampleDeprecationInfo();
context.AddDeprecationHeaders(info);
Assert.True(context.Response.Headers.ContainsKey(DeprecationHeaderExtensions.DeprecationHeader));
}
[Fact]
public void AddDeprecationHeaders_SetsSunsetHeader()
{
var context = CreateHttpContext();
var info = CreateSampleDeprecationInfo();
context.AddDeprecationHeaders(info);
Assert.True(context.Response.Headers.ContainsKey(DeprecationHeaderExtensions.SunsetHeader));
}
[Fact]
public void AddDeprecationHeaders_SetsLinkHeaderWithSuccessor()
{
var context = CreateHttpContext();
var info = CreateSampleDeprecationInfo();
context.AddDeprecationHeaders(info);
var linkHeader = context.Response.Headers[DeprecationHeaderExtensions.LinkHeader].ToString();
Assert.Contains("successor-version", linkHeader);
Assert.Contains("/v1/new-endpoint", linkHeader);
}
[Fact]
public void AddDeprecationHeaders_SetsLinkHeaderWithDocumentation()
{
var context = CreateHttpContext();
var info = new DeprecationInfo(
DeprecatedAt: DateTimeOffset.UtcNow,
SunsetAt: DateTimeOffset.UtcNow.AddMonths(6),
SuccessorPath: "/v1/new",
DocumentationUrl: "https://docs.example.com/migration");
context.AddDeprecationHeaders(info);
var linkHeader = context.Response.Headers[DeprecationHeaderExtensions.LinkHeader].ToString();
Assert.Contains("deprecation", linkHeader);
Assert.Contains("https://docs.example.com/migration", linkHeader);
}
[Fact]
public void AddDeprecationHeaders_SetsWarningHeader()
{
var context = CreateHttpContext();
var info = CreateSampleDeprecationInfo();
context.AddDeprecationHeaders(info);
var warningHeader = context.Response.Headers[DeprecationHeaderExtensions.WarningHeader].ToString();
Assert.Contains("299", warningHeader);
Assert.Contains("/v1/new-endpoint", warningHeader);
}
[Fact]
public void AddDeprecationHeaders_WarningIncludesCustomReason()
{
var context = CreateHttpContext();
var info = new DeprecationInfo(
DeprecatedAt: DateTimeOffset.UtcNow,
SunsetAt: DateTimeOffset.UtcNow.AddMonths(6),
SuccessorPath: "/v1/new",
Reason: "Custom deprecation reason");
context.AddDeprecationHeaders(info);
var warningHeader = context.Response.Headers[DeprecationHeaderExtensions.WarningHeader].ToString();
Assert.Contains("Custom deprecation reason", warningHeader);
}
[Fact]
public void AddDeprecationHeaders_FormatsDateAsRfc1123()
{
var context = CreateHttpContext();
var deprecatedAt = new DateTimeOffset(2025, 1, 15, 12, 30, 45, TimeSpan.Zero);
var info = new DeprecationInfo(
DeprecatedAt: deprecatedAt,
SunsetAt: deprecatedAt.AddMonths(6),
SuccessorPath: "/v1/new");
context.AddDeprecationHeaders(info);
var deprecationHeader = context.Response.Headers[DeprecationHeaderExtensions.DeprecationHeader].ToString();
// RFC 1123 format: "ddd, dd MMM yyyy HH:mm:ss 'GMT'"
Assert.Matches(@"\w{3}, \d{2} \w{3} \d{4} \d{2}:\d{2}:\d{2} GMT", deprecationHeader);
}
[Fact]
public void CreateDeprecationFilter_ReturnsNonNullFilter()
{
var info = CreateSampleDeprecationInfo();
var filter = DeprecationHeaderExtensions.CreateDeprecationFilter(info);
Assert.NotNull(filter);
}
private static HttpContext CreateHttpContext()
{
var context = new DefaultHttpContext();
return context;
}
private static DeprecationInfo CreateSampleDeprecationInfo()
{
return new DeprecationInfo(
DeprecatedAt: DateTimeOffset.UtcNow,
SunsetAt: DateTimeOffset.UtcNow.AddMonths(6),
SuccessorPath: "/v1/new-endpoint");
}
}

View File

@@ -0,0 +1,72 @@
using StellaOps.ExportCenter.WebService.Deprecation;
using Xunit;
namespace StellaOps.ExportCenter.Tests.Deprecation;
public sealed class DeprecationInfoTests
{
[Fact]
public void IsPastSunset_WhenSunsetInFuture_ReturnsFalse()
{
var info = new DeprecationInfo(
DeprecatedAt: DateTimeOffset.UtcNow.AddMonths(-1),
SunsetAt: DateTimeOffset.UtcNow.AddMonths(6),
SuccessorPath: "/v1/new");
Assert.False(info.IsPastSunset);
}
[Fact]
public void IsPastSunset_WhenSunsetInPast_ReturnsTrue()
{
var info = new DeprecationInfo(
DeprecatedAt: DateTimeOffset.UtcNow.AddMonths(-12),
SunsetAt: DateTimeOffset.UtcNow.AddMonths(-1),
SuccessorPath: "/v1/new");
Assert.True(info.IsPastSunset);
}
[Fact]
public void DaysUntilSunset_CalculatesCorrectly()
{
var sunset = DateTimeOffset.UtcNow.AddDays(30);
var info = new DeprecationInfo(
DeprecatedAt: DateTimeOffset.UtcNow,
SunsetAt: sunset,
SuccessorPath: "/v1/new");
Assert.Equal(30, info.DaysUntilSunset);
}
[Fact]
public void DaysUntilSunset_WhenPastSunset_ReturnsZero()
{
var info = new DeprecationInfo(
DeprecatedAt: DateTimeOffset.UtcNow.AddMonths(-12),
SunsetAt: DateTimeOffset.UtcNow.AddMonths(-1),
SuccessorPath: "/v1/new");
Assert.Equal(0, info.DaysUntilSunset);
}
[Fact]
public void Record_InitializesAllProperties()
{
var deprecatedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
var sunsetAt = new DateTimeOffset(2025, 7, 1, 0, 0, 0, TimeSpan.Zero);
var info = new DeprecationInfo(
DeprecatedAt: deprecatedAt,
SunsetAt: sunsetAt,
SuccessorPath: "/v1/exports",
DocumentationUrl: "https://docs.example.com",
Reason: "Replaced by new API");
Assert.Equal(deprecatedAt, info.DeprecatedAt);
Assert.Equal(sunsetAt, info.SunsetAt);
Assert.Equal("/v1/exports", info.SuccessorPath);
Assert.Equal("https://docs.example.com", info.DocumentationUrl);
Assert.Equal("Replaced by new API", info.Reason);
}
}

View File

@@ -0,0 +1,552 @@
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.ExportCenter.Core.Notifications;
using Xunit;
namespace StellaOps.ExportCenter.Tests;
public sealed class ExportNotificationEmitterTests
{
private readonly InMemoryExportNotificationSink _sink;
private readonly InMemoryExportNotificationDlq _dlq;
private readonly FakeTimeProvider _timeProvider;
private readonly ExportNotificationEmitter _emitter;
public ExportNotificationEmitterTests()
{
_sink = new InMemoryExportNotificationSink();
_dlq = new InMemoryExportNotificationDlq();
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero));
_emitter = new ExportNotificationEmitter(
_sink,
_dlq,
_timeProvider,
NullLogger<ExportNotificationEmitter>.Instance);
}
[Fact]
public async Task EmitAirgapReadyAsync_PublishesToSink()
{
var notification = CreateTestNotification();
var result = await _emitter.EmitAirgapReadyAsync(notification);
Assert.True(result.Success);
Assert.Equal(1, result.AttemptCount);
Assert.Equal(1, _sink.Count);
}
[Fact]
public async Task EmitAirgapReadyAsync_UsesCorrectChannel()
{
var notification = CreateTestNotification();
await _emitter.EmitAirgapReadyAsync(notification);
var messages = _sink.GetMessages(ExportNotificationTypes.AirgapReady);
Assert.Single(messages);
}
[Fact]
public async Task EmitAirgapReadyAsync_SerializesPayloadWithSnakeCase()
{
var notification = CreateTestNotification();
await _emitter.EmitAirgapReadyAsync(notification);
var messages = _sink.GetMessages(ExportNotificationTypes.AirgapReady);
var payload = messages.First();
Assert.Contains("\"export_id\":", payload);
Assert.Contains("\"bundle_id\":", payload);
Assert.Contains("\"tenant_id\":", payload);
Assert.Contains("\"artifact_sha256\":", payload);
}
[Fact]
public async Task EmitAirgapReadyAsync_RoutesToDlqOnFailure()
{
var failingSink = new FailingNotificationSink(maxFailures: 10);
var emitter = new ExportNotificationEmitter(
failingSink,
_dlq,
_timeProvider,
NullLogger<ExportNotificationEmitter>.Instance,
new ExportNotificationEmitterOptions(MaxRetries: 3, WebhookEnabled: false, WebhookTimeout: TimeSpan.FromSeconds(30)));
var notification = CreateTestNotification();
var result = await emitter.EmitAirgapReadyAsync(notification);
Assert.False(result.Success);
Assert.Equal(1, _dlq.Count);
}
[Fact]
public async Task EmitAirgapReadyAsync_DlqEntryContainsCorrectData()
{
var failingSink = new FailingNotificationSink(maxFailures: 10);
var emitter = new ExportNotificationEmitter(
failingSink,
_dlq,
_timeProvider,
NullLogger<ExportNotificationEmitter>.Instance,
new ExportNotificationEmitterOptions(MaxRetries: 1, WebhookEnabled: false, WebhookTimeout: TimeSpan.FromSeconds(30)));
var notification = CreateTestNotification();
await emitter.EmitAirgapReadyAsync(notification);
var dlqEntries = _dlq.GetAll();
Assert.Single(dlqEntries);
var entry = dlqEntries.First();
Assert.Equal(notification.ExportId, entry.ExportId);
Assert.Equal(notification.BundleId, entry.BundleId);
Assert.Equal(notification.TenantId, entry.TenantId);
Assert.Equal(ExportNotificationTypes.AirgapReady, entry.EventType);
Assert.NotEmpty(entry.OriginalPayload);
}
[Fact]
public async Task EmitAirgapReadyAsync_RetriesTransientFailures()
{
var failingSink = new FailingNotificationSink(maxFailures: 2);
var emitter = new ExportNotificationEmitter(
failingSink,
_dlq,
_timeProvider,
NullLogger<ExportNotificationEmitter>.Instance,
new ExportNotificationEmitterOptions(MaxRetries: 5, WebhookEnabled: false, WebhookTimeout: TimeSpan.FromSeconds(30)));
var notification = CreateTestNotification();
var result = await emitter.EmitAirgapReadyAsync(notification);
Assert.True(result.Success);
Assert.Equal(3, result.AttemptCount);
Assert.Equal(0, _dlq.Count);
}
[Fact]
public async Task EmitToTimelineAsync_UsesTimelineChannel()
{
var notification = CreateTestNotification();
var result = await _emitter.EmitToTimelineAsync(notification);
Assert.True(result.Success);
var messages = _sink.GetMessages(ExportNotificationTypes.TimelineAirgapReady);
Assert.Single(messages);
}
[Fact]
public async Task EmitAirgapReadyAsync_IncludesMetadataInPayload()
{
var notification = new ExportAirgapReadyNotification
{
ArtifactSha256 = "abc123",
ArtifactUri = "https://example.com/artifact",
BundleId = "bundle-001",
CreatedAt = _timeProvider.GetUtcNow(),
ExportId = "export-001",
PortableVersion = "v1",
ProfileId = "profile-001",
RootHash = "root123",
TenantId = "tenant-001",
Metadata = new ExportAirgapReadyMetadata
{
ExportSizeBytes = 1024,
PortableSizeBytes = 512,
SourceUri = "https://source.example.com/bundle"
}
};
await _emitter.EmitAirgapReadyAsync(notification);
var messages = _sink.GetMessages(ExportNotificationTypes.AirgapReady);
var payload = messages.First();
Assert.Contains("\"export_size_bytes\":1024", payload);
Assert.Contains("\"portable_size_bytes\":512", payload);
Assert.Contains("\"source_uri\":\"https://source.example.com/bundle\"", payload);
}
[Fact]
public async Task EmitAirgapReadyAsync_WithWebhook_DeliversToWebhook()
{
var webhookClient = new FakeWebhookClient();
var emitter = new ExportNotificationEmitter(
_sink,
_dlq,
_timeProvider,
NullLogger<ExportNotificationEmitter>.Instance,
new ExportNotificationEmitterOptions(MaxRetries: 5, WebhookEnabled: true, WebhookTimeout: TimeSpan.FromSeconds(30)),
webhookClient);
var notification = CreateTestNotification();
var result = await emitter.EmitAirgapReadyAsync(notification);
Assert.True(result.Success);
Assert.Equal(1, webhookClient.DeliveryCount);
}
[Fact]
public async Task EmitAirgapReadyAsync_WithWebhookFailure_RoutesToDlq()
{
var webhookClient = new FakeWebhookClient(alwaysFail: true);
var emitter = new ExportNotificationEmitter(
_sink,
_dlq,
_timeProvider,
NullLogger<ExportNotificationEmitter>.Instance,
new ExportNotificationEmitterOptions(MaxRetries: 2, WebhookEnabled: true, WebhookTimeout: TimeSpan.FromSeconds(30)),
webhookClient);
var notification = CreateTestNotification();
var result = await emitter.EmitAirgapReadyAsync(notification);
Assert.False(result.Success);
Assert.Equal(1, _dlq.Count);
}
[Fact]
public async Task EmitAirgapReadyAsync_ThrowsOnNullNotification()
{
await Assert.ThrowsAsync<ArgumentNullException>(
() => _emitter.EmitAirgapReadyAsync(null!));
}
private ExportAirgapReadyNotification CreateTestNotification()
{
return new ExportAirgapReadyNotification
{
ArtifactSha256 = "sha256-test-hash",
ArtifactUri = "https://artifacts.example.com/export/test.tgz",
BundleId = Guid.NewGuid().ToString("D"),
CreatedAt = _timeProvider.GetUtcNow(),
ExportId = Guid.NewGuid().ToString("D"),
PortableVersion = "v1",
ProfileId = "mirror:full",
RootHash = "root-hash-test",
TenantId = Guid.NewGuid().ToString("D")
};
}
private sealed class FailingNotificationSink : IExportNotificationSink
{
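// Throws a TimeoutException for the first maxFailures publishes, then succeeds, so retry and DLQ paths can be exercised.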
private readonly int _maxFailures;
private int _failures;
public FailingNotificationSink(int maxFailures)
{
_maxFailures = maxFailures;
}
public Task PublishAsync(string channel, string message, CancellationToken cancellationToken = default)
{
if (_failures < _maxFailures)
{
_failures++;
throw new TimeoutException("Simulated transient failure");
}
return Task.CompletedTask;
}
}
private sealed class FakeWebhookClient : IExportWebhookClient
{
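// Counts delivery attempts; when alwaysFail is set it returns a non-retryable 500 so DLQ routing is triggered.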
private readonly bool _alwaysFail;
public int DeliveryCount { get; private set; }
public FakeWebhookClient(bool alwaysFail = false)
{
_alwaysFail = alwaysFail;
}
public Task<WebhookDeliveryResult> DeliverAsync(
string eventType,
string payload,
DateTimeOffset sentAt,
CancellationToken cancellationToken = default)
{
DeliveryCount++;
if (_alwaysFail)
{
return Task.FromResult(new WebhookDeliveryResult(
Success: false,
StatusCode: 500,
ErrorMessage: "Simulated failure",
ShouldRetry: false));
}
return Task.FromResult(new WebhookDeliveryResult(
Success: true,
StatusCode: 200,
ErrorMessage: null,
ShouldRetry: false));
}
}
}
public sealed class ExportWebhookClientTests
{
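// ComputeSignature presumably computes an HMAC-SHA256 over the timestamp plus payload (hence the "sha256=" prefix); these tests pin determinism and tamper detection.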
[Fact]
public void ComputeSignature_ProducesDeterministicOutput()
{
var payload = "{\"export_id\":\"abc123\"}";
var sentAt = new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero);
var signingKey = "test-secret-key";
var sig1 = ExportWebhookClient.ComputeSignature(payload, sentAt, signingKey);
var sig2 = ExportWebhookClient.ComputeSignature(payload, sentAt, signingKey);
Assert.Equal(sig1, sig2);
}
[Fact]
public void ComputeSignature_StartsWithSha256Prefix()
{
var payload = "{\"test\":true}";
var sentAt = DateTimeOffset.UtcNow;
var signingKey = "test-key";
var signature = ExportWebhookClient.ComputeSignature(payload, sentAt, signingKey);
Assert.StartsWith("sha256=", signature);
}
[Fact]
public void ComputeSignature_ChangesWithDifferentPayload()
{
var sentAt = DateTimeOffset.UtcNow;
var signingKey = "test-key";
var sig1 = ExportWebhookClient.ComputeSignature("{\"a\":1}", sentAt, signingKey);
var sig2 = ExportWebhookClient.ComputeSignature("{\"a\":2}", sentAt, signingKey);
Assert.NotEqual(sig1, sig2);
}
[Fact]
public void ComputeSignature_ChangesWithDifferentTimestamp()
{
var payload = "{\"test\":true}";
var signingKey = "test-key";
var sig1 = ExportWebhookClient.ComputeSignature(payload, new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero), signingKey);
var sig2 = ExportWebhookClient.ComputeSignature(payload, new DateTimeOffset(2025, 1, 2, 0, 0, 0, TimeSpan.Zero), signingKey);
Assert.NotEqual(sig1, sig2);
}
[Fact]
public void ComputeSignature_ChangesWithDifferentKey()
{
var payload = "{\"test\":true}";
var sentAt = DateTimeOffset.UtcNow;
var sig1 = ExportWebhookClient.ComputeSignature(payload, sentAt, "key1");
var sig2 = ExportWebhookClient.ComputeSignature(payload, sentAt, "key2");
Assert.NotEqual(sig1, sig2);
}
[Fact]
public void ComputeSignature_AcceptsBase64Key()
{
var payload = "{\"test\":true}";
var sentAt = DateTimeOffset.UtcNow;
var base64Key = Convert.ToBase64String(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8 });
var signature = ExportWebhookClient.ComputeSignature(payload, sentAt, base64Key);
Assert.StartsWith("sha256=", signature);
}
[Fact]
public void ComputeSignature_AcceptsHexKey()
{
var payload = "{\"test\":true}";
var sentAt = DateTimeOffset.UtcNow;
var hexKey = "0102030405060708";
var signature = ExportWebhookClient.ComputeSignature(payload, sentAt, hexKey);
Assert.StartsWith("sha256=", signature);
}
[Fact]
public void VerifySignature_ReturnsTrueForValidSignature()
{
var payload = "{\"test\":true}";
var sentAt = new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero);
var signingKey = "test-key";
var signature = ExportWebhookClient.ComputeSignature(payload, sentAt, signingKey);
var isValid = ExportWebhookClient.VerifySignature(payload, sentAt, signingKey, signature);
Assert.True(isValid);
}
[Fact]
public void VerifySignature_ReturnsFalseForInvalidSignature()
{
var payload = "{\"test\":true}";
var sentAt = DateTimeOffset.UtcNow;
var signingKey = "test-key";
var isValid = ExportWebhookClient.VerifySignature(payload, sentAt, signingKey, "sha256=invalid");
Assert.False(isValid);
}
[Fact]
public void VerifySignature_ReturnsFalseForTamperedPayload()
{
var sentAt = DateTimeOffset.UtcNow;
var signingKey = "test-key";
var signature = ExportWebhookClient.ComputeSignature("{\"test\":true}", sentAt, signingKey);
var isValid = ExportWebhookClient.VerifySignature("{\"test\":false}", sentAt, signingKey, signature);
Assert.False(isValid);
}
}
public sealed class InMemoryExportNotificationSinkTests
{
[Fact]
public async Task PublishAsync_StoresMessage()
{
var sink = new InMemoryExportNotificationSink();
await sink.PublishAsync("test.channel", "{\"test\":true}");
Assert.Equal(1, sink.Count);
}
[Fact]
public async Task GetMessages_ReturnsMessagesByChannel()
{
var sink = new InMemoryExportNotificationSink();
await sink.PublishAsync("channel.a", "{\"a\":1}");
await sink.PublishAsync("channel.b", "{\"b\":2}");
await sink.PublishAsync("channel.a", "{\"a\":3}");
var messagesA = sink.GetMessages("channel.a");
var messagesB = sink.GetMessages("channel.b");
Assert.Equal(2, messagesA.Count);
Assert.Single(messagesB);
}
[Fact]
public async Task Clear_RemovesAllMessages()
{
var sink = new InMemoryExportNotificationSink();
await sink.PublishAsync("test", "message1");
await sink.PublishAsync("test", "message2");
sink.Clear();
Assert.Equal(0, sink.Count);
}
}
public sealed class InMemoryExportNotificationDlqTests
{
[Fact]
public async Task EnqueueAsync_StoresEntry()
{
var dlq = new InMemoryExportNotificationDlq();
var entry = CreateTestDlqEntry();
await dlq.EnqueueAsync(entry);
Assert.Equal(1, dlq.Count);
}
[Fact]
public async Task GetPendingAsync_ReturnsAllEntries()
{
var dlq = new InMemoryExportNotificationDlq();
await dlq.EnqueueAsync(CreateTestDlqEntry("tenant-1"));
await dlq.EnqueueAsync(CreateTestDlqEntry("tenant-2"));
var pending = await dlq.GetPendingAsync();
Assert.Equal(2, pending.Count);
}
[Fact]
public async Task GetPendingAsync_FiltersByTenant()
{
var dlq = new InMemoryExportNotificationDlq();
await dlq.EnqueueAsync(CreateTestDlqEntry("tenant-1"));
await dlq.EnqueueAsync(CreateTestDlqEntry("tenant-2"));
await dlq.EnqueueAsync(CreateTestDlqEntry("tenant-1"));
var pending = await dlq.GetPendingAsync(tenantId: "tenant-1");
Assert.Equal(2, pending.Count);
Assert.All(pending, e => Assert.Equal("tenant-1", e.TenantId));
}
[Fact]
public async Task GetPendingAsync_RespectsLimit()
{
var dlq = new InMemoryExportNotificationDlq();
for (var i = 0; i < 10; i++)
{
await dlq.EnqueueAsync(CreateTestDlqEntry());
}
var pending = await dlq.GetPendingAsync(limit: 5);
Assert.Equal(5, pending.Count);
}
private static ExportNotificationDlqEntry CreateTestDlqEntry(string? tenantId = null)
{
return new ExportNotificationDlqEntry
{
EventType = ExportNotificationTypes.AirgapReady,
ExportId = Guid.NewGuid().ToString(),
BundleId = Guid.NewGuid().ToString(),
TenantId = tenantId ?? Guid.NewGuid().ToString(),
FailureReason = "Test failure",
AttemptCount = 3,
LastAttemptAt = DateTimeOffset.UtcNow,
OriginalPayload = "{}"
};
}
}
/// <summary>
/// Fake time provider for testing.
/// </summary>
internal sealed class FakeTimeProvider : TimeProvider
{
private DateTimeOffset _utcNow;
public FakeTimeProvider(DateTimeOffset utcNow)
{
_utcNow = utcNow;
}
public override DateTimeOffset GetUtcNow() => _utcNow;
public void Advance(TimeSpan duration) => _utcNow = _utcNow.Add(duration);
public void SetUtcNow(DateTimeOffset utcNow) => _utcNow = utcNow;
}

View File

@@ -0,0 +1,396 @@
using System.Formats.Tar;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.MirrorBundle;
using Xunit;
namespace StellaOps.ExportCenter.Tests;
public sealed class MirrorBundleBuilderTests : IDisposable
{
private readonly string _tempDir;
private readonly MirrorBundleBuilder _builder;
private readonly ICryptoHash _cryptoHash;
public MirrorBundleBuilderTests()
{
_tempDir = Path.Combine(Path.GetTempPath(), $"mirror-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
_cryptoHash = new DefaultCryptoHash();
_builder = new MirrorBundleBuilder(_cryptoHash);
}
public void Dispose()
{
if (Directory.Exists(_tempDir))
{
Directory.Delete(_tempDir, recursive: true);
}
}
[Fact]
public void Build_FullBundle_ProducesValidArchive()
{
var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-2024-1234\"}");
var request = new MirrorBundleBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
MirrorBundleVariant.Full,
new MirrorBundleSelectors(new[] { "registry.example.com/app:*" }, null, null),
new[]
{
new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath)
});
var result = _builder.Build(request);
Assert.NotNull(result);
Assert.NotNull(result.Manifest);
Assert.NotEmpty(result.ManifestJson);
Assert.NotEmpty(result.RootHash);
Assert.True(result.BundleStream.Length > 0);
Assert.Equal("mirror:full", result.Manifest.Profile);
}
[Fact]
public void Build_DeltaBundle_IncludesDeltaMetadata()
{
var vexPath = CreateTestFile("vex.jsonl.zst", "{\"id\":\"VEX-001\"}");
var request = new MirrorBundleBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
MirrorBundleVariant.Delta,
new MirrorBundleSelectors(new[] { "product-a" }, DateTimeOffset.UtcNow.AddDays(-7), DateTimeOffset.UtcNow),
new[]
{
new MirrorBundleDataSource(MirrorBundleDataCategory.Vex, vexPath)
},
DeltaOptions: new MirrorBundleDeltaOptions("run-20251001", "sha256:abc123"));
var result = _builder.Build(request);
Assert.NotNull(result.Manifest.Delta);
Assert.Equal("run-20251001", result.Manifest.Delta.BaseExportId);
Assert.Equal("sha256:abc123", result.Manifest.Delta.BaseManifestDigest);
Assert.Equal("mirror:delta", result.Manifest.Profile);
}
[Fact]
public void Build_WithEncryption_IncludesEncryptionMetadata()
{
var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-2024-5678\"}");
var request = new MirrorBundleBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
MirrorBundleVariant.Full,
new MirrorBundleSelectors(new[] { "product-b" }, null, null),
new[]
{
new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath)
},
Encryption: new MirrorBundleEncryptionOptions(
MirrorBundleEncryptionMode.Age,
new[] { "age1recipient..." },
Strict: false));
var result = _builder.Build(request);
Assert.NotNull(result.Manifest.Encryption);
Assert.Equal("age", result.Manifest.Encryption.Mode);
Assert.False(result.Manifest.Encryption.Strict);
Assert.Single(result.Manifest.Encryption.Recipients);
}
[Fact]
public void Build_ProducesDeterministicOutput()
{
var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-2024-DETERM\"}");
var runId = new Guid("11111111-2222-3333-4444-555555555555");
var tenantId = new Guid("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee");
var request = new MirrorBundleBuildRequest(
runId,
tenantId,
MirrorBundleVariant.Full,
new MirrorBundleSelectors(new[] { "product-deterministic" }, null, null),
new[]
{
new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath)
});
var result1 = _builder.Build(request);
var result2 = _builder.Build(request);
// Root hashes should match for identical inputs
Assert.Equal(result1.RootHash, result2.RootHash);
// Archive content should be identical
var bytes1 = result1.BundleStream.ToArray();
var bytes2 = result2.BundleStream.ToArray();
Assert.Equal(bytes1, bytes2);
}
[Fact]
public void Build_ArchiveContainsExpectedFiles()
{
var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-2024-ARCHIVE\"}");
var request = new MirrorBundleBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
MirrorBundleVariant.Full,
new MirrorBundleSelectors(new[] { "product-archive" }, null, null),
new[]
{
new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath)
});
var result = _builder.Build(request);
var fileNames = ExtractFileNames(result.BundleStream);
Assert.Contains("manifest.yaml", fileNames);
Assert.Contains("export.json", fileNames);
Assert.Contains("provenance.json", fileNames);
Assert.Contains("checksums.txt", fileNames);
Assert.Contains("README.md", fileNames);
Assert.Contains("verify-mirror.sh", fileNames);
Assert.Contains("indexes/advisories.index.json", fileNames);
Assert.Contains("indexes/vex.index.json", fileNames);
Assert.Contains("data/raw/advisories/advisories.jsonl.zst", fileNames);
}
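    // Byte-for-byte reproducible archives require normalized tar entry metadata: a fixed
    // modification timestamp, zero uid/gid, and empty owner names. The assertions below pin
    // those fields so repeated builds stay identical.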
[Fact]
public void Build_TarEntriesHaveDeterministicMetadata()
{
var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-2024-METADATA\"}");
var request = new MirrorBundleBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
MirrorBundleVariant.Full,
new MirrorBundleSelectors(new[] { "product-metadata" }, null, null),
new[]
{
new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath)
});
var result = _builder.Build(request);
var entries = ExtractTarEntryMetadata(result.BundleStream);
var expectedTimestamp = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
foreach (var entry in entries)
{
Assert.Equal(0, entry.Uid);
Assert.Equal(0, entry.Gid);
Assert.Equal(string.Empty, entry.UserName);
Assert.Equal(string.Empty, entry.GroupName);
Assert.Equal(expectedTimestamp, entry.ModificationTime);
}
}
[Fact]
public void Build_SbomWithSubject_UsesCorrectPath()
{
var sbomPath = CreateTestFile("sbom.json", "{\"bomFormat\":\"CycloneDX\"}");
var request = new MirrorBundleBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
MirrorBundleVariant.Full,
new MirrorBundleSelectors(new[] { "product-sbom" }, null, null),
new[]
{
new MirrorBundleDataSource(
MirrorBundleDataCategory.Sbom,
sbomPath,
SubjectId: "registry.example.com/app:v1.2.3")
});
var result = _builder.Build(request);
var fileNames = ExtractFileNames(result.BundleStream);
Assert.Contains("data/raw/sboms/registry.example.com-app-v1.2.3/sbom.json", fileNames);
}
[Fact]
public void Build_NormalizedData_UsesNormalizedPath()
{
var normalizedPath = CreateTestFile("advisories-normalized.jsonl.zst", "{\"id\":\"CVE-2024-NORM\"}");
var request = new MirrorBundleBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
MirrorBundleVariant.Full,
new MirrorBundleSelectors(new[] { "product-normalized" }, null, null),
new[]
{
new MirrorBundleDataSource(
MirrorBundleDataCategory.Advisories,
normalizedPath,
IsNormalized: true)
});
var result = _builder.Build(request);
var fileNames = ExtractFileNames(result.BundleStream);
Assert.Contains("data/normalized/advisories/advisories-normalized.jsonl.zst", fileNames);
}
[Fact]
public void Build_CountsAreAccurate()
{
var advisory1 = CreateTestFile("advisory1.jsonl.zst", "{\"id\":\"CVE-1\"}");
var advisory2 = CreateTestFile("advisory2.jsonl.zst", "{\"id\":\"CVE-2\"}");
var vex1 = CreateTestFile("vex1.jsonl.zst", "{\"id\":\"VEX-1\"}");
var sbom1 = CreateTestFile("sbom1.json", "{\"bomFormat\":\"CycloneDX\"}");
var request = new MirrorBundleBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
MirrorBundleVariant.Full,
new MirrorBundleSelectors(new[] { "product-counts" }, null, null),
new[]
{
new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisory1),
new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisory2),
new MirrorBundleDataSource(MirrorBundleDataCategory.Vex, vex1),
new MirrorBundleDataSource(MirrorBundleDataCategory.Sbom, sbom1)
});
var result = _builder.Build(request);
Assert.Equal(2, result.Manifest.Counts.Advisories);
Assert.Equal(1, result.Manifest.Counts.Vex);
Assert.Equal(1, result.Manifest.Counts.Sboms);
}
[Fact]
public void Build_ThrowsForMissingDataSource()
{
var request = new MirrorBundleBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
MirrorBundleVariant.Full,
new MirrorBundleSelectors(new[] { "product-missing" }, null, null),
new[]
{
new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, "/nonexistent/file.jsonl.zst")
});
Assert.Throws<FileNotFoundException>(() => _builder.Build(request));
}
[Fact]
public void Build_ThrowsForDeltaWithoutOptions()
{
var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-DELTA\"}");
var request = new MirrorBundleBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
MirrorBundleVariant.Delta,
new MirrorBundleSelectors(new[] { "product-delta" }, null, null),
new[]
{
new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath)
},
DeltaOptions: null);
Assert.Throws<ArgumentException>(() => _builder.Build(request));
}
[Fact]
public void Build_ProvenanceDocumentContainsSubjects()
{
var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-PROVENANCE\"}");
var request = new MirrorBundleBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
MirrorBundleVariant.Full,
new MirrorBundleSelectors(new[] { "product-provenance" }, null, null),
new[]
{
new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath)
});
var result = _builder.Build(request);
Assert.NotEmpty(result.ProvenanceDocument.Subjects);
Assert.Contains(result.ProvenanceDocument.Subjects, s => s.Name == "manifest.yaml");
Assert.NotNull(result.ProvenanceDocument.Builder);
Assert.NotEmpty(result.ProvenanceDocument.Builder.ExporterVersion);
}
[Fact]
public void Build_ExportDocumentContainsManifestDigest()
{
var advisoryPath = CreateTestFile("advisories.jsonl.zst", "{\"id\":\"CVE-EXPORT\"}");
var request = new MirrorBundleBuildRequest(
Guid.NewGuid(),
Guid.NewGuid(),
MirrorBundleVariant.Full,
new MirrorBundleSelectors(new[] { "product-export" }, null, null),
new[]
{
new MirrorBundleDataSource(MirrorBundleDataCategory.Advisories, advisoryPath)
});
var result = _builder.Build(request);
Assert.StartsWith("sha256:", result.ExportDocument.ManifestDigest);
Assert.Equal(result.Manifest.Profile, $"{result.ExportDocument.Profile.Kind}:{result.ExportDocument.Profile.Variant}");
}
private string CreateTestFile(string fileName, string content)
{
var path = Path.Combine(_tempDir, fileName);
File.WriteAllText(path, content);
return path;
}
private static List<string> ExtractFileNames(MemoryStream bundleStream)
{
bundleStream.Position = 0;
var fileNames = new List<string>();
using var gzip = new GZipStream(bundleStream, CompressionMode.Decompress, leaveOpen: true);
using var tar = new TarReader(gzip, leaveOpen: true);
TarEntry? entry;
while ((entry = tar.GetNextEntry()) is not null)
{
fileNames.Add(entry.Name);
}
bundleStream.Position = 0;
return fileNames;
}
private static List<TarEntryMetadata> ExtractTarEntryMetadata(MemoryStream bundleStream)
{
bundleStream.Position = 0;
var entries = new List<TarEntryMetadata>();
using var gzip = new GZipStream(bundleStream, CompressionMode.Decompress, leaveOpen: true);
using var tar = new TarReader(gzip, leaveOpen: true);
TarEntry? entry;
while ((entry = tar.GetNextEntry()) is not null)
{
            var posixEntry = entry as PosixTarEntry;
            entries.Add(new TarEntryMetadata(
                entry.Uid,
                entry.Gid,
                posixEntry?.UserName ?? string.Empty,
                posixEntry?.GroupName ?? string.Empty,
                entry.ModificationTime));
}
bundleStream.Position = 0;
return entries;
}
private sealed record TarEntryMetadata(
int Uid,
int Gid,
string UserName,
string GroupName,
DateTimeOffset ModificationTime);
}

View File

@@ -0,0 +1,159 @@
using System.Text;
using System.Text.Json;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.MirrorBundle;
using Xunit;
namespace StellaOps.ExportCenter.Tests;
public sealed class MirrorBundleSigningTests
{
private readonly ICryptoHmac _cryptoHmac;
private readonly HmacMirrorBundleManifestSigner _signer;
public MirrorBundleSigningTests()
{
_cryptoHmac = new DefaultCryptoHmac();
_signer = new HmacMirrorBundleManifestSigner(_cryptoHmac, "test-signing-key-12345", "test-key-id");
}
[Fact]
public async Task SignExportDocumentAsync_ReturnsDsseEnvelope()
{
var exportJson = """{"runId":"abc123","tenantId":"tenant-1"}""";
var result = await _signer.SignExportDocumentAsync(exportJson);
Assert.NotNull(result);
Assert.Equal("application/vnd.stellaops.mirror-bundle.export+json", result.PayloadType);
Assert.NotEmpty(result.Payload);
Assert.Single(result.Signatures);
Assert.Equal("test-key-id", result.Signatures[0].KeyId);
Assert.NotEmpty(result.Signatures[0].Signature);
}
[Fact]
public async Task SignManifestAsync_ReturnsDsseEnvelope()
{
var manifestYaml = "profile: mirror:full\nrunId: abc123";
var result = await _signer.SignManifestAsync(manifestYaml);
Assert.NotNull(result);
Assert.Equal("application/vnd.stellaops.mirror-bundle.manifest+yaml", result.PayloadType);
Assert.NotEmpty(result.Payload);
Assert.Single(result.Signatures);
}
[Fact]
public async Task SignArchiveAsync_ReturnsBase64Signature()
{
using var stream = new MemoryStream(Encoding.UTF8.GetBytes("test archive content"));
var signature = await _signer.SignArchiveAsync(stream);
Assert.NotEmpty(signature);
// Verify it's valid base64
var decoded = Convert.FromBase64String(signature);
Assert.NotEmpty(decoded);
}
[Fact]
public async Task SignArchiveAsync_ResetStreamPosition()
{
using var stream = new MemoryStream(Encoding.UTF8.GetBytes("test archive content"));
stream.Position = 5;
await _signer.SignArchiveAsync(stream);
Assert.Equal(0, stream.Position);
}
[Fact]
public async Task SignExportDocumentAsync_PayloadIsBase64Encoded()
{
var exportJson = """{"runId":"encoded-test"}""";
var result = await _signer.SignExportDocumentAsync(exportJson);
var decodedPayload = Encoding.UTF8.GetString(Convert.FromBase64String(result.Payload));
Assert.Equal(exportJson, decodedPayload);
}
[Fact]
public async Task SignExportDocumentAsync_IsDeterministic()
{
var exportJson = """{"runId":"deterministic-test"}""";
var result1 = await _signer.SignExportDocumentAsync(exportJson);
var result2 = await _signer.SignExportDocumentAsync(exportJson);
Assert.Equal(result1.Signatures[0].Signature, result2.Signatures[0].Signature);
Assert.Equal(result1.Payload, result2.Payload);
}
[Fact]
public void ToJson_SerializesCorrectly()
{
var signature = new MirrorBundleDsseSignature(
"test/payload+json",
Convert.ToBase64String(Encoding.UTF8.GetBytes("test-payload")),
new[] { new MirrorBundleDsseSignatureEntry("sig-value", "key-id-1") });
var json = signature.ToJson();
Assert.Contains("\"payloadType\"", json);
Assert.Contains("test/payload+json", json);
Assert.Contains("\"signatures\"", json);
Assert.Contains("sig-value", json);
// Verify it's valid JSON
var parsed = JsonDocument.Parse(json);
Assert.NotNull(parsed);
}
[Fact]
public void Constructor_ThrowsForEmptyKey()
{
Assert.Throws<ArgumentException>(() =>
new HmacMirrorBundleManifestSigner(_cryptoHmac, "", "key-id"));
}
[Fact]
public void Constructor_ThrowsForNullKey()
{
Assert.Throws<ArgumentException>(() =>
new HmacMirrorBundleManifestSigner(_cryptoHmac, null!, "key-id"));
}
[Fact]
public void Constructor_ThrowsForNullCryptoHmac()
{
Assert.Throws<ArgumentNullException>(() =>
new HmacMirrorBundleManifestSigner(null!, "test-key", "key-id"));
}
    [Fact]
    public async Task Constructor_UsesDefaultKeyIdWhenEmpty()
    {
        var signer = new HmacMirrorBundleManifestSigner(_cryptoHmac, "test-key", "");
        var result = await signer.SignExportDocumentAsync("{}");
        Assert.Equal("mirror-bundle-hmac", result.Signatures[0].KeyId);
    }
[Fact]
public async Task SignArchiveAsync_ThrowsForNonSeekableStream()
{
using var nonSeekable = new NonSeekableMemoryStream(Encoding.UTF8.GetBytes("test"));
await Assert.ThrowsAsync<ArgumentException>(() =>
_signer.SignArchiveAsync(nonSeekable));
}
private sealed class NonSeekableMemoryStream : MemoryStream
{
public NonSeekableMemoryStream(byte[] buffer) : base(buffer) { }
public override bool CanSeek => false;
}
}
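// Background sketch (illustrative only, not exercised by the tests above): DSSE signers compute
// the signature over the Pre-Authentication Encoding of the payload rather than the raw bytes.
// Assuming the signer follows the standard DSSE v1 PAE layout, the encoding is:
//   PAE(type, body) = "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body
internal static class DssePreAuthEncodingSketch
{
    public static byte[] Encode(string payloadType, byte[] payload)
    {
        var typeBytes = Encoding.UTF8.GetBytes(payloadType);
        var header = Encoding.UTF8.GetBytes(
            $"DSSEv1 {typeBytes.Length} {payloadType} {payload.Length} ");

        var buffer = new byte[header.Length + payload.Length];
        header.CopyTo(buffer, 0);
        payload.CopyTo(buffer, header.Length);
        return buffer;
    }
}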

View File

@@ -0,0 +1,290 @@
using System.Text;
using System.Text.Json;
using StellaOps.ExportCenter.Core.OfflineKit;
using Xunit;
namespace StellaOps.ExportCenter.Tests;
public sealed class OfflineKitDistributorTests : IDisposable
{
private readonly string _tempDir;
private readonly FakeTimeProvider _timeProvider;
private readonly FakeCryptoHash _cryptoHash;
private readonly OfflineKitDistributor _distributor;
public OfflineKitDistributorTests()
{
_tempDir = Path.Combine(Path.GetTempPath(), $"offline-kit-dist-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero));
_cryptoHash = new FakeCryptoHash();
_distributor = new OfflineKitDistributor(_cryptoHash, _timeProvider);
}
public void Dispose()
{
if (Directory.Exists(_tempDir))
{
Directory.Delete(_tempDir, recursive: true);
}
}
[Fact]
public void DistributeToMirror_CopiesFilesToMirrorLocation()
{
var sourceKit = SetupSourceKit();
var mirrorBase = Path.Combine(_tempDir, "mirror");
var kitVersion = "v1";
var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion);
Assert.True(result.Success);
Assert.True(Directory.Exists(Path.Combine(mirrorBase, "export", "attestations", kitVersion)));
}
[Fact]
public void DistributeToMirror_CreatesManifestOfflineJson()
{
var sourceKit = SetupSourceKit();
var mirrorBase = Path.Combine(_tempDir, "mirror");
var kitVersion = "v1";
var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion);
Assert.True(result.Success);
Assert.NotNull(result.ManifestPath);
Assert.True(File.Exists(result.ManifestPath));
}
[Fact]
public void DistributeToMirror_ManifestContainsAttestationEntry()
{
var sourceKit = SetupSourceKitWithAttestation();
var mirrorBase = Path.Combine(_tempDir, "mirror");
var kitVersion = "v1";
var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion);
Assert.True(result.Success);
var manifestJson = File.ReadAllText(result.ManifestPath!);
var manifest = JsonSerializer.Deserialize<JsonElement>(manifestJson);
var entries = manifest.GetProperty("entries").EnumerateArray().ToList();
var attestationEntry = entries.FirstOrDefault(e =>
e.GetProperty("kind").GetString() == "attestation-kit");
Assert.NotEqual(default, attestationEntry);
Assert.Contains("stella attest bundle verify", attestationEntry.GetProperty("cliExample").GetString());
}
[Fact]
public void DistributeToMirror_CreatesManifestChecksum()
{
var sourceKit = SetupSourceKit();
var mirrorBase = Path.Combine(_tempDir, "mirror");
var kitVersion = "v1";
var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion);
Assert.True(result.Success);
Assert.True(File.Exists(result.ManifestPath + ".sha256"));
}
[Fact]
public void DistributeToMirror_PreservesBytesExactly()
{
var sourceKit = SetupSourceKitWithAttestation();
var mirrorBase = Path.Combine(_tempDir, "mirror");
var kitVersion = "v1";
var sourceFile = Path.Combine(sourceKit, "attestations", "export-attestation-bundle-v1.tgz");
var sourceBytes = File.ReadAllBytes(sourceFile);
var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion);
var targetFile = Path.Combine(result.TargetPath!, "attestations", "export-attestation-bundle-v1.tgz");
var targetBytes = File.ReadAllBytes(targetFile);
Assert.Equal(sourceBytes, targetBytes);
}
[Fact]
public void DistributeToMirror_ReturnsCorrectFileCount()
{
var sourceKit = SetupSourceKitWithMultipleFiles();
var mirrorBase = Path.Combine(_tempDir, "mirror");
var kitVersion = "v1";
var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion);
Assert.True(result.Success);
Assert.True(result.CopiedFileCount >= 3); // At least 3 files
}
[Fact]
public void DistributeToMirror_SourceNotFound_ReturnsFailed()
{
var mirrorBase = Path.Combine(_tempDir, "mirror");
var kitVersion = "v1";
var result = _distributor.DistributeToMirror("/nonexistent/path", mirrorBase, kitVersion);
Assert.False(result.Success);
Assert.Contains("not found", result.ErrorMessage);
}
[Fact]
public void VerifyDistribution_MatchingKits_ReturnsSuccess()
{
var sourceKit = SetupSourceKitWithAttestation();
var mirrorBase = Path.Combine(_tempDir, "mirror");
var kitVersion = "v1";
var distResult = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion);
Assert.True(distResult.Success);
var verifyResult = _distributor.VerifyDistribution(sourceKit, distResult.TargetPath!);
Assert.True(verifyResult.Success);
Assert.Empty(verifyResult.Mismatches);
}
[Fact]
public void VerifyDistribution_MissingFile_ReportsError()
{
var sourceKit = SetupSourceKitWithAttestation();
var targetKit = Path.Combine(_tempDir, "target-incomplete");
Directory.CreateDirectory(targetKit);
            // Leave the target directory empty so every source file is reported as missing.
var result = _distributor.VerifyDistribution(sourceKit, targetKit);
Assert.False(result.Success);
Assert.NotEmpty(result.Mismatches);
}
[Fact]
public void VerifyDistribution_ModifiedFile_ReportsHashMismatch()
{
var sourceKit = SetupSourceKitWithAttestation();
var mirrorBase = Path.Combine(_tempDir, "mirror");
var kitVersion = "v1";
var distResult = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion);
Assert.True(distResult.Success);
// Modify a file in target
var targetFile = Path.Combine(distResult.TargetPath!, "attestations", "export-attestation-bundle-v1.tgz");
File.WriteAllText(targetFile, "modified content");
var verifyResult = _distributor.VerifyDistribution(sourceKit, distResult.TargetPath!);
Assert.False(verifyResult.Success);
Assert.Contains(verifyResult.Mismatches, m => m.Contains("Hash mismatch"));
}
[Fact]
public void DistributeToMirror_ManifestHasCorrectVersion()
{
var sourceKit = SetupSourceKit();
var mirrorBase = Path.Combine(_tempDir, "mirror");
var kitVersion = "v2.0.0";
var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion);
var manifestJson = File.ReadAllText(result.ManifestPath!);
var manifest = JsonSerializer.Deserialize<JsonElement>(manifestJson);
Assert.Equal("offline-kit/v1", manifest.GetProperty("version").GetString());
Assert.Equal(kitVersion, manifest.GetProperty("kitVersion").GetString());
}
[Fact]
public void DistributeToMirror_MirrorBundleEntry_HasCorrectPaths()
{
var sourceKit = SetupSourceKitWithMirror();
var mirrorBase = Path.Combine(_tempDir, "mirror");
var kitVersion = "v1";
var result = _distributor.DistributeToMirror(sourceKit, mirrorBase, kitVersion);
var manifestJson = File.ReadAllText(result.ManifestPath!);
var manifest = JsonSerializer.Deserialize<JsonElement>(manifestJson);
var entries = manifest.GetProperty("entries").EnumerateArray().ToList();
var mirrorEntry = entries.FirstOrDefault(e =>
e.GetProperty("kind").GetString() == "mirror-bundle");
Assert.NotEqual(default, mirrorEntry);
Assert.Equal("mirrors/export-mirror-bundle-v1.tgz", mirrorEntry.GetProperty("artifact").GetString());
}
private string SetupSourceKit()
{
var kitPath = Path.Combine(_tempDir, $"source-kit-{Guid.NewGuid():N}");
Directory.CreateDirectory(kitPath);
File.WriteAllText(Path.Combine(kitPath, "manifest.json"), "{}");
return kitPath;
}
private string SetupSourceKitWithAttestation()
{
var kitPath = Path.Combine(_tempDir, $"source-kit-{Guid.NewGuid():N}");
Directory.CreateDirectory(Path.Combine(kitPath, "attestations"));
Directory.CreateDirectory(Path.Combine(kitPath, "checksums", "attestations"));
File.WriteAllBytes(
Path.Combine(kitPath, "attestations", "export-attestation-bundle-v1.tgz"),
Encoding.UTF8.GetBytes("test-attestation-bundle"));
File.WriteAllText(
Path.Combine(kitPath, "checksums", "attestations", "export-attestation-bundle-v1.tgz.sha256"),
"abc123 export-attestation-bundle-v1.tgz");
File.WriteAllText(Path.Combine(kitPath, "manifest.json"), "{}");
return kitPath;
}
private string SetupSourceKitWithMirror()
{
var kitPath = SetupSourceKitWithAttestation();
Directory.CreateDirectory(Path.Combine(kitPath, "mirrors"));
Directory.CreateDirectory(Path.Combine(kitPath, "checksums", "mirrors"));
File.WriteAllBytes(
Path.Combine(kitPath, "mirrors", "export-mirror-bundle-v1.tgz"),
Encoding.UTF8.GetBytes("test-mirror-bundle"));
File.WriteAllText(
Path.Combine(kitPath, "checksums", "mirrors", "export-mirror-bundle-v1.tgz.sha256"),
"def456 export-mirror-bundle-v1.tgz");
return kitPath;
}
private string SetupSourceKitWithMultipleFiles()
{
var kitPath = SetupSourceKitWithAttestation();
// Add bootstrap
Directory.CreateDirectory(Path.Combine(kitPath, "bootstrap"));
Directory.CreateDirectory(Path.Combine(kitPath, "checksums", "bootstrap"));
File.WriteAllBytes(
Path.Combine(kitPath, "bootstrap", "export-bootstrap-pack-v1.tgz"),
Encoding.UTF8.GetBytes("test-bootstrap"));
File.WriteAllText(
Path.Combine(kitPath, "checksums", "bootstrap", "export-bootstrap-pack-v1.tgz.sha256"),
"ghi789 export-bootstrap-pack-v1.tgz");
return kitPath;
}
}

View File

@@ -0,0 +1,326 @@
using System.Text;
using System.Text.Json;
using StellaOps.ExportCenter.Core.OfflineKit;
using Xunit;
namespace StellaOps.ExportCenter.Tests;
public sealed class OfflineKitPackagerTests : IDisposable
{
private readonly string _tempDir;
private readonly FakeTimeProvider _timeProvider;
private readonly FakeCryptoHash _cryptoHash;
private readonly OfflineKitPackager _packager;
public OfflineKitPackagerTests()
{
_tempDir = Path.Combine(Path.GetTempPath(), $"offline-kit-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
_timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 15, 12, 0, 0, TimeSpan.Zero));
_cryptoHash = new FakeCryptoHash();
_packager = new OfflineKitPackager(_cryptoHash, _timeProvider);
}
public void Dispose()
{
if (Directory.Exists(_tempDir))
{
Directory.Delete(_tempDir, recursive: true);
}
}
[Fact]
public void AddAttestationBundle_CreatesArtifactAndChecksum()
{
var request = CreateTestAttestationRequest();
var result = _packager.AddAttestationBundle(_tempDir, request);
Assert.True(result.Success);
Assert.True(File.Exists(Path.Combine(_tempDir, result.ArtifactPath)));
Assert.True(File.Exists(Path.Combine(_tempDir, result.ChecksumPath)));
}
[Fact]
public void AddAttestationBundle_PreservesBytesExactly()
{
var originalBytes = Encoding.UTF8.GetBytes("test-attestation-bundle-content");
var request = new OfflineKitAttestationRequest(
KitId: "kit-001",
ExportId: Guid.NewGuid().ToString(),
AttestationId: Guid.NewGuid().ToString(),
RootHash: "abc123",
BundleBytes: originalBytes,
CreatedAt: _timeProvider.GetUtcNow());
var result = _packager.AddAttestationBundle(_tempDir, request);
var writtenBytes = File.ReadAllBytes(Path.Combine(_tempDir, result.ArtifactPath));
Assert.Equal(originalBytes, writtenBytes);
}
[Fact]
public void AddAttestationBundle_ChecksumFileContainsCorrectFormat()
{
var request = CreateTestAttestationRequest();
var result = _packager.AddAttestationBundle(_tempDir, request);
var checksumContent = File.ReadAllText(Path.Combine(_tempDir, result.ChecksumPath));
Assert.Contains("export-attestation-bundle-v1.tgz", checksumContent);
Assert.Contains(result.Sha256Hash, checksumContent);
        Assert.Contains("  ", checksumContent); // Two spaces between hash and filename (sha256sum format)
}
[Fact]
public void AddAttestationBundle_RejectsOverwrite()
{
var request = CreateTestAttestationRequest();
// First write succeeds
var result1 = _packager.AddAttestationBundle(_tempDir, request);
Assert.True(result1.Success);
// Second write fails (immutability)
var result2 = _packager.AddAttestationBundle(_tempDir, request);
Assert.False(result2.Success);
Assert.Contains("immutable", result2.ErrorMessage, StringComparison.OrdinalIgnoreCase);
}
[Fact]
public void AddMirrorBundle_CreatesArtifactAndChecksum()
{
var request = CreateTestMirrorRequest();
var result = _packager.AddMirrorBundle(_tempDir, request);
Assert.True(result.Success);
Assert.True(File.Exists(Path.Combine(_tempDir, result.ArtifactPath)));
Assert.True(File.Exists(Path.Combine(_tempDir, result.ChecksumPath)));
}
[Fact]
public void AddBootstrapPack_CreatesArtifactAndChecksum()
{
var request = CreateTestBootstrapRequest();
var result = _packager.AddBootstrapPack(_tempDir, request);
Assert.True(result.Success);
Assert.True(File.Exists(Path.Combine(_tempDir, result.ArtifactPath)));
Assert.True(File.Exists(Path.Combine(_tempDir, result.ChecksumPath)));
}
[Fact]
public void CreateAttestationEntry_HasCorrectKind()
{
var request = CreateTestAttestationRequest();
var entry = _packager.CreateAttestationEntry(request, "sha256hash");
Assert.Equal("attestation-export", entry.Kind);
}
[Fact]
public void CreateAttestationEntry_HasCorrectPaths()
{
var request = CreateTestAttestationRequest();
var entry = _packager.CreateAttestationEntry(request, "sha256hash");
Assert.Equal("attestations/export-attestation-bundle-v1.tgz", entry.Artifact);
Assert.Equal("checksums/attestations/export-attestation-bundle-v1.tgz.sha256", entry.Checksum);
}
[Fact]
public void CreateAttestationEntry_FormatsRootHashWithPrefix()
{
var request = new OfflineKitAttestationRequest(
KitId: "kit-001",
ExportId: Guid.NewGuid().ToString(),
AttestationId: Guid.NewGuid().ToString(),
RootHash: "abc123def456",
BundleBytes: new byte[] { 1, 2, 3 },
CreatedAt: _timeProvider.GetUtcNow());
var entry = _packager.CreateAttestationEntry(request, "sha256hash");
Assert.Equal("sha256:abc123def456", entry.RootHash);
}
[Fact]
public void CreateMirrorEntry_HasCorrectKind()
{
var request = CreateTestMirrorRequest();
var entry = _packager.CreateMirrorEntry(request, "sha256hash");
Assert.Equal("mirror-bundle", entry.Kind);
}
[Fact]
public void CreateBootstrapEntry_HasCorrectKind()
{
var request = CreateTestBootstrapRequest();
var entry = _packager.CreateBootstrapEntry(request, "sha256hash");
Assert.Equal("bootstrap-pack", entry.Kind);
}
[Fact]
public void WriteManifest_CreatesManifestFile()
{
var kitId = "kit-" + Guid.NewGuid().ToString("N");
var entries = new List<object>
{
_packager.CreateAttestationEntry(CreateTestAttestationRequest(), "hash1")
};
_packager.WriteManifest(_tempDir, kitId, entries);
Assert.True(File.Exists(Path.Combine(_tempDir, "manifest.json")));
}
[Fact]
public void WriteManifest_ContainsCorrectVersion()
{
var kitId = "kit-" + Guid.NewGuid().ToString("N");
var entries = new List<object>();
_packager.WriteManifest(_tempDir, kitId, entries);
var manifestJson = File.ReadAllText(Path.Combine(_tempDir, "manifest.json"));
var manifest = JsonSerializer.Deserialize<JsonElement>(manifestJson);
Assert.Equal("offline-kit/v1", manifest.GetProperty("version").GetString());
}
[Fact]
public void WriteManifest_ContainsKitId()
{
var kitId = "test-kit-123";
var entries = new List<object>();
_packager.WriteManifest(_tempDir, kitId, entries);
var manifestJson = File.ReadAllText(Path.Combine(_tempDir, "manifest.json"));
var manifest = JsonSerializer.Deserialize<JsonElement>(manifestJson);
Assert.Equal(kitId, manifest.GetProperty("kitId").GetString());
}
[Fact]
public void WriteManifest_RejectsOverwrite()
{
var kitId = "kit-001";
var entries = new List<object>();
// First write succeeds
_packager.WriteManifest(_tempDir, kitId, entries);
// Second write fails (immutability)
Assert.Throws<InvalidOperationException>(() =>
_packager.WriteManifest(_tempDir, kitId, entries));
}
[Fact]
public void GenerateChecksumFileContent_HasCorrectFormat()
{
var content = OfflineKitPackager.GenerateChecksumFileContent("abc123def456", "test.tgz");
Assert.Equal("abc123def456 test.tgz", content);
}
[Fact]
public void VerifyBundleHash_ReturnsTrueForMatchingHash()
{
var bundleBytes = Encoding.UTF8.GetBytes("test-content");
var expectedHash = _cryptoHash.ComputeHashHexForPurpose(bundleBytes, StellaOps.Cryptography.HashPurpose.Content);
var result = _packager.VerifyBundleHash(bundleBytes, expectedHash);
Assert.True(result);
}
[Fact]
public void VerifyBundleHash_ReturnsFalseForMismatchedHash()
{
var bundleBytes = Encoding.UTF8.GetBytes("test-content");
var result = _packager.VerifyBundleHash(bundleBytes, "wrong-hash");
Assert.False(result);
}
[Fact]
public void AddAttestationBundle_ThrowsForNullRequest()
{
Assert.Throws<ArgumentNullException>(() =>
_packager.AddAttestationBundle(_tempDir, null!));
}
[Fact]
public void AddAttestationBundle_ThrowsForEmptyOutputDirectory()
{
var request = CreateTestAttestationRequest();
Assert.Throws<ArgumentException>(() =>
_packager.AddAttestationBundle(string.Empty, request));
}
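    // Quick reference for the offline-kit layout exercised below:
    //   attestations/export-attestation-bundle-v1.tgz
    //   mirrors/export-mirror-bundle-v1.tgz
    //   bootstrap/export-bootstrap-pack-v1.tgz
    //   checksums/{attestations,mirrors,bootstrap}/<artifact>.sha256
    //   manifest.json (offline-kit/v1)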
[Fact]
public void DirectoryStructure_FollowsOfflineKitLayout()
{
var attestationRequest = CreateTestAttestationRequest();
var mirrorRequest = CreateTestMirrorRequest();
var bootstrapRequest = CreateTestBootstrapRequest();
var attestResult = _packager.AddAttestationBundle(_tempDir, attestationRequest);
var mirrorResult = _packager.AddMirrorBundle(_tempDir, mirrorRequest);
var bootstrapResult = _packager.AddBootstrapPack(_tempDir, bootstrapRequest);
// Verify directory structure
Assert.True(Directory.Exists(Path.Combine(_tempDir, "attestations")));
Assert.True(Directory.Exists(Path.Combine(_tempDir, "mirrors")));
Assert.True(Directory.Exists(Path.Combine(_tempDir, "bootstrap")));
Assert.True(Directory.Exists(Path.Combine(_tempDir, "checksums", "attestations")));
Assert.True(Directory.Exists(Path.Combine(_tempDir, "checksums", "mirrors")));
Assert.True(Directory.Exists(Path.Combine(_tempDir, "checksums", "bootstrap")));
}
private OfflineKitAttestationRequest CreateTestAttestationRequest()
{
return new OfflineKitAttestationRequest(
KitId: "kit-001",
ExportId: Guid.NewGuid().ToString(),
AttestationId: Guid.NewGuid().ToString(),
RootHash: "test-root-hash",
BundleBytes: Encoding.UTF8.GetBytes("test-attestation-bundle"),
CreatedAt: _timeProvider.GetUtcNow());
}
private OfflineKitMirrorRequest CreateTestMirrorRequest()
{
return new OfflineKitMirrorRequest(
KitId: "kit-001",
ExportId: Guid.NewGuid().ToString(),
BundleId: Guid.NewGuid().ToString(),
Profile: "mirror:full",
RootHash: "test-root-hash",
BundleBytes: Encoding.UTF8.GetBytes("test-mirror-bundle"),
CreatedAt: _timeProvider.GetUtcNow());
}
private OfflineKitBootstrapRequest CreateTestBootstrapRequest()
{
return new OfflineKitBootstrapRequest(
KitId: "kit-001",
ExportId: Guid.NewGuid().ToString(),
Version: "v1.0.0",
RootHash: "test-root-hash",
BundleBytes: Encoding.UTF8.GetBytes("test-bootstrap-pack"),
CreatedAt: _timeProvider.GetUtcNow());
}
}

View File

@@ -0,0 +1,185 @@
using System.Net;
using System.Text.Json;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Http.HttpResults;
using Xunit;
namespace StellaOps.ExportCenter.Tests;
public sealed class OpenApiDiscoveryEndpointsTests
{
[Fact]
public void DiscoveryResponse_ContainsRequiredFields()
{
var response = new WebService.OpenApiDiscoveryResponse
{
Service = "export-center",
Version = "1.0.0",
SpecVersion = "3.0.3",
Format = "application/yaml",
Url = "/openapi/export-center.yaml",
ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope"
};
Assert.Equal("export-center", response.Service);
Assert.Equal("1.0.0", response.Version);
Assert.Equal("3.0.3", response.SpecVersion);
Assert.Equal("application/yaml", response.Format);
Assert.Equal("/openapi/export-center.yaml", response.Url);
Assert.Equal("#/components/schemas/ErrorEnvelope", response.ErrorEnvelopeSchema);
}
[Fact]
public void DiscoveryResponse_SupportedProfilesCanBeNull()
{
var response = new WebService.OpenApiDiscoveryResponse
{
Service = "export-center",
Version = "1.0.0",
SpecVersion = "3.0.3",
Format = "application/yaml",
Url = "/openapi/export-center.yaml",
ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope",
ProfilesSupported = null
};
Assert.Null(response.ProfilesSupported);
}
[Fact]
public void DiscoveryResponse_SupportedProfiles_ContainsExpectedValues()
{
var profiles = new[] { "attestation", "mirror", "bootstrap", "airgap-evidence" };
var response = new WebService.OpenApiDiscoveryResponse
{
Service = "export-center",
Version = "1.0.0",
SpecVersion = "3.0.3",
Format = "application/yaml",
Url = "/openapi/export-center.yaml",
ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope",
ProfilesSupported = profiles
};
Assert.NotNull(response.ProfilesSupported);
Assert.Contains("attestation", response.ProfilesSupported);
Assert.Contains("mirror", response.ProfilesSupported);
Assert.Contains("bootstrap", response.ProfilesSupported);
Assert.Contains("airgap-evidence", response.ProfilesSupported);
}
[Fact]
public void DiscoveryResponse_SerializesToCamelCase()
{
var response = new WebService.OpenApiDiscoveryResponse
{
Service = "export-center",
Version = "1.0.0",
SpecVersion = "3.0.3",
Format = "application/yaml",
Url = "/openapi/export-center.yaml",
ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope",
GeneratedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero)
};
var options = new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase };
var json = JsonSerializer.Serialize(response, options);
Assert.Contains("\"service\":", json);
Assert.Contains("\"version\":", json);
Assert.Contains("\"specVersion\":", json);
Assert.Contains("\"format\":", json);
Assert.Contains("\"url\":", json);
Assert.Contains("\"errorEnvelopeSchema\":", json);
Assert.Contains("\"generatedAt\":", json);
}
[Fact]
public void DiscoveryResponse_JsonUrlIsOptional()
{
var response = new WebService.OpenApiDiscoveryResponse
{
Service = "export-center",
Version = "1.0.0",
SpecVersion = "3.0.3",
Format = "application/yaml",
Url = "/openapi/export-center.yaml",
ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope",
JsonUrl = "/openapi/export-center.json"
};
Assert.Equal("/openapi/export-center.json", response.JsonUrl);
}
[Fact]
public void DiscoveryResponse_ChecksumSha256IsOptional()
{
var response = new WebService.OpenApiDiscoveryResponse
{
Service = "export-center",
Version = "1.0.0",
SpecVersion = "3.0.3",
Format = "application/yaml",
Url = "/openapi/export-center.yaml",
ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope",
ChecksumSha256 = "abc123"
};
Assert.Equal("abc123", response.ChecksumSha256);
}
[Fact]
public void MinimalSpec_ContainsOpenApi303Header()
{
        // Placeholder until the embedded spec document is exposed to tests: records the
        // OpenAPI 3.0.3 header line the minimal spec is expected to start with.
        var minimalSpecCheck = "openapi: 3.0.3";
        Assert.NotEmpty(minimalSpecCheck);
}
[Fact]
public void DiscoveryResponse_GeneratedAtIsDateTimeOffset()
{
var generatedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
var response = new WebService.OpenApiDiscoveryResponse
{
Service = "export-center",
Version = "1.0.0",
SpecVersion = "3.0.3",
Format = "application/yaml",
Url = "/openapi/export-center.yaml",
ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope",
GeneratedAt = generatedAt
};
Assert.Equal(generatedAt, response.GeneratedAt);
}
[Fact]
public void DiscoveryResponse_CanSerializeToJsonWithNulls()
{
var response = new WebService.OpenApiDiscoveryResponse
{
Service = "export-center",
Version = "1.0.0",
SpecVersion = "3.0.3",
Format = "application/yaml",
Url = "/openapi/export-center.yaml",
ErrorEnvelopeSchema = "#/components/schemas/ErrorEnvelope",
JsonUrl = null,
ProfilesSupported = null,
ChecksumSha256 = null
};
var options = new JsonSerializerOptions
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
};
var json = JsonSerializer.Serialize(response, options);
// Should NOT contain null fields
Assert.DoesNotContain("\"jsonUrl\":", json);
Assert.DoesNotContain("\"profilesSupported\":", json);
Assert.DoesNotContain("\"checksumSha256\":", json);
}
}

View File

@@ -0,0 +1,386 @@
using System.Formats.Tar;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using StellaOps.Cryptography;
using StellaOps.ExportCenter.Core.PortableEvidence;
using Xunit;
namespace StellaOps.ExportCenter.Tests;
public sealed class PortableEvidenceExportBuilderTests : IDisposable
{
private readonly string _tempDir;
private readonly PortableEvidenceExportBuilder _builder;
private readonly ICryptoHash _cryptoHash;
public PortableEvidenceExportBuilderTests()
{
_tempDir = Path.Combine(Path.GetTempPath(), $"portable-evidence-test-{Guid.NewGuid():N}");
Directory.CreateDirectory(_tempDir);
_cryptoHash = new DefaultCryptoHash();
_builder = new PortableEvidenceExportBuilder(_cryptoHash);
}
public void Dispose()
{
if (Directory.Exists(_tempDir))
{
Directory.Delete(_tempDir, recursive: true);
}
}
[Fact]
public void Build_ProducesValidExport()
{
var portableBundlePath = CreateTestPortableBundle();
var request = new PortableEvidenceExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
portableBundlePath);
var result = _builder.Build(request);
Assert.NotNull(result);
Assert.NotNull(result.ExportDocument);
Assert.NotEmpty(result.ExportDocumentJson);
Assert.NotEmpty(result.RootHash);
Assert.NotEmpty(result.PortableBundleSha256);
Assert.True(result.ExportStream.Length > 0);
}
[Fact]
public void Build_ExportDocumentContainsCorrectMetadata()
{
var exportId = Guid.NewGuid();
var bundleId = Guid.NewGuid();
var tenantId = Guid.NewGuid();
var portableBundlePath = CreateTestPortableBundle();
var sourceUri = "https://evidencelocker.example.com/v1/bundles/portable/abc123";
var request = new PortableEvidenceExportRequest(
exportId,
bundleId,
tenantId,
portableBundlePath,
sourceUri);
var result = _builder.Build(request);
Assert.Equal(exportId.ToString("D"), result.ExportDocument.ExportId);
Assert.Equal(bundleId.ToString("D"), result.ExportDocument.BundleId);
Assert.Equal(tenantId.ToString("D"), result.ExportDocument.TenantId);
Assert.Equal(sourceUri, result.ExportDocument.SourceUri);
Assert.Equal("v1", result.ExportDocument.PortableVersion);
Assert.NotEmpty(result.ExportDocument.PortableBundleSha256);
Assert.NotEmpty(result.ExportDocument.RootHash);
}
[Fact]
public void Build_ProducesDeterministicOutput()
{
var exportId = new Guid("11111111-2222-3333-4444-555555555555");
var bundleId = new Guid("aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee");
var tenantId = new Guid("ffffffff-1111-2222-3333-444444444444");
var portableBundlePath = CreateTestPortableBundle("deterministic-content");
var request = new PortableEvidenceExportRequest(
exportId,
bundleId,
tenantId,
portableBundlePath);
var result1 = _builder.Build(request);
var result2 = _builder.Build(request);
Assert.Equal(result1.RootHash, result2.RootHash);
Assert.Equal(result1.PortableBundleSha256, result2.PortableBundleSha256);
var bytes1 = result1.ExportStream.ToArray();
var bytes2 = result2.ExportStream.ToArray();
Assert.Equal(bytes1, bytes2);
}
[Fact]
public void Build_ArchiveContainsExpectedFiles()
{
var portableBundlePath = CreateTestPortableBundle();
var request = new PortableEvidenceExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
portableBundlePath);
var result = _builder.Build(request);
var fileNames = ExtractFileNames(result.ExportStream);
Assert.Contains("export.json", fileNames);
Assert.Contains("portable-bundle-v1.tgz", fileNames);
Assert.Contains("checksums.txt", fileNames);
Assert.Contains("verify-export.sh", fileNames);
Assert.Contains("README.md", fileNames);
}
[Fact]
public void Build_TarEntriesHaveDeterministicMetadata()
{
var portableBundlePath = CreateTestPortableBundle();
var request = new PortableEvidenceExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
portableBundlePath);
var result = _builder.Build(request);
var entries = ExtractTarEntryMetadata(result.ExportStream);
var expectedTimestamp = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
foreach (var entry in entries)
{
Assert.Equal(0, entry.Uid);
Assert.Equal(0, entry.Gid);
Assert.Equal(string.Empty, entry.UserName);
Assert.Equal(string.Empty, entry.GroupName);
Assert.Equal(expectedTimestamp, entry.ModificationTime);
}
}
[Fact]
public void Build_PortableBundleIsIncludedUnmodified()
{
var originalContent = "original-portable-bundle-content-bytes";
var portableBundlePath = CreateTestPortableBundle(originalContent);
var request = new PortableEvidenceExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
portableBundlePath);
var result = _builder.Build(request);
var extractedContent = ExtractFileContent(result.ExportStream, "portable-bundle-v1.tgz");
Assert.Equal(originalContent, extractedContent);
}
[Fact]
public void Build_ChecksumsContainsAllFiles()
{
var portableBundlePath = CreateTestPortableBundle();
var request = new PortableEvidenceExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
portableBundlePath);
var result = _builder.Build(request);
var checksums = ExtractFileContent(result.ExportStream, "checksums.txt");
Assert.Contains("export.json", checksums);
Assert.Contains("portable-bundle-v1.tgz", checksums);
}
[Fact]
public void Build_ReadmeContainsBundleInfo()
{
var bundleId = Guid.NewGuid();
var portableBundlePath = CreateTestPortableBundle();
var request = new PortableEvidenceExportRequest(
Guid.NewGuid(),
bundleId,
Guid.NewGuid(),
portableBundlePath);
var result = _builder.Build(request);
var readme = ExtractFileContent(result.ExportStream, "README.md");
Assert.Contains(bundleId.ToString("D"), readme);
Assert.Contains("Portable Evidence Export", readme);
Assert.Contains("stella evidence verify", readme);
}
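    // The verify script ships inside air-gapped exports, so it must rely on local checksum tools
    // (sha256sum/shasum) and never reach for the network.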
[Fact]
public void Build_VerifyScriptIsPosixCompliant()
{
var portableBundlePath = CreateTestPortableBundle();
var request = new PortableEvidenceExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
portableBundlePath);
var result = _builder.Build(request);
var script = ExtractFileContent(result.ExportStream, "verify-export.sh");
Assert.StartsWith("#!/usr/bin/env sh", script);
Assert.Contains("sha256sum", script);
Assert.Contains("shasum", script);
Assert.DoesNotContain("curl", script);
Assert.DoesNotContain("wget", script);
}
[Fact]
public void Build_VerifyScriptHasExecutePermission()
{
var portableBundlePath = CreateTestPortableBundle();
var request = new PortableEvidenceExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
portableBundlePath);
var result = _builder.Build(request);
var entries = ExtractTarEntryMetadata(result.ExportStream);
var scriptEntry = entries.FirstOrDefault(e => e.Name == "verify-export.sh");
Assert.NotNull(scriptEntry);
Assert.True(scriptEntry.Mode.HasFlag(UnixFileMode.UserExecute));
}
[Fact]
public void Build_WithMetadata_IncludesInExportDocument()
{
var portableBundlePath = CreateTestPortableBundle();
var metadata = new Dictionary<string, string>
{
["environment"] = "production",
["scannerVersion"] = "v3.0.0"
};
var request = new PortableEvidenceExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
portableBundlePath,
Metadata: metadata);
var result = _builder.Build(request);
Assert.NotNull(result.ExportDocument.Metadata);
Assert.Equal("production", result.ExportDocument.Metadata["environment"]);
Assert.Equal("v3.0.0", result.ExportDocument.Metadata["scannerVersion"]);
}
[Fact]
public void Build_ThrowsForMissingPortableBundle()
{
var request = new PortableEvidenceExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
"/nonexistent/portable-bundle.tgz");
Assert.Throws<FileNotFoundException>(() => _builder.Build(request));
}
[Fact]
public void Build_ThrowsForEmptyBundleId()
{
var portableBundlePath = CreateTestPortableBundle();
var request = new PortableEvidenceExportRequest(
Guid.NewGuid(),
Guid.Empty,
Guid.NewGuid(),
portableBundlePath);
Assert.Throws<ArgumentException>(() => _builder.Build(request));
}
[Fact]
public void Build_VersionIsCorrect()
{
var portableBundlePath = CreateTestPortableBundle();
var request = new PortableEvidenceExportRequest(
Guid.NewGuid(),
Guid.NewGuid(),
Guid.NewGuid(),
portableBundlePath);
var result = _builder.Build(request);
Assert.Equal("portable-evidence/v1", result.ExportDocument.Version);
}
private string CreateTestPortableBundle(string? content = null)
{
var path = Path.Combine(_tempDir, $"portable-bundle-{Guid.NewGuid():N}.tgz");
File.WriteAllText(path, content ?? "test-portable-bundle-content");
return path;
}
private static List<string> ExtractFileNames(MemoryStream exportStream)
{
exportStream.Position = 0;
var fileNames = new List<string>();
using var gzip = new GZipStream(exportStream, CompressionMode.Decompress, leaveOpen: true);
using var tar = new TarReader(gzip, leaveOpen: true);
TarEntry? entry;
while ((entry = tar.GetNextEntry()) is not null)
{
fileNames.Add(entry.Name);
}
exportStream.Position = 0;
return fileNames;
}
private static string ExtractFileContent(MemoryStream exportStream, string fileName)
{
exportStream.Position = 0;
using var gzip = new GZipStream(exportStream, CompressionMode.Decompress, leaveOpen: true);
using var tar = new TarReader(gzip, leaveOpen: true);
TarEntry? entry;
while ((entry = tar.GetNextEntry()) is not null)
{
if (entry.Name == fileName && entry.DataStream is not null)
{
using var reader = new StreamReader(entry.DataStream);
var content = reader.ReadToEnd();
exportStream.Position = 0;
return content;
}
}
exportStream.Position = 0;
throw new FileNotFoundException($"File '{fileName}' not found in archive.");
}
private static List<TarEntryMetadataWithName> ExtractTarEntryMetadata(MemoryStream exportStream)
{
exportStream.Position = 0;
var entries = new List<TarEntryMetadataWithName>();
using var gzip = new GZipStream(exportStream, CompressionMode.Decompress, leaveOpen: true);
using var tar = new TarReader(gzip, leaveOpen: true);
TarEntry? entry;
while ((entry = tar.GetNextEntry()) is not null)
{
            var posixEntry = entry as PosixTarEntry;
            entries.Add(new TarEntryMetadataWithName(
                entry.Name,
                entry.Uid,
                entry.Gid,
                posixEntry?.UserName ?? string.Empty,
                posixEntry?.GroupName ?? string.Empty,
                entry.ModificationTime,
                entry.Mode));
}
exportStream.Position = 0;
return entries;
}
private sealed record TarEntryMetadataWithName(
string Name,
int Uid,
int Gid,
string UserName,
string GroupName,
DateTimeOffset ModificationTime,
UnixFileMode Mode);
}

View File

@@ -112,21 +112,23 @@
<ItemGroup>
<ProjectReference Include="..\StellaOps.ExportCenter.Core\StellaOps.ExportCenter.Core.csproj"/>
<ProjectReference Include="..\StellaOps.ExportCenter.Infrastructure\StellaOps.ExportCenter.Infrastructure.csproj"/>
<ProjectReference Include="..\StellaOps.ExportCenter.WebService\StellaOps.ExportCenter.WebService.csproj" />
<ProjectReference Include="..\..\StellaOps.ExportCenter.RiskBundles\StellaOps.ExportCenter.RiskBundles.csproj" />
<ProjectReference Include="..\..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
</ItemGroup>

View File

@@ -0,0 +1,147 @@
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.ServerIntegration;
namespace StellaOps.ExportCenter.WebService.Attestation;
/// <summary>
/// Extension methods for mapping attestation endpoints.
/// </summary>
public static class AttestationEndpoints
{
/// <summary>
/// Maps attestation endpoints to the application.
/// </summary>
public static WebApplication MapAttestationEndpoints(this WebApplication app)
{
var group = app.MapGroup("/v1/exports")
.WithTags("Attestations")
.RequireAuthorization(StellaOpsResourceServerPolicies.ExportViewer);
// GET /v1/exports/{id}/attestation - Get attestation by export run ID
group.MapGet("/{id}/attestation", GetAttestationByExportRunAsync)
.WithName("GetExportAttestation")
.WithSummary("Get attestation for an export run")
.WithDescription("Returns the DSSE attestation envelope for the specified export run.")
.Produces<ExportAttestationResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status404NotFound);
// GET /v1/exports/attestations/{attestationId} - Get attestation by ID
group.MapGet("/attestations/{attestationId}", GetAttestationByIdAsync)
.WithName("GetAttestationById")
.WithSummary("Get attestation by ID")
.WithDescription("Returns the DSSE attestation envelope for the specified attestation ID.")
.Produces<ExportAttestationResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status404NotFound);
// POST /v1/exports/{id}/attestation/verify - Verify attestation
group.MapPost("/{id}/attestation/verify", VerifyAttestationAsync)
.WithName("VerifyExportAttestation")
.WithSummary("Verify attestation signature")
.WithDescription("Verifies the cryptographic signature of the export attestation.")
.Produces<AttestationVerifyResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status404NotFound);
return app;
}
private static async Task<Results<Ok<ExportAttestationResponse>, NotFound>> GetAttestationByExportRunAsync(
string id,
[FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
[FromServices] IExportAttestationService attestationService,
HttpContext httpContext,
CancellationToken cancellationToken)
{
var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
if (string.IsNullOrWhiteSpace(tenantId))
{
return TypedResults.NotFound();
}
var result = await attestationService.GetAttestationByExportRunAsync(id, tenantId, cancellationToken);
if (result is null)
{
return TypedResults.NotFound();
}
return TypedResults.Ok(result);
}
private static async Task<Results<Ok<ExportAttestationResponse>, NotFound>> GetAttestationByIdAsync(
string attestationId,
[FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
[FromServices] IExportAttestationService attestationService,
HttpContext httpContext,
CancellationToken cancellationToken)
{
var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
if (string.IsNullOrWhiteSpace(tenantId))
{
return TypedResults.NotFound();
}
var result = await attestationService.GetAttestationAsync(attestationId, tenantId, cancellationToken);
if (result is null)
{
return TypedResults.NotFound();
}
return TypedResults.Ok(result);
}
private static async Task<Results<Ok<AttestationVerifyResponse>, NotFound>> VerifyAttestationAsync(
string id,
[FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
        [FromServices] IExportAttestationService attestationService,
        [FromServices] TimeProvider timeProvider,
        HttpContext httpContext,
        CancellationToken cancellationToken)
{
var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
if (string.IsNullOrWhiteSpace(tenantId))
{
return TypedResults.NotFound();
}
var attestation = await attestationService.GetAttestationByExportRunAsync(id, tenantId, cancellationToken);
if (attestation is null)
{
return TypedResults.NotFound();
}
var isValid = await attestationService.VerifyAttestationAsync(
attestation.AttestationId, tenantId, cancellationToken);
return TypedResults.Ok(new AttestationVerifyResponse
{
AttestationId = attestation.AttestationId,
IsValid = isValid,
            VerifiedAt = timeProvider.GetUtcNow()
});
}
private static string? ResolveTenantId(string? header, HttpContext httpContext)
{
if (!string.IsNullOrWhiteSpace(header))
{
return header;
}
// Try to get from claims
var tenantClaim = httpContext.User.FindFirst("tenant_id")
?? httpContext.User.FindFirst("tid");
return tenantClaim?.Value;
}
}
/// <summary>
/// Response for attestation verification.
/// </summary>
public sealed record AttestationVerifyResponse
{
public required string AttestationId { get; init; }
public required bool IsValid { get; init; }
public required DateTimeOffset VerifiedAt { get; init; }
public string? ErrorMessage { get; init; }
}

View File

@@ -0,0 +1,50 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
namespace StellaOps.ExportCenter.WebService.Attestation;
/// <summary>
/// Extension methods for registering attestation services.
/// </summary>
public static class AttestationServiceCollectionExtensions
{
/// <summary>
/// Adds export attestation services to the service collection.
/// </summary>
/// <param name="services">The service collection.</param>
/// <param name="configureOptions">Optional configuration for attestation options.</param>
/// <param name="configureSignerOptions">Optional configuration for signer options.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddExportAttestation(
this IServiceCollection services,
Action<ExportAttestationOptions>? configureOptions = null,
Action<ExportAttestationSignerOptions>? configureSignerOptions = null)
{
ArgumentNullException.ThrowIfNull(services);
// Configure options
if (configureOptions is not null)
{
services.Configure(configureOptions);
}
if (configureSignerOptions is not null)
{
services.Configure(configureSignerOptions);
}
// Register TimeProvider if not already registered
services.TryAddSingleton(TimeProvider.System);
// Register signer
services.TryAddSingleton<IExportAttestationSigner, ExportAttestationSigner>();
// Register attestation service
services.TryAddSingleton<IExportAttestationService, ExportAttestationService>();
// Register promotion attestation assembler
services.TryAddSingleton<IPromotionAttestationAssembler, PromotionAttestationAssembler>();
return services;
}
}
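/// <summary>
/// Illustrative wiring sketch (not part of the change set): shows how the registration above is
/// expected to compose with <see cref="AttestationEndpoints.MapAttestationEndpoints"/> in a
/// minimal-hosting entry point. Relies on the Web SDK implicit usings for <c>WebApplication</c>.
/// </summary>
internal static class ExportAttestationWiringExample
{
    public static WebApplication BuildHost(string[] args)
    {
        var builder = WebApplication.CreateBuilder(args);

        // Registers signer, attestation service, promotion assembler, and TimeProvider.
        builder.Services.AddExportAttestation();

        var app = builder.Build();

        // Exposes GET /v1/exports/{id}/attestation, GET /v1/exports/attestations/{attestationId},
        // and POST /v1/exports/{id}/attestation/verify.
        app.MapAttestationEndpoints();

        return app;
    }
}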

View File

@@ -0,0 +1,192 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.WebService.Attestation;
/// <summary>
/// Export attestation payload types.
/// </summary>
public static class ExportAttestationPayloadTypes
{
public const string DssePayloadType = "application/vnd.in-toto+json";
public const string ExportBundlePredicateType = "stella.ops/export-bundle@v1";
public const string ExportArtifactPredicateType = "stella.ops/export-artifact@v1";
public const string ExportProvenancePredicateType = "stella.ops/export-provenance@v1";
}
/// <summary>
/// Request to create attestation for an export artifact.
/// </summary>
public sealed record ExportAttestationRequest
{
public required string TenantId { get; init; }
public required string ExportRunId { get; init; }
public string? ProfileId { get; init; }
public required string ArtifactDigest { get; init; }
public required string ArtifactName { get; init; }
public required string ArtifactMediaType { get; init; }
public long ArtifactSizeBytes { get; init; }
public string? BundleId { get; init; }
public string? BundleRootHash { get; init; }
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Result of attestation creation.
/// </summary>
public sealed record ExportAttestationResult
{
public bool Success { get; init; }
public string? AttestationId { get; init; }
public ExportDsseEnvelope? Envelope { get; init; }
public string? ErrorMessage { get; init; }
public static ExportAttestationResult Succeeded(string attestationId, ExportDsseEnvelope envelope) =>
new() { Success = true, AttestationId = attestationId, Envelope = envelope };
public static ExportAttestationResult Failed(string errorMessage) =>
new() { Success = false, ErrorMessage = errorMessage };
}
/// <summary>
/// DSSE envelope for export attestations.
/// </summary>
public sealed record ExportDsseEnvelope
{
[JsonPropertyName("payloadType")]
public required string PayloadType { get; init; }
[JsonPropertyName("payload")]
public required string Payload { get; init; }
[JsonPropertyName("signatures")]
public required IReadOnlyList<ExportDsseEnvelopeSignature> Signatures { get; init; }
}
/// <summary>
/// Signature within a DSSE envelope.
/// </summary>
public sealed record ExportDsseEnvelopeSignature
{
[JsonPropertyName("keyid")]
public string? KeyId { get; init; }
[JsonPropertyName("sig")]
public required string Signature { get; init; }
}
/// <summary>
/// In-toto statement for export attestations.
/// </summary>
public sealed record ExportInTotoStatement
{
[JsonPropertyName("_type")]
public string Type { get; init; } = "https://in-toto.io/Statement/v0.1";
[JsonPropertyName("predicateType")]
public required string PredicateType { get; init; }
[JsonPropertyName("subject")]
public required IReadOnlyList<ExportInTotoSubject> Subject { get; init; }
[JsonPropertyName("predicate")]
public required ExportBundlePredicate Predicate { get; init; }
}
/// <summary>
/// Subject of an in-toto statement.
/// </summary>
public sealed record ExportInTotoSubject
{
[JsonPropertyName("name")]
public required string Name { get; init; }
[JsonPropertyName("digest")]
public required IReadOnlyDictionary<string, string> Digest { get; init; }
}
/// <summary>
/// Predicate for export bundle attestation.
/// </summary>
public sealed record ExportBundlePredicate
{
[JsonPropertyName("exportRunId")]
public required string ExportRunId { get; init; }
[JsonPropertyName("tenantId")]
public required string TenantId { get; init; }
[JsonPropertyName("profileId")]
public string? ProfileId { get; init; }
[JsonPropertyName("bundleId")]
public string? BundleId { get; init; }
[JsonPropertyName("bundleRootHash")]
public string? BundleRootHash { get; init; }
[JsonPropertyName("createdAt")]
public required DateTimeOffset CreatedAt { get; init; }
[JsonPropertyName("exporter")]
public required ExportAttestationExporter Exporter { get; init; }
[JsonPropertyName("metadata")]
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Exporter information for attestation.
/// </summary>
public sealed record ExportAttestationExporter
{
[JsonPropertyName("name")]
public string Name { get; init; } = "StellaOps.ExportCenter";
[JsonPropertyName("version")]
public required string Version { get; init; }
[JsonPropertyName("buildTimestamp")]
public DateTimeOffset? BuildTimestamp { get; init; }
}
/// <summary>
/// Response DTO for attestation endpoint.
/// </summary>
public sealed record ExportAttestationResponse
{
[JsonPropertyName("attestation_id")]
public required string AttestationId { get; init; }
[JsonPropertyName("export_run_id")]
public required string ExportRunId { get; init; }
[JsonPropertyName("artifact_digest")]
public required string ArtifactDigest { get; init; }
[JsonPropertyName("created_at")]
public required DateTimeOffset CreatedAt { get; init; }
[JsonPropertyName("envelope")]
public required ExportDsseEnvelope Envelope { get; init; }
[JsonPropertyName("verification")]
public ExportAttestationVerification? Verification { get; init; }
}
/// <summary>
/// Verification information for attestation.
/// </summary>
public sealed record ExportAttestationVerification
{
[JsonPropertyName("key_id")]
public string? KeyId { get; init; }
[JsonPropertyName("algorithm")]
public string? Algorithm { get; init; }
[JsonPropertyName("provider")]
public string? Provider { get; init; }
[JsonPropertyName("public_key_pem")]
public string? PublicKeyPem { get; init; }
}
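// Illustrative serialized shape of ExportAttestationResponse (all values are placeholders; the
// payloadType string comes from ExportAttestationPayloadTypes.DssePayloadType, defined elsewhere):
//
// {
//   "attestation_id": "att-0123456789abcdef",
//   "export_run_id": "run-42",
//   "artifact_digest": "sha256:…",
//   "created_at": "2025-01-01T00:00:00+00:00",
//   "envelope": {
//     "payloadType": "<DssePayloadType>",
//     "payload": "<base64url-encoded in-toto statement>",
//     "signatures": [ { "keyid": "0123abcd…", "sig": "<base64url signature>" } ]
//   },
//   "verification": {
//     "key_id": "0123abcd…",
//     "algorithm": "ECDSA-P256-SHA256",
//     "provider": "StellaOps.ExportCenter",
//     "public_key_pem": "-----BEGIN PUBLIC KEY-----…"
//   }
// }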

View File

@@ -0,0 +1,309 @@
using System.Collections.Concurrent;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.ExportCenter.WebService.Telemetry;
namespace StellaOps.ExportCenter.WebService.Attestation;
/// <summary>
/// Service for producing DSSE attestations for export artifacts.
/// </summary>
public sealed class ExportAttestationService : IExportAttestationService
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = false
};
private static readonly string ExporterVersion = typeof(ExportAttestationService).Assembly
.GetName().Version?.ToString() ?? "1.0.0";
private readonly IExportAttestationSigner _signer;
private readonly TimeProvider _timeProvider;
private readonly ILogger<ExportAttestationService> _logger;
private readonly ExportAttestationOptions _options;
// In-memory storage for attestations (production would use persistent storage)
private readonly ConcurrentDictionary<string, StoredAttestation> _attestations = new();
private readonly ConcurrentDictionary<string, string> _runToAttestationMap = new();
public ExportAttestationService(
IExportAttestationSigner signer,
TimeProvider timeProvider,
ILogger<ExportAttestationService> logger,
IOptions<ExportAttestationOptions>? options = null)
{
_signer = signer ?? throw new ArgumentNullException(nameof(signer));
_timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value ?? ExportAttestationOptions.Default;
}
public async Task<ExportAttestationResult> CreateAttestationAsync(
ExportAttestationRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
using var activity = ExportTelemetry.ActivitySource.StartActivity("attestation.create");
activity?.SetTag("tenant_id", request.TenantId);
activity?.SetTag("export_run_id", request.ExportRunId);
try
{
var now = _timeProvider.GetUtcNow();
var attestationId = GenerateAttestationId(request);
// Build in-toto statement
var statement = BuildStatement(request, now);
// Serialize statement to compact JSON (web defaults; this is not RFC 8785 canonicalization)
var statementJson = JsonSerializer.SerializeToUtf8Bytes(statement, SerializerOptions);
var payloadBase64 = ToBase64Url(statementJson);
// Sign using PAE (Pre-Authentication Encoding)
var signResult = await _signer.SignAsync(
ExportAttestationPayloadTypes.DssePayloadType,
statementJson,
cancellationToken).ConfigureAwait(false);
if (!signResult.Success)
{
_logger.LogError(
"Failed to sign attestation for export {ExportRunId}: {Error}",
request.ExportRunId, signResult.ErrorMessage);
return ExportAttestationResult.Failed(signResult.ErrorMessage ?? "Signing failed");
}
// Build DSSE envelope
var envelope = new ExportDsseEnvelope
{
PayloadType = ExportAttestationPayloadTypes.DssePayloadType,
Payload = payloadBase64,
Signatures = signResult.Signatures.Select(s => new ExportDsseEnvelopeSignature
{
KeyId = s.KeyId,
Signature = s.Signature
}).ToList()
};
// Store attestation
var stored = new StoredAttestation(
attestationId,
request.TenantId,
request.ExportRunId,
request.ArtifactDigest,
now,
envelope,
signResult.Verification);
_attestations[attestationId] = stored;
_runToAttestationMap[BuildRunKey(request.TenantId, request.ExportRunId)] = attestationId;
_logger.LogInformation(
"Created attestation {AttestationId} for export {ExportRunId}",
attestationId, request.ExportRunId);
ExportTelemetry.ExportArtifactsTotal.Add(1,
new KeyValuePair<string, object?>("artifact_type", "attestation"),
new KeyValuePair<string, object?>("tenant_id", request.TenantId));
return ExportAttestationResult.Succeeded(attestationId, envelope);
}
catch (Exception ex)
{
_logger.LogError(ex, "Error creating attestation for export {ExportRunId}", request.ExportRunId);
return ExportAttestationResult.Failed($"Error: {ex.Message}");
}
}
public Task<ExportAttestationResponse?> GetAttestationAsync(
string attestationId,
string tenantId,
CancellationToken cancellationToken = default)
{
if (!_attestations.TryGetValue(attestationId, out var stored))
{
return Task.FromResult<ExportAttestationResponse?>(null);
}
if (!string.Equals(stored.TenantId, tenantId, StringComparison.OrdinalIgnoreCase))
{
return Task.FromResult<ExportAttestationResponse?>(null);
}
return Task.FromResult<ExportAttestationResponse?>(BuildResponse(stored));
}
public Task<ExportAttestationResponse?> GetAttestationByExportRunAsync(
string exportRunId,
string tenantId,
CancellationToken cancellationToken = default)
{
var key = BuildRunKey(tenantId, exportRunId);
if (!_runToAttestationMap.TryGetValue(key, out var attestationId))
{
return Task.FromResult<ExportAttestationResponse?>(null);
}
return GetAttestationAsync(attestationId, tenantId, cancellationToken);
}
public async Task<bool> VerifyAttestationAsync(
string attestationId,
string tenantId,
CancellationToken cancellationToken = default)
{
if (!_attestations.TryGetValue(attestationId, out var stored))
{
return false;
}
if (!string.Equals(stored.TenantId, tenantId, StringComparison.OrdinalIgnoreCase))
{
return false;
}
try
{
// Decode payload
var payloadBytes = FromBase64Url(stored.Envelope.Payload);
// Verify each signature
foreach (var signature in stored.Envelope.Signatures)
{
var isValid = await _signer.VerifyAsync(
stored.Envelope.PayloadType,
payloadBytes,
signature.Signature,
signature.KeyId,
cancellationToken).ConfigureAwait(false);
if (!isValid)
{
_logger.LogWarning(
"Attestation {AttestationId} signature verification failed for key {KeyId}",
attestationId, signature.KeyId);
return false;
}
}
return true;
}
catch (Exception ex)
{
_logger.LogError(ex, "Error verifying attestation {AttestationId}", attestationId);
return false;
}
}
private ExportInTotoStatement BuildStatement(ExportAttestationRequest request, DateTimeOffset now)
{
var subject = new ExportInTotoSubject
{
Name = request.ArtifactName,
Digest = new Dictionary<string, string>(StringComparer.Ordinal)
{
["sha256"] = request.ArtifactDigest.ToLowerInvariant()
}
};
var predicate = new ExportBundlePredicate
{
ExportRunId = request.ExportRunId,
TenantId = request.TenantId,
ProfileId = request.ProfileId,
BundleId = request.BundleId,
BundleRootHash = request.BundleRootHash,
CreatedAt = now,
Exporter = new ExportAttestationExporter
{
Version = ExporterVersion
},
Metadata = request.Metadata
};
return new ExportInTotoStatement
{
PredicateType = ExportAttestationPayloadTypes.ExportBundlePredicateType,
Subject = [subject],
Predicate = predicate
};
}
private static ExportAttestationResponse BuildResponse(StoredAttestation stored)
{
return new ExportAttestationResponse
{
AttestationId = stored.AttestationId,
ExportRunId = stored.ExportRunId,
ArtifactDigest = stored.ArtifactDigest,
CreatedAt = stored.CreatedAt,
Envelope = stored.Envelope,
Verification = stored.Verification
};
}
private static string GenerateAttestationId(ExportAttestationRequest request)
{
var input = $"{request.TenantId}:{request.ExportRunId}:{request.ArtifactDigest}";
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
return $"att-{Convert.ToHexStringLower(hash)[..16]}";
}
private static string BuildRunKey(string tenantId, string exportRunId)
{
return $"{tenantId}:{exportRunId}";
}
private static string ToBase64Url(byte[] data)
{
return Convert.ToBase64String(data)
.TrimEnd('=')
.Replace('+', '-')
.Replace('/', '_');
}
private static byte[] FromBase64Url(string base64Url)
{
var base64 = base64Url
.Replace('-', '+')
.Replace('_', '/');
switch (base64.Length % 4)
{
case 2: base64 += "=="; break;
case 3: base64 += "="; break;
}
return Convert.FromBase64String(base64);
}
private sealed record StoredAttestation(
string AttestationId,
string TenantId,
string ExportRunId,
string ArtifactDigest,
DateTimeOffset CreatedAt,
ExportDsseEnvelope Envelope,
ExportAttestationVerification? Verification);
}
/// <summary>
/// Configuration options for attestation service.
/// </summary>
public sealed class ExportAttestationOptions
{
public bool Enabled { get; set; } = true;
public string DefaultAlgorithm { get; set; } = "ECDSA-P256-SHA256";
public string? KeyId { get; set; }
public string? Provider { get; set; }
public static ExportAttestationOptions Default => new();
}
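// Minimal options-binding sketch; the "ExportCenter:Attestation" section name is an assumption,
// not something defined in this file:
//
//   builder.Services.Configure<ExportAttestationOptions>(
//       builder.Configuration.GetSection("ExportCenter:Attestation"));
//
// When no IOptions<ExportAttestationOptions> is registered at all, ExportAttestationService
// falls back to ExportAttestationOptions.Default.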

View File

@@ -0,0 +1,208 @@
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.ExportCenter.WebService.Attestation;
/// <summary>
/// Default implementation of attestation signer using ECDSA.
/// For production, this should route through ICryptoProviderRegistry.
/// </summary>
public sealed class ExportAttestationSigner : IExportAttestationSigner, IDisposable
{
private readonly ILogger<ExportAttestationSigner> _logger;
private readonly ExportAttestationSignerOptions _options;
private readonly ECDsa _signingKey;
private readonly string _keyId;
public ExportAttestationSigner(
ILogger<ExportAttestationSigner> logger,
IOptions<ExportAttestationSignerOptions>? options = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value ?? ExportAttestationSignerOptions.Default;
// Create an ephemeral in-memory signing key (loading from _options.KeyPath is not implemented in this default signer)
_signingKey = ECDsa.Create(ECCurve.NamedCurves.nistP256);
_keyId = ComputeKeyId(_signingKey);
}
public Task<AttestationSignResult> SignAsync(
string payloadType,
ReadOnlyMemory<byte> payload,
CancellationToken cancellationToken = default)
{
try
{
// Build PAE (Pre-Authentication Encoding) per DSSE spec
var pae = BuildPae(payloadType, payload.Span);
// Sign PAE
var signatureBytes = _signingKey.SignData(
pae,
HashAlgorithmName.SHA256,
DSASignatureFormat.Rfc3279DerSequence);
var signatureBase64Url = ToBase64Url(signatureBytes);
var signatures = new List<AttestationSignatureInfo>
{
new()
{
Signature = signatureBase64Url,
KeyId = _keyId,
Algorithm = _options.Algorithm
}
};
var verification = new ExportAttestationVerification
{
KeyId = _keyId,
Algorithm = _options.Algorithm,
Provider = _options.Provider,
PublicKeyPem = ExportPublicKeyPem()
};
_logger.LogDebug("Signed attestation with key {KeyId}", _keyId);
return Task.FromResult(AttestationSignResult.Succeeded(signatures, verification));
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to sign attestation");
return Task.FromResult(AttestationSignResult.Failed($"Signing failed: {ex.Message}"));
}
}
public Task<bool> VerifyAsync(
string payloadType,
ReadOnlyMemory<byte> payload,
string signature,
string? keyId,
CancellationToken cancellationToken = default)
{
try
{
// Build PAE
var pae = BuildPae(payloadType, payload.Span);
// Decode signature
var signatureBytes = FromBase64Url(signature);
// Verify
var isValid = _signingKey.VerifyData(
pae,
signatureBytes,
HashAlgorithmName.SHA256,
DSASignatureFormat.Rfc3279DerSequence);
return Task.FromResult(isValid);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to verify signature");
return Task.FromResult(false);
}
}
/// <summary>
/// Builds DSSE Pre-Authentication Encoding (PAE).
/// PAE = "DSSEv1" || SP || LEN(payloadType) || SP || payloadType || SP || LEN(payload) || SP || payload
/// </summary>
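/// <remarks>
/// Worked example: for payloadType "application/vnd.in-toto+json" (28 UTF-8 bytes) and the
/// two-byte payload "{}", the PAE is the ASCII string
/// "DSSEv1 28 application/vnd.in-toto+json 2 {}".
/// </remarks>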
private static byte[] BuildPae(string payloadType, ReadOnlySpan<byte> payload)
{
const string prefix = "DSSEv1";
var payloadTypeBytes = Encoding.UTF8.GetBytes(payloadType);
using var ms = new MemoryStream();
// "DSSEv1 "
ms.Write(Encoding.UTF8.GetBytes(prefix));
ms.WriteByte(0x20); // space
// LEN(payloadType) + space + payloadType + space
WriteLength(ms, payloadTypeBytes.Length);
ms.WriteByte(0x20);
ms.Write(payloadTypeBytes);
ms.WriteByte(0x20);
// LEN(payload) + space + payload
WriteLength(ms, payload.Length);
ms.WriteByte(0x20);
ms.Write(payload);
return ms.ToArray();
}
private static void WriteLength(MemoryStream ms, int length)
{
var lengthBytes = Encoding.UTF8.GetBytes(length.ToString());
ms.Write(lengthBytes);
}
private static string ComputeKeyId(ECDsa key)
{
var publicKeyBytes = key.ExportSubjectPublicKeyInfo();
var hash = SHA256.HashData(publicKeyBytes);
return Convert.ToHexStringLower(hash)[..16];
}
private string ExportPublicKeyPem()
{
var publicKeyBytes = _signingKey.ExportSubjectPublicKeyInfo();
var base64 = Convert.ToBase64String(publicKeyBytes);
var sb = new StringBuilder();
sb.AppendLine("-----BEGIN PUBLIC KEY-----");
for (var i = 0; i < base64.Length; i += 64)
{
var length = Math.Min(64, base64.Length - i);
sb.AppendLine(base64.Substring(i, length));
}
sb.AppendLine("-----END PUBLIC KEY-----");
return sb.ToString();
}
private static string ToBase64Url(byte[] data)
{
return Convert.ToBase64String(data)
.TrimEnd('=')
.Replace('+', '-')
.Replace('/', '_');
}
private static byte[] FromBase64Url(string base64Url)
{
var base64 = base64Url
.Replace('-', '+')
.Replace('_', '/');
switch (base64.Length % 4)
{
case 2: base64 += "=="; break;
case 3: base64 += "="; break;
}
return Convert.FromBase64String(base64);
}
public void Dispose()
{
_signingKey.Dispose();
}
}
/// <summary>
/// Options for attestation signer.
/// </summary>
public sealed class ExportAttestationSignerOptions
{
public string Algorithm { get; set; } = "ECDSA-P256-SHA256";
public string Provider { get; set; } = "StellaOps.ExportCenter";
public string? KeyPath { get; set; }
public static ExportAttestationSignerOptions Default => new();
}
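// Minimal DI wiring sketch (the composition root is not part of this file; the lifetimes and
// registration calls below are assumptions based on standard ASP.NET Core hosting):
//
//   builder.Services.AddSingleton<IExportAttestationSigner, ExportAttestationSigner>();
//   builder.Services.AddSingleton<IExportAttestationService, ExportAttestationService>();
//   builder.Services.AddSingleton<IPromotionAttestationAssembler, PromotionAttestationAssembler>();
//
// A singleton lifetime matters for ExportAttestationSigner: it generates its ECDSA key in the
// constructor, so transient instances would verify with a different key than the one that signed.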

View File

@@ -0,0 +1,53 @@
namespace StellaOps.ExportCenter.WebService.Attestation;
/// <summary>
/// Service for producing DSSE attestations for export artifacts.
/// </summary>
public interface IExportAttestationService
{
/// <summary>
/// Creates a DSSE attestation for an export artifact.
/// </summary>
/// <param name="request">The attestation request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The attestation result with DSSE envelope.</returns>
Task<ExportAttestationResult> CreateAttestationAsync(
ExportAttestationRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets an existing attestation by ID.
/// </summary>
/// <param name="attestationId">The attestation ID.</param>
/// <param name="tenantId">The tenant ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The attestation response if found.</returns>
Task<ExportAttestationResponse?> GetAttestationAsync(
string attestationId,
string tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets the attestation for an export run.
/// </summary>
/// <param name="exportRunId">The export run ID.</param>
/// <param name="tenantId">The tenant ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The attestation response if found.</returns>
Task<ExportAttestationResponse?> GetAttestationByExportRunAsync(
string exportRunId,
string tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies an attestation signature.
/// </summary>
/// <param name="attestationId">The attestation ID.</param>
/// <param name="tenantId">The tenant ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if signature is valid.</returns>
Task<bool> VerifyAttestationAsync(
string attestationId,
string tenantId,
CancellationToken cancellationToken = default);
}
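// Usage sketch; ExportAttestationRequest is defined elsewhere, so only the members read by the
// default implementation are shown and further required members may exist:
//
//   var result = await attestationService.CreateAttestationAsync(new ExportAttestationRequest
//   {
//       TenantId = "tenant-a",
//       ExportRunId = "run-42",
//       ArtifactName = "evidence-bundle.tar.gz",
//       ArtifactDigest = "<lowercase sha256 hex>"
//   });
//   if (result.Success)
//   {
//       // result.AttestationId can then be passed to GetAttestationAsync or VerifyAttestationAsync.
//   }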

View File

@@ -0,0 +1,64 @@
namespace StellaOps.ExportCenter.WebService.Attestation;
/// <summary>
/// Interface for signing export attestations.
/// </summary>
public interface IExportAttestationSigner
{
/// <summary>
/// Signs payload using DSSE PAE (Pre-Authentication Encoding).
/// </summary>
/// <param name="payloadType">The payload MIME type.</param>
/// <param name="payload">The payload bytes to sign.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The signing result with signatures.</returns>
Task<AttestationSignResult> SignAsync(
string payloadType,
ReadOnlyMemory<byte> payload,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies a signature against a payload.
/// </summary>
/// <param name="payloadType">The payload MIME type.</param>
/// <param name="payload">The payload bytes.</param>
/// <param name="signature">The base64url-encoded signature.</param>
/// <param name="keyId">Optional key ID for verification.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if signature is valid.</returns>
Task<bool> VerifyAsync(
string payloadType,
ReadOnlyMemory<byte> payload,
string signature,
string? keyId,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of attestation signing operation.
/// </summary>
public sealed record AttestationSignResult
{
public bool Success { get; init; }
public string? ErrorMessage { get; init; }
public IReadOnlyList<AttestationSignatureInfo> Signatures { get; init; } = [];
public ExportAttestationVerification? Verification { get; init; }
public static AttestationSignResult Succeeded(
IReadOnlyList<AttestationSignatureInfo> signatures,
ExportAttestationVerification? verification = null) =>
new() { Success = true, Signatures = signatures, Verification = verification };
public static AttestationSignResult Failed(string errorMessage) =>
new() { Success = false, ErrorMessage = errorMessage };
}
/// <summary>
/// Information about a single signature.
/// </summary>
public sealed record AttestationSignatureInfo
{
public required string Signature { get; init; }
public string? KeyId { get; init; }
public string? Algorithm { get; init; }
}

View File

@@ -0,0 +1,97 @@
namespace StellaOps.ExportCenter.WebService.Attestation;
/// <summary>
/// Interface for assembling promotion attestations with SBOM/VEX digests,
/// Rekor proofs, and DSSE envelopes for Offline Kit delivery.
/// </summary>
public interface IPromotionAttestationAssembler
{
/// <summary>
/// Assembles a promotion attestation bundle from the provided artifacts.
/// </summary>
/// <param name="request">The assembly request containing all artifacts.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The assembled promotion attestation.</returns>
Task<PromotionAttestationAssemblyResult> AssembleAsync(
PromotionAttestationAssemblyRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Retrieves a previously assembled promotion attestation.
/// </summary>
/// <param name="assemblyId">The assembly identifier.</param>
/// <param name="tenantId">The tenant identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The assembly if found, null otherwise.</returns>
Task<PromotionAttestationAssembly?> GetAssemblyAsync(
string assemblyId,
string tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets assemblies for a specific promotion.
/// </summary>
/// <param name="promotionId">The promotion identifier.</param>
/// <param name="tenantId">The tenant identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>List of assemblies for the promotion.</returns>
Task<IReadOnlyList<PromotionAttestationAssembly>> GetAssembliesForPromotionAsync(
string promotionId,
string tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies the signatures and integrity of an assembly.
/// </summary>
/// <param name="assemblyId">The assembly identifier.</param>
/// <param name="tenantId">The tenant identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if the assembly is valid.</returns>
Task<bool> VerifyAssemblyAsync(
string assemblyId,
string tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Exports an assembly to a portable bundle format for Offline Kit.
/// </summary>
/// <param name="assemblyId">The assembly identifier.</param>
/// <param name="tenantId">The tenant identifier.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Stream containing the bundle, or null if not found.</returns>
Task<PromotionBundleExportResult?> ExportBundleAsync(
string assemblyId,
string tenantId,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of exporting a promotion assembly to a bundle.
/// </summary>
public sealed record PromotionBundleExportResult
{
/// <summary>
/// The exported bundle stream (gzipped tar).
/// </summary>
public required Stream BundleStream { get; init; }
/// <summary>
/// The bundle filename.
/// </summary>
public required string FileName { get; init; }
/// <summary>
/// SHA-256 digest of the bundle.
/// </summary>
public required string BundleDigest { get; init; }
/// <summary>
/// Size of the bundle in bytes.
/// </summary>
public long SizeBytes { get; init; }
/// <summary>
/// Media type of the bundle.
/// </summary>
public string MediaType { get; init; } = "application/gzip";
}
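// Consumption sketch for ExportBundleAsync; persisting the stream is the caller's concern:
//
//   var export = await assembler.ExportBundleAsync(assemblyId, tenantId, cancellationToken);
//   if (export is not null)
//   {
//       await using var file = File.Create(export.FileName);
//       await export.BundleStream.CopyToAsync(file, cancellationToken);
//       // export.BundleDigest ("sha256:…") can be stored alongside the file for later integrity checks.
//   }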

View File

@@ -0,0 +1,612 @@
using System.Buffers.Binary;
using System.Collections.Concurrent;
using System.Formats.Tar;
using System.IO.Compression;
using System.Reflection;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.WebService.Attestation;
/// <summary>
/// Assembles promotion attestations with SBOM/VEX digests, Rekor proofs,
/// and DSSE envelopes for Offline Kit delivery.
/// </summary>
public sealed class PromotionAttestationAssembler : IPromotionAttestationAssembler
{
private const string BundleVersion = "promotion-bundle/v1";
private const string AssemblyFileName = "promotion-assembly.json";
private const string EnvelopeFileName = "promotion.dsse.json";
private const string RekorProofsFileName = "rekor-proofs.ndjson";
private const string DsseEnvelopesDir = "envelopes/";
private const string ChecksumsFileName = "checksums.txt";
private const string VerifyScriptFileName = "verify-promotion.sh";
private const string MetadataFileName = "metadata.json";
private static readonly DateTimeOffset FixedTimestamp = new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
private static readonly UnixFileMode DefaultFileMode =
UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead;
private static readonly UnixFileMode ExecutableFileMode =
UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.UserExecute |
UnixFileMode.GroupRead | UnixFileMode.GroupExecute |
UnixFileMode.OtherRead | UnixFileMode.OtherExecute;
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower
};
private readonly ILogger<PromotionAttestationAssembler> _logger;
private readonly IExportAttestationSigner _signer;
private readonly TimeProvider _timeProvider;
// In-memory store for development/testing; production would use persistent storage
private readonly ConcurrentDictionary<string, PromotionAttestationAssembly> _assemblies = new();
public PromotionAttestationAssembler(
ILogger<PromotionAttestationAssembler> logger,
IExportAttestationSigner signer,
TimeProvider? timeProvider = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_signer = signer ?? throw new ArgumentNullException(nameof(signer));
_timeProvider = timeProvider ?? TimeProvider.System;
}
public async Task<PromotionAttestationAssemblyResult> AssembleAsync(
PromotionAttestationAssemblyRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
try
{
var assemblyId = GenerateAssemblyId();
var createdAt = _timeProvider.GetUtcNow();
// Build the promotion predicate
var predicate = BuildPromotionPredicate(request, createdAt);
// Build subjects from all artifacts
var subjects = BuildSubjects(request);
// Build the in-toto statement
var statement = new ExportInTotoStatement
{
PredicateType = PromotionAttestationPayloadTypes.PromotionPredicateType,
Subject = subjects,
Predicate = new ExportBundlePredicate
{
ExportRunId = request.PromotionId,
TenantId = request.TenantId,
ProfileId = request.ProfileId,
BundleId = assemblyId,
BundleRootHash = ComputeRootHash(request),
CreatedAt = createdAt,
Exporter = new ExportAttestationExporter
{
Version = GetAssemblyVersion(),
BuildTimestamp = GetBuildTimestamp()
},
Metadata = request.Metadata
}
};
// Serialize and sign
var statementJson = JsonSerializer.Serialize(statement, SerializerOptions);
var statementBytes = Encoding.UTF8.GetBytes(statementJson);
var signResult = await _signer.SignAsync(
ExportAttestationPayloadTypes.DssePayloadType,
statementBytes,
cancellationToken);
if (!signResult.Success)
{
_logger.LogError("Failed to sign promotion attestation: {Error}", signResult.ErrorMessage);
return PromotionAttestationAssemblyResult.Failed(
signResult.ErrorMessage ?? "Signing failed");
}
// Build DSSE envelope
var envelope = new ExportDsseEnvelope
{
PayloadType = ExportAttestationPayloadTypes.DssePayloadType,
Payload = Convert.ToBase64String(statementBytes),
Signatures = signResult.Signatures.Select(s => new ExportDsseEnvelopeSignature
{
KeyId = s.KeyId,
Signature = s.Signature
}).ToList()
};
// Create the assembly
var assembly = new PromotionAttestationAssembly
{
AssemblyId = assemblyId,
PromotionId = request.PromotionId,
TenantId = request.TenantId,
ProfileId = request.ProfileId,
SourceEnvironment = request.SourceEnvironment,
TargetEnvironment = request.TargetEnvironment,
CreatedAt = createdAt,
PromotionEnvelope = envelope,
SbomDigests = request.SbomDigests,
VexDigests = request.VexDigests,
RekorProofs = request.RekorProofs,
DsseEnvelopes = request.DsseEnvelopes,
Verification = signResult.Verification,
RootHash = ComputeRootHash(request)
};
// Store the assembly
var storeKey = BuildStoreKey(assemblyId, request.TenantId);
_assemblies[storeKey] = assembly;
_logger.LogInformation(
"Created promotion attestation assembly {AssemblyId} for promotion {PromotionId}",
assemblyId, request.PromotionId);
return PromotionAttestationAssemblyResult.Succeeded(assemblyId, assembly);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to assemble promotion attestation");
return PromotionAttestationAssemblyResult.Failed($"Assembly failed: {ex.Message}");
}
}
public Task<PromotionAttestationAssembly?> GetAssemblyAsync(
string assemblyId,
string tenantId,
CancellationToken cancellationToken = default)
{
var key = BuildStoreKey(assemblyId, tenantId);
_assemblies.TryGetValue(key, out var assembly);
return Task.FromResult(assembly);
}
public Task<IReadOnlyList<PromotionAttestationAssembly>> GetAssembliesForPromotionAsync(
string promotionId,
string tenantId,
CancellationToken cancellationToken = default)
{
var assemblies = _assemblies.Values
.Where(a => a.PromotionId == promotionId && a.TenantId == tenantId)
.OrderByDescending(a => a.CreatedAt)
.ToList();
return Task.FromResult<IReadOnlyList<PromotionAttestationAssembly>>(assemblies);
}
public async Task<bool> VerifyAssemblyAsync(
string assemblyId,
string tenantId,
CancellationToken cancellationToken = default)
{
var assembly = await GetAssemblyAsync(assemblyId, tenantId, cancellationToken);
if (assembly is null)
{
return false;
}
try
{
// Verify the main promotion envelope
var payloadBytes = Convert.FromBase64String(assembly.PromotionEnvelope.Payload);
foreach (var sig in assembly.PromotionEnvelope.Signatures)
{
var isValid = await _signer.VerifyAsync(
assembly.PromotionEnvelope.PayloadType,
payloadBytes,
sig.Signature,
sig.KeyId,
cancellationToken);
if (!isValid)
{
_logger.LogWarning(
"Signature verification failed for assembly {AssemblyId} with key {KeyId}",
assemblyId, sig.KeyId);
return false;
}
}
return true;
}
catch (Exception ex)
{
_logger.LogError(ex, "Error verifying assembly {AssemblyId}", assemblyId);
return false;
}
}
public async Task<PromotionBundleExportResult?> ExportBundleAsync(
string assemblyId,
string tenantId,
CancellationToken cancellationToken = default)
{
var assembly = await GetAssemblyAsync(assemblyId, tenantId, cancellationToken);
if (assembly is null)
{
return null;
}
var stream = new MemoryStream();
using (var gzip = new GZipStream(stream, CompressionLevel.SmallestSize, leaveOpen: true))
using (var tar = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: true))
{
// Write assembly JSON
var assemblyJson = JsonSerializer.Serialize(assembly, SerializerOptions);
WriteTextEntry(tar, AssemblyFileName, assemblyJson, DefaultFileMode);
// Write promotion DSSE envelope
var envelopeJson = JsonSerializer.Serialize(assembly.PromotionEnvelope, SerializerOptions);
WriteTextEntry(tar, EnvelopeFileName, envelopeJson, DefaultFileMode);
// Write Rekor proofs as NDJSON
if (assembly.RekorProofs.Count > 0)
{
var rekorNdjson = BuildRekorNdjson(assembly.RekorProofs);
WriteTextEntry(tar, RekorProofsFileName, rekorNdjson, DefaultFileMode);
}
// Write included DSSE envelopes
foreach (var envelopeRef in assembly.DsseEnvelopes)
{
var envelopePath = $"{DsseEnvelopesDir}{envelopeRef.AttestationType}/{envelopeRef.AttestationId}.dsse.json";
WriteTextEntry(tar, envelopePath, envelopeRef.EnvelopeJson, DefaultFileMode);
}
// Write metadata
var metadata = BuildBundleMetadata(assembly);
var metadataJson = JsonSerializer.Serialize(metadata, SerializerOptions);
WriteTextEntry(tar, MetadataFileName, metadataJson, DefaultFileMode);
// Compute checksums and write
var checksums = BuildChecksums(assembly, assemblyJson, envelopeJson, metadataJson);
WriteTextEntry(tar, ChecksumsFileName, checksums, DefaultFileMode);
// Write verification script
var verifyScript = BuildVerificationScript(assembly);
WriteTextEntry(tar, VerifyScriptFileName, verifyScript, ExecutableFileMode);
}
ApplyDeterministicGzipHeader(stream);
// Compute bundle digest
stream.Position = 0;
var bundleBytes = stream.ToArray();
var bundleDigest = "sha256:" + Convert.ToHexStringLower(SHA256.HashData(bundleBytes));
stream.Position = 0;
var fileName = $"promotion-{assembly.PromotionId}-{assembly.AssemblyId}.tar.gz";
return new PromotionBundleExportResult
{
BundleStream = stream,
FileName = fileName,
BundleDigest = bundleDigest,
SizeBytes = bundleBytes.Length
};
}
private static PromotionPredicate BuildPromotionPredicate(
PromotionAttestationAssemblyRequest request,
DateTimeOffset promotedAt)
{
return new PromotionPredicate
{
PromotionId = request.PromotionId,
TenantId = request.TenantId,
ProfileId = request.ProfileId,
SourceEnvironment = request.SourceEnvironment,
TargetEnvironment = request.TargetEnvironment,
PromotedAt = promotedAt,
SbomDigests = request.SbomDigests.Select(d => new PromotionDigestEntry
{
Name = d.Name,
Digest = new Dictionary<string, string> { ["sha256"] = d.Sha256Digest },
ArtifactType = d.ArtifactType
}).ToList(),
VexDigests = request.VexDigests.Select(d => new PromotionDigestEntry
{
Name = d.Name,
Digest = new Dictionary<string, string> { ["sha256"] = d.Sha256Digest },
ArtifactType = d.ArtifactType
}).ToList(),
RekorProofs = request.RekorProofs.Select(p => new PromotionRekorReference
{
LogIndex = p.LogIndex,
LogId = p.LogId,
Uuid = p.Uuid
}).ToList(),
EnvelopeDigests = request.DsseEnvelopes.Select(e => new PromotionDigestEntry
{
Name = e.AttestationId,
Digest = new Dictionary<string, string> { ["sha256"] = e.EnvelopeDigest },
ArtifactType = e.AttestationType
}).ToList(),
Promoter = new PromotionPromoterInfo
{
Version = GetAssemblyVersion(),
BuildTimestamp = GetBuildTimestamp()
},
Metadata = request.Metadata
};
}
private static IReadOnlyList<ExportInTotoSubject> BuildSubjects(
PromotionAttestationAssemblyRequest request)
{
var subjects = new List<ExportInTotoSubject>();
// Add SBOM subjects
foreach (var sbom in request.SbomDigests)
{
subjects.Add(new ExportInTotoSubject
{
Name = sbom.Name,
Digest = new Dictionary<string, string> { ["sha256"] = sbom.Sha256Digest }
});
}
// Add VEX subjects
foreach (var vex in request.VexDigests)
{
subjects.Add(new ExportInTotoSubject
{
Name = vex.Name,
Digest = new Dictionary<string, string> { ["sha256"] = vex.Sha256Digest }
});
}
// Add envelope subjects
foreach (var envelope in request.DsseEnvelopes)
{
subjects.Add(new ExportInTotoSubject
{
Name = $"envelope:{envelope.AttestationType}/{envelope.AttestationId}",
Digest = new Dictionary<string, string> { ["sha256"] = envelope.EnvelopeDigest }
});
}
return subjects;
}
private static string ComputeRootHash(PromotionAttestationAssemblyRequest request)
{
var hashes = new List<string>();
// Collect all digests
foreach (var sbom in request.SbomDigests)
{
hashes.Add(sbom.Sha256Digest);
}
foreach (var vex in request.VexDigests)
{
hashes.Add(vex.Sha256Digest);
}
foreach (var envelope in request.DsseEnvelopes)
{
hashes.Add(envelope.EnvelopeDigest);
}
if (hashes.Count == 0)
{
// Empty marker
return "sha256:" + Convert.ToHexStringLower(
SHA256.HashData(Encoding.UTF8.GetBytes("stellaops:promotion:empty")));
}
// Sort and combine with null separator
var builder = new StringBuilder();
foreach (var hash in hashes.OrderBy(h => h, StringComparer.Ordinal))
{
builder.Append(hash).Append('\0');
}
var combined = Encoding.UTF8.GetBytes(builder.ToString());
return "sha256:" + Convert.ToHexStringLower(SHA256.HashData(combined));
}
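// Example: for digests "aaa…" and "bbb…" the preimage is "aaa…\0bbb…\0" (ordinal sort, each hash
// followed by a NUL separator), hashed once with SHA-256. Sorting makes the root hash independent
// of request ordering; the separator keeps digest boundaries unambiguous.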
private static string BuildRekorNdjson(IReadOnlyList<RekorProofEntry> proofs)
{
var builder = new StringBuilder();
foreach (var proof in proofs.OrderBy(p => p.LogIndex))
{
var json = JsonSerializer.Serialize(proof, SerializerOptions);
builder.AppendLine(json);
}
return builder.ToString();
}
private static object BuildBundleMetadata(PromotionAttestationAssembly assembly)
{
return new
{
version = BundleVersion,
assembly_id = assembly.AssemblyId,
promotion_id = assembly.PromotionId,
tenant_id = assembly.TenantId,
source_environment = assembly.SourceEnvironment,
target_environment = assembly.TargetEnvironment,
created_at = assembly.CreatedAt,
root_hash = assembly.RootHash,
sbom_count = assembly.SbomDigests.Count,
vex_count = assembly.VexDigests.Count,
rekor_proof_count = assembly.RekorProofs.Count,
envelope_count = assembly.DsseEnvelopes.Count
};
}
private static string BuildChecksums(
PromotionAttestationAssembly assembly,
string assemblyJson,
string envelopeJson,
string metadataJson)
{
var builder = new StringBuilder();
builder.AppendLine("# Promotion attestation bundle checksums (sha256)");
// Calculate and append checksums in lexical order
var files = new SortedDictionary<string, string>(StringComparer.Ordinal);
files[AssemblyFileName] = Convert.ToHexStringLower(
SHA256.HashData(Encoding.UTF8.GetBytes(assemblyJson)));
files[EnvelopeFileName] = Convert.ToHexStringLower(
SHA256.HashData(Encoding.UTF8.GetBytes(envelopeJson)));
files[MetadataFileName] = Convert.ToHexStringLower(
SHA256.HashData(Encoding.UTF8.GetBytes(metadataJson)));
foreach (var (file, hash) in files)
{
builder.Append(hash).Append("  ").AppendLine(file); // two spaces, as expected by sha256sum --check
}
return builder.ToString();
}
private static string BuildVerificationScript(PromotionAttestationAssembly assembly)
{
var builder = new StringBuilder();
builder.AppendLine("#!/usr/bin/env sh");
builder.AppendLine("# Promotion Attestation Bundle Verification Script");
builder.AppendLine("# No network access required");
builder.AppendLine();
builder.AppendLine("set -eu");
builder.AppendLine();
builder.AppendLine("# Verify checksums");
builder.AppendLine("echo \"Verifying checksums...\"");
builder.AppendLine("if command -v sha256sum >/dev/null 2>&1; then");
builder.AppendLine(" sha256sum --check checksums.txt");
builder.AppendLine("elif command -v shasum >/dev/null 2>&1; then");
builder.AppendLine(" shasum -a 256 --check checksums.txt");
builder.AppendLine("else");
builder.AppendLine(" echo \"Error: sha256sum or shasum required\" >&2");
builder.AppendLine(" exit 1");
builder.AppendLine("fi");
builder.AppendLine();
builder.AppendLine("echo \"\"");
builder.AppendLine("echo \"Checksums verified successfully.\"");
builder.AppendLine("echo \"\"");
builder.AppendLine();
builder.AppendLine("# Promotion details");
builder.Append("ASSEMBLY_ID=\"").Append(assembly.AssemblyId).AppendLine("\"");
builder.Append("PROMOTION_ID=\"").Append(assembly.PromotionId).AppendLine("\"");
builder.Append("SOURCE_ENV=\"").Append(assembly.SourceEnvironment).AppendLine("\"");
builder.Append("TARGET_ENV=\"").Append(assembly.TargetEnvironment).AppendLine("\"");
builder.AppendLine();
builder.AppendLine("echo \"Promotion Details:\"");
builder.AppendLine("echo \" Assembly ID: $ASSEMBLY_ID\"");
builder.AppendLine("echo \" Promotion ID: $PROMOTION_ID\"");
builder.AppendLine("echo \" Source: $SOURCE_ENV\"");
builder.AppendLine("echo \" Target: $TARGET_ENV\"");
builder.AppendLine("echo \"\"");
builder.AppendLine();
builder.AppendLine("# Verify DSSE envelope");
builder.AppendLine("DSSE_FILE=\"promotion.dsse.json\"");
builder.AppendLine();
builder.AppendLine("if command -v stella >/dev/null 2>&1; then");
builder.AppendLine(" echo \"Verifying promotion DSSE envelope with stella CLI...\"");
builder.AppendLine(" stella attest verify --envelope \"$DSSE_FILE\"");
builder.AppendLine("else");
builder.AppendLine(" echo \"Note: stella CLI not found. Manual DSSE verification recommended.\"");
builder.AppendLine(" echo \"Install stella CLI and run: stella attest verify --envelope $DSSE_FILE\"");
builder.AppendLine("fi");
builder.AppendLine();
// Verify included envelopes
if (assembly.DsseEnvelopes.Count > 0)
{
builder.AppendLine("# Verify included attestation envelopes");
builder.AppendLine("if command -v stella >/dev/null 2>&1; then");
builder.AppendLine(" echo \"\"");
builder.AppendLine(" echo \"Verifying included attestation envelopes...\"");
foreach (var env in assembly.DsseEnvelopes)
{
var path = $"envelopes/{env.AttestationType}/{env.AttestationId}.dsse.json";
builder.Append(" stella attest verify --envelope \"").Append(path).AppendLine("\" || echo \"Warning: Failed to verify envelope\"");
}
builder.AppendLine("fi");
builder.AppendLine();
}
builder.AppendLine("echo \"\"");
builder.AppendLine("echo \"Verification complete.\"");
return builder.ToString();
}
private static void WriteTextEntry(TarWriter writer, string path, string content, UnixFileMode mode)
{
var bytes = Encoding.UTF8.GetBytes(content);
using var dataStream = new MemoryStream(bytes);
var entry = new PaxTarEntry(TarEntryType.RegularFile, path)
{
Mode = mode,
ModificationTime = FixedTimestamp,
Uid = 0,
Gid = 0,
UserName = string.Empty,
GroupName = string.Empty,
DataStream = dataStream
};
writer.WriteEntry(entry);
}
private static void ApplyDeterministicGzipHeader(MemoryStream stream)
{
if (stream.Length < 10)
{
throw new InvalidOperationException("GZip header not fully written.");
}
var seconds = checked((int)(FixedTimestamp - DateTimeOffset.UnixEpoch).TotalSeconds);
Span<byte> buffer = stackalloc byte[4];
BinaryPrimitives.WriteInt32LittleEndian(buffer, seconds);
var originalPosition = stream.Position;
stream.Position = 4;
stream.Write(buffer);
stream.Position = originalPosition;
}
private static string GenerateAssemblyId()
{
return $"promo-{Guid.NewGuid():N}"[..24];
}
private static string BuildStoreKey(string assemblyId, string tenantId)
{
return $"{tenantId}:{assemblyId}";
}
private static string GetAssemblyVersion()
{
return Assembly.GetExecutingAssembly()
.GetCustomAttribute<AssemblyInformationalVersionAttribute>()
?.InformationalVersion ?? "1.0.0";
}
private static DateTimeOffset? GetBuildTimestamp()
{
// AssemblyMetadataAttribute allows multiple instances; look up the BuildTimestamp key explicitly
// rather than via GetCustomAttribute<T>, which throws when more than one attribute is present.
var attr = Assembly.GetExecutingAssembly()
.GetCustomAttributes<AssemblyMetadataAttribute>()
.FirstOrDefault(a => string.Equals(a.Key, "BuildTimestamp", StringComparison.Ordinal));
return attr is not null && DateTimeOffset.TryParse(attr.Value, out var ts)
? ts
: null;
}
}
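// Resulting bundle layout (entry names come from the constants above; envelope entries are only
// written when the assembly includes them):
//
//   promotion-assembly.json            serialized PromotionAttestationAssembly
//   promotion.dsse.json                signed promotion DSSE envelope
//   rekor-proofs.ndjson                one Rekor proof per line (only when proofs exist)
//   envelopes/<type>/<id>.dsse.json    included attestation envelopes
//   metadata.json                      bundle counts, environments, and root hash
//   checksums.txt                      sha256 sums for promotion-assembly.json, promotion.dsse.json, metadata.json
//   verify-promotion.sh                offline verification script (executable mode)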

View File

@@ -0,0 +1,213 @@
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.AspNetCore.Mvc;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.ServerIntegration;
namespace StellaOps.ExportCenter.WebService.Attestation;
/// <summary>
/// Extension methods for mapping promotion attestation endpoints.
/// </summary>
public static class PromotionAttestationEndpoints
{
/// <summary>
/// Maps promotion attestation endpoints to the application.
/// </summary>
public static WebApplication MapPromotionAttestationEndpoints(this WebApplication app)
{
var group = app.MapGroup("/v1/promotions")
.WithTags("Promotion Attestations")
.RequireAuthorization(StellaOpsResourceServerPolicies.ExportOperator);
// POST /v1/promotions/attestations - Create promotion attestation assembly
group.MapPost("/attestations", CreatePromotionAttestationAsync)
.WithName("CreatePromotionAttestation")
.WithSummary("Create promotion attestation assembly")
.WithDescription("Creates a promotion attestation assembly bundling SBOM/VEX digests, Rekor proofs, and DSSE envelopes.")
.Produces<PromotionAttestationAssemblyResult>(StatusCodes.Status201Created)
.Produces(StatusCodes.Status400BadRequest);
// GET /v1/promotions/attestations/{assemblyId} - Get promotion assembly by ID
group.MapGet("/attestations/{assemblyId}", GetPromotionAssemblyAsync)
.WithName("GetPromotionAssembly")
.WithSummary("Get promotion attestation assembly")
.WithDescription("Returns the promotion attestation assembly for the specified ID.")
.Produces<PromotionAttestationAssembly>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status404NotFound);
// GET /v1/promotions/{promotionId}/attestations - Get assemblies for promotion
group.MapGet("/{promotionId}/attestations", GetAssembliesForPromotionAsync)
.WithName("GetAssembliesForPromotion")
.WithSummary("Get attestation assemblies for a promotion")
.WithDescription("Returns all attestation assemblies for the specified promotion.")
.Produces<IReadOnlyList<PromotionAttestationAssembly>>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status404NotFound);
// POST /v1/promotions/attestations/{assemblyId}/verify - Verify assembly
group.MapPost("/attestations/{assemblyId}/verify", VerifyPromotionAssemblyAsync)
.WithName("VerifyPromotionAssembly")
.WithSummary("Verify promotion attestation assembly")
.WithDescription("Verifies the cryptographic signatures of the promotion attestation assembly.")
.Produces<PromotionAttestationVerifyResponse>(StatusCodes.Status200OK)
.Produces(StatusCodes.Status404NotFound);
// GET /v1/promotions/attestations/{assemblyId}/bundle - Export bundle for Offline Kit
group.MapGet("/attestations/{assemblyId}/bundle", ExportPromotionBundleAsync)
.WithName("ExportPromotionBundle")
.WithSummary("Export promotion bundle for Offline Kit")
.WithDescription("Exports the promotion attestation assembly as a portable bundle for Offline Kit delivery.")
.Produces(StatusCodes.Status200OK, contentType: "application/gzip")
.Produces(StatusCodes.Status404NotFound);
return app;
}
private static async Task<Results<Created<PromotionAttestationAssemblyResult>, BadRequest<string>>> CreatePromotionAttestationAsync(
[FromBody] PromotionAttestationAssemblyRequest request,
[FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
[FromServices] IPromotionAttestationAssembler assembler,
HttpContext httpContext,
CancellationToken cancellationToken)
{
var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
if (string.IsNullOrWhiteSpace(tenantId))
{
return TypedResults.BadRequest("Tenant ID is required");
}
// Ensure request has tenant ID
var requestWithTenant = request with { TenantId = tenantId };
var result = await assembler.AssembleAsync(requestWithTenant, cancellationToken);
if (!result.Success)
{
return TypedResults.BadRequest(result.ErrorMessage ?? "Assembly failed");
}
return TypedResults.Created($"/v1/promotions/attestations/{result.AssemblyId}", result);
}
private static async Task<Results<Ok<PromotionAttestationAssembly>, NotFound>> GetPromotionAssemblyAsync(
string assemblyId,
[FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
[FromServices] IPromotionAttestationAssembler assembler,
HttpContext httpContext,
CancellationToken cancellationToken)
{
var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
if (string.IsNullOrWhiteSpace(tenantId))
{
return TypedResults.NotFound();
}
var assembly = await assembler.GetAssemblyAsync(assemblyId, tenantId, cancellationToken);
if (assembly is null)
{
return TypedResults.NotFound();
}
return TypedResults.Ok(assembly);
}
private static async Task<Results<Ok<IReadOnlyList<PromotionAttestationAssembly>>, NotFound>> GetAssembliesForPromotionAsync(
string promotionId,
[FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
[FromServices] IPromotionAttestationAssembler assembler,
HttpContext httpContext,
CancellationToken cancellationToken)
{
var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
if (string.IsNullOrWhiteSpace(tenantId))
{
return TypedResults.NotFound();
}
var assemblies = await assembler.GetAssembliesForPromotionAsync(promotionId, tenantId, cancellationToken);
return TypedResults.Ok(assemblies);
}
private static async Task<Results<Ok<PromotionAttestationVerifyResponse>, NotFound>> VerifyPromotionAssemblyAsync(
string assemblyId,
[FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
[FromServices] IPromotionAttestationAssembler assembler,
HttpContext httpContext,
CancellationToken cancellationToken)
{
var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
if (string.IsNullOrWhiteSpace(tenantId))
{
return TypedResults.NotFound();
}
var assembly = await assembler.GetAssemblyAsync(assemblyId, tenantId, cancellationToken);
if (assembly is null)
{
return TypedResults.NotFound();
}
var isValid = await assembler.VerifyAssemblyAsync(assemblyId, tenantId, cancellationToken);
return TypedResults.Ok(new PromotionAttestationVerifyResponse
{
AssemblyId = assemblyId,
PromotionId = assembly.PromotionId,
IsValid = isValid,
VerifiedAt = DateTimeOffset.UtcNow
});
}
private static async Task<Results<FileStreamHttpResult, NotFound>> ExportPromotionBundleAsync(
string assemblyId,
[FromHeader(Name = "X-Tenant-Id")] string? tenantIdHeader,
[FromServices] IPromotionAttestationAssembler assembler,
HttpContext httpContext,
CancellationToken cancellationToken)
{
var tenantId = ResolveTenantId(tenantIdHeader, httpContext);
if (string.IsNullOrWhiteSpace(tenantId))
{
return TypedResults.NotFound();
}
var exportResult = await assembler.ExportBundleAsync(assemblyId, tenantId, cancellationToken);
if (exportResult is null)
{
return TypedResults.NotFound();
}
// TypedResults.File sets Content-Disposition from the download file name; expose the bundle digest separately
httpContext.Response.Headers["X-Bundle-Digest"] = exportResult.BundleDigest;
return TypedResults.File(
exportResult.BundleStream,
exportResult.MediaType,
exportResult.FileName);
}
private static string? ResolveTenantId(string? header, HttpContext httpContext)
{
if (!string.IsNullOrWhiteSpace(header))
{
return header;
}
// Try to get from claims
var tenantClaim = httpContext.User.FindFirst("tenant_id")
?? httpContext.User.FindFirst("tid");
return tenantClaim?.Value;
}
}
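// Route summary for the group mapped above (bearer credentials satisfying the ExportOperator
// policy are required; the tenant comes from the tenant_id/tid claim or the X-Tenant-Id header):
//
//   POST /v1/promotions/attestations                           body: PromotionAttestationAssemblyRequest
//   GET  /v1/promotions/attestations/{assemblyId}
//   GET  /v1/promotions/{promotionId}/attestations
//   POST /v1/promotions/attestations/{assemblyId}/verify
//   GET  /v1/promotions/attestations/{assemblyId}/bundle       returns application/gzip + X-Bundle-Digest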
/// <summary>
/// Response for promotion attestation verification.
/// </summary>
public sealed record PromotionAttestationVerifyResponse
{
public required string AssemblyId { get; init; }
public required string PromotionId { get; init; }
public required bool IsValid { get; init; }
public required DateTimeOffset VerifiedAt { get; init; }
public string? ErrorMessage { get; init; }
}

View File

@@ -0,0 +1,354 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.WebService.Attestation;
/// <summary>
/// Payload types for promotion attestations.
/// </summary>
public static class PromotionAttestationPayloadTypes
{
public const string PromotionPredicateType = "stella.ops/promotion@v1";
public const string PromotionBundlePredicateType = "stella.ops/promotion-bundle@v1";
}
/// <summary>
/// Request to create a promotion attestation assembly.
/// </summary>
public sealed record PromotionAttestationAssemblyRequest
{
/// <summary>
/// Unique identifier for the promotion.
/// </summary>
public required string PromotionId { get; init; }
/// <summary>
/// Tenant identifier.
/// </summary>
public required string TenantId { get; init; }
/// <summary>
/// Optional profile identifier.
/// </summary>
public string? ProfileId { get; init; }
/// <summary>
/// Source environment (e.g., "staging").
/// </summary>
public required string SourceEnvironment { get; init; }
/// <summary>
/// Target environment (e.g., "production").
/// </summary>
public required string TargetEnvironment { get; init; }
/// <summary>
/// SBOM digest references to include.
/// </summary>
public IReadOnlyList<ArtifactDigestReference> SbomDigests { get; init; } = [];
/// <summary>
/// VEX digest references to include.
/// </summary>
public IReadOnlyList<ArtifactDigestReference> VexDigests { get; init; } = [];
/// <summary>
/// Rekor transparency log proofs.
/// </summary>
public IReadOnlyList<RekorProofEntry> RekorProofs { get; init; } = [];
/// <summary>
/// Existing DSSE envelopes to include in the bundle.
/// </summary>
public IReadOnlyList<DsseEnvelopeReference> DsseEnvelopes { get; init; } = [];
/// <summary>
/// Optional metadata for the promotion.
/// </summary>
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Reference to an artifact with its digest.
/// </summary>
public sealed record ArtifactDigestReference
{
/// <summary>
/// Unique identifier for the artifact.
/// </summary>
public required string ArtifactId { get; init; }
/// <summary>
/// Name of the artifact.
/// </summary>
public required string Name { get; init; }
/// <summary>
/// Media type of the artifact (e.g., "application/spdx+json").
/// </summary>
public required string MediaType { get; init; }
/// <summary>
/// SHA-256 digest of the artifact.
/// </summary>
public required string Sha256Digest { get; init; }
/// <summary>
/// Size of the artifact in bytes.
/// </summary>
public long SizeBytes { get; init; }
/// <summary>
/// Optional URI where the artifact can be retrieved.
/// </summary>
public string? Uri { get; init; }
/// <summary>
/// Artifact type (sbom, vex, etc.).
/// </summary>
public required string ArtifactType { get; init; }
/// <summary>
/// Artifact version or format (e.g., "spdx-3.0.1", "cyclonedx-1.6", "openvex").
/// </summary>
public string? FormatVersion { get; init; }
}
/// <summary>
/// Rekor transparency log proof entry.
/// </summary>
public sealed record RekorProofEntry
{
/// <summary>
/// Log index in Rekor.
/// </summary>
[JsonPropertyName("logIndex")]
public required long LogIndex { get; init; }
/// <summary>
/// Log ID (tree ID).
/// </summary>
[JsonPropertyName("logId")]
public required string LogId { get; init; }
/// <summary>
/// Integrated time (Unix timestamp).
/// </summary>
[JsonPropertyName("integratedTime")]
public required long IntegratedTime { get; init; }
/// <summary>
/// Entry UUID.
/// </summary>
[JsonPropertyName("uuid")]
public required string Uuid { get; init; }
/// <summary>
/// Entry body (base64-encoded).
/// </summary>
[JsonPropertyName("body")]
public string? Body { get; init; }
/// <summary>
/// Inclusion proof for verification.
/// </summary>
[JsonPropertyName("inclusionProof")]
public RekorInclusionProof? InclusionProof { get; init; }
}
/// <summary>
/// Merkle tree inclusion proof from Rekor.
/// </summary>
public sealed record RekorInclusionProof
{
[JsonPropertyName("logIndex")]
public long LogIndex { get; init; }
[JsonPropertyName("rootHash")]
public string? RootHash { get; init; }
[JsonPropertyName("treeSize")]
public long TreeSize { get; init; }
[JsonPropertyName("hashes")]
public IReadOnlyList<string> Hashes { get; init; } = [];
}
/// <summary>
/// Reference to an existing DSSE envelope.
/// </summary>
public sealed record DsseEnvelopeReference
{
/// <summary>
/// Attestation ID.
/// </summary>
public required string AttestationId { get; init; }
/// <summary>
/// Type of attestation (e.g., "sbom", "vex", "slsa-provenance").
/// </summary>
public required string AttestationType { get; init; }
/// <summary>
/// Serialized DSSE envelope JSON.
/// </summary>
public required string EnvelopeJson { get; init; }
/// <summary>
/// SHA-256 digest of the envelope.
/// </summary>
public required string EnvelopeDigest { get; init; }
}
/// <summary>
/// Result of creating a promotion attestation assembly.
/// </summary>
public sealed record PromotionAttestationAssemblyResult
{
public bool Success { get; init; }
public string? AssemblyId { get; init; }
public string? ErrorMessage { get; init; }
public PromotionAttestationAssembly? Assembly { get; init; }
public static PromotionAttestationAssemblyResult Succeeded(
string assemblyId,
PromotionAttestationAssembly assembly) =>
new() { Success = true, AssemblyId = assemblyId, Assembly = assembly };
public static PromotionAttestationAssemblyResult Failed(string errorMessage) =>
new() { Success = false, ErrorMessage = errorMessage };
}
/// <summary>
/// Complete promotion attestation assembly.
/// </summary>
public sealed record PromotionAttestationAssembly
{
[JsonPropertyName("assembly_id")]
public required string AssemblyId { get; init; }
[JsonPropertyName("promotion_id")]
public required string PromotionId { get; init; }
[JsonPropertyName("tenant_id")]
public required string TenantId { get; init; }
[JsonPropertyName("profile_id")]
public string? ProfileId { get; init; }
[JsonPropertyName("source_environment")]
public required string SourceEnvironment { get; init; }
[JsonPropertyName("target_environment")]
public required string TargetEnvironment { get; init; }
[JsonPropertyName("created_at")]
public required DateTimeOffset CreatedAt { get; init; }
[JsonPropertyName("promotion_envelope")]
public required ExportDsseEnvelope PromotionEnvelope { get; init; }
[JsonPropertyName("sbom_digests")]
public IReadOnlyList<ArtifactDigestReference> SbomDigests { get; init; } = [];
[JsonPropertyName("vex_digests")]
public IReadOnlyList<ArtifactDigestReference> VexDigests { get; init; } = [];
[JsonPropertyName("rekor_proofs")]
public IReadOnlyList<RekorProofEntry> RekorProofs { get; init; } = [];
[JsonPropertyName("dsse_envelopes")]
public IReadOnlyList<DsseEnvelopeReference> DsseEnvelopes { get; init; } = [];
[JsonPropertyName("verification")]
public ExportAttestationVerification? Verification { get; init; }
[JsonPropertyName("root_hash")]
public string? RootHash { get; init; }
}
/// <summary>
/// Promotion predicate for in-toto statements.
/// </summary>
public sealed record PromotionPredicate
{
[JsonPropertyName("promotionId")]
public required string PromotionId { get; init; }
[JsonPropertyName("tenantId")]
public required string TenantId { get; init; }
[JsonPropertyName("profileId")]
public string? ProfileId { get; init; }
[JsonPropertyName("sourceEnvironment")]
public required string SourceEnvironment { get; init; }
[JsonPropertyName("targetEnvironment")]
public required string TargetEnvironment { get; init; }
[JsonPropertyName("promotedAt")]
public required DateTimeOffset PromotedAt { get; init; }
[JsonPropertyName("sbomDigests")]
public IReadOnlyList<PromotionDigestEntry> SbomDigests { get; init; } = [];
[JsonPropertyName("vexDigests")]
public IReadOnlyList<PromotionDigestEntry> VexDigests { get; init; } = [];
[JsonPropertyName("rekorProofs")]
public IReadOnlyList<PromotionRekorReference> RekorProofs { get; init; } = [];
[JsonPropertyName("envelopeDigests")]
public IReadOnlyList<PromotionDigestEntry> EnvelopeDigests { get; init; } = [];
[JsonPropertyName("promoter")]
public required PromotionPromoterInfo Promoter { get; init; }
[JsonPropertyName("metadata")]
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Digest entry for promotion predicate.
/// </summary>
public sealed record PromotionDigestEntry
{
[JsonPropertyName("name")]
public required string Name { get; init; }
[JsonPropertyName("digest")]
public required IReadOnlyDictionary<string, string> Digest { get; init; }
[JsonPropertyName("artifactType")]
public string? ArtifactType { get; init; }
}
/// <summary>
/// Rekor reference for promotion predicate.
/// </summary>
public sealed record PromotionRekorReference
{
[JsonPropertyName("logIndex")]
public required long LogIndex { get; init; }
[JsonPropertyName("logId")]
public required string LogId { get; init; }
[JsonPropertyName("uuid")]
public required string Uuid { get; init; }
}
/// <summary>
/// Information about the promoter.
/// </summary>
public sealed record PromotionPromoterInfo
{
[JsonPropertyName("name")]
public string Name { get; init; } = "StellaOps.ExportCenter";
[JsonPropertyName("version")]
public required string Version { get; init; }
[JsonPropertyName("buildTimestamp")]
public DateTimeOffset? BuildTimestamp { get; init; }
}

View File

@@ -0,0 +1,68 @@
namespace StellaOps.ExportCenter.WebService.Deprecation;
/// <summary>
/// Registry of deprecated export endpoints with their migration paths.
/// </summary>
public static class DeprecatedEndpointsRegistry
{
/// <summary>
/// Date when legacy /exports endpoints were deprecated.
/// </summary>
public static readonly DateTimeOffset LegacyExportsDeprecationDate =
new(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
/// <summary>
/// Date when legacy /exports endpoints will be removed.
/// </summary>
public static readonly DateTimeOffset LegacyExportsSunsetDate =
new(2025, 7, 1, 0, 0, 0, TimeSpan.Zero);
/// <summary>
/// Documentation URL for API deprecation migration guide.
/// </summary>
public const string DeprecationDocumentationUrl =
"https://docs.stellaops.io/api/export-center/migration";
/// <summary>
/// Deprecation info for GET /exports (list exports).
/// </summary>
public static readonly DeprecationInfo ListExports = new(
DeprecatedAt: LegacyExportsDeprecationDate,
SunsetAt: LegacyExportsSunsetDate,
SuccessorPath: "/v1/exports/profiles",
DocumentationUrl: DeprecationDocumentationUrl,
Reason: "Legacy exports list endpoint replaced by profiles API");
/// <summary>
/// Deprecation info for POST /exports (create export).
/// </summary>
public static readonly DeprecationInfo CreateExport = new(
DeprecatedAt: LegacyExportsDeprecationDate,
SunsetAt: LegacyExportsSunsetDate,
SuccessorPath: "/v1/exports/evidence",
DocumentationUrl: DeprecationDocumentationUrl,
Reason: "Legacy export creation endpoint replaced by typed export APIs");
/// <summary>
/// Deprecation info for DELETE /exports/{id} (delete export).
/// </summary>
public static readonly DeprecationInfo DeleteExport = new(
DeprecatedAt: LegacyExportsDeprecationDate,
SunsetAt: LegacyExportsSunsetDate,
SuccessorPath: "/v1/exports/runs/{id}/cancel",
DocumentationUrl: DeprecationDocumentationUrl,
Reason: "Legacy export deletion replaced by run cancellation API");
/// <summary>
/// Gets all deprecated endpoint registrations.
/// </summary>
public static IReadOnlyList<(string Method, string Pattern, DeprecationInfo Info)> GetAll()
{
return
[
("GET", "/exports", ListExports),
("POST", "/exports", CreateExport),
("DELETE", "/exports/{id}", DeleteExport)
];
}
}
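
A minimal wiring sketch for these registrations, assuming app is the usual WebApplication from Program.cs and LegacyExportHandlers.Handle stands in for the existing legacy handler delegates (both names are placeholders); WithDeprecation is the route extension added further down in this change:

foreach (var (method, pattern, info) in DeprecatedEndpointsRegistry.GetAll())
{
    RouteHandlerBuilder route = method switch
    {
        "GET" => app.MapGet(pattern, LegacyExportHandlers.Handle),
        "POST" => app.MapPost(pattern, LegacyExportHandlers.Handle),
        "DELETE" => app.MapDelete(pattern, LegacyExportHandlers.Handle),
        _ => throw new InvalidOperationException($"Unsupported method: {method}")
    };

    // Attaches Deprecation/Sunset headers, deprecation metadata, and usage logging in one call.
    route.WithDeprecation(info);
}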

View File

@@ -0,0 +1,125 @@
using Microsoft.AspNetCore.Http.HttpResults;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Primitives;
namespace StellaOps.ExportCenter.WebService.Deprecation;
/// <summary>
/// Extension methods for adding RFC 8594 deprecation headers to HTTP responses.
/// </summary>
public static class DeprecationHeaderExtensions
{
/// <summary>
/// HTTP header indicating the resource is deprecated (IETF Deprecation header field, used alongside the RFC 8594 Sunset header).
/// </summary>
public const string DeprecationHeader = "Deprecation";
/// <summary>
/// HTTP header indicating when the resource will be removed (RFC 8594).
/// </summary>
public const string SunsetHeader = "Sunset";
/// <summary>
/// HTTP Link header with relation type for successor resource.
/// </summary>
public const string LinkHeader = "Link";
/// <summary>
/// HTTP Warning header for additional deprecation notice (RFC 7234).
/// </summary>
public const string WarningHeader = "Warning";
/// <summary>
/// Adds RFC 8594 deprecation headers to the response.
/// </summary>
/// <param name="context">The HTTP context.</param>
/// <param name="info">Deprecation metadata.</param>
public static void AddDeprecationHeaders(this HttpContext context, DeprecationInfo info)
{
ArgumentNullException.ThrowIfNull(context);
ArgumentNullException.ThrowIfNull(info);
var response = context.Response;
// Deprecation header with IMF-fixdate (paired with the RFC 8594 Sunset header below)
response.Headers[DeprecationHeader] = info.DeprecatedAt.ToUniversalTime().ToString("R");
// RFC 8594: Sunset header with IMF-fixdate
response.Headers[SunsetHeader] = info.SunsetAt.ToUniversalTime().ToString("R");
// Link header pointing to successor and/or documentation
var links = new List<string>();
if (!string.IsNullOrEmpty(info.SuccessorPath))
{
links.Add($"<{info.SuccessorPath}>; rel=\"successor-version\"");
}
if (!string.IsNullOrEmpty(info.DocumentationUrl))
{
links.Add($"<{info.DocumentationUrl}>; rel=\"deprecation\"");
}
if (links.Count > 0)
{
response.Headers.Append(LinkHeader, string.Join(", ", links));
}
// Warning header with deprecation notice
var reason = info.Reason ?? "This endpoint is deprecated and will be removed.";
var warning = $"299 - \"{reason} Use {info.SuccessorPath} instead. Sunset: {info.SunsetAt:yyyy-MM-dd}\"";
response.Headers[WarningHeader] = warning;
}
/// <summary>
/// Creates an endpoint filter that adds deprecation headers and logs usage.
/// </summary>
/// <param name="info">Deprecation metadata.</param>
/// <param name="loggerFactory">Logger factory for deprecation logging.</param>
/// <returns>An endpoint filter delegate.</returns>
public static Func<EndpointFilterInvocationContext, EndpointFilterDelegate, ValueTask<object?>>
CreateDeprecationFilter(DeprecationInfo info, ILoggerFactory? loggerFactory = null)
{
var logger = loggerFactory?.CreateLogger("DeprecatedEndpoint");
return async (context, next) =>
{
var httpContext = context.HttpContext;
// Add deprecation headers
httpContext.AddDeprecationHeaders(info);
// Log deprecated endpoint usage
logger?.LogWarning(
"Deprecated endpoint accessed: {Method} {Path} - Successor: {Successor}, Sunset: {Sunset}, Client: {ClientIp}",
httpContext.Request.Method,
httpContext.Request.Path,
info.SuccessorPath,
info.SunsetAt,
httpContext.Connection.RemoteIpAddress);
// Past the sunset date: reject the request with 410 Gone
if (info.IsPastSunset)
{
logger?.LogError(
"Sunset endpoint accessed after removal date: {Method} {Path} - Was removed: {Sunset}",
httpContext.Request.Method,
httpContext.Request.Path,
info.SunsetAt);
return Results.Problem(
title: "Endpoint Removed",
detail: $"This endpoint was deprecated on {info.DeprecatedAt:yyyy-MM-dd} and removed on {info.SunsetAt:yyyy-MM-dd}. Use {info.SuccessorPath} instead.",
statusCode: StatusCodes.Status410Gone,
extensions: new Dictionary<string, object?>
{
["successorPath"] = info.SuccessorPath,
["documentationUrl"] = info.DocumentationUrl,
["sunsetDate"] = info.SunsetAt.ToString("o")
});
}
return await next(context);
};
}
}

View File

@@ -0,0 +1,27 @@
namespace StellaOps.ExportCenter.WebService.Deprecation;
/// <summary>
/// Describes deprecation metadata for an API endpoint.
/// </summary>
/// <param name="DeprecatedAt">UTC date when the endpoint was deprecated.</param>
/// <param name="SunsetAt">UTC date when the endpoint will be removed.</param>
/// <param name="SuccessorPath">Path to the replacement endpoint (e.g., "/v1/exports").</param>
/// <param name="DocumentationUrl">URL to deprecation documentation or migration guide.</param>
/// <param name="Reason">Human-readable reason for deprecation.</param>
public sealed record DeprecationInfo(
DateTimeOffset DeprecatedAt,
DateTimeOffset SunsetAt,
string SuccessorPath,
string? DocumentationUrl = null,
string? Reason = null)
{
/// <summary>
/// Returns true if the sunset date has passed.
/// </summary>
public bool IsPastSunset => DateTimeOffset.UtcNow >= SunsetAt;
/// <summary>
/// Days remaining until sunset.
/// </summary>
public int DaysUntilSunset => Math.Max(0, (int)(SunsetAt - DateTimeOffset.UtcNow).TotalDays);
}

View File

@@ -0,0 +1,106 @@
using Microsoft.Extensions.Logging;
namespace StellaOps.ExportCenter.WebService.Deprecation;
/// <summary>
/// Service for emitting notifications when deprecated endpoints are accessed.
/// </summary>
public interface IDeprecationNotificationService
{
/// <summary>
/// Records access to a deprecated endpoint.
/// </summary>
/// <param name="method">HTTP method.</param>
/// <param name="path">Request path.</param>
/// <param name="info">Deprecation metadata.</param>
/// <param name="clientInfo">Client identification (IP, user agent, etc.).</param>
/// <param name="cancellationToken">Cancellation token.</param>
Task RecordDeprecatedAccessAsync(
string method,
string path,
DeprecationInfo info,
DeprecationClientInfo clientInfo,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Information about the client accessing a deprecated endpoint.
/// </summary>
/// <param name="ClientIp">Client IP address.</param>
/// <param name="UserAgent">Client user agent string.</param>
/// <param name="TenantId">Tenant ID if available.</param>
/// <param name="UserId">User ID if authenticated.</param>
/// <param name="TraceId">Distributed trace ID.</param>
public sealed record DeprecationClientInfo(
string? ClientIp,
string? UserAgent,
string? TenantId,
string? UserId,
string? TraceId);
/// <summary>
/// Default implementation that logs deprecation events.
/// </summary>
public sealed class DeprecationNotificationService : IDeprecationNotificationService
{
private readonly ILogger<DeprecationNotificationService> _logger;
public DeprecationNotificationService(ILogger<DeprecationNotificationService> logger)
{
_logger = logger;
}
public Task RecordDeprecatedAccessAsync(
string method,
string path,
DeprecationInfo info,
DeprecationClientInfo clientInfo,
CancellationToken cancellationToken = default)
{
// Log structured event for telemetry/audit
_logger.LogWarning(
"Deprecated endpoint access: Method={Method}, Path={Path}, " +
"DeprecatedAt={DeprecatedAt}, SunsetAt={SunsetAt}, DaysUntilSunset={DaysUntilSunset}, " +
"Successor={Successor}, ClientIp={ClientIp}, UserAgent={UserAgent}, " +
"TenantId={TenantId}, UserId={UserId}, TraceId={TraceId}",
method,
path,
info.DeprecatedAt,
info.SunsetAt,
info.DaysUntilSunset,
info.SuccessorPath,
clientInfo.ClientIp,
clientInfo.UserAgent,
clientInfo.TenantId,
clientInfo.UserId,
clientInfo.TraceId);
// Emit custom metric counter
DeprecationMetrics.DeprecatedEndpointAccessCounter.Add(
1,
new KeyValuePair<string, object?>("method", method),
new KeyValuePair<string, object?>("path", path),
new KeyValuePair<string, object?>("successor", info.SuccessorPath),
new KeyValuePair<string, object?>("days_until_sunset", info.DaysUntilSunset));
return Task.CompletedTask;
}
}
/// <summary>
/// Metrics for deprecation tracking.
/// </summary>
public static class DeprecationMetrics
{
private static readonly System.Diagnostics.Metrics.Meter Meter =
new("StellaOps.ExportCenter.Deprecation", "1.0.0");
/// <summary>
/// Counter for deprecated endpoint accesses.
/// </summary>
public static readonly System.Diagnostics.Metrics.Counter<long> DeprecatedEndpointAccessCounter =
Meter.CreateCounter<long>(
"export_center_deprecated_endpoint_access_total",
"requests",
"Total number of requests to deprecated endpoints");
}
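
A registration and call-site sketch for the notification service, assuming a typical Program.cs (builder/app) and that the handler body runs inside an async minimal-API route; the returned payload is a placeholder:

builder.Services.AddSingleton<IDeprecationNotificationService, DeprecationNotificationService>();

app.MapGet("/exports", async (HttpContext http, IDeprecationNotificationService notifications) =>
{
    var clientInfo = new DeprecationClientInfo(
        ClientIp: http.Connection.RemoteIpAddress?.ToString(),
        UserAgent: http.Request.Headers.UserAgent.ToString(),
        TenantId: null,            // resolved by tenant middleware when available
        UserId: http.User.Identity?.Name,
        TraceId: http.TraceIdentifier);

    await notifications.RecordDeprecatedAccessAsync(
        "GET", "/exports", DeprecatedEndpointsRegistry.ListExports, clientInfo);

    return Results.Ok(); // legacy list payload would be returned here
});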

View File

@@ -0,0 +1,62 @@
using Microsoft.AspNetCore.Builder;
namespace StellaOps.ExportCenter.WebService.Deprecation;
/// <summary>
/// Extension methods for applying deprecation metadata to routes.
/// </summary>
public static class DeprecationRouteBuilderExtensions
{
/// <summary>
/// Marks the endpoint as deprecated with RFC 8594 headers.
/// </summary>
/// <param name="builder">The route handler builder.</param>
/// <param name="info">Deprecation metadata.</param>
/// <returns>The route handler builder for chaining.</returns>
public static RouteHandlerBuilder WithDeprecation(this RouteHandlerBuilder builder, DeprecationInfo info)
{
ArgumentNullException.ThrowIfNull(builder);
ArgumentNullException.ThrowIfNull(info);
return builder
.AddEndpointFilter(DeprecationHeaderExtensions.CreateDeprecationFilter(info))
.WithMetadata(info)
.WithMetadata(new DeprecatedAttribute())
.WithTags("Deprecated");
}
/// <summary>
/// Marks the endpoint as deprecated with standard sunset timeline.
/// </summary>
/// <param name="builder">The route handler builder.</param>
/// <param name="successorPath">Path to the replacement endpoint.</param>
/// <param name="deprecatedAt">When the endpoint was deprecated.</param>
/// <param name="sunsetAt">When the endpoint will be removed.</param>
/// <param name="documentationUrl">Optional documentation URL.</param>
/// <param name="reason">Optional deprecation reason.</param>
/// <returns>The route handler builder for chaining.</returns>
public static RouteHandlerBuilder WithDeprecation(
this RouteHandlerBuilder builder,
string successorPath,
DateTimeOffset deprecatedAt,
DateTimeOffset sunsetAt,
string? documentationUrl = null,
string? reason = null)
{
return builder.WithDeprecation(new DeprecationInfo(
deprecatedAt,
sunsetAt,
successorPath,
documentationUrl,
reason));
}
}
/// <summary>
/// Marker attribute indicating an endpoint is deprecated.
/// Used for OpenAPI documentation generation.
/// </summary>
[AttributeUsage(AttributeTargets.Method | AttributeTargets.Class, AllowMultiple = false)]
public sealed class DeprecatedAttribute : Attribute
{
}
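
Usage sketch for the route extension; ListLegacyExportsAsync and CreateLegacyExportAsync are placeholder handler delegates:

app.MapGet("/exports", ListLegacyExportsAsync)
   .WithDeprecation(DeprecatedEndpointsRegistry.ListExports);

// Or with an inline timeline instead of a registry entry:
app.MapPost("/exports", CreateLegacyExportAsync)
   .WithDeprecation(
       successorPath: "/v1/exports/evidence",
       deprecatedAt: new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero),
       sunsetAt: new DateTimeOffset(2025, 7, 1, 0, 0, 0, TimeSpan.Zero),
       documentationUrl: DeprecatedEndpointsRegistry.DeprecationDocumentationUrl,
       reason: "Legacy export creation replaced by typed export APIs.");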

View File

@@ -0,0 +1,203 @@
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
namespace StellaOps.ExportCenter.WebService.EvidenceLocker;
/// <summary>
/// Extension methods for registering evidence locker integration services.
/// </summary>
public static class EvidenceLockerServiceCollectionExtensions
{
/// <summary>
/// Adds evidence locker integration services to the service collection.
/// </summary>
/// <param name="services">The service collection.</param>
/// <param name="configureOptions">Optional configuration for the evidence locker client.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddExportEvidenceLocker(
this IServiceCollection services,
Action<ExportEvidenceLockerOptions>? configureOptions = null)
{
ArgumentNullException.ThrowIfNull(services);
// Configure options
if (configureOptions is not null)
{
services.Configure(configureOptions);
}
// Register Merkle tree calculator
services.TryAddSingleton<IExportMerkleTreeCalculator, ExportMerkleTreeCalculator>();
// Register HTTP client for evidence locker
services.AddHttpClient<IExportEvidenceLockerClient, ExportEvidenceLockerClient>((serviceProvider, client) =>
{
var options = serviceProvider.GetService<Microsoft.Extensions.Options.IOptions<ExportEvidenceLockerOptions>>()?.Value
?? ExportEvidenceLockerOptions.Default;
client.BaseAddress = new Uri(options.BaseUrl);
client.Timeout = options.Timeout;
client.DefaultRequestHeaders.Accept.Add(
new System.Net.Http.Headers.MediaTypeWithQualityHeaderValue("application/json"));
});
return services;
}
/// <summary>
/// Adds evidence locker integration with in-memory implementation for testing.
/// </summary>
/// <param name="services">The service collection.</param>
/// <returns>The service collection for chaining.</returns>
public static IServiceCollection AddExportEvidenceLockerInMemory(
this IServiceCollection services)
{
ArgumentNullException.ThrowIfNull(services);
services.TryAddSingleton<IExportMerkleTreeCalculator, ExportMerkleTreeCalculator>();
services.TryAddSingleton<IExportEvidenceLockerClient, InMemoryExportEvidenceLockerClient>();
return services;
}
}
/// <summary>
/// In-memory implementation of evidence locker client for testing.
/// </summary>
public sealed class InMemoryExportEvidenceLockerClient : IExportEvidenceLockerClient
{
private readonly IExportMerkleTreeCalculator _merkleCalculator;
private readonly Dictionary<string, ExportBundleManifest> _bundles = new(StringComparer.OrdinalIgnoreCase);
private readonly object _lock = new();
private int _bundleCounter;
public InMemoryExportEvidenceLockerClient(IExportMerkleTreeCalculator merkleCalculator)
{
_merkleCalculator = merkleCalculator ?? throw new ArgumentNullException(nameof(merkleCalculator));
}
public Task<ExportEvidenceSnapshotResult> PushSnapshotAsync(
ExportEvidenceSnapshotRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
var bundleId = Guid.NewGuid().ToString();
var entries = request.Materials.Select(m => new ExportManifestEntry
{
Section = m.Section,
CanonicalPath = $"{m.Section}/{m.Path}",
Sha256 = m.Sha256.ToLowerInvariant(),
SizeBytes = m.SizeBytes,
MediaType = m.MediaType ?? "application/octet-stream",
Attributes = m.Attributes
}).ToList();
var rootHash = _merkleCalculator.CalculateRootHash(entries);
var manifest = new ExportBundleManifest
{
BundleId = bundleId,
TenantId = request.TenantId,
ProfileId = request.ProfileId,
ExportRunId = request.ExportRunId,
Kind = request.Kind,
CreatedAt = DateTimeOffset.UtcNow,
RootHash = rootHash,
Metadata = request.Metadata ?? new Dictionary<string, string>(),
Entries = entries,
Distribution = request.Distribution
};
lock (_lock)
{
_bundles[bundleId] = manifest;
_bundleCounter++;
}
return Task.FromResult(ExportEvidenceSnapshotResult.Succeeded(bundleId, rootHash));
}
public Task<bool> UpdateDistributionTranscriptAsync(
string bundleId,
string tenantId,
ExportDistributionInfo distribution,
CancellationToken cancellationToken = default)
{
lock (_lock)
{
if (!_bundles.TryGetValue(bundleId, out var existing))
{
return Task.FromResult(false);
}
_bundles[bundleId] = existing with { Distribution = distribution };
}
return Task.FromResult(true);
}
public Task<ExportBundleManifest?> GetBundleAsync(
string bundleId,
string tenantId,
CancellationToken cancellationToken = default)
{
lock (_lock)
{
_bundles.TryGetValue(bundleId, out var manifest);
return Task.FromResult(manifest);
}
}
public Task<bool> VerifyRootHashAsync(
string bundleId,
string tenantId,
string expectedRootHash,
CancellationToken cancellationToken = default)
{
lock (_lock)
{
if (!_bundles.TryGetValue(bundleId, out var manifest))
{
return Task.FromResult(false);
}
return Task.FromResult(
string.Equals(manifest.RootHash, expectedRootHash, StringComparison.OrdinalIgnoreCase));
}
}
/// <summary>
/// Gets all stored bundles (for testing).
/// </summary>
public IReadOnlyList<ExportBundleManifest> GetAllBundles()
{
lock (_lock)
{
return _bundles.Values.ToList();
}
}
/// <summary>
/// Clears all stored bundles (for testing).
/// </summary>
public void Clear()
{
lock (_lock)
{
_bundles.Clear();
_bundleCounter = 0;
}
}
/// <summary>
/// Gets the count of stored bundles (for testing).
/// </summary>
public int Count
{
get
{
lock (_lock) { return _bundles.Count; }
}
}
}
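
A test-oriented sketch of the in-memory client, assuming it runs inside an async xUnit test with Microsoft.Extensions.DependencyInjection available; the tenant, run id, and material values are illustrative:

var services = new ServiceCollection()
    .AddExportEvidenceLockerInMemory()
    .BuildServiceProvider();

var client = services.GetRequiredService<IExportEvidenceLockerClient>();

var result = await client.PushSnapshotAsync(new ExportEvidenceSnapshotRequest
{
    TenantId = "tenant-a",
    ExportRunId = "run-123",
    Kind = ExportBundleKind.Evidence,
    Materials =
    [
        new ExportMaterialInput
        {
            Section = "sbom",
            Path = "image.cdx.json",
            Sha256 = new string('a', 64), // placeholder digest
            SizeBytes = 2048,
            MediaType = "application/vnd.cyclonedx+json"
        }
    ]
});

// The in-memory client recomputes the Merkle root on verification, so this holds:
Assert.True(await client.VerifyRootHashAsync(result.BundleId!, "tenant-a", result.RootHash!));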

View File

@@ -0,0 +1,386 @@
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using StellaOps.ExportCenter.WebService.Telemetry;
namespace StellaOps.ExportCenter.WebService.EvidenceLocker;
/// <summary>
/// HTTP client implementation for pushing export manifests to evidence locker.
/// </summary>
public sealed class ExportEvidenceLockerClient : IExportEvidenceLockerClient
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
WriteIndented = false
};
private readonly HttpClient _httpClient;
private readonly IExportMerkleTreeCalculator _merkleCalculator;
private readonly ILogger<ExportEvidenceLockerClient> _logger;
private readonly ExportEvidenceLockerOptions _options;
public ExportEvidenceLockerClient(
HttpClient httpClient,
IExportMerkleTreeCalculator merkleCalculator,
ILogger<ExportEvidenceLockerClient> logger,
IOptions<ExportEvidenceLockerOptions> options)
{
_httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
_merkleCalculator = merkleCalculator ?? throw new ArgumentNullException(nameof(merkleCalculator));
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_options = options?.Value ?? ExportEvidenceLockerOptions.Default;
}
public async Task<ExportEvidenceSnapshotResult> PushSnapshotAsync(
ExportEvidenceSnapshotRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (!_options.Enabled)
{
_logger.LogDebug("Evidence locker integration disabled; skipping snapshot push");
return ExportEvidenceSnapshotResult.Failed("Evidence locker integration disabled");
}
using var activity = ExportTelemetry.ActivitySource.StartActivity("evidence.push_snapshot");
activity?.SetTag("tenant_id", request.TenantId);
activity?.SetTag("export_run_id", request.ExportRunId);
activity?.SetTag("kind", request.Kind.ToString());
try
{
// Build manifest entries for Merkle calculation
var entries = request.Materials.Select(m => new ExportManifestEntry
{
Section = m.Section,
CanonicalPath = $"{m.Section}/{m.Path}",
Sha256 = m.Sha256.ToLowerInvariant(),
SizeBytes = m.SizeBytes,
MediaType = m.MediaType ?? "application/octet-stream",
Attributes = m.Attributes
}).ToList();
// Pre-calculate Merkle root for verification
var expectedRootHash = _merkleCalculator.CalculateRootHash(entries);
// Build request payload
var apiRequest = new EvidenceSnapshotApiRequest
{
Kind = MapKindToApi(request.Kind),
Description = request.Description,
Metadata = BuildMetadata(request),
Materials = request.Materials.Select(m => new EvidenceSnapshotMaterialApiDto
{
Section = m.Section,
Path = m.Path,
Sha256 = m.Sha256.ToLowerInvariant(),
SizeBytes = m.SizeBytes,
MediaType = m.MediaType ?? "application/octet-stream",
Attributes = m.Attributes?.ToDictionary(kvp => kvp.Key, kvp => kvp.Value)
}).ToList()
};
var response = await _httpClient.PostAsJsonAsync(
$"{_options.BaseUrl}/evidence/snapshot",
apiRequest,
SerializerOptions,
cancellationToken).ConfigureAwait(false);
if (!response.IsSuccessStatusCode)
{
var errorBody = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
_logger.LogError(
"Evidence locker snapshot push failed with status {StatusCode}: {Error}",
response.StatusCode, errorBody);
return ExportEvidenceSnapshotResult.Failed(
$"HTTP {(int)response.StatusCode}: {errorBody}");
}
var apiResponse = await response.Content.ReadFromJsonAsync<EvidenceSnapshotApiResponse>(
SerializerOptions, cancellationToken).ConfigureAwait(false);
if (apiResponse is null)
{
return ExportEvidenceSnapshotResult.Failed("Empty response from evidence locker");
}
// Verify Merkle root matches
if (!string.Equals(apiResponse.RootHash, expectedRootHash, StringComparison.OrdinalIgnoreCase))
{
_logger.LogWarning(
"Merkle root mismatch for export {ExportRunId}: expected {Expected}, got {Actual}",
request.ExportRunId, expectedRootHash, apiResponse.RootHash);
}
_logger.LogInformation(
"Pushed export manifest to evidence locker: bundle={BundleId}, root={RootHash}",
apiResponse.BundleId, apiResponse.RootHash);
ExportTelemetry.ExportArtifactsTotal.Add(1,
new KeyValuePair<string, object?>("artifact_type", "evidence_bundle"),
new KeyValuePair<string, object?>("tenant_id", request.TenantId));
return ExportEvidenceSnapshotResult.Succeeded(
apiResponse.BundleId.ToString(),
apiResponse.RootHash,
MapSignatureFromApi(apiResponse.Signature));
}
catch (HttpRequestException ex)
{
_logger.LogError(ex, "HTTP error pushing export manifest to evidence locker");
return ExportEvidenceSnapshotResult.Failed($"HTTP error: {ex.Message}");
}
catch (TaskCanceledException) when (cancellationToken.IsCancellationRequested)
{
throw;
}
catch (Exception ex)
{
_logger.LogError(ex, "Unexpected error pushing export manifest to evidence locker");
return ExportEvidenceSnapshotResult.Failed($"Unexpected error: {ex.Message}");
}
}
public async Task<bool> UpdateDistributionTranscriptAsync(
string bundleId,
string tenantId,
ExportDistributionInfo distribution,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(bundleId);
ArgumentNullException.ThrowIfNull(tenantId);
ArgumentNullException.ThrowIfNull(distribution);
if (!_options.Enabled)
{
return false;
}
try
{
var request = new { distribution };
var response = await _httpClient.PatchAsJsonAsync(
$"{_options.BaseUrl}/evidence/{bundleId}/distribution",
request,
SerializerOptions,
cancellationToken).ConfigureAwait(false);
return response.IsSuccessStatusCode;
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update distribution transcript for bundle {BundleId}", bundleId);
return false;
}
}
public async Task<ExportBundleManifest?> GetBundleAsync(
string bundleId,
string tenantId,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(bundleId);
ArgumentNullException.ThrowIfNull(tenantId);
if (!_options.Enabled)
{
return null;
}
try
{
var response = await _httpClient.GetAsync(
$"{_options.BaseUrl}/evidence/{bundleId}",
cancellationToken).ConfigureAwait(false);
if (!response.IsSuccessStatusCode)
{
return null;
}
return await response.Content.ReadFromJsonAsync<ExportBundleManifest>(
SerializerOptions, cancellationToken).ConfigureAwait(false);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to get evidence bundle {BundleId}", bundleId);
return null;
}
}
public async Task<bool> VerifyRootHashAsync(
string bundleId,
string tenantId,
string expectedRootHash,
CancellationToken cancellationToken = default)
{
var bundle = await GetBundleAsync(bundleId, tenantId, cancellationToken).ConfigureAwait(false);
if (bundle?.RootHash is null)
{
return false;
}
return string.Equals(bundle.RootHash, expectedRootHash, StringComparison.OrdinalIgnoreCase);
}
private static int MapKindToApi(ExportBundleKind kind)
{
return kind switch
{
ExportBundleKind.Evidence => 1,
ExportBundleKind.Attestation => 2,
ExportBundleKind.Mirror => 3,
ExportBundleKind.Risk => 3, // Maps to Export=3 in evidence locker
ExportBundleKind.OfflineKit => 3,
_ => 3
};
}
private static Dictionary<string, string> BuildMetadata(ExportEvidenceSnapshotRequest request)
{
var metadata = new Dictionary<string, string>(StringComparer.Ordinal)
{
["export_run_id"] = request.ExportRunId,
["export_kind"] = request.Kind.ToString().ToLowerInvariant()
};
if (!string.IsNullOrWhiteSpace(request.ProfileId))
{
metadata["profile_id"] = request.ProfileId;
}
if (request.Metadata is not null)
{
foreach (var (key, value) in request.Metadata)
{
metadata[key] = value;
}
}
return metadata;
}
private static ExportDsseSignatureInfo? MapSignatureFromApi(EvidenceSignatureApiDto? apiSignature)
{
if (apiSignature is null)
{
return null;
}
return new ExportDsseSignatureInfo
{
PayloadType = apiSignature.PayloadType,
Payload = apiSignature.Payload,
Signature = apiSignature.Signature,
KeyId = apiSignature.KeyId,
Algorithm = apiSignature.Algorithm,
Provider = apiSignature.Provider,
SignedAt = apiSignature.SignedAt,
TimestampedAt = apiSignature.TimestampedAt,
TimestampAuthority = apiSignature.TimestampAuthority
};
}
#region API DTOs
private sealed record EvidenceSnapshotApiRequest
{
[JsonPropertyName("kind")]
public int Kind { get; init; }
[JsonPropertyName("description")]
public string? Description { get; init; }
[JsonPropertyName("metadata")]
public Dictionary<string, string>? Metadata { get; init; }
[JsonPropertyName("materials")]
public List<EvidenceSnapshotMaterialApiDto>? Materials { get; init; }
}
private sealed record EvidenceSnapshotMaterialApiDto
{
[JsonPropertyName("section")]
public string? Section { get; init; }
[JsonPropertyName("path")]
public string? Path { get; init; }
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
[JsonPropertyName("size_bytes")]
public long SizeBytes { get; init; }
[JsonPropertyName("media_type")]
public string? MediaType { get; init; }
[JsonPropertyName("attributes")]
public Dictionary<string, string>? Attributes { get; init; }
}
private sealed record EvidenceSnapshotApiResponse
{
[JsonPropertyName("bundle_id")]
public Guid BundleId { get; init; }
[JsonPropertyName("root_hash")]
public required string RootHash { get; init; }
[JsonPropertyName("signature")]
public EvidenceSignatureApiDto? Signature { get; init; }
}
private sealed record EvidenceSignatureApiDto
{
[JsonPropertyName("payload_type")]
public required string PayloadType { get; init; }
[JsonPropertyName("payload")]
public required string Payload { get; init; }
[JsonPropertyName("signature")]
public required string Signature { get; init; }
[JsonPropertyName("key_id")]
public string? KeyId { get; init; }
[JsonPropertyName("algorithm")]
public required string Algorithm { get; init; }
[JsonPropertyName("provider")]
public required string Provider { get; init; }
[JsonPropertyName("signed_at")]
public DateTimeOffset SignedAt { get; init; }
[JsonPropertyName("timestamped_at")]
public DateTimeOffset? TimestampedAt { get; init; }
[JsonPropertyName("timestamp_authority")]
public string? TimestampAuthority { get; init; }
}
#endregion
}
/// <summary>
/// Configuration options for evidence locker integration.
/// </summary>
public sealed class ExportEvidenceLockerOptions
{
public bool Enabled { get; set; } = true;
public string BaseUrl { get; set; } = "http://evidence-locker:8080";
public TimeSpan Timeout { get; set; } = TimeSpan.FromSeconds(30);
public int MaxRetries { get; set; } = 3;
public static ExportEvidenceLockerOptions Default => new();
}
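
A configuration sketch for Program.cs; the URL and timeout values are illustrative and would normally come from bound configuration:

builder.Services.AddExportEvidenceLocker(options =>
{
    options.Enabled = true;
    options.BaseUrl = "https://evidence-locker.internal:8443";
    options.Timeout = TimeSpan.FromSeconds(15);
    options.MaxRetries = 3;
});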

View File

@@ -0,0 +1,186 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.WebService.EvidenceLocker;
/// <summary>
/// Export bundle manifest for evidence locker submission.
/// Aligns with EvidenceLocker bundle-packaging.schema.json.
/// </summary>
public sealed record ExportBundleManifest
{
[JsonPropertyName("bundle_id")]
public required string BundleId { get; init; }
[JsonPropertyName("tenant_id")]
public required string TenantId { get; init; }
[JsonPropertyName("profile_id")]
public string? ProfileId { get; init; }
[JsonPropertyName("export_run_id")]
public required string ExportRunId { get; init; }
[JsonPropertyName("kind")]
public required ExportBundleKind Kind { get; init; }
[JsonPropertyName("created_at")]
public required DateTimeOffset CreatedAt { get; init; }
[JsonPropertyName("root_hash")]
public string? RootHash { get; init; }
[JsonPropertyName("metadata")]
public IReadOnlyDictionary<string, string> Metadata { get; init; } = new Dictionary<string, string>();
[JsonPropertyName("entries")]
public IReadOnlyList<ExportManifestEntry> Entries { get; init; } = [];
[JsonPropertyName("distribution")]
public ExportDistributionInfo? Distribution { get; init; }
}
/// <summary>
/// Export bundle kind for evidence locker categorization.
/// </summary>
[JsonConverter(typeof(JsonStringEnumConverter))]
public enum ExportBundleKind
{
Evidence = 1,
Attestation = 2,
Mirror = 3,
Risk = 4,
OfflineKit = 5
}
/// <summary>
/// Entry in export manifest representing a single artifact.
/// </summary>
public sealed record ExportManifestEntry
{
[JsonPropertyName("section")]
public required string Section { get; init; }
[JsonPropertyName("canonical_path")]
public required string CanonicalPath { get; init; }
[JsonPropertyName("sha256")]
public required string Sha256 { get; init; }
[JsonPropertyName("size_bytes")]
public required long SizeBytes { get; init; }
[JsonPropertyName("media_type")]
public required string MediaType { get; init; }
[JsonPropertyName("attributes")]
public IReadOnlyDictionary<string, string>? Attributes { get; init; }
}
/// <summary>
/// Distribution information for export transcript.
/// </summary>
public sealed record ExportDistributionInfo
{
[JsonPropertyName("type")]
public required string Type { get; init; }
[JsonPropertyName("target_uri")]
public string? TargetUri { get; init; }
[JsonPropertyName("distributed_at")]
public DateTimeOffset? DistributedAt { get; init; }
[JsonPropertyName("checksum")]
public string? Checksum { get; init; }
[JsonPropertyName("size_bytes")]
public long? SizeBytes { get; init; }
}
/// <summary>
/// Request to push export manifest to evidence locker.
/// </summary>
public sealed record ExportEvidenceSnapshotRequest
{
public required string TenantId { get; init; }
public required string ExportRunId { get; init; }
public string? ProfileId { get; init; }
public required ExportBundleKind Kind { get; init; }
public string? Description { get; init; }
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
public required IReadOnlyList<ExportMaterialInput> Materials { get; init; }
public ExportDistributionInfo? Distribution { get; init; }
}
/// <summary>
/// Material input for evidence snapshot.
/// </summary>
public sealed record ExportMaterialInput
{
public required string Section { get; init; }
public required string Path { get; init; }
public required string Sha256 { get; init; }
public required long SizeBytes { get; init; }
public string? MediaType { get; init; }
public IReadOnlyDictionary<string, string>? Attributes { get; init; }
}
/// <summary>
/// Response from evidence locker after snapshot creation.
/// </summary>
public sealed record ExportEvidenceSnapshotResult
{
public bool Success { get; init; }
public string? BundleId { get; init; }
public string? RootHash { get; init; }
public ExportDsseSignatureInfo? Signature { get; init; }
public string? ErrorMessage { get; init; }
public static ExportEvidenceSnapshotResult Succeeded(
string bundleId,
string rootHash,
ExportDsseSignatureInfo? signature = null) =>
new()
{
Success = true,
BundleId = bundleId,
RootHash = rootHash,
Signature = signature
};
public static ExportEvidenceSnapshotResult Failed(string errorMessage) =>
new() { Success = false, ErrorMessage = errorMessage };
}
/// <summary>
/// DSSE signature information from evidence locker.
/// </summary>
public sealed record ExportDsseSignatureInfo
{
[JsonPropertyName("payload_type")]
public required string PayloadType { get; init; }
[JsonPropertyName("payload")]
public required string Payload { get; init; }
[JsonPropertyName("signature")]
public required string Signature { get; init; }
[JsonPropertyName("key_id")]
public string? KeyId { get; init; }
[JsonPropertyName("algorithm")]
public required string Algorithm { get; init; }
[JsonPropertyName("provider")]
public required string Provider { get; init; }
[JsonPropertyName("signed_at")]
public required DateTimeOffset SignedAt { get; init; }
[JsonPropertyName("timestamped_at")]
public DateTimeOffset? TimestampedAt { get; init; }
[JsonPropertyName("timestamp_authority")]
public string? TimestampAuthority { get; init; }
}

View File

@@ -0,0 +1,93 @@
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.ExportCenter.WebService.EvidenceLocker;
/// <summary>
/// Calculates Merkle root hash for export manifest entries.
/// Aligns with EvidenceLocker's MerkleTreeCalculator implementation.
/// </summary>
public interface IExportMerkleTreeCalculator
{
/// <summary>
/// Calculates the Merkle root hash from manifest entries.
/// </summary>
/// <param name="entries">The manifest entries with canonical paths and hashes.</param>
/// <returns>The hex-encoded Merkle root hash.</returns>
string CalculateRootHash(IEnumerable<ExportManifestEntry> entries);
/// <summary>
/// Calculates the Merkle root hash from canonical leaf values.
/// </summary>
/// <param name="canonicalLeafValues">Leaf values in format "canonicalPath|sha256".</param>
/// <returns>The hex-encoded Merkle root hash.</returns>
string CalculateRootHash(IEnumerable<string> canonicalLeafValues);
}
/// <summary>
/// Default implementation of Merkle tree calculator for export manifests.
/// Uses SHA-256 and follows EvidenceLocker's deterministic tree construction.
/// </summary>
public sealed class ExportMerkleTreeCalculator : IExportMerkleTreeCalculator
{
private const string EmptyTreeMarker = "stellaops:evidence:empty";
public string CalculateRootHash(IEnumerable<ExportManifestEntry> entries)
{
ArgumentNullException.ThrowIfNull(entries);
var canonicalLeaves = entries
.OrderBy(e => e.CanonicalPath, StringComparer.Ordinal)
.Select(e => $"{e.CanonicalPath}|{e.Sha256.ToLowerInvariant()}");
return CalculateRootHash(canonicalLeaves);
}
public string CalculateRootHash(IEnumerable<string> canonicalLeafValues)
{
ArgumentNullException.ThrowIfNull(canonicalLeafValues);
var leaves = canonicalLeafValues
.Select(HashString)
.ToArray();
// Special case: empty tree
if (leaves.Length == 0)
{
return HashString(EmptyTreeMarker);
}
return BuildTree(leaves);
}
private static string BuildTree(IReadOnlyList<string> currentLevel)
{
if (currentLevel.Count == 1)
{
return currentLevel[0]; // Root node
}
var nextLevel = new List<string>((currentLevel.Count + 1) / 2);
for (var i = 0; i < currentLevel.Count; i += 2)
{
var left = currentLevel[i];
var right = i + 1 < currentLevel.Count ? currentLevel[i + 1] : left;
// Sort siblings canonically before combining (deterministic ordering)
var combined = string.CompareOrdinal(left, right) <= 0
? $"{left}|{right}"
: $"{right}|{left}";
nextLevel.Add(HashString(combined));
}
return BuildTree(nextLevel);
}
private static string HashString(string input)
{
var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
return Convert.ToHexStringLower(bytes);
}
}
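
A worked sketch of the calculation: each leaf is the SHA-256 of "canonicalPath|sha256" with entries sorted by canonical path, sibling hashes are ordered canonically and re-hashed level by level, and an odd node is paired with itself. The entry values below are illustrative:

var calculator = new ExportMerkleTreeCalculator();

var root = calculator.CalculateRootHash(new[]
{
    new ExportManifestEntry
    {
        Section = "sbom",
        CanonicalPath = "sbom/image.cdx.json",
        Sha256 = new string('a', 64), // placeholder digest
        SizeBytes = 2048,
        MediaType = "application/vnd.cyclonedx+json"
    },
    new ExportManifestEntry
    {
        Section = "vex",
        CanonicalPath = "vex/image.openvex.json",
        Sha256 = new string('b', 64), // placeholder digest
        SizeBytes = 512,
        MediaType = "application/json"
    }
});

// Reordering the input entries yields the same root hash, because leaves are
// sorted by canonical path and sibling hashes are ordered before combining.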

View File

@@ -0,0 +1,58 @@
namespace StellaOps.ExportCenter.WebService.EvidenceLocker;
/// <summary>
/// Client interface for pushing export manifests and transcripts to the evidence locker.
/// </summary>
public interface IExportEvidenceLockerClient
{
/// <summary>
/// Pushes an export manifest snapshot to the evidence locker.
/// Creates a new evidence bundle with the specified materials.
/// </summary>
/// <param name="request">The snapshot request containing materials and metadata.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Result containing bundle ID, root hash, and optional DSSE signature.</returns>
Task<ExportEvidenceSnapshotResult> PushSnapshotAsync(
ExportEvidenceSnapshotRequest request,
CancellationToken cancellationToken = default);
/// <summary>
/// Updates an existing evidence bundle with distribution transcript information.
/// </summary>
/// <param name="bundleId">The evidence bundle ID.</param>
/// <param name="tenantId">The tenant ID.</param>
/// <param name="distribution">Distribution information to record.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if update succeeded.</returns>
Task<bool> UpdateDistributionTranscriptAsync(
string bundleId,
string tenantId,
ExportDistributionInfo distribution,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets the evidence bundle details including signature.
/// </summary>
/// <param name="bundleId">The evidence bundle ID.</param>
/// <param name="tenantId">The tenant ID.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The bundle manifest if found, null otherwise.</returns>
Task<ExportBundleManifest?> GetBundleAsync(
string bundleId,
string tenantId,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies that a bundle's Merkle root matches the expected value.
/// </summary>
/// <param name="bundleId">The evidence bundle ID.</param>
/// <param name="tenantId">The tenant ID.</param>
/// <param name="expectedRootHash">The expected Merkle root hash.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>True if root hash matches.</returns>
Task<bool> VerifyRootHashAsync(
string bundleId,
string tenantId,
string expectedRootHash,
CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,167 @@
using System.Text.Json.Serialization;
namespace StellaOps.ExportCenter.WebService.Incident;
/// <summary>
/// Event types for export incidents.
/// </summary>
public static class ExportIncidentEventTypes
{
/// <summary>
/// Incident activated event.
/// </summary>
public const string IncidentActivated = "export.incident.activated";
/// <summary>
/// Incident updated event.
/// </summary>
public const string IncidentUpdated = "export.incident.updated";
/// <summary>
/// Incident escalated event.
/// </summary>
public const string IncidentEscalated = "export.incident.escalated";
/// <summary>
/// Incident de-escalated event.
/// </summary>
public const string IncidentDeescalated = "export.incident.deescalated";
/// <summary>
/// Incident resolved event.
/// </summary>
public const string IncidentResolved = "export.incident.resolved";
}
/// <summary>
/// Base class for incident events.
/// </summary>
public abstract record ExportIncidentEventBase
{
[JsonPropertyName("event_type")]
public abstract string EventType { get; }
[JsonPropertyName("incident_id")]
public required string IncidentId { get; init; }
[JsonPropertyName("type")]
public required ExportIncidentType Type { get; init; }
[JsonPropertyName("severity")]
public required ExportIncidentSeverity Severity { get; init; }
[JsonPropertyName("status")]
public required ExportIncidentStatus Status { get; init; }
[JsonPropertyName("summary")]
public required string Summary { get; init; }
[JsonPropertyName("timestamp")]
public required DateTimeOffset Timestamp { get; init; }
[JsonPropertyName("affected_tenants")]
public IReadOnlyList<string>? AffectedTenants { get; init; }
[JsonPropertyName("affected_profiles")]
public IReadOnlyList<string>? AffectedProfiles { get; init; }
[JsonPropertyName("correlation_id")]
public string? CorrelationId { get; init; }
}
/// <summary>
/// Event emitted when an incident is activated.
/// </summary>
public sealed record ExportIncidentActivatedEvent : ExportIncidentEventBase
{
public override string EventType => ExportIncidentEventTypes.IncidentActivated;
[JsonPropertyName("description")]
public string? Description { get; init; }
[JsonPropertyName("activated_by")]
public string? ActivatedBy { get; init; }
[JsonPropertyName("metadata")]
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Event emitted when an incident is updated.
/// </summary>
public sealed record ExportIncidentUpdatedEvent : ExportIncidentEventBase
{
public override string EventType => ExportIncidentEventTypes.IncidentUpdated;
[JsonPropertyName("previous_status")]
public ExportIncidentStatus? PreviousStatus { get; init; }
[JsonPropertyName("previous_severity")]
public ExportIncidentSeverity? PreviousSeverity { get; init; }
[JsonPropertyName("update_message")]
public required string UpdateMessage { get; init; }
[JsonPropertyName("updated_by")]
public string? UpdatedBy { get; init; }
}
/// <summary>
/// Event emitted when an incident is escalated.
/// </summary>
public sealed record ExportIncidentEscalatedEvent : ExportIncidentEventBase
{
public override string EventType => ExportIncidentEventTypes.IncidentEscalated;
[JsonPropertyName("previous_severity")]
public required ExportIncidentSeverity PreviousSeverity { get; init; }
[JsonPropertyName("escalation_reason")]
public required string EscalationReason { get; init; }
[JsonPropertyName("escalated_by")]
public string? EscalatedBy { get; init; }
}
/// <summary>
/// Event emitted when an incident is de-escalated.
/// </summary>
public sealed record ExportIncidentDeescalatedEvent : ExportIncidentEventBase
{
public override string EventType => ExportIncidentEventTypes.IncidentDeescalated;
[JsonPropertyName("previous_severity")]
public required ExportIncidentSeverity PreviousSeverity { get; init; }
[JsonPropertyName("deescalation_reason")]
public required string DeescalationReason { get; init; }
[JsonPropertyName("deescalated_by")]
public string? DeescalatedBy { get; init; }
}
/// <summary>
/// Event emitted when an incident is resolved.
/// </summary>
public sealed record ExportIncidentResolvedEvent : ExportIncidentEventBase
{
public override string EventType => ExportIncidentEventTypes.IncidentResolved;
[JsonPropertyName("resolution_message")]
public required string ResolutionMessage { get; init; }
[JsonPropertyName("is_false_positive")]
public bool IsFalsePositive { get; init; }
[JsonPropertyName("resolved_by")]
public string? ResolvedBy { get; init; }
[JsonPropertyName("activated_at")]
public required DateTimeOffset ActivatedAt { get; init; }
[JsonPropertyName("duration_seconds")]
public double DurationSeconds { get; init; }
[JsonPropertyName("post_incident_notes")]
public string? PostIncidentNotes { get; init; }
}

View File

@@ -0,0 +1,535 @@
using System.Collections.Concurrent;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.Extensions.Logging;
using StellaOps.ExportCenter.WebService.Telemetry;
using StellaOps.ExportCenter.WebService.Timeline;
namespace StellaOps.ExportCenter.WebService.Incident;
/// <summary>
/// Manages export incidents and emits events to timeline and notifier.
/// </summary>
public sealed class ExportIncidentManager : IExportIncidentManager
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = false,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower,
Converters = { new JsonStringEnumConverter(JsonNamingPolicy.SnakeCaseLower) }
};
private readonly ILogger<ExportIncidentManager> _logger;
private readonly IExportTimelinePublisher _timelinePublisher;
private readonly IExportNotificationEmitter _notificationEmitter;
private readonly TimeProvider _timeProvider;
// In-memory store for incidents (production would use persistent storage)
private readonly ConcurrentDictionary<string, ExportIncident> _incidents = new();
public ExportIncidentManager(
ILogger<ExportIncidentManager> logger,
IExportTimelinePublisher timelinePublisher,
IExportNotificationEmitter notificationEmitter,
TimeProvider? timeProvider = null)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_timelinePublisher = timelinePublisher ?? throw new ArgumentNullException(nameof(timelinePublisher));
_notificationEmitter = notificationEmitter ?? throw new ArgumentNullException(nameof(notificationEmitter));
_timeProvider = timeProvider ?? TimeProvider.System;
}
public async Task<ExportIncidentResult> ActivateIncidentAsync(
ExportIncidentActivationRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
try
{
var now = _timeProvider.GetUtcNow();
var incidentId = GenerateIncidentId();
var incident = new ExportIncident
{
IncidentId = incidentId,
Type = request.Type,
Severity = request.Severity,
Status = ExportIncidentStatus.Active,
Summary = request.Summary,
Description = request.Description,
AffectedTenants = request.AffectedTenants,
AffectedProfiles = request.AffectedProfiles,
ActivatedAt = now,
LastUpdatedAt = now,
ActivatedBy = request.ActivatedBy,
CorrelationId = request.CorrelationId,
Metadata = request.Metadata,
Updates = new List<ExportIncidentUpdate>
{
new()
{
UpdateId = GenerateUpdateId(),
Timestamp = now,
NewStatus = ExportIncidentStatus.Active,
Message = $"Incident activated: {request.Summary}"
}
}
};
if (!_incidents.TryAdd(incidentId, incident))
{
return ExportIncidentResult.Failed("Failed to store incident");
}
// Emit timeline event
var timelineEvent = new ExportIncidentActivatedEvent
{
IncidentId = incidentId,
Type = request.Type,
Severity = request.Severity,
Status = ExportIncidentStatus.Active,
Summary = request.Summary,
Description = request.Description,
Timestamp = now,
AffectedTenants = request.AffectedTenants,
AffectedProfiles = request.AffectedProfiles,
ActivatedBy = request.ActivatedBy,
CorrelationId = request.CorrelationId,
Metadata = request.Metadata
};
await PublishTimelineEventAsync(timelineEvent, cancellationToken);
// Emit notification
await _notificationEmitter.EmitIncidentActivatedAsync(incident, cancellationToken);
// Record metric
ExportTelemetry.IncidentsActivatedTotal.Add(1,
new("severity", request.Severity.ToString().ToLowerInvariant()),
new("type", request.Type.ToString().ToLowerInvariant()));
_logger.LogWarning(
"Export incident activated: {IncidentId} [{Type}] [{Severity}] - {Summary}",
incidentId, request.Type, request.Severity, request.Summary);
return ExportIncidentResult.Succeeded(incident);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to activate incident");
return ExportIncidentResult.Failed($"Activation failed: {ex.Message}");
}
}
public async Task<ExportIncidentResult> UpdateIncidentAsync(
string incidentId,
ExportIncidentUpdateRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (!_incidents.TryGetValue(incidentId, out var existingIncident))
{
return ExportIncidentResult.Failed("Incident not found");
}
if (existingIncident.Status is ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive)
{
return ExportIncidentResult.Failed("Cannot update resolved incident");
}
try
{
var now = _timeProvider.GetUtcNow();
var previousStatus = existingIncident.Status;
var previousSeverity = existingIncident.Severity;
var newStatus = request.Status ?? existingIncident.Status;
var newSeverity = request.Severity ?? existingIncident.Severity;
var update = new ExportIncidentUpdate
{
UpdateId = GenerateUpdateId(),
Timestamp = now,
PreviousStatus = previousStatus != newStatus ? previousStatus : null,
NewStatus = newStatus,
PreviousSeverity = previousSeverity != newSeverity ? previousSeverity : null,
NewSeverity = previousSeverity != newSeverity ? newSeverity : null,
Message = request.Message,
UpdatedBy = request.UpdatedBy
};
var updatedIncident = existingIncident with
{
Status = newStatus,
Severity = newSeverity,
LastUpdatedAt = now,
Updates = [.. existingIncident.Updates, update]
};
if (!_incidents.TryUpdate(incidentId, updatedIncident, existingIncident))
{
return ExportIncidentResult.Failed("Concurrent update conflict");
}
// Determine event type based on severity change
ExportIncidentEventBase timelineEvent;
if (newSeverity > previousSeverity)
{
timelineEvent = new ExportIncidentEscalatedEvent
{
IncidentId = incidentId,
Type = updatedIncident.Type,
Severity = newSeverity,
Status = newStatus,
Summary = updatedIncident.Summary,
Timestamp = now,
AffectedTenants = updatedIncident.AffectedTenants,
AffectedProfiles = updatedIncident.AffectedProfiles,
CorrelationId = updatedIncident.CorrelationId,
PreviousSeverity = previousSeverity,
EscalationReason = request.Message,
EscalatedBy = request.UpdatedBy
};
ExportTelemetry.IncidentsEscalatedTotal.Add(1,
new("from_severity", previousSeverity.ToString().ToLowerInvariant()),
new("to_severity", newSeverity.ToString().ToLowerInvariant()));
}
else if (newSeverity < previousSeverity)
{
timelineEvent = new ExportIncidentDeescalatedEvent
{
IncidentId = incidentId,
Type = updatedIncident.Type,
Severity = newSeverity,
Status = newStatus,
Summary = updatedIncident.Summary,
Timestamp = now,
AffectedTenants = updatedIncident.AffectedTenants,
AffectedProfiles = updatedIncident.AffectedProfiles,
CorrelationId = updatedIncident.CorrelationId,
PreviousSeverity = previousSeverity,
DeescalationReason = request.Message,
DeescalatedBy = request.UpdatedBy
};
ExportTelemetry.IncidentsDeescalatedTotal.Add(1,
new("from_severity", previousSeverity.ToString().ToLowerInvariant()),
new("to_severity", newSeverity.ToString().ToLowerInvariant()));
}
else
{
timelineEvent = new ExportIncidentUpdatedEvent
{
IncidentId = incidentId,
Type = updatedIncident.Type,
Severity = newSeverity,
Status = newStatus,
Summary = updatedIncident.Summary,
Timestamp = now,
AffectedTenants = updatedIncident.AffectedTenants,
AffectedProfiles = updatedIncident.AffectedProfiles,
CorrelationId = updatedIncident.CorrelationId,
PreviousStatus = previousStatus != newStatus ? previousStatus : null,
PreviousSeverity = previousSeverity != newSeverity ? previousSeverity : null,
UpdateMessage = request.Message,
UpdatedBy = request.UpdatedBy
};
}
await PublishTimelineEventAsync(timelineEvent, cancellationToken);
await _notificationEmitter.EmitIncidentUpdatedAsync(updatedIncident, request.Message, cancellationToken);
_logger.LogInformation(
"Export incident updated: {IncidentId} [{Status}] [{Severity}] - {Message}",
incidentId, newStatus, newSeverity, request.Message);
return ExportIncidentResult.Succeeded(updatedIncident);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to update incident {IncidentId}", incidentId);
return ExportIncidentResult.Failed($"Update failed: {ex.Message}");
}
}
public async Task<ExportIncidentResult> ResolveIncidentAsync(
string incidentId,
ExportIncidentResolutionRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
if (!_incidents.TryGetValue(incidentId, out var existingIncident))
{
return ExportIncidentResult.Failed("Incident not found");
}
if (existingIncident.Status is ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive)
{
return ExportIncidentResult.Failed("Incident already resolved");
}
try
{
var now = _timeProvider.GetUtcNow();
var newStatus = request.IsFalsePositive
? ExportIncidentStatus.FalsePositive
: ExportIncidentStatus.Resolved;
var update = new ExportIncidentUpdate
{
UpdateId = GenerateUpdateId(),
Timestamp = now,
PreviousStatus = existingIncident.Status,
NewStatus = newStatus,
Message = request.ResolutionMessage,
UpdatedBy = request.ResolvedBy
};
var resolvedIncident = existingIncident with
{
Status = newStatus,
LastUpdatedAt = now,
ResolvedAt = now,
ResolvedBy = request.ResolvedBy,
Updates = [.. existingIncident.Updates, update]
};
if (!_incidents.TryUpdate(incidentId, resolvedIncident, existingIncident))
{
return ExportIncidentResult.Failed("Concurrent update conflict");
}
var duration = now - existingIncident.ActivatedAt;
var timelineEvent = new ExportIncidentResolvedEvent
{
IncidentId = incidentId,
Type = resolvedIncident.Type,
Severity = resolvedIncident.Severity,
Status = newStatus,
Summary = resolvedIncident.Summary,
Timestamp = now,
AffectedTenants = resolvedIncident.AffectedTenants,
AffectedProfiles = resolvedIncident.AffectedProfiles,
CorrelationId = resolvedIncident.CorrelationId,
ResolutionMessage = request.ResolutionMessage,
IsFalsePositive = request.IsFalsePositive,
ResolvedBy = request.ResolvedBy,
ActivatedAt = existingIncident.ActivatedAt,
DurationSeconds = duration.TotalSeconds,
PostIncidentNotes = request.PostIncidentNotes
};
await PublishTimelineEventAsync(timelineEvent, cancellationToken);
await _notificationEmitter.EmitIncidentResolvedAsync(
resolvedIncident, request.ResolutionMessage, request.IsFalsePositive, cancellationToken);
// Record metrics
ExportTelemetry.IncidentsResolvedTotal.Add(1,
new("severity", resolvedIncident.Severity.ToString().ToLowerInvariant()),
new("type", resolvedIncident.Type.ToString().ToLowerInvariant()),
new("is_false_positive", request.IsFalsePositive.ToString().ToLowerInvariant()));
ExportTelemetry.IncidentDurationSeconds.Record(duration.TotalSeconds,
new("severity", resolvedIncident.Severity.ToString().ToLowerInvariant()),
new("type", resolvedIncident.Type.ToString().ToLowerInvariant()));
_logger.LogInformation(
"Export incident resolved: {IncidentId} after {Duration:F1}s - {Message}",
incidentId, duration.TotalSeconds, request.ResolutionMessage);
return ExportIncidentResult.Succeeded(resolvedIncident);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to resolve incident {IncidentId}", incidentId);
return ExportIncidentResult.Failed($"Resolution failed: {ex.Message}");
}
}
public Task<ExportIncidentModeStatus> GetIncidentModeStatusAsync(
CancellationToken cancellationToken = default)
{
var activeIncidents = _incidents.Values
.Where(i => i.Status is not (ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive))
.OrderByDescending(i => i.Severity)
.ThenByDescending(i => i.ActivatedAt)
.ToList();
var status = new ExportIncidentModeStatus
{
IncidentModeActive = activeIncidents.Count > 0,
ActiveIncidents = activeIncidents,
HighestSeverity = activeIncidents.Count > 0
? activeIncidents.Max(i => i.Severity)
: null,
AsOf = _timeProvider.GetUtcNow()
};
return Task.FromResult(status);
}
public Task<IReadOnlyList<ExportIncident>> GetActiveIncidentsAsync(
CancellationToken cancellationToken = default)
{
var activeIncidents = _incidents.Values
.Where(i => i.Status is not (ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive))
.OrderByDescending(i => i.Severity)
.ThenByDescending(i => i.ActivatedAt)
.ToList();
return Task.FromResult<IReadOnlyList<ExportIncident>>(activeIncidents);
}
public Task<ExportIncident?> GetIncidentAsync(
string incidentId,
CancellationToken cancellationToken = default)
{
_incidents.TryGetValue(incidentId, out var incident);
return Task.FromResult(incident);
}
public Task<IReadOnlyList<ExportIncident>> GetRecentIncidentsAsync(
int limit = 50,
bool includeResolved = true,
CancellationToken cancellationToken = default)
{
var query = _incidents.Values.AsEnumerable();
if (!includeResolved)
{
query = query.Where(i => i.Status is not (ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive));
}
var incidents = query
.OrderByDescending(i => i.LastUpdatedAt)
.Take(limit)
.ToList();
return Task.FromResult<IReadOnlyList<ExportIncident>>(incidents);
}
public Task<bool> IsIncidentModeActiveAsync(
CancellationToken cancellationToken = default)
{
var isActive = _incidents.Values
.Any(i => i.Status is not (ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive));
return Task.FromResult(isActive);
}
public Task<ExportIncidentSeverity?> GetHighestActiveSeverityAsync(
CancellationToken cancellationToken = default)
{
var activeIncidents = _incidents.Values
.Where(i => i.Status is not (ExportIncidentStatus.Resolved or ExportIncidentStatus.FalsePositive))
.ToList();
var highestSeverity = activeIncidents.Count > 0
? activeIncidents.Max(i => i.Severity)
: (ExportIncidentSeverity?)null;
return Task.FromResult(highestSeverity);
}
private async Task PublishTimelineEventAsync(
ExportIncidentEventBase incidentEvent,
CancellationToken cancellationToken)
{
try
{
var eventJson = JsonSerializer.Serialize(incidentEvent, incidentEvent.GetType(), SerializerOptions);
            // Publish to the timeline via the timeline publisher.
            // Note: this creates a synthetic export-started event to leverage the existing publisher.
await _timelinePublisher.PublishIncidentEventAsync(
incidentEvent.EventType,
incidentEvent.IncidentId,
eventJson,
incidentEvent.CorrelationId,
cancellationToken);
}
catch (Exception ex)
{
_logger.LogError(ex, "Failed to publish incident timeline event {EventType}", incidentEvent.EventType);
}
}
private static string GenerateIncidentId()
{
return $"inc-{Guid.NewGuid():N}"[..20];
}
private static string GenerateUpdateId()
{
return $"upd-{Guid.NewGuid():N}"[..16];
}
}
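// --- Illustrative usage sketch (not part of this commit) -------------------------
// The query surface above (IsIncidentModeActiveAsync / GetHighestActiveSeverityAsync)
// is meant to be consulted by export entry points. The hypothetical guard below shows
// one way a caller might gate new export runs while incident mode is active. The
// interface name IExportIncidentService, the class name ExportIncidentGate, and the
// Major severity threshold are assumptions for illustration only.
public sealed class ExportIncidentGate
{
    private readonly IExportIncidentService _incidentService; // assumed interface implemented by the service above

    public ExportIncidentGate(IExportIncidentService incidentService)
    {
        _incidentService = incidentService ?? throw new ArgumentNullException(nameof(incidentService));
    }

    /// <summary>Returns false when an active incident at or above Major severity should block new exports.</summary>
    public async Task<bool> CanStartExportAsync(CancellationToken cancellationToken = default)
    {
        if (!await _incidentService.IsIncidentModeActiveAsync(cancellationToken))
        {
            return true;
        }

        var highest = await _incidentService.GetHighestActiveSeverityAsync(cancellationToken);
        return highest is null || highest < ExportIncidentSeverity.Major; // Major is an assumed enum member
    }
}
// ----------------------------------------------------------------------------------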
/// <summary>
/// Interface for emitting incident notifications.
/// </summary>
public interface IExportNotificationEmitter
{
Task EmitIncidentActivatedAsync(ExportIncident incident, CancellationToken cancellationToken = default);
Task EmitIncidentUpdatedAsync(ExportIncident incident, string updateMessage, CancellationToken cancellationToken = default);
Task EmitIncidentResolvedAsync(ExportIncident incident, string resolutionMessage, bool isFalsePositive, CancellationToken cancellationToken = default);
}
/// <summary>
/// Default implementation of the notification emitter; it logs notifications.
/// Production deployments would integrate with an actual notification service (Email, Slack, Teams, PagerDuty);
/// an illustrative webhook-based sketch follows this class.
/// </summary>
public sealed class LoggingNotificationEmitter : IExportNotificationEmitter
{
private readonly ILogger<LoggingNotificationEmitter> _logger;
public LoggingNotificationEmitter(ILogger<LoggingNotificationEmitter> logger)
{
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
}
public Task EmitIncidentActivatedAsync(ExportIncident incident, CancellationToken cancellationToken = default)
{
_logger.LogWarning(
"NOTIFICATION: Incident Activated [{Severity}] - {Summary}. ID: {IncidentId}",
incident.Severity, incident.Summary, incident.IncidentId);
ExportTelemetry.NotificationsEmittedTotal.Add(1,
new("type", "incident_activated"),
new("severity", incident.Severity.ToString().ToLowerInvariant()));
return Task.CompletedTask;
}
public Task EmitIncidentUpdatedAsync(ExportIncident incident, string updateMessage, CancellationToken cancellationToken = default)
{
_logger.LogInformation(
"NOTIFICATION: Incident Updated [{Severity}] - {Message}. ID: {IncidentId}",
incident.Severity, updateMessage, incident.IncidentId);
ExportTelemetry.NotificationsEmittedTotal.Add(1,
new("type", "incident_updated"),
new("severity", incident.Severity.ToString().ToLowerInvariant()));
return Task.CompletedTask;
}
public Task EmitIncidentResolvedAsync(ExportIncident incident, string resolutionMessage, bool isFalsePositive, CancellationToken cancellationToken = default)
{
_logger.LogInformation(
"NOTIFICATION: Incident Resolved [{Status}] - {Message}. ID: {IncidentId}, FalsePositive: {IsFalsePositive}",
incident.Status, resolutionMessage, incident.IncidentId, isFalsePositive);
ExportTelemetry.NotificationsEmittedTotal.Add(1,
new("type", "incident_resolved"),
new("is_false_positive", isFalsePositive.ToString().ToLowerInvariant()));
return Task.CompletedTask;
}
}
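// --- Illustrative sketch (not part of this commit) --------------------------------
// The doc comment above notes that production deployments would integrate with a real
// notification channel. The class below is a minimal, hypothetical example of such an
// emitter that posts a JSON payload to a generic incoming-webhook URL (Slack/Teams
// style). The class name, constructor parameters, and payload shape are assumptions
// for illustration only; a real integration would follow the target service's schema.
// Requires `using System.Net.Http.Json;` in addition to the usings already present.
public sealed class WebhookNotificationEmitter : IExportNotificationEmitter
{
    private static readonly JsonSerializerOptions PayloadOptions = new(JsonSerializerDefaults.Web);

    private readonly HttpClient _httpClient;
    private readonly Uri _webhookUri; // hypothetical: supplied from configuration
    private readonly ILogger<WebhookNotificationEmitter> _logger;

    public WebhookNotificationEmitter(HttpClient httpClient, Uri webhookUri, ILogger<WebhookNotificationEmitter> logger)
    {
        _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        _webhookUri = webhookUri ?? throw new ArgumentNullException(nameof(webhookUri));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    public Task EmitIncidentActivatedAsync(ExportIncident incident, CancellationToken cancellationToken = default)
        => PostAsync("incident_activated", incident, $"Incident activated [{incident.Severity}]: {incident.Summary}", cancellationToken);

    public Task EmitIncidentUpdatedAsync(ExportIncident incident, string updateMessage, CancellationToken cancellationToken = default)
        => PostAsync("incident_updated", incident, updateMessage, cancellationToken);

    public Task EmitIncidentResolvedAsync(ExportIncident incident, string resolutionMessage, bool isFalsePositive, CancellationToken cancellationToken = default)
        => PostAsync("incident_resolved", incident, $"{resolutionMessage} (false positive: {isFalsePositive})", cancellationToken);

    private async Task PostAsync(string eventType, ExportIncident incident, string message, CancellationToken cancellationToken)
    {
        // Generic payload; real services (Slack, Teams, PagerDuty) each define their own schema.
        var payload = new
        {
            type = eventType,
            incidentId = incident.IncidentId,
            severity = incident.Severity.ToString().ToLowerInvariant(),
            message
        };

        try
        {
            using var response = await _httpClient.PostAsJsonAsync(_webhookUri, payload, PayloadOptions, cancellationToken);
            response.EnsureSuccessStatusCode();
            ExportTelemetry.NotificationsEmittedTotal.Add(1,
                new("type", eventType),
                new("severity", incident.Severity.ToString().ToLowerInvariant()));
        }
        catch (Exception ex)
        {
            // Notification failures should not fail incident handling; log and continue.
            _logger.LogError(ex, "Failed to emit {EventType} notification for incident {IncidentId}", eventType, incident.IncidentId);
        }
    }
}
// A production deployment would register an emitter like this in DI in place of
// LoggingNotificationEmitter and supply the webhook URL from configuration.
// ----------------------------------------------------------------------------------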

Some files were not shown because too many files have changed in this diff.