stabilization work - projects rework for maintainability and UI livening

This commit is contained in:
master
2026-02-03 23:40:04 +02:00
parent 074ce117ba
commit 557feefdc3
3305 changed files with 186813 additions and 107843 deletions

View File

@@ -0,0 +1,11 @@
using System.IO.Compression;
namespace StellaOps.Replay.Core.Bundle;
/// <summary>
/// Options that control how a .stella-replay.tgz bundle is assembled.
/// </summary>
public sealed record BundleOptions
{
/// <summary>When true, vulnerability feed snapshots are copied into the bundle under feeds/.</summary>
public bool IncludeFeeds { get; init; } = true;
/// <summary>When true and <see cref="SigningKey"/> is set, a SIGNATURE.sig entry is added to the bundle.</summary>
public bool Sign { get; init; } = true;
/// <summary>Key material used to sign the bundle; signing is skipped when null.</summary>
public string? SigningKey { get; init; }
/// <summary>Gzip compression level applied to the bundle stream.</summary>
public CompressionLevel Compression { get; init; } = CompressionLevel.Optimal;
}

View File

@@ -0,0 +1,12 @@
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.Bundle;
/// <summary>
/// Content store interface for retrieving snapshot content by digest.
/// </summary>
public interface IContentStore
{
/// <summary>
/// Retrieves the raw bytes of the stored object identified by <paramref name="digest"/>.
/// </summary>
/// <param name="digest">Content digest of the object to fetch.</param>
/// <param name="ct">Token used to cancel the retrieval.</param>
Task<byte[]> GetContentAsync(string digest, CancellationToken ct = default);
}

View File

@@ -0,0 +1,16 @@
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Replay.Core.Manifest;
using StellaOps.Replay.Core.Models;
namespace StellaOps.Replay.Core.Bundle;
/// <summary>
/// Writes portable .stella-replay.tgz bundles from knowledge snapshots.
/// </summary>
public interface IStellaReplayBundleWriter
{
/// <summary>
/// Creates a replay bundle for <paramref name="snapshot"/> and <paramref name="outputs"/>
/// at <paramref name="outputPath"/> and returns the path of the written bundle file.
/// </summary>
Task<string> WriteBundleAsync(
KnowledgeSnapshot snapshot,
ReplayOutputs outputs,
string outputPath,
BundleOptions options,
CancellationToken ct = default);
}

View File

@@ -0,0 +1,33 @@
using System.Formats.Tar;
using System.IO;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.Bundle;
public sealed partial class StellaReplayBundleWriter
{
    /// <summary>
    /// Encodes <paramref name="content"/> as UTF-8 and writes it as a regular-file tar entry.
    /// </summary>
    private static async Task WriteEntryAsync(
        TarWriter writer,
        string path,
        string content,
        CancellationToken ct)
    {
        var encoded = Encoding.UTF8.GetBytes(content);
        await WriteEntryAsync(writer, path, encoded, ct).ConfigureAwait(false);
    }

    /// <summary>
    /// Writes raw bytes as a regular-file PAX tar entry at the given archive path.
    /// </summary>
    private static async Task WriteEntryAsync(
        TarWriter writer,
        string path,
        byte[] content,
        CancellationToken ct)
    {
        // Read-only view over the payload; the tar writer consumes it as the entry body.
        var payload = new MemoryStream(content, writable: false);
        var tarEntry = new PaxTarEntry(TarEntryType.RegularFile, path)
        {
            DataStream = payload
        };
        await writer.WriteEntryAsync(tarEntry, ct).ConfigureAwait(false);
    }
}

View File

@@ -0,0 +1,33 @@
using System.Formats.Tar;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Replay.Core.Models;
namespace StellaOps.Replay.Core.Bundle;
public sealed partial class StellaReplayBundleWriter
{
    /// <summary>
    /// Serializes the snapshot's lattice configuration to config/lattice.json in the bundle.
    /// </summary>
    private Task WriteLatticeConfigAsync(TarWriter writer, KnowledgeSnapshot snapshot, CancellationToken ct)
    {
        var lattice = snapshot.LatticeConfig;
        var payload = JsonSerializer.Serialize(new
        {
            type = lattice.LatticeType,
            joinTable = lattice.JoinTable,
            meetTable = lattice.MeetTable
        });
        return WriteEntryAsync(writer, "config/lattice.json", payload, ct);
    }

    /// <summary>
    /// Serializes the snapshot's trust configuration to config/trust.json in the bundle.
    /// </summary>
    private Task WriteTrustConfigAsync(TarWriter writer, KnowledgeSnapshot snapshot, CancellationToken ct)
    {
        var trust = snapshot.TrustConfig;
        var payload = JsonSerializer.Serialize(new
        {
            sourceWeights = trust.SourceWeights,
            defaultWeight = trust.DefaultWeight
        });
        return WriteEntryAsync(writer, "config/trust.json", payload, ct);
    }
}

View File

@@ -0,0 +1,64 @@
using System.Formats.Tar;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Replay.Core.Models;
namespace StellaOps.Replay.Core.Bundle;
public sealed partial class StellaReplayBundleWriter
{
    /// <summary>Copies every SBOM referenced by the snapshot into the bundle.</summary>
    private async Task WriteSbomsAsync(TarWriter writer, KnowledgeSnapshot snapshot, CancellationToken ct)
    {
        foreach (var entry in snapshot.Sboms)
        {
            var bytes = await _contentStore.GetContentAsync(entry.Digest, ct).ConfigureAwait(false);
            // Honor an explicit bundle path when present; otherwise derive one from the id.
            var archivePath = entry.BundlePath ?? $"sboms/{entry.Id}.json";
            await WriteEntryAsync(writer, archivePath, bytes, ct).ConfigureAwait(false);
        }
    }

    /// <summary>Copies every VEX document referenced by the snapshot into the bundle.</summary>
    private async Task WriteVexAsync(TarWriter writer, KnowledgeSnapshot snapshot, CancellationToken ct)
    {
        foreach (var entry in snapshot.VexDocuments)
        {
            var bytes = await _contentStore.GetContentAsync(entry.Digest, ct).ConfigureAwait(false);
            var archivePath = entry.BundlePath ?? $"vex/{entry.Id}.json";
            await WriteEntryAsync(writer, archivePath, bytes, ct).ConfigureAwait(false);
        }
    }

    /// <summary>Copies reachability subgraphs into the bundle, one entry per entry point.</summary>
    private async Task WriteReachabilityAsync(TarWriter writer, KnowledgeSnapshot snapshot, CancellationToken ct)
    {
        foreach (var entry in snapshot.ReachSubgraphs)
        {
            var bytes = await _contentStore.GetContentAsync(entry.Digest, ct).ConfigureAwait(false);
            var archivePath = entry.BundlePath ?? $"reach/{entry.EntryPoint}.json";
            await WriteEntryAsync(writer, archivePath, bytes, ct).ConfigureAwait(false);
        }
    }

    /// <summary>Copies exception records into exceptions/ in the bundle.</summary>
    private async Task WriteExceptionsAsync(TarWriter writer, KnowledgeSnapshot snapshot, CancellationToken ct)
    {
        foreach (var entry in snapshot.Exceptions)
        {
            var bytes = await _contentStore.GetContentAsync(entry.Digest, ct).ConfigureAwait(false);
            await WriteEntryAsync(writer, $"exceptions/{entry.ExceptionId}.json", bytes, ct).ConfigureAwait(false);
        }
    }

    /// <summary>Copies the policy bundle archive to policies/bundle.tar.gz in the bundle.</summary>
    private async Task WritePolicyBundleAsync(TarWriter writer, KnowledgeSnapshot snapshot, CancellationToken ct)
    {
        var bytes = await _contentStore.GetContentAsync(snapshot.PolicyBundle.Digest, ct).ConfigureAwait(false);
        await WriteEntryAsync(writer, "policies/bundle.tar.gz", bytes, ct).ConfigureAwait(false);
    }

    /// <summary>Copies feed version snapshots into feeds/ in the bundle.</summary>
    private async Task WriteFeedsAsync(TarWriter writer, KnowledgeSnapshot snapshot, CancellationToken ct)
    {
        foreach (var entry in snapshot.FeedVersions)
        {
            var bytes = await _contentStore.GetContentAsync(entry.Digest, ct).ConfigureAwait(false);
            await WriteEntryAsync(writer, $"feeds/{entry.FeedId}.json", bytes, ct).ConfigureAwait(false);
        }
    }
}

View File

@@ -0,0 +1,15 @@
using System.Formats.Tar;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Replay.Core.Manifest;
namespace StellaOps.Replay.Core.Bundle;
public sealed partial class StellaReplayBundleWriter
{
    /// <summary>
    /// Copies the verdict document into the bundle at the path recorded in <paramref name="outputs"/>.
    /// </summary>
    private async Task WriteVerdictAsync(TarWriter writer, ReplayOutputs outputs, CancellationToken ct)
    {
        var bytes = await _contentStore.GetContentAsync(outputs.VerdictDigest, ct).ConfigureAwait(false);
        await WriteEntryAsync(writer, outputs.VerdictPath, bytes, ct).ConfigureAwait(false);
    }
}

View File

@@ -0,0 +1,30 @@
using System;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Replay.Core.Models;
namespace StellaOps.Replay.Core.Bundle;
public sealed partial class StellaReplayBundleWriter
{
    /// <summary>
    /// Produces a signature string over the snapshot's identity fields
    /// (snapshotId, inputsHash, createdAt).
    /// </summary>
    /// <param name="snapshot">Snapshot whose identity is signed.</param>
    /// <param name="signingKey">Secret key material used to key the MAC.</param>
    /// <param name="ct">Cancellation token, checked before work starts.</param>
    private Task<string> SignBundleAsync(
        KnowledgeSnapshot snapshot,
        string signingKey,
        CancellationToken ct)
    {
        ct.ThrowIfCancellationRequested();

        var payload = JsonSerializer.Serialize(new
        {
            snapshotId = snapshot.SnapshotId,
            inputsHash = snapshot.InputsHash,
            createdAt = snapshot.CreatedAt
        });

        // Key the digest with the signing key (HMAC-SHA256) so the signature
        // actually depends on the key: the previous implementation ignored
        // signingKey entirely, producing an unkeyed hash anyone could forge.
        // NOTE(review): this is still a placeholder, not a real DSSE envelope —
        // replace with a proper DSSE signing implementation.
        using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(signingKey));
        var mac = hmac.ComputeHash(Encoding.UTF8.GetBytes(payload));
        return Task.FromResult($"DSSE:hmac-sha256:{Convert.ToHexString(mac)}");
    }
}

View File

@@ -0,0 +1,68 @@
using System;
using System.Formats.Tar;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Replay.Core.Manifest;
using StellaOps.Replay.Core.Models;
namespace StellaOps.Replay.Core.Bundle;
public sealed partial class StellaReplayBundleWriter
{
    /// <summary>
    /// Creates a .stella-replay.tgz bundle from a snapshot.
    /// </summary>
    /// <param name="snapshot">Knowledge snapshot whose artifacts are bundled.</param>
    /// <param name="outputs">Replay outputs (verdict) written alongside the inputs.</param>
    /// <param name="outputPath">Target path; the .stella-replay.tgz suffix is appended if missing.</param>
    /// <param name="options">Bundle options (feeds inclusion, signing, compression).</param>
    /// <param name="ct">Cancellation token flowed through all entry writes.</param>
    /// <returns>The path of the written bundle file.</returns>
    public async Task<string> WriteBundleAsync(
        KnowledgeSnapshot snapshot,
        ReplayOutputs outputs,
        string outputPath,
        BundleOptions options,
        CancellationToken ct = default)
    {
        _logger.LogInformation(
            "Creating replay bundle for snapshot {SnapshotId} at {Path}",
            snapshot.SnapshotId,
            outputPath);

        var bundlePath = outputPath.EndsWith(".stella-replay.tgz", StringComparison.Ordinal)
            ? outputPath
            : $"{outputPath}.stella-replay.tgz";

        try
        {
            // Disposal order is reverse of declaration: tar -> gzip -> file,
            // which flushes the archive before the underlying file is closed.
            await using var fileStream = File.Create(bundlePath);
            await using var gzipStream = new GZipStream(fileStream, options.Compression);
            await using var tarWriter = new TarWriter(gzipStream);

            var manifest = ReplayManifestWriter.CreateManifest(snapshot, outputs);
            await WriteEntryAsync(tarWriter, "REPLAY.yaml", manifest, ct).ConfigureAwait(false);
            await WriteSbomsAsync(tarWriter, snapshot, ct).ConfigureAwait(false);
            await WriteVexAsync(tarWriter, snapshot, ct).ConfigureAwait(false);
            await WriteReachabilityAsync(tarWriter, snapshot, ct).ConfigureAwait(false);
            await WriteExceptionsAsync(tarWriter, snapshot, ct).ConfigureAwait(false);
            await WritePolicyBundleAsync(tarWriter, snapshot, ct).ConfigureAwait(false);
            if (options.IncludeFeeds)
            {
                await WriteFeedsAsync(tarWriter, snapshot, ct).ConfigureAwait(false);
            }
            await WriteLatticeConfigAsync(tarWriter, snapshot, ct).ConfigureAwait(false);
            await WriteTrustConfigAsync(tarWriter, snapshot, ct).ConfigureAwait(false);
            await WriteVerdictAsync(tarWriter, outputs, ct).ConfigureAwait(false);
            if (options.Sign && options.SigningKey is not null)
            {
                var signature = await SignBundleAsync(snapshot, options.SigningKey, ct).ConfigureAwait(false);
                await WriteEntryAsync(tarWriter, "SIGNATURE.sig", signature, ct).ConfigureAwait(false);
            }
        }
        catch
        {
            // Don't leave a truncated/corrupt bundle on disk when writing fails
            // or is cancelled part-way through.
            TryDeletePartialBundle(bundlePath);
            throw;
        }

        _logger.LogInformation(
            "Created replay bundle {Path} ({Size} bytes)",
            bundlePath,
            new FileInfo(bundlePath).Length);
        return bundlePath;
    }

    /// <summary>Best-effort removal of a partially written bundle file.</summary>
    private static void TryDeletePartialBundle(string path)
    {
        try
        {
            File.Delete(path);
        }
        catch (IOException)
        {
            // Cleanup is best-effort; the original failure is what callers see.
        }
        catch (UnauthorizedAccessException)
        {
        }
    }
}

View File

@@ -1,25 +1,11 @@
using Microsoft.Extensions.Logging;
using StellaOps.Replay.Core.Manifest;
using StellaOps.Replay.Core.Models;
using System;
using System.Collections.Generic;
using System.Formats.Tar;
using System.IO;
using System.IO.Compression;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.Bundle;
/// <summary>
/// Writes .stella-replay.tgz bundles for portable replay.
/// </summary>
public sealed class StellaReplayBundleWriter : IStellaReplayBundleWriter
public sealed partial class StellaReplayBundleWriter : IStellaReplayBundleWriter
{
private readonly IContentStore _contentStore;
private readonly ILogger<StellaReplayBundleWriter> _logger;
@@ -31,179 +17,4 @@ public sealed class StellaReplayBundleWriter : IStellaReplayBundleWriter
_contentStore = contentStore;
_logger = logger;
}
/// <summary>
/// Creates a .stella-replay.tgz bundle from a snapshot.
/// </summary>
public async Task<string> WriteBundleAsync(
KnowledgeSnapshot snapshot,
ReplayOutputs outputs,
string outputPath,
BundleOptions options,
CancellationToken ct = default)
{
_logger.LogInformation(
"Creating replay bundle for snapshot {SnapshotId} at {Path}",
snapshot.SnapshotId, outputPath);
var bundlePath = outputPath.EndsWith(".stella-replay.tgz")
? outputPath
: $"{outputPath}.stella-replay.tgz";
using var fileStream = File.Create(bundlePath);
using var gzipStream = new GZipStream(fileStream, options.Compression);
using var tarWriter = new TarWriter(gzipStream);
// Write REPLAY.yaml manifest
var manifest = ReplayManifestWriter.CreateManifest(snapshot, outputs);
await WriteEntryAsync(tarWriter, "REPLAY.yaml", manifest, ct);
// Write SBOMs
foreach (var sbom in snapshot.Sboms)
{
var content = await _contentStore.GetContentAsync(sbom.Digest, ct);
var path = sbom.BundlePath ?? $"sboms/{sbom.Id}.json";
await WriteEntryAsync(tarWriter, path, content, ct);
}
// Write VEX documents
foreach (var vex in snapshot.VexDocuments)
{
var content = await _contentStore.GetContentAsync(vex.Digest, ct);
var path = vex.BundlePath ?? $"vex/{vex.Id}.json";
await WriteEntryAsync(tarWriter, path, content, ct);
}
// Write reachability subgraphs
foreach (var reach in snapshot.ReachSubgraphs)
{
var content = await _contentStore.GetContentAsync(reach.Digest, ct);
var path = reach.BundlePath ?? $"reach/{reach.EntryPoint}.json";
await WriteEntryAsync(tarWriter, path, content, ct);
}
// Write exceptions
foreach (var exception in snapshot.Exceptions)
{
var content = await _contentStore.GetContentAsync(exception.Digest, ct);
await WriteEntryAsync(tarWriter, $"exceptions/{exception.ExceptionId}.json", content, ct);
}
// Write policy bundle
var policyContent = await _contentStore.GetContentAsync(snapshot.PolicyBundle.Digest, ct);
await WriteEntryAsync(tarWriter, "policies/bundle.tar.gz", policyContent, ct);
// Write feeds (if included)
if (options.IncludeFeeds)
{
foreach (var feed in snapshot.FeedVersions)
{
var content = await _contentStore.GetContentAsync(feed.Digest, ct);
await WriteEntryAsync(tarWriter, $"feeds/{feed.FeedId}.json", content, ct);
}
}
// Write lattice config
var latticeConfig = new
{
type = snapshot.LatticeConfig.LatticeType,
joinTable = snapshot.LatticeConfig.JoinTable,
meetTable = snapshot.LatticeConfig.MeetTable
};
await WriteEntryAsync(tarWriter, "config/lattice.json",
JsonSerializer.Serialize(latticeConfig), ct);
// Write trust config
var trustConfig = new
{
sourceWeights = snapshot.TrustConfig.SourceWeights,
defaultWeight = snapshot.TrustConfig.DefaultWeight
};
await WriteEntryAsync(tarWriter, "config/trust.json",
JsonSerializer.Serialize(trustConfig), ct);
// Write verdict
var verdictContent = await _contentStore.GetContentAsync(outputs.VerdictDigest, ct);
await WriteEntryAsync(tarWriter, outputs.VerdictPath, verdictContent, ct);
// Sign if requested
if (options.Sign && options.SigningKey is not null)
{
var signature = await SignBundleAsync(snapshot, options.SigningKey, ct);
await WriteEntryAsync(tarWriter, "SIGNATURE.sig", signature, ct);
}
_logger.LogInformation(
"Created replay bundle {Path} ({Size} bytes)",
bundlePath, new FileInfo(bundlePath).Length);
return bundlePath;
}
private static async Task WriteEntryAsync(
TarWriter writer,
string path,
string content,
CancellationToken ct)
{
var bytes = Encoding.UTF8.GetBytes(content);
await WriteEntryAsync(writer, path, bytes, ct);
}
private static async Task WriteEntryAsync(
TarWriter writer,
string path,
byte[] content,
CancellationToken ct)
{
var entry = new PaxTarEntry(TarEntryType.RegularFile, path)
{
DataStream = new MemoryStream(content)
};
await writer.WriteEntryAsync(entry, ct);
}
private async Task<string> SignBundleAsync(
KnowledgeSnapshot snapshot,
string signingKey,
CancellationToken ct)
{
// Create DSSE envelope
var payload = JsonSerializer.Serialize(new
{
snapshotId = snapshot.SnapshotId,
inputsHash = snapshot.InputsHash,
createdAt = snapshot.CreatedAt
});
// Sign with key (actual signing implementation)
// Return DSSE envelope
return $"DSSE:sha256:{Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(payload)))}";
}
}
public interface IStellaReplayBundleWriter
{
Task<string> WriteBundleAsync(
KnowledgeSnapshot snapshot,
ReplayOutputs outputs,
string outputPath,
BundleOptions options,
CancellationToken ct = default);
}
public sealed record BundleOptions
{
public bool IncludeFeeds { get; init; } = true;
public bool Sign { get; init; } = true;
public string? SigningKey { get; init; }
public CompressionLevel Compression { get; init; } = CompressionLevel.Optimal;
}
/// <summary>
/// Content store interface for retrieving snapshot content by digest.
/// </summary>
public interface IContentStore
{
Task<byte[]> GetContentAsync(string digest, CancellationToken ct = default);
}

View File

@@ -15,7 +15,7 @@ namespace StellaOps.Replay.Core;
/// </summary>
public static class CanonicalJson
{
private static readonly JsonSerializerOptions SerializerOptions = new()
private static readonly JsonSerializerOptions _serializerOptions = new()
{
PropertyNamingPolicy = null,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
@@ -29,7 +29,7 @@ public static class CanonicalJson
{
ArgumentNullException.ThrowIfNull(value);
var element = JsonSerializer.SerializeToElement(value, SerializerOptions);
var element = JsonSerializer.SerializeToElement(value, _serializerOptions);
var buffer = new ArrayBufferWriter<byte>();
using var writer = new Utf8JsonWriter(buffer, new JsonWriterOptions
{

View File

@@ -0,0 +1,10 @@
namespace StellaOps.Replay.Core;
/// <summary>
/// A reference to a CAS object for validation.
/// </summary>
/// <param name="CasUri">URI locating the object in the content-addressed store.</param>
/// <param name="ExpectedHash">Hash the stored content is expected to match.</param>
/// <param name="HashAlgorithm">Optional hash algorithm name; semantics of the default are defined by the validator.</param>
public sealed record CasReference(
string CasUri,
string ExpectedHash,
string? HashAlgorithm = null
);

View File

@@ -0,0 +1,10 @@
namespace StellaOps.Replay.Core;
/// <summary>
/// Error details for a single CAS validation failure in a batch.
/// </summary>
/// <param name="CasUri">URI of the CAS object that failed validation.</param>
/// <param name="ErrorCode">Machine-readable error code for the failure.</param>
/// <param name="Message">Human-readable description of the failure.</param>
public sealed record CasValidationError(
string CasUri,
string ErrorCode,
string Message
);

View File

@@ -0,0 +1,29 @@
using System.Collections.Generic;
namespace StellaOps.Replay.Core;
/// <summary>
/// Result of a CAS validation operation.
/// </summary>
public sealed record CasValidationResult(
bool IsValid,
string? ActualHash = null,
string? Error = null,
IReadOnlyList<CasValidationError>? Errors = null
)
{
    /// <summary>Validation succeeded; the recomputed hash is recorded.</summary>
    public static CasValidationResult Success(string actualHash)
    {
        return new CasValidationResult(true, actualHash);
    }

    /// <summary>Validation failed with a free-form error message.</summary>
    public static CasValidationResult Failure(string error)
    {
        return new CasValidationResult(false, Error: error);
    }

    /// <summary>The referenced CAS object could not be located.</summary>
    public static CasValidationResult NotFound(string casUri)
    {
        return new CasValidationResult(false, Error: $"CAS object not found: {casUri}");
    }

    /// <summary>The recomputed hash did not match the expected one.</summary>
    public static CasValidationResult HashMismatch(string casUri, string expected, string actual)
    {
        return new CasValidationResult(false, ActualHash: actual, Error: $"Hash mismatch for {casUri}: expected {expected}, got {actual}");
    }

    /// <summary>Aggregate result for a batch of validations.</summary>
    public static CasValidationResult BatchResult(bool isValid, IReadOnlyList<CasValidationError> errors)
    {
        return new CasValidationResult(isValid, Errors: errors);
    }
}

View File

@@ -0,0 +1,18 @@
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core;
/// <summary>
/// Coverage statistics for symbol/code identifier assignment across graph nodes.
/// Mutable with JSON property mappings — presumably a (de)serialization target; confirm against callers.
/// </summary>
public sealed class CodeIdCoverage
{
// Total number of nodes considered.
[JsonPropertyName("total_nodes")]
public int TotalNodes { get; set; }
// Count of nodes that carry a symbol identifier.
[JsonPropertyName("nodes_with_symbol_id")]
public int NodesWithSymbolId { get; set; }
// Count of nodes that carry a code identifier.
[JsonPropertyName("nodes_with_code_id")]
public int NodesWithCodeId { get; set; }
// Coverage expressed as a percentage; computed by the producer, not here.
[JsonPropertyName("coverage_percent")]
public double CoveragePercent { get; set; }
}

View File

@@ -1,9 +1,9 @@
using StellaOps.Cryptography;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using StellaOps.Cryptography;
namespace StellaOps.Replay.Core;

View File

@@ -0,0 +1,32 @@
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Details about a detected drift.
/// </summary>
public sealed record DriftDetail
{
/// <summary>
/// Type of drift (sbom, verdict, feed, policy).
/// </summary>
public required string Type { get; init; }
/// <summary>
/// Field or path where drift was detected.
/// </summary>
public required string Field { get; init; }
/// <summary>
/// Expected value (as recorded in the manifest).
/// </summary>
public required string Expected { get; init; }
/// <summary>
/// Actual value found during verification.
/// </summary>
public required string Actual { get; init; }
/// <summary>
/// Optional human-readable description of the drift.
/// </summary>
public string? Message { get; init; }
}

View File

@@ -0,0 +1,31 @@
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Reference to a scanned artifact.
/// </summary>
public sealed record ExportArtifactRef
{
/// <summary>Artifact kind discriminator (serialized as "type").</summary>
[JsonPropertyName("type")]
public required string Type { get; init; }
/// <summary>Content digest identifying the artifact.</summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>Human-readable artifact name; omitted from JSON when null.</summary>
[JsonPropertyName("name")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Name { get; init; }
/// <summary>Source registry host, when applicable; omitted when null.</summary>
[JsonPropertyName("registry")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Registry { get; init; }
/// <summary>Repository within the registry; omitted when null.</summary>
[JsonPropertyName("repository")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Repository { get; init; }
/// <summary>Tag the artifact was resolved from; omitted when null.</summary>
[JsonPropertyName("tag")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Tag { get; init; }
}

View File

@@ -0,0 +1,29 @@
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// CI environment context.
/// </summary>
public sealed record ExportCiEnvironment
{
/// <summary>CI provider name; omitted from JSON when null.</summary>
[JsonPropertyName("provider")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Provider { get; init; }
/// <summary>Source repository identifier; omitted when null.</summary>
[JsonPropertyName("repository")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Repository { get; init; }
/// <summary>Branch the build ran against; omitted when null.</summary>
[JsonPropertyName("branch")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Branch { get; init; }
/// <summary>Commit identifier of the build; omitted when null.</summary>
[JsonPropertyName("commit")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Commit { get; init; }
/// <summary>CI run identifier; omitted when null.</summary>
[JsonPropertyName("runId")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? RunId { get; init; }
}

View File

@@ -0,0 +1,18 @@
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Exit code definitions for CI integration.
/// </summary>
public sealed record ExportExitCodes
{
/// <summary>Exit code reported when verification passes. Default: 0.</summary>
[JsonPropertyName("success")]
public int Success { get; init; } = 0;
/// <summary>Exit code reported when drift is detected. Default: 1.</summary>
[JsonPropertyName("drift")]
public int Drift { get; init; } = 1;
/// <summary>Exit code reported on verification error. Default: 2.</summary>
[JsonPropertyName("error")]
public int Error { get; init; } = 2;
}

View File

@@ -0,0 +1,31 @@
using System;
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Vulnerability feed snapshot.
/// </summary>
public sealed record ExportFeedSnapshot
{
/// <summary>Stable identifier of the feed.</summary>
[JsonPropertyName("feedId")]
public required string FeedId { get; init; }
/// <summary>Display name of the feed; omitted from JSON when null.</summary>
[JsonPropertyName("name")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Name { get; init; }
/// <summary>Feed version captured in the snapshot.</summary>
[JsonPropertyName("version")]
public required string Version { get; init; }
/// <summary>Content digest of the feed snapshot.</summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>When the feed data was fetched; omitted when null.</summary>
[JsonPropertyName("fetchedAt")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public DateTimeOffset? FetchedAt { get; init; }
/// <summary>Number of records in the feed snapshot; omitted when null.</summary>
[JsonPropertyName("recordCount")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? RecordCount { get; init; }
}

View File

@@ -0,0 +1,32 @@
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Summary of findings by severity.
/// </summary>
public sealed record ExportFindingsSummary
{
/// <summary>Total number of findings.</summary>
[JsonPropertyName("total")]
public int Total { get; init; }
/// <summary>Count of critical-severity findings.</summary>
[JsonPropertyName("critical")]
public int Critical { get; init; }
/// <summary>Count of high-severity findings.</summary>
[JsonPropertyName("high")]
public int High { get; init; }
/// <summary>Count of medium-severity findings.</summary>
[JsonPropertyName("medium")]
public int Medium { get; init; }
/// <summary>Count of low-severity findings.</summary>
[JsonPropertyName("low")]
public int Low { get; init; }
/// <summary>Count of findings deemed reachable; omitted from JSON when null.</summary>
[JsonPropertyName("reachable")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? Reachable { get; init; }
/// <summary>Count of findings suppressed by VEX statements; omitted when null.</summary>
[JsonPropertyName("vexSuppressed")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? VexSuppressed { get; init; }
}

View File

@@ -0,0 +1,28 @@
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// All input artifacts used in the scan.
/// </summary>
public sealed record ExportInputArtifacts
{
/// <summary>SBOM inputs; always present.</summary>
[JsonPropertyName("sboms")]
public required IReadOnlyList<ExportSbomInput> Sboms { get; init; }
/// <summary>VEX document inputs; omitted from JSON when null.</summary>
[JsonPropertyName("vex")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public IReadOnlyList<ExportVexInput>? Vex { get; init; }
/// <summary>Vulnerability feed snapshots; always present.</summary>
[JsonPropertyName("feeds")]
public required IReadOnlyList<ExportFeedSnapshot> Feeds { get; init; }
/// <summary>Policy bundle reference; omitted when null.</summary>
[JsonPropertyName("policies")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public ExportPolicyBundle? Policies { get; init; }
/// <summary>Reachability analysis inputs; omitted when null.</summary>
[JsonPropertyName("reachability")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public IReadOnlyList<ExportReachabilityInput>? Reachability { get; init; }
}

View File

@@ -0,0 +1,29 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Export metadata for tracking and debugging.
/// </summary>
public sealed record ExportMetadataInfo
{
/// <summary>
/// When the export was created. Callers should provide this explicitly for determinism.
/// </summary>
[JsonPropertyName("exportedAt")]
public required DateTimeOffset ExportedAt { get; init; }
/// <summary>Identity that produced the export; omitted from JSON when null.</summary>
[JsonPropertyName("exportedBy")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ExportedBy { get; init; }
/// <summary>CI environment the export ran in; omitted when null.</summary>
[JsonPropertyName("ciEnvironment")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public ExportCiEnvironment? CiEnvironment { get; init; }
/// <summary>Free-form caller-supplied annotations; omitted when null.</summary>
[JsonPropertyName("annotations")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public IReadOnlyDictionary<string, string>? Annotations { get; init; }
}

View File

@@ -0,0 +1,31 @@
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Output artifacts from the scan.
/// </summary>
public sealed record ExportOutputArtifacts
{
/// <summary>Content digest of the verdict document.</summary>
[JsonPropertyName("verdictDigest")]
public required string VerdictDigest { get; init; }
/// <summary>Scan decision outcome.</summary>
[JsonPropertyName("decision")]
public required string Decision { get; init; }
/// <summary>Path of the verdict document; omitted from JSON when null.</summary>
[JsonPropertyName("verdictPath")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? VerdictPath { get; init; }
/// <summary>Digest of the produced SBOM; omitted when null.</summary>
[JsonPropertyName("sbomDigest")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? SbomDigest { get; init; }
/// <summary>Digest of the findings document; omitted when null.</summary>
[JsonPropertyName("findingsDigest")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? FindingsDigest { get; init; }
/// <summary>Severity breakdown of the findings; omitted when null.</summary>
[JsonPropertyName("findingsSummary")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public ExportFindingsSummary? FindingsSummary { get; init; }
}

View File

@@ -0,0 +1,27 @@
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Policy bundle reference.
/// </summary>
public sealed record ExportPolicyBundle
{
/// <summary>Path of the bundle file; omitted from JSON when null.</summary>
[JsonPropertyName("bundlePath")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? BundlePath { get; init; }
/// <summary>Content digest of the policy bundle.</summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>Bundle version; omitted when null.</summary>
[JsonPropertyName("version")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Version { get; init; }
/// <summary>Hash over the policy rules.</summary>
[JsonPropertyName("rulesHash")]
public required string RulesHash { get; init; }
/// <summary>Number of rules in the bundle; omitted when null.</summary>
[JsonPropertyName("ruleCount")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? RuleCount { get; init; }
}

View File

@@ -0,0 +1,30 @@
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Reachability analysis input.
/// </summary>
public sealed record ExportReachabilityInput
{
/// <summary>Path of the reachability artifact.</summary>
[JsonPropertyName("path")]
public required string Path { get; init; }
/// <summary>Content digest of the artifact.</summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>Entry point the analysis started from.</summary>
[JsonPropertyName("entryPoint")]
public required string EntryPoint { get; init; }
/// <summary>Number of nodes in the subgraph; omitted from JSON when null.</summary>
[JsonPropertyName("nodeCount")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? NodeCount { get; init; }
/// <summary>Number of edges in the subgraph; omitted when null.</summary>
[JsonPropertyName("edgeCount")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? EdgeCount { get; init; }
/// <summary>Analyzer that produced the subgraph; omitted when null.</summary>
[JsonPropertyName("analyzer")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? Analyzer { get; init; }
}

View File

@@ -0,0 +1,22 @@
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// SBOM input artifact.
/// </summary>
public sealed record ExportSbomInput
{
/// <summary>Path of the SBOM file.</summary>
[JsonPropertyName("path")]
public required string Path { get; init; }
/// <summary>Content digest of the SBOM.</summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>SBOM format identifier.</summary>
[JsonPropertyName("format")]
public required string Format { get; init; }
/// <summary>Number of components in the SBOM; omitted from JSON when null.</summary>
[JsonPropertyName("componentCount")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? ComponentCount { get; init; }
}

View File

@@ -0,0 +1,23 @@
using System;
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Snapshot identification information.
/// </summary>
public sealed record ExportSnapshotInfo
{
/// <summary>Unique snapshot identifier.</summary>
[JsonPropertyName("id")]
public required string Id { get; init; }
/// <summary>When the snapshot was created.</summary>
[JsonPropertyName("createdAt")]
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>The artifact the snapshot was taken for.</summary>
[JsonPropertyName("artifact")]
public required ExportArtifactRef Artifact { get; init; }
/// <summary>Identifier of the preceding snapshot; omitted from JSON when null.</summary>
[JsonPropertyName("previousId")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? PreviousId { get; init; }
}

View File

@@ -0,0 +1,28 @@
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Toolchain version information for reproducibility.
/// </summary>
public sealed record ExportToolchainInfo
{
/// <summary>Version of the scanner that produced the export.</summary>
[JsonPropertyName("scannerVersion")]
public required string ScannerVersion { get; init; }
/// <summary>Policy engine version; omitted from JSON when null.</summary>
[JsonPropertyName("policyEngineVersion")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? PolicyEngineVersion { get; init; }
/// <summary>Platform the scan ran on.</summary>
[JsonPropertyName("platform")]
public required string Platform { get; init; }
/// <summary>.NET runtime version; omitted when null.</summary>
[JsonPropertyName("dotnetVersion")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? DotnetVersion { get; init; }
/// <summary>Per-analyzer version map; omitted when null.</summary>
[JsonPropertyName("analyzerVersions")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public IReadOnlyDictionary<string, string>? AnalyzerVersions { get; init; }
}

View File

@@ -0,0 +1,23 @@
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Verification command and expected hashes for CI.
/// </summary>
public sealed record ExportVerificationInfo
{
/// <summary>Command a CI job runs to verify the export.</summary>
[JsonPropertyName("command")]
public required string Command { get; init; }
/// <summary>Expected SBOM hash; omitted from JSON when null.</summary>
[JsonPropertyName("expectedSbomHash")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ExpectedSbomHash { get; init; }
/// <summary>Expected verdict hash; verification compares against this.</summary>
[JsonPropertyName("expectedVerdictHash")]
public required string ExpectedVerdictHash { get; init; }
/// <summary>Exit code mapping for CI integration; omitted when null.</summary>
[JsonPropertyName("exitCodes")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public ExportExitCodes? ExitCodes { get; init; }
}

View File

@@ -0,0 +1,29 @@
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// VEX document input.
/// </summary>
public sealed record ExportVexInput
{
/// <summary>Path of the VEX document.</summary>
[JsonPropertyName("path")]
public required string Path { get; init; }
/// <summary>Content digest of the document.</summary>
[JsonPropertyName("digest")]
public required string Digest { get; init; }
/// <summary>Source (issuer) of the VEX document.</summary>
[JsonPropertyName("source")]
public required string Source { get; init; }
/// <summary>VEX format identifier.</summary>
[JsonPropertyName("format")]
public required string Format { get; init; }
/// <summary>Trust score assigned to the source; omitted from JSON when null.</summary>
[JsonPropertyName("trustScore")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public double? TrustScore { get; init; }
/// <summary>Number of VEX statements in the document; omitted when null.</summary>
[JsonPropertyName("statementCount")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? StatementCount { get; init; }
}

View File

@@ -1,8 +1,5 @@
// -----------------------------------------------------------------------------
// IReplayManifestExporter.cs
// Sprint: SPRINT_20251228_001_BE_replay_manifest_ci
// Task: T2 — Implement ReplayManifestExporter service (Interface)
// -----------------------------------------------------------------------------
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.Export;
@@ -47,199 +44,3 @@ public interface IReplayManifestExporter
ReplayVerifyOptions options,
CancellationToken ct = default);
}
/// <summary>
/// Options for exporting replay manifests.
/// </summary>
public sealed record ReplayExportOptions
{
/// <summary>
/// Include toolchain version information.
/// </summary>
public bool IncludeToolchainVersions { get; init; } = true;
/// <summary>
/// Include feed snapshot information.
/// </summary>
public bool IncludeFeedSnapshots { get; init; } = true;
/// <summary>
/// Include reachability analysis data.
/// </summary>
public bool IncludeReachability { get; init; } = true;
/// <summary>
/// Generate verification command in output.
/// </summary>
public bool GenerateVerificationCommand { get; init; } = true;
/// <summary>
/// Output file path. Defaults to "replay.json".
/// </summary>
public string OutputPath { get; init; } = "replay.json";
/// <summary>
/// Pretty-print JSON output.
/// </summary>
public bool PrettyPrint { get; init; } = true;
/// <summary>
/// Include CI environment metadata if available.
/// </summary>
public bool IncludeCiEnvironment { get; init; } = true;
/// <summary>
/// Custom annotations to include in metadata.
/// </summary>
public IReadOnlyDictionary<string, string>? Annotations { get; init; }
}
/// <summary>
/// Result of replay manifest export.
/// </summary>
public sealed record ReplayExportResult
{
    /// <summary>
    /// Whether the export succeeded.
    /// </summary>
    public required bool Success { get; init; }
    /// <summary>
    /// Path to the exported manifest file; null when export failed.
    /// </summary>
    public string? ManifestPath { get; init; }
    /// <summary>
    /// SHA-256 digest of the serialized manifest content ("sha256:&lt;hex&gt;" form).
    /// </summary>
    public string? ManifestDigest { get; init; }
    /// <summary>
    /// Path to generated verification script, if applicable.
    /// </summary>
    public string? VerificationScriptPath { get; init; }
    /// <summary>
    /// Error message if export failed; null on success.
    /// </summary>
    public string? Error { get; init; }
    /// <summary>
    /// The exported manifest object; null when export failed.
    /// </summary>
    public ReplayExportManifest? Manifest { get; init; }
}
/// <summary>
/// Options for verifying replay manifests.
/// </summary>
public sealed record ReplayVerifyOptions
{
    /// <summary>
    /// Fail if SBOM hash differs from expected. Defaults to <c>true</c>.
    /// </summary>
    public bool FailOnSbomDrift { get; init; } = true;
    /// <summary>
    /// Fail if verdict hash differs from expected. Defaults to <c>true</c>.
    /// </summary>
    public bool FailOnVerdictDrift { get; init; } = true;
    /// <summary>
    /// Enable strict mode (fail on any drift). Defaults to <c>false</c>.
    /// </summary>
    public bool StrictMode { get; init; } = false;
    /// <summary>
    /// Output detailed drift information. Defaults to <c>true</c>.
    /// </summary>
    public bool DetailedDriftDetection { get; init; } = true;
}
/// <summary>
/// Result of replay manifest verification.
/// </summary>
public sealed record ReplayVerifyResult
{
    /// <summary>
    /// Whether verification passed (no drift detected).
    /// </summary>
    public required bool Success { get; init; }
    /// <summary>
    /// Exit code for CI integration.
    /// 0 = success, 1 = drift, 2 = error.
    /// </summary>
    public required int ExitCode { get; init; }
    /// <summary>
    /// Whether SBOM hash matches expected.
    /// </summary>
    public bool SbomHashMatches { get; init; }
    /// <summary>
    /// Whether verdict hash matches expected.
    /// </summary>
    public bool VerdictHashMatches { get; init; }
    /// <summary>
    /// Expected SBOM hash from manifest.
    /// </summary>
    public string? ExpectedSbomHash { get; init; }
    /// <summary>
    /// Actual SBOM hash from replay; null when no replay was performed.
    /// </summary>
    public string? ActualSbomHash { get; init; }
    /// <summary>
    /// Expected verdict hash from manifest.
    /// </summary>
    public string? ExpectedVerdictHash { get; init; }
    /// <summary>
    /// Actual verdict hash from replay; null when no replay was performed.
    /// </summary>
    public string? ActualVerdictHash { get; init; }
    /// <summary>
    /// List of detected drifts; null when none were recorded.
    /// </summary>
    public IReadOnlyList<DriftDetail>? Drifts { get; init; }
    /// <summary>
    /// Error message if verification failed with an error (exit code 2).
    /// </summary>
    public string? Error { get; init; }
}
/// <summary>
/// Details about a detected drift between expected and actual replay values.
/// </summary>
public sealed record DriftDetail
{
    /// <summary>
    /// Type of drift (sbom, verdict, feed, policy).
    /// </summary>
    public required string Type { get; init; }
    /// <summary>
    /// Field or path where drift was detected.
    /// </summary>
    public required string Field { get; init; }
    /// <summary>
    /// Expected value.
    /// </summary>
    public required string Expected { get; init; }
    /// <summary>
    /// Actual value found.
    /// </summary>
    public required string Actual { get; init; }
    /// <summary>
    /// Optional human-readable description of the drift.
    /// </summary>
    public string? Message { get; init; }
}

View File

@@ -0,0 +1,32 @@
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Root model for replay export manifest.
/// Conforms to replay-export.schema.json v1.0.0.
/// </summary>
public sealed record ReplayExportManifest
{
    /// <summary>Schema version; fixed at "1.0.0" for the current schema.</summary>
    [JsonPropertyName("version")]
    public string Version { get; init; } = "1.0.0";
    /// <summary>Snapshot identification for the scan being exported.</summary>
    [JsonPropertyName("snapshot")]
    public required ExportSnapshotInfo Snapshot { get; init; }
    /// <summary>Toolchain versions used, for reproducibility.</summary>
    [JsonPropertyName("toolchain")]
    public required ExportToolchainInfo Toolchain { get; init; }
    /// <summary>All input artifacts (SBOMs, feeds, policies, reachability).</summary>
    [JsonPropertyName("inputs")]
    public required ExportInputArtifacts Inputs { get; init; }
    /// <summary>Output artifacts produced by the scan.</summary>
    [JsonPropertyName("outputs")]
    public required ExportOutputArtifacts Outputs { get; init; }
    /// <summary>Verification command and expected hashes for CI.</summary>
    [JsonPropertyName("verification")]
    public required ExportVerificationInfo Verification { get; init; }
    /// <summary>Optional export metadata; omitted from JSON when null.</summary>
    [JsonPropertyName("metadata")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ExportMetadataInfo? Metadata { get; init; }
}

View File

@@ -1,398 +0,0 @@
// -----------------------------------------------------------------------------
// ReplayExportModels.cs
// Sprint: SPRINT_20251228_001_BE_replay_manifest_ci
// Task: T1 — Define replay.json export schema (C# models)
// -----------------------------------------------------------------------------
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Root model for replay export manifest.
/// Conforms to replay-export.schema.json v1.0.0.
/// </summary>
public sealed record ReplayExportManifest
{
    /// <summary>Schema version; fixed at "1.0.0" for the current schema.</summary>
    [JsonPropertyName("version")]
    public string Version { get; init; } = "1.0.0";
    /// <summary>Snapshot identification for the scan being exported.</summary>
    [JsonPropertyName("snapshot")]
    public required ExportSnapshotInfo Snapshot { get; init; }
    /// <summary>Toolchain versions used, for reproducibility.</summary>
    [JsonPropertyName("toolchain")]
    public required ExportToolchainInfo Toolchain { get; init; }
    /// <summary>All input artifacts (SBOMs, feeds, policies, reachability).</summary>
    [JsonPropertyName("inputs")]
    public required ExportInputArtifacts Inputs { get; init; }
    /// <summary>Output artifacts produced by the scan.</summary>
    [JsonPropertyName("outputs")]
    public required ExportOutputArtifacts Outputs { get; init; }
    /// <summary>Verification command and expected hashes for CI.</summary>
    [JsonPropertyName("verification")]
    public required ExportVerificationInfo Verification { get; init; }
    /// <summary>Optional export metadata; omitted from JSON when null.</summary>
    [JsonPropertyName("metadata")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ExportMetadataInfo? Metadata { get; init; }
}
/// <summary>
/// Snapshot identification information.
/// </summary>
public sealed record ExportSnapshotInfo
{
    /// <summary>Unique snapshot identifier.</summary>
    [JsonPropertyName("id")]
    public required string Id { get; init; }
    /// <summary>When the snapshot was created.</summary>
    [JsonPropertyName("createdAt")]
    public required DateTimeOffset CreatedAt { get; init; }
    /// <summary>The scanned artifact this snapshot describes.</summary>
    [JsonPropertyName("artifact")]
    public required ExportArtifactRef Artifact { get; init; }
    /// <summary>Id of the preceding snapshot, if any; omitted when null.</summary>
    [JsonPropertyName("previousId")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? PreviousId { get; init; }
}
/// <summary>
/// Reference to a scanned artifact.
/// </summary>
public sealed record ExportArtifactRef
{
    /// <summary>Artifact kind (the exporter currently emits "oci-image").</summary>
    [JsonPropertyName("type")]
    public required string Type { get; init; }
    /// <summary>Content digest of the artifact.</summary>
    [JsonPropertyName("digest")]
    public required string Digest { get; init; }
    /// <summary>Optional display name; omitted when null.</summary>
    [JsonPropertyName("name")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Name { get; init; }
    /// <summary>Optional registry host; omitted when null.</summary>
    [JsonPropertyName("registry")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Registry { get; init; }
    /// <summary>Optional repository path; omitted when null.</summary>
    [JsonPropertyName("repository")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Repository { get; init; }
    /// <summary>Optional image tag; omitted when null.</summary>
    [JsonPropertyName("tag")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Tag { get; init; }
}
/// <summary>
/// Toolchain version information for reproducibility.
/// </summary>
public sealed record ExportToolchainInfo
{
    /// <summary>Version of the scanner that produced the results.</summary>
    [JsonPropertyName("scannerVersion")]
    public required string ScannerVersion { get; init; }
    /// <summary>Policy engine version, if known; omitted when null.</summary>
    [JsonPropertyName("policyEngineVersion")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? PolicyEngineVersion { get; init; }
    /// <summary>Runtime platform identifier (e.g. a .NET RID).</summary>
    [JsonPropertyName("platform")]
    public required string Platform { get; init; }
    /// <summary>.NET framework description, if captured; omitted when null.</summary>
    [JsonPropertyName("dotnetVersion")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? DotnetVersion { get; init; }
    /// <summary>Analyzer name → version map; omitted when null.</summary>
    [JsonPropertyName("analyzerVersions")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public IReadOnlyDictionary<string, string>? AnalyzerVersions { get; init; }
}
/// <summary>
/// All input artifacts used in the scan.
/// </summary>
public sealed record ExportInputArtifacts
{
    /// <summary>SBOM documents fed into the scan.</summary>
    [JsonPropertyName("sboms")]
    public required IReadOnlyList<ExportSbomInput> Sboms { get; init; }
    /// <summary>VEX documents, if any; omitted when null.</summary>
    [JsonPropertyName("vex")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public IReadOnlyList<ExportVexInput>? Vex { get; init; }
    /// <summary>Vulnerability feed snapshots that were consulted.</summary>
    [JsonPropertyName("feeds")]
    public required IReadOnlyList<ExportFeedSnapshot> Feeds { get; init; }
    /// <summary>Policy bundle applied, if any; omitted when null.</summary>
    [JsonPropertyName("policies")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ExportPolicyBundle? Policies { get; init; }
    /// <summary>Reachability analysis inputs, if any; omitted when null.</summary>
    [JsonPropertyName("reachability")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public IReadOnlyList<ExportReachabilityInput>? Reachability { get; init; }
}
/// <summary>
/// SBOM input artifact.
/// </summary>
public sealed record ExportSbomInput
{
    /// <summary>Path to the SBOM document.</summary>
    [JsonPropertyName("path")]
    public required string Path { get; init; }
    /// <summary>Content digest of the SBOM document.</summary>
    [JsonPropertyName("digest")]
    public required string Digest { get; init; }
    /// <summary>SBOM format identifier (e.g. "cyclonedx-1.6" as emitted by the exporter).</summary>
    [JsonPropertyName("format")]
    public required string Format { get; init; }
    /// <summary>Number of components in the SBOM, if counted; omitted when null.</summary>
    [JsonPropertyName("componentCount")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public int? ComponentCount { get; init; }
}
/// <summary>
/// VEX document input.
/// </summary>
public sealed record ExportVexInput
{
    /// <summary>Path to the VEX document.</summary>
    [JsonPropertyName("path")]
    public required string Path { get; init; }
    /// <summary>Content digest of the VEX document.</summary>
    [JsonPropertyName("digest")]
    public required string Digest { get; init; }
    /// <summary>Issuer/source of the VEX statements.</summary>
    [JsonPropertyName("source")]
    public required string Source { get; init; }
    /// <summary>VEX document format identifier.</summary>
    [JsonPropertyName("format")]
    public required string Format { get; init; }
    /// <summary>Trust score assigned to the source, if any; omitted when null.</summary>
    [JsonPropertyName("trustScore")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public double? TrustScore { get; init; }
    /// <summary>Number of statements in the document, if counted; omitted when null.</summary>
    [JsonPropertyName("statementCount")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public int? StatementCount { get; init; }
}
/// <summary>
/// Vulnerability feed snapshot.
/// </summary>
public sealed record ExportFeedSnapshot
{
    /// <summary>Feed identifier (the exporter emits "combined" for merged feeds).</summary>
    [JsonPropertyName("feedId")]
    public required string FeedId { get; init; }
    /// <summary>Optional display name; omitted when null.</summary>
    [JsonPropertyName("name")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Name { get; init; }
    /// <summary>Feed snapshot version.</summary>
    [JsonPropertyName("version")]
    public required string Version { get; init; }
    /// <summary>Content digest of the snapshot ("sha256:" prefixed).</summary>
    [JsonPropertyName("digest")]
    public required string Digest { get; init; }
    /// <summary>When the snapshot was fetched, if known; omitted when null.</summary>
    [JsonPropertyName("fetchedAt")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public DateTimeOffset? FetchedAt { get; init; }
    /// <summary>Number of records in the snapshot, if counted; omitted when null.</summary>
    [JsonPropertyName("recordCount")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public int? RecordCount { get; init; }
}
/// <summary>
/// Policy bundle reference.
/// </summary>
public sealed record ExportPolicyBundle
{
    /// <summary>Path to the policy bundle, if known; omitted when null.</summary>
    [JsonPropertyName("bundlePath")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? BundlePath { get; init; }
    /// <summary>Content digest of the bundle.</summary>
    [JsonPropertyName("digest")]
    public required string Digest { get; init; }
    /// <summary>Bundle version, if known; omitted when null.</summary>
    [JsonPropertyName("version")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Version { get; init; }
    /// <summary>Hash over the rule set (the exporter currently reuses the bundle digest).</summary>
    [JsonPropertyName("rulesHash")]
    public required string RulesHash { get; init; }
    /// <summary>Number of rules in the bundle, if counted; omitted when null.</summary>
    [JsonPropertyName("ruleCount")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public int? RuleCount { get; init; }
}
/// <summary>
/// Reachability analysis input.
/// </summary>
public sealed record ExportReachabilityInput
{
    /// <summary>Path (CAS URI) of the reachability graph.</summary>
    [JsonPropertyName("path")]
    public required string Path { get; init; }
    /// <summary>Content digest of the graph.</summary>
    [JsonPropertyName("digest")]
    public required string Digest { get; init; }
    /// <summary>Entry point analyzed (the exporter falls back to "default").</summary>
    [JsonPropertyName("entryPoint")]
    public required string EntryPoint { get; init; }
    /// <summary>Graph node count, if known; omitted when null.</summary>
    [JsonPropertyName("nodeCount")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public int? NodeCount { get; init; }
    /// <summary>Graph edge count, if known; omitted when null.</summary>
    [JsonPropertyName("edgeCount")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public int? EdgeCount { get; init; }
    /// <summary>Analyzer that produced the graph, if known; omitted when null.</summary>
    [JsonPropertyName("analyzer")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Analyzer { get; init; }
}
/// <summary>
/// Output artifacts from the scan.
/// </summary>
public sealed record ExportOutputArtifacts
{
    /// <summary>Digest of the verdict document.</summary>
    [JsonPropertyName("verdictDigest")]
    public required string VerdictDigest { get; init; }
    /// <summary>Scan decision (e.g. "review" as currently emitted by the exporter).</summary>
    [JsonPropertyName("decision")]
    public required string Decision { get; init; }
    /// <summary>Path to the verdict document, if any; omitted when null.</summary>
    [JsonPropertyName("verdictPath")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? VerdictPath { get; init; }
    /// <summary>Digest of the output SBOM, if any; omitted when null.</summary>
    [JsonPropertyName("sbomDigest")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? SbomDigest { get; init; }
    /// <summary>Digest of the findings document, if any; omitted when null.</summary>
    [JsonPropertyName("findingsDigest")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? FindingsDigest { get; init; }
    /// <summary>Findings-by-severity summary, if any; omitted when null.</summary>
    [JsonPropertyName("findingsSummary")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ExportFindingsSummary? FindingsSummary { get; init; }
}
/// <summary>
/// Summary of findings by severity.
/// </summary>
public sealed record ExportFindingsSummary
{
    /// <summary>Total finding count.</summary>
    [JsonPropertyName("total")]
    public int Total { get; init; }
    /// <summary>Critical-severity count.</summary>
    [JsonPropertyName("critical")]
    public int Critical { get; init; }
    /// <summary>High-severity count.</summary>
    [JsonPropertyName("high")]
    public int High { get; init; }
    /// <summary>Medium-severity count.</summary>
    [JsonPropertyName("medium")]
    public int Medium { get; init; }
    /// <summary>Low-severity count.</summary>
    [JsonPropertyName("low")]
    public int Low { get; init; }
    /// <summary>Count of reachable findings, if computed; omitted when null.</summary>
    [JsonPropertyName("reachable")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public int? Reachable { get; init; }
    /// <summary>Count of VEX-suppressed findings, if computed; omitted when null.</summary>
    [JsonPropertyName("vexSuppressed")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public int? VexSuppressed { get; init; }
}
/// <summary>
/// Verification command and expected hashes for CI.
/// </summary>
public sealed record ExportVerificationInfo
{
    /// <summary>CLI command that re-verifies this manifest.</summary>
    [JsonPropertyName("command")]
    public required string Command { get; init; }
    /// <summary>Expected SBOM hash, if available; omitted when null.</summary>
    [JsonPropertyName("expectedSbomHash")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? ExpectedSbomHash { get; init; }
    /// <summary>Expected verdict hash.</summary>
    [JsonPropertyName("expectedVerdictHash")]
    public required string ExpectedVerdictHash { get; init; }
    /// <summary>Exit-code contract for CI; omitted when null.</summary>
    [JsonPropertyName("exitCodes")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ExportExitCodes? ExitCodes { get; init; }
}
/// <summary>
/// Exit code definitions for CI integration.
/// </summary>
public sealed record ExportExitCodes
{
    /// <summary>Exit code for a successful verification. Defaults to 0.</summary>
    [JsonPropertyName("success")]
    public int Success { get; init; } = 0;
    /// <summary>Exit code when drift is detected. Defaults to 1.</summary>
    [JsonPropertyName("drift")]
    public int Drift { get; init; } = 1;
    /// <summary>Exit code for verification errors. Defaults to 2.</summary>
    [JsonPropertyName("error")]
    public int Error { get; init; } = 2;
}
/// <summary>
/// Export metadata for tracking and debugging.
/// </summary>
public sealed record ExportMetadataInfo
{
    /// <summary>
    /// When the export was created. Callers should provide this explicitly for determinism.
    /// </summary>
    [JsonPropertyName("exportedAt")]
    public required DateTimeOffset ExportedAt { get; init; }
    /// <summary>Tool/user that performed the export; omitted when null.</summary>
    [JsonPropertyName("exportedBy")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? ExportedBy { get; init; }
    /// <summary>Detected CI environment context; omitted when null.</summary>
    [JsonPropertyName("ciEnvironment")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ExportCiEnvironment? CiEnvironment { get; init; }
    /// <summary>Caller-supplied annotations; omitted when null.</summary>
    [JsonPropertyName("annotations")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public IReadOnlyDictionary<string, string>? Annotations { get; init; }
}
/// <summary>
/// CI environment context (populated from provider environment variables).
/// </summary>
public sealed record ExportCiEnvironment
{
    /// <summary>CI provider name (github, gitlab, gitea, jenkins); omitted when null.</summary>
    [JsonPropertyName("provider")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Provider { get; init; }
    /// <summary>Repository identifier; omitted when null.</summary>
    [JsonPropertyName("repository")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Repository { get; init; }
    /// <summary>Branch name; omitted when null.</summary>
    [JsonPropertyName("branch")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Branch { get; init; }
    /// <summary>Commit SHA; omitted when null.</summary>
    [JsonPropertyName("commit")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Commit { get; init; }
    /// <summary>Pipeline/build run identifier; omitted when null.</summary>
    [JsonPropertyName("runId")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? RunId { get; init; }
}

View File

@@ -0,0 +1,49 @@
using System.Collections.Generic;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Options for exporting replay manifests.
/// </summary>
public sealed record ReplayExportOptions
{
    /// <summary>
    /// Include toolchain version information. Defaults to <c>true</c>.
    /// </summary>
    public bool IncludeToolchainVersions { get; init; } = true;
    /// <summary>
    /// Include feed snapshot information. Defaults to <c>true</c>.
    /// </summary>
    public bool IncludeFeedSnapshots { get; init; } = true;
    /// <summary>
    /// Include reachability analysis data. Defaults to <c>true</c>.
    /// </summary>
    public bool IncludeReachability { get; init; } = true;
    /// <summary>
    /// Generate verification command in output. Defaults to <c>true</c>.
    /// </summary>
    public bool GenerateVerificationCommand { get; init; } = true;
    /// <summary>
    /// Output file path. Defaults to "replay.json".
    /// </summary>
    public string OutputPath { get; init; } = "replay.json";
    /// <summary>
    /// Pretty-print JSON output. Defaults to <c>true</c>.
    /// </summary>
    public bool PrettyPrint { get; init; } = true;
    /// <summary>
    /// Include CI environment metadata if available. Defaults to <c>true</c>.
    /// </summary>
    public bool IncludeCiEnvironment { get; init; } = true;
    /// <summary>
    /// Custom annotations to include in metadata; omitted when null.
    /// </summary>
    public IReadOnlyDictionary<string, string>? Annotations { get; init; }
}

View File

@@ -0,0 +1,37 @@
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Result of replay manifest export.
/// </summary>
public sealed record ReplayExportResult
{
    /// <summary>
    /// Whether the export succeeded.
    /// </summary>
    public required bool Success { get; init; }
    /// <summary>
    /// Path to the exported manifest file; null when export failed.
    /// </summary>
    public string? ManifestPath { get; init; }
    /// <summary>
    /// SHA-256 digest of the serialized manifest content ("sha256:&lt;hex&gt;" form).
    /// </summary>
    public string? ManifestDigest { get; init; }
    /// <summary>
    /// Path to generated verification script, if applicable.
    /// </summary>
    public string? VerificationScriptPath { get; init; }
    /// <summary>
    /// Error message if export failed; null on success.
    /// </summary>
    public string? Error { get; init; }
    /// <summary>
    /// The exported manifest object; null when export failed.
    /// </summary>
    public ReplayExportManifest? Manifest { get; init; }
}

View File

@@ -0,0 +1,61 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Replay.Core.Export;
public sealed partial class ReplayManifestExporter
{
    /// <summary>
    /// Assembles the input-artifact section (SBOMs, feeds, policies, reachability)
    /// of the export manifest from the replay manifest, honoring the export options.
    /// </summary>
    private static ExportInputArtifacts BuildInputArtifacts(ReplayManifest manifest, ReplayExportOptions options)
    {
        var feedSnapshots = new List<ExportFeedSnapshot>();
        var snapshot = manifest.Scan.FeedSnapshot;
        if (options.IncludeFeedSnapshots && !string.IsNullOrEmpty(snapshot))
        {
            // Normalize the digest to the "sha256:"-prefixed form.
            var digest = snapshot.StartsWith("sha256:", StringComparison.Ordinal)
                ? snapshot
                : $"sha256:{snapshot}";
            feedSnapshots.Add(new ExportFeedSnapshot
            {
                FeedId = "combined",
                Version = snapshot,
                Digest = digest
            });
        }

        var reachabilityInputs = new List<ExportReachabilityInput>();
        if (options.IncludeReachability)
        {
            foreach (var graph in manifest.Reachability.Graphs)
            {
                var graphDigest = string.IsNullOrEmpty(graph.Hash)
                    ? graph.Sha256 ?? string.Empty
                    : graph.Hash;
                reachabilityInputs.Add(new ExportReachabilityInput
                {
                    Path = graph.CasUri,
                    Digest = graphDigest,
                    EntryPoint = graph.CallgraphId ?? "default",
                    Analyzer = graph.Analyzer
                });
            }
        }

        ExportPolicyBundle? policies = null;
        if (!string.IsNullOrEmpty(manifest.Scan.PolicyDigest))
        {
            policies = new ExportPolicyBundle
            {
                Digest = manifest.Scan.PolicyDigest,
                RulesHash = manifest.Scan.PolicyDigest
            };
        }

        return new ExportInputArtifacts
        {
            // NOTE(review): the SBOM entry is a fixed placeholder — presumably real
            // SBOM inputs are wired in later; confirm before relying on these values.
            Sboms =
            [
                new ExportSbomInput
                {
                    Path = "sbom.json",
                    Digest = "sha256:placeholder",
                    Format = "cyclonedx-1.6"
                }
            ],
            Feeds = feedSnapshots,
            Reachability = reachabilityInputs.Count == 0 ? null : reachabilityInputs,
            Policies = policies
        };
    }
}

View File

@@ -0,0 +1,14 @@
namespace StellaOps.Replay.Core.Export;
public sealed partial class ReplayManifestExporter
{
    /// <summary>
    /// Builds the output-artifact section of the export manifest. The decision is a
    /// fixed "review" value and the verdict digest falls back to "sha256:pending"
    /// when no analyzer-set digest is present.
    /// </summary>
    private static ExportOutputArtifacts BuildOutputArtifacts(ReplayManifest manifest) =>
        new()
        {
            VerdictDigest = manifest.Scan.AnalyzerSetDigest ?? "sha256:pending",
            Decision = "review",
            VerdictPath = "verdict.json"
        };
}

View File

@@ -0,0 +1,27 @@
using System.Collections.Generic;
using System.Runtime.InteropServices;
namespace StellaOps.Replay.Core.Export;
public sealed partial class ReplayManifestExporter
{
    /// <summary>
    /// Collects toolchain information for reproducibility: scanner version, runtime
    /// platform/framework, and per-analyzer versions from the reachability graphs.
    /// </summary>
    private static ExportToolchainInfo BuildToolchainInfo(ReplayManifest manifest)
    {
        // Last writer wins when multiple graphs report the same analyzer name.
        Dictionary<string, string> versionsByAnalyzer = new();
        foreach (var graph in manifest.Reachability.Graphs)
        {
            if (string.IsNullOrEmpty(graph.Analyzer) || string.IsNullOrEmpty(graph.Version))
            {
                continue;
            }

            versionsByAnalyzer[graph.Analyzer] = graph.Version;
        }

        return new ExportToolchainInfo
        {
            ScannerVersion = manifest.Scan.Toolchain ?? "unknown",
            Platform = RuntimeInformation.RuntimeIdentifier,
            DotnetVersion = RuntimeInformation.FrameworkDescription,
            AnalyzerVersions = versionsByAnalyzer.Count == 0 ? null : versionsByAnalyzer
        };
    }
}

View File

@@ -0,0 +1,24 @@
namespace StellaOps.Replay.Core.Export;
public sealed partial class ReplayManifestExporter
{
    /// <summary>
    /// Produces the verification section: the CLI command used to re-verify the
    /// manifest, the expected hashes, and the exit-code contract (0/1/2).
    /// </summary>
    private static ExportVerificationInfo BuildVerificationInfo(ReplayManifest manifest, ReplayExportOptions options)
    {
        string command;
        if (options.GenerateVerificationCommand)
        {
            command = $"stella replay verify --manifest {options.OutputPath} --fail-on-drift";
        }
        else
        {
            // Generic fallback command referencing the default manifest name.
            command = "stella replay verify --manifest replay.json";
        }

        return new ExportVerificationInfo
        {
            Command = command,
            ExpectedVerdictHash = manifest.Scan.AnalyzerSetDigest ?? "sha256:pending",
            ExpectedSbomHash = manifest.Scan.ScorePolicyDigest,
            ExitCodes = new ExportExitCodes { Success = 0, Drift = 1, Error = 2 }
        };
    }
}

View File

@@ -0,0 +1,36 @@
using System;
namespace StellaOps.Replay.Core.Export;
public sealed partial class ReplayManifestExporter
{
    /// <summary>
    /// Maps an internal <see cref="ReplayManifest"/> onto the public export schema.
    /// Falls back to a computed snapshot id when no feed snapshot is recorded, and
    /// to the injected clock when the scan time is the Unix-epoch sentinel.
    /// </summary>
    private ReplayExportManifest ConvertToExportFormat(ReplayManifest manifest, ReplayExportOptions options)
    {
        var feedSnapshot = manifest.Scan.FeedSnapshot;
        var snapshotId = string.IsNullOrEmpty(feedSnapshot)
            ? $"snapshot:{ComputeSnapshotId(manifest)}"
            : feedSnapshot;

        // Unix epoch is treated as "time not set".
        var createdAt = manifest.Scan.Time == DateTimeOffset.UnixEpoch
            ? _timeProvider.GetUtcNow()
            : manifest.Scan.Time;

        var artifact = new ExportArtifactRef
        {
            Type = "oci-image",
            Digest = manifest.Scan.Id,
            Name = manifest.Scan.Id
        };

        return new ReplayExportManifest
        {
            Version = "1.0.0",
            Snapshot = new ExportSnapshotInfo
            {
                Id = snapshotId,
                CreatedAt = createdAt,
                Artifact = artifact
            },
            Toolchain = BuildToolchainInfo(manifest),
            Inputs = BuildInputArtifacts(manifest, options),
            Outputs = BuildOutputArtifacts(manifest),
            Verification = BuildVerificationInfo(manifest, options),
            Metadata = options.IncludeCiEnvironment ? BuildMetadata() : null
        };
    }
}

View File

@@ -0,0 +1,47 @@
using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.Export;
public sealed partial class ReplayManifestExporter
{
    /// <inheritdoc/>
    /// <remarks>
    /// Serializes the manifest (pretty or compact per <paramref name="options"/>),
    /// computes the SHA-256 digest of the serialized bytes, and writes the JSON to
    /// <see cref="ReplayExportOptions.OutputPath"/>. Failures are reported via
    /// <see cref="ReplayExportResult.Error"/>; cancellation is propagated.
    /// </remarks>
    public async Task<ReplayExportResult> ExportAsync(
        ReplayManifest manifest,
        ReplayExportOptions options,
        CancellationToken ct = default)
    {
        try
        {
            var exportManifest = ConvertToExportFormat(manifest, options);
            var jsonOptions = options.PrettyPrint ? _serializerOptions : _compactOptions;
            var json = JsonSerializer.Serialize(exportManifest, jsonOptions);

            // Digest the exact UTF-8 bytes that will be written to disk.
            var digestBytes = SHA256.HashData(Encoding.UTF8.GetBytes(json));
            var digest = $"sha256:{Convert.ToHexStringLower(digestBytes)}";

            await File.WriteAllTextAsync(options.OutputPath, json, ct).ConfigureAwait(false);

            return new ReplayExportResult
            {
                Success = true,
                ManifestPath = options.OutputPath,
                ManifestDigest = digest,
                Manifest = exportManifest
            };
        }
        catch (OperationCanceledException)
        {
            // Do not convert caller-requested cancellation into a failed result.
            throw;
        }
        catch (Exception ex)
        {
            return new ReplayExportResult
            {
                Success = false,
                Error = ex.Message
            };
        }
    }
}

View File

@@ -0,0 +1,14 @@
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Replay.Core.Export;
public sealed partial class ReplayManifestExporter
{
    /// <summary>
    /// Derives a deterministic snapshot identifier: lowercase-hex SHA-256 of the
    /// pipe-joined scan id, scan time (round-trip "O" format), and policy digest.
    /// </summary>
    private static string ComputeSnapshotId(ReplayManifest manifest)
    {
        var scan = manifest.Scan;
        var payload = $"{scan.Id}|{scan.Time:O}|{scan.PolicyDigest}";
        var digest = SHA256.HashData(Encoding.UTF8.GetBytes(payload));
        return Convert.ToHexStringLower(digest);
    }
}

View File

@@ -0,0 +1,71 @@
using System;
namespace StellaOps.Replay.Core.Export;
public sealed partial class ReplayManifestExporter
{
    /// <summary>
    /// Builds export metadata: timestamp from the injected clock, a fixed
    /// "stella-cli" exporter id, and auto-detected CI environment context.
    /// </summary>
    private ExportMetadataInfo BuildMetadata()
    {
        return new ExportMetadataInfo
        {
            ExportedAt = _timeProvider.GetUtcNow(),
            ExportedBy = "stella-cli",
            CiEnvironment = DetectCiEnvironment()
        };
    }

    /// <summary>
    /// Detects the CI provider from well-known environment variables.
    /// Returns null when no supported provider is detected.
    /// </summary>
    private static ExportCiEnvironment? DetectCiEnvironment()
    {
        // Gitea Actions sets GITHUB_ACTIONS=true for GitHub compatibility, so the
        // GITEA_ACTIONS probe must run BEFORE the GitHub check — otherwise the
        // "gitea" branch is unreachable on Gitea runners.
        if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("GITEA_ACTIONS")))
        {
            return new ExportCiEnvironment
            {
                Provider = "gitea",
                // Gitea reuses the GitHub-compatible variable names.
                Repository = Environment.GetEnvironmentVariable("GITHUB_REPOSITORY"),
                Branch = Environment.GetEnvironmentVariable("GITHUB_REF_NAME"),
                Commit = Environment.GetEnvironmentVariable("GITHUB_SHA"),
                RunId = Environment.GetEnvironmentVariable("GITHUB_RUN_ID")
            };
        }

        if (Environment.GetEnvironmentVariable("GITHUB_ACTIONS") == "true")
        {
            return new ExportCiEnvironment
            {
                Provider = "github",
                Repository = Environment.GetEnvironmentVariable("GITHUB_REPOSITORY"),
                Branch = Environment.GetEnvironmentVariable("GITHUB_REF_NAME"),
                Commit = Environment.GetEnvironmentVariable("GITHUB_SHA"),
                RunId = Environment.GetEnvironmentVariable("GITHUB_RUN_ID")
            };
        }

        if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("GITLAB_CI")))
        {
            return new ExportCiEnvironment
            {
                Provider = "gitlab",
                Repository = Environment.GetEnvironmentVariable("CI_PROJECT_PATH"),
                Branch = Environment.GetEnvironmentVariable("CI_COMMIT_REF_NAME"),
                Commit = Environment.GetEnvironmentVariable("CI_COMMIT_SHA"),
                RunId = Environment.GetEnvironmentVariable("CI_PIPELINE_ID")
            };
        }

        if (!string.IsNullOrEmpty(Environment.GetEnvironmentVariable("JENKINS_URL")))
        {
            return new ExportCiEnvironment
            {
                Provider = "jenkins",
                Repository = Environment.GetEnvironmentVariable("GIT_URL"),
                Branch = Environment.GetEnvironmentVariable("GIT_BRANCH"),
                Commit = Environment.GetEnvironmentVariable("GIT_COMMIT"),
                RunId = Environment.GetEnvironmentVariable("BUILD_ID")
            };
        }

        return null;
    }
}

View File

@@ -0,0 +1,62 @@
using System;
using System.IO;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.Export;
public sealed partial class ReplayManifestExporter
{
    /// <inheritdoc/>
    /// <remarks>
    /// Currently only checks that the manifest file exists and parses; hash
    /// comparison against a re-run is not implemented yet, so any parseable
    /// manifest verifies successfully. Cancellation is propagated to the caller.
    /// </remarks>
    public async Task<ReplayVerifyResult> VerifyAsync(
        string manifestPath,
        ReplayVerifyOptions options,
        CancellationToken ct = default)
    {
        try
        {
            if (!File.Exists(manifestPath))
            {
                return new ReplayVerifyResult
                {
                    Success = false,
                    ExitCode = 2,
                    Error = $"Manifest file not found: {manifestPath}"
                };
            }

            var json = await File.ReadAllTextAsync(manifestPath, ct).ConfigureAwait(false);
            var manifest = JsonSerializer.Deserialize<ReplayExportManifest>(json, _serializerOptions);
            if (manifest is null)
            {
                return new ReplayVerifyResult
                {
                    Success = false,
                    ExitCode = 2,
                    Error = "Failed to parse manifest JSON"
                };
            }

            return new ReplayVerifyResult
            {
                Success = true,
                ExitCode = 0,
                SbomHashMatches = true,
                VerdictHashMatches = true,
                ExpectedSbomHash = manifest.Verification.ExpectedSbomHash,
                ExpectedVerdictHash = manifest.Verification.ExpectedVerdictHash
            };
        }
        catch (OperationCanceledException)
        {
            // Do not report caller-requested cancellation as an error result.
            throw;
        }
        catch (Exception ex)
        {
            return new ReplayVerifyResult
            {
                Success = false,
                ExitCode = 2,
                Error = ex.Message
            };
        }
    }
}

View File

@@ -1,12 +1,8 @@
// -----------------------------------------------------------------------------
// ReplayManifestExporter.cs
// Sprint: SPRINT_20251228_001_BE_replay_manifest_ci
// Task: T2 Implement ReplayManifestExporter service
// Task: T2 - Implement ReplayManifestExporter service
// -----------------------------------------------------------------------------
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
@@ -15,11 +11,11 @@ namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Exports replay manifests in standardized JSON format for CI/CD integration.
/// </summary>
public sealed class ReplayManifestExporter : IReplayManifestExporter
public sealed partial class ReplayManifestExporter : IReplayManifestExporter
{
private readonly TimeProvider _timeProvider;
private static readonly JsonSerializerOptions SerializerOptions = new()
private static readonly JsonSerializerOptions _serializerOptions = new()
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
@@ -27,7 +23,7 @@ public sealed class ReplayManifestExporter : IReplayManifestExporter
Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }
};
private static readonly JsonSerializerOptions CompactOptions = new()
private static readonly JsonSerializerOptions _compactOptions = new()
{
WriteIndented = false,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
@@ -50,316 +46,10 @@ public sealed class ReplayManifestExporter : IReplayManifestExporter
ReplayExportOptions options,
CancellationToken ct = default)
{
// This would typically load the scan result from storage
// For now, return an error indicating the scan needs to be provided
return Task.FromResult(new ReplayExportResult
{
Success = false,
Error = "Export by scan ID requires integration with scan storage. Use ExportAsync(manifest, options) instead."
});
}
/// <inheritdoc/>
/// <remarks>
/// Serializes the manifest (pretty or compact per options), digests the exact
/// serialized bytes, and writes them to the configured output path. Failures are
/// reported via <see cref="ReplayExportResult.Error"/>; cancellation propagates.
/// </remarks>
public async Task<ReplayExportResult> ExportAsync(
    ReplayManifest manifest,
    ReplayExportOptions options,
    CancellationToken ct = default)
{
    try
    {
        var exportManifest = ConvertToExportFormat(manifest, options);
        var jsonOptions = options.PrettyPrint ? SerializerOptions : CompactOptions;
        var json = JsonSerializer.Serialize(exportManifest, jsonOptions);

        // Compute digest over the exact UTF-8 bytes that will be written.
        var digestBytes = SHA256.HashData(Encoding.UTF8.GetBytes(json));
        var digest = $"sha256:{Convert.ToHexStringLower(digestBytes)}";

        // Library code: avoid capturing a synchronization context.
        await File.WriteAllTextAsync(options.OutputPath, json, ct).ConfigureAwait(false);

        return new ReplayExportResult
        {
            Success = true,
            ManifestPath = options.OutputPath,
            ManifestDigest = digest,
            Manifest = exportManifest
        };
    }
    catch (OperationCanceledException)
    {
        // Propagate cancellation instead of reporting it as an export failure.
        throw;
    }
    catch (Exception ex)
    {
        return new ReplayExportResult
        {
            Success = false,
            Error = ex.Message
        };
    }
}
/// <inheritdoc/>
/// <remarks>
/// Currently only checks that the manifest file exists and parses; a full replay
/// (re-running the scan with frozen inputs and comparing hashes) is not yet
/// implemented, so a parseable manifest always verifies successfully.
/// Cancellation is propagated to the caller.
/// </remarks>
public async Task<ReplayVerifyResult> VerifyAsync(
    string manifestPath,
    ReplayVerifyOptions options,
    CancellationToken ct = default)
{
    try
    {
        if (!File.Exists(manifestPath))
        {
            return new ReplayVerifyResult
            {
                Success = false,
                ExitCode = 2,
                Error = $"Manifest file not found: {manifestPath}"
            };
        }

        // Library code: avoid capturing a synchronization context.
        var json = await File.ReadAllTextAsync(manifestPath, ct).ConfigureAwait(false);
        var manifest = JsonSerializer.Deserialize<ReplayExportManifest>(json, SerializerOptions);
        if (manifest is null)
        {
            return new ReplayVerifyResult
            {
                Success = false,
                ExitCode = 2,
                Error = "Failed to parse manifest JSON"
            };
        }

        return new ReplayVerifyResult
        {
            Success = true,
            ExitCode = 0,
            SbomHashMatches = true,
            VerdictHashMatches = true,
            ExpectedSbomHash = manifest.Verification.ExpectedSbomHash,
            ExpectedVerdictHash = manifest.Verification.ExpectedVerdictHash
        };
    }
    catch (OperationCanceledException)
    {
        // Do not report caller-requested cancellation as exit code 2.
        throw;
    }
    catch (Exception ex)
    {
        return new ReplayVerifyResult
        {
            Success = false,
            ExitCode = 2,
            Error = ex.Message
        };
    }
}
/// <summary>
/// Maps the internal <see cref="ReplayManifest"/> to the export schema shape.
/// </summary>
/// <remarks>
/// The snapshot id falls back to a computed hash when no feed snapshot is
/// recorded; the creation time falls back to the injected clock when the scan
/// time equals the Unix-epoch sentinel.
/// </remarks>
private ReplayExportManifest ConvertToExportFormat(ReplayManifest manifest, ReplayExportOptions options)
{
    var snapshotId = !string.IsNullOrEmpty(manifest.Scan.FeedSnapshot)
        ? manifest.Scan.FeedSnapshot
        : $"snapshot:{ComputeSnapshotId(manifest)}";
    var exportManifest = new ReplayExportManifest
    {
        Version = "1.0.0",
        Snapshot = new ExportSnapshotInfo
        {
            Id = snapshotId,
            // Unix epoch is treated as "time not set"; use the injected clock.
            CreatedAt = manifest.Scan.Time != DateTimeOffset.UnixEpoch
                ? manifest.Scan.Time
                : _timeProvider.GetUtcNow(),
            Artifact = new ExportArtifactRef
            {
                // NOTE(review): Type is hard-coded to "oci-image" and Name reuses
                // the scan id — confirm this matches the scanned artifact.
                Type = "oci-image",
                Digest = manifest.Scan.Id,
                Name = manifest.Scan.Id
            }
        },
        Toolchain = BuildToolchainInfo(manifest, options),
        Inputs = BuildInputArtifacts(manifest, options),
        Outputs = BuildOutputArtifacts(manifest),
        Verification = BuildVerificationInfo(manifest, options),
        Metadata = options.IncludeCiEnvironment ? BuildMetadata() : null
    };
    return exportManifest;
}
/// <summary>
/// Collects scanner/runtime/analyzer version information for reproducibility.
/// </summary>
/// <remarks>
/// The <paramref name="options"/> parameter is currently unused; it is kept for
/// signature symmetry with the other Build* helpers.
/// </remarks>
private static ExportToolchainInfo BuildToolchainInfo(ReplayManifest manifest, ReplayExportOptions options)
{
    var toolchain = manifest.Scan.Toolchain ?? "unknown";
    // Last writer wins when multiple graphs report the same analyzer name.
    var analyzerVersions = new Dictionary<string, string>();
    foreach (var graph in manifest.Reachability.Graphs)
    {
        if (!string.IsNullOrEmpty(graph.Analyzer) && !string.IsNullOrEmpty(graph.Version))
        {
            analyzerVersions[graph.Analyzer] = graph.Version;
        }
    }
    return new ExportToolchainInfo
    {
        ScannerVersion = toolchain,
        Platform = RuntimeInformation.RuntimeIdentifier,
        DotnetVersion = RuntimeInformation.FrameworkDescription,
        AnalyzerVersions = analyzerVersions.Count > 0 ? analyzerVersions : null
    };
}
/// <summary>
/// Builds the input-artifact section (SBOMs, feed snapshots, reachability graphs,
/// policy bundle) of the export manifest.
/// </summary>
private static ExportInputArtifacts BuildInputArtifacts(ReplayManifest manifest, ReplayExportOptions options)
{
var sboms = new List<ExportSbomInput>();
var feeds = new List<ExportFeedSnapshot>();
var reachability = new List<ExportReachabilityInput>();
// Build feed snapshots from manifest
if (options.IncludeFeedSnapshots && !string.IsNullOrEmpty(manifest.Scan.FeedSnapshot))
{
feeds.Add(new ExportFeedSnapshot
{
FeedId = "combined",
Version = manifest.Scan.FeedSnapshot,
// Normalize to a "sha256:"-prefixed digest form.
Digest = manifest.Scan.FeedSnapshot.StartsWith("sha256:")
? manifest.Scan.FeedSnapshot
: $"sha256:{manifest.Scan.FeedSnapshot}"
});
}
// Build reachability inputs
if (options.IncludeReachability)
{
foreach (var graph in manifest.Reachability.Graphs)
{
reachability.Add(new ExportReachabilityInput
{
Path = graph.CasUri,
// Prefer Hash; fall back to Sha256, then to empty string.
Digest = !string.IsNullOrEmpty(graph.Hash) ? graph.Hash : graph.Sha256 ?? string.Empty,
EntryPoint = graph.CallgraphId ?? "default",
Analyzer = graph.Analyzer
});
}
}
return new ExportInputArtifacts
{
// NOTE(review): `sboms` is never populated above, so the placeholder entry is
// always emitted — confirm whether real SBOM inputs were meant to be collected.
Sboms = sboms.Count > 0 ? sboms : [new ExportSbomInput
{
Path = "sbom.json",
Digest = "sha256:placeholder",
Format = "cyclonedx-1.6"
}],
Feeds = feeds,
Reachability = reachability.Count > 0 ? reachability : null,
Policies = !string.IsNullOrEmpty(manifest.Scan.PolicyDigest) ? new ExportPolicyBundle
{
Digest = manifest.Scan.PolicyDigest,
RulesHash = manifest.Scan.PolicyDigest
} : null
};
}
/// <summary>
/// Builds the output-artifact section of the export manifest.
/// </summary>
private static ExportOutputArtifacts BuildOutputArtifacts(ReplayManifest manifest)
{
    var verdictDigest = manifest.Scan.AnalyzerSetDigest ?? "sha256:pending";
    return new ExportOutputArtifacts
    {
        VerdictDigest = verdictDigest,
        // Placeholder decision; the real value would come from the actual scan result.
        Decision = "review",
        VerdictPath = "verdict.json"
    };
}
/// <summary>
/// Builds the verification section: the command consumers should run to verify
/// the bundle, the expected hashes, and the exit-code contract.
/// </summary>
private static ExportVerificationInfo BuildVerificationInfo(ReplayManifest manifest, ReplayExportOptions options)
{
    string command;
    if (options.GenerateVerificationCommand)
    {
        command = $"stella replay verify --manifest {options.OutputPath} --fail-on-drift";
    }
    else
    {
        command = "stella replay verify --manifest replay.json";
    }
    return new ExportVerificationInfo
    {
        Command = command,
        ExpectedVerdictHash = manifest.Scan.AnalyzerSetDigest ?? "sha256:pending",
        ExpectedSbomHash = manifest.Scan.ScorePolicyDigest,
        // Exit-code contract: 0 = success, 1 = drift, 2 = error.
        ExitCodes = new ExportExitCodes
        {
            Success = 0,
            Drift = 1,
            Error = 2
        }
    };
}
/// <summary>
/// Builds the export metadata section: timestamp, exporting tool, and the
/// detected CI environment (null when not running under a known CI system).
/// </summary>
private ExportMetadataInfo BuildMetadata()
{
    return new ExportMetadataInfo
    {
        ExportedAt = _timeProvider.GetUtcNow(),
        ExportedBy = "stella-cli",
        CiEnvironment = DetectCiEnvironment()
    };
}
/// <summary>
/// Detects the CI system the process is running under by probing well-known
/// environment variables, in order: GitHub Actions, GitLab CI, Gitea Actions,
/// Jenkins. Returns null when none is detected.
/// </summary>
private static ExportCiEnvironment? DetectCiEnvironment()
{
    static string? Env(string name) => Environment.GetEnvironmentVariable(name);

    if (Env("GITHUB_ACTIONS") == "true")
    {
        return new ExportCiEnvironment
        {
            Provider = "github",
            Repository = Env("GITHUB_REPOSITORY"),
            Branch = Env("GITHUB_REF_NAME"),
            Commit = Env("GITHUB_SHA"),
            RunId = Env("GITHUB_RUN_ID")
        };
    }

    if (!string.IsNullOrEmpty(Env("GITLAB_CI")))
    {
        return new ExportCiEnvironment
        {
            Provider = "gitlab",
            Repository = Env("CI_PROJECT_PATH"),
            Branch = Env("CI_COMMIT_REF_NAME"),
            Commit = Env("CI_COMMIT_SHA"),
            RunId = Env("CI_PIPELINE_ID")
        };
    }

    if (!string.IsNullOrEmpty(Env("GITEA_ACTIONS")))
    {
        // Gitea Actions mirrors the GitHub Actions environment variables.
        return new ExportCiEnvironment
        {
            Provider = "gitea",
            Repository = Env("GITHUB_REPOSITORY"),
            Branch = Env("GITHUB_REF_NAME"),
            Commit = Env("GITHUB_SHA"),
            RunId = Env("GITHUB_RUN_ID")
        };
    }

    if (!string.IsNullOrEmpty(Env("JENKINS_URL")))
    {
        return new ExportCiEnvironment
        {
            Provider = "jenkins",
            Repository = Env("GIT_URL"),
            Branch = Env("GIT_BRANCH"),
            Commit = Env("GIT_COMMIT"),
            RunId = Env("BUILD_ID")
        };
    }

    return null;
}
/// <summary>
/// Derives a deterministic snapshot id by hashing the scan id, scan time (round-trip
/// format), and policy digest.
/// </summary>
private static string ComputeSnapshotId(ReplayManifest manifest)
{
    var scan = manifest.Scan;
    var fingerprint = $"{scan.Id}|{scan.Time:O}|{scan.PolicyDigest}";
    var digest = SHA256.HashData(Encoding.UTF8.GetBytes(fingerprint));
    return Convert.ToHexStringLower(digest);
}
}

View File

@@ -0,0 +1,27 @@
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Controls how a replay manifest is verified against a re-run.
/// </summary>
public sealed record ReplayVerifyOptions
{
    /// <summary>When true, verification fails if the SBOM hash drifts from the expected value.</summary>
    public bool FailOnSbomDrift { get; init; } = true;

    /// <summary>When true, verification fails if the verdict hash drifts from the expected value.</summary>
    public bool FailOnVerdictDrift { get; init; } = true;

    /// <summary>When true, any drift at all is treated as a failure.</summary>
    public bool StrictMode { get; init; }

    /// <summary>When true, detailed per-item drift information is produced.</summary>
    public bool DetailedDriftDetection { get; init; } = true;
}

View File

@@ -0,0 +1,60 @@
using System.Collections.Generic;
namespace StellaOps.Replay.Core.Export;
/// <summary>
/// Outcome of verifying a replay manifest: overall result, per-hash match flags,
/// the expected/actual hashes, and a CI-friendly exit code.
/// </summary>
public sealed record ReplayVerifyResult
{
    /// <summary>True when verification passed with no drift detected.</summary>
    public required bool Success { get; init; }

    /// <summary>Process exit code for CI: 0 = success, 1 = drift, 2 = error.</summary>
    public required int ExitCode { get; init; }

    /// <summary>True when the replayed SBOM hash equals the expected hash.</summary>
    public bool SbomHashMatches { get; init; }

    /// <summary>True when the replayed verdict hash equals the expected hash.</summary>
    public bool VerdictHashMatches { get; init; }

    /// <summary>SBOM hash recorded in the manifest.</summary>
    public string? ExpectedSbomHash { get; init; }

    /// <summary>SBOM hash produced by the replay run.</summary>
    public string? ActualSbomHash { get; init; }

    /// <summary>Verdict hash recorded in the manifest.</summary>
    public string? ExpectedVerdictHash { get; init; }

    /// <summary>Verdict hash produced by the replay run.</summary>
    public string? ActualVerdictHash { get; init; }

    /// <summary>Individual drift findings, if any were collected.</summary>
    public IReadOnlyList<DriftDetail>? Drifts { get; init; }

    /// <summary>Failure description when verification errored out.</summary>
    public string? Error { get; init; }
}

View File

@@ -0,0 +1,16 @@
namespace StellaOps.Replay.Core.FeedSnapshot;
/// <summary>
/// Supported compression algorithms for snapshot bundles.
/// </summary>
public enum CompressionAlgorithm
{
    /// <summary>Store without compression.</summary>
    None = 0,

    /// <summary>Compress with gzip.</summary>
    Gzip = 1,

    /// <summary>Compress with Zstandard (the default).</summary>
    Zstd = 2
}

View File

@@ -0,0 +1,22 @@
namespace StellaOps.Replay.Core.FeedSnapshot;
/// <summary>
/// Controls how a snapshot bundle is exported.
/// </summary>
public sealed record ExportBundleOptions
{
    /// <summary>Compression applied to the bundle; Zstandard by default.</summary>
    public CompressionAlgorithm Compression { get; init; } = CompressionAlgorithm.Zstd;

    /// <summary>When true, the bundle manifest file is included.</summary>
    public bool IncludeManifest { get; init; } = true;

    /// <summary>When true, checksum files are included.</summary>
    public bool IncludeChecksums { get; init; } = true;
}

View File

@@ -0,0 +1,37 @@
namespace StellaOps.Replay.Core.FeedSnapshot;
/// <summary>
/// Describes a snapshot bundle that has been exported: digests, size, format,
/// and (optionally) where it was written.
/// </summary>
public sealed record ExportedBundleMetadata
{
    /// <summary>Composite digest identifying the exported snapshot.</summary>
    public required string CompositeDigest { get; init; }

    /// <summary>Total size of the exported bundle in bytes.</summary>
    public required long SizeBytes { get; init; }

    /// <summary>SHA-256 digest of the bundle file itself.</summary>
    public required string BundleDigest { get; init; }

    /// <summary>Version of the export format.</summary>
    public required string FormatVersion { get; init; }

    /// <summary>Compression applied to the bundle (none, gzip, zstd).</summary>
    public required string Compression { get; init; }

    /// <summary>Filesystem path of the exported bundle, when written to disk.</summary>
    public string? ExportPath { get; init; }
}

View File

@@ -0,0 +1,42 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Replay.Core.FeedSnapshot;
/// <summary>
/// An atomic, immutable bundle of feed snapshots identified by a composite digest.
/// </summary>
public sealed record FeedSnapshotBundle
{
    /// <summary>Unique identifier of this snapshot.</summary>
    public required string SnapshotId { get; init; }

    /// <summary>
    /// Composite SHA-256 digest over all source digests (sha256:hex), computed as
    /// SHA256(source1Digest || source2Digest || ... || sourceNDigest) with sources
    /// sorted alphabetically by SourceId.
    /// </summary>
    public required string CompositeDigest { get; init; }

    /// <summary>Optional human-readable label.</summary>
    public string? Label { get; init; }

    /// <summary>UTC creation time of the snapshot.</summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>Snapshots of the individual sources making up the bundle.</summary>
    public required IReadOnlyList<SourceSnapshot> Sources { get; init; }

    /// <summary>Schema version, kept for forward compatibility.</summary>
    public string SchemaVersion { get; init; } = "1.0";
}

View File

@@ -0,0 +1,10 @@
namespace StellaOps.Replay.Core.FeedSnapshot;
public sealed partial class FeedSnapshotCoordinatorService
{
// Serialization envelope written at the head of an exported bundle: a format
// version plus the snapshot bundle itself.
private sealed class BundleManifest
{
// Bundle format version; bump when the serialized layout changes.
public string FormatVersion { get; init; } = "1.0";
// The transported snapshot; null indicates a malformed bundle.
public FeedSnapshotBundle? Snapshot { get; init; }
}
}

View File

@@ -0,0 +1,70 @@
using System;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
public sealed partial class FeedSnapshotCoordinatorService
{
/// <inheritdoc />
public Task<FeedSnapshotBundle> CreateSnapshotAsync(
string? label = null,
CancellationToken cancellationToken = default)
{
// Snapshot every registered source when no explicit subset is requested.
return CreateSnapshotAsync(_providers.Keys, label, cancellationToken);
}
/// <inheritdoc />
public async Task<FeedSnapshotBundle> CreateSnapshotAsync(
IEnumerable<string> sourceIds,
string? label = null,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(sourceIds);
var requestedSources = sourceIds.ToImmutableArray();
if (requestedSources.Length == 0)
{
throw new ArgumentException("At least one source must be specified.", nameof(sourceIds));
}
// Validate that every requested source has a registered provider before doing any work.
var missingProviders = requestedSources
.Where(id => !_providers.ContainsKey(id))
.ToImmutableArray();
if (missingProviders.Length > 0)
{
throw new InvalidOperationException(
$"Unknown feed sources: {string.Join(", ", missingProviders)}. " +
$"Available sources: {string.Join(", ", _providers.Keys)}");
}
var snapshotId = GenerateSnapshotId();
var createdAt = _timeProvider.GetUtcNow();
// Create per-source snapshots in parallel; the ordinal sort keeps the resulting
// Sources order — and hence the composite digest — deterministic.
var snapshotTasks = requestedSources
.Order(StringComparer.Ordinal)
.Select(async sourceId =>
{
var provider = _providers[sourceId];
return await provider.CreateSnapshotAsync(cancellationToken).ConfigureAwait(false);
});
var sourceSnapshots = await Task.WhenAll(snapshotTasks).ConfigureAwait(false);
// Composite digest is computed over the sorted per-source digests.
var compositeDigest = ComputeCompositeDigest(sourceSnapshots);
var bundle = new FeedSnapshotBundle
{
SnapshotId = snapshotId,
CompositeDigest = compositeDigest,
Label = label,
CreatedAt = createdAt,
Sources = sourceSnapshots.ToImmutableArray()
};
// Persist before returning so the snapshot is durable once callers see it.
await _store.SaveAsync(bundle, cancellationToken).ConfigureAwait(false);
return bundle;
}
}

View File

@@ -0,0 +1,42 @@
using System;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Replay.Core.FeedSnapshot;
public sealed partial class FeedSnapshotCoordinatorService
{
    /// <summary>
    /// Generates a unique, human-sortable snapshot id of the form
    /// "snap-{UTC yyyyMMdd-HHmmss}-{8 random hex chars}".
    /// </summary>
    private string GenerateSnapshotId()
    {
        var timestamp = _timeProvider.GetUtcNow().ToString("yyyyMMdd-HHmmss");
        var random = Guid.NewGuid().ToString("N")[..8];
        return $"snap-{timestamp}-{random}";
    }

    /// <summary>
    /// Computes the composite digest over all source digests. Sources are sorted by
    /// SourceId (ordinal) so the result is deterministic; each source contributes
    /// "UTF-8 source id, 0x00 separator, raw digest bytes" to the hash.
    /// </summary>
    private static string ComputeCompositeDigest(SourceSnapshot[] sources)
    {
        // IncrementalHash streams the data straight into SHA-256, avoiding the
        // intermediate MemoryStream + SHA256.Create() of the previous version
        // while producing a byte-identical digest.
        using var hasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
        ReadOnlySpan<byte> separator = [0];
        foreach (var source in sources.OrderBy(s => s.SourceId, StringComparer.Ordinal))
        {
            hasher.AppendData(Encoding.UTF8.GetBytes(source.SourceId));
            hasher.AppendData(separator);
            // Accept digests with or without the "sha256:" prefix.
            var digestHex = source.Digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
                ? source.Digest[7..]
                : source.Digest;
            hasher.AppendData(Convert.FromHexString(digestHex));
        }
        var hash = hasher.GetHashAndReset();
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}

View File

@@ -0,0 +1,62 @@
using System;
using System.IO;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
public sealed partial class FeedSnapshotCoordinatorService
{
/// <inheritdoc />
public async Task<ExportedBundleMetadata> ExportBundleAsync(
string compositeDigest,
Stream outputStream,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(compositeDigest);
ArgumentNullException.ThrowIfNull(outputStream);
var bundle = await GetSnapshotAsync(compositeDigest, cancellationToken).ConfigureAwait(false)
?? throw new InvalidOperationException($"Snapshot not found: {compositeDigest}");
// Pipeline: payload -> (optional compression) -> hash -> byte counter -> caller's
// stream, so BundleDigest/SizeBytes describe the bytes actually written out.
using var countingStream = new CountingStream(outputStream);
using var hashStream = new HashingStream(countingStream, IncrementalHash.CreateHash(HashAlgorithmName.SHA256));
var (writeStream, compression) = CreateExportWriteStream(hashStream);
await using (writeStream.ConfigureAwait(false))
{
await WriteBundleAsync(bundle, writeStream, cancellationToken).ConfigureAwait(false);
}
// Read the hash only after the write stream is disposed so any tail the
// compression wrapper flushes on dispose is included in the digest.
var bundleDigest = $"sha256:{Convert.ToHexString(hashStream.GetHashAndReset()).ToLowerInvariant()}";
return new ExportedBundleMetadata
{
CompositeDigest = compositeDigest,
SizeBytes = countingStream.BytesWritten,
BundleDigest = bundleDigest,
FormatVersion = "1.0",
Compression = compression
};
}
/// <inheritdoc />
public async Task<ExportedBundleMetadata?> ExportBundleAsync(
string compositeDigest,
ExportBundleOptions options,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(compositeDigest);
ArgumentNullException.ThrowIfNull(options);
var bundle = await GetSnapshotAsync(compositeDigest, cancellationToken).ConfigureAwait(false);
if (bundle is null)
{
return null;
}
// NOTE(review): this overload exports into a throwaway MemoryStream and ignores
// `options` entirely — the bundle bytes are discarded and only metadata is
// returned. Confirm whether the options (compression, manifest, checksums) were
// meant to be honored and the output persisted somewhere.
using var memoryStream = new MemoryStream();
return await ExportBundleAsync(compositeDigest, memoryStream, cancellationToken).ConfigureAwait(false);
}
}

View File

@@ -0,0 +1,54 @@
using System.IO;
using System.IO.Compression;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
public sealed partial class FeedSnapshotCoordinatorService
{
// Chooses the stream the export payload is written through, based on configuration.
// Returns the stream plus the compression label ("gzip", "zstd", or "none") that is
// recorded in the bundle metadata.
private (Stream Stream, string Compression) CreateExportWriteStream(HashingStream hashStream)
{
if (_options.CompressExport && _options.Compression != CompressionAlgorithm.None)
{
var compression = _options.Compression switch
{
CompressionAlgorithm.Gzip => "gzip",
CompressionAlgorithm.Zstd => "zstd",
_ => "none"
};
// leaveOpen: the hash stream is owned by the caller and must remain usable
// (for GetHashAndReset) after the compression wrapper is disposed.
var stream = _options.Compression == CompressionAlgorithm.Gzip
? new GZipStream(hashStream, CompressionLevel.Optimal, leaveOpen: true)
: new ZstdCompressionStream(hashStream);
return (stream, compression);
}
return (hashStream, "none");
}
// Writes the bundle payload: a JSON manifest header followed by each provider's
// raw export data, concatenated in Sources order.
private async Task WriteBundleAsync(
FeedSnapshotBundle bundle,
Stream writeStream,
CancellationToken cancellationToken)
{
var manifest = new BundleManifest
{
FormatVersion = "1.0",
Snapshot = bundle
};
await JsonSerializer.SerializeAsync(writeStream, manifest, _jsonOptions, cancellationToken)
.ConfigureAwait(false);
// NOTE(review): the manifest JSON and provider payloads share one unframed stream.
// On import, JsonSerializer.DeserializeAsync may buffer bytes past the end of the
// JSON document, leaving providers to read from the wrong offset — confirm the
// framing contract with ImportBundleAsync / provider.ImportAsync.
foreach (var source in bundle.Sources)
{
if (_providers.TryGetValue(source.SourceId, out var provider))
{
await provider.ExportAsync(source, writeStream, cancellationToken).ConfigureAwait(false);
}
}
}
}

View File

@@ -0,0 +1,18 @@
using System;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
public sealed partial class FeedSnapshotCoordinatorService
{
    /// <inheritdoc />
    public async Task<FeedSnapshotBundle?> GetSnapshotAsync(
        string compositeDigest,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(compositeDigest);
        // Store lookup; yields null when no snapshot with this digest exists.
        var bundle = await _store.GetByDigestAsync(compositeDigest, cancellationToken).ConfigureAwait(false);
        return bundle;
    }
}

View File

@@ -0,0 +1,63 @@
using System;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
public sealed partial class FeedSnapshotCoordinatorService
{
/// <inheritdoc />
public async Task<FeedSnapshotBundle> ImportBundleAsync(
Stream inputStream,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(inputStream);
// Sniff the compression magic bytes and wrap the stream accordingly.
var readStream = await CreateImportReadStreamAsync(inputStream, cancellationToken).ConfigureAwait(false);
await using (readStream.ConfigureAwait(false))
{
var manifest = await JsonSerializer.DeserializeAsync<BundleManifest>(readStream, _jsonOptions, cancellationToken)
.ConfigureAwait(false)
?? throw new InvalidOperationException("Invalid bundle: could not deserialize manifest.");
var bundle = manifest.Snapshot
?? throw new InvalidOperationException("Invalid bundle: missing snapshot data.");
if (_options.VerifyOnImport)
{
// Recompute the composite digest from the per-source digests and compare it
// against the digest the bundle claims for itself.
var computedDigest = ComputeCompositeDigest(bundle.Sources.ToArray());
if (!string.Equals(computedDigest, bundle.CompositeDigest, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException(
$"Bundle integrity check failed: expected {bundle.CompositeDigest}, computed {computedDigest}");
}
}
// NOTE(review): DeserializeAsync may have buffered bytes beyond the manifest JSON,
// and sources without a registered provider are silently skipped — confirm both
// against the export-side framing in WriteBundleAsync.
foreach (var source in bundle.Sources)
{
if (_providers.TryGetValue(source.SourceId, out var provider))
{
await provider.ImportAsync(readStream, cancellationToken).ConfigureAwait(false);
}
}
await _store.SaveAsync(bundle, cancellationToken).ConfigureAwait(false);
return bundle;
}
}
/// <inheritdoc />
public Task<FeedSnapshotBundle> ImportBundleAsync(
Stream inputStream,
ImportBundleOptions options,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(inputStream);
ArgumentNullException.ThrowIfNull(options);
// NOTE(review): `options` is validated but otherwise unused; this overload
// currently behaves identically to the optionless one.
return ImportBundleAsync(inputStream, cancellationToken);
}
}

View File

@@ -0,0 +1,39 @@
using System;
using System.IO;
using System.IO.Compression;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
public sealed partial class FeedSnapshotCoordinatorService
{
    /// <summary>
    /// Sniffs the compression format of an import stream from its magic bytes and
    /// wraps it in the matching decompression stream (gzip or zstd); streams without
    /// a recognized magic pass through unchanged. The stream must be seekable so the
    /// sniffed header can be rewound.
    /// </summary>
    private static async Task<Stream> CreateImportReadStreamAsync(
        Stream inputStream,
        CancellationToken cancellationToken)
    {
        // Fail before consuming any bytes: the previous version read the header
        // first and only then discovered it could not rewind a non-seekable stream.
        if (!inputStream.CanSeek)
        {
            throw new InvalidOperationException("Input stream must be seekable for import.");
        }
        var header = new byte[4];
        // ReadAtLeastAsync loops over short reads; a single ReadAsync may legally
        // return fewer bytes than are available, which would break magic detection.
        var bytesRead = await inputStream
            .ReadAtLeastAsync(header, header.Length, throwOnEndOfStream: false, cancellationToken)
            .ConfigureAwait(false);
        inputStream.Seek(0, SeekOrigin.Begin);
        // gzip magic: 1F 8B
        if (bytesRead >= 2 && header[0] == 0x1F && header[1] == 0x8B)
        {
            return new GZipStream(inputStream, CompressionMode.Decompress, leaveOpen: true);
        }
        // zstd frame magic: 28 B5 2F FD
        if (bytesRead >= 4 && header[0] == 0x28 && header[1] == 0xB5 && header[2] == 0x2F && header[3] == 0xFD)
        {
            return new ZstdDecompressionStream(inputStream);
        }
        return inputStream;
    }
}

View File

@@ -0,0 +1,49 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
public sealed partial class FeedSnapshotCoordinatorService
{
    /// <inheritdoc />
    public IAsyncEnumerable<FeedSnapshotSummary> ListSnapshotsAsync(
        DateTimeOffset? from = null,
        DateTimeOffset? to = null,
        CancellationToken cancellationToken = default)
        => _store.ListAsync(from, to, cancellationToken);

    /// <inheritdoc />
    public async Task<IReadOnlyList<FeedSnapshotSummary>> ListSnapshotsAsync(
        string? cursor,
        int limit,
        CancellationToken cancellationToken = default)
    {
        // The cursor is an opaque integer offset; anything absent or unparsable
        // means "start from the beginning".
        var skip = !string.IsNullOrEmpty(cursor) && int.TryParse(cursor, out var parsed)
            ? parsed
            : 0;
        var page = new List<FeedSnapshotSummary>();
        var index = 0;
        await foreach (var summary in _store.ListAsync(null, null, cancellationToken).ConfigureAwait(false))
        {
            if (index >= skip && page.Count < limit)
            {
                page.Add(summary);
            }
            index++;
            if (page.Count >= limit)
            {
                break;
            }
        }
        return page;
    }
}

View File

@@ -0,0 +1,44 @@
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
public sealed partial class FeedSnapshotCoordinatorService
{
    /// <summary>
    /// Write-only stream decorator that counts the bytes successfully written to the
    /// wrapped stream.
    /// </summary>
    private sealed class CountingStream : Stream
    {
        private readonly Stream _inner;

        public CountingStream(Stream inner) => _inner = inner;

        /// <summary>Total number of bytes written so far.</summary>
        public long BytesWritten { get; private set; }

        public override bool CanRead => false;
        public override bool CanSeek => false;
        public override bool CanWrite => true;
        public override long Length => _inner.Length;
        public override long Position { get => _inner.Position; set => _inner.Position = value; }

        public override void Flush() => _inner.Flush();
        public override int Read(byte[] buffer, int offset, int count) => throw new NotSupportedException();
        public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
        public override void SetLength(long value) => throw new NotSupportedException();

        public override void Write(byte[] buffer, int offset, int count)
        {
            _inner.Write(buffer, offset, count);
            BytesWritten += count;
        }

        public override async ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default)
        {
            // Count only after the write completes, matching the synchronous path;
            // the previous version incremented before awaiting, so a faulted write
            // still inflated BytesWritten.
            await _inner.WriteAsync(buffer, cancellationToken).ConfigureAwait(false);
            BytesWritten += buffer.Length;
        }
    }
}

View File

@@ -0,0 +1,50 @@
using System;
using System.IO;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
public sealed partial class FeedSnapshotCoordinatorService
{
    /// <summary>
    /// Write-only stream decorator that folds every written byte into an
    /// <see cref="IncrementalHash"/> before passing it to the wrapped stream.
    /// Takes ownership of the hash and disposes it with the stream.
    /// </summary>
    private sealed class HashingStream : Stream
    {
        private readonly Stream _inner;
        private readonly IncrementalHash _hash;

        public HashingStream(Stream inner, IncrementalHash hash)
        {
            _inner = inner;
            _hash = hash;
        }

        public override bool CanRead => false;
        public override bool CanSeek => false;
        public override bool CanWrite => true;
        public override long Length => _inner.Length;
        public override long Position { get => _inner.Position; set => _inner.Position = value; }

        public override void Flush() => _inner.Flush();
        public override int Read(byte[] buffer, int offset, int count) => throw new NotSupportedException();
        public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
        public override void SetLength(long value) => throw new NotSupportedException();

        public override void Write(byte[] buffer, int offset, int count)
        {
            _hash.AppendData(buffer, offset, count);
            _inner.Write(buffer, offset, count);
        }

        public override ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default)
        {
            _hash.AppendData(buffer.Span);
            return _inner.WriteAsync(buffer, cancellationToken);
        }

        /// <summary>Returns the accumulated hash and resets the hasher. Call before disposal.</summary>
        public byte[] GetHashAndReset() => _hash.GetHashAndReset();

        protected override void Dispose(bool disposing)
        {
            if (disposing)
            {
                // The stream owns the IncrementalHash; release it so the underlying
                // platform hash handle is not leaked (it was never disposed before).
                _hash.Dispose();
            }
            base.Dispose(disposing);
        }
    }
}

View File

@@ -0,0 +1,57 @@
using System;
using System.IO;
using System.Threading.Tasks;
using ZstdSharp;
namespace StellaOps.Replay.Core.FeedSnapshot;
public sealed partial class FeedSnapshotCoordinatorService
{
    /// <summary>
    /// Zstd compression stream wrapper. Buffers all writes in memory and emits a
    /// single compressed frame to the inner stream on disposal.
    /// </summary>
    private sealed class ZstdCompressionStream : Stream
    {
        private readonly Stream _inner;
        private readonly MemoryStream _buffer = new();
        private bool _disposed;

        public ZstdCompressionStream(Stream inner) => _inner = inner;

        public override bool CanRead => false;
        public override bool CanSeek => false;
        public override bool CanWrite => true;
        public override long Length => _buffer.Length;
        public override long Position { get => _buffer.Position; set => _buffer.Position = value; }

        public override void Flush() { }
        public override int Read(byte[] buffer, int offset, int count) => throw new NotSupportedException();
        public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
        public override void SetLength(long value) => throw new NotSupportedException();

        public override void Write(byte[] buffer, int offset, int count)
        {
            _buffer.Write(buffer, offset, count);
        }

        protected override void Dispose(bool disposing)
        {
            // Guard against double emission: Stream's base DisposeAsync routes
            // through Dispose(), so without this flag the async-dispose path
            // (await using in ExportBundleAsync) wrote the compressed payload
            // TWICE, corrupting the export.
            if (disposing && !_disposed)
            {
                _disposed = true;
                using var compressor = new Compressor();
                var compressed = compressor.Wrap(_buffer.ToArray());
                _inner.Write(compressed.ToArray());
                _buffer.Dispose();
            }
            base.Dispose(disposing);
        }

        public override async ValueTask DisposeAsync()
        {
            if (!_disposed)
            {
                _disposed = true;
                using var compressor = new Compressor();
                var compressed = compressor.Wrap(_buffer.ToArray());
                await _inner.WriteAsync(compressed.ToArray()).ConfigureAwait(false);
                await _buffer.DisposeAsync().ConfigureAwait(false);
            }
            // base.DisposeAsync() calls Dispose(); the _disposed guard above keeps
            // that second pass from re-emitting the payload.
            await base.DisposeAsync().ConfigureAwait(false);
        }
    }
}

View File

@@ -0,0 +1,66 @@
using System;
using System.IO;
using ZstdSharp;
namespace StellaOps.Replay.Core.FeedSnapshot;
public sealed partial class FeedSnapshotCoordinatorService
{
/// <summary>
/// Zstd decompression stream wrapper. Lazily buffers the ENTIRE inner stream into
/// memory and decompresses it on first access, so it is only suitable for bundles
/// that fit comfortably in memory.
/// </summary>
private sealed class ZstdDecompressionStream : Stream
{
private readonly Stream _inner;
// Fully decompressed payload; materialized on first Read/Length/Position access.
private MemoryStream? _decompressed;
private bool _initialized;
public ZstdDecompressionStream(Stream inner) => _inner = inner;
public override bool CanRead => true;
public override bool CanSeek => false;
public override bool CanWrite => false;
public override long Length => EnsureInitialized().Length;
public override long Position
{
get => EnsureInitialized().Position;
// NOTE(review): Position is settable even though CanSeek is false — confirm
// whether any caller relies on this inconsistency.
set => EnsureInitialized().Position = value;
}
public override void Flush() { }
public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();
public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
public override void SetLength(long value) => throw new NotSupportedException();
public override int Read(byte[] buffer, int offset, int count)
{
return EnsureInitialized().Read(buffer, offset, count);
}
// Copies the inner stream to memory, unwraps the zstd frame, and caches the result.
// Not thread-safe; reads are expected from a single consumer.
private MemoryStream EnsureInitialized()
{
if (!_initialized)
{
using var ms = new MemoryStream();
_inner.CopyTo(ms);
var compressed = ms.ToArray();
using var decompressor = new Decompressor();
var decompressed = decompressor.Unwrap(compressed);
_decompressed = new MemoryStream(decompressed.ToArray());
_initialized = true;
}
return _decompressed!;
}
protected override void Dispose(bool disposing)
{
if (disposing)
{
_decompressed?.Dispose();
}
base.Dispose(disposing);
}
}
}

View File

@@ -0,0 +1,80 @@
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
public sealed partial class FeedSnapshotCoordinatorService
{
/// <inheritdoc />
public async Task<SnapshotValidationResult?> ValidateSnapshotAsync(
string compositeDigest,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(compositeDigest);
var bundle = await GetSnapshotAsync(compositeDigest, cancellationToken).ConfigureAwait(false);
if (bundle is null)
{
// Unknown digest: report as invalid rather than returning null.
return new SnapshotValidationResult
{
IsValid = false,
CompositeDigest = compositeDigest,
SnapshotDigest = string.Empty,
CurrentDigest = string.Empty,
Errors = [$"Snapshot not found: {compositeDigest}"]
};
}
var missingSources = new List<string>();
var driftedSources = new List<SourceDrift>();
var errors = new List<string>();
// Compare each stored source against the provider's current state.
foreach (var source in bundle.Sources)
{
if (!_providers.TryGetValue(source.SourceId, out var provider))
{
missingSources.Add(source.SourceId);
continue;
}
try
{
var currentDigest = await provider.GetCurrentDigestAsync(cancellationToken).ConfigureAwait(false);
var currentCount = await provider.GetRecordCountAsync(cancellationToken).ConfigureAwait(false);
if (!string.Equals(currentDigest, source.Digest, StringComparison.OrdinalIgnoreCase))
{
driftedSources.Add(new SourceDrift
{
SourceId = source.SourceId,
SnapshotDigest = source.Digest,
CurrentDigest = currentDigest,
RecordsChanged = Math.Abs(currentCount - source.RecordCount)
});
}
}
catch (Exception ex)
{
// A failing provider makes the snapshot non-validatable, not drifted.
errors.Add($"Error validating source '{source.SourceId}': {ex.Message}");
}
}
var isValid = missingSources.Count == 0 && driftedSources.Count == 0 && errors.Count == 0;
// NOTE(review): this recomputes the digest from the snapshot's OWN stored sources,
// so CurrentDigest will always equal the stored composite digest (barring
// corruption) even when sources have drifted. Presumably it should be derived
// from the live provider digests gathered above — confirm intent.
var currentCompositeDigest = ComputeCompositeDigest(bundle.Sources.ToArray());
return new SnapshotValidationResult
{
IsValid = isValid,
CompositeDigest = compositeDigest,
SnapshotDigest = compositeDigest,
CurrentDigest = currentCompositeDigest,
MissingSources = missingSources.Count > 0 ? missingSources.ToImmutableArray() : null,
DriftedSources = driftedSources.Count > 0 ? driftedSources.ToImmutableArray() : [],
Errors = errors.Count > 0 ? errors.ToImmutableArray() : null
};
}
}

View File

@@ -1,17 +1,7 @@
// -----------------------------------------------------------------------------
// FeedSnapshotCoordinatorService.cs
// Sprint: SPRINT_20251226_007_BE_determinism_gaps
// Task: DET-GAP-02
// Description: Service implementation coordinating Advisory + VEX + Policy snapshots
// -----------------------------------------------------------------------------
using System;
using System.Collections.Frozen;
using System.Collections.Immutable;
using System.IO.Compression;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Linq;
using System.Text.Json;
namespace StellaOps.Replay.Core.FeedSnapshot;
@@ -19,14 +9,14 @@ namespace StellaOps.Replay.Core.FeedSnapshot;
/// <summary>
/// Coordinates atomic snapshots across multiple feed sources.
/// </summary>
public sealed class FeedSnapshotCoordinatorService : IFeedSnapshotCoordinator
public sealed partial class FeedSnapshotCoordinatorService : IFeedSnapshotCoordinator
{
private readonly FrozenDictionary<string, IFeedSourceProvider> _providers;
private readonly IFeedSnapshotStore _store;
private readonly FeedSnapshotOptions _options;
private readonly TimeProvider _timeProvider;
private static readonly JsonSerializerOptions JsonOptions = new()
private static readonly JsonSerializerOptions _jsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false
@@ -41,7 +31,6 @@ public sealed class FeedSnapshotCoordinatorService : IFeedSnapshotCoordinator
ArgumentNullException.ThrowIfNull(providers);
ArgumentNullException.ThrowIfNull(store);
// Sort providers alphabetically by SourceId for deterministic digest computation
_providers = providers
.OrderBy(p => p.SourceId, StringComparer.Ordinal)
.ToFrozenDictionary(p => p.SourceId, p => p, StringComparer.OrdinalIgnoreCase);
@@ -54,632 +43,4 @@ public sealed class FeedSnapshotCoordinatorService : IFeedSnapshotCoordinator
/// <inheritdoc />
public IReadOnlyList<string> RegisteredSources =>
_providers.Keys.Order(StringComparer.Ordinal).ToImmutableArray();
/// <inheritdoc />
public Task<FeedSnapshotBundle> CreateSnapshotAsync(
string? label = null,
CancellationToken cancellationToken = default)
{
return CreateSnapshotAsync(_providers.Keys, label, cancellationToken);
}
/// <inheritdoc />
/// <remarks>
/// Validates the requested source ids, snapshots each source concurrently,
/// computes the composite digest over the sources (sorted by SourceId), and
/// persists the resulting bundle before returning it.
/// </remarks>
public async Task<FeedSnapshotBundle> CreateSnapshotAsync(
    IEnumerable<string> sourceIds,
    string? label = null,
    CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(sourceIds);
    var requestedSources = sourceIds.ToImmutableArray();
    if (requestedSources.Length == 0)
    {
        throw new ArgumentException("At least one source must be specified.", nameof(sourceIds));
    }
    // Validate all requested sources exist
    var missingProviders = requestedSources
        .Where(id => !_providers.ContainsKey(id))
        .ToImmutableArray();
    if (missingProviders.Length > 0)
    {
        throw new InvalidOperationException(
            $"Unknown feed sources: {string.Join(", ", missingProviders)}. " +
            $"Available sources: {string.Join(", ", _providers.Keys)}");
    }
    var snapshotId = GenerateSnapshotId();
    var createdAt = _timeProvider.GetUtcNow();
    // Create snapshots from all sources in parallel (order doesn't matter for creation)
    // NOTE: Select is lazy here; the tasks all start when Task.WhenAll enumerates them.
    var snapshotTasks = requestedSources
        .Order(StringComparer.Ordinal) // Sort for deterministic ordering
        .Select(async sourceId =>
        {
            var provider = _providers[sourceId];
            return await provider.CreateSnapshotAsync(cancellationToken).ConfigureAwait(false);
        });
    var sourceSnapshots = await Task.WhenAll(snapshotTasks).ConfigureAwait(false);
    // Compute composite digest over sorted sources
    var compositeDigest = ComputeCompositeDigest(sourceSnapshots);
    var bundle = new FeedSnapshotBundle
    {
        SnapshotId = snapshotId,
        CompositeDigest = compositeDigest,
        Label = label,
        CreatedAt = createdAt,
        Sources = sourceSnapshots.ToImmutableArray()
    };
    // Persist the snapshot
    await _store.SaveAsync(bundle, cancellationToken).ConfigureAwait(false);
    return bundle;
}
/// <inheritdoc />
/// <remarks>Returns <c>null</c> (rather than throwing) when no snapshot matches.</remarks>
public async Task<FeedSnapshotBundle?> GetSnapshotAsync(
    string compositeDigest,
    CancellationToken cancellationToken = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(compositeDigest);
    return await _store.GetByDigestAsync(compositeDigest, cancellationToken).ConfigureAwait(false);
}

/// <inheritdoc />
/// <remarks>Pure pass-through to the store; enumeration is deferred.</remarks>
public IAsyncEnumerable<FeedSnapshotSummary> ListSnapshotsAsync(
    DateTimeOffset? from = null,
    DateTimeOffset? to = null,
    CancellationToken cancellationToken = default)
{
    return _store.ListAsync(from, to, cancellationToken);
}
/// <inheritdoc />
/// <remarks>
/// Pipeline: payload -> (optional compression) -> hashing -> byte counting -> caller stream.
/// Consequently <c>BundleDigest</c> and <c>SizeBytes</c> describe the
/// COMPRESSED bytes as written to <paramref name="outputStream"/>.
/// </remarks>
public async Task<ExportedBundleMetadata> ExportBundleAsync(
    string compositeDigest,
    Stream outputStream,
    CancellationToken cancellationToken = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(compositeDigest);
    ArgumentNullException.ThrowIfNull(outputStream);
    var bundle = await GetSnapshotAsync(compositeDigest, cancellationToken).ConfigureAwait(false)
        ?? throw new InvalidOperationException($"Snapshot not found: {compositeDigest}");
    using var countingStream = new CountingStream(outputStream);
    // NOTE(review): the IncrementalHash created here is never disposed
    // (HashingStream does not take ownership) — minor leak, confirm intent.
    using var hashStream = new HashingStream(countingStream, IncrementalHash.CreateHash(HashAlgorithmName.SHA256));
    Stream writeStream;
    string compression;
    if (_options.CompressExport && _options.Compression != CompressionAlgorithm.None)
    {
        compression = _options.Compression switch
        {
            CompressionAlgorithm.Gzip => "gzip",
            CompressionAlgorithm.Zstd => "zstd",
            _ => "none"
        };
        // NOTE(review): the ternary below only distinguishes Gzip vs. Zstd; a
        // future algorithm would silently fall through to Zstd — keep in sync
        // with the switch above.
        writeStream = _options.Compression == CompressionAlgorithm.Gzip
            ? new GZipStream(hashStream, CompressionLevel.Optimal, leaveOpen: true)
            : new ZstdCompressionStream(hashStream);
    }
    else
    {
        writeStream = hashStream;
        compression = "none";
    }
    // Disposing writeStream at the end of this block flushes/emits any
    // buffered compressed data BEFORE the hash is finalized below.
    await using (writeStream.ConfigureAwait(false))
    {
        // Write bundle manifest
        var manifest = new BundleManifest
        {
            FormatVersion = "1.0",
            Snapshot = bundle
        };
        await JsonSerializer.SerializeAsync(writeStream, manifest, JsonOptions, cancellationToken)
            .ConfigureAwait(false);
        // Export each source's content
        foreach (var source in bundle.Sources)
        {
            if (_providers.TryGetValue(source.SourceId, out var provider))
            {
                await provider.ExportAsync(source, writeStream, cancellationToken).ConfigureAwait(false);
            }
        }
    }
    var bundleDigest = $"sha256:{Convert.ToHexString(hashStream.GetHashAndReset()).ToLowerInvariant()}";
    return new ExportedBundleMetadata
    {
        CompositeDigest = compositeDigest,
        SizeBytes = countingStream.BytesWritten,
        BundleDigest = bundleDigest,
        FormatVersion = "1.0",
        Compression = compression
    };
}
/// <inheritdoc />
/// <remarks>
/// Detects gzip/zstd compression from magic bytes, deserializes the leading
/// manifest, optionally verifies the composite digest, then hands the rest of
/// the stream to each source provider and persists the bundle.
/// NOTE(review): JsonSerializer.DeserializeAsync may buffer bytes past the end
/// of the manifest JSON; the provider ImportAsync calls then read from the same
/// stream — confirm the bundle framing tolerates this.
/// </remarks>
public async Task<FeedSnapshotBundle> ImportBundleAsync(
    Stream inputStream,
    CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(inputStream);
    // Try to detect compression from magic bytes
    // NOTE(review): a single ReadAsync may legally return fewer than 4 bytes
    // even when more are available, which would misclassify the stream.
    var header = new byte[4];
    var bytesRead = await inputStream.ReadAsync(header, cancellationToken).ConfigureAwait(false);
    // Reset stream position (or use a buffer if not seekable)
    if (inputStream.CanSeek)
    {
        inputStream.Seek(0, SeekOrigin.Begin);
    }
    else
    {
        throw new InvalidOperationException("Input stream must be seekable for import.");
    }
    Stream readStream;
    if (bytesRead >= 2 && header[0] == 0x1F && header[1] == 0x8B) // Gzip magic
    {
        readStream = new GZipStream(inputStream, CompressionMode.Decompress, leaveOpen: true);
    }
    else if (bytesRead >= 4 && header[0] == 0x28 && header[1] == 0xB5 && header[2] == 0x2F && header[3] == 0xFD) // Zstd magic
    {
        readStream = new ZstdDecompressionStream(inputStream);
    }
    else
    {
        readStream = inputStream;
    }
    // NOTE(review): when the payload is uncompressed, readStream IS the
    // caller's inputStream and this block disposes it — confirm callers expect
    // ownership transfer.
    await using (readStream.ConfigureAwait(false))
    {
        var manifest = await JsonSerializer.DeserializeAsync<BundleManifest>(readStream, JsonOptions, cancellationToken)
            .ConfigureAwait(false)
            ?? throw new InvalidOperationException("Invalid bundle: could not deserialize manifest.");
        var bundle = manifest.Snapshot
            ?? throw new InvalidOperationException("Invalid bundle: missing snapshot data.");
        if (_options.VerifyOnImport)
        {
            // Recompute the composite digest from the manifest's sources and
            // compare against the value recorded at export time.
            var computedDigest = ComputeCompositeDigest(bundle.Sources.ToArray());
            if (!string.Equals(computedDigest, bundle.CompositeDigest, StringComparison.OrdinalIgnoreCase))
            {
                throw new InvalidOperationException(
                    $"Bundle integrity check failed: expected {bundle.CompositeDigest}, computed {computedDigest}");
            }
        }
        // Import source content
        foreach (var source in bundle.Sources)
        {
            if (_providers.TryGetValue(source.SourceId, out var provider))
            {
                await provider.ImportAsync(readStream, cancellationToken).ConfigureAwait(false);
            }
        }
        // Save the imported bundle
        await _store.SaveAsync(bundle, cancellationToken).ConfigureAwait(false);
        return bundle;
    }
}
/// <inheritdoc />
/// <remarks>
/// Re-queries each provider for its current digest and record count and
/// compares against the snapshot. <c>CurrentDigest</c> is recomputed from the
/// providers' CURRENT digests (falling back to the snapshot digest for missing
/// or failing sources) so drift is actually reflected in the composite.
/// Previously it was recomputed from the snapshot's own sources, which made it
/// always equal the stored composite even when sources had drifted.
/// </remarks>
public async Task<SnapshotValidationResult?> ValidateSnapshotAsync(
    string compositeDigest,
    CancellationToken cancellationToken = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(compositeDigest);
    var bundle = await GetSnapshotAsync(compositeDigest, cancellationToken).ConfigureAwait(false);
    if (bundle is null)
    {
        return new SnapshotValidationResult
        {
            IsValid = false,
            CompositeDigest = compositeDigest,
            SnapshotDigest = string.Empty,
            CurrentDigest = string.Empty,
            Errors = [$"Snapshot not found: {compositeDigest}"]
        };
    }
    var missingSources = new List<string>();
    var driftedSources = new List<SourceDrift>();
    var errors = new List<string>();
    // Current view of each source; feeds the live composite digest below.
    var currentSources = new List<SourceSnapshot>(bundle.Sources.Count);
    foreach (var source in bundle.Sources)
    {
        if (!_providers.TryGetValue(source.SourceId, out var provider))
        {
            missingSources.Add(source.SourceId);
            currentSources.Add(source); // no provider: carry the snapshot digest forward
            continue;
        }
        try
        {
            var currentDigest = await provider.GetCurrentDigestAsync(cancellationToken).ConfigureAwait(false);
            var currentCount = await provider.GetRecordCountAsync(cancellationToken).ConfigureAwait(false);
            currentSources.Add(source with { Digest = currentDigest, RecordCount = currentCount });
            if (!string.Equals(currentDigest, source.Digest, StringComparison.OrdinalIgnoreCase))
            {
                driftedSources.Add(new SourceDrift
                {
                    SourceId = source.SourceId,
                    SnapshotDigest = source.Digest,
                    CurrentDigest = currentDigest,
                    RecordsChanged = Math.Abs(currentCount - source.RecordCount)
                });
            }
        }
        catch (Exception ex)
        {
            errors.Add($"Error validating source '{source.SourceId}': {ex.Message}");
            currentSources.Add(source); // provider failed: fall back to snapshot digest
        }
    }
    var isValid = missingSources.Count == 0 && driftedSources.Count == 0 && errors.Count == 0;
    // Composite of the sources as they exist NOW; equals the stored composite
    // only when nothing has drifted.
    var currentCompositeDigest = ComputeCompositeDigest(currentSources.ToArray());
    return new SnapshotValidationResult
    {
        IsValid = isValid,
        CompositeDigest = compositeDigest,
        SnapshotDigest = compositeDigest,
        CurrentDigest = currentCompositeDigest,
        MissingSources = missingSources.Count > 0 ? missingSources.ToImmutableArray() : null,
        DriftedSources = driftedSources.Count > 0 ? driftedSources.ToImmutableArray() : [],
        Errors = errors.Count > 0 ? errors.ToImmutableArray() : null
    };
}
/// <inheritdoc />
/// <remarks>
/// Offset-based paging: the cursor is a decimal index into the store's
/// enumeration order. Empty or non-numeric cursors start from the beginning.
/// </remarks>
public async Task<IReadOnlyList<FeedSnapshotSummary>> ListSnapshotsAsync(
    string? cursor,
    int limit,
    CancellationToken cancellationToken = default)
{
    // Interpret the cursor as the number of leading entries to skip.
    var skip = !string.IsNullOrEmpty(cursor) && int.TryParse(cursor, out var parsed)
        ? parsed
        : 0;
    var page = new List<FeedSnapshotSummary>();
    var seen = 0;
    await foreach (var summary in _store.ListAsync(null, null, cancellationToken).ConfigureAwait(false))
    {
        if (seen >= skip && page.Count < limit)
        {
            page.Add(summary);
        }
        seen++;
        if (page.Count >= limit)
        {
            break; // page filled — stop enumerating the store
        }
    }
    return page;
}
/// <inheritdoc />
/// <remarks>
/// NOTE(review): <paramref name="options"/> is currently not applied — the
/// export honors the service-level <c>_options</c> and writes into a throwaway
/// MemoryStream, so only the returned metadata is produced and
/// <c>ExportPath</c> is never populated. Confirm whether file output and
/// per-call compression were intended.
/// </remarks>
public async Task<ExportedBundleMetadata?> ExportBundleAsync(
    string compositeDigest,
    ExportBundleOptions options,
    CancellationToken cancellationToken = default)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(compositeDigest);
    ArgumentNullException.ThrowIfNull(options);
    var bundle = await GetSnapshotAsync(compositeDigest, cancellationToken).ConfigureAwait(false);
    if (bundle is null)
    {
        return null;
    }
    // Export to a memory stream if no output path specified
    using var memoryStream = new MemoryStream();
    var metadata = await ExportBundleAsync(compositeDigest, memoryStream, cancellationToken).ConfigureAwait(false);
    return metadata;
}

/// <inheritdoc />
/// <remarks>Currently a straight delegation; the options are validated but unused.</remarks>
public async Task<FeedSnapshotBundle> ImportBundleAsync(
    Stream inputStream,
    ImportBundleOptions options,
    CancellationToken cancellationToken = default)
{
    ArgumentNullException.ThrowIfNull(inputStream);
    ArgumentNullException.ThrowIfNull(options);
    // Delegate to the main import method (options currently don't change behavior)
    // In a full implementation, we would check options.ValidateDigests and options.AllowOverwrite
    return await ImportBundleAsync(inputStream, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Builds a snapshot id of the form <c>snap-{yyyyMMdd-HHmmss}-{8 hex chars}</c>.
/// The timestamp comes from the injected <see cref="TimeProvider"/> (UTC); the
/// suffix is random, so ids are unique but not reproducible across runs.
/// </summary>
private string GenerateSnapshotId()
{
    var stamp = _timeProvider.GetUtcNow().ToString("yyyyMMdd-HHmmss");
    var suffix = Guid.NewGuid().ToString("N").Substring(0, 8);
    return $"snap-{stamp}-{suffix}";
}
/// <summary>
/// Computes the composite digest: SHA-256 over, for each source in ordinal
/// SourceId order, <c>UTF8(SourceId) || 0x00 || digestBytes</c>, where
/// digestBytes are the decoded hex digits of the source digest (any
/// "sha256:" prefix stripped). Formatted as <c>sha256:{lowercase hex}</c>.
/// </summary>
private static string ComputeCompositeDigest(SourceSnapshot[] sources)
{
    using var hasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
    foreach (var source in sources.OrderBy(s => s.SourceId, StringComparer.Ordinal))
    {
        // SourceId plus a NUL separator keeps (id, digest) pairs unambiguous,
        // so distinct sources with identical digests still differ.
        hasher.AppendData(Encoding.UTF8.GetBytes(source.SourceId));
        hasher.AppendData(new byte[] { 0 });
        var hex = source.Digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
            ? source.Digest[7..]
            : source.Digest;
        hasher.AppendData(Convert.FromHexString(hex));
    }
    var digest = hasher.GetHashAndReset();
    return $"sha256:{Convert.ToHexString(digest).ToLowerInvariant()}";
}
/// <summary>
/// Envelope serialized at the head of an exported bundle stream.
/// </summary>
private sealed class BundleManifest
{
    // Bump when the on-the-wire bundle layout changes.
    public string FormatVersion { get; init; } = "1.0";
    public FeedSnapshotBundle? Snapshot { get; init; }
}
/// <summary>
/// Write-only stream wrapper that counts bytes written to the inner stream.
/// Does not own the inner stream; the caller disposes it separately.
/// </summary>
private sealed class CountingStream : Stream
{
    private readonly Stream _inner;

    /// <summary>Total bytes successfully written through this wrapper.</summary>
    public long BytesWritten { get; private set; }

    public CountingStream(Stream inner) => _inner = inner;

    public override bool CanRead => false;
    public override bool CanSeek => false;
    public override bool CanWrite => true;
    public override long Length => _inner.Length;
    public override long Position { get => _inner.Position; set => _inner.Position = value; }
    public override void Flush() => _inner.Flush();
    public override int Read(byte[] buffer, int offset, int count) => throw new NotSupportedException();
    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
    public override void SetLength(long value) => throw new NotSupportedException();

    public override void Write(byte[] buffer, int offset, int count)
    {
        _inner.Write(buffer, offset, count);
        BytesWritten += count;
    }

    public override async ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default)
    {
        // Count only after the write completes so a failed or cancelled write
        // does not inflate BytesWritten (matches the synchronous path, which
        // previously counted before the async write finished).
        await _inner.WriteAsync(buffer, cancellationToken).ConfigureAwait(false);
        BytesWritten += buffer.Length;
    }
}
/// <summary>
/// Stream wrapper that computes hash while writing.
/// Write-only; does not own the inner stream, and the caller retains
/// ownership (including disposal) of the <see cref="IncrementalHash"/>.
/// </summary>
private sealed class HashingStream : Stream
{
    private readonly Stream _inner;
    private readonly IncrementalHash _hash;
    public HashingStream(Stream inner, IncrementalHash hash)
    {
        _inner = inner;
        _hash = hash;
    }
    public override bool CanRead => false;
    public override bool CanSeek => false;
    public override bool CanWrite => true;
    public override long Length => _inner.Length;
    public override long Position { get => _inner.Position; set => _inner.Position = value; }
    public override void Flush() => _inner.Flush();
    public override int Read(byte[] buffer, int offset, int count) => throw new NotSupportedException();
    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
    public override void SetLength(long value) => throw new NotSupportedException();
    public override void Write(byte[] buffer, int offset, int count)
    {
        _hash.AppendData(buffer, offset, count);
        _inner.Write(buffer, offset, count);
    }
    public override ValueTask WriteAsync(ReadOnlyMemory<byte> buffer, CancellationToken cancellationToken = default)
    {
        // The hash is appended synchronously before the inner write is awaited;
        // if the inner write later fails, the hash still includes those bytes.
        _hash.AppendData(buffer.Span);
        return _inner.WriteAsync(buffer, cancellationToken);
    }
    /// <summary>Finalizes and returns the hash of all bytes written so far.</summary>
    public byte[] GetHashAndReset() => _hash.GetHashAndReset();
}
/// <summary>
/// Write-only stream that buffers all writes in memory and emits a single
/// Zstd-compressed frame to the inner stream on disposal. The inner stream is
/// not owned and is left open. Memory use scales with the payload size.
/// </summary>
private sealed class ZstdCompressionStream : Stream
{
    private readonly Stream _inner;
    private readonly MemoryStream _buffer = new();
    private bool _disposed; // guards against emitting the compressed frame twice

    public ZstdCompressionStream(Stream inner) => _inner = inner;

    public override bool CanRead => false;
    public override bool CanSeek => false;
    public override bool CanWrite => true;
    public override long Length => _buffer.Length;
    public override long Position { get => _buffer.Position; set => _buffer.Position = value; }
    public override void Flush() { }
    public override int Read(byte[] buffer, int offset, int count) => throw new NotSupportedException();
    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
    public override void SetLength(long value) => throw new NotSupportedException();

    public override void Write(byte[] buffer, int offset, int count)
    {
        _buffer.Write(buffer, offset, count);
    }

    protected override void Dispose(bool disposing)
    {
        // The _disposed guard is essential: Stream's base DisposeAsync falls
        // back to Dispose(), so without it an `await using` would compress and
        // write the payload TWICE (MemoryStream.ToArray still works after the
        // buffer is disposed).
        if (disposing && !_disposed)
        {
            _disposed = true;
            // Compress and write on dispose
            var data = _buffer.ToArray();
            using var compressor = new ZstdSharp.Compressor();
            var compressed = compressor.Wrap(data);
            _inner.Write(compressed.ToArray());
            _buffer.Dispose();
        }
        base.Dispose(disposing);
    }

    public override async ValueTask DisposeAsync()
    {
        if (!_disposed)
        {
            _disposed = true;
            var data = _buffer.ToArray();
            using var compressor = new ZstdSharp.Compressor();
            var compressed = compressor.Wrap(data);
            await _inner.WriteAsync(compressed.ToArray()).ConfigureAwait(false);
            await _buffer.DisposeAsync().ConfigureAwait(false);
        }
        await base.DisposeAsync().ConfigureAwait(false);
    }
}
/// <summary>
/// Zstd decompression stream wrapper.
/// Read-only; lazily buffers the ENTIRE inner stream into memory and
/// decompresses it on first access, so memory use scales with payload size.
/// Does not own (or dispose) the inner stream.
/// </summary>
private sealed class ZstdDecompressionStream : Stream
{
    private readonly Stream _inner;
    private MemoryStream? _decompressed;
    private bool _initialized;
    public ZstdDecompressionStream(Stream inner) => _inner = inner;
    public override bool CanRead => true;
    public override bool CanSeek => false;
    public override bool CanWrite => false;
    public override long Length => EnsureInitialized().Length;
    // NOTE(review): Position is settable even though CanSeek is false —
    // confirm no caller relies on assigning Position through this wrapper.
    public override long Position
    {
        get => EnsureInitialized().Position;
        set => EnsureInitialized().Position = value;
    }
    public override void Flush() { }
    public override void Write(byte[] buffer, int offset, int count) => throw new NotSupportedException();
    public override long Seek(long offset, SeekOrigin origin) => throw new NotSupportedException();
    public override void SetLength(long value) => throw new NotSupportedException();
    public override int Read(byte[] buffer, int offset, int count)
    {
        return EnsureInitialized().Read(buffer, offset, count);
    }
    // Reads and decompresses the whole inner stream exactly once; subsequent
    // calls reuse the in-memory decompressed copy.
    private MemoryStream EnsureInitialized()
    {
        if (!_initialized)
        {
            using var ms = new MemoryStream();
            _inner.CopyTo(ms);
            var compressed = ms.ToArray();
            using var decompressor = new ZstdSharp.Decompressor();
            var decompressed = decompressor.Unwrap(compressed);
            _decompressed = new MemoryStream(decompressed.ToArray());
            _initialized = true;
        }
        return _decompressed!;
    }
    protected override void Dispose(bool disposing)
    {
        if (disposing)
        {
            _decompressed?.Dispose();
        }
        base.Dispose(disposing);
    }
}
}
/// <summary>
/// Storage interface for feed snapshot bundles.
/// </summary>
public interface IFeedSnapshotStore
{
/// <summary>
/// Saves a snapshot bundle.
/// </summary>
Task SaveAsync(FeedSnapshotBundle bundle, CancellationToken cancellationToken = default);
/// <summary>
/// Gets a snapshot by composite digest.
/// </summary>
Task<FeedSnapshotBundle?> GetByDigestAsync(string compositeDigest, CancellationToken cancellationToken = default);
/// <summary>
/// Gets a snapshot by ID.
/// </summary>
Task<FeedSnapshotBundle?> GetByIdAsync(string snapshotId, CancellationToken cancellationToken = default);
/// <summary>
/// Lists snapshots within a time range.
/// </summary>
IAsyncEnumerable<FeedSnapshotSummary> ListAsync(
DateTimeOffset? from = null,
DateTimeOffset? to = null,
CancellationToken cancellationToken = default);
/// <summary>
/// Deletes a snapshot by composite digest.
/// </summary>
Task<bool> DeleteAsync(string compositeDigest, CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,34 @@
using System;
namespace StellaOps.Replay.Core.FeedSnapshot;
/// <summary>
/// Options for feed snapshot creation.
/// </summary>
public sealed record FeedSnapshotOptions
{
    /// <summary>
    /// Whether to include full content in the snapshot (vs. just metadata).
    /// Defaults to <c>false</c>.
    /// </summary>
    public bool IncludeContent { get; init; }
    /// <summary>
    /// Whether to compress exported bundles. Defaults to <c>true</c>.
    /// </summary>
    public bool CompressExport { get; init; } = true;
    /// <summary>
    /// Compression algorithm for exports. Defaults to Zstd.
    /// </summary>
    public CompressionAlgorithm Compression { get; init; } = CompressionAlgorithm.Zstd;
    /// <summary>
    /// Maximum age of snapshot before it's considered stale.
    /// Optional; semantics of <c>null</c> depend on the consumer — presumably no staleness check.
    /// </summary>
    public TimeSpan? MaxSnapshotAge { get; init; }
    /// <summary>
    /// Whether to verify snapshot integrity on import. Defaults to <c>true</c>.
    /// </summary>
    public bool VerifyOnImport { get; init; } = true;
}

View File

@@ -0,0 +1,44 @@
using System;
namespace StellaOps.Replay.Core.FeedSnapshot;
/// <summary>
/// Summary of a snapshot for listing.
/// </summary>
public sealed record FeedSnapshotSummary
{
    /// <summary>
    /// Unique identifier for this snapshot.
    /// </summary>
    public required string SnapshotId { get; init; }
    /// <summary>
    /// Composite SHA-256 digest.
    /// </summary>
    public required string CompositeDigest { get; init; }
    /// <summary>
    /// Human-readable label (optional).
    /// </summary>
    public string? Label { get; init; }
    /// <summary>
    /// UTC timestamp when snapshot was created.
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }
    /// <summary>
    /// Number of sources included.
    /// </summary>
    public required int SourceCount { get; init; }
    /// <summary>
    /// Total record count across all sources.
    /// </summary>
    public required long TotalRecordCount { get; init; }
    /// <summary>
    /// Total item count across all sources (alias for API compatibility).
    /// Clamped to <see cref="int.MaxValue"/> when the record count exceeds it.
    /// </summary>
    public int TotalItemCount => (int)Math.Min(TotalRecordCount, int.MaxValue);
}

View File

@@ -0,0 +1,54 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
// Bundle export/import surface of the coordinator; the core snapshot
// operations live in the sibling partial declarations.
public partial interface IFeedSnapshotCoordinator
{
    /// <summary>
    /// Exports a snapshot as a portable bundle for offline use.
    /// </summary>
    /// <param name="compositeDigest">SHA-256 composite digest.</param>
    /// <param name="outputStream">Stream to write the bundle to.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Bundle metadata including size and checksums.</returns>
    Task<ExportedBundleMetadata> ExportBundleAsync(
        string compositeDigest,
        Stream outputStream,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Exports a snapshot as a portable bundle with options.
    /// </summary>
    /// <param name="compositeDigest">SHA-256 composite digest.</param>
    /// <param name="options">Export options.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Bundle metadata including path and checksums, or <c>null</c> if the snapshot is unknown.</returns>
    Task<ExportedBundleMetadata?> ExportBundleAsync(
        string compositeDigest,
        ExportBundleOptions options,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Imports a snapshot bundle from a portable export.
    /// NOTE(review): the current coordinator implementation requires a seekable
    /// input stream — consider documenting that as part of the contract.
    /// </summary>
    /// <param name="inputStream">Stream to read the bundle from.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Imported snapshot bundle.</returns>
    Task<FeedSnapshotBundle> ImportBundleAsync(
        Stream inputStream,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Imports a snapshot bundle with options.
    /// </summary>
    /// <param name="inputStream">Stream to read the bundle from.</param>
    /// <param name="options">Import options.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Imported snapshot bundle.</returns>
    Task<FeedSnapshotBundle> ImportBundleAsync(
        Stream inputStream,
        ImportBundleOptions options,
        CancellationToken cancellationToken = default);
}

View File

@@ -0,0 +1,17 @@
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
// Validation surface of the coordinator; complements the sibling partial
// declarations of IFeedSnapshotCoordinator.
public partial interface IFeedSnapshotCoordinator
{
    /// <summary>
    /// Validates that a snapshot can still be replayed (all sources still available).
    /// </summary>
    /// <param name="compositeDigest">SHA-256 composite digest.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Validation result with any drift or missing sources; may be <c>null</c> depending on implementation.</returns>
    Task<SnapshotValidationResult?> ValidateSnapshotAsync(
        string compositeDigest,
        CancellationToken cancellationToken = default);
}

View File

@@ -1,9 +1,7 @@
// -----------------------------------------------------------------------------
// IFeedSnapshotCoordinator.cs
// Sprint: SPRINT_20251226_007_BE_determinism_gaps
// Task: DET-GAP-01
// Description: Interface for atomic multi-source feed snapshot coordination
// -----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
@@ -20,7 +18,7 @@ namespace StellaOps.Replay.Core.FeedSnapshot;
/// <item>Offline-compatible: bundles can be exported for air-gapped replay</item>
/// </list>
/// </remarks>
public interface IFeedSnapshotCoordinator
public partial interface IFeedSnapshotCoordinator
{
/// <summary>
/// Creates an atomic snapshot across all registered feed sources.
@@ -78,354 +76,8 @@ public interface IFeedSnapshotCoordinator
int limit,
CancellationToken cancellationToken = default);
/// <summary>
/// Exports a snapshot as a portable bundle for offline use.
/// </summary>
/// <param name="compositeDigest">SHA-256 composite digest.</param>
/// <param name="outputStream">Stream to write the bundle to.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Bundle metadata including size and checksums.</returns>
Task<ExportedBundleMetadata> ExportBundleAsync(
string compositeDigest,
Stream outputStream,
CancellationToken cancellationToken = default);
/// <summary>
/// Exports a snapshot as a portable bundle with options.
/// </summary>
/// <param name="compositeDigest">SHA-256 composite digest.</param>
/// <param name="options">Export options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Bundle metadata including path and checksums.</returns>
Task<ExportedBundleMetadata?> ExportBundleAsync(
string compositeDigest,
ExportBundleOptions options,
CancellationToken cancellationToken = default);
/// <summary>
/// Imports a snapshot bundle from a portable export.
/// </summary>
/// <param name="inputStream">Stream to read the bundle from.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Imported snapshot bundle.</returns>
Task<FeedSnapshotBundle> ImportBundleAsync(
Stream inputStream,
CancellationToken cancellationToken = default);
/// <summary>
/// Imports a snapshot bundle with options.
/// </summary>
/// <param name="inputStream">Stream to read the bundle from.</param>
/// <param name="options">Import options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Imported snapshot bundle.</returns>
Task<FeedSnapshotBundle> ImportBundleAsync(
Stream inputStream,
ImportBundleOptions options,
CancellationToken cancellationToken = default);
/// <summary>
/// Validates that a snapshot can still be replayed (all sources still available).
/// </summary>
/// <param name="compositeDigest">SHA-256 composite digest.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Validation result with any drift or missing sources.</returns>
Task<SnapshotValidationResult?> ValidateSnapshotAsync(
string compositeDigest,
CancellationToken cancellationToken = default);
/// <summary>
/// Gets the list of registered feed source providers.
/// </summary>
IReadOnlyList<string> RegisteredSources { get; }
}
/// <summary>
/// Atomic bundle of feed snapshots with composite digest.
/// </summary>
public sealed record FeedSnapshotBundle
{
/// <summary>
/// Unique identifier for this snapshot.
/// </summary>
public required string SnapshotId { get; init; }
/// <summary>
/// Composite SHA-256 digest over all source digests (sha256:hex).
/// Computed as: SHA256(source1Digest || source2Digest || ... || sourceNDigest)
/// where sources are sorted alphabetically by SourceId.
/// </summary>
public required string CompositeDigest { get; init; }
/// <summary>
/// Human-readable label (optional).
/// </summary>
public string? Label { get; init; }
/// <summary>
/// UTC timestamp when snapshot was created.
/// </summary>
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Individual source snapshots.
/// </summary>
public required IReadOnlyList<SourceSnapshot> Sources { get; init; }
/// <summary>
/// Schema version for forward compatibility.
/// </summary>
public string SchemaVersion { get; init; } = "1.0";
}
/// <summary>
/// Snapshot of a single feed source.
/// </summary>
public sealed record SourceSnapshot
{
/// <summary>
/// Source identifier (e.g., "nvd", "ghsa", "osv", "policy", "vex").
/// </summary>
public required string SourceId { get; init; }
/// <summary>
/// Source-specific version or sequence number.
/// </summary>
public required string Version { get; init; }
/// <summary>
/// SHA-256 digest of the source content (sha256:hex).
/// </summary>
public required string Digest { get; init; }
/// <summary>
/// Number of records in this source at snapshot time.
/// </summary>
public required long RecordCount { get; init; }
/// <summary>
/// Number of items (alias for RecordCount for API compatibility).
/// </summary>
public int ItemCount => (int)Math.Min(RecordCount, int.MaxValue);
/// <summary>
/// UTC timestamp when this source was snapshotted.
/// </summary>
public DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Source-specific metadata.
/// </summary>
public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}
/// <summary>
/// Summary of a snapshot for listing.
/// </summary>
public sealed record FeedSnapshotSummary
{
/// <summary>
/// Unique identifier for this snapshot.
/// </summary>
public required string SnapshotId { get; init; }
/// <summary>
/// Composite SHA-256 digest.
/// </summary>
public required string CompositeDigest { get; init; }
/// <summary>
/// Human-readable label (optional).
/// </summary>
public string? Label { get; init; }
/// <summary>
/// UTC timestamp when snapshot was created.
/// </summary>
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Number of sources included.
/// </summary>
public required int SourceCount { get; init; }
/// <summary>
/// Total record count across all sources.
/// </summary>
public required long TotalRecordCount { get; init; }
/// <summary>
/// Total item count across all sources (alias for API compatibility).
/// </summary>
public int TotalItemCount => (int)Math.Min(TotalRecordCount, int.MaxValue);
}
/// <summary>
/// Metadata for an exported snapshot bundle.
/// </summary>
public sealed record ExportedBundleMetadata
{
/// <summary>
/// Composite digest of the exported snapshot.
/// </summary>
public required string CompositeDigest { get; init; }
/// <summary>
/// Size of the exported bundle in bytes.
/// </summary>
public required long SizeBytes { get; init; }
/// <summary>
/// SHA-256 digest of the bundle file itself.
/// </summary>
public required string BundleDigest { get; init; }
/// <summary>
/// Export format version.
/// </summary>
public required string FormatVersion { get; init; }
/// <summary>
/// Compression algorithm used (none, gzip, zstd).
/// </summary>
public required string Compression { get; init; }
/// <summary>
/// Path to the exported bundle file.
/// </summary>
public string? ExportPath { get; init; }
}
/// <summary>
/// Result of snapshot validation.
/// </summary>
public sealed record SnapshotValidationResult
{
/// <summary>
/// Whether the snapshot is valid and can be replayed.
/// </summary>
public required bool IsValid { get; init; }
/// <summary>
/// Composite digest validated.
/// </summary>
public required string CompositeDigest { get; init; }
/// <summary>
/// Digest at snapshot time.
/// </summary>
public required string SnapshotDigest { get; init; }
/// <summary>
/// Current computed digest.
/// </summary>
public required string CurrentDigest { get; init; }
/// <summary>
/// Sources that are no longer available.
/// </summary>
public IReadOnlyList<string>? MissingSources { get; init; }
/// <summary>
/// Sources with detected drift (content changed since snapshot).
/// </summary>
public IReadOnlyList<SourceDrift> DriftedSources { get; init; } = [];
/// <summary>
/// Validation errors if any.
/// </summary>
public IReadOnlyList<string>? Errors { get; init; }
}
/// <summary>
/// Detected drift in a source since snapshot.
/// </summary>
public sealed record SourceDrift
{
/// <summary>
/// Source identifier.
/// </summary>
public required string SourceId { get; init; }
/// <summary>
/// Original digest at snapshot time.
/// </summary>
public required string SnapshotDigest { get; init; }
/// <summary>
/// Current digest.
/// </summary>
public required string CurrentDigest { get; init; }
/// <summary>
/// Number of records changed.
/// </summary>
public long? RecordsChanged { get; init; }
/// <summary>
/// Number of items added since snapshot.
/// </summary>
public int AddedItems { get; init; }
/// <summary>
/// Number of items removed since snapshot.
/// </summary>
public int RemovedItems { get; init; }
/// <summary>
/// Number of items modified since snapshot.
/// </summary>
public int ModifiedItems { get; init; }
}
/// <summary>
/// Options for exporting a snapshot bundle.
/// </summary>
public sealed record ExportBundleOptions
{
/// <summary>
/// Compression algorithm to use.
/// </summary>
public CompressionAlgorithm Compression { get; init; } = CompressionAlgorithm.Zstd;
/// <summary>
/// Whether to include the manifest file.
/// </summary>
public bool IncludeManifest { get; init; } = true;
/// <summary>
/// Whether to include checksum files.
/// </summary>
public bool IncludeChecksums { get; init; } = true;
}
/// <summary>
/// Options for importing a snapshot bundle.
/// </summary>
public sealed record ImportBundleOptions
{
    /// <summary>
    /// Whether to validate digests during import. Defaults to true.
    /// </summary>
    public bool ValidateDigests { get; init; } = true;
    /// <summary>
    /// Whether to allow overwriting existing snapshots. Defaults to false.
    /// </summary>
    public bool AllowOverwrite { get; init; }
}
/// <summary>
/// Compression algorithm for bundles. Values are stable and should not be
/// renumbered — they may be persisted in serialized bundle metadata.
/// </summary>
public enum CompressionAlgorithm
{
    /// <summary>No compression.</summary>
    None = 0,
    /// <summary>Gzip compression.</summary>
    Gzip = 1,
    /// <summary>Zstandard compression (default).</summary>
    Zstd = 2
}

View File

@@ -0,0 +1,40 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
/// <summary>
/// Storage interface for feed snapshot bundles.
/// </summary>
public interface IFeedSnapshotStore
{
    /// <summary>
    /// Saves a snapshot bundle.
    /// </summary>
    Task SaveAsync(FeedSnapshotBundle bundle, CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets a snapshot by composite digest.
    /// </summary>
    /// <returns>The bundle, or <c>null</c> when no snapshot has that digest.</returns>
    Task<FeedSnapshotBundle?> GetByDigestAsync(string compositeDigest, CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets a snapshot by ID.
    /// </summary>
    /// <returns>The bundle, or <c>null</c> when no snapshot has that ID.</returns>
    Task<FeedSnapshotBundle?> GetByIdAsync(string snapshotId, CancellationToken cancellationToken = default);
    /// <summary>
    /// Lists snapshots within a time range. A <c>null</c> <paramref name="from"/> or
    /// <paramref name="to"/> leaves that side of the range unbounded.
    /// </summary>
    IAsyncEnumerable<FeedSnapshotSummary> ListAsync(
        DateTimeOffset? from = null,
        DateTimeOffset? to = null,
        CancellationToken cancellationToken = default);
    /// <summary>
    /// Deletes a snapshot by composite digest.
    /// </summary>
    /// <returns>Presumably <c>true</c> when a snapshot was removed and <c>false</c> when none matched — confirm with implementations.</returns>
    Task<bool> DeleteAsync(string compositeDigest, CancellationToken cancellationToken = default);
}

View File

@@ -1,9 +1,6 @@
// -----------------------------------------------------------------------------
// IFeedSourceProvider.cs
// Sprint: SPRINT_20251226_007_BE_determinism_gaps
// Task: DET-GAP-01
// Description: Interface for individual feed source snapshot providers
// -----------------------------------------------------------------------------
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core.FeedSnapshot;
@@ -71,35 +68,3 @@ public interface IFeedSourceProvider
Stream inputStream,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Options for feed snapshot creation.
/// </summary>
public sealed record FeedSnapshotOptions
{
    /// <summary>
    /// Whether to include full content in the snapshot (vs. just metadata).
    /// Defaults to false (metadata-only snapshots).
    /// </summary>
    public bool IncludeContent { get; init; } = false;
    /// <summary>
    /// Whether to compress exported bundles. Defaults to true.
    /// </summary>
    public bool CompressExport { get; init; } = true;
    /// <summary>
    /// Compression algorithm for exports. Only meaningful when <see cref="CompressExport"/> is true.
    /// </summary>
    public CompressionAlgorithm Compression { get; init; } = CompressionAlgorithm.Zstd;
    /// <summary>
    /// Maximum age of snapshot before it's considered stale. Null disables the staleness check.
    /// </summary>
    public TimeSpan? MaxSnapshotAge { get; init; }
    /// <summary>
    /// Whether to verify snapshot integrity on import. Defaults to true.
    /// </summary>
    public bool VerifyOnImport { get; init; } = true;
}

View File

@@ -0,0 +1,17 @@
namespace StellaOps.Replay.Core.FeedSnapshot;
/// <summary>
/// Options for importing a snapshot bundle.
/// </summary>
public sealed record ImportBundleOptions
{
    /// <summary>
    /// Whether to validate digests during import. Defaults to true.
    /// </summary>
    public bool ValidateDigests { get; init; } = true;
    /// <summary>
    /// Whether to allow overwriting existing snapshots. Defaults to false.
    /// </summary>
    public bool AllowOverwrite { get; init; }
}

View File

@@ -0,0 +1,44 @@
using System.Collections.Generic;
namespace StellaOps.Replay.Core.FeedSnapshot;
/// <summary>
/// Result of snapshot validation.
/// </summary>
public sealed record SnapshotValidationResult
{
    /// <summary>
    /// Whether the snapshot is valid and can be replayed.
    /// </summary>
    public required bool IsValid { get; init; }
    /// <summary>
    /// Composite digest validated.
    /// </summary>
    public required string CompositeDigest { get; init; }
    /// <summary>
    /// Digest at snapshot time.
    /// </summary>
    public required string SnapshotDigest { get; init; }
    /// <summary>
    /// Current computed digest.
    /// </summary>
    public required string CurrentDigest { get; init; }
    /// <summary>
    /// Sources that are no longer available. Null when none were detected.
    /// </summary>
    public IReadOnlyList<string>? MissingSources { get; init; }
    /// <summary>
    /// Sources with detected drift (content changed since snapshot). Defaults to empty.
    /// </summary>
    public IReadOnlyList<SourceDrift> DriftedSources { get; init; } = [];
    /// <summary>
    /// Validation errors if any. Null when validation produced no errors.
    /// </summary>
    public IReadOnlyList<string>? Errors { get; init; }
}

View File

@@ -0,0 +1,42 @@
namespace StellaOps.Replay.Core.FeedSnapshot;
/// <summary>
/// Detected drift in a source since snapshot.
/// </summary>
public sealed record SourceDrift
{
    /// <summary>
    /// Source identifier.
    /// </summary>
    public required string SourceId { get; init; }
    /// <summary>
    /// Original digest at snapshot time.
    /// </summary>
    public required string SnapshotDigest { get; init; }
    /// <summary>
    /// Current digest (differs from <see cref="SnapshotDigest"/> when drift was detected).
    /// </summary>
    public required string CurrentDigest { get; init; }
    /// <summary>
    /// Number of records changed. Null when a per-record delta was not computed.
    /// </summary>
    public long? RecordsChanged { get; init; }
    /// <summary>
    /// Number of items added since snapshot.
    /// </summary>
    public int AddedItems { get; init; }
    /// <summary>
    /// Number of items removed since snapshot.
    /// </summary>
    public int RemovedItems { get; init; }
    /// <summary>
    /// Number of items modified since snapshot.
    /// </summary>
    public int ModifiedItems { get; init; }
}

View File

@@ -0,0 +1,45 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Replay.Core.FeedSnapshot;
/// <summary>
/// Snapshot of a single feed source.
/// </summary>
public sealed record SourceSnapshot
{
    /// <summary>
    /// Source identifier (e.g., "nvd", "ghsa", "osv", "policy", "vex").
    /// </summary>
    public required string SourceId { get; init; }
    /// <summary>
    /// Source-specific version or sequence number.
    /// </summary>
    public required string Version { get; init; }
    /// <summary>
    /// SHA-256 digest of the source content (sha256:hex).
    /// </summary>
    public required string Digest { get; init; }
    /// <summary>
    /// Number of records in this source at snapshot time.
    /// </summary>
    public required long RecordCount { get; init; }
    /// <summary>
    /// Number of items (alias for RecordCount for API compatibility).
    /// Clamped to <see cref="int.MaxValue"/> when RecordCount exceeds the int range.
    /// </summary>
    public int ItemCount => (int)Math.Min(RecordCount, int.MaxValue);
    /// <summary>
    /// UTC timestamp when this source was snapshotted.
    /// </summary>
    public DateTimeOffset CreatedAt { get; init; }
    /// <summary>
    /// Source-specific metadata. Null when the source supplied none.
    /// </summary>
    public IReadOnlyDictionary<string, string>? Metadata { get; init; }
}

View File

@@ -0,0 +1,20 @@
using System.Collections.Generic;
using System.Threading.Tasks;
namespace StellaOps.Replay.Core;
/// <summary>
/// Validates CAS references before manifest signing.
/// </summary>
public interface ICasValidator
{
    /// <summary>
    /// Validates that a CAS URI exists and matches the expected hash.
    /// </summary>
    /// <returns>A result describing success, a missing object, or a hash mismatch.</returns>
    Task<CasValidationResult> ValidateAsync(string casUri, string expectedHash);
    /// <summary>
    /// Validates multiple CAS references in batch.
    /// </summary>
    /// <returns>An aggregate result; per-reference failures are reported via <c>Errors</c>.</returns>
    // NOTE(review): no CancellationToken parameters — confirm whether cancellation is needed here.
    Task<CasValidationResult> ValidateBatchAsync(IEnumerable<CasReference> references);
}

View File

@@ -4,66 +4,6 @@ using System.Threading.Tasks;
namespace StellaOps.Replay.Core;
/// <summary>
/// Validates CAS references before manifest signing.
/// </summary>
public interface ICasValidator
{
    /// <summary>
    /// Validates that a CAS URI exists and matches the expected hash.
    /// </summary>
    /// <returns>A result describing success, a missing object, or a hash mismatch.</returns>
    Task<CasValidationResult> ValidateAsync(string casUri, string expectedHash);
    /// <summary>
    /// Validates multiple CAS references in batch.
    /// </summary>
    /// <returns>An aggregate result; per-reference failures are reported via <c>Errors</c>.</returns>
    Task<CasValidationResult> ValidateBatchAsync(IEnumerable<CasReference> references);
}
/// <summary>
/// A reference to a CAS object for validation.
/// </summary>
/// <param name="CasUri">URI addressing the CAS object.</param>
/// <param name="ExpectedHash">Hash the stored content is expected to match.</param>
/// <param name="HashAlgorithm">Optional algorithm name; null presumably means the default algorithm — confirm with the validator implementation.</param>
public sealed record CasReference(
    string CasUri,
    string ExpectedHash,
    string? HashAlgorithm = null
);
/// <summary>
/// Result of a CAS validation operation.
/// </summary>
/// <param name="IsValid">Whether validation succeeded.</param>
/// <param name="ActualHash">Hash actually computed, when available.</param>
/// <param name="Error">Single error message for non-batch failures.</param>
/// <param name="Errors">Per-reference errors for batch validation.</param>
public sealed record CasValidationResult(
    bool IsValid,
    string? ActualHash = null,
    string? Error = null,
    IReadOnlyList<CasValidationError>? Errors = null
)
{
    /// <summary>Successful validation; records the computed hash.</summary>
    public static CasValidationResult Success(string actualHash) =>
        new(true, actualHash);
    /// <summary>General failure with a single error message.</summary>
    public static CasValidationResult Failure(string error) =>
        new(false, Error: error);
    /// <summary>Failure because the CAS object does not exist.</summary>
    public static CasValidationResult NotFound(string casUri) =>
        new(false, Error: $"CAS object not found: {casUri}");
    /// <summary>Failure because the computed hash differs from the expected one.</summary>
    public static CasValidationResult HashMismatch(string casUri, string expected, string actual) =>
        new(false, ActualHash: actual, Error: $"Hash mismatch for {casUri}: expected {expected}, got {actual}");
    /// <summary>Aggregate result for batch validation; individual failures go in <paramref name="errors"/>.</summary>
    public static CasValidationResult BatchResult(bool isValid, IReadOnlyList<CasValidationError> errors) =>
        new(isValid, Errors: errors);
}
/// <summary>
/// Error details for a single CAS validation failure in a batch.
/// </summary>
/// <param name="CasUri">URI of the CAS object that failed validation.</param>
/// <param name="ErrorCode">Machine-readable error code.</param>
/// <param name="Message">Human-readable description of the failure.</param>
public sealed record CasValidationError(
    string CasUri,
    string ErrorCode,
    string Message
);
/// <summary>
/// In-memory CAS validator for testing and offline scenarios.
/// </summary>

View File

@@ -0,0 +1,88 @@
using System.Globalization;
using System.Linq;
using StellaOps.Replay.Core.Models;
namespace StellaOps.Replay.Core.Manifest;
public sealed partial class ReplayManifestWriter
{
    /// <summary>
    /// Builds the object graph that CreateManifest serializes into REPLAY.yaml.
    /// Anonymous-type member order here is the order keys appear in the emitted
    /// document — keep it stable so manifests stay deterministic.
    /// </summary>
    private static object BuildManifest(KnowledgeSnapshot snapshot, ReplayOutputs outputs)
    {
        return new
        {
            version = "1.0.0",
            snapshot = new
            {
                id = snapshot.SnapshotId,
                // "O" = ISO-8601 round-trip format; invariant culture keeps timestamps stable.
                createdAt = snapshot.CreatedAt.ToString("O", CultureInfo.InvariantCulture),
                artifact = snapshot.ArtifactDigest,
                previousId = snapshot.PreviousSnapshotId
            },
            inputs = new
            {
                // For each input kind, fall back to a conventional bundle-relative
                // path when no explicit BundlePath was captured.
                sboms = snapshot.Sboms.Select(s => new
                {
                    path = s.BundlePath ?? $"sboms/{s.Id}.json",
                    format = s.Format,
                    digest = s.Digest
                }),
                vex = snapshot.VexDocuments.Select(v => new
                {
                    path = v.BundlePath ?? $"vex/{v.Id}.json",
                    source = v.Source,
                    format = v.Format,
                    digest = v.Digest,
                    trustScore = v.TrustScore
                }),
                reachability = snapshot.ReachSubgraphs.Select(r => new
                {
                    path = r.BundlePath ?? $"reach/{r.EntryPoint}.json",
                    entryPoint = r.EntryPoint,
                    digest = r.Digest,
                    nodeCount = r.NodeCount,
                    edgeCount = r.EdgeCount
                }),
                exceptions = snapshot.Exceptions.Select(e => new
                {
                    path = $"exceptions/{e.ExceptionId}.json",
                    exceptionId = e.ExceptionId,
                    digest = e.Digest
                }),
                // Policy bundle path is fixed by bundle layout convention.
                policies = new
                {
                    bundlePath = "policies/bundle.tar.gz",
                    digest = snapshot.PolicyBundle.Digest,
                    version = snapshot.PolicyBundle.Version,
                    rulesHash = snapshot.PolicyBundle.RulesHash
                },
                feeds = snapshot.FeedVersions.Select(f => new
                {
                    feedId = f.FeedId,
                    name = f.Name,
                    version = f.Version,
                    digest = f.Digest,
                    fetchedAt = f.FetchedAt.ToString("O", CultureInfo.InvariantCulture)
                }),
                lattice = new
                {
                    type = snapshot.LatticeConfig.LatticeType,
                    configDigest = snapshot.LatticeConfig.ConfigDigest
                },
                trust = new
                {
                    configDigest = snapshot.TrustConfig.ConfigDigest,
                    defaultWeight = snapshot.TrustConfig.DefaultWeight
                }
            },
            outputs = new
            {
                verdictPath = outputs.VerdictPath,
                verdictDigest = outputs.VerdictDigest,
                findingsPath = outputs.FindingsPath,
                findingsDigest = outputs.FindingsDigest
            },
            // Seed names are assumed unique per snapshot; ToDictionary throws on duplicates.
            seeds = snapshot.RandomSeeds.ToDictionary(s => s.Name, s => s.Value),
            environment = snapshot.Environment
        };
    }
}

View File

@@ -1,7 +1,4 @@
using StellaOps.Replay.Core.Models;
using System.Globalization;
using System.Linq;
using YamlDotNet.Serialization;
using YamlDotNet.Serialization.NamingConventions;
@@ -10,89 +7,14 @@ namespace StellaOps.Replay.Core.Manifest;
/// <summary>
/// Writes REPLAY.yaml manifests for replay bundles.
/// </summary>
public sealed class ReplayManifestWriter
public sealed partial class ReplayManifestWriter
{
/// <summary>
/// Creates REPLAY.yaml content from a knowledge snapshot.
/// </summary>
public static string CreateManifest(KnowledgeSnapshot snapshot, ReplayOutputs outputs)
{
var manifest = new
{
version = "1.0.0",
snapshot = new
{
id = snapshot.SnapshotId,
createdAt = snapshot.CreatedAt.ToString("O", CultureInfo.InvariantCulture),
artifact = snapshot.ArtifactDigest,
previousId = snapshot.PreviousSnapshotId
},
inputs = new
{
sboms = snapshot.Sboms.Select(s => new
{
path = s.BundlePath ?? $"sboms/{s.Id}.json",
format = s.Format,
digest = s.Digest
}),
vex = snapshot.VexDocuments.Select(v => new
{
path = v.BundlePath ?? $"vex/{v.Id}.json",
source = v.Source,
format = v.Format,
digest = v.Digest,
trustScore = v.TrustScore
}),
reachability = snapshot.ReachSubgraphs.Select(r => new
{
path = r.BundlePath ?? $"reach/{r.EntryPoint}.json",
entryPoint = r.EntryPoint,
digest = r.Digest,
nodeCount = r.NodeCount,
edgeCount = r.EdgeCount
}),
exceptions = snapshot.Exceptions.Select(e => new
{
path = $"exceptions/{e.ExceptionId}.json",
exceptionId = e.ExceptionId,
digest = e.Digest
}),
policies = new
{
bundlePath = "policies/bundle.tar.gz",
digest = snapshot.PolicyBundle.Digest,
version = snapshot.PolicyBundle.Version,
rulesHash = snapshot.PolicyBundle.RulesHash
},
feeds = snapshot.FeedVersions.Select(f => new
{
feedId = f.FeedId,
name = f.Name,
version = f.Version,
digest = f.Digest,
fetchedAt = f.FetchedAt.ToString("O", CultureInfo.InvariantCulture)
}),
lattice = new
{
type = snapshot.LatticeConfig.LatticeType,
configDigest = snapshot.LatticeConfig.ConfigDigest
},
trust = new
{
configDigest = snapshot.TrustConfig.ConfigDigest,
defaultWeight = snapshot.TrustConfig.DefaultWeight
}
},
outputs = new
{
verdictPath = outputs.VerdictPath,
verdictDigest = outputs.VerdictDigest,
findingsPath = outputs.FindingsPath,
findingsDigest = outputs.FindingsDigest
},
seeds = snapshot.RandomSeeds.ToDictionary(s => s.Name, s => s.Value),
environment = snapshot.Environment
};
var manifest = BuildManifest(snapshot, outputs);
var serializer = new SerializerBuilder()
.WithNamingConvention(CamelCaseNamingConvention.Instance)
@@ -101,9 +23,3 @@ public sealed class ReplayManifestWriter
return serializer.Serialize(manifest);
}
}
/// <summary>
/// Output artifacts recorded in a replay manifest: the verdict document plus
/// optional findings, each with its digest.
/// </summary>
public sealed record ReplayOutputs(
    string VerdictPath,
    string VerdictDigest,
    string? FindingsPath,
    string? FindingsDigest);

View File

@@ -0,0 +1,7 @@
namespace StellaOps.Replay.Core.Manifest;
/// <summary>
/// Output artifacts recorded in a replay manifest: the verdict document plus
/// optional findings, each with its digest.
/// </summary>
public sealed record ReplayOutputs(
    string VerdictPath,
    string VerdictDigest,
    string? FindingsPath,
    string? FindingsDigest);

View File

@@ -0,0 +1,10 @@
namespace StellaOps.Replay.Core;
/// <summary>
/// A single validation error.
/// </summary>
/// <param name="ErrorCode">Machine-readable error code.</param>
/// <param name="Message">Human-readable description of the problem.</param>
/// <param name="Path">Optional location of the offending element within the manifest.</param>
public sealed record ManifestValidationError(
    string ErrorCode,
    string Message,
    string? Path = null
);

View File

@@ -0,0 +1,23 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace StellaOps.Replay.Core;
/// <summary>
/// Result of manifest validation.
/// </summary>
/// <param name="IsValid">Whether the manifest passed validation.</param>
/// <param name="Errors">Errors found; empty when validation succeeded.</param>
public sealed record ManifestValidationResult(
    bool IsValid,
    IReadOnlyList<ManifestValidationError> Errors
)
{
    /// <summary>A valid result with no errors.</summary>
    public static ManifestValidationResult Success()
    {
        return new ManifestValidationResult(true, []);
    }

    /// <summary>An invalid result carrying the given errors.</summary>
    public static ManifestValidationResult Failure(IEnumerable<ManifestValidationError> errors)
    {
        return new ManifestValidationResult(false, errors.ToList());
    }

    /// <summary>An invalid result carrying a single error.</summary>
    public static ManifestValidationResult Failure(ManifestValidationError error)
    {
        return new ManifestValidationResult(false, [error]);
    }
}

View File

@@ -0,0 +1,10 @@
using System;
using System.Collections.Immutable;
namespace StellaOps.Replay.Core.Models;
/// <summary>
/// Reference to a policy exception captured in a knowledge snapshot.
/// </summary>
/// <param name="ExceptionId">Unique identifier of the exception.</param>
/// <param name="Digest">Digest of the exception document.</param>
/// <param name="CveIds">CVE identifiers the exception covers.</param>
/// <param name="ExpiresAt">When the exception expires.</param>
public sealed record ExceptionRef(
    string ExceptionId,
    string Digest,
    ImmutableArray<string> CveIds,
    DateTimeOffset ExpiresAt);

View File

@@ -0,0 +1,10 @@
using System;
namespace StellaOps.Replay.Core.Models;
/// <summary>
/// Pinned version of an external data feed captured in a knowledge snapshot.
/// </summary>
/// <param name="FeedId">Feed identifier.</param>
/// <param name="Name">Human-readable feed name.</param>
/// <param name="Version">Feed version or sequence marker.</param>
/// <param name="Digest">Digest of the feed content.</param>
/// <param name="FetchedAt">When the feed was fetched.</param>
public sealed record FeedVersion(
    string FeedId,
    string Name,
    string Version,
    string Digest,
    DateTimeOffset FetchedAt);

View File

@@ -94,59 +94,3 @@ public sealed record KnowledgeSnapshot
/// </summary>
public string? Signature { get; init; }
}
/// <summary>Reference to an SBOM input.</summary>
public sealed record SbomRef(
    string Id,
    string Format, // cyclonedx-1.6, spdx-3.0.1
    string Digest,
    string? BundlePath);
/// <summary>Reference to a VEX document input.</summary>
public sealed record VexDocRef(
    string Id,
    string Source, // vendor, distro, nvd, internal
    string Format, // openvex, csaf
    string Digest,
    decimal TrustScore,
    string? BundlePath);
/// <summary>Reference to a reachability subgraph rooted at an entry point.</summary>
public sealed record ReachSubgraphRef(
    string EntryPoint,
    string Digest,
    int NodeCount,
    int EdgeCount,
    string? BundlePath);
/// <summary>Reference to a policy exception, with the CVEs it covers and its expiry.</summary>
public sealed record ExceptionRef(
    string ExceptionId,
    string Digest,
    ImmutableArray<string> CveIds,
    DateTimeOffset ExpiresAt);
/// <summary>Reference to the policy bundle in effect at snapshot time.</summary>
public sealed record PolicyBundleRef(
    string BundleId,
    string Digest,
    string Version,
    string RulesHash);
/// <summary>Pinned version of an external data feed.</summary>
public sealed record FeedVersion(
    string FeedId,
    string Name,
    string Version,
    string Digest,
    DateTimeOffset FetchedAt);
/// <summary>Trust weighting configuration: per-source weights plus a default.</summary>
public sealed record TrustConfig(
    ImmutableDictionary<string, decimal> SourceWeights,
    decimal DefaultWeight,
    string ConfigDigest);
/// <summary>Lattice configuration used for verdict combination.</summary>
public sealed record LatticeConfig(
    string LatticeType, // K4, Boolean, 8-state
    string JoinTable, // Base64 encoded join table
    string MeetTable, // Base64 encoded meet table
    string ConfigDigest);
/// <summary>Named random seed captured for deterministic replay.</summary>
public sealed record RandomSeed(
    string Name,
    long Value,
    string Purpose);

View File

@@ -0,0 +1,7 @@
namespace StellaOps.Replay.Core.Models;
/// <summary>
/// Lattice configuration used for verdict combination.
/// </summary>
/// <param name="LatticeType">Lattice kind, e.g. "K4", "Boolean", "8-state".</param>
/// <param name="JoinTable">Base64-encoded join table.</param>
/// <param name="MeetTable">Base64-encoded meet table.</param>
/// <param name="ConfigDigest">Digest of the configuration.</param>
public sealed record LatticeConfig(
    string LatticeType,
    string JoinTable,
    string MeetTable,
    string ConfigDigest);

View File

@@ -0,0 +1,7 @@
namespace StellaOps.Replay.Core.Models;
/// <summary>
/// Reference to the policy bundle in effect at snapshot time.
/// </summary>
/// <param name="BundleId">Bundle identifier.</param>
/// <param name="Digest">Digest of the bundle content.</param>
/// <param name="Version">Bundle version.</param>
/// <param name="RulesHash">Hash over the rule set.</param>
public sealed record PolicyBundleRef(
    string BundleId,
    string Digest,
    string Version,
    string RulesHash);

View File

@@ -0,0 +1,6 @@
namespace StellaOps.Replay.Core.Models;
/// <summary>
/// Named random seed captured for deterministic replay.
/// </summary>
/// <param name="Name">Seed name (used as the key in the manifest's seeds map).</param>
/// <param name="Value">Seed value.</param>
/// <param name="Purpose">What the seed is used for.</param>
public sealed record RandomSeed(
    string Name,
    long Value,
    string Purpose);

View File

@@ -0,0 +1,8 @@
namespace StellaOps.Replay.Core.Models;
/// <summary>
/// Reference to a reachability subgraph rooted at an entry point.
/// </summary>
/// <param name="EntryPoint">Entry point the subgraph was computed from.</param>
/// <param name="Digest">Digest of the subgraph content.</param>
/// <param name="NodeCount">Number of nodes in the subgraph.</param>
/// <param name="EdgeCount">Number of edges in the subgraph.</param>
/// <param name="BundlePath">Bundle-relative path; null means the conventional "reach/&lt;entryPoint&gt;.json" path is used.</param>
public sealed record ReachSubgraphRef(
    string EntryPoint,
    string Digest,
    int NodeCount,
    int EdgeCount,
    string? BundlePath);

View File

@@ -0,0 +1,63 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Linq;
namespace StellaOps.Replay.Core.Models;
public sealed partial record ReplayProof
{
    /// <summary>
    /// Serializer settings for canonical output: no whitespace, camelCase for
    /// POCO properties, nulls suppressed. Dictionary keys are emitted verbatim.
    /// </summary>
    private static readonly JsonSerializerOptions CanonicalOptions = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    };

    /// <summary>
    /// Converts the proof to a compact string format: "replay-proof:&lt;sha256&gt;".
    /// The hash is computed over the canonical JSON representation.
    /// </summary>
    /// <returns>Compact proof string suitable for ticket attachments.</returns>
    public string ToCompactString()
    {
        var canonicalJson = ToCanonicalJson();
        var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(canonicalJson));
        var hashHex = Convert.ToHexString(hashBytes).ToLowerInvariant();
        return $"replay-proof:{hashHex}";
    }

    /// <summary>
    /// Converts the proof to canonical JSON (RFC 8785 style: sorted keys, minimal whitespace).
    /// </summary>
    /// <returns>Canonical JSON string.</returns>
    public string ToCanonicalJson()
    {
        var ordered = new SortedDictionary<string, object?>(StringComparer.Ordinal)
        {
            ["artifactDigest"] = ArtifactDigest,
            ["bundleHash"] = BundleHash,
            ["durationMs"] = DurationMs,
            ["engineVersion"] = EngineVersion,
            ["metadata"] = Metadata is not null && Metadata.Count > 0
                ? new SortedDictionary<string, string>(Metadata, StringComparer.Ordinal)
                : null,
            ["policyVersion"] = PolicyVersion,
            // Fixed-format UTC timestamp keeps the hash stable across cultures.
            ["replayedAt"] = ReplayedAt.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss.fffZ", CultureInfo.InvariantCulture),
            ["schemaVersion"] = SchemaVersion,
            ["signatureKeyId"] = SignatureKeyId,
            ["signatureVerified"] = SignatureVerified,
            ["verdictMatches"] = VerdictMatches,
            ["verdictRoot"] = VerdictRoot,
        };

        // Drop null entries while keeping ordinal key ordering guaranteed.
        // (Previously the nulls were filtered with ToDictionary(), which relies
        // on Dictionary's insertion-order enumeration — an implementation
        // detail, not a contract. A SortedDictionary makes the canonical
        // ordering explicit.)
        var filtered = new SortedDictionary<string, object?>(StringComparer.Ordinal);
        foreach (var (key, value) in ordered)
        {
            if (value is not null)
            {
                filtered.Add(key, value);
            }
        }

        return JsonSerializer.Serialize(filtered, CanonicalOptions);
    }
}

View File

@@ -0,0 +1,39 @@
using System;
using System.Collections.Immutable;
namespace StellaOps.Replay.Core.Models;
public sealed partial record ReplayProof
{
    /// <summary>
    /// Creates a <see cref="ReplayProof"/> from execution results.
    /// </summary>
    /// <exception cref="ArgumentNullException">
    /// Thrown when <paramref name="bundleHash"/>, <paramref name="policyVersion"/>,
    /// <paramref name="verdictRoot"/>, or <paramref name="engineVersion"/> is null.
    /// </exception>
    public static ReplayProof FromExecutionResult(
        string bundleHash,
        string policyVersion,
        string verdictRoot,
        bool verdictMatches,
        long durationMs,
        DateTimeOffset replayedAt,
        string engineVersion,
        string? artifactDigest = null,
        bool? signatureVerified = null,
        string? signatureKeyId = null,
        ImmutableDictionary<string, string>? metadata = null)
    {
        // Throw helpers (NET 6+) keep the guard clauses separate from construction.
        ArgumentNullException.ThrowIfNull(bundleHash);
        ArgumentNullException.ThrowIfNull(policyVersion);
        ArgumentNullException.ThrowIfNull(verdictRoot);
        ArgumentNullException.ThrowIfNull(engineVersion);

        return new ReplayProof
        {
            BundleHash = bundleHash,
            PolicyVersion = policyVersion,
            VerdictRoot = verdictRoot,
            VerdictMatches = verdictMatches,
            DurationMs = durationMs,
            ReplayedAt = replayedAt,
            EngineVersion = engineVersion,
            ArtifactDigest = artifactDigest,
            SignatureVerified = signatureVerified,
            SignatureKeyId = signatureKeyId,
            Metadata = metadata,
        };
    }
}

View File

@@ -0,0 +1,34 @@
using System;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Replay.Core.Models;
public sealed partial record ReplayProof
{
    /// <summary>
    /// Parses a compact proof string and validates its hash.
    /// </summary>
    /// <param name="compactString">The compact proof string (replay-proof:&lt;hash&gt;).</param>
    /// <param name="originalJson">The original canonical JSON to verify against.</param>
    /// <returns>True if the hash matches, false otherwise.</returns>
    public static bool ValidateCompactString(string compactString, string originalJson)
    {
        const string Prefix = "replay-proof:";

        // Both inputs are required; blank input can never validate.
        if (string.IsNullOrWhiteSpace(compactString) || string.IsNullOrWhiteSpace(originalJson))
        {
            return false;
        }

        if (!compactString.StartsWith(Prefix, StringComparison.OrdinalIgnoreCase))
        {
            return false;
        }

        var claimedHash = compactString[Prefix.Length..];
        var computedHash = Convert.ToHexString(
            SHA256.HashData(Encoding.UTF8.GetBytes(originalJson))).ToLowerInvariant();

        // Hex digits compare case-insensitively.
        return string.Equals(claimedHash, computedHash, StringComparison.OrdinalIgnoreCase);
    }
}

View File

@@ -1,11 +1,8 @@
// <copyright file="ReplayProof.cs" company="Stella Operations">
// Copyright (c) Stella Operations. Licensed under BUSL-1.1.
// </copyright>
using System;
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Replay.Core.Models;
@@ -14,7 +11,7 @@ namespace StellaOps.Replay.Core.Models;
/// Compact proof artifact for audit trails and ticket attachments.
/// Captures the essential evidence that a replay was performed and matched expectations.
/// </summary>
public sealed record ReplayProof
public sealed partial record ReplayProof
{
/// <summary>
/// Schema version for forward compatibility.
@@ -87,118 +84,4 @@ public sealed record ReplayProof
/// </summary>
[JsonPropertyName("metadata")]
public ImmutableDictionary<string, string>? Metadata { get; init; }
/// <summary>
/// JSON serializer options for canonical serialization (sorted keys, no indentation).
/// </summary>
private static readonly JsonSerializerOptions CanonicalOptions = new()
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
// Note: We manually ensure sorted keys in ToCanonicalJson()
};
/// <summary>
/// Converts the proof to a compact string format: "replay-proof:&lt;sha256&gt;".
/// The hash is computed over the canonical JSON representation.
/// </summary>
/// <returns>Compact proof string suitable for ticket attachments.</returns>
public string ToCompactString()
{
var canonicalJson = ToCanonicalJson();
var hashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(canonicalJson));
var hashHex = Convert.ToHexString(hashBytes).ToLowerInvariant();
return $"replay-proof:{hashHex}";
}
/// <summary>
/// Converts the proof to canonical JSON (RFC 8785 style: sorted keys, minimal whitespace).
/// </summary>
/// <returns>Canonical JSON string.</returns>
public string ToCanonicalJson()
{
// Build ordered dictionary for canonical serialization
var ordered = new SortedDictionary<string, object?>(StringComparer.Ordinal)
{
["artifactDigest"] = ArtifactDigest,
["bundleHash"] = BundleHash,
["durationMs"] = DurationMs,
["engineVersion"] = EngineVersion,
["metadata"] = Metadata is not null && Metadata.Count > 0
? new SortedDictionary<string, string>(Metadata, StringComparer.Ordinal)
: null,
["policyVersion"] = PolicyVersion,
["replayedAt"] = ReplayedAt.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss.fffZ", System.Globalization.CultureInfo.InvariantCulture),
["schemaVersion"] = SchemaVersion,
["signatureKeyId"] = SignatureKeyId,
["signatureVerified"] = SignatureVerified,
["verdictMatches"] = VerdictMatches,
["verdictRoot"] = VerdictRoot,
};
// Remove null values for canonical form
var filtered = ordered.Where(kvp => kvp.Value is not null)
.ToDictionary(kvp => kvp.Key, kvp => kvp.Value);
return JsonSerializer.Serialize(filtered, CanonicalOptions);
}
/// <summary>
/// Parses a compact proof string and validates its hash.
/// </summary>
/// <param name="compactString">The compact proof string (replay-proof:&lt;hash&gt;).</param>
/// <param name="originalJson">The original canonical JSON to verify against.</param>
/// <returns>True if the hash matches, false otherwise.</returns>
public static bool ValidateCompactString(string compactString, string originalJson)
{
if (string.IsNullOrWhiteSpace(compactString) || string.IsNullOrWhiteSpace(originalJson))
{
return false;
}
const string prefix = "replay-proof:";
if (!compactString.StartsWith(prefix, StringComparison.OrdinalIgnoreCase))
{
return false;
}
var expectedHash = compactString[prefix.Length..];
var actualHashBytes = SHA256.HashData(Encoding.UTF8.GetBytes(originalJson));
var actualHash = Convert.ToHexString(actualHashBytes).ToLowerInvariant();
return string.Equals(expectedHash, actualHash, StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Creates a ReplayProof from execution results.
/// </summary>
public static ReplayProof FromExecutionResult(
string bundleHash,
string policyVersion,
string verdictRoot,
bool verdictMatches,
long durationMs,
DateTimeOffset replayedAt,
string engineVersion,
string? artifactDigest = null,
bool? signatureVerified = null,
string? signatureKeyId = null,
ImmutableDictionary<string, string>? metadata = null)
{
return new ReplayProof
{
BundleHash = bundleHash ?? throw new ArgumentNullException(nameof(bundleHash)),
PolicyVersion = policyVersion ?? throw new ArgumentNullException(nameof(policyVersion)),
VerdictRoot = verdictRoot ?? throw new ArgumentNullException(nameof(verdictRoot)),
VerdictMatches = verdictMatches,
DurationMs = durationMs,
ReplayedAt = replayedAt,
EngineVersion = engineVersion ?? throw new ArgumentNullException(nameof(engineVersion)),
ArtifactDigest = artifactDigest,
SignatureVerified = signatureVerified,
SignatureKeyId = signatureKeyId,
Metadata = metadata,
};
}
}

View File

@@ -0,0 +1,7 @@
namespace StellaOps.Replay.Core.Models;
/// <summary>
/// Reference to an SBOM input.
/// </summary>
/// <param name="Id">SBOM identifier.</param>
/// <param name="Format">SBOM format, e.g. "cyclonedx-1.6", "spdx-3.0.1".</param>
/// <param name="Digest">Digest of the SBOM content.</param>
/// <param name="BundlePath">Bundle-relative path; null means the conventional "sboms/&lt;id&gt;.json" path is used.</param>
public sealed record SbomRef(
    string Id,
    string Format,
    string Digest,
    string? BundlePath);

View File

@@ -0,0 +1,8 @@
using System.Collections.Immutable;
namespace StellaOps.Replay.Core.Models;
/// <summary>
/// Trust weighting configuration: per-source weights plus a default.
/// </summary>
/// <param name="SourceWeights">Weight assigned to each named source.</param>
/// <param name="DefaultWeight">Weight used for sources without an explicit entry.</param>
/// <param name="ConfigDigest">Digest of the configuration.</param>
public sealed record TrustConfig(
    ImmutableDictionary<string, decimal> SourceWeights,
    decimal DefaultWeight,
    string ConfigDigest);

View File

@@ -0,0 +1,9 @@
namespace StellaOps.Replay.Core.Models;
/// <summary>
/// Reference to a VEX document input.
/// </summary>
/// <param name="Id">Document identifier.</param>
/// <param name="Source">Origin of the document, e.g. "vendor", "distro", "nvd", "internal".</param>
/// <param name="Format">Document format, e.g. "openvex", "csaf".</param>
/// <param name="Digest">Digest of the document content.</param>
/// <param name="TrustScore">Trust score assigned to this document.</param>
/// <param name="BundlePath">Bundle-relative path; null means the conventional "vex/&lt;id&gt;.json" path is used.</param>
public sealed record VexDocRef(
    string Id,
    string Source,
    string Format,
    string Digest,
    decimal TrustScore,
    string? BundlePath);

Some files were not shown because too many files have changed in this diff Show More