Add shared .NET libraries for audit packs, canonical JSON, delta verdicts, and evidence budgets, plus AGENTS guidance for src/__Libraries.

This commit is contained in: master
2025-12-22 19:06:31 +02:00
parent dfaa2079aa
commit 4602ccc3a3
1444 changed files with 109919 additions and 8058 deletions

src/__Libraries/AGENTS.md Normal file
@@ -0,0 +1,23 @@
# __Libraries AGENTS
## Purpose & Scope
- Working directory: `src/__Libraries/` (shared .NET libraries) and `src/__Libraries/__Tests`.
- Roles: backend engineer, QA automation.
## Required Reading (treat as read before doing any work)
- `docs/README.md`
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
- `docs/modules/platform/architecture-overview.md`
- Relevant module dossiers referenced by the sprint.
- `docs/19_TEST_SUITE_OVERVIEW.md` (for test conventions)
## Working Agreements
- Target `net10.0` with C# preview where used in the repo.
- Determinism first: stable ordering, UTC timestamps, canonical JSON, fixed seeds where applicable.
- Offline-friendly: no runtime network calls from libraries or tests unless a sprint explicitly requires it.
- Cross-module impacts must be noted in the owning sprint file and related docs.
## Testing & Validation
- Add tests under `src/__Libraries/__Tests` with deterministic fixtures (see the sketch below).
- Prefer focused test projects per library.
- Validate with `dotnet build` and `dotnet test` for the affected projects.
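
A minimal sketch of the determinism convention (illustrative only, assuming an xUnit test project; `SampleFixtures` is a placeholder, while `CanonicalJsonSerializer` ships in `StellaOps.Canonicalization.Json`):

```csharp
[Fact]
public void Serialization_is_byte_stable()
{
    // Fixed seed and a fixed UTC clock keep the fixture deterministic across runs.
    var fixture = SampleFixtures.Create(seed: 42, now: new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero));
    Assert.Equal(
        CanonicalJsonSerializer.Serialize(fixture),
        CanonicalJsonSerializer.Serialize(fixture));
}
```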

@@ -0,0 +1,143 @@
namespace StellaOps.AuditPack.Models;
using System.Collections.Immutable;
/// <summary>
/// A sealed, self-contained audit pack for verification and compliance.
/// Contains all inputs and outputs required to reproduce and verify a scan.
/// </summary>
public sealed record AuditPack
{
/// <summary>
/// Unique identifier for this audit pack.
/// </summary>
public required string PackId { get; init; }
/// <summary>
/// Schema version for forward compatibility.
/// </summary>
public string SchemaVersion { get; init; } = "1.0.0";
/// <summary>
/// Human-readable name for this pack.
/// </summary>
public required string Name { get; init; }
/// <summary>
/// UTC timestamp when pack was created.
/// </summary>
public required DateTimeOffset CreatedAt { get; init; }
/// <summary>
/// Run manifest for replay.
/// </summary>
public required RunManifest RunManifest { get; init; }
/// <summary>
/// Evidence index linking verdict to all evidence.
/// </summary>
public required EvidenceIndex EvidenceIndex { get; init; }
/// <summary>
/// The verdict from the scan.
/// </summary>
public required Verdict Verdict { get; init; }
/// <summary>
/// Offline bundle manifest (contents stored separately).
/// </summary>
public required BundleManifest OfflineBundle { get; init; }
/// <summary>
/// All attestations in the evidence chain.
/// </summary>
public ImmutableArray<Attestation> Attestations { get; init; } = [];
/// <summary>
/// SBOM documents (CycloneDX and SPDX).
/// </summary>
public ImmutableArray<SbomDocument> Sboms { get; init; } = [];
/// <summary>
/// VEX documents applied.
/// </summary>
public ImmutableArray<VexDocument> VexDocuments { get; init; } = [];
/// <summary>
/// Trust roots for signature verification.
/// </summary>
public ImmutableArray<TrustRoot> TrustRoots { get; init; } = [];
/// <summary>
/// Pack contents inventory with paths and digests.
/// </summary>
public required PackContents Contents { get; init; }
/// <summary>
/// SHA-256 digest of this pack manifest (excluding signature).
/// </summary>
public string? PackDigest { get; init; }
/// <summary>
/// DSSE signature over the pack.
/// </summary>
public string? Signature { get; init; }
}
public sealed record PackContents
{
public ImmutableArray<PackFile> Files { get; init; } = [];
public long TotalSizeBytes { get; init; }
public int FileCount { get; init; }
}
public sealed record PackFile(
string RelativePath,
string Digest,
long SizeBytes,
PackFileType Type);
public enum PackFileType
{
Manifest,
RunManifest,
EvidenceIndex,
Verdict,
Sbom,
Vex,
Attestation,
Feed,
Policy,
TrustRoot,
Other
}
public sealed record SbomDocument(
string Id,
string Format,
string Content,
string Digest);
public sealed record VexDocument(
string Id,
string Format,
string Content,
string Digest);
public sealed record TrustRoot(
string Id,
string Type, // fulcio, rekor, custom
string Content,
string Digest);
public sealed record Attestation(
string Id,
string Type,
string Envelope, // DSSE envelope
string Digest);
// Placeholder types - these would reference actual domain models
public sealed record RunManifest(string ScanId, DateTimeOffset Timestamp);
public sealed record EvidenceIndex(ImmutableArray<string> EvidenceIds);
public sealed record Verdict(string VerdictId, string Status);
public sealed record BundleManifest(string BundleId, string Version);

@@ -0,0 +1,247 @@
namespace StellaOps.AuditPack.Services;
using StellaOps.AuditPack.Models;
using System.Collections.Immutable;
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
/// <summary>
/// Builds audit packs from scan results.
/// </summary>
public sealed class AuditPackBuilder : IAuditPackBuilder
{
/// <summary>
/// Builds an audit pack from a scan result.
/// </summary>
public async Task<AuditPack> BuildAsync(
ScanResult scanResult,
AuditPackOptions options,
CancellationToken ct = default)
{
var files = new List<PackFile>(); // TODO: populate the file inventory as pack files are materialized during export.
// Collect all evidence
var attestations = await CollectAttestationsAsync(scanResult, ct);
var sboms = CollectSboms(scanResult);
var vexDocuments = CollectVexDocuments(scanResult);
var trustRoots = await CollectTrustRootsAsync(options, ct);
// Build offline bundle subset (only required feeds/policies)
var bundleManifest = await BuildMinimalBundleAsync(scanResult, ct);
// Create pack structure
var pack = new AuditPack
{
PackId = Guid.NewGuid().ToString(),
SchemaVersion = "1.0.0",
Name = options.Name ?? $"audit-pack-{scanResult.ScanId}",
CreatedAt = DateTimeOffset.UtcNow,
RunManifest = new RunManifest(scanResult.ScanId, DateTimeOffset.UtcNow),
EvidenceIndex = new EvidenceIndex([]),
Verdict = new Verdict(scanResult.ScanId, "completed"),
OfflineBundle = bundleManifest,
Attestations = [.. attestations],
Sboms = [.. sboms],
VexDocuments = [.. vexDocuments],
TrustRoots = [.. trustRoots],
Contents = new PackContents
{
Files = [.. files],
TotalSizeBytes = files.Sum(f => f.SizeBytes),
FileCount = files.Count
}
};
return WithDigest(pack);
}
/// <summary>
/// Exports audit pack to archive file.
/// </summary>
public async Task ExportAsync(
AuditPack pack,
string outputPath,
ExportOptions options,
CancellationToken ct = default)
{
var tempDir = Path.Combine(Path.GetTempPath(), $"audit-pack-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
{
// Write pack manifest
var manifestJson = JsonSerializer.Serialize(pack, new JsonSerializerOptions
{
WriteIndented = true
});
await File.WriteAllTextAsync(Path.Combine(tempDir, "manifest.json"), manifestJson, ct);
// Write run manifest
var runManifestJson = JsonSerializer.Serialize(pack.RunManifest);
await File.WriteAllTextAsync(Path.Combine(tempDir, "run-manifest.json"), runManifestJson, ct);
// Write evidence index
var evidenceJson = JsonSerializer.Serialize(pack.EvidenceIndex);
await File.WriteAllTextAsync(Path.Combine(tempDir, "evidence-index.json"), evidenceJson, ct);
// Write verdict
var verdictJson = JsonSerializer.Serialize(pack.Verdict);
await File.WriteAllTextAsync(Path.Combine(tempDir, "verdict.json"), verdictJson, ct);
// Write SBOMs
var sbomsDir = Path.Combine(tempDir, "sboms");
Directory.CreateDirectory(sbomsDir);
foreach (var sbom in pack.Sboms)
{
await File.WriteAllTextAsync(
Path.Combine(sbomsDir, $"{sbom.Id}.json"),
sbom.Content,
ct);
}
// Write attestations
var attestationsDir = Path.Combine(tempDir, "attestations");
Directory.CreateDirectory(attestationsDir);
foreach (var att in pack.Attestations)
{
await File.WriteAllTextAsync(
Path.Combine(attestationsDir, $"{att.Id}.json"),
att.Envelope,
ct);
}
// Write VEX documents
if (pack.VexDocuments.Length > 0)
{
var vexDir = Path.Combine(tempDir, "vex");
Directory.CreateDirectory(vexDir);
foreach (var vex in pack.VexDocuments)
{
await File.WriteAllTextAsync(
Path.Combine(vexDir, $"{vex.Id}.json"),
vex.Content,
ct);
}
}
// Write trust roots
var certsDir = Path.Combine(tempDir, "trust-roots");
Directory.CreateDirectory(certsDir);
foreach (var root in pack.TrustRoots)
{
await File.WriteAllTextAsync(
Path.Combine(certsDir, $"{root.Id}.pem"),
root.Content,
ct);
}
// Create tar.gz archive
await CreateTarGzAsync(tempDir, outputPath, ct);
// Sign if requested
if (options.Sign && !string.IsNullOrEmpty(options.SigningKey))
{
await SignPackAsync(outputPath, options.SigningKey, ct);
}
}
finally
{
if (Directory.Exists(tempDir))
Directory.Delete(tempDir, recursive: true);
}
}
private static AuditPack WithDigest(AuditPack pack)
{
var json = JsonSerializer.Serialize(pack with { PackDigest = null, Signature = null });
var digest = ComputeDigest(json);
return pack with { PackDigest = digest };
}
private static string ComputeDigest(string content)
{
var bytes = Encoding.UTF8.GetBytes(content);
var hash = SHA256.HashData(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
}
private static async Task CreateTarGzAsync(string sourceDir, string outputPath, CancellationToken ct)
{
    var tarPath = outputPath.Replace(".tar.gz", ".tar");
    // Create the uncompressed tar first.
    await TarFile.CreateFromDirectoryAsync(sourceDir, tarPath, includeBaseDirectory: false, ct);
    // Compress to tar.gz, closing all streams before the intermediate tar is deleted.
    await using (var tarStream = File.OpenRead(tarPath))
    await using (var gzStream = File.Create(outputPath))
    await using (var gzip = new GZipStream(gzStream, CompressionLevel.Optimal))
    {
        await tarStream.CopyToAsync(gzip, ct);
    }
    // Clean up the uncompressed tar (safe now that the read stream is disposed).
    File.Delete(tarPath);
}
private static Task<ImmutableArray<Attestation>> CollectAttestationsAsync(ScanResult scanResult, CancellationToken ct)
{
// TODO: Collect attestations from storage
return Task.FromResult(ImmutableArray<Attestation>.Empty);
}
private static ImmutableArray<SbomDocument> CollectSboms(ScanResult scanResult)
{
// TODO: Collect SBOMs
return [];
}
private static ImmutableArray<VexDocument> CollectVexDocuments(ScanResult scanResult)
{
// TODO: Collect VEX documents
return [];
}
private static Task<ImmutableArray<TrustRoot>> CollectTrustRootsAsync(AuditPackOptions options, CancellationToken ct)
{
// TODO: Load trust roots from configuration
return Task.FromResult(ImmutableArray<TrustRoot>.Empty);
}
private static Task<BundleManifest> BuildMinimalBundleAsync(ScanResult scanResult, CancellationToken ct)
{
// TODO: Build minimal offline bundle
return Task.FromResult(new BundleManifest("bundle-1", "1.0.0"));
}
private static Task SignPackAsync(string packPath, string signingKey, CancellationToken ct)
{
// TODO: Sign pack with key
return Task.CompletedTask;
}
}
public interface IAuditPackBuilder
{
Task<AuditPack> BuildAsync(ScanResult scanResult, AuditPackOptions options, CancellationToken ct = default);
Task ExportAsync(AuditPack pack, string outputPath, ExportOptions options, CancellationToken ct = default);
}
public sealed record AuditPackOptions
{
public string? Name { get; init; }
public bool IncludeFeeds { get; init; } = true;
public bool IncludePolicies { get; init; } = true;
public bool MinimizeSize { get; init; } = false;
}
public sealed record ExportOptions
{
public bool Sign { get; init; } = true;
public string? SigningKey { get; init; }
public bool Compress { get; init; } = true;
}
// Placeholder for scan result
public sealed record ScanResult(string ScanId);
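// Illustrative usage sketch: building and exporting a pack end to end.
// The scan id, name, and output path below are example values, not real data.
public static class AuditPackBuilderUsageExample
{
    public static async Task RunAsync(CancellationToken ct)
    {
        IAuditPackBuilder builder = new AuditPackBuilder();
        var pack = await builder.BuildAsync(
            new ScanResult("scan-0001"),
            new AuditPackOptions { Name = "release-2025-01", MinimizeSize = true },
            ct);
        // Export as tar.gz; signing is skipped here because no key is configured.
        await builder.ExportAsync(
            pack,
            "/tmp/release-2025-01.audit.tar.gz",
            new ExportOptions { Sign = false },
            ct);
    }
}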

@@ -0,0 +1,205 @@
namespace StellaOps.AuditPack.Services;
using StellaOps.AuditPack.Models;
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text.Json;
/// <summary>
/// Imports and validates audit packs.
/// </summary>
public sealed class AuditPackImporter : IAuditPackImporter
{
/// <summary>
/// Imports an audit pack from archive.
/// </summary>
public async Task<ImportResult> ImportAsync(
string archivePath,
ImportOptions options,
CancellationToken ct = default)
{
var extractDir = options.ExtractDirectory ??
Path.Combine(Path.GetTempPath(), $"audit-pack-{Guid.NewGuid():N}");
try
{
// Extract archive
await ExtractTarGzAsync(archivePath, extractDir, ct);
// Load manifest
var manifestPath = Path.Combine(extractDir, "manifest.json");
if (!File.Exists(manifestPath))
{
return ImportResult.Failed("Manifest file not found");
}
var manifestJson = await File.ReadAllTextAsync(manifestPath, ct);
var pack = JsonSerializer.Deserialize<AuditPack>(manifestJson);
if (pack == null)
{
return ImportResult.Failed("Failed to deserialize manifest");
}
// Verify integrity
var integrityResult = await VerifyIntegrityAsync(pack, extractDir, ct);
if (!integrityResult.IsValid)
{
return ImportResult.Failed("Integrity verification failed", integrityResult.Errors);
}
// Verify signatures if present
SignatureResult? signatureResult = null;
if (options.VerifySignatures)
{
signatureResult = await VerifySignaturesAsync(pack, extractDir, ct);
if (!signatureResult.IsValid)
{
return ImportResult.Failed("Signature verification failed", signatureResult.Errors);
}
}
return new ImportResult
{
Success = true,
Pack = pack,
ExtractDirectory = extractDir,
IntegrityResult = integrityResult,
SignatureResult = signatureResult
};
}
catch (Exception ex)
{
return ImportResult.Failed($"Import failed: {ex.Message}");
}
}
private static async Task ExtractTarGzAsync(string archivePath, string extractDir, CancellationToken ct)
{
Directory.CreateDirectory(extractDir);
var tarPath = archivePath.Replace(".tar.gz", ".tar");
// Decompress gz
using (var gzStream = File.OpenRead(archivePath))
using (var gzip = new GZipStream(gzStream, CompressionMode.Decompress))
using (var tarStream = File.Create(tarPath))
{
await gzip.CopyToAsync(tarStream, ct);
}
// Extract tar
await TarFile.ExtractToDirectoryAsync(tarPath, extractDir, overwriteFiles: true, ct);
// Clean up tar
File.Delete(tarPath);
}
private static async Task<IntegrityResult> VerifyIntegrityAsync(
AuditPack pack,
string extractDir,
CancellationToken ct)
{
var errors = new List<string>();
// Verify each file digest
foreach (var file in pack.Contents.Files)
{
var filePath = Path.Combine(extractDir, file.RelativePath);
if (!File.Exists(filePath))
{
errors.Add($"Missing file: {file.RelativePath}");
continue;
}
var content = await File.ReadAllBytesAsync(filePath, ct);
var actualDigest = Convert.ToHexString(SHA256.HashData(content)).ToLowerInvariant();
if (actualDigest != file.Digest.ToLowerInvariant())
{
errors.Add($"Digest mismatch for {file.RelativePath}: expected {file.Digest}, got {actualDigest}");
}
}
// Verify pack digest
if (!string.IsNullOrEmpty(pack.PackDigest))
{
var computed = ComputePackDigest(pack);
if (computed != pack.PackDigest)
{
errors.Add($"Pack digest mismatch: expected {pack.PackDigest}, got {computed}");
}
}
return new IntegrityResult(errors.Count == 0, errors);
}
private static async Task<SignatureResult> VerifySignaturesAsync(
AuditPack pack,
string extractDir,
CancellationToken ct)
{
var errors = new List<string>();
// Load signature
var signaturePath = Path.Combine(extractDir, "signature.sig");
if (!File.Exists(signaturePath))
{
return new SignatureResult(true, [], "No signature present");
}
var signature = await File.ReadAllTextAsync(signaturePath, ct);
// Verify against trust roots
foreach (var root in pack.TrustRoots)
{
// TODO: Implement actual signature verification
// For now, just check that trust root exists
if (!string.IsNullOrEmpty(root.Content))
{
return new SignatureResult(true, [], $"Verified with {root.Id}");
}
}
errors.Add("Signature does not verify against any trust root");
return new SignatureResult(false, errors);
}
private static string ComputePackDigest(AuditPack pack)
{
var json = JsonSerializer.Serialize(pack with { PackDigest = null, Signature = null });
var bytes = System.Text.Encoding.UTF8.GetBytes(json);
var hash = SHA256.HashData(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
}
}
public interface IAuditPackImporter
{
Task<ImportResult> ImportAsync(string archivePath, ImportOptions options, CancellationToken ct = default);
}
public sealed record ImportOptions
{
public string? ExtractDirectory { get; init; }
public bool VerifySignatures { get; init; } = true;
public bool KeepExtracted { get; init; } = false;
}
public sealed record ImportResult
{
public bool Success { get; init; }
public AuditPack? Pack { get; init; }
public string? ExtractDirectory { get; init; }
public IntegrityResult? IntegrityResult { get; init; }
public SignatureResult? SignatureResult { get; init; }
public IReadOnlyList<string>? Errors { get; init; }
public static ImportResult Failed(string message, IReadOnlyList<string>? errors = null) =>
new() { Success = false, Errors = errors != null ? [message, .. errors] : [message] };
}
public sealed record IntegrityResult(bool IsValid, IReadOnlyList<string> Errors);
public sealed record SignatureResult(bool IsValid, IReadOnlyList<string> Errors, string? Message = null);
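// Illustrative usage sketch: importing a previously exported pack and checking integrity.
// The archive path is a placeholder.
public static class AuditPackImporterUsageExample
{
    public static async Task<AuditPack?> RunAsync(CancellationToken ct)
    {
        IAuditPackImporter importer = new AuditPackImporter();
        var result = await importer.ImportAsync(
            "/tmp/release-2025-01.audit.tar.gz",
            new ImportOptions { VerifySignatures = true },
            ct);
        if (!result.Success)
        {
            // Errors carries the failure message plus any integrity/signature details.
            Console.WriteLine(string.Join("; ", result.Errors ?? []));
            return null;
        }
        return result.Pack;
    }
}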

@@ -0,0 +1,125 @@
namespace StellaOps.AuditPack.Services;
using StellaOps.AuditPack.Models;
using System.Text.Json;
/// <summary>
/// Replays scans from imported audit packs and compares results.
/// </summary>
public sealed class AuditPackReplayer : IAuditPackReplayer
{
/// <summary>
/// Replays a scan from an imported audit pack.
/// </summary>
public async Task<ReplayComparisonResult> ReplayAsync(
ImportResult importResult,
CancellationToken ct = default)
{
if (!importResult.Success || importResult.Pack == null)
{
return ReplayComparisonResult.Failed("Invalid import result");
}
var pack = importResult.Pack;
// Load offline bundle from pack
var bundlePath = Path.Combine(importResult.ExtractDirectory!, "bundle");
// TODO: Load bundle using bundle loader
// await _bundleLoader.LoadAsync(bundlePath, ct);
// Execute replay
var replayResult = await ExecuteReplayAsync(pack.RunManifest, ct);
if (!replayResult.Success)
{
return ReplayComparisonResult.Failed(
$"Replay failed: {string.Join(", ", replayResult.Errors ?? [])}");
}
// Compare verdicts
var comparison = CompareVerdicts(pack.Verdict, replayResult.Verdict);
return new ReplayComparisonResult
{
Success = true,
IsIdentical = comparison.IsIdentical,
OriginalVerdictDigest = pack.Verdict.VerdictId,
ReplayedVerdictDigest = replayResult.VerdictDigest,
Differences = comparison.Differences,
ReplayDurationMs = replayResult.DurationMs
};
}
private static async Task<ReplayResult> ExecuteReplayAsync(
RunManifest runManifest,
CancellationToken ct)
{
// TODO: Implement actual replay execution
// This would call the scanner with frozen time and offline bundle
await Task.CompletedTask;
return new ReplayResult
{
Success = true,
Verdict = new Verdict("replayed-verdict", "completed"),
VerdictDigest = "placeholder-digest",
DurationMs = 1000
};
}
private static VerdictComparison CompareVerdicts(Verdict original, Verdict? replayed)
{
if (replayed == null)
return new VerdictComparison(false, ["Replayed verdict is null"]);
var originalJson = JsonSerializer.Serialize(original);
var replayedJson = JsonSerializer.Serialize(replayed);
if (originalJson == replayedJson)
return new VerdictComparison(true, []);
// Find differences
var differences = FindJsonDifferences(originalJson, replayedJson);
return new VerdictComparison(false, differences);
}
private static List<string> FindJsonDifferences(string json1, string json2)
{
// TODO: Implement proper JSON diff
// For now, just report that they differ
if (json1 == json2)
return [];
return ["Verdicts differ"];
}
}
public interface IAuditPackReplayer
{
Task<ReplayComparisonResult> ReplayAsync(ImportResult importResult, CancellationToken ct = default);
}
public sealed record ReplayComparisonResult
{
public bool Success { get; init; }
public bool IsIdentical { get; init; }
public string? OriginalVerdictDigest { get; init; }
public string? ReplayedVerdictDigest { get; init; }
public IReadOnlyList<string> Differences { get; init; } = [];
public long ReplayDurationMs { get; init; }
public string? Error { get; init; }
public static ReplayComparisonResult Failed(string error) =>
new() { Success = false, Error = error };
}
public sealed record VerdictComparison(bool IsIdentical, IReadOnlyList<string> Differences);
public sealed record ReplayResult
{
public bool Success { get; init; }
public Verdict? Verdict { get; init; }
public string? VerdictDigest { get; init; }
public long DurationMs { get; init; }
public IReadOnlyList<string>? Errors { get; init; }
}
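// Illustrative usage sketch: replaying an imported pack and inspecting the comparison.
// Assumes 'importResult' came from AuditPackImporter.ImportAsync.
public static class AuditPackReplayerUsageExample
{
    public static async Task<bool> RunAsync(ImportResult importResult, CancellationToken ct)
    {
        IAuditPackReplayer replayer = new AuditPackReplayer();
        var comparison = await replayer.ReplayAsync(importResult, ct);
        if (!comparison.Success)
        {
            Console.WriteLine($"Replay failed: {comparison.Error}");
            return false;
        }
        // IsIdentical means the replayed verdict matched the recorded one exactly.
        Console.WriteLine(comparison.IsIdentical
            ? "Verdict reproduced exactly."
            : $"Verdict drift: {string.Join("; ", comparison.Differences)}");
        return comparison.IsIdentical;
    }
}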

@@ -0,0 +1,10 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
</PropertyGroup>
</Project>

@@ -0,0 +1,48 @@
using System.Globalization;
using System.Text;
namespace StellaOps.Canonicalization.Culture;
/// <summary>
/// Culture-independent string helpers: invariant-culture formatting and parsing, ordinal comparison.
/// </summary>
public static class InvariantCulture
{
public static IDisposable Scope()
{
var original = CultureInfo.CurrentCulture;
CultureInfo.CurrentCulture = CultureInfo.InvariantCulture;
CultureInfo.CurrentUICulture = CultureInfo.InvariantCulture;
return new CultureScope(original);
}
public static int Compare(string? a, string? b) => string.Compare(a, b, StringComparison.Ordinal);
public static string FormatDecimal(decimal value) => value.ToString("G", CultureInfo.InvariantCulture);
public static decimal ParseDecimal(string value) => decimal.Parse(value, CultureInfo.InvariantCulture);
private sealed class CultureScope : IDisposable
{
private readonly CultureInfo _original;
public CultureScope(CultureInfo original) => _original = original;
public void Dispose()
{
CultureInfo.CurrentCulture = _original;
CultureInfo.CurrentUICulture = _original;
}
}
}
/// <summary>
/// UTF-8 encoding utilities.
/// </summary>
public static class Utf8Encoding
{
public static string Normalize(string input)
{
return input.Normalize(NormalizationForm.FormC);
}
public static byte[] GetBytes(string input) => Encoding.UTF8.GetBytes(Normalize(input));
}
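// Illustrative usage sketch: pinning the culture for a block of formatting work.
// The decimal value is an arbitrary example.
public static class InvariantCultureUsageExample
{
    public static string FormatScore(decimal score)
    {
        using (InvariantCulture.Scope())
        {
            // Inside the scope the thread culture is invariant, so "1234.5" never becomes "1234,5".
            return InvariantCulture.FormatDecimal(score);
        }
    }
}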

@@ -0,0 +1,95 @@
using System.Globalization;
using System.Security.Cryptography;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Canonicalization.Json;
/// <summary>
/// Produces canonical JSON output with deterministic ordering.
/// Implements RFC 8785 principles for stable output.
/// </summary>
public static class CanonicalJsonSerializer
{
private static readonly JsonSerializerOptions Options = new()
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DictionaryKeyPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
NumberHandling = JsonNumberHandling.Strict,
Converters =
{
new StableDictionaryConverterFactory(),
new Iso8601DateTimeConverter()
}
};
public static string Serialize<T>(T value)
=> JsonSerializer.Serialize(value, Options);
public static (string Json, string Digest) SerializeWithDigest<T>(T value)
{
var json = Serialize(value);
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json));
var digest = Convert.ToHexString(hash).ToLowerInvariant();
return (json, digest);
}
public static T Deserialize<T>(string json)
{
return JsonSerializer.Deserialize<T>(json, Options)
?? throw new InvalidOperationException($"Failed to deserialize {typeof(T).Name}");
}
}
/// <summary>
/// Converter factory that orders dictionary keys ordinally so output is stable.
/// </summary>
public sealed class StableDictionaryConverterFactory : JsonConverterFactory
{
    public override bool CanConvert(Type typeToConvert)
    {
        if (!typeToConvert.IsGenericType) return false;
        var generic = typeToConvert.GetGenericTypeDefinition();
        return generic == typeof(Dictionary<,>) || generic == typeof(IDictionary<,>) || generic == typeof(IReadOnlyDictionary<,>);
    }
    public override JsonConverter CreateConverter(Type typeToConvert, JsonSerializerOptions options)
    {
        // Close the converter over the declared dictionary type so it stays compatible
        // with Dictionary<,>, IDictionary<,>, and IReadOnlyDictionary<,> properties alike.
        var args = typeToConvert.GetGenericArguments();
        var converterType = typeof(StableDictionaryConverter<,,>).MakeGenericType(typeToConvert, args[0], args[1]);
        return (JsonConverter)Activator.CreateInstance(converterType)!;
    }
}
public sealed class StableDictionaryConverter<TDictionary, TKey, TValue> : JsonConverter<TDictionary>
    where TDictionary : IEnumerable<KeyValuePair<TKey, TValue>>
    where TKey : notnull
{
    public override TDictionary Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        // Read manually instead of re-entering JsonSerializer for the dictionary type,
        // which would resolve back to this converter and recurse indefinitely.
        if (reader.TokenType != JsonTokenType.StartObject)
            throw new JsonException($"Expected StartObject but found {reader.TokenType}.");
        var dictionary = new Dictionary<TKey, TValue>();
        while (reader.Read() && reader.TokenType != JsonTokenType.EndObject)
        {
            var propertyName = reader.GetString()!;
            // Non-string keys are converted invariantly; canonical payloads use string keys.
            var key = (TKey)(typeof(TKey) == typeof(string)
                ? propertyName
                : Convert.ChangeType(propertyName, typeof(TKey), CultureInfo.InvariantCulture));
            reader.Read();
            dictionary[key] = JsonSerializer.Deserialize<TValue>(ref reader, options)!;
        }
        return (TDictionary)(object)dictionary;
    }
    public override void Write(Utf8JsonWriter writer, TDictionary value, JsonSerializerOptions options)
    {
        writer.WriteStartObject();
        // Ordinal key ordering keeps canonical output byte-stable across runs.
        foreach (var kvp in value.OrderBy(x => x.Key.ToString(), StringComparer.Ordinal))
        {
            writer.WritePropertyName(kvp.Key.ToString() ?? string.Empty);
            JsonSerializer.Serialize(writer, kvp.Value, options);
        }
        writer.WriteEndObject();
    }
}
/// <summary>
/// Converter for ISO 8601 date/time with UTC normalization.
/// </summary>
public sealed class Iso8601DateTimeConverter : JsonConverter<DateTimeOffset>
{
public override DateTimeOffset Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
=> DateTimeOffset.Parse(reader.GetString()!, CultureInfo.InvariantCulture);
public override void Write(Utf8JsonWriter writer, DateTimeOffset value, JsonSerializerOptions options)
=> writer.WriteStringValue(value.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss.fffZ", CultureInfo.InvariantCulture));
}
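// Illustrative usage sketch: the same dictionary serializes to the same bytes and digest
// regardless of insertion order. The record shape and values are examples only.
public static class CanonicalJsonUsageExample
{
    private sealed record Sample(IDictionary<string, int> Counts, DateTimeOffset ObservedAt);

    public static bool DigestsMatch()
    {
        var observedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var a = new Sample(new Dictionary<string, int> { ["zlib"] = 1, ["openssl"] = 2 }, observedAt);
        var b = new Sample(new Dictionary<string, int> { ["openssl"] = 2, ["zlib"] = 1 }, observedAt);
        var (_, digestA) = CanonicalJsonSerializer.SerializeWithDigest(a);
        var (_, digestB) = CanonicalJsonSerializer.SerializeWithDigest(b);
        return digestA == digestB; // expected: true, keys are emitted in ordinal order
    }
}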

@@ -0,0 +1,79 @@
namespace StellaOps.Canonicalization.Ordering;
/// <summary>
/// Provides stable ordering for SBOM packages.
/// Order: purl -> name -> version -> type.
/// </summary>
public static class PackageOrderer
{
public static IOrderedEnumerable<T> StableOrder<T>(
this IEnumerable<T> packages,
Func<T, string?> getPurl,
Func<T, string?> getName,
Func<T, string?> getVersion,
Func<T, string?> getType)
{
return packages
.OrderBy(p => getPurl(p) ?? string.Empty, StringComparer.Ordinal)
.ThenBy(p => getName(p) ?? string.Empty, StringComparer.Ordinal)
.ThenBy(p => getVersion(p) ?? string.Empty, StringComparer.Ordinal)
.ThenBy(p => getType(p) ?? string.Empty, StringComparer.Ordinal);
}
}
/// <summary>
/// Provides stable ordering for vulnerabilities.
/// Order: id -> source -> severity.
/// </summary>
public static class VulnerabilityOrderer
{
public static IOrderedEnumerable<T> StableOrder<T>(
this IEnumerable<T> vulnerabilities,
Func<T, string> getId,
Func<T, string?> getSource,
Func<T, decimal?> getSeverity)
{
return vulnerabilities
.OrderBy(v => getId(v), StringComparer.Ordinal)
.ThenBy(v => getSource(v) ?? string.Empty, StringComparer.Ordinal)
.ThenByDescending(v => getSeverity(v) ?? 0);
}
}
/// <summary>
/// Provides stable ordering for graph edges.
/// Order: source -> target -> type.
/// </summary>
public static class EdgeOrderer
{
public static IOrderedEnumerable<T> StableOrder<T>(
this IEnumerable<T> edges,
Func<T, string> getSource,
Func<T, string> getTarget,
Func<T, string?> getType)
{
return edges
.OrderBy(e => getSource(e), StringComparer.Ordinal)
.ThenBy(e => getTarget(e), StringComparer.Ordinal)
.ThenBy(e => getType(e) ?? string.Empty, StringComparer.Ordinal);
}
}
/// <summary>
/// Provides stable ordering for evidence lists.
/// Order: type -> id -> digest.
/// </summary>
public static class EvidenceOrderer
{
public static IOrderedEnumerable<T> StableOrder<T>(
this IEnumerable<T> evidence,
Func<T, string> getType,
Func<T, string> getId,
Func<T, string?> getDigest)
{
return evidence
.OrderBy(e => getType(e), StringComparer.Ordinal)
.ThenBy(e => getId(e), StringComparer.Ordinal)
.ThenBy(e => getDigest(e) ?? string.Empty, StringComparer.Ordinal);
}
}
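// Illustrative usage sketch: applying the package ordering to an example component list
// before serialization so output order never depends on scanner enumeration order.
public static class OrderingUsageExample
{
    private sealed record Pkg(string? Purl, string Name, string Version, string Type);

    public static IReadOnlyList<Pkg> Normalize(IEnumerable<Pkg> packages)
        => packages
            .StableOrder(p => p.Purl, p => p.Name, p => p.Version, p => p.Type)
            .ToList();
}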

@@ -0,0 +1,12 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="System.Text.Json" Version="9.0.0" />
</ItemGroup>
</Project>

@@ -0,0 +1,98 @@
using System.Text.Json;
using StellaOps.Canonicalization.Json;
namespace StellaOps.Canonicalization.Verification;
/// <summary>
/// Verifies that serialization produces identical output across runs.
/// </summary>
public sealed class DeterminismVerifier
{
public DeterminismResult Verify<T>(T value, int iterations = 10)
{
var outputs = new HashSet<string>(StringComparer.Ordinal);
var digests = new HashSet<string>(StringComparer.Ordinal);
for (var i = 0; i < iterations; i++)
{
var (json, digest) = CanonicalJsonSerializer.SerializeWithDigest(value);
outputs.Add(json);
digests.Add(digest);
}
return new DeterminismResult(
IsDeterministic: outputs.Count == 1 && digests.Count == 1,
UniqueOutputs: outputs.Count,
UniqueDigests: digests.Count,
SampleOutput: outputs.FirstOrDefault() ?? string.Empty,
SampleDigest: digests.FirstOrDefault() ?? string.Empty);
}
public ComparisonResult Compare(string jsonA, string jsonB)
{
if (string.Equals(jsonA, jsonB, StringComparison.Ordinal))
{
return new ComparisonResult(true, []);
}
var differences = FindDifferences(jsonA, jsonB);
return new ComparisonResult(false, differences);
}
private static IReadOnlyList<string> FindDifferences(string a, string b)
{
var differences = new List<string>();
using var docA = JsonDocument.Parse(a);
using var docB = JsonDocument.Parse(b);
CompareElements(docA.RootElement, docB.RootElement, "$", differences);
return differences;
}
private static void CompareElements(JsonElement a, JsonElement b, string path, List<string> differences)
{
if (a.ValueKind != b.ValueKind)
{
differences.Add($"{path}: type mismatch ({a.ValueKind} vs {b.ValueKind})");
return;
}
switch (a.ValueKind)
{
case JsonValueKind.Object:
var propsA = a.EnumerateObject().ToDictionary(p => p.Name, StringComparer.Ordinal);
var propsB = b.EnumerateObject().ToDictionary(p => p.Name, StringComparer.Ordinal);
foreach (var key in propsA.Keys.Union(propsB.Keys).OrderBy(k => k, StringComparer.Ordinal))
{
var hasA = propsA.TryGetValue(key, out var propA);
var hasB = propsB.TryGetValue(key, out var propB);
if (!hasA) differences.Add($"{path}.{key}: missing in first");
else if (!hasB) differences.Add($"{path}.{key}: missing in second");
else CompareElements(propA.Value, propB.Value, $"{path}.{key}", differences);
}
break;
case JsonValueKind.Array:
var arrA = a.EnumerateArray().ToList();
var arrB = b.EnumerateArray().ToList();
if (arrA.Count != arrB.Count)
differences.Add($"{path}: array length mismatch ({arrA.Count} vs {arrB.Count})");
for (var i = 0; i < Math.Min(arrA.Count, arrB.Count); i++)
CompareElements(arrA[i], arrB[i], $"{path}[{i}]", differences);
break;
default:
if (a.GetRawText() != b.GetRawText())
differences.Add($"{path}: value mismatch");
break;
}
}
}
public sealed record DeterminismResult(
bool IsDeterministic,
int UniqueOutputs,
int UniqueDigests,
string SampleOutput,
string SampleDigest);
public sealed record ComparisonResult(
bool IsIdentical,
IReadOnlyList<string> Differences);
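// Illustrative usage sketch: asserting that a payload serializes to one canonical form.
// The payload shape is an arbitrary example.
public static class DeterminismVerifierUsageExample
{
    public static void AssertDeterministic()
    {
        var verifier = new DeterminismVerifier();
        var payload = new { Component = "openssl", Version = "3.0.13", Score = 7.5m };
        var result = verifier.Verify(payload, iterations: 25);
        if (!result.IsDeterministic)
        {
            throw new InvalidOperationException(
                $"Non-deterministic serialization: {result.UniqueOutputs} distinct outputs.");
        }
    }
}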

@@ -0,0 +1,234 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using StellaOps.DeltaVerdict.Models;
namespace StellaOps.DeltaVerdict.Engine;
public interface IDeltaComputationEngine
{
DeltaVerdict.Models.DeltaVerdict ComputeDelta(Verdict baseVerdict, Verdict headVerdict);
}
public sealed class DeltaComputationEngine : IDeltaComputationEngine
{
private readonly TimeProvider _timeProvider;
public DeltaComputationEngine(TimeProvider? timeProvider = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
}
public DeltaVerdict.Models.DeltaVerdict ComputeDelta(Verdict baseVerdict, Verdict headVerdict)
{
ArgumentNullException.ThrowIfNull(baseVerdict);
ArgumentNullException.ThrowIfNull(headVerdict);
var baseComponents = baseVerdict.Components
.ToDictionary(c => c.Purl, c => c, StringComparer.Ordinal);
var headComponents = headVerdict.Components
.ToDictionary(c => c.Purl, c => c, StringComparer.Ordinal);
var addedComponents = ComputeAddedComponents(baseComponents, headComponents);
var removedComponents = ComputeRemovedComponents(baseComponents, headComponents);
var changedComponents = ComputeChangedComponents(baseComponents, headComponents);
var baseVulns = baseVerdict.Vulnerabilities
.ToDictionary(v => v.Id, v => v, StringComparer.Ordinal);
var headVulns = headVerdict.Vulnerabilities
.ToDictionary(v => v.Id, v => v, StringComparer.Ordinal);
var addedVulns = ComputeAddedVulnerabilities(baseVulns, headVulns);
var removedVulns = ComputeRemovedVulnerabilities(baseVulns, headVulns);
var changedStatuses = ComputeStatusChanges(baseVulns, headVulns);
var riskDelta = ComputeRiskScoreDelta(baseVerdict.RiskScore, headVerdict.RiskScore);
var totalChanges = addedComponents.Length + removedComponents.Length + changedComponents.Length
+ addedVulns.Length + removedVulns.Length + changedStatuses.Length;
var summary = new DeltaSummary(
ComponentsAdded: addedComponents.Length,
ComponentsRemoved: removedComponents.Length,
ComponentsChanged: changedComponents.Length,
VulnerabilitiesAdded: addedVulns.Length,
VulnerabilitiesRemoved: removedVulns.Length,
VulnerabilityStatusChanges: changedStatuses.Length,
TotalChanges: totalChanges,
Magnitude: ClassifyMagnitude(totalChanges));
return new DeltaVerdict.Models.DeltaVerdict
{
DeltaId = ComputeDeltaId(baseVerdict, headVerdict),
SchemaVersion = "1.0.0",
BaseVerdict = CreateVerdictReference(baseVerdict),
HeadVerdict = CreateVerdictReference(headVerdict),
AddedComponents = addedComponents,
RemovedComponents = removedComponents,
ChangedComponents = changedComponents,
AddedVulnerabilities = addedVulns,
RemovedVulnerabilities = removedVulns,
ChangedVulnerabilityStatuses = changedStatuses,
RiskScoreDelta = riskDelta,
Summary = summary,
ComputedAt = _timeProvider.GetUtcNow()
};
}
private static ImmutableArray<ComponentDelta> ComputeAddedComponents(
IReadOnlyDictionary<string, Component> baseComponents,
IReadOnlyDictionary<string, Component> headComponents)
{
return headComponents
.Where(kv => !baseComponents.ContainsKey(kv.Key))
.OrderBy(kv => kv.Key, StringComparer.Ordinal)
.Select(kv => new ComponentDelta(
kv.Value.Purl,
kv.Value.Name,
kv.Value.Version,
kv.Value.Type,
kv.Value.Vulnerabilities))
.ToImmutableArray();
}
private static ImmutableArray<ComponentDelta> ComputeRemovedComponents(
IReadOnlyDictionary<string, Component> baseComponents,
IReadOnlyDictionary<string, Component> headComponents)
{
return baseComponents
.Where(kv => !headComponents.ContainsKey(kv.Key))
.OrderBy(kv => kv.Key, StringComparer.Ordinal)
.Select(kv => new ComponentDelta(
kv.Value.Purl,
kv.Value.Name,
kv.Value.Version,
kv.Value.Type,
kv.Value.Vulnerabilities))
.ToImmutableArray();
}
private static ImmutableArray<ComponentVersionDelta> ComputeChangedComponents(
IReadOnlyDictionary<string, Component> baseComponents,
IReadOnlyDictionary<string, Component> headComponents)
{
return baseComponents
.Where(kv => headComponents.TryGetValue(kv.Key, out var head)
&& !string.Equals(kv.Value.Version, head.Version, StringComparison.Ordinal))
.OrderBy(kv => kv.Key, StringComparer.Ordinal)
.Select(kv =>
{
var baseComponent = kv.Value;
var headComponent = headComponents[kv.Key];
var fixedVulns = baseComponent.Vulnerabilities
.Except(headComponent.Vulnerabilities, StringComparer.Ordinal)
.OrderBy(v => v, StringComparer.Ordinal)
.ToImmutableArray();
var introducedVulns = headComponent.Vulnerabilities
.Except(baseComponent.Vulnerabilities, StringComparer.Ordinal)
.OrderBy(v => v, StringComparer.Ordinal)
.ToImmutableArray();
return new ComponentVersionDelta(
baseComponent.Purl,
baseComponent.Name,
baseComponent.Version,
headComponent.Version,
fixedVulns,
introducedVulns);
})
.ToImmutableArray();
}
private static ImmutableArray<VulnerabilityDelta> ComputeAddedVulnerabilities(
IReadOnlyDictionary<string, Vulnerability> baseVulns,
IReadOnlyDictionary<string, Vulnerability> headVulns)
{
return headVulns
.Where(kv => !baseVulns.ContainsKey(kv.Key))
.OrderBy(kv => kv.Key, StringComparer.Ordinal)
.Select(kv => new VulnerabilityDelta(
kv.Value.Id,
kv.Value.Severity,
kv.Value.CvssScore,
kv.Value.ComponentPurl,
kv.Value.ReachabilityStatus))
.ToImmutableArray();
}
private static ImmutableArray<VulnerabilityDelta> ComputeRemovedVulnerabilities(
IReadOnlyDictionary<string, Vulnerability> baseVulns,
IReadOnlyDictionary<string, Vulnerability> headVulns)
{
return baseVulns
.Where(kv => !headVulns.ContainsKey(kv.Key))
.OrderBy(kv => kv.Key, StringComparer.Ordinal)
.Select(kv => new VulnerabilityDelta(
kv.Value.Id,
kv.Value.Severity,
kv.Value.CvssScore,
kv.Value.ComponentPurl,
kv.Value.ReachabilityStatus))
.ToImmutableArray();
}
private static ImmutableArray<VulnerabilityStatusDelta> ComputeStatusChanges(
IReadOnlyDictionary<string, Vulnerability> baseVulns,
IReadOnlyDictionary<string, Vulnerability> headVulns)
{
var deltas = new List<VulnerabilityStatusDelta>();
foreach (var (id, baseVuln) in baseVulns.OrderBy(kv => kv.Key, StringComparer.Ordinal))
{
if (!headVulns.TryGetValue(id, out var headVuln))
{
continue;
}
var oldStatus = baseVuln.Status ?? baseVuln.ReachabilityStatus ?? "unknown";
var newStatus = headVuln.Status ?? headVuln.ReachabilityStatus ?? "unknown";
if (!string.Equals(oldStatus, newStatus, StringComparison.OrdinalIgnoreCase))
{
deltas.Add(new VulnerabilityStatusDelta(id, oldStatus, newStatus, null));
}
}
return deltas.ToImmutableArray();
}
private static RiskScoreDelta ComputeRiskScoreDelta(decimal oldScore, decimal newScore)
{
var change = newScore - oldScore;
var percentChange = oldScore > 0 ? (change / oldScore) * 100 : (newScore > 0 ? 100 : 0);
var trend = change switch
{
< 0 => RiskTrend.Improved,
> 0 => RiskTrend.Degraded,
_ => RiskTrend.Stable
};
return new RiskScoreDelta(oldScore, newScore, change, percentChange, trend);
}
private static DeltaMagnitude ClassifyMagnitude(int totalChanges) => totalChanges switch
{
0 => DeltaMagnitude.None,
<= 5 => DeltaMagnitude.Minimal,
<= 20 => DeltaMagnitude.Small,
<= 50 => DeltaMagnitude.Medium,
<= 100 => DeltaMagnitude.Large,
_ => DeltaMagnitude.Major
};
private static VerdictReference CreateVerdictReference(Verdict verdict)
=> new(verdict.VerdictId, verdict.Digest, verdict.ArtifactRef, verdict.ScannedAt);
private static string ComputeDeltaId(Verdict baseVerdict, Verdict headVerdict)
{
var baseKey = baseVerdict.Digest ?? baseVerdict.VerdictId;
var headKey = headVerdict.Digest ?? headVerdict.VerdictId;
var input = $"{baseKey}:{headKey}";
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(input));
return Convert.ToHexString(hash).ToLowerInvariant();
}
}
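// Illustrative usage sketch: computing a delta between two hand-built verdicts.
// Identifiers, purls, and scores below are example values only.
public static class DeltaComputationUsageExample
{
    public static DeltaVerdict.Models.DeltaVerdict ComputeExample()
    {
        var scannedAt = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
        var baseVerdict = new Verdict
        {
            VerdictId = "verdict-base",
            ScannedAt = scannedAt,
            RiskScore = 42m,
            Components = [new Component("pkg:npm/lodash@4.17.20", "lodash", "4.17.20", "npm", ["CVE-2021-23337"])],
            Vulnerabilities = [new Vulnerability("CVE-2021-23337", "high", 7.2m, "pkg:npm/lodash@4.17.20", "reachable", null)]
        };
        // The head verdict upgrades the package and drops the vulnerability.
        var headVerdict = baseVerdict with
        {
            VerdictId = "verdict-head",
            RiskScore = 12m,
            Components = [new Component("pkg:npm/lodash@4.17.21", "lodash", "4.17.21", "npm", [])],
            Vulnerabilities = []
        };
        var engine = new DeltaComputationEngine();
        return engine.ComputeDelta(baseVerdict, headVerdict);
    }
}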

@@ -0,0 +1,158 @@
using System.Collections.Immutable;
namespace StellaOps.DeltaVerdict.Models;
/// <summary>
/// Represents the difference between two scan verdicts.
/// Used for diff-aware release gates and risk budget computation.
/// </summary>
public sealed record DeltaVerdict
{
/// <summary>
/// Unique identifier for this delta.
/// </summary>
public required string DeltaId { get; init; }
/// <summary>
/// Schema version for forward compatibility.
/// </summary>
public required string SchemaVersion { get; init; } = "1.0.0";
/// <summary>
/// Reference to the base (before) verdict.
/// </summary>
public required VerdictReference BaseVerdict { get; init; }
/// <summary>
/// Reference to the head (after) verdict.
/// </summary>
public required VerdictReference HeadVerdict { get; init; }
/// <summary>
/// Components added in head.
/// </summary>
public ImmutableArray<ComponentDelta> AddedComponents { get; init; } = [];
/// <summary>
/// Components removed in head.
/// </summary>
public ImmutableArray<ComponentDelta> RemovedComponents { get; init; } = [];
/// <summary>
/// Components with version changes.
/// </summary>
public ImmutableArray<ComponentVersionDelta> ChangedComponents { get; init; } = [];
/// <summary>
/// New vulnerabilities introduced in head.
/// </summary>
public ImmutableArray<VulnerabilityDelta> AddedVulnerabilities { get; init; } = [];
/// <summary>
/// Vulnerabilities fixed in head.
/// </summary>
public ImmutableArray<VulnerabilityDelta> RemovedVulnerabilities { get; init; } = [];
/// <summary>
/// Vulnerabilities with status changes (e.g., VEX update).
/// </summary>
public ImmutableArray<VulnerabilityStatusDelta> ChangedVulnerabilityStatuses { get; init; } = [];
/// <summary>
/// Risk score changes.
/// </summary>
public required RiskScoreDelta RiskScoreDelta { get; init; }
/// <summary>
/// Summary statistics for the delta.
/// </summary>
public required DeltaSummary Summary { get; init; }
/// <summary>
/// Whether this is an "empty delta" (no changes).
/// </summary>
public bool IsEmpty => Summary.TotalChanges == 0;
/// <summary>
/// UTC timestamp when delta was computed.
/// </summary>
public required DateTimeOffset ComputedAt { get; init; }
/// <summary>
/// SHA-256 digest of this delta (excluding this field and signature).
/// </summary>
public string? DeltaDigest { get; init; }
/// <summary>
/// DSSE signature if signed (JSON envelope).
/// </summary>
public string? Signature { get; init; }
}
public sealed record VerdictReference(
string VerdictId,
string? Digest,
string? ArtifactRef,
DateTimeOffset ScannedAt);
public sealed record ComponentDelta(
string Purl,
string Name,
string Version,
string Type,
ImmutableArray<string> AssociatedVulnerabilities);
public sealed record ComponentVersionDelta(
string Purl,
string Name,
string OldVersion,
string NewVersion,
ImmutableArray<string> VulnerabilitiesFixed,
ImmutableArray<string> VulnerabilitiesIntroduced);
public sealed record VulnerabilityDelta(
string VulnerabilityId,
string Severity,
decimal? CvssScore,
string? ComponentPurl,
string? ReachabilityStatus);
public sealed record VulnerabilityStatusDelta(
string VulnerabilityId,
string OldStatus,
string NewStatus,
string? Reason);
public sealed record RiskScoreDelta(
decimal OldScore,
decimal NewScore,
decimal Change,
decimal PercentChange,
RiskTrend Trend);
public enum RiskTrend
{
Improved,
Degraded,
Stable
}
public sealed record DeltaSummary(
int ComponentsAdded,
int ComponentsRemoved,
int ComponentsChanged,
int VulnerabilitiesAdded,
int VulnerabilitiesRemoved,
int VulnerabilityStatusChanges,
int TotalChanges,
DeltaMagnitude Magnitude);
public enum DeltaMagnitude
{
None,
Minimal,
Small,
Medium,
Large,
Major
}

@@ -0,0 +1,29 @@
using System.Collections.Immutable;
namespace StellaOps.DeltaVerdict.Models;
public sealed record Verdict
{
public required string VerdictId { get; init; }
public string? Digest { get; init; }
public string? ArtifactRef { get; init; }
public required DateTimeOffset ScannedAt { get; init; }
public decimal RiskScore { get; init; }
public ImmutableArray<Component> Components { get; init; } = [];
public ImmutableArray<Vulnerability> Vulnerabilities { get; init; } = [];
}
public sealed record Component(
string Purl,
string Name,
string Version,
string Type,
ImmutableArray<string> Vulnerabilities);
public sealed record Vulnerability(
string Id,
string Severity,
decimal? CvssScore,
string? ComponentPurl,
string? ReachabilityStatus,
string? Status);

@@ -0,0 +1,44 @@
using StellaOps.DeltaVerdict.Models;
using StellaOps.DeltaVerdict.Serialization;
namespace StellaOps.DeltaVerdict.Oci;
public sealed class DeltaOciAttacher : IDeltaOciAttacher
{
public OciAttachment CreateAttachment(DeltaVerdict.Models.DeltaVerdict delta, string artifactRef)
{
ArgumentNullException.ThrowIfNull(delta);
if (string.IsNullOrWhiteSpace(artifactRef))
{
throw new ArgumentException("Artifact reference is required.", nameof(artifactRef));
}
var payload = DeltaVerdictSerializer.Serialize(delta);
var annotations = new Dictionary<string, string>(StringComparer.Ordinal)
{
["org.opencontainers.image.title"] = "stellaops.delta-verdict",
["org.opencontainers.image.description"] = "Delta verdict for diff-aware release gates",
["stellaops.delta.base.digest"] = delta.BaseVerdict.Digest ?? string.Empty,
["stellaops.delta.head.digest"] = delta.HeadVerdict.Digest ?? string.Empty,
["stellaops.delta.base.id"] = delta.BaseVerdict.VerdictId,
["stellaops.delta.head.id"] = delta.HeadVerdict.VerdictId
};
return new OciAttachment(
ArtifactReference: artifactRef,
MediaType: "application/vnd.stellaops.delta-verdict+json",
Payload: payload,
Annotations: annotations);
}
}
public interface IDeltaOciAttacher
{
OciAttachment CreateAttachment(DeltaVerdict.Models.DeltaVerdict delta, string artifactRef);
}
public sealed record OciAttachment(
string ArtifactReference,
string MediaType,
string Payload,
IReadOnlyDictionary<string, string> Annotations);

@@ -0,0 +1,89 @@
using System.Collections.Immutable;
using StellaOps.DeltaVerdict.Models;
namespace StellaOps.DeltaVerdict.Policy;
/// <summary>
/// Evaluates delta verdicts against risk budgets for release gates.
/// </summary>
public sealed class RiskBudgetEvaluator : IRiskBudgetEvaluator
{
public RiskBudgetResult Evaluate(DeltaVerdict.Models.DeltaVerdict delta, RiskBudget budget)
{
ArgumentNullException.ThrowIfNull(delta);
ArgumentNullException.ThrowIfNull(budget);
var violations = new List<RiskBudgetViolation>();
var criticalAdded = delta.AddedVulnerabilities
.Count(v => string.Equals(v.Severity, "critical", StringComparison.OrdinalIgnoreCase));
if (criticalAdded > budget.MaxNewCriticalVulnerabilities)
{
violations.Add(new RiskBudgetViolation(
"CriticalVulnerabilities",
$"Added {criticalAdded} critical vulnerabilities (budget: {budget.MaxNewCriticalVulnerabilities})"));
}
var highAdded = delta.AddedVulnerabilities
.Count(v => string.Equals(v.Severity, "high", StringComparison.OrdinalIgnoreCase));
if (highAdded > budget.MaxNewHighVulnerabilities)
{
violations.Add(new RiskBudgetViolation(
"HighVulnerabilities",
$"Added {highAdded} high vulnerabilities (budget: {budget.MaxNewHighVulnerabilities})"));
}
if (delta.RiskScoreDelta.Change > budget.MaxRiskScoreIncrease)
{
violations.Add(new RiskBudgetViolation(
"RiskScoreIncrease",
$"Risk score increased by {delta.RiskScoreDelta.Change} (budget: {budget.MaxRiskScoreIncrease})"));
}
if ((int)delta.Summary.Magnitude > (int)budget.MaxMagnitude)
{
violations.Add(new RiskBudgetViolation(
"DeltaMagnitude",
$"Delta magnitude {delta.Summary.Magnitude} exceeds budget {budget.MaxMagnitude}"));
}
foreach (var vuln in delta.AddedVulnerabilities)
{
if (budget.BlockedVulnerabilities.Contains(vuln.VulnerabilityId))
{
violations.Add(new RiskBudgetViolation(
"BlockedVulnerability",
$"Added blocked vulnerability {vuln.VulnerabilityId}"));
}
}
return new RiskBudgetResult(
IsWithinBudget: violations.Count == 0,
Violations: violations,
Delta: delta,
Budget: budget);
}
}
public interface IRiskBudgetEvaluator
{
RiskBudgetResult Evaluate(DeltaVerdict.Models.DeltaVerdict delta, RiskBudget budget);
}
public sealed record RiskBudget
{
public int MaxNewCriticalVulnerabilities { get; init; } = 0;
public int MaxNewHighVulnerabilities { get; init; } = 3;
public decimal MaxRiskScoreIncrease { get; init; } = 10;
public DeltaMagnitude MaxMagnitude { get; init; } = DeltaMagnitude.Medium;
public ImmutableHashSet<string> BlockedVulnerabilities { get; init; }
= ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase);
}
public sealed record RiskBudgetResult(
bool IsWithinBudget,
IReadOnlyList<RiskBudgetViolation> Violations,
DeltaVerdict.Models.DeltaVerdict Delta,
RiskBudget Budget);
public sealed record RiskBudgetViolation(string Category, string Message);
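// Illustrative usage sketch: gating a release on a delta verdict. The budget values are
// examples; 'delta' would come from DeltaComputationEngine.ComputeDelta.
public static class RiskBudgetUsageExample
{
    public static bool ShouldPromote(DeltaVerdict.Models.DeltaVerdict delta)
    {
        var budget = new RiskBudget
        {
            MaxNewCriticalVulnerabilities = 0,
            MaxNewHighVulnerabilities = 1,
            MaxRiskScoreIncrease = 5m,
            MaxMagnitude = DeltaMagnitude.Small
        };
        var result = new RiskBudgetEvaluator().Evaluate(delta, budget);
        foreach (var violation in result.Violations)
        {
            Console.WriteLine($"{violation.Category}: {violation.Message}");
        }
        return result.IsWithinBudget;
    }
}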

@@ -0,0 +1,44 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Canonical.Json;
using StellaOps.DeltaVerdict.Models;
namespace StellaOps.DeltaVerdict.Serialization;
public static class DeltaVerdictSerializer
{
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
};
public static string Serialize(DeltaVerdict.Models.DeltaVerdict delta)
{
var jsonBytes = JsonSerializer.SerializeToUtf8Bytes(delta, JsonOptions);
var canonicalBytes = CanonJson.CanonicalizeParsedJson(jsonBytes);
return Encoding.UTF8.GetString(canonicalBytes);
}
public static DeltaVerdict.Models.DeltaVerdict Deserialize(string json)
{
return JsonSerializer.Deserialize<DeltaVerdict.Models.DeltaVerdict>(json, JsonOptions)
?? throw new InvalidOperationException("Failed to deserialize delta verdict");
}
public static string ComputeDigest(DeltaVerdict.Models.DeltaVerdict delta)
{
var unsigned = delta with { DeltaDigest = null, Signature = null };
var json = Serialize(unsigned);
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json));
return Convert.ToHexString(hash).ToLowerInvariant();
}
public static DeltaVerdict.Models.DeltaVerdict WithDigest(DeltaVerdict.Models.DeltaVerdict delta)
=> delta with { DeltaDigest = ComputeDigest(delta) };
}

@@ -0,0 +1,44 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Canonical.Json;
using StellaOps.DeltaVerdict.Models;
namespace StellaOps.DeltaVerdict.Serialization;
public static class VerdictSerializer
{
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
};
public static string Serialize(Verdict verdict)
{
var jsonBytes = JsonSerializer.SerializeToUtf8Bytes(verdict, JsonOptions);
var canonicalBytes = CanonJson.CanonicalizeParsedJson(jsonBytes);
return Encoding.UTF8.GetString(canonicalBytes);
}
public static Verdict Deserialize(string json)
{
return JsonSerializer.Deserialize<Verdict>(json, JsonOptions)
?? throw new InvalidOperationException("Failed to deserialize verdict");
}
public static string ComputeDigest(Verdict verdict)
{
var withoutDigest = verdict with { Digest = null };
var json = Serialize(withoutDigest);
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json));
return Convert.ToHexString(hash).ToLowerInvariant();
}
public static Verdict WithDigest(Verdict verdict)
=> verdict with { Digest = ComputeDigest(verdict) };
}

@@ -0,0 +1,195 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.DeltaVerdict.Models;
using StellaOps.DeltaVerdict.Serialization;
namespace StellaOps.DeltaVerdict.Signing;
public interface IDeltaSigningService
{
Task<DeltaVerdict.Models.DeltaVerdict> SignAsync(
DeltaVerdict.Models.DeltaVerdict delta,
SigningOptions options,
CancellationToken ct = default);
Task<VerificationResult> VerifyAsync(
DeltaVerdict.Models.DeltaVerdict delta,
VerificationOptions options,
CancellationToken ct = default);
}
public sealed class DeltaSigningService : IDeltaSigningService
{
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false
};
public Task<DeltaVerdict.Models.DeltaVerdict> SignAsync(
DeltaVerdict.Models.DeltaVerdict delta,
SigningOptions options,
CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(delta);
ArgumentNullException.ThrowIfNull(options);
ct.ThrowIfCancellationRequested();
var withDigest = DeltaVerdictSerializer.WithDigest(delta);
var payloadJson = DeltaVerdictSerializer.Serialize(withDigest with { Signature = null });
var payloadBytes = Encoding.UTF8.GetBytes(payloadJson);
var envelope = BuildEnvelope(payloadBytes, options);
var envelopeJson = JsonSerializer.Serialize(envelope, JsonOptions);
return Task.FromResult(withDigest with { Signature = envelopeJson });
}
public Task<VerificationResult> VerifyAsync(
DeltaVerdict.Models.DeltaVerdict delta,
VerificationOptions options,
CancellationToken ct = default)
{
ArgumentNullException.ThrowIfNull(delta);
ArgumentNullException.ThrowIfNull(options);
ct.ThrowIfCancellationRequested();
if (string.IsNullOrEmpty(delta.Signature))
{
return Task.FromResult(VerificationResult.Fail("Delta is not signed"));
}
DsseEnvelope? envelope;
try
{
envelope = JsonSerializer.Deserialize<DsseEnvelope>(delta.Signature, JsonOptions);
}
catch (JsonException ex)
{
return Task.FromResult(VerificationResult.Fail($"Invalid signature envelope: {ex.Message}"));
}
if (envelope is null)
{
return Task.FromResult(VerificationResult.Fail("Signature envelope is empty"));
}
var payloadBytes = Convert.FromBase64String(envelope.Payload);
var pae = BuildPae(envelope.PayloadType, payloadBytes);
        var expectedSig = ComputeSignature(pae, options.Algorithm, options.SecretBase64);
var matched = envelope.Signatures.Any(sig =>
string.Equals(sig.KeyId, options.KeyId, StringComparison.Ordinal)
&& string.Equals(sig.Sig, expectedSig, StringComparison.Ordinal));
if (!matched)
{
return Task.FromResult(VerificationResult.Fail("Signature verification failed"));
}
if (!string.IsNullOrEmpty(delta.DeltaDigest))
{
var computed = DeltaVerdictSerializer.ComputeDigest(delta);
if (!string.Equals(computed, delta.DeltaDigest, StringComparison.OrdinalIgnoreCase))
{
return Task.FromResult(VerificationResult.Fail("Delta digest mismatch"));
}
}
return Task.FromResult(VerificationResult.Success());
}
private static DsseEnvelope BuildEnvelope(byte[] payload, SigningOptions options)
{
var pae = BuildPae(options.PayloadType, payload);
        var signature = ComputeSignature(pae, options.Algorithm, options.SecretBase64);
return new DsseEnvelope(
options.PayloadType,
Convert.ToBase64String(payload),
[new DsseSignature(options.KeyId, signature)]);
}
    private static string ComputeSignature(byte[] pae, SigningAlgorithm algorithm, string? secretBase64)
    {
        // Shared by signing (SigningOptions) and verification (VerificationOptions).
        return algorithm switch
        {
            SigningAlgorithm.HmacSha256 => ComputeHmac(pae, secretBase64),
            SigningAlgorithm.Sha256 => Convert.ToBase64String(SHA256.HashData(pae)),
            _ => throw new InvalidOperationException($"Unsupported signing algorithm: {algorithm}")
        };
    }
private static string ComputeHmac(byte[] data, string? secretBase64)
{
if (string.IsNullOrWhiteSpace(secretBase64))
{
throw new InvalidOperationException("HMAC signing requires a base64 secret.");
}
var secret = Convert.FromBase64String(secretBase64);
using var hmac = new HMACSHA256(secret);
var sig = hmac.ComputeHash(data);
return Convert.ToBase64String(sig);
}
private static byte[] BuildPae(string payloadType, byte[] payload)
{
var prefix = "DSSEv1";
var typeBytes = Encoding.UTF8.GetBytes(payloadType);
var prefixBytes = Encoding.UTF8.GetBytes(prefix);
var lengthType = Encoding.UTF8.GetBytes(typeBytes.Length.ToString());
var lengthPayload = Encoding.UTF8.GetBytes(payload.Length.ToString());
using var stream = new MemoryStream();
stream.Write(prefixBytes);
stream.WriteByte((byte)' ');
stream.Write(lengthType);
stream.WriteByte((byte)' ');
stream.Write(typeBytes);
stream.WriteByte((byte)' ');
stream.Write(lengthPayload);
stream.WriteByte((byte)' ');
stream.Write(payload);
return stream.ToArray();
}
}
public sealed record SigningOptions
{
public required string KeyId { get; init; }
public SigningAlgorithm Algorithm { get; init; } = SigningAlgorithm.HmacSha256;
public string? SecretBase64 { get; init; }
public string PayloadType { get; init; } = "application/vnd.stellaops.delta-verdict+json";
}
public sealed record VerificationOptions
{
public required string KeyId { get; init; }
public SigningAlgorithm Algorithm { get; init; } = SigningAlgorithm.HmacSha256;
public string? SecretBase64 { get; init; }
}
public enum SigningAlgorithm
{
HmacSha256,
Sha256
}
public sealed record VerificationResult
{
public required bool IsValid { get; init; }
public string? Error { get; init; }
public static VerificationResult Success() => new() { IsValid = true };
public static VerificationResult Fail(string error) => new() { IsValid = false, Error = error };
}
public sealed record DsseEnvelope(
string PayloadType,
string Payload,
IReadOnlyList<DsseSignature> Signatures);
public sealed record DsseSignature(string KeyId, string Sig);
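// Illustrative usage sketch: HMAC-signing a delta and verifying it with the same key.
// The key id and secret are throwaway example values, not a real key.
public static class DeltaSigningUsageExample
{
    public static async Task<bool> SignAndVerifyAsync(
        DeltaVerdict.Models.DeltaVerdict delta, CancellationToken ct)
    {
        var secret = Convert.ToBase64String(RandomNumberGenerator.GetBytes(32));
        var signer = new DeltaSigningService();
        var signed = await signer.SignAsync(
            delta,
            new SigningOptions { KeyId = "example-key", SecretBase64 = secret },
            ct);
        var verification = await signer.VerifyAsync(
            signed,
            new VerificationOptions { KeyId = "example-key", SecretBase64 = secret },
            ct);
        return verification.IsValid;
    }
}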

@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="System.Text.Json" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
</ItemGroup>
</Project>

@@ -0,0 +1,119 @@
namespace StellaOps.Evidence.Budgets;
/// <summary>
/// Budget configuration for evidence storage.
/// </summary>
public sealed record EvidenceBudget
{
/// <summary>
/// Maximum total evidence size per scan (bytes).
/// </summary>
public required long MaxScanSizeBytes { get; init; }
/// <summary>
/// Maximum size per evidence type (bytes).
/// </summary>
public IReadOnlyDictionary<EvidenceType, long> MaxPerType { get; init; }
= new Dictionary<EvidenceType, long>();
/// <summary>
/// Retention policy by tier.
/// </summary>
public required IReadOnlyDictionary<RetentionTier, RetentionPolicy> RetentionPolicies { get; init; }
/// <summary>
/// Action when budget is exceeded.
/// </summary>
public BudgetExceededAction ExceededAction { get; init; } = BudgetExceededAction.Warn;
/// <summary>
/// Evidence types to always preserve (never prune).
/// </summary>
public IReadOnlySet<EvidenceType> AlwaysPreserve { get; init; }
= new HashSet<EvidenceType> { EvidenceType.Verdict, EvidenceType.Attestation };
public static EvidenceBudget Default => new()
{
MaxScanSizeBytes = 100 * 1024 * 1024, // 100 MB
MaxPerType = new Dictionary<EvidenceType, long>
{
[EvidenceType.CallGraph] = 50 * 1024 * 1024,
[EvidenceType.RuntimeCapture] = 20 * 1024 * 1024,
[EvidenceType.Sbom] = 10 * 1024 * 1024,
[EvidenceType.PolicyTrace] = 5 * 1024 * 1024
},
RetentionPolicies = new Dictionary<RetentionTier, RetentionPolicy>
{
[RetentionTier.Hot] = new RetentionPolicy { Duration = TimeSpan.FromDays(7) },
[RetentionTier.Warm] = new RetentionPolicy { Duration = TimeSpan.FromDays(30) },
[RetentionTier.Cold] = new RetentionPolicy { Duration = TimeSpan.FromDays(90) },
[RetentionTier.Archive] = new RetentionPolicy { Duration = TimeSpan.FromDays(365) }
}
};
}
public enum EvidenceType
{
Verdict,
PolicyTrace,
CallGraph,
RuntimeCapture,
Sbom,
Vex,
Attestation,
PathWitness,
Advisory
}
public enum RetentionTier
{
/// <summary>Immediately accessible, highest cost.</summary>
Hot,
/// <summary>Quick retrieval, moderate cost.</summary>
Warm,
/// <summary>Delayed retrieval, lower cost.</summary>
Cold,
/// <summary>Long-term storage, lowest cost.</summary>
Archive
}
public sealed record RetentionPolicy
{
/// <summary>
/// How long evidence stays in this tier.
/// </summary>
public required TimeSpan Duration { get; init; }
/// <summary>
/// Compression algorithm for this tier.
/// </summary>
public CompressionLevel Compression { get; init; } = CompressionLevel.None;
/// <summary>
/// Whether to deduplicate within this tier.
/// </summary>
public bool Deduplicate { get; init; } = true;
}
public enum CompressionLevel
{
None,
Fast,
Optimal,
Maximum
}
public enum BudgetExceededAction
{
/// <summary>Log warning but continue.</summary>
Warn,
/// <summary>Block the operation.</summary>
Block,
/// <summary>Automatically prune lowest priority evidence.</summary>
AutoPrune
}
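// Illustrative sketch (not part of the library): deriving a stricter budget from the default for CI-style
// scans. The sizes are placeholder values, and the with-expression replaces (not merges) the per-type caps.
internal static class EvidenceBudgetSketch
{
public static EvidenceBudget StrictCiBudget() => EvidenceBudget.Default with
{
MaxScanSizeBytes = 25 * 1024 * 1024, // 25 MB total per scan
MaxPerType = new Dictionary<EvidenceType, long>
{
[EvidenceType.CallGraph] = 10 * 1024 * 1024,
[EvidenceType.RuntimeCapture] = 5 * 1024 * 1024
},
ExceededAction = BudgetExceededAction.AutoPrune
};
}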

View File

@@ -0,0 +1,247 @@
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace StellaOps.Evidence.Budgets;
public interface IEvidenceBudgetService
{
BudgetCheckResult CheckBudget(Guid scanId, EvidenceItem item);
BudgetStatus GetBudgetStatus(Guid scanId);
Task<PruneResult> PruneToFitAsync(Guid scanId, long targetBytes, CancellationToken ct);
}
public sealed class EvidenceBudgetService : IEvidenceBudgetService
{
private readonly IEvidenceRepository _repository;
private readonly IOptionsMonitor<EvidenceBudget> _options;
private readonly ILogger<EvidenceBudgetService> _logger;
public EvidenceBudgetService(
IEvidenceRepository repository,
IOptionsMonitor<EvidenceBudget> options,
ILogger<EvidenceBudgetService> logger)
{
_repository = repository;
_options = options;
_logger = logger;
}
public BudgetCheckResult CheckBudget(Guid scanId, EvidenceItem item)
{
var budget = _options.CurrentValue;
var currentUsage = GetCurrentUsage(scanId);
var issues = new List<string>();
// Check total budget
var projectedTotal = currentUsage.TotalBytes + item.SizeBytes;
if (projectedTotal > budget.MaxScanSizeBytes)
{
issues.Add($"Would exceed total budget: {projectedTotal:N0} > {budget.MaxScanSizeBytes:N0} bytes");
}
// Check per-type budget
if (budget.MaxPerType.TryGetValue(item.Type, out var typeLimit))
{
var typeUsage = currentUsage.ByType.GetValueOrDefault(item.Type, 0);
var projectedType = typeUsage + item.SizeBytes;
if (projectedType > typeLimit)
{
issues.Add($"Would exceed {item.Type} budget: {projectedType:N0} > {typeLimit:N0} bytes");
}
}
if (issues.Count == 0)
{
return BudgetCheckResult.WithinBudget();
}
return new BudgetCheckResult
{
IsWithinBudget = false,
Issues = issues,
RecommendedAction = budget.ExceededAction,
CanAutoPrune = budget.ExceededAction == BudgetExceededAction.AutoPrune,
BytesToFree = Math.Max(0, projectedTotal - budget.MaxScanSizeBytes)
};
}
public BudgetStatus GetBudgetStatus(Guid scanId)
{
var budget = _options.CurrentValue;
var usage = GetCurrentUsage(scanId);
return new BudgetStatus
{
ScanId = scanId,
TotalBudgetBytes = budget.MaxScanSizeBytes,
UsedBytes = usage.TotalBytes,
RemainingBytes = Math.Max(0, budget.MaxScanSizeBytes - usage.TotalBytes),
UtilizationPercent = (decimal)usage.TotalBytes / budget.MaxScanSizeBytes * 100,
ByType = usage.ByType.ToDictionary(
kvp => kvp.Key,
kvp => new TypeBudgetStatus
{
Type = kvp.Key,
UsedBytes = kvp.Value,
// Report null (not 0) when no per-type limit is configured, and guard against a zero limit.
LimitBytes = budget.MaxPerType.TryGetValue(kvp.Key, out var limit) ? limit : (long?)null,
UtilizationPercent = budget.MaxPerType.TryGetValue(kvp.Key, out var typeLimit) && typeLimit > 0
? (decimal)kvp.Value / typeLimit * 100
: 0
})
};
}
public async Task<PruneResult> PruneToFitAsync(
Guid scanId,
long targetBytes,
CancellationToken ct)
{
var budget = _options.CurrentValue;
var usage = GetCurrentUsage(scanId);
if (usage.TotalBytes <= targetBytes)
{
return PruneResult.NoPruningNeeded();
}
var bytesToPrune = usage.TotalBytes - targetBytes;
var pruned = new List<PrunedItem>();
// Get all evidence items, sorted by pruning priority
var items = await _repository.GetByScanIdAsync(scanId, ct);
var candidates = items
.Where(i => !budget.AlwaysPreserve.Contains(i.Type))
.OrderBy(i => GetPrunePriority(i))
.ToList();
long prunedBytes = 0;
foreach (var item in candidates)
{
if (prunedBytes >= bytesToPrune)
break;
// Move to archive tier or delete
await _repository.MoveToTierAsync(item.Id, RetentionTier.Archive, ct);
pruned.Add(new PrunedItem(item.Id, item.Type, item.SizeBytes));
prunedBytes += item.SizeBytes;
}
_logger.LogInformation(
"Pruned {Count} items ({Bytes:N0} bytes) for scan {ScanId}",
pruned.Count, prunedBytes, scanId);
return new PruneResult
{
Success = prunedBytes >= bytesToPrune,
BytesPruned = prunedBytes,
ItemsPruned = pruned,
BytesRemaining = usage.TotalBytes - prunedBytes
};
}
private static int GetPrunePriority(EvidenceItem item)
{
// Lower = prune first
return item.Type switch
{
EvidenceType.RuntimeCapture => 1,
EvidenceType.CallGraph => 2,
EvidenceType.Advisory => 3,
EvidenceType.PathWitness => 4,
EvidenceType.PolicyTrace => 5,
EvidenceType.Sbom => 6,
EvidenceType.Vex => 7,
EvidenceType.Attestation => 8,
EvidenceType.Verdict => 9, // Never prune
_ => 5
};
}
private UsageStats GetCurrentUsage(Guid scanId)
{
// Computes current usage for the scan from the repository.
// NOTE: blocks synchronously on the async repository call.
var items = _repository.GetByScanIdAsync(scanId, CancellationToken.None)
.GetAwaiter().GetResult();
var totalBytes = items.Sum(i => i.SizeBytes);
var byType = items
.GroupBy(i => i.Type)
.ToDictionary(g => g.Key, g => g.Sum(i => i.SizeBytes));
return new UsageStats
{
TotalBytes = totalBytes,
ByType = byType
};
}
}
public sealed record BudgetCheckResult
{
public required bool IsWithinBudget { get; init; }
public IReadOnlyList<string> Issues { get; init; } = [];
public BudgetExceededAction RecommendedAction { get; init; }
public bool CanAutoPrune { get; init; }
public long BytesToFree { get; init; }
public static BudgetCheckResult WithinBudget() => new() { IsWithinBudget = true };
}
public sealed record BudgetStatus
{
public required Guid ScanId { get; init; }
public required long TotalBudgetBytes { get; init; }
public required long UsedBytes { get; init; }
public required long RemainingBytes { get; init; }
public required decimal UtilizationPercent { get; init; }
public required IReadOnlyDictionary<EvidenceType, TypeBudgetStatus> ByType { get; init; }
}
public sealed record TypeBudgetStatus
{
public required EvidenceType Type { get; init; }
public required long UsedBytes { get; init; }
public long? LimitBytes { get; init; }
public decimal UtilizationPercent { get; init; }
}
public sealed record PruneResult
{
public required bool Success { get; init; }
public long BytesPruned { get; init; }
public IReadOnlyList<PrunedItem> ItemsPruned { get; init; } = [];
public long BytesRemaining { get; init; }
public static PruneResult NoPruningNeeded() => new() { Success = true };
}
public sealed record PrunedItem(Guid ItemId, EvidenceType Type, long SizeBytes);
public sealed record UsageStats
{
public long TotalBytes { get; init; }
public IReadOnlyDictionary<EvidenceType, long> ByType { get; init; } = new Dictionary<EvidenceType, long>();
}
// Supporting interfaces and types
public interface IEvidenceRepository
{
Task<IReadOnlyList<EvidenceItem>> GetByScanIdAsync(Guid scanId, CancellationToken ct);
Task<IReadOnlyList<EvidenceItem>> GetByScanIdAndTypeAsync(Guid scanId, EvidenceType type, CancellationToken ct);
Task<IReadOnlyList<EvidenceItem>> GetOlderThanAsync(RetentionTier tier, DateTimeOffset cutoff, CancellationToken ct);
Task MoveToTierAsync(Guid itemId, RetentionTier tier, CancellationToken ct);
Task UpdateContentAsync(Guid itemId, byte[] content, CancellationToken ct);
}
public sealed record EvidenceItem
{
public required Guid Id { get; init; }
public required Guid ScanId { get; init; }
public required EvidenceType Type { get; init; }
public required long SizeBytes { get; init; }
public required RetentionTier Tier { get; init; }
public required DateTimeOffset CreatedAt { get; init; }
public string? ArchiveKey { get; init; }
}
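// Illustrative sketch (not part of the library): how a caller might gate an evidence write on the budget
// and fall back to pruning when auto-prune is allowed. The scanId and item are assumed to come from the caller.
internal static class EvidenceBudgetServiceSketch
{
public static async Task StoreWithBudgetAsync(
IEvidenceBudgetService budgets, Guid scanId, EvidenceItem item, CancellationToken ct)
{
var check = budgets.CheckBudget(scanId, item);
if (!check.IsWithinBudget && check.CanAutoPrune)
{
// Prune so that the new item still fits under the total budget, then proceed with the write.
var status = budgets.GetBudgetStatus(scanId);
await budgets.PruneToFitAsync(scanId, status.TotalBudgetBytes - item.SizeBytes, ct);
}
}
}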

View File

@@ -0,0 +1,102 @@
using System.Collections.Immutable;
namespace StellaOps.Evidence.Models;
/// <summary>
/// Machine-readable index linking a verdict to all supporting evidence.
/// </summary>
public sealed record EvidenceIndex
{
public required string IndexId { get; init; }
public string SchemaVersion { get; init; } = "1.0.0";
public required VerdictReference Verdict { get; init; }
public required ImmutableArray<SbomEvidence> Sboms { get; init; }
public required ImmutableArray<AttestationEvidence> Attestations { get; init; }
public ImmutableArray<VexEvidence> VexDocuments { get; init; } = [];
public ImmutableArray<ReachabilityEvidence> ReachabilityProofs { get; init; } = [];
public ImmutableArray<UnknownEvidence> Unknowns { get; init; } = [];
public required ToolChainEvidence ToolChain { get; init; }
public required string RunManifestDigest { get; init; }
public required DateTimeOffset CreatedAt { get; init; }
public string? IndexDigest { get; init; }
}
public sealed record VerdictReference(
string VerdictId,
string Digest,
VerdictOutcome Outcome,
string? PolicyVersion);
public enum VerdictOutcome
{
Pass,
Fail,
Warn,
Unknown
}
public sealed record SbomEvidence(
string SbomId,
string Format,
string Digest,
string? Uri,
int ComponentCount,
DateTimeOffset GeneratedAt);
public sealed record AttestationEvidence(
string AttestationId,
string Type,
string Digest,
string SignerKeyId,
bool SignatureValid,
DateTimeOffset SignedAt,
string? RekorLogIndex);
public sealed record VexEvidence(
string VexId,
string Format,
string Digest,
string Source,
int StatementCount,
ImmutableArray<string> AffectedVulnerabilities);
public sealed record ReachabilityEvidence(
string ProofId,
string VulnerabilityId,
string ComponentPurl,
ReachabilityStatus Status,
string? EntryPoint,
ImmutableArray<string> CallPath,
string Digest);
public enum ReachabilityStatus
{
Reachable,
NotReachable,
Inconclusive,
NotAnalyzed
}
public sealed record UnknownEvidence(
string UnknownId,
string ReasonCode,
string Description,
string? ComponentPurl,
string? VulnerabilityId,
UnknownSeverity Severity);
public enum UnknownSeverity
{
Low,
Medium,
High,
Critical
}
public sealed record ToolChainEvidence(
string ScannerVersion,
string SbomGeneratorVersion,
string ReachabilityEngineVersion,
string AttestorVersion,
string PolicyEngineVersion,
ImmutableDictionary<string, string> AdditionalTools);

View File

@@ -0,0 +1,152 @@
using Microsoft.Extensions.Options;
using StellaOps.Evidence.Budgets;
namespace StellaOps.Evidence.Retention;
public interface IRetentionTierManager
{
Task<TierMigrationResult> RunMigrationAsync(CancellationToken ct);
RetentionTier GetCurrentTier(EvidenceItem item);
Task EnsureAuditCompleteAsync(Guid scanId, CancellationToken ct);
}
public sealed class RetentionTierManager : IRetentionTierManager
{
private readonly IEvidenceRepository _repository;
private readonly IArchiveStorage _archiveStorage;
private readonly IOptionsMonitor<EvidenceBudget> _options;
public RetentionTierManager(
IEvidenceRepository repository,
IArchiveStorage archiveStorage,
IOptionsMonitor<EvidenceBudget> options)
{
_repository = repository;
_archiveStorage = archiveStorage;
_options = options;
}
public async Task<TierMigrationResult> RunMigrationAsync(CancellationToken ct)
{
var budget = _options.CurrentValue;
var now = DateTimeOffset.UtcNow;
var migrated = new List<MigratedItem>();
// Hot → Warm
var hotExpiry = now - budget.RetentionPolicies[RetentionTier.Hot].Duration;
var toWarm = await _repository.GetOlderThanAsync(RetentionTier.Hot, hotExpiry, ct);
foreach (var item in toWarm)
{
await MigrateAsync(item, RetentionTier.Warm, ct);
migrated.Add(new MigratedItem(item.Id, RetentionTier.Hot, RetentionTier.Warm));
}
// Warm → Cold
var warmExpiry = now - budget.RetentionPolicies[RetentionTier.Warm].Duration;
var toCold = await _repository.GetOlderThanAsync(RetentionTier.Warm, warmExpiry, ct);
foreach (var item in toCold)
{
await MigrateAsync(item, RetentionTier.Cold, ct);
migrated.Add(new MigratedItem(item.Id, RetentionTier.Warm, RetentionTier.Cold));
}
// Cold → Archive
var coldExpiry = now - budget.RetentionPolicies[RetentionTier.Cold].Duration;
var toArchive = await _repository.GetOlderThanAsync(RetentionTier.Cold, coldExpiry, ct);
foreach (var item in toArchive)
{
await MigrateAsync(item, RetentionTier.Archive, ct);
migrated.Add(new MigratedItem(item.Id, RetentionTier.Cold, RetentionTier.Archive));
}
return new TierMigrationResult
{
MigratedCount = migrated.Count,
Items = migrated
};
}
public RetentionTier GetCurrentTier(EvidenceItem item)
{
var budget = _options.CurrentValue;
var age = DateTimeOffset.UtcNow - item.CreatedAt;
if (age < budget.RetentionPolicies[RetentionTier.Hot].Duration)
return RetentionTier.Hot;
if (age < budget.RetentionPolicies[RetentionTier.Warm].Duration)
return RetentionTier.Warm;
if (age < budget.RetentionPolicies[RetentionTier.Cold].Duration)
return RetentionTier.Cold;
return RetentionTier.Archive;
}
public async Task EnsureAuditCompleteAsync(Guid scanId, CancellationToken ct)
{
var budget = _options.CurrentValue;
// Ensure all AlwaysPreserve types are in Hot tier for audit export
foreach (var type in budget.AlwaysPreserve)
{
var items = await _repository.GetByScanIdAndTypeAsync(scanId, type, ct);
foreach (var item in items.Where(i => i.Tier != RetentionTier.Hot))
{
await RestoreToHotAsync(item, ct);
}
}
}
private async Task MigrateAsync(EvidenceItem item, RetentionTier targetTier, CancellationToken ct)
{
var policy = _options.CurrentValue.RetentionPolicies[targetTier];
if (policy.Compression != CompressionLevel.None)
{
// Compress before migration
var compressed = await CompressAsync(item, policy.Compression, ct);
await _repository.UpdateContentAsync(item.Id, compressed, ct);
}
await _repository.MoveToTierAsync(item.Id, targetTier, ct);
}
private async Task RestoreToHotAsync(EvidenceItem item, CancellationToken ct)
{
if (item.Tier == RetentionTier.Archive)
{
// Retrieve from archive storage
var content = await _archiveStorage.RetrieveAsync(item.ArchiveKey!, ct);
await _repository.UpdateContentAsync(item.Id, content, ct);
}
await _repository.MoveToTierAsync(item.Id, RetentionTier.Hot, ct);
}
private async Task<byte[]> CompressAsync(
EvidenceItem item,
CompressionLevel level,
CancellationToken ct)
{
// Placeholder for compression logic
// In real implementation, would read content, compress, and return
await Task.CompletedTask;
return Array.Empty<byte>();
}
}
public sealed record TierMigrationResult
{
public required int MigratedCount { get; init; }
public IReadOnlyList<MigratedItem> Items { get; init; } = [];
}
public sealed record MigratedItem(Guid ItemId, RetentionTier FromTier, RetentionTier ToTier);
/// <summary>
/// Archive storage interface for long-term retention.
/// </summary>
public interface IArchiveStorage
{
Task<byte[]> RetrieveAsync(string archiveKey, CancellationToken ct);
Task<string> StoreAsync(byte[] content, CancellationToken ct);
}
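// Illustrative sketch (not part of the library): a periodic migration loop that a hosted background service
// might run. The six-hour interval is an assumption; real scheduling would come from host configuration.
internal static class RetentionMigrationSketch
{
public static async Task RunPeriodicallyAsync(IRetentionTierManager manager, CancellationToken ct)
{
while (!ct.IsCancellationRequested)
{
// Moves expired items down the tiers (Hot -> Warm -> Cold -> Archive); the result reports the migrated count.
await manager.RunMigrationAsync(ct);
await Task.Delay(TimeSpan.FromHours(6), ct);
}
}
}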

View File

@@ -0,0 +1,116 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stellaops.io/schemas/evidence-index/v1",
"title": "StellaOps Evidence Index",
"type": "object",
"required": [
"indexId",
"schemaVersion",
"verdict",
"sboms",
"attestations",
"toolChain",
"runManifestDigest",
"createdAt"
],
"properties": {
"indexId": { "type": "string" },
"schemaVersion": { "type": "string" },
"verdict": { "$ref": "#/$defs/verdictReference" },
"sboms": { "type": "array", "items": { "$ref": "#/$defs/sbomEvidence" } },
"attestations": { "type": "array", "items": { "$ref": "#/$defs/attestationEvidence" } },
"vexDocuments": { "type": "array", "items": { "$ref": "#/$defs/vexEvidence" } },
"reachabilityProofs": { "type": "array", "items": { "$ref": "#/$defs/reachabilityEvidence" } },
"unknowns": { "type": "array", "items": { "$ref": "#/$defs/unknownEvidence" } },
"toolChain": { "$ref": "#/$defs/toolChainEvidence" },
"runManifestDigest": { "type": "string" },
"createdAt": { "type": "string", "format": "date-time" },
"indexDigest": { "type": ["string", "null"] }
},
"$defs": {
"verdictReference": {
"type": "object",
"required": ["verdictId", "digest", "outcome"],
"properties": {
"verdictId": { "type": "string" },
"digest": { "type": "string" },
"outcome": { "enum": ["Pass", "Fail", "Warn", "Unknown"] },
"policyVersion": { "type": ["string", "null"] }
}
},
"sbomEvidence": {
"type": "object",
"required": ["sbomId", "format", "digest", "componentCount", "generatedAt"],
"properties": {
"sbomId": { "type": "string" },
"format": { "type": "string" },
"digest": { "type": "string" },
"uri": { "type": ["string", "null"] },
"componentCount": { "type": "integer" },
"generatedAt": { "type": "string", "format": "date-time" }
}
},
"attestationEvidence": {
"type": "object",
"required": ["attestationId", "type", "digest", "signerKeyId", "signatureValid", "signedAt"],
"properties": {
"attestationId": { "type": "string" },
"type": { "type": "string" },
"digest": { "type": "string" },
"signerKeyId": { "type": "string" },
"signatureValid": { "type": "boolean" },
"signedAt": { "type": "string", "format": "date-time" },
"rekorLogIndex": { "type": ["string", "null"] }
}
},
"vexEvidence": {
"type": "object",
"required": ["vexId", "format", "digest", "source", "statementCount", "affectedVulnerabilities"],
"properties": {
"vexId": { "type": "string" },
"format": { "type": "string" },
"digest": { "type": "string" },
"source": { "type": "string" },
"statementCount": { "type": "integer" },
"affectedVulnerabilities": { "type": "array", "items": { "type": "string" } }
}
},
"reachabilityEvidence": {
"type": "object",
"required": ["proofId", "vulnerabilityId", "componentPurl", "status", "callPath", "digest"],
"properties": {
"proofId": { "type": "string" },
"vulnerabilityId": { "type": "string" },
"componentPurl": { "type": "string" },
"status": { "enum": ["Reachable", "NotReachable", "Inconclusive", "NotAnalyzed"] },
"entryPoint": { "type": ["string", "null"] },
"callPath": { "type": "array", "items": { "type": "string" } },
"digest": { "type": "string" }
}
},
"unknownEvidence": {
"type": "object",
"required": ["unknownId", "reasonCode", "description", "severity"],
"properties": {
"unknownId": { "type": "string" },
"reasonCode": { "type": "string" },
"description": { "type": "string" },
"componentPurl": { "type": ["string", "null"] },
"vulnerabilityId": { "type": ["string", "null"] },
"severity": { "enum": ["Low", "Medium", "High", "Critical"] }
}
},
"toolChainEvidence": {
"type": "object",
"required": ["scannerVersion", "sbomGeneratorVersion", "reachabilityEngineVersion", "attestorVersion", "policyEngineVersion", "additionalTools"],
"properties": {
"scannerVersion": { "type": "string" },
"sbomGeneratorVersion": { "type": "string" },
"reachabilityEngineVersion": { "type": "string" },
"attestorVersion": { "type": "string" },
"policyEngineVersion": { "type": "string" },
"additionalTools": { "type": "object" }
}
}
}
}

View File

@@ -0,0 +1,47 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Canonical.Json;
using StellaOps.Evidence.Models;
namespace StellaOps.Evidence.Serialization;
/// <summary>
/// Serialize and hash EvidenceIndex in canonical form.
/// </summary>
public static class EvidenceIndexSerializer
{
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
};
public static string Serialize(EvidenceIndex index)
{
var jsonBytes = JsonSerializer.SerializeToUtf8Bytes(index, JsonOptions);
var canonicalBytes = CanonJson.CanonicalizeParsedJson(jsonBytes);
return Encoding.UTF8.GetString(canonicalBytes);
}
public static EvidenceIndex Deserialize(string json)
{
return JsonSerializer.Deserialize<EvidenceIndex>(json, JsonOptions)
?? throw new InvalidOperationException("Failed to deserialize evidence index");
}
public static string ComputeDigest(EvidenceIndex index)
{
var withoutDigest = index with { IndexDigest = null };
var json = Serialize(withoutDigest);
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json));
return Convert.ToHexString(hash).ToLowerInvariant();
}
public static EvidenceIndex WithDigest(EvidenceIndex index)
=> index with { IndexDigest = ComputeDigest(index) };
}
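// Illustrative sketch (not part of the library): re-verifying a stamped index digest. ComputeDigest clears
// IndexDigest before hashing, so recomputing over a stamped index and comparing is sufficient.
internal static class EvidenceIndexDigestSketch
{
public static bool VerifyDigest(EvidenceIndex index) =>
index.IndexDigest is not null &&
string.Equals(EvidenceIndexSerializer.ComputeDigest(index), index.IndexDigest, StringComparison.OrdinalIgnoreCase);
}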

View File

@@ -0,0 +1,115 @@
using System.Collections.Immutable;
using StellaOps.Evidence.Models;
using StellaOps.Evidence.Serialization;
namespace StellaOps.Evidence.Services;
/// <summary>
/// Collects evidence entries and builds a deterministic EvidenceIndex.
/// </summary>
public sealed class EvidenceLinker : IEvidenceLinker
{
private readonly object _lock = new();
private readonly List<SbomEvidence> _sboms = [];
private readonly List<AttestationEvidence> _attestations = [];
private readonly List<VexEvidence> _vexDocuments = [];
private readonly List<ReachabilityEvidence> _reachabilityProofs = [];
private readonly List<UnknownEvidence> _unknowns = [];
private ToolChainEvidence? _toolChain;
public void AddSbom(SbomEvidence sbom)
{
lock (_lock)
{
_sboms.Add(sbom);
}
}
public void AddAttestation(AttestationEvidence attestation)
{
lock (_lock)
{
_attestations.Add(attestation);
}
}
public void AddVex(VexEvidence vex)
{
lock (_lock)
{
_vexDocuments.Add(vex);
}
}
public void AddReachabilityProof(ReachabilityEvidence proof)
{
lock (_lock)
{
_reachabilityProofs.Add(proof);
}
}
public void AddUnknown(UnknownEvidence unknown)
{
lock (_lock)
{
_unknowns.Add(unknown);
}
}
public void SetToolChain(ToolChainEvidence toolChain)
{
lock (_lock)
{
_toolChain = toolChain;
}
}
public EvidenceIndex Build(VerdictReference verdict, string runManifestDigest)
{
ToolChainEvidence toolChain;
ImmutableArray<SbomEvidence> sboms;
ImmutableArray<AttestationEvidence> attestations;
ImmutableArray<VexEvidence> vexDocuments;
ImmutableArray<ReachabilityEvidence> reachabilityProofs;
ImmutableArray<UnknownEvidence> unknowns;
lock (_lock)
{
toolChain = _toolChain ?? throw new InvalidOperationException("ToolChain must be set before building index");
sboms = _sboms.ToImmutableArray();
attestations = _attestations.ToImmutableArray();
vexDocuments = _vexDocuments.ToImmutableArray();
reachabilityProofs = _reachabilityProofs.ToImmutableArray();
unknowns = _unknowns.ToImmutableArray();
}
var index = new EvidenceIndex
{
IndexId = Guid.NewGuid().ToString(),
SchemaVersion = "1.0.0",
Verdict = verdict,
Sboms = sboms,
Attestations = attestations,
VexDocuments = vexDocuments,
ReachabilityProofs = reachabilityProofs,
Unknowns = unknowns,
ToolChain = toolChain,
RunManifestDigest = runManifestDigest,
CreatedAt = DateTimeOffset.UtcNow
};
return EvidenceIndexSerializer.WithDigest(index);
}
}
public interface IEvidenceLinker
{
void AddSbom(SbomEvidence sbom);
void AddAttestation(AttestationEvidence attestation);
void AddVex(VexEvidence vex);
void AddReachabilityProof(ReachabilityEvidence proof);
void AddUnknown(UnknownEvidence unknown);
void SetToolChain(ToolChainEvidence toolChain);
EvidenceIndex Build(VerdictReference verdict, string runManifestDigest);
}
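// Illustrative sketch (not part of the library): minimal wiring of the linker. All identifiers, digests,
// and versions below are hypothetical placeholder values.
internal static class EvidenceLinkerSketch
{
public static EvidenceIndex BuildMinimalIndex()
{
IEvidenceLinker linker = new EvidenceLinker();
linker.AddSbom(new SbomEvidence("sbom-1", "CycloneDX", "sha256:aaa", null, 42, DateTimeOffset.UtcNow));
linker.SetToolChain(new ToolChainEvidence("1.0.0", "1.0.0", "1.0.0", "1.0.0", "1.0.0", ImmutableDictionary<string, string>.Empty));
var verdict = new VerdictReference("verdict-1", "sha256:bbb", VerdictOutcome.Pass, "policy-1");
return linker.Build(verdict, runManifestDigest: "sha256:ccc");
}
}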

View File

@@ -0,0 +1,67 @@
using StellaOps.Evidence.Models;
namespace StellaOps.Evidence.Services;
/// <summary>
/// Query helpers for evidence chains.
/// </summary>
public sealed class EvidenceQueryService : IEvidenceQueryService
{
public IEnumerable<AttestationEvidence> GetAttestationsForSbom(
EvidenceIndex index, string sbomDigest)
{
// Note: this returns all attestations of type "sbom" when the index contains an SBOM with the given
// digest; it does not link an individual attestation to a specific SBOM document.
return index.Attestations
.Where(a => a.Type == "sbom" && index.Sboms.Any(s => s.Digest == sbomDigest));
}
public IEnumerable<ReachabilityEvidence> GetReachabilityForVulnerability(
EvidenceIndex index, string vulnerabilityId)
{
return index.ReachabilityProofs
.Where(r => r.VulnerabilityId == vulnerabilityId);
}
public IEnumerable<VexEvidence> GetVexForVulnerability(
EvidenceIndex index, string vulnerabilityId)
{
return index.VexDocuments
.Where(v => v.AffectedVulnerabilities.Contains(vulnerabilityId));
}
public EvidenceChainReport BuildChainReport(EvidenceIndex index)
{
return new EvidenceChainReport
{
VerdictDigest = index.Verdict.Digest,
SbomCount = index.Sboms.Length,
AttestationCount = index.Attestations.Length,
VexCount = index.VexDocuments.Length,
ReachabilityProofCount = index.ReachabilityProofs.Length,
UnknownCount = index.Unknowns.Length,
AllSignaturesValid = index.Attestations.All(a => a.SignatureValid),
HasRekorEntries = index.Attestations.Any(a => a.RekorLogIndex is not null),
ToolChainComplete = index.ToolChain is not null
};
}
}
public interface IEvidenceQueryService
{
IEnumerable<AttestationEvidence> GetAttestationsForSbom(EvidenceIndex index, string sbomDigest);
IEnumerable<ReachabilityEvidence> GetReachabilityForVulnerability(EvidenceIndex index, string vulnerabilityId);
IEnumerable<VexEvidence> GetVexForVulnerability(EvidenceIndex index, string vulnerabilityId);
EvidenceChainReport BuildChainReport(EvidenceIndex index);
}
public sealed record EvidenceChainReport
{
public required string VerdictDigest { get; init; }
public int SbomCount { get; init; }
public int AttestationCount { get; init; }
public int VexCount { get; init; }
public int ReachabilityProofCount { get; init; }
public int UnknownCount { get; init; }
public bool AllSignaturesValid { get; init; }
public bool HasRekorEntries { get; init; }
public bool ToolChainComplete { get; init; }
}

View File

@@ -0,0 +1,21 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Json.Schema.Net" Version="7.2.0" />
<PackageReference Include="System.Collections.Immutable" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="Schemas\*.json" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,63 @@
using StellaOps.Evidence.Models;
using StellaOps.Evidence.Serialization;
namespace StellaOps.Evidence.Validation;
public sealed class EvidenceIndexValidator : IEvidenceIndexValidator
{
public ValidationResult Validate(EvidenceIndex index)
{
var errors = new List<ValidationError>();
if (index.Sboms.Length == 0)
{
errors.Add(new ValidationError("Sboms", "At least one SBOM required"));
}
foreach (var vex in index.VexDocuments)
{
if (vex.StatementCount == 0)
{
errors.Add(new ValidationError("VexDocuments", $"VEX {vex.VexId} has no statements"));
}
}
foreach (var proof in index.ReachabilityProofs)
{
if (proof.Status == ReachabilityStatus.Inconclusive &&
!index.Unknowns.Any(u => u.VulnerabilityId == proof.VulnerabilityId))
{
errors.Add(new ValidationError("ReachabilityProofs",
$"Inconclusive reachability for {proof.VulnerabilityId} not recorded as unknown"));
}
}
foreach (var att in index.Attestations)
{
if (!att.SignatureValid)
{
errors.Add(new ValidationError("Attestations",
$"Attestation {att.AttestationId} has invalid signature"));
}
}
if (index.IndexDigest is not null)
{
var computed = EvidenceIndexSerializer.ComputeDigest(index);
if (!string.Equals(computed, index.IndexDigest, StringComparison.OrdinalIgnoreCase))
{
errors.Add(new ValidationError("IndexDigest", "Digest mismatch"));
}
}
return new ValidationResult(errors.Count == 0, errors);
}
}
public interface IEvidenceIndexValidator
{
ValidationResult Validate(EvidenceIndex index);
}
public sealed record ValidationResult(bool IsValid, IReadOnlyList<ValidationError> Errors);
public sealed record ValidationError(string Field, string Message);

View File

@@ -0,0 +1,27 @@
using System.Reflection;
namespace StellaOps.Evidence.Validation;
internal static class SchemaLoader
{
public static string LoadSchema(string fileName)
{
var assembly = Assembly.GetExecutingAssembly();
var resourceName = assembly.GetManifestResourceNames()
.FirstOrDefault(name => name.EndsWith(fileName, StringComparison.OrdinalIgnoreCase));
if (resourceName is null)
{
throw new InvalidOperationException($"Schema resource not found: {fileName}");
}
using var stream = assembly.GetManifestResourceStream(resourceName);
if (stream is null)
{
throw new InvalidOperationException($"Schema resource not available: {resourceName}");
}
using var reader = new StreamReader(stream);
return reader.ReadToEnd();
}
}

View File

@@ -0,0 +1,8 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
</PropertyGroup>
</Project>

View File

@@ -0,0 +1,150 @@
using System.ComponentModel;
using System.Diagnostics;
namespace StellaOps.Interop;
public sealed class ToolManager
{
private readonly string _workDir;
private readonly IReadOnlyDictionary<string, string> _toolPaths;
public ToolManager(string workDir, IReadOnlyDictionary<string, string>? toolPaths = null)
{
_workDir = workDir;
_toolPaths = toolPaths ?? new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
}
public async Task VerifyToolAsync(string tool, string args, CancellationToken ct = default)
{
var result = await RunAsync(tool, args, ct).ConfigureAwait(false);
if (!result.Success)
{
throw new ToolExecutionException(
$"Tool '{tool}' not available or failed to run.",
result);
}
}
public async Task<ToolResult> RunAsync(string tool, string args, CancellationToken ct = default)
{
var toolPath = ResolveToolPath(tool);
if (toolPath is null)
{
return ToolResult.Failed($"Tool not found: {tool}");
}
var startInfo = new ProcessStartInfo
{
FileName = toolPath,
Arguments = args,
WorkingDirectory = _workDir,
RedirectStandardOutput = true,
RedirectStandardError = true,
UseShellExecute = false,
CreateNoWindow = true
};
try
{
using var process = Process.Start(startInfo);
if (process is null)
{
return ToolResult.Failed($"Failed to start tool: {tool}");
}
var stdOutTask = process.StandardOutput.ReadToEndAsync(ct);
var stdErrTask = process.StandardError.ReadToEndAsync(ct);
await process.WaitForExitAsync(ct).ConfigureAwait(false);
var stdout = await stdOutTask.ConfigureAwait(false);
var stderr = await stdErrTask.ConfigureAwait(false);
return process.ExitCode == 0
? ToolResult.Ok(stdout, stderr, process.ExitCode)
: ToolResult.Failed(stderr, stdout, process.ExitCode);
}
catch (Exception ex) when (ex is InvalidOperationException or Win32Exception)
{
return ToolResult.Failed(ex.Message);
}
}
public bool IsToolAvailable(string tool) => ResolveToolPath(tool) is not null;
public string? ResolveToolPath(string tool)
{
if (_toolPaths.TryGetValue(tool, out var configured) && File.Exists(configured))
{
return configured;
}
return FindOnPath(tool);
}
public static string? FindOnPath(string tool)
{
if (File.Exists(tool))
{
return Path.GetFullPath(tool);
}
var path = Environment.GetEnvironmentVariable("PATH");
if (string.IsNullOrWhiteSpace(path))
{
return null;
}
foreach (var dir in path.Split(Path.PathSeparator))
{
if (string.IsNullOrWhiteSpace(dir))
{
continue;
}
var candidate = Path.Combine(dir, tool);
if (File.Exists(candidate))
{
return candidate;
}
if (OperatingSystem.IsWindows())
{
var exeCandidate = candidate + ".exe";
if (File.Exists(exeCandidate))
{
return exeCandidate;
}
}
}
return null;
}
}
public sealed record ToolResult(
bool Success,
int ExitCode,
string StdOut,
string StdErr,
string? Error)
{
public static ToolResult Ok(string stdout, string stderr, int exitCode)
=> new(true, exitCode, stdout, stderr, null);
public static ToolResult Failed(string error)
=> new(false, -1, string.Empty, string.Empty, error);
public static ToolResult Failed(string error, string stdout, int exitCode)
=> new(false, exitCode, stdout, error, error);
}
public sealed class ToolExecutionException : Exception
{
public ToolExecutionException(string message, ToolResult result)
: base(message)
{
Result = result;
}
public ToolResult Result { get; }
}
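// Illustrative sketch (not part of the library): checking that an external tool is present and capturing
// its version. "syft" and "--version" are placeholder examples of a tool name and argument.
internal static class ToolManagerSketch
{
public static async Task<string?> GetToolVersionAsync(string workDir, CancellationToken ct)
{
var tools = new ToolManager(workDir);
if (!tools.IsToolAvailable("syft"))
{
return null;
}
var result = await tools.RunAsync("syft", "--version", ct);
return result.Success ? result.StdOut.Trim() : null;
}
}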

View File

@@ -0,0 +1,216 @@
namespace StellaOps.Metrics.Kpi;
/// <summary>
/// Quality KPIs for explainable triage.
/// </summary>
public sealed record TriageQualityKpis
{
/// <summary>
/// Reporting period start.
/// </summary>
public required DateTimeOffset PeriodStart { get; init; }
/// <summary>
/// Reporting period end.
/// </summary>
public required DateTimeOffset PeriodEnd { get; init; }
/// <summary>
/// Tenant ID (null for global).
/// </summary>
public string? TenantId { get; init; }
/// <summary>
/// Reachability KPIs.
/// </summary>
public required ReachabilityKpis Reachability { get; init; }
/// <summary>
/// Runtime KPIs.
/// </summary>
public required RuntimeKpis Runtime { get; init; }
/// <summary>
/// Explainability KPIs.
/// </summary>
public required ExplainabilityKpis Explainability { get; init; }
/// <summary>
/// Replay/Determinism KPIs.
/// </summary>
public required ReplayKpis Replay { get; init; }
/// <summary>
/// Unknown budget KPIs.
/// </summary>
public required UnknownBudgetKpis Unknowns { get; init; }
/// <summary>
/// Operational KPIs.
/// </summary>
public required OperationalKpis Operational { get; init; }
}
public sealed record ReachabilityKpis
{
/// <summary>
/// Total findings analyzed.
/// </summary>
public required int TotalFindings { get; init; }
/// <summary>
/// Findings with non-UNKNOWN reachability.
/// </summary>
public required int WithKnownReachability { get; init; }
/// <summary>
/// Percentage with known reachability.
/// </summary>
public decimal PercentKnown => TotalFindings > 0
? (decimal)WithKnownReachability / TotalFindings * 100
: 0;
/// <summary>
/// Breakdown by reachability state.
/// </summary>
public required IReadOnlyDictionary<string, int> ByState { get; init; }
/// <summary>
/// Findings confirmed unreachable.
/// </summary>
public int ConfirmedUnreachable =>
ByState.GetValueOrDefault("ConfirmedUnreachable", 0);
/// <summary>
/// Noise reduction (unreachable / total).
/// </summary>
public decimal NoiseReductionPercent => TotalFindings > 0
? (decimal)ConfirmedUnreachable / TotalFindings * 100
: 0;
}
public sealed record RuntimeKpis
{
/// <summary>
/// Total findings in environments with sensors.
/// </summary>
public required int TotalWithSensorDeployed { get; init; }
/// <summary>
/// Findings with runtime observations.
/// </summary>
public required int WithRuntimeCorroboration { get; init; }
/// <summary>
/// Coverage percentage.
/// </summary>
public decimal CoveragePercent => TotalWithSensorDeployed > 0
? (decimal)WithRuntimeCorroboration / TotalWithSensorDeployed * 100
: 0;
/// <summary>
/// Breakdown by posture.
/// </summary>
public required IReadOnlyDictionary<string, int> ByPosture { get; init; }
}
public sealed record ExplainabilityKpis
{
/// <summary>
/// Total verdicts generated.
/// </summary>
public required int TotalVerdicts { get; init; }
/// <summary>
/// Verdicts with reason steps.
/// </summary>
public required int WithReasonSteps { get; init; }
/// <summary>
/// Verdicts with at least one proof pointer.
/// </summary>
public required int WithProofPointer { get; init; }
/// <summary>
/// Verdicts that are "complete" (both reason steps AND proof pointer).
/// </summary>
public required int FullyExplainable { get; init; }
/// <summary>
/// Explainability completeness percentage.
/// </summary>
public decimal CompletenessPercent => TotalVerdicts > 0
? (decimal)FullyExplainable / TotalVerdicts * 100
: 0;
}
public sealed record ReplayKpis
{
/// <summary>
/// Total replay attempts.
/// </summary>
public required int TotalAttempts { get; init; }
/// <summary>
/// Successful replays (identical verdict).
/// </summary>
public required int Successful { get; init; }
/// <summary>
/// Replay success rate.
/// </summary>
public decimal SuccessRate => TotalAttempts > 0
? (decimal)Successful / TotalAttempts * 100
: 0;
/// <summary>
/// Common failure reasons.
/// </summary>
public required IReadOnlyDictionary<string, int> FailureReasons { get; init; }
}
public sealed record UnknownBudgetKpis
{
/// <summary>
/// Total environments tracked.
/// </summary>
public required int TotalEnvironments { get; init; }
/// <summary>
/// Budget breaches by environment.
/// </summary>
public required IReadOnlyDictionary<string, int> BreachesByEnvironment { get; init; }
/// <summary>
/// Total overrides/exceptions granted.
/// </summary>
public required int OverridesGranted { get; init; }
/// <summary>
/// Average override age (days).
/// </summary>
public decimal AvgOverrideAgeDays { get; init; }
}
public sealed record OperationalKpis
{
/// <summary>
/// Median time to first verdict (seconds).
/// </summary>
public required double MedianTimeToVerdictSeconds { get; init; }
/// <summary>
/// Cache hit rate for graphs/proofs.
/// </summary>
public required decimal CacheHitRate { get; init; }
/// <summary>
/// Average evidence size per scan (bytes).
/// </summary>
public required long AvgEvidenceSizeBytes { get; init; }
/// <summary>
/// 95th percentile verdict time (seconds).
/// </summary>
public required double P95VerdictTimeSeconds { get; init; }
}
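// Illustrative sketch (not part of the library): how the derived reachability percentages fall out of the
// raw counters. With 200 findings, 150 known and 60 confirmed unreachable, PercentKnown is 75 and
// NoiseReductionPercent is 30. All counts here are made up.
internal static class ReachabilityKpiSketch
{
public static ReachabilityKpis Sample() => new()
{
TotalFindings = 200,
WithKnownReachability = 150,
ByState = new Dictionary<string, int>
{
["ConfirmedUnreachable"] = 60,
["Reachable"] = 90,
["Unknown"] = 50
}
};
}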

View File

@@ -0,0 +1,184 @@
using System.Collections.Immutable;
using Microsoft.Extensions.Logging;
using StellaOps.Canonicalization.Json;
using StellaOps.Canonicalization.Verification;
using StellaOps.Replay.Models;
using StellaOps.Testing.Manifests.Models;
namespace StellaOps.Replay.Engine;
/// <summary>
/// Executes scans deterministically from run manifests.
/// Enables time-travel replay for verification and auditing.
/// </summary>
public sealed class ReplayEngine : IReplayEngine
{
private readonly IFeedLoader _feedLoader;
private readonly IPolicyLoader _policyLoader;
private readonly IScannerFactory _scannerFactory;
private readonly ILogger<ReplayEngine> _logger;
public ReplayEngine(
IFeedLoader feedLoader,
IPolicyLoader policyLoader,
IScannerFactory scannerFactory,
ILogger<ReplayEngine> logger)
{
_feedLoader = feedLoader;
_policyLoader = policyLoader;
_scannerFactory = scannerFactory;
_logger = logger;
}
public async Task<ReplayResult> ReplayAsync(
RunManifest manifest,
ReplayOptions options,
CancellationToken ct = default)
{
_logger.LogInformation("Starting replay for run {RunId}", manifest.RunId);
var validationResult = ValidateManifest(manifest);
if (!validationResult.IsValid)
{
return ReplayResult.Failed(manifest.RunId, "Manifest validation failed", validationResult.Errors);
}
var feedResult = await LoadFeedSnapshotAsync(manifest.FeedSnapshot, ct).ConfigureAwait(false);
if (!feedResult.Success)
return ReplayResult.Failed(manifest.RunId, "Failed to load feed snapshot", [feedResult.Error ?? "Unknown error"]);
var policyResult = await LoadPolicySnapshotAsync(manifest.PolicySnapshot, ct).ConfigureAwait(false);
if (!policyResult.Success)
return ReplayResult.Failed(manifest.RunId, "Failed to load policy snapshot", [policyResult.Error ?? "Unknown error"]);
var scannerOptions = new ScannerOptions
{
FeedSnapshot = feedResult.Value!,
PolicySnapshot = policyResult.Value!,
CryptoProfile = manifest.CryptoProfile,
PrngSeed = manifest.PrngSeed,
FrozenTime = options.UseFrozenTime ? manifest.InitiatedAt : null,
CanonicalizationVersion = manifest.CanonicalizationVersion
};
var scanner = _scannerFactory.Create(scannerOptions);
var scanResult = await scanner.ScanAsync(manifest.ArtifactDigests, ct).ConfigureAwait(false);
var (verdictJson, verdictDigest) = CanonicalJsonSerializer.SerializeWithDigest(scanResult.Verdict);
return new ReplayResult
{
RunId = manifest.RunId,
Success = true,
VerdictJson = verdictJson,
VerdictDigest = verdictDigest,
EvidenceIndex = scanResult.EvidenceIndex,
ExecutedAt = DateTimeOffset.UtcNow,
DurationMs = scanResult.DurationMs
};
}
public DeterminismCheckResult CheckDeterminism(ReplayResult a, ReplayResult b)
{
if (a.VerdictDigest == b.VerdictDigest)
{
return new DeterminismCheckResult
{
IsDeterministic = true,
DigestA = a.VerdictDigest,
DigestB = b.VerdictDigest,
Differences = []
};
}
var differences = FindJsonDifferences(a.VerdictJson, b.VerdictJson);
return new DeterminismCheckResult
{
IsDeterministic = false,
DigestA = a.VerdictDigest,
DigestB = b.VerdictDigest,
Differences = differences
};
}
private static ValidationResult ValidateManifest(RunManifest manifest)
{
var errors = new List<string>();
if (string.IsNullOrWhiteSpace(manifest.RunId))
errors.Add("RunId is required");
if (manifest.ArtifactDigests.Length == 0)
errors.Add("At least one artifact digest required");
if (string.IsNullOrWhiteSpace(manifest.FeedSnapshot.Digest))
errors.Add("Feed snapshot digest required");
return new ValidationResult(errors.Count == 0, errors);
}
private async Task<LoadResult<FeedSnapshot>> LoadFeedSnapshotAsync(
FeedSnapshot snapshot, CancellationToken ct)
{
try
{
var feed = await _feedLoader.LoadByDigestAsync(snapshot.Digest, ct).ConfigureAwait(false);
if (!string.Equals(feed.Digest, snapshot.Digest, StringComparison.OrdinalIgnoreCase))
return LoadResult<FeedSnapshot>.Fail($"Feed digest mismatch: expected {snapshot.Digest}");
return LoadResult<FeedSnapshot>.Ok(feed);
}
catch (Exception ex)
{
return LoadResult<FeedSnapshot>.Fail($"Failed to load feed: {ex.Message}");
}
}
private async Task<LoadResult<PolicySnapshot>> LoadPolicySnapshotAsync(
PolicySnapshot snapshot, CancellationToken ct)
{
try
{
var policy = await _policyLoader.LoadByDigestAsync(snapshot.LatticeRulesDigest, ct).ConfigureAwait(false);
return LoadResult<PolicySnapshot>.Ok(policy);
}
catch (Exception ex)
{
return LoadResult<PolicySnapshot>.Fail($"Failed to load policy: {ex.Message}");
}
}
private static IReadOnlyList<JsonDifference> FindJsonDifferences(string? a, string? b)
{
if (a is null || b is null)
return [new JsonDifference("$", "One or both values are null")];
var verifier = new DeterminismVerifier();
var result = verifier.Compare(a, b);
return result.Differences.Select(d => new JsonDifference(d, "Value mismatch")).ToList();
}
}
public interface IReplayEngine
{
Task<ReplayResult> ReplayAsync(RunManifest manifest, ReplayOptions options, CancellationToken ct = default);
DeterminismCheckResult CheckDeterminism(ReplayResult a, ReplayResult b);
}
public interface IScannerFactory
{
IScanner Create(ScannerOptions options);
}
public interface IScanner
{
Task<ScanResult> ScanAsync(ImmutableArray<ArtifactDigest> artifacts, CancellationToken ct = default);
}
public interface IFeedLoader
{
Task<FeedSnapshot> LoadByDigestAsync(string digest, CancellationToken ct = default);
}
public interface IPolicyLoader
{
Task<PolicySnapshot> LoadByDigestAsync(string digest, CancellationToken ct = default);
}
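// Illustrative sketch (not part of the library): replaying the same manifest twice and asserting the verdict
// digest is stable. The engine instance is assumed to be resolved from dependency injection.
internal static class ReplayDeterminismSketch
{
public static async Task<bool> IsDeterministicAsync(
IReplayEngine engine, RunManifest manifest, CancellationToken ct)
{
var options = new ReplayOptions { UseFrozenTime = true };
var first = await engine.ReplayAsync(manifest, options, ct);
var second = await engine.ReplayAsync(manifest, options, ct);
return engine.CheckDeterminism(first, second).IsDeterministic;
}
}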

View File

@@ -0,0 +1,82 @@
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using StellaOps.Canonicalization.Json;
using StellaOps.Replay.Engine;
using StellaOps.Testing.Manifests.Models;
namespace StellaOps.Replay.Loaders;
public sealed class FeedSnapshotLoader : IFeedLoader
{
private readonly IFeedStorage _storage;
private readonly ILogger<FeedSnapshotLoader> _logger;
public FeedSnapshotLoader(IFeedStorage storage, ILogger<FeedSnapshotLoader> logger)
{
_storage = storage;
_logger = logger;
}
public async Task<FeedSnapshot> LoadByDigestAsync(string digest, CancellationToken ct = default)
{
_logger.LogDebug("Loading feed snapshot with digest {Digest}", digest);
var localPath = GetLocalPath(digest);
if (File.Exists(localPath))
{
var feed = await LoadFromFileAsync(localPath, ct).ConfigureAwait(false);
VerifyDigest(feed, digest);
return feed;
}
var storedFeed = await _storage.GetByDigestAsync(digest, ct).ConfigureAwait(false);
if (storedFeed is not null)
{
VerifyDigest(storedFeed, digest);
return storedFeed;
}
throw new FeedNotFoundException($"Feed snapshot not found: {digest}");
}
private static void VerifyDigest(FeedSnapshot feed, string expected)
{
var actual = ComputeDigest(feed);
if (!string.Equals(actual, expected, StringComparison.OrdinalIgnoreCase))
{
throw new DigestMismatchException($"Feed digest mismatch: expected {expected}, got {actual}");
}
}
private static string ComputeDigest(FeedSnapshot feed)
{
var json = CanonicalJsonSerializer.Serialize(feed);
return Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(json))).ToLowerInvariant();
}
private static string GetLocalPath(string digest) =>
Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData),
"stellaops", "feeds", digest[..2], digest);
private static async Task<FeedSnapshot> LoadFromFileAsync(string path, CancellationToken ct)
{
var json = await File.ReadAllTextAsync(path, ct).ConfigureAwait(false);
return CanonicalJsonSerializer.Deserialize<FeedSnapshot>(json);
}
}
public interface IFeedStorage
{
Task<FeedSnapshot?> GetByDigestAsync(string digest, CancellationToken ct = default);
}
public sealed class FeedNotFoundException : Exception
{
public FeedNotFoundException(string message) : base(message) { }
}
public sealed class DigestMismatchException : Exception
{
public DigestMismatchException(string message) : base(message) { }
}

View File

@@ -0,0 +1,77 @@
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using StellaOps.Canonicalization.Json;
using StellaOps.Replay.Engine;
using StellaOps.Testing.Manifests.Models;
namespace StellaOps.Replay.Loaders;
public sealed class PolicySnapshotLoader : IPolicyLoader
{
private readonly IPolicyStorage _storage;
private readonly ILogger<PolicySnapshotLoader> _logger;
public PolicySnapshotLoader(IPolicyStorage storage, ILogger<PolicySnapshotLoader> logger)
{
_storage = storage;
_logger = logger;
}
public async Task<PolicySnapshot> LoadByDigestAsync(string digest, CancellationToken ct = default)
{
_logger.LogDebug("Loading policy snapshot with digest {Digest}", digest);
var localPath = GetLocalPath(digest);
if (File.Exists(localPath))
{
var policy = await LoadFromFileAsync(localPath, ct).ConfigureAwait(false);
VerifyDigest(policy, digest);
return policy;
}
var stored = await _storage.GetByDigestAsync(digest, ct).ConfigureAwait(false);
if (stored is not null)
{
VerifyDigest(stored, digest);
return stored;
}
throw new PolicyNotFoundException($"Policy snapshot not found: {digest}");
}
private static void VerifyDigest(PolicySnapshot policy, string expected)
{
var actual = ComputeDigest(policy);
if (!string.Equals(actual, expected, StringComparison.OrdinalIgnoreCase))
{
throw new DigestMismatchException($"Policy digest mismatch: expected {expected}, got {actual}");
}
}
private static string ComputeDigest(PolicySnapshot policy)
{
var json = CanonicalJsonSerializer.Serialize(policy);
return Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(json))).ToLowerInvariant();
}
private static string GetLocalPath(string digest) =>
Path.Combine(Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData),
"stellaops", "policies", digest[..2], digest);
private static async Task<PolicySnapshot> LoadFromFileAsync(string path, CancellationToken ct)
{
var json = await File.ReadAllTextAsync(path, ct).ConfigureAwait(false);
return CanonicalJsonSerializer.Deserialize<PolicySnapshot>(json);
}
}
public interface IPolicyStorage
{
Task<PolicySnapshot?> GetByDigestAsync(string digest, CancellationToken ct = default);
}
public sealed class PolicyNotFoundException : Exception
{
public PolicyNotFoundException(string message) : base(message) { }
}

View File

@@ -0,0 +1,54 @@
using System.Collections.Immutable;
using StellaOps.Evidence.Models;
namespace StellaOps.Replay.Models;
public sealed record ReplayResult
{
public required string RunId { get; init; }
public bool Success { get; init; }
public string? VerdictJson { get; init; }
public string? VerdictDigest { get; init; }
public EvidenceIndex? EvidenceIndex { get; init; }
public DateTimeOffset ExecutedAt { get; init; }
public long DurationMs { get; init; }
public IReadOnlyList<string>? Errors { get; init; }
public static ReplayResult Failed(string runId, string message, IReadOnlyList<string> errors) =>
new()
{
RunId = runId,
Success = false,
Errors = errors.Prepend(message).ToList(),
ExecutedAt = DateTimeOffset.UtcNow
};
}
public sealed record DeterminismCheckResult
{
public bool IsDeterministic { get; init; }
public string? DigestA { get; init; }
public string? DigestB { get; init; }
public IReadOnlyList<JsonDifference> Differences { get; init; } = [];
}
public sealed record JsonDifference(string Path, string Description);
public sealed record ReplayOptions
{
public bool UseFrozenTime { get; init; } = true;
public bool VerifyDigests { get; init; } = true;
public bool CaptureEvidence { get; init; } = true;
}
public sealed record ValidationResult(bool IsValid, IReadOnlyList<string> Errors);
public sealed record LoadResult<T>
{
public bool Success { get; init; }
public T? Value { get; init; }
public string? Error { get; init; }
public static LoadResult<T> Ok(T value) => new() { Success = true, Value = value };
public static LoadResult<T> Fail(string error) => new() { Success = false, Error = error };
}

View File

@@ -0,0 +1,19 @@
using StellaOps.Evidence.Models;
using StellaOps.Testing.Manifests.Models;
namespace StellaOps.Replay.Models;
public sealed record ScanResult(
object Verdict,
EvidenceIndex? EvidenceIndex,
long DurationMs);
public sealed record ScannerOptions
{
public required FeedSnapshot FeedSnapshot { get; init; }
public required PolicySnapshot PolicySnapshot { get; init; }
public required CryptoProfile CryptoProfile { get; init; }
public long? PrngSeed { get; init; }
public DateTimeOffset? FrozenTime { get; init; }
public required string CanonicalizationVersion { get; init; }
}

View File

@@ -0,0 +1,19 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="System.Collections.Immutable" Version="9.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Canonicalization\StellaOps.Canonicalization.csproj" />
<ProjectReference Include="..\StellaOps.Testing.Manifests\StellaOps.Testing.Manifests.csproj" />
<ProjectReference Include="..\StellaOps.Evidence\StellaOps.Evidence.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,53 @@
namespace StellaOps.Testing.AirGap.Docker;
/// <summary>
/// Builds containers with network isolation for air-gap testing.
/// </summary>
public sealed class IsolatedContainerBuilder
{
/// <summary>
/// Creates a container configuration with no network access.
/// </summary>
public ContainerConfiguration CreateIsolatedConfiguration(
string image,
IReadOnlyList<string> volumes)
{
return new ContainerConfiguration
{
Image = image,
NetworkMode = "none", // No network!
Volumes = volumes,
AutoRemove = true,
Environment = new Dictionary<string, string>
{
["STELLAOPS_OFFLINE_MODE"] = "true",
["HTTP_PROXY"] = "",
["HTTPS_PROXY"] = "",
["NO_PROXY"] = "*"
}
};
}
/// <summary>
/// Verifies that a container has no network access.
/// </summary>
public async Task<bool> VerifyNoNetworkAsync(
string containerId,
CancellationToken ct = default)
{
// TODO: Implement actual container exec to test network
// For now, return true (assume configuration is correct)
await Task.CompletedTask;
return true;
}
}
public sealed record ContainerConfiguration
{
public required string Image { get; init; }
public required string NetworkMode { get; init; }
public IReadOnlyList<string> Volumes { get; init; } = [];
public bool AutoRemove { get; init; }
public IReadOnlyDictionary<string, string> Environment { get; init; } =
new Dictionary<string, string>();
}
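// Illustrative sketch (not part of the library): requesting an isolated configuration for a scanner image.
// The image name and volume mount are placeholder values.
internal static class IsolatedContainerSketch
{
public static ContainerConfiguration ScannerConfiguration() =>
new IsolatedContainerBuilder().CreateIsolatedConfiguration(
"stellaops/scanner:latest",
new[] { "/bundles/offline-bundle:/data:ro" });
}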

View File

@@ -0,0 +1,148 @@
namespace StellaOps.Testing.AirGap;
using System.Net.Sockets;
using System.Runtime.ExceptionServices;
using Xunit;
/// <summary>
/// Base class for tests that must run without network access.
/// Monitors and blocks any network calls during test execution.
/// </summary>
public abstract class NetworkIsolatedTestBase : IAsyncLifetime
{
private readonly NetworkMonitor _monitor;
private readonly List<NetworkAttempt> _blockedAttempts = [];
protected NetworkIsolatedTestBase()
{
_monitor = new NetworkMonitor(OnNetworkAttempt);
}
public virtual async Task InitializeAsync()
{
// Install network interception
await _monitor.StartMonitoringAsync();
// Configure HttpClient factory to use monitored handler
Environment.SetEnvironmentVariable("STELLAOPS_OFFLINE_MODE", "true");
// Block DNS resolution
_monitor.BlockDns();
}
public virtual async Task DisposeAsync()
{
await _monitor.StopMonitoringAsync();
// Fail test if any network calls were attempted
if (_blockedAttempts.Count > 0)
{
var attempts = string.Join("\n", _blockedAttempts.Select(a =>
$" - {a.Host}:{a.Port} at {a.Timestamp:O}\n{a.StackTrace}"));
throw new NetworkIsolationViolationException(
$"Test attempted {_blockedAttempts.Count} network call(s):\n{attempts}");
}
}
private void OnNetworkAttempt(NetworkAttempt attempt)
{
_blockedAttempts.Add(attempt);
}
/// <summary>
/// Asserts that no network calls were made during the test.
/// </summary>
protected void AssertNoNetworkCalls()
{
if (_blockedAttempts.Count > 0)
{
throw new NetworkIsolationViolationException(
$"Network isolation violated: {_blockedAttempts.Count} attempts blocked");
}
}
/// <summary>
/// Gets the offline bundle path for this test.
/// </summary>
protected string GetOfflineBundlePath() =>
Environment.GetEnvironmentVariable("STELLAOPS_OFFLINE_BUNDLE")
?? Path.Combine(AppContext.BaseDirectory, "fixtures", "offline-bundle");
}
public sealed class NetworkMonitor : IAsyncDisposable
{
private readonly Action<NetworkAttempt> _onAttempt;
private bool _isMonitoring;
private EventHandler<FirstChanceExceptionEventArgs>? _exceptionHandler;
public NetworkMonitor(Action<NetworkAttempt> onAttempt)
{
_onAttempt = onAttempt;
}
public Task StartMonitoringAsync()
{
_isMonitoring = true;
// Hook into socket creation
_exceptionHandler = OnException;
AppDomain.CurrentDomain.FirstChanceException += _exceptionHandler;
return Task.CompletedTask;
}
public Task StopMonitoringAsync()
{
_isMonitoring = false;
if (_exceptionHandler != null)
{
AppDomain.CurrentDomain.FirstChanceException -= _exceptionHandler;
}
return Task.CompletedTask;
}
public void BlockDns()
{
// Best-effort: RES_OPTIONS only influences the glibc resolver on Linux and may not affect managed DNS lookups.
Environment.SetEnvironmentVariable("RES_OPTIONS", "timeout:0 attempts:0");
}
private void OnException(object? sender, FirstChanceExceptionEventArgs e)
{
if (!_isMonitoring) return;
if (e.Exception is SocketException se)
{
_onAttempt(new NetworkAttempt(
Host: "unknown",
Port: 0,
StackTrace: se.StackTrace ?? Environment.StackTrace,
Timestamp: DateTimeOffset.UtcNow));
}
else if (e.Exception is HttpRequestException hre)
{
_onAttempt(new NetworkAttempt(
Host: hre.Message,
Port: 0,
StackTrace: hre.StackTrace ?? Environment.StackTrace,
Timestamp: DateTimeOffset.UtcNow));
}
}
public ValueTask DisposeAsync()
{
_isMonitoring = false;
return ValueTask.CompletedTask;
}
}
public sealed record NetworkAttempt(
string Host,
int Port,
string StackTrace,
DateTimeOffset Timestamp);
public sealed class NetworkIsolationViolationException : Exception
{
public NetworkIsolationViolationException(string message) : base(message) { }
}
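A minimal usage sketch (the derived test class below is illustrative and not part of this commit):
// Hypothetical consumer: inherit the base, run offline-only logic, then assert isolation.
public sealed class OfflineBundleSmokeTests : NetworkIsolatedTestBase
{
    [Fact]
    public void Verifier_RunsFromOfflineBundleOnly()
    {
        var bundlePath = GetOfflineBundlePath();
        // ... exercise code that must not reach the network ...
        AssertNoNetworkCalls();
    }
}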

View File

@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="xunit.core" Version="2.6.6" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,136 @@
using System.Collections.Immutable;
namespace StellaOps.Testing.Manifests.Models;
/// <summary>
/// Captures all inputs required to reproduce a scan verdict deterministically.
/// This is the replay key that enables time-travel verification.
/// </summary>
public sealed record RunManifest
{
/// <summary>
/// Unique identifier for this run.
/// </summary>
public required string RunId { get; init; }
/// <summary>
/// Schema version for forward compatibility.
/// </summary>
public string SchemaVersion { get; init; } = "1.0.0";
/// <summary>
/// Artifact digests being scanned (image layers, binaries, etc.).
/// </summary>
public required ImmutableArray<ArtifactDigest> ArtifactDigests { get; init; }
/// <summary>
/// SBOM digests produced or consumed during the run.
/// </summary>
public ImmutableArray<SbomReference> SbomDigests { get; init; } = [];
/// <summary>
/// Vulnerability feed snapshot used for matching.
/// </summary>
public required FeedSnapshot FeedSnapshot { get; init; }
/// <summary>
/// Policy version and lattice rules digest.
/// </summary>
public required PolicySnapshot PolicySnapshot { get; init; }
/// <summary>
/// Tool versions used in the scan pipeline.
/// </summary>
public required ToolVersions ToolVersions { get; init; }
/// <summary>
/// Cryptographic profile: trust roots, key IDs, algorithm set.
/// </summary>
public required CryptoProfile CryptoProfile { get; init; }
/// <summary>
/// Environment profile: postgres-only vs postgres+valkey.
/// </summary>
public required EnvironmentProfile EnvironmentProfile { get; init; }
/// <summary>
/// PRNG seed for any randomized operations (ensures reproducibility).
/// </summary>
public long? PrngSeed { get; init; }
/// <summary>
/// Canonicalization algorithm version for stable JSON output.
/// </summary>
public required string CanonicalizationVersion { get; init; }
/// <summary>
/// UTC timestamp when the run was initiated.
/// </summary>
public required DateTimeOffset InitiatedAt { get; init; }
/// <summary>
/// SHA-256 hash of this manifest (excluding this field).
/// </summary>
public string? ManifestDigest { get; init; }
}
/// <summary>
/// Artifact digest information.
/// </summary>
public sealed record ArtifactDigest(
string Algorithm,
string Digest,
string? MediaType,
string? Reference);
/// <summary>
/// SBOM reference information.
/// </summary>
public sealed record SbomReference(
string Format,
string Digest,
string? Uri);
/// <summary>
/// Feed snapshot reference.
/// </summary>
public sealed record FeedSnapshot(
string FeedId,
string Version,
string Digest,
DateTimeOffset SnapshotAt);
/// <summary>
/// Policy snapshot reference.
/// </summary>
public sealed record PolicySnapshot(
string PolicyVersion,
string LatticeRulesDigest,
ImmutableArray<string> EnabledRules);
/// <summary>
/// Toolchain versions used during the scan.
/// </summary>
public sealed record ToolVersions(
string ScannerVersion,
string SbomGeneratorVersion,
string ReachabilityEngineVersion,
string AttestorVersion,
ImmutableDictionary<string, string> AdditionalTools);
/// <summary>
/// Cryptographic profile for the run.
/// </summary>
public sealed record CryptoProfile(
string ProfileName,
ImmutableArray<string> TrustRootIds,
ImmutableArray<string> AllowedAlgorithms);
/// <summary>
/// Environment profile for determinism.
/// </summary>
public sealed record EnvironmentProfile(
string Name,
bool ValkeyEnabled,
string? PostgresVersion,
string? ValkeyVersion);

View File

@@ -0,0 +1,120 @@
{
"$schema": "https://json-schema.org/draft/2020-12/schema",
"$id": "https://stellaops.io/schemas/run-manifest/v1",
"title": "StellaOps Run Manifest",
"description": "Captures all inputs for deterministic scan replay",
"type": "object",
"required": [
"runId",
"schemaVersion",
"artifactDigests",
"feedSnapshot",
"policySnapshot",
"toolVersions",
"cryptoProfile",
"environmentProfile",
"canonicalizationVersion",
"initiatedAt"
],
"properties": {
"runId": { "type": "string" },
"schemaVersion": { "type": "string", "pattern": "^\\d+\\.\\d+\\.\\d+$" },
"artifactDigests": {
"type": "array",
"items": { "$ref": "#/$defs/artifactDigest" },
"minItems": 1
},
"sbomDigests": {
"type": "array",
"items": { "$ref": "#/$defs/sbomReference" }
},
"feedSnapshot": { "$ref": "#/$defs/feedSnapshot" },
"policySnapshot": { "$ref": "#/$defs/policySnapshot" },
"toolVersions": { "$ref": "#/$defs/toolVersions" },
"cryptoProfile": { "$ref": "#/$defs/cryptoProfile" },
"environmentProfile": { "$ref": "#/$defs/environmentProfile" },
"prngSeed": { "type": ["integer", "null"] },
"canonicalizationVersion": { "type": "string" },
"initiatedAt": { "type": "string", "format": "date-time" },
"manifestDigest": { "type": ["string", "null"] }
},
"$defs": {
"artifactDigest": {
"type": "object",
"required": ["algorithm", "digest"],
"properties": {
"algorithm": { "enum": ["sha256", "sha512"] },
"digest": { "type": "string", "pattern": "^[a-f0-9]{64,128}$" },
"mediaType": { "type": ["string", "null"] },
"reference": { "type": ["string", "null"] }
}
},
"sbomReference": {
"type": "object",
"required": ["format", "digest"],
"properties": {
"format": { "type": "string" },
"digest": { "type": "string" },
"uri": { "type": ["string", "null"] }
}
},
"feedSnapshot": {
"type": "object",
"required": ["feedId", "version", "digest", "snapshotAt"],
"properties": {
"feedId": { "type": "string" },
"version": { "type": "string" },
"digest": { "type": "string" },
"snapshotAt": { "type": "string", "format": "date-time" }
}
},
"policySnapshot": {
"type": "object",
"required": ["policyVersion", "latticeRulesDigest", "enabledRules"],
"properties": {
"policyVersion": { "type": "string" },
"latticeRulesDigest": { "type": "string" },
"enabledRules": {
"type": "array",
"items": { "type": "string" }
}
}
},
"toolVersions": {
"type": "object",
"required": ["scannerVersion", "sbomGeneratorVersion", "reachabilityEngineVersion", "attestorVersion", "additionalTools"],
"properties": {
"scannerVersion": { "type": "string" },
"sbomGeneratorVersion": { "type": "string" },
"reachabilityEngineVersion": { "type": "string" },
"attestorVersion": { "type": "string" },
"additionalTools": { "type": "object" }
}
},
"cryptoProfile": {
"type": "object",
"required": ["profileName", "trustRootIds", "allowedAlgorithms"],
"properties": {
"profileName": { "type": "string" },
"trustRootIds": {
"type": "array",
"items": { "type": "string" }
},
"allowedAlgorithms": {
"type": "array",
"items": { "type": "string" }
}
}
},
"environmentProfile": {
"type": "object",
"required": ["name", "valkeyEnabled"],
"properties": {
"name": { "type": "string" },
"valkeyEnabled": { "type": "boolean" },
"postgresVersion": { "type": ["string", "null"] },
"valkeyVersion": { "type": ["string", "null"] }
}
}
}
}

View File

@@ -0,0 +1,59 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Encodings.Web;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Canonical.Json;
using StellaOps.Testing.Manifests.Models;
namespace StellaOps.Testing.Manifests.Serialization;
/// <summary>
/// Serialize and hash RunManifest in canonical form.
/// </summary>
public static class RunManifestSerializer
{
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
};
/// <summary>
/// Serializes a manifest to canonical JSON.
/// </summary>
public static string Serialize(RunManifest manifest)
{
var jsonBytes = JsonSerializer.SerializeToUtf8Bytes(manifest, JsonOptions);
var canonicalBytes = CanonJson.CanonicalizeParsedJson(jsonBytes);
return Encoding.UTF8.GetString(canonicalBytes);
}
/// <summary>
/// Deserializes a manifest from JSON.
/// </summary>
public static RunManifest Deserialize(string json)
{
return JsonSerializer.Deserialize<RunManifest>(json, JsonOptions)
?? throw new InvalidOperationException("Failed to deserialize manifest");
}
/// <summary>
/// Computes the SHA-256 digest of a manifest (excluding ManifestDigest).
/// </summary>
public static string ComputeDigest(RunManifest manifest)
{
var withoutDigest = manifest with { ManifestDigest = null };
var json = Serialize(withoutDigest);
var hash = SHA256.HashData(Encoding.UTF8.GetBytes(json));
return Convert.ToHexString(hash).ToLowerInvariant();
}
/// <summary>
/// Returns a manifest with the digest computed and applied.
/// </summary>
public static RunManifest WithDigest(RunManifest manifest)
=> manifest with { ManifestDigest = ComputeDigest(manifest) };
}
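A usage sketch for sealing and re-checking a manifest digest (the `manifest` instance is assumed to exist; see the tests added later in this commit for full construction):
// Illustrative sketch: seal the manifest, then confirm the stored digest still matches.
var sealedManifest = RunManifestSerializer.WithDigest(manifest);
var recomputed = RunManifestSerializer.ComputeDigest(sealedManifest);
var intact = string.Equals(recomputed, sealedManifest.ManifestDigest, StringComparison.OrdinalIgnoreCase);
// Canonical JSON round-trips deterministically, so replay tooling can compare digests byte-for-byte.
var json = RunManifestSerializer.Serialize(sealedManifest);
var restored = RunManifestSerializer.Deserialize(json);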

View File

@@ -0,0 +1,93 @@
using System.Collections.Immutable;
using StellaOps.Testing.Manifests.Models;
using StellaOps.Testing.Manifests.Serialization;
namespace StellaOps.Testing.Manifests.Services;
/// <summary>
/// Captures a RunManifest during scan execution.
/// </summary>
public sealed class ManifestCaptureService : IManifestCaptureService
{
private readonly IFeedVersionProvider _feedProvider;
private readonly IPolicyVersionProvider _policyProvider;
private readonly TimeProvider _timeProvider;
public ManifestCaptureService(
IFeedVersionProvider feedProvider,
IPolicyVersionProvider policyProvider,
TimeProvider? timeProvider = null)
{
_feedProvider = feedProvider;
_policyProvider = policyProvider;
_timeProvider = timeProvider ?? TimeProvider.System;
}
public async Task<RunManifest> CaptureAsync(
ScanContext context,
CancellationToken ct = default)
{
var feedSnapshot = await _feedProvider.GetCurrentSnapshotAsync(ct).ConfigureAwait(false);
var policySnapshot = await _policyProvider.GetCurrentSnapshotAsync(ct).ConfigureAwait(false);
var manifest = new RunManifest
{
RunId = context.RunId,
SchemaVersion = "1.0.0",
ArtifactDigests = context.ArtifactDigests,
SbomDigests = context.GeneratedSboms,
FeedSnapshot = feedSnapshot,
PolicySnapshot = policySnapshot,
ToolVersions = context.ToolVersions ?? GetToolVersions(),
CryptoProfile = context.CryptoProfile,
EnvironmentProfile = context.EnvironmentProfile ?? GetEnvironmentProfile(),
PrngSeed = context.PrngSeed,
CanonicalizationVersion = "1.0.0",
InitiatedAt = _timeProvider.GetUtcNow()
};
return RunManifestSerializer.WithDigest(manifest);
}
private static ToolVersions GetToolVersions() => new(
ScannerVersion: typeof(ManifestCaptureService).Assembly.GetName().Version?.ToString() ?? "unknown",
SbomGeneratorVersion: "unknown",
ReachabilityEngineVersion: "unknown",
AttestorVersion: "unknown",
AdditionalTools: ImmutableDictionary<string, string>.Empty);
private static EnvironmentProfile GetEnvironmentProfile() => new(
Name: Environment.GetEnvironmentVariable("STELLAOPS_ENV_PROFILE") ?? "postgres-only",
ValkeyEnabled: string.Equals(Environment.GetEnvironmentVariable("STELLAOPS_VALKEY_ENABLED"), "true", StringComparison.OrdinalIgnoreCase),
PostgresVersion: Environment.GetEnvironmentVariable("STELLAOPS_POSTGRES_VERSION"),
ValkeyVersion: Environment.GetEnvironmentVariable("STELLAOPS_VALKEY_VERSION"));
}
public interface IManifestCaptureService
{
Task<RunManifest> CaptureAsync(ScanContext context, CancellationToken ct = default);
}
public interface IFeedVersionProvider
{
Task<FeedSnapshot> GetCurrentSnapshotAsync(CancellationToken ct = default);
}
public interface IPolicyVersionProvider
{
Task<PolicySnapshot> GetCurrentSnapshotAsync(CancellationToken ct = default);
}
/// <summary>
/// Input context required to capture a RunManifest.
/// </summary>
public sealed record ScanContext
{
public required string RunId { get; init; }
public required ImmutableArray<ArtifactDigest> ArtifactDigests { get; init; }
public ImmutableArray<SbomReference> GeneratedSboms { get; init; } = [];
public required CryptoProfile CryptoProfile { get; init; }
public ToolVersions? ToolVersions { get; init; }
public EnvironmentProfile? EnvironmentProfile { get; init; }
public long? PrngSeed { get; init; }
}
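A capture sketch, assuming the host already supplies IFeedVersionProvider and IPolicyVersionProvider implementations (feedProvider, policyProvider, imageDigest, imageRef, and cancellationToken below are placeholders):
// Illustrative sketch: capture a sealed RunManifest at scan time.
IManifestCaptureService capture = new ManifestCaptureService(feedProvider, policyProvider, TimeProvider.System);
var context = new ScanContext
{
    RunId = Guid.NewGuid().ToString(),
    ArtifactDigests = [new ArtifactDigest("sha256", imageDigest, "application/vnd.oci.image.manifest.v1+json", imageRef)],
    CryptoProfile = new CryptoProfile("default", ["root-1"], ["sha256"]),
    PrngSeed = 1234
};
var manifest = await capture.CaptureAsync(context, cancellationToken);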

View File

@@ -0,0 +1,21 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Json.Schema.Net" Version="7.2.0" />
<PackageReference Include="System.Collections.Immutable" Version="9.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Canonical.Json\StellaOps.Canonical.Json.csproj" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="Schemas\*.json" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,64 @@
using System.Text.Json.Nodes;
using Json.Schema;
using StellaOps.Testing.Manifests.Models;
using StellaOps.Testing.Manifests.Serialization;
namespace StellaOps.Testing.Manifests.Validation;
/// <summary>
/// Validates RunManifest instances against schema and invariants.
/// </summary>
public sealed class RunManifestValidator : IRunManifestValidator
{
private readonly JsonSchema _schema;
public RunManifestValidator()
{
var schemaJson = SchemaLoader.LoadSchema("run-manifest.schema.json");
_schema = JsonSchema.FromText(schemaJson);
}
public ValidationResult Validate(RunManifest manifest)
{
var errors = new List<ValidationError>();
var json = RunManifestSerializer.Serialize(manifest);
var schemaResult = _schema.Evaluate(JsonNode.Parse(json), new EvaluationOptions { OutputFormat = OutputFormat.List });
if (!schemaResult.IsValid)
{
foreach (var detail in schemaResult.Details.Where(d => d.Errors is not null))
{
foreach (var error in detail.Errors!)
{
errors.Add(new ValidationError("Schema", error.Value));
}
}
}
if (manifest.ArtifactDigests.Length == 0)
{
errors.Add(new ValidationError("ArtifactDigests", "At least one artifact required"));
}
if (manifest.FeedSnapshot.SnapshotAt > manifest.InitiatedAt)
{
errors.Add(new ValidationError("FeedSnapshot", "Feed snapshot cannot be after run initiation"));
}
if (manifest.ManifestDigest is not null)
{
var computed = RunManifestSerializer.ComputeDigest(manifest);
if (!string.Equals(computed, manifest.ManifestDigest, StringComparison.OrdinalIgnoreCase))
{
errors.Add(new ValidationError("ManifestDigest", "Digest mismatch"));
}
}
return new ValidationResult(errors.Count == 0, errors);
}
}
public interface IRunManifestValidator
{
ValidationResult Validate(RunManifest manifest);
}
public sealed record ValidationResult(bool IsValid, IReadOnlyList<ValidationError> Errors);
public sealed record ValidationError(string Field, string Message);
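A validation sketch combining the serializer's digest sealing with the validator (the `manifest` instance is a placeholder):
// Illustrative sketch: schema, invariant, and digest checks in one pass.
IRunManifestValidator validator = new RunManifestValidator();
var result = validator.Validate(RunManifestSerializer.WithDigest(manifest));
if (!result.IsValid)
{
    foreach (var error in result.Errors)
    {
        Console.Error.WriteLine($"{error.Field}: {error.Message}");
    }
}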

View File

@@ -0,0 +1,27 @@
using System.Reflection;
namespace StellaOps.Testing.Manifests.Validation;
internal static class SchemaLoader
{
public static string LoadSchema(string fileName)
{
var assembly = Assembly.GetExecutingAssembly();
var resourceName = assembly.GetManifestResourceNames()
.FirstOrDefault(name => name.EndsWith(fileName, StringComparison.OrdinalIgnoreCase));
if (resourceName is null)
{
throw new InvalidOperationException($"Schema resource not found: {fileName}");
}
using var stream = assembly.GetManifestResourceStream(resourceName);
if (stream is null)
{
throw new InvalidOperationException($"Schema resource not available: {resourceName}");
}
using var reader = new StreamReader(stream);
return reader.ReadToEnd();
}
}

View File

@@ -0,0 +1,58 @@
using FluentAssertions;
using StellaOps.Canonicalization.Json;
using StellaOps.Canonicalization.Ordering;
using Xunit;
namespace StellaOps.Canonicalization.Tests;
public class CanonicalJsonSerializerTests
{
[Fact]
public void Serialize_Dictionary_OrdersKeysAlphabetically()
{
var dict = new Dictionary<string, int> { ["z"] = 1, ["a"] = 2, ["m"] = 3 };
var json = CanonicalJsonSerializer.Serialize(dict);
json.Should().Be("{\"a\":2,\"m\":3,\"z\":1}");
}
[Fact]
public void Serialize_DateTimeOffset_UsesUtcIso8601()
{
var dt = new DateTimeOffset(2024, 1, 15, 10, 30, 0, TimeSpan.FromHours(5));
var obj = new { Timestamp = dt };
var json = CanonicalJsonSerializer.Serialize(obj);
json.Should().Contain("2024-01-15T05:30:00.000Z");
}
[Fact]
public void Serialize_NullValues_AreOmitted()
{
var obj = new { Name = "test", Value = (string?)null };
var json = CanonicalJsonSerializer.Serialize(obj);
json.Should().NotContain("value");
}
[Fact]
public void SerializeWithDigest_ProducesConsistentDigest()
{
var obj = new { Name = "test", Value = 123 };
var (_, digest1) = CanonicalJsonSerializer.SerializeWithDigest(obj);
var (_, digest2) = CanonicalJsonSerializer.SerializeWithDigest(obj);
digest1.Should().Be(digest2);
}
}
public class PackageOrdererTests
{
[Fact]
public void StableOrder_OrdersByPurlFirst()
{
var packages = new[]
{
(purl: "pkg:npm/b@1.0.0", name: "b", version: "1.0.0"),
(purl: "pkg:npm/a@1.0.0", name: "a", version: "1.0.0")
};
var ordered = packages.StableOrder(p => p.purl, p => p.name, p => p.version, _ => null).ToList();
ordered[0].purl.Should().Be("pkg:npm/a@1.0.0");
}
}

View File

@@ -0,0 +1,46 @@
using FsCheck;
using FsCheck.Xunit;
using StellaOps.Canonicalization.Json;
using StellaOps.Canonicalization.Ordering;
namespace StellaOps.Canonicalization.Tests.Properties;
public class CanonicalJsonProperties
{
[Property]
public Property Serialize_IsIdempotent(Dictionary<string, int> dict)
{
var json1 = CanonicalJsonSerializer.Serialize(dict);
var json2 = CanonicalJsonSerializer.Serialize(dict);
return (json1 == json2).ToProperty();
}
[Property]
public Property Serialize_OrderIndependent(Dictionary<string, int> dict)
{
var reversed = dict.Reverse().ToDictionary(x => x.Key, x => x.Value);
var json1 = CanonicalJsonSerializer.Serialize(dict);
var json2 = CanonicalJsonSerializer.Serialize(reversed);
return (json1 == json2).ToProperty();
}
[Property]
public Property Digest_IsDeterministic(string input)
{
var obj = new { Value = input ?? string.Empty };
var (_, digest1) = CanonicalJsonSerializer.SerializeWithDigest(obj);
var (_, digest2) = CanonicalJsonSerializer.SerializeWithDigest(obj);
return (digest1 == digest2).ToProperty();
}
}
public class OrderingProperties
{
[Property]
public Property PackageOrdering_IsStable(List<(string purl, string name, string version)> packages)
{
var ordered1 = packages.StableOrder(p => p.purl, p => p.name, p => p.version, _ => null).ToList();
var ordered2 = packages.StableOrder(p => p.purl, p => p.name, p => p.version, _ => null).ToList();
return ordered1.SequenceEqual(ordered2).ToProperty();
}
}

View File

@@ -0,0 +1,21 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FsCheck.Xunit" Version="2.16.6" />
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="xunit" Version="2.9.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.7">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\StellaOps.Canonicalization\StellaOps.Canonicalization.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,160 @@
using System.Collections.Immutable;
using System.Text;
using FluentAssertions;
using StellaOps.DeltaVerdict.Engine;
using StellaOps.DeltaVerdict.Models;
using StellaOps.DeltaVerdict.Policy;
using StellaOps.DeltaVerdict.Serialization;
using StellaOps.DeltaVerdict.Signing;
using Xunit;
namespace StellaOps.DeltaVerdict.Tests;
public class DeltaVerdictTests
{
[Fact]
public void ComputeDelta_TracksComponentAndVulnerabilityChanges()
{
var baseVerdict = CreateVerdict(
verdictId: "base",
riskScore: 10,
components:
[
new Component("pkg:apk/openssl@1.0", "openssl", "1.0", "apk", ["CVE-1"])
],
vulnerabilities:
[
new Vulnerability("CVE-1", "high", 7.1m, "pkg:apk/openssl@1.0", "reachable", "open")
]);
var headVerdict = CreateVerdict(
verdictId: "head",
riskScore: 20,
components:
[
new Component("pkg:apk/openssl@1.1", "openssl", "1.1", "apk", ["CVE-2"]),
new Component("pkg:apk/zlib@2.0", "zlib", "2.0", "apk", [])
],
vulnerabilities:
[
new Vulnerability("CVE-2", "critical", 9.5m, "pkg:apk/openssl@1.1", "reachable", "open")
]);
var engine = new DeltaComputationEngine(new FakeTimeProvider());
var delta = engine.ComputeDelta(baseVerdict, headVerdict);
delta.AddedComponents.Should().Contain(c => c.Purl == "pkg:apk/zlib@2.0");
delta.RemovedComponents.Should().Contain(c => c.Purl == "pkg:apk/openssl@1.0");
delta.ChangedComponents.Should().Contain(c => c.Purl == "pkg:apk/openssl@1.0");
delta.AddedVulnerabilities.Should().Contain(v => v.VulnerabilityId == "CVE-2");
delta.RemovedVulnerabilities.Should().Contain(v => v.VulnerabilityId == "CVE-1");
delta.RiskScoreDelta.Change.Should().Be(10);
delta.Summary.TotalChanges.Should().BeGreaterThan(0);
}
[Fact]
public void RiskBudgetEvaluator_FlagsCriticalViolations()
{
var delta = new DeltaVerdict.Models.DeltaVerdict
{
DeltaId = "delta",
SchemaVersion = "1.0.0",
BaseVerdict = new VerdictReference("base", null, null, DateTimeOffset.UnixEpoch),
HeadVerdict = new VerdictReference("head", null, null, DateTimeOffset.UnixEpoch),
AddedVulnerabilities = [new VulnerabilityDelta("CVE-9", "critical", 9.9m, null, "reachable")],
RemovedVulnerabilities = [],
AddedComponents = [],
RemovedComponents = [],
ChangedComponents = [],
ChangedVulnerabilityStatuses = [],
RiskScoreDelta = new RiskScoreDelta(10, 15, 5, 50, RiskTrend.Degraded),
Summary = new DeltaSummary(0, 0, 0, 1, 0, 0, 1, DeltaMagnitude.Minimal),
ComputedAt = DateTimeOffset.UnixEpoch
};
var budget = new RiskBudget
{
MaxNewCriticalVulnerabilities = 0,
MaxRiskScoreIncrease = 2
};
var evaluator = new RiskBudgetEvaluator();
var result = evaluator.Evaluate(delta, budget);
result.IsWithinBudget.Should().BeFalse();
result.Violations.Should().NotBeEmpty();
}
[Fact]
public async Task SigningService_RoundTrip_VerifiesEnvelope()
{
var delta = new DeltaVerdict.Models.DeltaVerdict
{
DeltaId = "delta",
SchemaVersion = "1.0.0",
BaseVerdict = new VerdictReference("base", null, null, DateTimeOffset.UnixEpoch),
HeadVerdict = new VerdictReference("head", null, null, DateTimeOffset.UnixEpoch),
AddedComponents = [],
RemovedComponents = [],
ChangedComponents = [],
AddedVulnerabilities = [],
RemovedVulnerabilities = [],
ChangedVulnerabilityStatuses = [],
RiskScoreDelta = new RiskScoreDelta(0, 0, 0, 0, RiskTrend.Stable),
Summary = new DeltaSummary(0, 0, 0, 0, 0, 0, 0, DeltaMagnitude.None),
ComputedAt = DateTimeOffset.UnixEpoch
};
var service = new DeltaSigningService();
var options = new SigningOptions
{
KeyId = "test-key",
SecretBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes("delta-secret"))
};
var signed = await service.SignAsync(delta, options);
var verify = await service.VerifyAsync(signed, new VerificationOptions
{
KeyId = "test-key",
SecretBase64 = options.SecretBase64
});
verify.IsValid.Should().BeTrue();
}
[Fact]
public void Serializer_ComputesDeterministicDigest()
{
var verdict = CreateVerdict(
verdictId: "verdict",
riskScore: 0,
components: [],
vulnerabilities: []);
var withDigest = VerdictSerializer.WithDigest(verdict);
withDigest.Digest.Should().NotBeNullOrWhiteSpace();
}
private static Verdict CreateVerdict(
string verdictId,
decimal riskScore,
ImmutableArray<Component> components,
ImmutableArray<Vulnerability> vulnerabilities)
{
return new Verdict
{
VerdictId = verdictId,
Digest = null,
ArtifactRef = "local",
ScannedAt = DateTimeOffset.UnixEpoch,
RiskScore = riskScore,
Components = components,
Vulnerabilities = vulnerabilities
};
}
private sealed class FakeTimeProvider : TimeProvider
{
public override DateTimeOffset GetUtcNow() => DateTimeOffset.UnixEpoch;
}
}

View File

@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="xunit" Version="2.9.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.7">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\StellaOps.DeltaVerdict\StellaOps.DeltaVerdict.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,240 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Moq;
using StellaOps.Evidence.Budgets;
using Xunit;
namespace StellaOps.Evidence.Tests.Budgets;
public class EvidenceBudgetServiceTests
{
private readonly Mock<IEvidenceRepository> _repository = new();
private readonly Mock<IOptionsMonitor<EvidenceBudget>> _options = new();
private readonly EvidenceBudgetService _service;
public EvidenceBudgetServiceTests()
{
_options.Setup(o => o.CurrentValue).Returns(EvidenceBudget.Default);
_service = new EvidenceBudgetService(
_repository.Object,
_options.Object,
NullLogger<EvidenceBudgetService>.Instance);
// Default setup: empty scan
_repository.Setup(r => r.GetByScanIdAsync(It.IsAny<Guid>(), It.IsAny<CancellationToken>()))
.ReturnsAsync(new List<EvidenceItem>());
}
[Fact]
public void CheckBudget_WithinLimit_ReturnsSuccess()
{
var scanId = Guid.NewGuid();
var item = CreateItem(type: EvidenceType.CallGraph, sizeBytes: 1024);
var result = _service.CheckBudget(scanId, item);
result.IsWithinBudget.Should().BeTrue();
result.Issues.Should().BeEmpty();
}
[Fact]
public void CheckBudget_ExceedsTotal_ReturnsViolation()
{
var scanId = SetupScanAtBudgetLimit();
var item = CreateItem(type: EvidenceType.CallGraph, sizeBytes: 10 * 1024 * 1024); // 10 MB over
var result = _service.CheckBudget(scanId, item);
result.IsWithinBudget.Should().BeFalse();
result.Issues.Should().Contain(i => i.Contains("total budget"));
result.BytesToFree.Should().BeGreaterThan(0);
}
[Fact]
public void CheckBudget_ExceedsTypeLimit_ReturnsViolation()
{
var scanId = Guid.NewGuid();
var existingCallGraph = CreateItem(type: EvidenceType.CallGraph, sizeBytes: 49 * 1024 * 1024);
_repository.Setup(r => r.GetByScanIdAsync(scanId, It.IsAny<CancellationToken>()))
.ReturnsAsync(new List<EvidenceItem> { existingCallGraph });
// CallGraph limit is 50MB, adding 2MB would exceed
var item = CreateItem(type: EvidenceType.CallGraph, sizeBytes: 2 * 1024 * 1024);
var result = _service.CheckBudget(scanId, item);
result.IsWithinBudget.Should().BeFalse();
result.Issues.Should().Contain(i => i.Contains("CallGraph budget"));
}
[Fact]
public async Task PruneToFitAsync_NoExcess_NoPruning()
{
var scanId = Guid.NewGuid();
var items = new List<EvidenceItem>
{
CreateItem(type: EvidenceType.Sbom, sizeBytes: 5 * 1024 * 1024)
};
_repository.Setup(r => r.GetByScanIdAsync(scanId, It.IsAny<CancellationToken>()))
.ReturnsAsync(items);
var result = await _service.PruneToFitAsync(scanId, 50 * 1024 * 1024, CancellationToken.None);
result.Success.Should().BeTrue();
result.BytesPruned.Should().Be(0);
result.ItemsPruned.Should().BeEmpty();
}
[Fact]
public async Task PruneToFitAsync_PreservesAlwaysPreserveTypes()
{
var scanId = SetupScanOverBudget();
var result = await _service.PruneToFitAsync(scanId, 50 * 1024 * 1024, CancellationToken.None);
result.ItemsPruned.Should().NotContain(i => i.Type == EvidenceType.Verdict);
result.ItemsPruned.Should().NotContain(i => i.Type == EvidenceType.Attestation);
}
[Fact]
public async Task PruneToFitAsync_PrunesLowestPriorityFirst()
{
var scanId = Guid.NewGuid();
var items = new List<EvidenceItem>
{
CreateItem(id: Guid.NewGuid(), type: EvidenceType.RuntimeCapture, sizeBytes: 10 * 1024 * 1024), // Priority 1
CreateItem(id: Guid.NewGuid(), type: EvidenceType.CallGraph, sizeBytes: 10 * 1024 * 1024), // Priority 2
CreateItem(id: Guid.NewGuid(), type: EvidenceType.Sbom, sizeBytes: 10 * 1024 * 1024), // Priority 6
CreateItem(id: Guid.NewGuid(), type: EvidenceType.Verdict, sizeBytes: 1 * 1024 * 1024) // Priority 9 (never prune)
};
_repository.Setup(r => r.GetByScanIdAsync(scanId, It.IsAny<CancellationToken>()))
.ReturnsAsync(items);
// Prune to 20MB (need to remove 11MB)
var result = await _service.PruneToFitAsync(scanId, 20 * 1024 * 1024, CancellationToken.None);
result.Success.Should().BeTrue();
result.ItemsPruned.Should().HaveCount(2);
result.ItemsPruned[0].Type.Should().Be(EvidenceType.RuntimeCapture); // Pruned first
result.ItemsPruned[1].Type.Should().Be(EvidenceType.CallGraph); // Pruned second
}
[Fact]
public void GetBudgetStatus_CalculatesUtilization()
{
var scanId = Guid.NewGuid();
var items = new List<EvidenceItem>
{
CreateItem(type: EvidenceType.CallGraph, sizeBytes: 25 * 1024 * 1024), // 25 MB
CreateItem(type: EvidenceType.Sbom, sizeBytes: 5 * 1024 * 1024) // 5 MB
};
_repository.Setup(r => r.GetByScanIdAsync(scanId, It.IsAny<CancellationToken>()))
.ReturnsAsync(items);
var status = _service.GetBudgetStatus(scanId);
status.ScanId.Should().Be(scanId);
status.TotalBudgetBytes.Should().Be(100 * 1024 * 1024); // 100 MB
status.UsedBytes.Should().Be(30 * 1024 * 1024); // 30 MB
status.RemainingBytes.Should().Be(70 * 1024 * 1024); // 70 MB
status.UtilizationPercent.Should().Be(30); // 30%
}
[Fact]
public void GetBudgetStatus_CalculatesPerTypeUtilization()
{
var scanId = Guid.NewGuid();
var items = new List<EvidenceItem>
{
CreateItem(type: EvidenceType.CallGraph, sizeBytes: 25 * 1024 * 1024) // 25 of 50 MB limit
};
_repository.Setup(r => r.GetByScanIdAsync(scanId, It.IsAny<CancellationToken>()))
.ReturnsAsync(items);
var status = _service.GetBudgetStatus(scanId);
status.ByType.Should().ContainKey(EvidenceType.CallGraph);
var callGraphStatus = status.ByType[EvidenceType.CallGraph];
callGraphStatus.UsedBytes.Should().Be(25 * 1024 * 1024);
callGraphStatus.LimitBytes.Should().Be(50 * 1024 * 1024);
callGraphStatus.UtilizationPercent.Should().Be(50);
}
[Fact]
public void CheckBudget_AutoPruneAction_SetsCanAutoPrune()
{
var budget = new EvidenceBudget
{
MaxScanSizeBytes = 1024,
RetentionPolicies = EvidenceBudget.Default.RetentionPolicies,
ExceededAction = BudgetExceededAction.AutoPrune
};
_options.Setup(o => o.CurrentValue).Returns(budget);
var scanId = Guid.NewGuid();
var items = new List<EvidenceItem>
{
CreateItem(type: EvidenceType.Sbom, sizeBytes: 1000)
};
_repository.Setup(r => r.GetByScanIdAsync(scanId, It.IsAny<CancellationToken>()))
.ReturnsAsync(items);
var item = CreateItem(type: EvidenceType.CallGraph, sizeBytes: 100);
var result = _service.CheckBudget(scanId, item);
result.IsWithinBudget.Should().BeFalse();
result.RecommendedAction.Should().Be(BudgetExceededAction.AutoPrune);
result.CanAutoPrune.Should().BeTrue();
}
private Guid SetupScanAtBudgetLimit()
{
var scanId = Guid.NewGuid();
var items = new List<EvidenceItem>
{
CreateItem(type: EvidenceType.CallGraph, sizeBytes: 50 * 1024 * 1024),
CreateItem(type: EvidenceType.RuntimeCapture, sizeBytes: 20 * 1024 * 1024),
CreateItem(type: EvidenceType.Sbom, sizeBytes: 10 * 1024 * 1024),
CreateItem(type: EvidenceType.PolicyTrace, sizeBytes: 5 * 1024 * 1024),
CreateItem(type: EvidenceType.Verdict, sizeBytes: 5 * 1024 * 1024),
CreateItem(type: EvidenceType.Advisory, sizeBytes: 10 * 1024 * 1024)
};
_repository.Setup(r => r.GetByScanIdAsync(scanId, It.IsAny<CancellationToken>()))
.ReturnsAsync(items);
return scanId;
}
private Guid SetupScanOverBudget()
{
var scanId = Guid.NewGuid();
var items = new List<EvidenceItem>
{
CreateItem(type: EvidenceType.CallGraph, sizeBytes: 40 * 1024 * 1024),
CreateItem(type: EvidenceType.RuntimeCapture, sizeBytes: 30 * 1024 * 1024),
CreateItem(type: EvidenceType.Sbom, sizeBytes: 20 * 1024 * 1024),
CreateItem(type: EvidenceType.PolicyTrace, sizeBytes: 10 * 1024 * 1024),
CreateItem(type: EvidenceType.Verdict, sizeBytes: 5 * 1024 * 1024),
CreateItem(type: EvidenceType.Attestation, sizeBytes: 5 * 1024 * 1024)
};
_repository.Setup(r => r.GetByScanIdAsync(scanId, It.IsAny<CancellationToken>()))
.ReturnsAsync(items);
return scanId;
}
private static EvidenceItem CreateItem(
Guid? id = null,
EvidenceType type = EvidenceType.CallGraph,
long sizeBytes = 1024)
{
return new EvidenceItem
{
Id = id ?? Guid.NewGuid(),
ScanId = Guid.NewGuid(),
Type = type,
SizeBytes = sizeBytes,
Tier = RetentionTier.Hot,
CreatedAt = DateTimeOffset.UtcNow
};
}
}

View File

@@ -0,0 +1,82 @@
using System.Collections.Immutable;
using FluentAssertions;
using StellaOps.Evidence.Models;
using StellaOps.Evidence.Serialization;
using StellaOps.Evidence.Services;
using StellaOps.Evidence.Validation;
using Xunit;
namespace StellaOps.Evidence.Tests;
public class EvidenceIndexTests
{
[Fact]
public void EvidenceLinker_BuildsIndexWithDigest()
{
var linker = new EvidenceLinker();
linker.SetToolChain(CreateToolChain());
linker.AddSbom(new SbomEvidence("sbom-1", "cyclonedx-1.6", new string('a', 64), null, 10, DateTimeOffset.UtcNow));
linker.AddAttestation(new AttestationEvidence("att-1", "sbom", new string('b', 64), "key", true, DateTimeOffset.UtcNow, null));
var index = linker.Build(new VerdictReference("verdict-1", new string('c', 64), VerdictOutcome.Pass, "1.0.0"), "digest");
index.IndexDigest.Should().NotBeNullOrEmpty();
index.Sboms.Should().HaveCount(1);
}
[Fact]
public void EvidenceValidator_FlagsMissingSbom()
{
var index = CreateIndex() with { Sboms = [] };
var validator = new EvidenceIndexValidator();
var result = validator.Validate(index);
result.IsValid.Should().BeFalse();
}
[Fact]
public void EvidenceSerializer_RoundTrip_PreservesFields()
{
var index = CreateIndex();
var json = EvidenceIndexSerializer.Serialize(index);
var deserialized = EvidenceIndexSerializer.Deserialize(json);
deserialized.Should().BeEquivalentTo(index);
}
[Fact]
public void EvidenceQueryService_BuildsSummary()
{
var index = CreateIndex();
var service = new EvidenceQueryService();
var report = service.BuildChainReport(index);
report.SbomCount.Should().Be(1);
report.AttestationCount.Should().Be(1);
}
private static EvidenceIndex CreateIndex()
{
return new EvidenceIndex
{
IndexId = Guid.NewGuid().ToString(),
SchemaVersion = "1.0.0",
Verdict = new VerdictReference("verdict-1", new string('c', 64), VerdictOutcome.Pass, "1.0.0"),
Sboms = ImmutableArray.Create(new SbomEvidence("sbom-1", "cyclonedx-1.6", new string('a', 64), null, 10, DateTimeOffset.UtcNow)),
Attestations = ImmutableArray.Create(new AttestationEvidence("att-1", "sbom", new string('b', 64), "key", true, DateTimeOffset.UtcNow, null)),
VexDocuments = ImmutableArray.Create(new VexEvidence("vex-1", "openvex", new string('d', 64), "vendor", 1, ImmutableArray.Create("CVE-2024-0001"))),
ReachabilityProofs = ImmutableArray.Create(new ReachabilityEvidence("proof-1", "CVE-2024-0001", "pkg:npm/foo@1.0.0", ReachabilityStatus.Reachable, "main", ImmutableArray.Create("main"), new string('e', 64))),
Unknowns = ImmutableArray.Create(new UnknownEvidence("unk-1", "U-RCH", "Reachability inconclusive", "pkg:npm/foo", "CVE-2024-0001", UnknownSeverity.Medium)),
ToolChain = CreateToolChain(),
RunManifestDigest = new string('f', 64),
CreatedAt = DateTimeOffset.UtcNow
};
}
private static ToolChainEvidence CreateToolChain() => new(
"1.0.0",
"1.0.0",
"1.0.0",
"1.0.0",
"1.0.0",
ImmutableDictionary<string, string>.Empty);
}

View File

@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="xunit" Version="2.9.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.7">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\StellaOps.Evidence\StellaOps.Evidence.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,150 @@
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Evidence.Models;
using StellaOps.Replay.Engine;
using StellaOps.Replay.Models;
using StellaOps.Testing.Manifests.Models;
using Xunit;
namespace StellaOps.Replay.Tests;
public class ReplayEngineTests
{
[Fact]
public async Task Replay_SameManifest_ProducesIdenticalVerdict()
{
var manifest = CreateManifest();
var engine = CreateEngine();
var result1 = await engine.ReplayAsync(manifest, new ReplayOptions());
var result2 = await engine.ReplayAsync(manifest, new ReplayOptions());
result1.VerdictDigest.Should().Be(result2.VerdictDigest);
}
[Fact]
public async Task Replay_DifferentManifest_ProducesDifferentVerdict()
{
var manifest1 = CreateManifest();
var manifest2 = manifest1 with
{
FeedSnapshot = manifest1.FeedSnapshot with { Version = "v2" }
};
var engine = CreateEngine();
var result1 = await engine.ReplayAsync(manifest1, new ReplayOptions());
var result2 = await engine.ReplayAsync(manifest2, new ReplayOptions());
result1.VerdictDigest.Should().NotBe(result2.VerdictDigest);
}
[Fact]
public void CheckDeterminism_IdenticalResults_ReturnsTrue()
{
var engine = CreateEngine();
var result1 = new ReplayResult { RunId = "1", VerdictDigest = "abc123", Success = true, ExecutedAt = DateTimeOffset.UtcNow };
var result2 = new ReplayResult { RunId = "1", VerdictDigest = "abc123", Success = true, ExecutedAt = DateTimeOffset.UtcNow };
var check = engine.CheckDeterminism(result1, result2);
check.IsDeterministic.Should().BeTrue();
}
[Fact]
public void CheckDeterminism_DifferentResults_ReturnsDifferences()
{
var engine = CreateEngine();
var result1 = new ReplayResult
{
RunId = "1",
VerdictJson = "{\"score\":100}",
VerdictDigest = "abc123",
Success = true,
ExecutedAt = DateTimeOffset.UtcNow
};
var result2 = new ReplayResult
{
RunId = "1",
VerdictJson = "{\"score\":99}",
VerdictDigest = "def456",
Success = true,
ExecutedAt = DateTimeOffset.UtcNow
};
var check = engine.CheckDeterminism(result1, result2);
check.IsDeterministic.Should().BeFalse();
check.Differences.Should().NotBeEmpty();
}
private static ReplayEngine CreateEngine()
{
return new ReplayEngine(
new FakeFeedLoader(),
new FakePolicyLoader(),
new FakeScannerFactory(),
NullLogger<ReplayEngine>.Instance);
}
private static RunManifest CreateManifest()
{
return new RunManifest
{
RunId = Guid.NewGuid().ToString(),
SchemaVersion = "1.0.0",
ArtifactDigests = ImmutableArray.Create(new ArtifactDigest("sha256", new string('a', 64), null, null)),
FeedSnapshot = new FeedSnapshot("nvd", "v1", new string('b', 64), DateTimeOffset.UtcNow.AddHours(-1)),
PolicySnapshot = new PolicySnapshot("1.0.0", new string('c', 64), ImmutableArray<string>.Empty),
ToolVersions = new ToolVersions("1.0.0", "1.0.0", "1.0.0", "1.0.0", ImmutableDictionary<string, string>.Empty),
CryptoProfile = new CryptoProfile("default", ImmutableArray<string>.Empty, ImmutableArray<string>.Empty),
EnvironmentProfile = new EnvironmentProfile("postgres-only", false, null, null),
CanonicalizationVersion = "1.0.0",
InitiatedAt = DateTimeOffset.UtcNow
};
}
private sealed class FakeFeedLoader : IFeedLoader
{
public Task<FeedSnapshot> LoadByDigestAsync(string digest, CancellationToken ct = default)
=> Task.FromResult(new FeedSnapshot("nvd", "v1", digest, DateTimeOffset.UtcNow.AddHours(-1)));
}
private sealed class FakePolicyLoader : IPolicyLoader
{
public Task<PolicySnapshot> LoadByDigestAsync(string digest, CancellationToken ct = default)
=> Task.FromResult(new PolicySnapshot("1.0.0", digest, ImmutableArray<string>.Empty));
}
private sealed class FakeScannerFactory : IScannerFactory
{
public IScanner Create(ScannerOptions options) => new FakeScanner(options);
}
private sealed class FakeScanner : IScanner
{
private readonly ScannerOptions _options;
public FakeScanner(ScannerOptions options) => _options = options;
public Task<ScanResult> ScanAsync(ImmutableArray<ArtifactDigest> artifacts, CancellationToken ct = default)
{
var verdict = new
{
feedVersion = _options.FeedSnapshot.Version,
policyDigest = _options.PolicySnapshot.LatticeRulesDigest
};
var evidence = new EvidenceIndex
{
IndexId = Guid.NewGuid().ToString(),
SchemaVersion = "1.0.0",
Verdict = new VerdictReference("v1", new string('d', 64), VerdictOutcome.Pass, null),
Sboms = ImmutableArray<SbomEvidence>.Empty,
Attestations = ImmutableArray<AttestationEvidence>.Empty,
ToolChain = new ToolChainEvidence("1", "1", "1", "1", "1", ImmutableDictionary<string, string>.Empty),
RunManifestDigest = new string('e', 64),
CreatedAt = DateTimeOffset.UtcNow
};
return Task.FromResult(new ScanResult(verdict, evidence, 10));
}
}
}

View File

@@ -0,0 +1,23 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" />
<PackageReference Include="xunit" Version="2.9.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.7">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\StellaOps.Replay\StellaOps.Replay.csproj" />
<ProjectReference Include="..\..\StellaOps.Testing.Manifests\StellaOps.Testing.Manifests.csproj" />
<ProjectReference Include="..\..\StellaOps.Evidence\StellaOps.Evidence.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,87 @@
using System.Collections.Immutable;
using FluentAssertions;
using StellaOps.Testing.Manifests.Models;
using StellaOps.Testing.Manifests.Serialization;
using StellaOps.Testing.Manifests.Validation;
using Xunit;
namespace StellaOps.Testing.Manifests.Tests;
public class RunManifestTests
{
[Fact]
public void Serialize_ValidManifest_ProducesCanonicalJson()
{
var manifest = CreateTestManifest();
var json1 = RunManifestSerializer.Serialize(manifest);
var json2 = RunManifestSerializer.Serialize(manifest);
json1.Should().Be(json2);
}
[Fact]
public void ComputeDigest_SameManifest_ProducesSameDigest()
{
var manifest = CreateTestManifest();
var digest1 = RunManifestSerializer.ComputeDigest(manifest);
var digest2 = RunManifestSerializer.ComputeDigest(manifest);
digest1.Should().Be(digest2);
}
[Fact]
public void ComputeDigest_DifferentManifest_ProducesDifferentDigest()
{
var manifest1 = CreateTestManifest();
var manifest2 = manifest1 with { RunId = Guid.NewGuid().ToString() };
var digest1 = RunManifestSerializer.ComputeDigest(manifest1);
var digest2 = RunManifestSerializer.ComputeDigest(manifest2);
digest1.Should().NotBe(digest2);
}
[Fact]
public void Validate_ValidManifest_ReturnsSuccess()
{
var manifest = CreateTestManifest();
var validator = new RunManifestValidator();
var result = validator.Validate(manifest);
result.IsValid.Should().BeTrue();
}
[Fact]
public void Validate_EmptyArtifacts_ReturnsFalse()
{
var manifest = CreateTestManifest() with { ArtifactDigests = [] };
var validator = new RunManifestValidator();
var result = validator.Validate(manifest);
result.IsValid.Should().BeFalse();
}
[Fact]
public void RoundTrip_PreservesAllFields()
{
var manifest = CreateTestManifest();
var json = RunManifestSerializer.Serialize(manifest);
var deserialized = RunManifestSerializer.Deserialize(json);
deserialized.Should().BeEquivalentTo(manifest);
}
private static RunManifest CreateTestManifest()
{
return new RunManifest
{
RunId = Guid.NewGuid().ToString(),
SchemaVersion = "1.0.0",
ArtifactDigests = ImmutableArray.Create(
new ArtifactDigest("sha256", new string('a', 64), "application/vnd.oci.image.layer.v1.tar", "example")),
SbomDigests = ImmutableArray.Create(
new SbomReference("cyclonedx-1.6", new string('b', 64), "sbom.json")),
FeedSnapshot = new FeedSnapshot("nvd", "2025.12.01", new string('c', 64), DateTimeOffset.UtcNow.AddHours(-1)),
PolicySnapshot = new PolicySnapshot("1.0.0", new string('d', 64), ImmutableArray.Create("rule-1")),
ToolVersions = new ToolVersions("1.0.0", "1.0.0", "1.0.0", "1.0.0", ImmutableDictionary<string, string>.Empty),
CryptoProfile = new CryptoProfile("default", ImmutableArray.Create("root-1"), ImmutableArray.Create("sha256")),
EnvironmentProfile = new EnvironmentProfile("postgres-only", false, "16", null),
PrngSeed = 1234,
CanonicalizationVersion = "1.0.0",
InitiatedAt = DateTimeOffset.UtcNow
};
}
}

View File

@@ -0,0 +1,20 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" Version="6.12.0" />
<PackageReference Include="xunit" Version="2.9.0" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.5.7">
<PrivateAssets>all</PrivateAssets>
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\StellaOps.Testing.Manifests\StellaOps.Testing.Manifests.csproj" />
</ItemGroup>
</Project>