git.stella-ops.org/src/__Libraries/StellaOps.AuditPack/Services/AuditBundleReader.cs
Commit 7e384ab610 by StellaOps Bot (2025-12-23 07:46:40 +02:00)

feat: Implement IsolatedReplayContext for deterministic audit replay
- Added IsolatedReplayContext class to provide an isolated environment for replaying audit bundles without external calls.
- Introduced methods for initializing the context, verifying input digests, and extracting inputs for policy evaluation.
- Created supporting interfaces and options for context configuration.

feat: Create ReplayExecutor for executing policy re-evaluation and verdict comparison

- Developed ReplayExecutor class to handle the execution of replay processes, including input verification and verdict comparison.
- Implemented detailed drift detection and error handling during replay execution.
- Added interfaces for policy evaluation and replay execution options.

feat: Add ScanSnapshotFetcher for fetching scan data and snapshots

- Introduced ScanSnapshotFetcher class to retrieve necessary scan data and snapshots for audit bundle creation.
- Implemented methods to fetch scan metadata, advisory feeds, policy snapshots, and VEX statements.
- Created supporting interfaces for scan data, feed snapshots, and policy snapshots.

// -----------------------------------------------------------------------------
// AuditBundleReader.cs
// Sprint: SPRINT_4300_0001_0002 (One-Command Audit Replay CLI)
// Tasks: REPLAY-005, REPLAY-007 - AuditBundleReader with verification
// Description: Reads and verifies audit bundles for offline replay.
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using StellaOps.AuditPack.Models;
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Reads and verifies audit bundles for deterministic offline replay.
/// </summary>
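/// <remarks>
/// A minimal usage sketch (the bundle path is a placeholder and error handling is elided):
/// <code>
/// IAuditBundleReader reader = new AuditBundleReader();
/// var result = await reader.ReadAsync(new AuditBundleReadRequest
/// {
///     BundlePath = "/exports/audit-bundle.tar.gz",
///     LoadReplayInputs = true
/// });
/// if (!result.Success)
/// {
///     throw new InvalidOperationException(result.Error);
/// }
/// // result.Manifest, result.BundleDigest and result.ReplayInputs are now available.
/// </code>
/// </remarks>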
public sealed class AuditBundleReader : IAuditBundleReader
{
private static readonly JsonSerializerOptions JsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
/// <summary>
/// Reads and verifies an audit bundle.
/// </summary>
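/// <remarks>
/// The bundle is unpacked to a temporary directory, the manifest is parsed, and the requested
/// verifications run in order: signature, Merkle root, input digests. When
/// <see cref="AuditBundleReadRequest.ExtractToPath"/> is not supplied, the temporary directory
/// is deleted before the method returns, so extracted contents are only durable when an
/// extract path is provided.
/// </remarks>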
public async Task<AuditBundleReadResult> ReadAsync(
AuditBundleReadRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentException.ThrowIfNullOrWhiteSpace(request.BundlePath);
if (!File.Exists(request.BundlePath))
{
return AuditBundleReadResult.Failed("Bundle file not found");
}
var tempDir = Path.Combine(Path.GetTempPath(), $"audit-read-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
{
// Extract bundle
await ExtractBundleAsync(request.BundlePath, tempDir, cancellationToken);
// Read manifest
var manifestPath = Path.Combine(tempDir, "manifest.json");
if (!File.Exists(manifestPath))
{
return AuditBundleReadResult.Failed("Manifest not found in bundle");
}
var manifestBytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken);
var manifest = JsonSerializer.Deserialize<AuditBundleManifest>(manifestBytes, JsonOptions);
if (manifest is null)
{
return AuditBundleReadResult.Failed("Failed to parse manifest");
}
var result = new AuditBundleReadResult
{
Success = true,
Manifest = manifest,
BundleDigest = await ComputeFileDigestAsync(request.BundlePath, cancellationToken),
ExtractedPath = request.ExtractToPath is not null ? null : tempDir
};
// Verify signature if requested
if (request.VerifySignature)
{
var signaturePath = Path.Combine(tempDir, "manifest.sig");
if (File.Exists(signaturePath))
{
var signatureBytes = await File.ReadAllBytesAsync(signaturePath, cancellationToken);
var signatureResult = await VerifySignatureAsync(
manifestBytes, signatureBytes, request.PublicKey, cancellationToken);
result = result with
{
SignatureVerified = signatureResult.Verified,
SignatureKeyId = signatureResult.KeyId,
SignatureError = signatureResult.Error
};
if (!signatureResult.Verified && request.RequireValidSignature)
{
return result with
{
Success = false,
Error = $"Signature verification failed: {signatureResult.Error}"
};
}
}
else if (request.RequireValidSignature)
{
return AuditBundleReadResult.Failed("Signature file not found but signature is required");
}
}
// Verify Merkle root if requested
if (request.VerifyMerkleRoot)
{
var merkleResult = await VerifyMerkleRootAsync(tempDir, manifest, cancellationToken);
result = result with
{
MerkleRootVerified = merkleResult.Verified,
MerkleRootError = merkleResult.Error
};
if (!merkleResult.Verified && request.RequireValidMerkleRoot)
{
return result with
{
Success = false,
Error = $"Merkle root verification failed: {merkleResult.Error}"
};
}
}
// Verify input digests if requested
if (request.VerifyInputDigests)
{
var digestResult = await VerifyInputDigestsAsync(tempDir, manifest, cancellationToken);
result = result with
{
InputDigestsVerified = digestResult.Verified,
InputDigestErrors = digestResult.Errors
};
if (!digestResult.Verified && request.RequireValidInputDigests)
{
return result with
{
Success = false,
Error = $"Input digest verification failed: {string.Join("; ", digestResult.Errors ?? [])}"
};
}
}
// Extract contents if requested
if (request.ExtractToPath is not null)
{
if (Directory.Exists(request.ExtractToPath))
{
if (!request.OverwriteExisting)
{
return result with
{
Success = false,
Error = "Extract path already exists and overwrite is not enabled"
};
}
Directory.Delete(request.ExtractToPath, recursive: true);
}
Directory.Move(tempDir, request.ExtractToPath);
result = result with { ExtractedPath = request.ExtractToPath };
// Point tempDir at a fresh, non-existent path so the finally block has nothing left to delete
tempDir = Path.Combine(Path.GetTempPath(), $"audit-read-empty-{Guid.NewGuid():N}");
}
// Load replay inputs if requested
if (request.LoadReplayInputs)
{
var extractPath = result.ExtractedPath ?? tempDir;
var inputs = await LoadReplayInputsAsync(extractPath, manifest, cancellationToken);
result = result with { ReplayInputs = inputs };
}
return result;
}
catch (Exception ex)
{
return AuditBundleReadResult.Failed($"Failed to read bundle: {ex.Message}");
}
finally
{
// Clean up temp directory
try
{
if (Directory.Exists(tempDir) && request.ExtractToPath is null)
{
// Only clean up if the contents were not moved to the extract path
Directory.Delete(tempDir, recursive: true);
}
}
catch
{
// Ignore cleanup errors
}
}
}
private static async Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct)
{
await using var fileStream = File.OpenRead(bundlePath);
await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
await TarFile.ExtractToDirectoryAsync(gzipStream, targetDir, overwriteFiles: true, ct);
}
private static async Task<string> ComputeFileDigestAsync(string filePath, CancellationToken ct)
{
await using var stream = File.OpenRead(filePath);
var hash = await SHA256.HashDataAsync(stream, ct);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
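// Signature verification delegates envelope checks to AuditBundleSigner and then confirms that
// the signed payload digest matches this bundle's manifest bytes. Note: when no public key is
// supplied, the result is treated as verified on envelope checks alone.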
private static async Task<SignatureVerificationResult> VerifySignatureAsync(
byte[] manifestBytes,
byte[] signatureEnvelopeBytes,
AsymmetricAlgorithm? publicKey,
CancellationToken cancellationToken)
{
try
{
var signer = new AuditBundleSigner();
var result = await signer.VerifyAsync(
new AuditBundleVerificationRequest
{
EnvelopeBytes = signatureEnvelopeBytes,
PublicKey = publicKey
},
cancellationToken);
if (!result.Success)
{
return new SignatureVerificationResult
{
Verified = false,
Error = result.Error
};
}
// Verify payload digest matches manifest
var manifestDigest = ComputeSha256(manifestBytes);
if (result.PayloadDigest != manifestDigest)
{
return new SignatureVerificationResult
{
Verified = false,
Error = "Manifest digest does not match signed payload"
};
}
var keyId = result.VerifiedSignatures?.FirstOrDefault()?.KeyId;
var anyVerified = publicKey is null || (result.VerifiedSignatures?.Any(s => s.Verified) ?? false);
return new SignatureVerificationResult
{
Verified = anyVerified,
KeyId = keyId
};
}
catch (Exception ex)
{
return new SignatureVerificationResult
{
Verified = false,
Error = ex.Message
};
}
}
private static async Task<MerkleVerificationResult> VerifyMerkleRootAsync(
string bundleDir,
AuditBundleManifest manifest,
CancellationToken cancellationToken)
{
try
{
var entries = new List<BundleEntry>();
// Verify all files listed in manifest
foreach (var file in manifest.Files)
{
var filePath = Path.Combine(bundleDir, file.RelativePath.Replace('/', Path.DirectorySeparatorChar));
if (!File.Exists(filePath))
{
return new MerkleVerificationResult
{
Verified = false,
Error = $"Missing file: {file.RelativePath}"
};
}
var content = await File.ReadAllBytesAsync(filePath, cancellationToken);
var digest = ComputeSha256(content);
if (digest != file.Digest)
{
return new MerkleVerificationResult
{
Verified = false,
Error = $"Digest mismatch for {file.RelativePath}: expected {file.Digest}, got {digest}"
};
}
entries.Add(new BundleEntry(file.RelativePath, digest, content.Length));
}
// Compute and verify the Merkle root
var computedRoot = ComputeMerkleRoot(entries);
if (computedRoot != manifest.MerkleRoot)
{
return new MerkleVerificationResult
{
Verified = false,
Error = $"Merkle root mismatch: expected {manifest.MerkleRoot}, got {computedRoot}"
};
}
return new MerkleVerificationResult { Verified = true };
}
catch (Exception ex)
{
return new MerkleVerificationResult
{
Verified = false,
Error = ex.Message
};
}
}
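// Input digests are checked against a fixed bundle layout:
//   sbom.json, feeds/feeds-snapshot.ndjson, policy/policy-bundle.tar.gz (required);
//   vex/vex-statements.json, scoring-rules.json, trust/trust-roots.json (only when the
//   corresponding digest is present in the manifest).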
private static async Task<InputDigestVerificationResult> VerifyInputDigestsAsync(
string bundleDir,
AuditBundleManifest manifest,
CancellationToken cancellationToken)
{
var errors = new List<string>();
// Verify SBOM digest
var sbomPath = Path.Combine(bundleDir, "sbom.json");
if (File.Exists(sbomPath))
{
var sbomContent = await File.ReadAllBytesAsync(sbomPath, cancellationToken);
var sbomDigest = ComputeSha256(sbomContent);
if (sbomDigest != manifest.Inputs.SbomDigest)
{
errors.Add($"SBOM digest mismatch: expected {manifest.Inputs.SbomDigest}, got {sbomDigest}");
}
}
else
{
errors.Add("SBOM file not found");
}
// Verify feeds digest
var feedsPath = Path.Combine(bundleDir, "feeds", "feeds-snapshot.ndjson");
if (File.Exists(feedsPath))
{
var feedsContent = await File.ReadAllBytesAsync(feedsPath, cancellationToken);
var feedsDigest = ComputeSha256(feedsContent);
if (feedsDigest != manifest.Inputs.FeedsDigest)
{
errors.Add($"Feeds digest mismatch: expected {manifest.Inputs.FeedsDigest}, got {feedsDigest}");
}
}
else
{
errors.Add("Feeds snapshot file not found");
}
// Verify policy digest
var policyPath = Path.Combine(bundleDir, "policy", "policy-bundle.tar.gz");
if (File.Exists(policyPath))
{
var policyContent = await File.ReadAllBytesAsync(policyPath, cancellationToken);
var policyDigest = ComputeSha256(policyContent);
if (policyDigest != manifest.Inputs.PolicyDigest)
{
errors.Add($"Policy digest mismatch: expected {manifest.Inputs.PolicyDigest}, got {policyDigest}");
}
}
else
{
errors.Add("Policy bundle file not found");
}
// Verify VEX digest (optional)
if (manifest.Inputs.VexDigest is not null)
{
var vexPath = Path.Combine(bundleDir, "vex", "vex-statements.json");
if (File.Exists(vexPath))
{
var vexContent = await File.ReadAllBytesAsync(vexPath, cancellationToken);
var vexDigest = ComputeSha256(vexContent);
if (vexDigest != manifest.Inputs.VexDigest)
{
errors.Add($"VEX digest mismatch: expected {manifest.Inputs.VexDigest}, got {vexDigest}");
}
}
else
{
errors.Add("VEX file not found but digest specified in manifest");
}
}
// Verify scoring digest (optional)
if (manifest.Inputs.ScoringDigest is not null)
{
var scoringPath = Path.Combine(bundleDir, "scoring-rules.json");
if (File.Exists(scoringPath))
{
var scoringContent = await File.ReadAllBytesAsync(scoringPath, cancellationToken);
var scoringDigest = ComputeSha256(scoringContent);
if (scoringDigest != manifest.Inputs.ScoringDigest)
{
errors.Add($"Scoring rules digest mismatch: expected {manifest.Inputs.ScoringDigest}, got {scoringDigest}");
}
}
else
{
errors.Add("Scoring rules file not found but digest specified in manifest");
}
}
// Verify trust roots digest (optional)
if (manifest.Inputs.TrustRootsDigest is not null)
{
var trustPath = Path.Combine(bundleDir, "trust", "trust-roots.json");
if (File.Exists(trustPath))
{
var trustContent = await File.ReadAllBytesAsync(trustPath, cancellationToken);
var trustDigest = ComputeSha256(trustContent);
if (trustDigest != manifest.Inputs.TrustRootsDigest)
{
errors.Add($"Trust roots digest mismatch: expected {manifest.Inputs.TrustRootsDigest}, got {trustDigest}");
}
}
else
{
errors.Add("Trust roots file not found but digest specified in manifest");
}
}
return new InputDigestVerificationResult
{
Verified = errors.Count == 0,
Errors = errors.Count > 0 ? [.. errors] : null
};
}
private static async Task<ReplayInputs> LoadReplayInputsAsync(
string bundleDir,
AuditBundleManifest manifest,
CancellationToken cancellationToken)
{
var inputs = new ReplayInputs();
// Load SBOM
var sbomPath = Path.Combine(bundleDir, "sbom.json");
if (File.Exists(sbomPath))
{
inputs = inputs with { Sbom = await File.ReadAllBytesAsync(sbomPath, cancellationToken) };
}
// Load feeds
var feedsPath = Path.Combine(bundleDir, "feeds", "feeds-snapshot.ndjson");
if (File.Exists(feedsPath))
{
inputs = inputs with { FeedsSnapshot = await File.ReadAllBytesAsync(feedsPath, cancellationToken) };
}
// Load policy
var policyPath = Path.Combine(bundleDir, "policy", "policy-bundle.tar.gz");
if (File.Exists(policyPath))
{
inputs = inputs with { PolicyBundle = await File.ReadAllBytesAsync(policyPath, cancellationToken) };
}
// Load VEX (optional)
var vexPath = Path.Combine(bundleDir, "vex", "vex-statements.json");
if (File.Exists(vexPath))
{
inputs = inputs with { VexStatements = await File.ReadAllBytesAsync(vexPath, cancellationToken) };
}
// Load verdict
var verdictPath = Path.Combine(bundleDir, "verdict.json");
if (File.Exists(verdictPath))
{
inputs = inputs with { Verdict = await File.ReadAllBytesAsync(verdictPath, cancellationToken) };
}
return inputs;
}
private static string ComputeSha256(byte[] content)
{
var hash = SHA256.HashData(content);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
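// Merkle root construction: each leaf is SHA-256 over "path:digest" for an entry, with entries
// ordered by path (ordinal). Levels are folded by hashing concatenated pairs; an odd trailing
// node is re-hashed alone. The bundle writer must use the same construction for the root in the
// manifest to match.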
private static string ComputeMerkleRoot(List<BundleEntry> entries)
{
if (entries.Count == 0)
{
return string.Empty;
}
var leaves = entries
.OrderBy(e => e.Path, StringComparer.Ordinal)
.Select(e => SHA256.HashData(Encoding.UTF8.GetBytes($"{e.Path}:{e.Digest}")))
.ToArray();
while (leaves.Length > 1)
{
leaves = PairwiseHash(leaves).ToArray();
}
return $"sha256:{Convert.ToHexString(leaves[0]).ToLowerInvariant()}";
}
private static IEnumerable<byte[]> PairwiseHash(byte[][] nodes)
{
for (var i = 0; i < nodes.Length; i += 2)
{
if (i + 1 >= nodes.Length)
{
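// Odd trailing node: hash it alone rather than promoting it unchanged.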
yield return SHA256.HashData(nodes[i]);
continue;
}
var combined = new byte[nodes[i].Length + nodes[i + 1].Length];
Buffer.BlockCopy(nodes[i], 0, combined, 0, nodes[i].Length);
Buffer.BlockCopy(nodes[i + 1], 0, combined, nodes[i].Length, nodes[i + 1].Length);
yield return SHA256.HashData(combined);
}
}
private sealed record BundleEntry(string Path, string Digest, long SizeBytes);
private sealed record SignatureVerificationResult
{
public bool Verified { get; init; }
public string? KeyId { get; init; }
public string? Error { get; init; }
}
private sealed record MerkleVerificationResult
{
public bool Verified { get; init; }
public string? Error { get; init; }
}
private sealed record InputDigestVerificationResult
{
public bool Verified { get; init; }
public ImmutableArray<string>? Errors { get; init; }
}
}
/// <summary>
/// Interface for audit bundle reading.
/// </summary>
public interface IAuditBundleReader
{
Task<AuditBundleReadResult> ReadAsync(
AuditBundleReadRequest request,
CancellationToken cancellationToken = default);
}
#region Request and Result Models
/// <summary>
/// Request for reading an audit bundle.
/// </summary>
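/// <remarks>
/// Signature, Merkle root, and input digest verification are enabled by default; by default only
/// failed Merkle root and input digest checks fail the read, unless
/// <see cref="RequireValidSignature"/> is set. A strict request might look like this (sketch;
/// <c>bundlePath</c> and <c>trustedKey</c> are placeholders):
/// <code>
/// var request = new AuditBundleReadRequest
/// {
///     BundlePath = bundlePath,
///     RequireValidSignature = true,
///     PublicKey = trustedKey // e.g. an ECDsa or RSA instance loaded from a trusted key store
/// };
/// </code>
/// </remarks>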
public sealed record AuditBundleReadRequest
{
public required string BundlePath { get; init; }
/// <summary>
/// Verify the manifest signature.
/// </summary>
public bool VerifySignature { get; init; } = true;
/// <summary>
/// Fail if signature is invalid.
/// </summary>
public bool RequireValidSignature { get; init; }
/// <summary>
/// Verify the Merkle root.
/// </summary>
public bool VerifyMerkleRoot { get; init; } = true;
/// <summary>
/// Fail if the Merkle root is invalid.
/// </summary>
public bool RequireValidMerkleRoot { get; init; } = true;
/// <summary>
/// Verify input digests match manifest.
/// </summary>
public bool VerifyInputDigests { get; init; } = true;
/// <summary>
/// Fail if input digests are invalid.
/// </summary>
public bool RequireValidInputDigests { get; init; } = true;
/// <summary>
/// Extract bundle contents to this path.
/// </summary>
public string? ExtractToPath { get; init; }
/// <summary>
/// Overwrite existing extraction directory.
/// </summary>
public bool OverwriteExisting { get; init; }
/// <summary>
/// Load replay inputs into memory.
/// </summary>
public bool LoadReplayInputs { get; init; }
/// <summary>
/// Public key for signature verification.
/// </summary>
public AsymmetricAlgorithm? PublicKey { get; init; }
}
/// <summary>
/// Result of reading an audit bundle.
/// </summary>
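/// <remarks>
/// The nullable verification flags (<see cref="SignatureVerified"/>, <see cref="MerkleRootVerified"/>,
/// <see cref="InputDigestsVerified"/>) stay null when the corresponding check was not performed.
/// </remarks>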
public sealed record AuditBundleReadResult
{
public bool Success { get; init; }
public AuditBundleManifest? Manifest { get; init; }
public string? BundleDigest { get; init; }
public string? ExtractedPath { get; init; }
public string? Error { get; init; }
// Signature verification
public bool? SignatureVerified { get; init; }
public string? SignatureKeyId { get; init; }
public string? SignatureError { get; init; }
// Merkle root verification
public bool? MerkleRootVerified { get; init; }
public string? MerkleRootError { get; init; }
// Input digest verification
public bool? InputDigestsVerified { get; init; }
public ImmutableArray<string>? InputDigestErrors { get; init; }
// Replay inputs
public ReplayInputs? ReplayInputs { get; init; }
public static AuditBundleReadResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
/// <summary>
/// Loaded replay inputs from a bundle.
/// </summary>
public sealed record ReplayInputs
{
public byte[]? Sbom { get; init; }
public byte[]? FeedsSnapshot { get; init; }
public byte[]? PolicyBundle { get; init; }
public byte[]? VexStatements { get; init; }
public byte[]? Verdict { get; init; }
}
#endregion