stabilization work - projects rework for maintainability and UI livening

This commit is contained in:
master
2026-02-03 23:40:04 +02:00
parent 074ce117ba
commit 557feefdc3
3305 changed files with 186813 additions and 107843 deletions

View File

@@ -0,0 +1,7 @@
namespace StellaOps.AuditPack.Models;

/// <summary>
/// A signed attestation carried in an audit pack.
/// </summary>
/// <param name="Id">Attestation identifier.</param>
/// <param name="Type">Attestation type.</param>
/// <param name="Envelope">DSSE envelope containing the signed payload.</param>
/// <param name="Digest">Digest of the attestation.</param>
public sealed record Attestation(
    string Id,
    string Type,
    string Envelope,
    string Digest);

View File

@@ -1,10 +1,3 @@
// -----------------------------------------------------------------------------
// AuditBundleManifest.cs
// Sprint: SPRINT_4300_0001_0002 (One-Command Audit Replay CLI)
// Task: REPLAY-001 - Define audit bundle manifest schema
// Description: Defines the manifest schema for self-contained audit bundles.
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
namespace StellaOps.AuditPack.Models;
@@ -105,105 +98,3 @@ public sealed record AuditBundleManifest
/// </summary>
public string? PolicyVersion { get; init; }
}
/// <summary>
/// Input digest hashes for deterministic replay.
/// These must match exactly for replay to succeed.
/// </summary>
public sealed record InputDigests
{
/// <summary>
/// SHA-256 digest of the SBOM document.
/// </summary>
public required string SbomDigest { get; init; }
/// <summary>
/// SHA-256 digest of the advisory feeds snapshot.
/// </summary>
public required string FeedsDigest { get; init; }
/// <summary>
/// SHA-256 digest of the policy bundle.
/// </summary>
public required string PolicyDigest { get; init; }
/// <summary>
/// SHA-256 digest of the VEX statements.
/// </summary>
public string? VexDigest { get; init; }
/// <summary>
/// SHA-256 digest of the scoring rules.
/// </summary>
public string? ScoringDigest { get; init; }
/// <summary>
/// SHA-256 digest of the trust roots.
/// </summary>
public string? TrustRootsDigest { get; init; }
}
/// <summary>
/// Entry for a file in the bundle.
/// </summary>
public sealed record BundleFileEntry
{
/// <summary>
/// Relative path within the bundle.
/// </summary>
public required string RelativePath { get; init; }
/// <summary>
/// SHA-256 digest of the file.
/// </summary>
public required string Digest { get; init; }
/// <summary>
/// Size of the file in bytes.
/// </summary>
public required long SizeBytes { get; init; }
/// <summary>
/// Type of content.
/// </summary>
public required BundleContentType ContentType { get; init; }
}
/// <summary>
/// Type of content in the bundle.
/// </summary>
public enum BundleContentType
{
Manifest,
Signature,
Sbom,
Feeds,
Policy,
Vex,
Verdict,
ProofBundle,
TrustRoot,
TimeAnchor,
Other
}
/// <summary>
/// Time anchor for establishing evaluation time.
/// </summary>
public sealed record TimeAnchor
{
/// <summary>
/// Anchor timestamp.
/// </summary>
public required DateTimeOffset Timestamp { get; init; }
/// <summary>
/// Source of the time anchor (local, roughtime, rfc3161).
/// </summary>
public required string Source { get; init; }
/// <summary>
/// Digest of the time anchor token.
/// </summary>
public string? TokenDigest { get; init; }
}

View File

@@ -2,7 +2,6 @@ using System.Collections.Immutable;
namespace StellaOps.AuditPack.Models;
/// <summary>
/// A sealed, self-contained audit pack for verification and compliance.
/// Contains all inputs and outputs required to reproduce and verify a scan.
@@ -84,61 +83,3 @@ public sealed record AuditPack
/// </summary>
public string? Signature { get; init; }
}
public sealed record PackContents
{
public ImmutableArray<PackFile> Files { get; init; } = [];
public long TotalSizeBytes { get; init; }
public int FileCount { get; init; }
}
public sealed record PackFile(
string RelativePath,
string Digest,
long SizeBytes,
PackFileType Type);
public enum PackFileType
{
Manifest,
RunManifest,
EvidenceIndex,
Verdict,
Sbom,
Vex,
Attestation,
Feed,
Policy,
TrustRoot,
Other
}
public sealed record SbomDocument(
string Id,
string Format,
string Content,
string Digest);
public sealed record VexDocument(
string Id,
string Format,
string Content,
string Digest);
public sealed record TrustRoot(
string Id,
string Type, // fulcio, rekor, custom
string Content,
string Digest);
public sealed record Attestation(
string Id,
string Type,
string Envelope, // DSSE envelope
string Digest);
// Placeholder types - these would reference actual domain models
public sealed record RunManifest(string ScanId, DateTimeOffset Timestamp);
public sealed record EvidenceIndex(ImmutableArray<string> EvidenceIds);
public sealed record Verdict(string VerdictId, string Status);
public sealed record BundleManifest(string BundleId, string Version);

View File

@@ -0,0 +1,19 @@
namespace StellaOps.AuditPack.Models;

/// <summary>
/// Type of content in the bundle.
/// </summary>
public enum BundleContentType
{
    /// <summary>Bundle manifest.</summary>
    Manifest,
    /// <summary>Signature content.</summary>
    Signature,
    /// <summary>SBOM document.</summary>
    Sbom,
    /// <summary>Advisory feeds snapshot.</summary>
    Feeds,
    /// <summary>Policy bundle.</summary>
    Policy,
    /// <summary>VEX statements.</summary>
    Vex,
    /// <summary>Evaluation verdict.</summary>
    Verdict,
    /// <summary>Proof bundle.</summary>
    ProofBundle,
    /// <summary>Trust root material.</summary>
    TrustRoot,
    /// <summary>Time anchor token.</summary>
    TimeAnchor,
    /// <summary>Content not covered by the other members.</summary>
    Other
}

View File

@@ -0,0 +1,27 @@
namespace StellaOps.AuditPack.Models;

/// <summary>
/// Entry for a file in the bundle.
/// </summary>
public sealed record BundleFileEntry
{
    /// <summary>
    /// Relative path within the bundle.
    /// </summary>
    public required string RelativePath { get; init; }

    /// <summary>
    /// SHA-256 digest of the file.
    /// </summary>
    public required string Digest { get; init; }

    /// <summary>
    /// Size of the file in bytes.
    /// </summary>
    public required long SizeBytes { get; init; }

    /// <summary>
    /// Type of content (see <see cref="BundleContentType"/>).
    /// </summary>
    public required BundleContentType ContentType { get; init; }
}

View File

@@ -0,0 +1,3 @@
namespace StellaOps.AuditPack.Models;

/// <summary>
/// Placeholder bundle manifest; would reference the actual domain model.
/// </summary>
/// <param name="BundleId">Identifier of the bundle.</param>
/// <param name="Version">Version string of the bundle.</param>
public sealed record BundleManifest(string BundleId, string Version);

View File

@@ -0,0 +1,5 @@
using System.Collections.Immutable;

namespace StellaOps.AuditPack.Models;

/// <summary>
/// Placeholder index of evidence items; would reference the actual domain model.
/// </summary>
/// <param name="EvidenceIds">Identifiers of the referenced evidence items.</param>
public sealed record EvidenceIndex(ImmutableArray<string> EvidenceIds);

View File

@@ -0,0 +1,38 @@
namespace StellaOps.AuditPack.Models;

/// <summary>
/// Input digest hashes for deterministic replay.
/// These must match exactly for replay to succeed.
/// </summary>
/// <remarks>
/// SBOM, feeds, and policy digests are mandatory; the nullable digests are
/// presumably null when the corresponding input is not part of the bundle —
/// confirm with the bundle builder.
/// </remarks>
public sealed record InputDigests
{
    /// <summary>
    /// SHA-256 digest of the SBOM document.
    /// </summary>
    public required string SbomDigest { get; init; }

    /// <summary>
    /// SHA-256 digest of the advisory feeds snapshot.
    /// </summary>
    public required string FeedsDigest { get; init; }

    /// <summary>
    /// SHA-256 digest of the policy bundle.
    /// </summary>
    public required string PolicyDigest { get; init; }

    /// <summary>
    /// SHA-256 digest of the VEX statements.
    /// </summary>
    public string? VexDigest { get; init; }

    /// <summary>
    /// SHA-256 digest of the scoring rules.
    /// </summary>
    public string? ScoringDigest { get; init; }

    /// <summary>
    /// SHA-256 digest of the trust roots.
    /// </summary>
    public string? TrustRootsDigest { get; init; }
}

View File

@@ -0,0 +1,10 @@
using System.Collections.Immutable;

namespace StellaOps.AuditPack.Models;

/// <summary>
/// Inventory of the files contained in an audit pack.
/// </summary>
public sealed record PackContents
{
    /// <summary>Files included in the pack.</summary>
    public ImmutableArray<PackFile> Files { get; init; } = [];

    /// <summary>Total size of the files in bytes.</summary>
    public long TotalSizeBytes { get; init; }

    /// <summary>Number of files (expected to equal Files.Length; not enforced here).</summary>
    public int FileCount { get; init; }
}

View File

@@ -0,0 +1,22 @@
namespace StellaOps.AuditPack.Models;

/// <summary>
/// A single file tracked inside an audit pack.
/// </summary>
/// <param name="RelativePath">Path of the file relative to the pack root.</param>
/// <param name="Digest">Digest of the file content.</param>
/// <param name="SizeBytes">File size in bytes.</param>
/// <param name="Type">Kind of content the file holds.</param>
public sealed record PackFile(
    string RelativePath,
    string Digest,
    long SizeBytes,
    PackFileType Type);

/// <summary>
/// Kind of content a <see cref="PackFile"/> holds.
/// </summary>
public enum PackFileType
{
    Manifest,
    RunManifest,
    EvidenceIndex,
    Verdict,
    Sbom,
    Vex,
    Attestation,
    Feed,
    Policy,
    TrustRoot,
    Other
}

View File

@@ -0,0 +1,3 @@
namespace StellaOps.AuditPack.Models;

/// <summary>
/// Placeholder run manifest; would reference the actual domain model.
/// </summary>
/// <param name="ScanId">Identifier of the scan run.</param>
/// <param name="Timestamp">When the run occurred.</param>
public sealed record RunManifest(string ScanId, DateTimeOffset Timestamp);

View File

@@ -0,0 +1,7 @@
namespace StellaOps.AuditPack.Models;

/// <summary>
/// An SBOM document carried in an audit pack.
/// </summary>
/// <param name="Id">Document identifier.</param>
/// <param name="Format">Document format identifier.</param>
/// <param name="Content">Raw document content.</param>
/// <param name="Digest">Digest of the content.</param>
public sealed record SbomDocument(
    string Id,
    string Format,
    string Content,
    string Digest);

View File

@@ -0,0 +1,22 @@
namespace StellaOps.AuditPack.Models;

/// <summary>
/// Time anchor for establishing evaluation time.
/// </summary>
public sealed record TimeAnchor
{
    /// <summary>
    /// Anchor timestamp.
    /// </summary>
    public required DateTimeOffset Timestamp { get; init; }

    /// <summary>
    /// Source of the time anchor (local, roughtime, rfc3161).
    /// </summary>
    public required string Source { get; init; }

    /// <summary>
    /// Digest of the time anchor token, if any.
    /// </summary>
    public string? TokenDigest { get; init; }
}

View File

@@ -0,0 +1,7 @@
namespace StellaOps.AuditPack.Models;

/// <summary>
/// Trust root material carried in an audit pack.
/// </summary>
/// <param name="Id">Identifier of the trust root.</param>
/// <param name="Type">Trust root type: fulcio, rekor, or custom.</param>
/// <param name="Content">Raw trust root content.</param>
/// <param name="Digest">Digest of the content.</param>
public sealed record TrustRoot(
    string Id,
    string Type,
    string Content,
    string Digest);

View File

@@ -0,0 +1,3 @@
namespace StellaOps.AuditPack.Models;

/// <summary>
/// Placeholder verdict; would reference the actual domain model.
/// </summary>
/// <param name="VerdictId">Identifier of the verdict.</param>
/// <param name="Status">Verdict status string.</param>
public sealed record Verdict(string VerdictId, string Status);

View File

@@ -0,0 +1,7 @@
namespace StellaOps.AuditPack.Models;

/// <summary>
/// A VEX document carried in an audit pack.
/// </summary>
/// <param name="Id">Document identifier.</param>
/// <param name="Format">Document format identifier.</param>
/// <param name="Content">Raw document content.</param>
/// <param name="Digest">Digest of the content.</param>
public sealed record VexDocument(
    string Id,
    string Format,
    string Content,
    string Digest);

View File

@@ -0,0 +1,22 @@
using System.Text;

namespace StellaOps.AuditPack.Services;

public sealed partial class AirGapTrustStoreIntegration
{
    /// <summary>
    /// Infers an algorithm name from the PEM header markers in the key bytes:
    /// "ES256" for EC keys, "RS256" for RSA keys, "unknown" otherwise.
    /// NOTE(review): keys in SPKI/PKCS#8 form carry the generic
    /// "PUBLIC KEY"/"PRIVATE KEY" headers and will be reported as "unknown" —
    /// confirm that is acceptable for the trust stores being loaded.
    /// </summary>
    private static string DetectAlgorithm(byte[] keyBytes)
    {
        var pemText = Encoding.UTF8.GetString(keyBytes);

        // Both legacy private-key and public-key PEM labels identify the key type.
        static bool HasMarker(string pem, string keyType) =>
            pem.Contains($"{keyType} PRIVATE KEY") || pem.Contains($"{keyType} PUBLIC KEY");

        if (HasMarker(pemText, "EC"))
        {
            return "ES256";
        }

        return HasMarker(pemText, "RSA") ? "RS256" : "unknown";
    }
}

View File

@@ -0,0 +1,32 @@
namespace StellaOps.AuditPack.Services;

public sealed partial class AirGapTrustStoreIntegration
{
    /// <summary>
    /// Shape of the optional trust-manifest.json file read by LoadFromDirectoryAsync.
    /// </summary>
    private sealed class TrustManifest
    {
        public List<TrustRootEntry>? Roots { get; set; }
    }

    /// <summary>One root referenced by the trust manifest.</summary>
    private sealed class TrustRootEntry
    {
        public string KeyId { get; set; } = string.Empty;
        // PEM file path relative to the trust store directory; "{KeyId}.pem" is used when absent.
        public string? RelativePath { get; set; }
        // Algorithm hint; defaults to "ES256" when absent.
        public string? Algorithm { get; set; }
        public DateTimeOffset? ExpiresAt { get; set; }
        // Defaults to "signing" when absent.
        public string? Purpose { get; set; }
    }

    /// <summary>
    /// Shape of the JSON trust-roots bundle consumed by LoadFromBundle.
    /// </summary>
    private sealed class TrustRootBundle
    {
        public List<TrustRootData>? Roots { get; set; }
    }

    /// <summary>One inline root (PEM text) inside a trust-roots bundle.</summary>
    private sealed class TrustRootData
    {
        public string? KeyId { get; set; }
        public string? PublicKeyPem { get; set; }
        public string? Algorithm { get; set; }
        public DateTimeOffset? ExpiresAt { get; set; }
        public string? Purpose { get; set; }
    }
}

View File

@@ -0,0 +1,59 @@
using System.Text;
using System.Text.Json;

namespace StellaOps.AuditPack.Services;

public sealed partial class AirGapTrustStoreIntegration
{
    /// <summary>
    /// Loads trust roots from bundle content (JSON-serialized trust-root bundle).
    /// Entries missing a key id or PEM text are skipped. On failure the
    /// previously loaded store is left untouched; on success it is replaced.
    /// </summary>
    /// <param name="trustRootsContent">UTF-8 JSON bytes of the bundle.</param>
    /// <returns>Result carrying the number of loaded roots, or a failure message.</returns>
    public TrustStoreLoadResult LoadFromBundle(byte[] trustRootsContent)
    {
        if (trustRootsContent is null || trustRootsContent.Length == 0)
        {
            return TrustStoreLoadResult.Failed("Trust roots content is empty");
        }

        // Fix: parse and validate BEFORE clearing, so a malformed bundle no
        // longer wipes roots that were loaded earlier.
        TrustRootBundle? bundleData;
        try
        {
            bundleData = JsonSerializer.Deserialize<TrustRootBundle>(trustRootsContent, _jsonOptions);
        }
        catch (Exception ex)
        {
            return TrustStoreLoadResult.Failed($"Failed to parse trust roots bundle: {ex.Message}");
        }

        if (bundleData?.Roots is null || bundleData.Roots.Count == 0)
        {
            return TrustStoreLoadResult.Failed("No trust roots in bundle");
        }

        _trustRoots.Clear();
        _metadata.Clear();
        foreach (var root in bundleData.Roots)
        {
            if (string.IsNullOrEmpty(root.KeyId) || string.IsNullOrEmpty(root.PublicKeyPem))
            {
                // Skip malformed entries rather than failing the whole load.
                continue;
            }

            var keyBytes = Encoding.UTF8.GetBytes(root.PublicKeyPem);
            _trustRoots[root.KeyId] = keyBytes;
            _metadata[root.KeyId] = new TrustRootMetadata
            {
                KeyId = root.KeyId,
                Algorithm = root.Algorithm ?? "ES256",
                ExpiresAt = root.ExpiresAt,
                Purpose = root.Purpose ?? "signing"
            };
        }

        return new TrustStoreLoadResult
        {
            Success = true,
            LoadedCount = _trustRoots.Count,
            KeyIds = [.. _trustRoots.Keys]
        };
    }
}

View File

@@ -0,0 +1,93 @@
using System.Text.Json;

namespace StellaOps.AuditPack.Services;

public sealed partial class AirGapTrustStoreIntegration
{
    /// <summary>
    /// Loads trust roots from a directory. When trust-manifest.json is present
    /// it drives the load (missing key files become per-key errors); otherwise
    /// every *.pem file is loaded with the file name (minus extension) as the
    /// key id. Fix: results are staged and only committed after all files have
    /// been read, so a mid-load failure no longer leaves the store empty.
    /// </summary>
    /// <param name="trustStorePath">Directory containing the trust store.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    public async Task<TrustStoreLoadResult> LoadFromDirectoryAsync(
        string trustStorePath,
        CancellationToken cancellationToken = default)
    {
        if (string.IsNullOrWhiteSpace(trustStorePath))
        {
            return TrustStoreLoadResult.Failed("Trust store path is required");
        }

        if (!Directory.Exists(trustStorePath))
        {
            return TrustStoreLoadResult.Failed($"Trust store directory not found: {trustStorePath}");
        }

        try
        {
            // Stage into local collections; commit atomically on success.
            var stagedRoots = new Dictionary<string, byte[]>(StringComparer.Ordinal);
            var stagedMetadata = new Dictionary<string, TrustRootMetadata>(StringComparer.Ordinal);
            var errors = new List<string>();

            var manifestPath = Path.Combine(trustStorePath, "trust-manifest.json");
            if (File.Exists(manifestPath))
            {
                var manifestBytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken).ConfigureAwait(false);
                var manifest = JsonSerializer.Deserialize<TrustManifest>(manifestBytes, _jsonOptions);
                if (manifest?.Roots is not null)
                {
                    foreach (var root in manifest.Roots)
                    {
                        var keyPath = Path.Combine(trustStorePath, root.RelativePath ?? $"{root.KeyId}.pem");
                        if (!File.Exists(keyPath))
                        {
                            errors.Add($"Key file not found for {root.KeyId}: {keyPath}");
                            continue;
                        }

                        var keyBytes = await File.ReadAllBytesAsync(keyPath, cancellationToken).ConfigureAwait(false);
                        stagedRoots[root.KeyId] = keyBytes;
                        stagedMetadata[root.KeyId] = new TrustRootMetadata
                        {
                            KeyId = root.KeyId,
                            Algorithm = root.Algorithm ?? "ES256",
                            ExpiresAt = root.ExpiresAt,
                            Purpose = root.Purpose ?? "signing"
                        };
                    }
                }
            }
            else
            {
                foreach (var pemFile in Directory.GetFiles(trustStorePath, "*.pem"))
                {
                    var keyId = Path.GetFileNameWithoutExtension(pemFile);
                    var keyBytes = await File.ReadAllBytesAsync(pemFile, cancellationToken).ConfigureAwait(false);
                    stagedRoots[keyId] = keyBytes;
                    stagedMetadata[keyId] = new TrustRootMetadata
                    {
                        KeyId = keyId,
                        Algorithm = DetectAlgorithm(keyBytes),
                        Purpose = "signing"
                    };
                }
            }

            // Commit: replace the live store only now that every read succeeded.
            _trustRoots.Clear();
            _metadata.Clear();
            foreach (var (keyId, keyBytes) in stagedRoots)
            {
                _trustRoots[keyId] = keyBytes;
            }
            foreach (var (keyId, metadata) in stagedMetadata)
            {
                _metadata[keyId] = metadata;
            }

            return new TrustStoreLoadResult
            {
                Success = true,
                LoadedCount = stagedRoots.Count,
                KeyIds = [.. _trustRoots.Keys],
                Errors = errors.Count > 0 ? [.. errors] : null
            };
        }
        catch (Exception ex)
        {
            return TrustStoreLoadResult.Failed($"Failed to load trust store: {ex.Message}");
        }
    }
}

View File

@@ -0,0 +1,89 @@
using System.Security.Cryptography;
using System.Text;

namespace StellaOps.AuditPack.Services;

public sealed partial class AirGapTrustStoreIntegration
{
    /// <summary>
    /// Gets a public key for signature verification. An expired key is still
    /// returned, flagged with <c>Expired</c> and a warning message.
    /// </summary>
    public TrustRootLookupResult GetPublicKey(string keyId)
    {
        if (!_trustRoots.TryGetValue(keyId, out var keyBytes))
        {
            return TrustRootLookupResult.NotFound(keyId);
        }

        var metadata = _metadata.GetValueOrDefault(keyId);
        if (metadata?.ExpiresAt is DateTimeOffset expiresAt && expiresAt < DateTimeOffset.UtcNow)
        {
            return new TrustRootLookupResult
            {
                Found = true,
                KeyId = keyId,
                KeyBytes = keyBytes,
                Metadata = metadata,
                Expired = true,
                Warning = $"Key {keyId} expired at {expiresAt:u}"
            };
        }

        return new TrustRootLookupResult
        {
            Found = true,
            KeyId = keyId,
            KeyBytes = keyBytes,
            Metadata = metadata
        };
    }

    /// <summary>
    /// Creates an asymmetric algorithm from key bytes based on the stored
    /// algorithm hint (ES* → ECDsa, RS*/PS* → RSA). Returns null when the key
    /// is unknown, the algorithm is unsupported, or the PEM fails to import.
    /// Caller owns (and must dispose) the returned instance.
    /// </summary>
    public AsymmetricAlgorithm? CreateVerificationKey(string keyId)
    {
        var lookupResult = GetPublicKey(keyId);
        if (!lookupResult.Found || lookupResult.KeyBytes is null)
        {
            return null;
        }

        var pemString = Encoding.UTF8.GetString(lookupResult.KeyBytes);
        var algorithm = lookupResult.Metadata?.Algorithm ?? "ES256";

        if (algorithm.StartsWith("ES", StringComparison.OrdinalIgnoreCase))
        {
            var ecdsa = ECDsa.Create();
            try
            {
                ecdsa.ImportFromPem(pemString);
                return ecdsa;
            }
            catch
            {
                // Fix: dispose the half-initialized key instead of leaking it.
                ecdsa.Dispose();
                return null;
            }
        }

        if (algorithm.StartsWith("RS", StringComparison.OrdinalIgnoreCase)
            || algorithm.StartsWith("PS", StringComparison.OrdinalIgnoreCase))
        {
            var rsa = RSA.Create();
            try
            {
                rsa.ImportFromPem(pemString);
                return rsa;
            }
            catch
            {
                rsa.Dispose();
                return null;
            }
        }

        return null;
    }

    /// <summary>
    /// Gets all available key IDs.
    /// </summary>
    public IReadOnlyCollection<string> GetAvailableKeyIds() => _trustRoots.Keys;

    /// <summary>
    /// Gets count of loaded trust roots.
    /// </summary>
    public int Count => _trustRoots.Count;
}

View File

@@ -1,11 +1,3 @@
// -----------------------------------------------------------------------------
// AirGapTrustStoreIntegration.cs
// Sprint: SPRINT_4300_0001_0002 (One-Command Audit Replay CLI)
// Task: REPLAY-026 - Integrate with AirGap.Importer trust store
// Description: Bridges AuditPack replay with AirGap trust store for offline operation.
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text.Json;
namespace StellaOps.AuditPack.Services;
@@ -13,345 +5,13 @@ namespace StellaOps.AuditPack.Services;
/// <summary>
/// Integrates AuditPack replay with AirGap trust store for offline signature verification.
/// </summary>
public sealed class AirGapTrustStoreIntegration : IAirGapTrustStoreIntegration
public sealed partial class AirGapTrustStoreIntegration : IAirGapTrustStoreIntegration
{
private static readonly JsonSerializerOptions JsonOptions = new()
private static readonly JsonSerializerOptions _jsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
private readonly Dictionary<string, byte[]> _trustRoots = new(StringComparer.Ordinal);
private readonly Dictionary<string, TrustRootMetadata> _metadata = new(StringComparer.Ordinal);
/// <summary>
/// Loads trust roots from a directory.
/// </summary>
public async Task<TrustStoreLoadResult> LoadFromDirectoryAsync(
string trustStorePath,
CancellationToken cancellationToken = default)
{
if (string.IsNullOrWhiteSpace(trustStorePath))
{
return TrustStoreLoadResult.Failed("Trust store path is required");
}
if (!Directory.Exists(trustStorePath))
{
return TrustStoreLoadResult.Failed($"Trust store directory not found: {trustStorePath}");
}
try
{
_trustRoots.Clear();
_metadata.Clear();
var loaded = 0;
var errors = new List<string>();
// Load manifest if present
var manifestPath = Path.Combine(trustStorePath, "trust-manifest.json");
if (File.Exists(manifestPath))
{
var manifestBytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken);
var manifest = JsonSerializer.Deserialize<TrustManifest>(manifestBytes, JsonOptions);
if (manifest?.Roots is not null)
{
foreach (var root in manifest.Roots)
{
var keyPath = Path.Combine(trustStorePath, root.RelativePath ?? $"{root.KeyId}.pem");
if (File.Exists(keyPath))
{
var keyBytes = await File.ReadAllBytesAsync(keyPath, cancellationToken);
_trustRoots[root.KeyId] = keyBytes;
_metadata[root.KeyId] = new TrustRootMetadata
{
KeyId = root.KeyId,
Algorithm = root.Algorithm ?? "ES256",
ExpiresAt = root.ExpiresAt,
Purpose = root.Purpose ?? "signing"
};
loaded++;
}
else
{
errors.Add($"Key file not found for {root.KeyId}: {keyPath}");
}
}
}
}
else
{
// Load all .pem files from directory
foreach (var pemFile in Directory.GetFiles(trustStorePath, "*.pem"))
{
var keyId = Path.GetFileNameWithoutExtension(pemFile);
var keyBytes = await File.ReadAllBytesAsync(pemFile, cancellationToken);
_trustRoots[keyId] = keyBytes;
_metadata[keyId] = new TrustRootMetadata
{
KeyId = keyId,
Algorithm = DetectAlgorithm(keyBytes),
Purpose = "signing"
};
loaded++;
}
}
return new TrustStoreLoadResult
{
Success = true,
LoadedCount = loaded,
KeyIds = [.. _trustRoots.Keys],
Errors = errors.Count > 0 ? [.. errors] : null
};
}
catch (Exception ex)
{
return TrustStoreLoadResult.Failed($"Failed to load trust store: {ex.Message}");
}
}
/// <summary>
/// Loads trust roots from bundle content.
/// </summary>
public TrustStoreLoadResult LoadFromBundle(byte[] trustRootsContent)
{
if (trustRootsContent is null || trustRootsContent.Length == 0)
{
return TrustStoreLoadResult.Failed("Trust roots content is empty");
}
try
{
_trustRoots.Clear();
_metadata.Clear();
var bundleData = JsonSerializer.Deserialize<TrustRootBundle>(trustRootsContent, JsonOptions);
if (bundleData?.Roots is null || bundleData.Roots.Count == 0)
{
return TrustStoreLoadResult.Failed("No trust roots in bundle");
}
foreach (var root in bundleData.Roots)
{
if (string.IsNullOrEmpty(root.KeyId) || string.IsNullOrEmpty(root.PublicKeyPem))
continue;
var keyBytes = System.Text.Encoding.UTF8.GetBytes(root.PublicKeyPem);
_trustRoots[root.KeyId] = keyBytes;
_metadata[root.KeyId] = new TrustRootMetadata
{
KeyId = root.KeyId,
Algorithm = root.Algorithm ?? "ES256",
ExpiresAt = root.ExpiresAt,
Purpose = root.Purpose ?? "signing"
};
}
return new TrustStoreLoadResult
{
Success = true,
LoadedCount = _trustRoots.Count,
KeyIds = [.. _trustRoots.Keys]
};
}
catch (Exception ex)
{
return TrustStoreLoadResult.Failed($"Failed to parse trust roots bundle: {ex.Message}");
}
}
/// <summary>
/// Gets a public key for signature verification.
/// </summary>
public TrustRootLookupResult GetPublicKey(string keyId)
{
if (!_trustRoots.TryGetValue(keyId, out var keyBytes))
{
return TrustRootLookupResult.NotFound(keyId);
}
var metadata = _metadata.GetValueOrDefault(keyId);
// Check expiration
if (metadata?.ExpiresAt is DateTimeOffset expiresAt && expiresAt < DateTimeOffset.UtcNow)
{
return new TrustRootLookupResult
{
Found = true,
KeyId = keyId,
KeyBytes = keyBytes,
Metadata = metadata,
Expired = true,
Warning = $"Key {keyId} expired at {expiresAt:u}"
};
}
return new TrustRootLookupResult
{
Found = true,
KeyId = keyId,
KeyBytes = keyBytes,
Metadata = metadata
};
}
/// <summary>
/// Creates an asymmetric algorithm from key bytes.
/// </summary>
public AsymmetricAlgorithm? CreateVerificationKey(string keyId)
{
var lookupResult = GetPublicKey(keyId);
if (!lookupResult.Found || lookupResult.KeyBytes is null)
{
return null;
}
var pemString = System.Text.Encoding.UTF8.GetString(lookupResult.KeyBytes);
var algorithm = lookupResult.Metadata?.Algorithm ?? "ES256";
try
{
if (algorithm.StartsWith("ES", StringComparison.OrdinalIgnoreCase))
{
var ecdsa = ECDsa.Create();
ecdsa.ImportFromPem(pemString);
return ecdsa;
}
else if (algorithm.StartsWith("RS", StringComparison.OrdinalIgnoreCase) ||
algorithm.StartsWith("PS", StringComparison.OrdinalIgnoreCase))
{
var rsa = RSA.Create();
rsa.ImportFromPem(pemString);
return rsa;
}
return null;
}
catch
{
return null;
}
}
/// <summary>
/// Gets all available key IDs.
/// </summary>
public IReadOnlyCollection<string> GetAvailableKeyIds() => _trustRoots.Keys;
/// <summary>
/// Gets count of loaded trust roots.
/// </summary>
public int Count => _trustRoots.Count;
private static string DetectAlgorithm(byte[] keyBytes)
{
var pem = System.Text.Encoding.UTF8.GetString(keyBytes);
if (pem.Contains("EC PRIVATE KEY") || pem.Contains("EC PUBLIC KEY"))
return "ES256";
if (pem.Contains("RSA PRIVATE KEY") || pem.Contains("RSA PUBLIC KEY"))
return "RS256";
return "unknown";
}
#region Internal Models
private sealed class TrustManifest
{
public List<TrustRootEntry>? Roots { get; set; }
}
private sealed class TrustRootEntry
{
public string KeyId { get; set; } = string.Empty;
public string? RelativePath { get; set; }
public string? Algorithm { get; set; }
public DateTimeOffset? ExpiresAt { get; set; }
public string? Purpose { get; set; }
}
private sealed class TrustRootBundle
{
public List<TrustRootData>? Roots { get; set; }
}
private sealed class TrustRootData
{
public string? KeyId { get; set; }
public string? PublicKeyPem { get; set; }
public string? Algorithm { get; set; }
public DateTimeOffset? ExpiresAt { get; set; }
public string? Purpose { get; set; }
}
#endregion
}
/// <summary>
/// Interface for AirGap trust store integration.
/// </summary>
public interface IAirGapTrustStoreIntegration
{
    /// <summary>Loads trust roots from a directory on disk.</summary>
    Task<TrustStoreLoadResult> LoadFromDirectoryAsync(
        string trustStorePath,
        CancellationToken cancellationToken = default);

    /// <summary>Loads trust roots from serialized bundle content.</summary>
    TrustStoreLoadResult LoadFromBundle(byte[] trustRootsContent);

    /// <summary>Looks up a public key by key id.</summary>
    TrustRootLookupResult GetPublicKey(string keyId);

    /// <summary>Creates an asymmetric algorithm for verification, or null when unavailable.</summary>
    AsymmetricAlgorithm? CreateVerificationKey(string keyId);

    /// <summary>All loaded key ids.</summary>
    IReadOnlyCollection<string> GetAvailableKeyIds();

    /// <summary>Count of loaded trust roots.</summary>
    int Count { get; }
}
#region Result Models
/// <summary>
/// Result of loading trust store.
/// </summary>
public sealed record TrustStoreLoadResult
{
    /// <summary>True when the load completed (possibly with per-key errors).</summary>
    public bool Success { get; init; }

    /// <summary>Number of trust roots loaded.</summary>
    public int LoadedCount { get; init; }

    /// <summary>Key ids of the loaded roots.</summary>
    public IReadOnlyList<string>? KeyIds { get; init; }

    /// <summary>Per-key load errors (e.g. missing key files); null when none.</summary>
    public IReadOnlyList<string>? Errors { get; init; }

    /// <summary>Overall failure message when <see cref="Success"/> is false.</summary>
    public string? Error { get; init; }

    /// <summary>Creates a failed result carrying <paramref name="error"/>.</summary>
    public static TrustStoreLoadResult Failed(string error) => new()
    {
        Success = false,
        Error = error
    };
}
/// <summary>
/// Result of trust root lookup.
/// </summary>
public sealed record TrustRootLookupResult
{
    /// <summary>True when the key id was present in the store (even if expired).</summary>
    public bool Found { get; init; }

    /// <summary>The key id that was looked up.</summary>
    public string? KeyId { get; init; }

    /// <summary>Raw key material (PEM bytes) when found.</summary>
    public byte[]? KeyBytes { get; init; }

    /// <summary>Metadata recorded for the key, when available.</summary>
    public TrustRootMetadata? Metadata { get; init; }

    /// <summary>True when the key's expiry lies in the past.</summary>
    public bool Expired { get; init; }

    /// <summary>Human-readable warning (e.g. expiry notice).</summary>
    public string? Warning { get; init; }

    /// <summary>Creates a not-found result for <paramref name="keyId"/>.</summary>
    public static TrustRootLookupResult NotFound(string keyId) => new()
    {
        Found = false,
        KeyId = keyId
    };
}
/// <summary>
/// Metadata about a trust root.
/// </summary>
public sealed record TrustRootMetadata
{
    /// <summary>Key identifier.</summary>
    public string? KeyId { get; init; }

    /// <summary>Algorithm hint (e.g. "ES256", "RS256").</summary>
    public string? Algorithm { get; init; }

    /// <summary>Optional expiry; expired keys are surfaced with a warning.</summary>
    public DateTimeOffset? ExpiresAt { get; init; }

    /// <summary>Intended purpose; defaults to "signing" on load.</summary>
    public string? Purpose { get; init; }
}
#endregion

View File

@@ -0,0 +1,5 @@
using System;

namespace StellaOps.AuditPack.Services;

/// <summary>
/// A single file to be written into a tar.gz archive.
/// </summary>
/// <param name="Path">Entry path inside the archive.</param>
/// <param name="Content">Raw file bytes.</param>
/// <param name="Mode">Optional Unix permission bits; a default mode is applied when null.</param>
internal sealed record ArchiveEntry(string Path, byte[] Content, UnixFileMode? Mode = null);

View File

@@ -0,0 +1,100 @@
using System;
using System.Formats.Tar;
using System.IO.Compression;

namespace StellaOps.AuditPack.Services;

internal static partial class ArchiveUtilities
{
    /// <summary>
    /// Extracts a gzip-compressed tar archive into <paramref name="targetDir"/>.
    /// Only regular-file entries are materialized; parent directories are created
    /// implicitly. Empty or truncated archives that yielded no entries are
    /// treated as empty. Entry paths are validated against traversal and escape.
    /// </summary>
    /// <param name="archivePath">Path to the .tar.gz file.</param>
    /// <param name="targetDir">Directory to extract into (created if missing).</param>
    /// <param name="overwriteFiles">When false, an existing target file raises <see cref="IOException"/>.</param>
    /// <param name="ct">Cancellation token, checked per entry.</param>
    public static async Task ExtractTarGzAsync(
        string archivePath,
        string targetDir,
        bool overwriteFiles,
        CancellationToken ct)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(archivePath);
        ArgumentException.ThrowIfNullOrWhiteSpace(targetDir);

        Directory.CreateDirectory(targetDir);
        var fullTarget = Path.GetFullPath(targetDir);
        // Fix: anchor the containment check with a trailing separator so a
        // sibling such as "/target-evil" cannot pass a prefix check for "/target".
        var targetPrefix = fullTarget.EndsWith(Path.DirectorySeparatorChar)
            ? fullTarget
            : fullTarget + Path.DirectorySeparatorChar;

        await using var fileStream = File.OpenRead(archivePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
        using var tarReader = new TarReader(gzipStream, leaveOpen: false);

        var extractedAny = false;
        try
        {
            TarEntry? entry;
            while ((entry = await tarReader.GetNextEntryAsync(cancellationToken: ct).ConfigureAwait(false)) is not null)
            {
                ct.ThrowIfCancellationRequested();
                extractedAny = true;
                if (entry.EntryType != TarEntryType.RegularFile)
                {
                    continue;
                }

                var safePath = NormalizeTarEntryPath(entry.Name);
                var destinationPath = Path.GetFullPath(Path.Combine(fullTarget, safePath));
                if (!destinationPath.StartsWith(targetPrefix, StringComparison.Ordinal))
                {
                    throw new InvalidOperationException($"Tar entry '{entry.Name}' escapes the target directory.");
                }

                var destinationDir = Path.GetDirectoryName(destinationPath);
                if (!string.IsNullOrWhiteSpace(destinationDir))
                {
                    Directory.CreateDirectory(destinationDir);
                }

                if (File.Exists(destinationPath) && !overwriteFiles)
                {
                    throw new IOException($"Target file already exists: {destinationPath}");
                }

                await using var outputStream = File.Create(destinationPath);
                // Fix: DataStream is null for zero-length entries; previously such
                // entries were skipped entirely instead of producing an empty file.
                if (entry.DataStream is not null)
                {
                    await entry.DataStream.CopyToAsync(outputStream, ct).ConfigureAwait(false);
                }
            }
        }
        catch (InvalidDataException) when (!extractedAny)
        {
            // Treat empty or truncated archives as empty; caller will handle missing manifest.
        }
        catch (EndOfStreamException) when (!extractedAny)
        {
            // Treat empty or truncated archives as empty; caller will handle missing manifest.
        }
    }

    /// <summary>
    /// Normalizes a tar entry name to a safe, forward-slash relative path.
    /// Throws for empty, rooted, or parent-traversing ("..") names.
    /// </summary>
    private static string NormalizeTarEntryPath(string entryName)
    {
        if (string.IsNullOrWhiteSpace(entryName))
        {
            throw new InvalidOperationException("Tar entry name is empty.");
        }

        var normalized = entryName.Replace('\\', '/');
        if (normalized.StartsWith("/", StringComparison.Ordinal))
        {
            normalized = normalized.TrimStart('/');
        }

        if (Path.IsPathRooted(normalized))
        {
            throw new InvalidOperationException($"Tar entry '{entryName}' is rooted.");
        }

        foreach (var segment in normalized.Split('/', StringSplitOptions.RemoveEmptyEntries))
        {
            if (segment == "." || segment == "..")
            {
                throw new InvalidOperationException($"Tar entry '{entryName}' contains parent traversal.");
            }
        }

        return normalized;
    }
}

View File

@@ -0,0 +1,63 @@
using System;
using System.Buffers.Binary;
using System.Formats.Tar;
using System.IO.Compression;

namespace StellaOps.AuditPack.Services;

internal static partial class ArchiveUtilities
{
    /// <summary>
    /// Writes <paramref name="entries"/> to a gzip-compressed PAX tar archive at
    /// <paramref name="outputPath"/> deterministically: entries are sorted by
    /// path (ordinal), tar metadata is pinned (fixed modification time, uid/gid 0,
    /// empty user/group names), and the gzip MTIME header field is rewritten
    /// afterwards.
    /// </summary>
    /// <param name="outputPath">Destination file path; parent directory is created if needed.</param>
    /// <param name="entries">Files to include; each may carry an explicit Unix mode.</param>
    /// <param name="ct">Cancellation token, checked before each entry.</param>
    public static async Task WriteTarGzAsync(
        string outputPath,
        IReadOnlyList<ArchiveEntry> entries,
        CancellationToken ct)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(outputPath);
        ArgumentNullException.ThrowIfNull(entries);

        var outputDir = Path.GetDirectoryName(outputPath);
        if (!string.IsNullOrWhiteSpace(outputDir))
        {
            Directory.CreateDirectory(outputDir);
        }

        await using (var fileStream = File.Create(outputPath))
        await using (var gzip = new GZipStream(fileStream, CompressionLevel.Optimal, leaveOpen: true))
        await using (var tarWriter = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: true))
        {
            // Ordinal sort keeps entry order stable across runs and platforms.
            foreach (var entry in entries.OrderBy(static e => e.Path, StringComparer.Ordinal))
            {
                ct.ThrowIfCancellationRequested();
                var tarEntry = new PaxTarEntry(TarEntryType.RegularFile, entry.Path)
                {
                    Mode = entry.Mode ?? DefaultFileMode,
                    ModificationTime = FixedTimestamp,
                    Uid = 0,
                    Gid = 0,
                    UserName = string.Empty,
                    GroupName = string.Empty
                };
                tarEntry.DataStream = new MemoryStream(entry.Content, writable: false);
                tarWriter.WriteEntry(tarEntry);
            }
        }

        // The gzip header embeds a timestamp; overwrite it after the streams close.
        ApplyDeterministicGzipHeader(outputPath, FixedTimestamp);
    }

    /// <summary>
    /// Rewrites the 4-byte little-endian MTIME field (offset 4) of the gzip
    /// header to <paramref name="timestamp"/> (seconds since the Unix epoch) so
    /// the output bytes do not depend on wall-clock time.
    /// NOTE(review): the gzip OS byte at offset 9 is left as GZipStream wrote it
    /// and may differ across platforms — confirm whether cross-platform
    /// byte-identical archives are required.
    /// </summary>
    private static void ApplyDeterministicGzipHeader(string outputPath, DateTimeOffset timestamp)
    {
        using var stream = new FileStream(outputPath, FileMode.Open, FileAccess.ReadWrite, FileShare.Read);
        if (stream.Length < 10)
        {
            throw new InvalidOperationException("GZip header not fully written for archive.");
        }
        var seconds = checked((int)(timestamp - DateTimeOffset.UnixEpoch).TotalSeconds);
        Span<byte> buffer = stackalloc byte[4];
        BinaryPrimitives.WriteInt32LittleEndian(buffer, seconds);
        stream.Position = 4;
        stream.Write(buffer);
    }
}

View File

@@ -1,160 +1,10 @@
using System.Buffers.Binary;
using System.Formats.Tar;
using System.IO.Compression;
using System;
namespace StellaOps.AuditPack.Services;
internal static class ArchiveUtilities
internal static partial class ArchiveUtilities
{
internal static readonly DateTimeOffset FixedTimestamp = DateTimeOffset.UnixEpoch;
private const UnixFileMode DefaultFileMode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead;
public static async Task WriteTarGzAsync(
string outputPath,
IReadOnlyList<ArchiveEntry> entries,
CancellationToken ct)
{
ArgumentException.ThrowIfNullOrWhiteSpace(outputPath);
ArgumentNullException.ThrowIfNull(entries);
var outputDir = Path.GetDirectoryName(outputPath);
if (!string.IsNullOrWhiteSpace(outputDir))
{
Directory.CreateDirectory(outputDir);
}
await using (var fileStream = File.Create(outputPath))
await using (var gzip = new GZipStream(fileStream, CompressionLevel.Optimal, leaveOpen: true))
await using (var tarWriter = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: true))
{
foreach (var entry in entries.OrderBy(static e => e.Path, StringComparer.Ordinal))
{
ct.ThrowIfCancellationRequested();
var tarEntry = new PaxTarEntry(TarEntryType.RegularFile, entry.Path)
{
Mode = entry.Mode ?? DefaultFileMode,
ModificationTime = FixedTimestamp,
Uid = 0,
Gid = 0,
UserName = string.Empty,
GroupName = string.Empty
};
tarEntry.DataStream = new MemoryStream(entry.Content, writable: false);
tarWriter.WriteEntry(tarEntry);
}
}
ApplyDeterministicGzipHeader(outputPath, FixedTimestamp);
}
/// <summary>
/// Extracts a gzip-compressed tar archive into <paramref name="targetDir"/>.
/// Only regular file entries are written; parent directories are created as needed.
/// An archive that is empty or truncated before yielding any entry is treated as empty
/// (the caller is expected to handle the resulting missing manifest).
/// </summary>
/// <param name="archivePath">Path of the .tar.gz archive to read.</param>
/// <param name="targetDir">Directory to extract into; created if it does not exist.</param>
/// <param name="overwriteFiles">When false, hitting an existing target file throws <see cref="IOException"/>.</param>
/// <param name="ct">Cancellation token, observed per entry and during content copies.</param>
/// <exception cref="InvalidOperationException">An entry path is unsafe or escapes the target directory.</exception>
/// <exception cref="IOException">A target file already exists and <paramref name="overwriteFiles"/> is false.</exception>
public static async Task ExtractTarGzAsync(
    string archivePath,
    string targetDir,
    bool overwriteFiles,
    CancellationToken ct)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(archivePath);
    ArgumentException.ThrowIfNullOrWhiteSpace(targetDir);
    Directory.CreateDirectory(targetDir);

    // Normalize the root without a trailing separator, then test containment with a
    // separator-suffixed prefix. A bare StartsWith(fullTarget) would also accept
    // sibling directories such as "/target-evil" and the target directory itself.
    var fullTarget = Path.TrimEndingDirectorySeparator(Path.GetFullPath(targetDir));
    var targetPrefix = fullTarget + Path.DirectorySeparatorChar;

    await using var fileStream = File.OpenRead(archivePath);
    await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
    using var tarReader = new TarReader(gzipStream, leaveOpen: false);
    var extractedAny = false;
    try
    {
        TarEntry? entry;
        while ((entry = await tarReader.GetNextEntryAsync(cancellationToken: ct).ConfigureAwait(false)) is not null)
        {
            ct.ThrowIfCancellationRequested();
            extractedAny = true;
            if (entry.EntryType != TarEntryType.RegularFile || entry.DataStream is null)
            {
                continue;
            }
            var safePath = NormalizeTarEntryPath(entry.Name);
            var destinationPath = Path.GetFullPath(Path.Combine(fullTarget, safePath));
            if (!destinationPath.StartsWith(targetPrefix, StringComparison.Ordinal))
            {
                throw new InvalidOperationException($"Tar entry '{entry.Name}' escapes the target directory.");
            }
            var destinationDir = Path.GetDirectoryName(destinationPath);
            if (!string.IsNullOrWhiteSpace(destinationDir))
            {
                Directory.CreateDirectory(destinationDir);
            }
            if (File.Exists(destinationPath) && !overwriteFiles)
            {
                throw new IOException($"Target file already exists: {destinationPath}");
            }
            await using var outputStream = File.Create(destinationPath);
            await entry.DataStream.CopyToAsync(outputStream, ct).ConfigureAwait(false);
        }
    }
    catch (InvalidDataException) when (!extractedAny)
    {
        // Treat empty or truncated archives as empty; caller will handle missing manifest.
    }
    catch (EndOfStreamException) when (!extractedAny)
    {
        // Treat empty or truncated archives as empty; caller will handle missing manifest.
    }
}
/// <summary>
/// Converts a raw tar entry name into a safe, forward-slash-separated relative path.
/// Rejects blank names, rooted paths, and any "." or ".." path segment.
/// </summary>
private static string NormalizeTarEntryPath(string entryName)
{
    if (string.IsNullOrWhiteSpace(entryName))
    {
        throw new InvalidOperationException("Tar entry name is empty.");
    }

    // Unify separators and strip any leading slashes before validation.
    var candidate = entryName.Replace('\\', '/').TrimStart('/');

    if (Path.IsPathRooted(candidate))
    {
        throw new InvalidOperationException($"Tar entry '{entryName}' is rooted.");
    }

    var segments = candidate.Split('/', StringSplitOptions.RemoveEmptyEntries);
    if (Array.Exists(segments, segment => segment is "." or ".."))
    {
        throw new InvalidOperationException($"Tar entry '{entryName}' contains parent traversal.");
    }

    return candidate;
}
/// <summary>
/// Overwrites the MTIME field (bytes 4-7, little-endian Unix seconds) of an on-disk
/// gzip file with a fixed timestamp so repeated archive builds are byte-identical.
/// </summary>
private static void ApplyDeterministicGzipHeader(string outputPath, DateTimeOffset timestamp)
{
    using var archive = new FileStream(outputPath, FileMode.Open, FileAccess.ReadWrite, FileShare.Read);
    if (archive.Length < 10)
    {
        throw new InvalidOperationException("GZip header not fully written for archive.");
    }

    var unixSeconds = checked((int)(timestamp - DateTimeOffset.UnixEpoch).TotalSeconds);
    Span<byte> mtime = stackalloc byte[4];
    BinaryPrimitives.WriteInt32LittleEndian(mtime, unixSeconds);
    archive.Seek(4, SeekOrigin.Begin);
    archive.Write(mtime);
}
}
/// <summary>
/// A single file to include in a deterministic archive: its in-archive path, raw
/// content bytes, and optional Unix permission bits (the writer applies a default
/// mode when <c>Mode</c> is null).
/// </summary>
internal sealed record ArchiveEntry(string Path, byte[] Content, UnixFileMode? Mode = null);

View File

@@ -0,0 +1,12 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Result of attestation verification.
/// </summary>
public sealed record AttestationVerificationResult
{
    /// <summary>Overall verification outcome.</summary>
    public bool IsValid { get; init; }

    /// <summary>Errors collected during verification; empty when none occurred.</summary>
    public IReadOnlyList<string> Errors { get; init; } = [];

    /// <summary>True when the attestation signature check succeeded.</summary>
    public bool SignatureVerified { get; init; }

    /// <summary>Timestamp recorded for when verification was performed.</summary>
    public DateTimeOffset VerifiedAt { get; init; }
}

View File

@@ -0,0 +1,56 @@
using System.Security.Cryptography;
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Request for reading an audit bundle.
/// Verification steps default to on; the "Require*" flags control whether a failed
/// verification aborts the read or is merely recorded on the result.
/// </summary>
public sealed record AuditBundleReadRequest
{
    /// <summary>Path of the bundle archive on disk.</summary>
    public required string BundlePath { get; init; }
    /// <summary>
    /// Verify the manifest signature.
    /// </summary>
    public bool VerifySignature { get; init; } = true;
    /// <summary>
    /// Fail the read if the signature is missing or invalid (default: record only).
    /// </summary>
    public bool RequireValidSignature { get; init; }
    /// <summary>
    /// Verify the merkle root over the bundle's file entries.
    /// </summary>
    public bool VerifyMerkleRoot { get; init; } = true;
    /// <summary>
    /// Fail the read if the merkle root is invalid.
    /// </summary>
    public bool RequireValidMerkleRoot { get; init; } = true;
    /// <summary>
    /// Verify input digests (SBOM, feeds, policy, and optional inputs).
    /// </summary>
    public bool VerifyInputDigests { get; init; } = true;
    /// <summary>
    /// Fail the read if input digests are invalid.
    /// </summary>
    public bool RequireValidInputDigests { get; init; } = true;
    /// <summary>
    /// Load replay inputs into memory.
    /// </summary>
    public bool LoadReplayInputs { get; init; }
    /// <summary>
    /// Extract bundle contents to this path if set; the path must not already exist.
    /// </summary>
    public string? ExtractToPath { get; init; }
    /// <summary>
    /// Public key for signature verification; when null, signature verification is
    /// inconclusive rather than failed.
    /// </summary>
    public AsymmetricAlgorithm? PublicKey { get; init; }
}

View File

@@ -0,0 +1,36 @@
using System.Collections.Immutable;
using StellaOps.AuditPack.Models;
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Result of reading an audit bundle. Nullable verification flags are null when
/// the corresponding verification step was not performed.
/// </summary>
public sealed record AuditBundleReadResult
{
    /// <summary>True when the bundle was read and all required verifications passed.</summary>
    public bool Success { get; init; }

    /// <summary>Parsed bundle manifest; null when manifest loading failed.</summary>
    public AuditBundleManifest? Manifest { get; init; }

    /// <summary>sha256 digest of the bundle archive file itself.</summary>
    public string? BundleDigest { get; init; }

    /// <summary>Directory the bundle contents were extracted to, when available.</summary>
    public string? ExtractedPath { get; init; }

    /// <summary>In-memory replay inputs, populated only when requested.</summary>
    public ReplayInputs? ReplayInputs { get; init; }

    /// <summary>Top-level failure description when <see cref="Success"/> is false.</summary>
    public string? Error { get; init; }

    // Signature verification (null = not performed or inconclusive).
    public bool? SignatureVerified { get; init; }
    public string? SignatureKeyId { get; init; }
    public string? SignatureError { get; init; }

    // Merkle root verification (null = not performed).
    public bool? MerkleRootVerified { get; init; }
    public string? MerkleRootError { get; init; }

    // Input digest verification (null = not performed).
    public bool? InputDigestsVerified { get; init; }
    public ImmutableArray<string>? InputDigestErrors { get; init; }

    /// <summary>Creates a failed result carrying only an error message.</summary>
    public static AuditBundleReadResult Failed(string error) => new()
    {
        Success = false,
        Error = error
    };
}

View File

@@ -0,0 +1,19 @@
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleReader
{
    /// <summary>
    /// Creates a unique temporary working directory named "{prefix}-{guid}" under
    /// the system temp path and returns its full path.
    /// </summary>
    private static string CreateTempDir(string prefix)
    {
        var path = Path.Combine(Path.GetTempPath(), $"{prefix}-{Guid.NewGuid():N}");
        Directory.CreateDirectory(path);
        return path;
    }

    /// <summary>
    /// Extracts the bundle archive into <paramref name="targetDir"/>, refusing to
    /// overwrite existing files.
    /// </summary>
    private static Task ExtractBundleAsync(
        string bundlePath,
        string targetDir,
        CancellationToken ct)
        => ArchiveUtilities.ExtractTarGzAsync(bundlePath, targetDir, overwriteFiles: false, ct);
}

View File

@@ -0,0 +1,89 @@
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleReader
{
    /// <summary>
    /// Relocates the extracted bundle from <paramref name="tempDir"/> to the
    /// requested extraction path, if any. Returns the possibly-updated result and
    /// the directory the caller should continue working from.
    /// </summary>
    private static ExtractionResult HandleExtraction(
        AuditBundleReadResult result,
        AuditBundleReadRequest request,
        string tempDir)
    {
        // No extraction requested: keep working out of the temp directory.
        if (string.IsNullOrWhiteSpace(request.ExtractToPath))
        {
            return new ExtractionResult(true, null, result, tempDir);
        }

        var targetDir = Path.GetFullPath(request.ExtractToPath);
        if (Directory.Exists(targetDir))
        {
            return new ExtractionResult(false, "ExtractToPath already exists", result, tempDir);
        }

        var parentDir = Path.GetDirectoryName(targetDir);
        if (!string.IsNullOrWhiteSpace(parentDir))
        {
            Directory.CreateDirectory(parentDir);
        }

        try
        {
            Directory.Move(tempDir, targetDir);
        }
        catch (IOException)
        {
            // Directory.Move cannot cross volume boundaries; fall back to copy + delete.
            try
            {
                CopyDirectory(tempDir, targetDir);
                TryDeleteDirectory(tempDir);
            }
            catch (Exception ex)
            {
                return new ExtractionResult(false, ex.Message, result, tempDir);
            }
        }
        catch (Exception ex)
        {
            return new ExtractionResult(false, ex.Message, result, tempDir);
        }

        return new ExtractionResult(true, null, result with { ExtractedPath = targetDir }, targetDir);
    }

    /// <summary>Recursively copies a directory tree; never overwrites existing files.</summary>
    private static void CopyDirectory(string sourceDir, string targetDir)
    {
        Directory.CreateDirectory(targetDir);

        foreach (var dir in Directory.EnumerateDirectories(sourceDir, "*", SearchOption.AllDirectories))
        {
            Directory.CreateDirectory(Path.Combine(targetDir, Path.GetRelativePath(sourceDir, dir)));
        }

        foreach (var sourceFile in Directory.EnumerateFiles(sourceDir, "*", SearchOption.AllDirectories))
        {
            var destinationFile = Path.Combine(targetDir, Path.GetRelativePath(sourceDir, sourceFile));
            var destinationDir = Path.GetDirectoryName(destinationFile);
            if (!string.IsNullOrWhiteSpace(destinationDir))
            {
                Directory.CreateDirectory(destinationDir);
            }
            File.Copy(sourceFile, destinationFile, overwrite: false);
        }
    }

    /// <summary>Best-effort recursive delete; cleanup failures are intentionally swallowed.</summary>
    private static void TryDeleteDirectory(string path)
    {
        try
        {
            if (Directory.Exists(path))
            {
                Directory.Delete(path, recursive: true);
            }
        }
        catch
        {
            // Ignore cleanup errors.
        }
    }
}

View File

@@ -0,0 +1,58 @@
using System.Linq;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleReader
{
    /// <summary>Formats a raw hash as a lowercase "sha256:&lt;hex&gt;" digest string.</summary>
    private static string FormatSha256(byte[] hash)
        => $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";

    /// <summary>Computes the sha256 digest of an in-memory buffer.</summary>
    private static string ComputeSha256(byte[] content)
        => FormatSha256(SHA256.HashData(content));

    /// <summary>Computes the sha256 digest of a file by streaming its contents.</summary>
    private static async Task<string> ComputeFileDigestAsync(string filePath, CancellationToken ct)
    {
        await using var stream = File.OpenRead(filePath);
        return FormatSha256(await SHA256.HashDataAsync(stream, ct).ConfigureAwait(false));
    }

    /// <summary>
    /// Computes a merkle root over the bundle entries. Leaves are
    /// sha256("{path}:{digest}") ordered by path (ordinal); each level hashes
    /// adjacent pairs, with a lone trailing node re-hashed on its own.
    /// Returns the empty string for an empty entry list.
    /// </summary>
    private static string ComputeMerkleRoot(List<BundleEntry> entries)
    {
        if (entries.Count == 0)
        {
            return string.Empty;
        }

        var level = entries
            .OrderBy(e => e.Path, StringComparer.Ordinal)
            .Select(e => SHA256.HashData(Encoding.UTF8.GetBytes($"{e.Path}:{e.Digest}")))
            .ToArray();

        while (level.Length > 1)
        {
            level = PairwiseHash(level).ToArray();
        }

        return FormatSha256(level[0]);
    }

    /// <summary>Hashes adjacent node pairs; an odd trailing node is hashed alone.</summary>
    private static IEnumerable<byte[]> PairwiseHash(byte[][] nodes)
    {
        for (var index = 0; index < nodes.Length; index += 2)
        {
            if (index + 1 >= nodes.Length)
            {
                yield return SHA256.HashData(nodes[index]);
            }
            else
            {
                var left = nodes[index];
                var right = nodes[index + 1];
                var combined = new byte[left.Length + right.Length];
                Buffer.BlockCopy(left, 0, combined, 0, left.Length);
                Buffer.BlockCopy(right, 0, combined, left.Length, right.Length);
                yield return SHA256.HashData(combined);
            }
        }
    }
}

View File

@@ -0,0 +1,99 @@
using System.Collections.Immutable;
using StellaOps.AuditPack.Models;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleReader
{
    /// <summary>
    /// Verifies the recorded input digests (required: SBOM, feeds, policy;
    /// optional: VEX, scoring rules, trust roots) against the files extracted
    /// from the bundle. Returns all discrepancies rather than stopping at the first.
    /// </summary>
    private static async Task<InputDigestVerificationResult> VerifyInputDigestsAsync(
        string bundleDir,
        AuditBundleManifest manifest,
        CancellationToken ct)
    {
        var errors = ImmutableArray.CreateBuilder<string>();
        await VerifyRequiredDigestAsync(bundleDir, "sbom.json", manifest.Inputs.SbomDigest, "sbom", errors, ct)
            .ConfigureAwait(false);
        await VerifyRequiredDigestAsync(bundleDir, "feeds/feeds-snapshot.ndjson", manifest.Inputs.FeedsDigest, "feeds", errors, ct)
            .ConfigureAwait(false);
        await VerifyRequiredDigestAsync(bundleDir, "policy/policy-bundle.tar.gz", manifest.Inputs.PolicyDigest, "policy", errors, ct)
            .ConfigureAwait(false);
        await VerifyOptionalDigestAsync(bundleDir, "vex/vex-statements.json", manifest.Inputs.VexDigest, "vex", errors, ct)
            .ConfigureAwait(false);
        await VerifyOptionalDigestAsync(bundleDir, "scoring-rules.json", manifest.Inputs.ScoringDigest, "scoring", errors, ct)
            .ConfigureAwait(false);
        await VerifyOptionalDigestAsync(bundleDir, "trust/trust-roots.json", manifest.Inputs.TrustRootsDigest, "trust-roots", errors, ct)
            .ConfigureAwait(false);
        return new InputDigestVerificationResult
        {
            Verified = errors.Count == 0,
            Errors = errors.ToImmutable()
        };
    }

    /// <summary>
    /// Verifies a digest that must be present in the manifest: a missing manifest
    /// digest, a missing file, or a mismatch all record an error.
    /// </summary>
    private static Task VerifyRequiredDigestAsync(
        string bundleDir,
        string relativePath,
        string expectedDigest,
        string label,
        ImmutableArray<string>.Builder errors,
        CancellationToken ct)
    {
        if (string.IsNullOrWhiteSpace(expectedDigest))
        {
            errors.Add($"Missing expected digest for {label}");
            return Task.CompletedTask;
        }
        return CompareDigestAsync(bundleDir, relativePath, expectedDigest, label, errors, ct);
    }

    /// <summary>
    /// Verifies a digest only when the manifest records one; absent manifest
    /// digests are skipped silently.
    /// </summary>
    private static Task VerifyOptionalDigestAsync(
        string bundleDir,
        string relativePath,
        string? expectedDigest,
        string label,
        ImmutableArray<string>.Builder errors,
        CancellationToken ct)
    {
        if (string.IsNullOrWhiteSpace(expectedDigest))
        {
            return Task.CompletedTask;
        }
        return CompareDigestAsync(bundleDir, relativePath, expectedDigest, label, errors, ct);
    }

    /// <summary>
    /// Shared comparison used by both required and optional verification:
    /// recomputes the file digest and records any missing-file or mismatch error.
    /// </summary>
    private static async Task CompareDigestAsync(
        string bundleDir,
        string relativePath,
        string expectedDigest,
        string label,
        ImmutableArray<string>.Builder errors,
        CancellationToken ct)
    {
        var actual = await ReadDigestAsync(bundleDir, relativePath, ct).ConfigureAwait(false);
        if (actual is null)
        {
            errors.Add($"Missing file for {label}: {relativePath}");
        }
        else if (!string.Equals(actual, expectedDigest, StringComparison.Ordinal))
        {
            errors.Add($"Digest mismatch for {label}: expected {expectedDigest}, got {actual}");
        }
    }

    /// <summary>
    /// Returns the sha256 digest of a bundle file, or null when the file does not exist.
    /// </summary>
    private static async Task<string?> ReadDigestAsync(
        string bundleDir,
        string relativePath,
        CancellationToken ct)
    {
        var filePath = GetBundlePath(bundleDir, relativePath);
        if (!File.Exists(filePath))
        {
            return null;
        }
        var content = await File.ReadAllBytesAsync(filePath, ct).ConfigureAwait(false);
        return ComputeSha256(content);
    }
}

View File

@@ -0,0 +1,27 @@
using System.Text.Json;
using StellaOps.AuditPack.Models;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleReader
{
    /// <summary>
    /// Loads and deserializes manifest.json from the extracted bundle directory.
    /// Returns a failure result (rather than throwing) when the manifest is missing
    /// or cannot be parsed, so the caller can report a precise error instead of a
    /// generic read failure.
    /// </summary>
    private static async Task<ManifestLoadResult> LoadManifestAsync(
        string bundleDir,
        CancellationToken ct)
    {
        var manifestPath = Path.Combine(bundleDir, "manifest.json");
        if (!File.Exists(manifestPath))
        {
            return new ManifestLoadResult(false, "Manifest not found in bundle", null, null);
        }

        var manifestBytes = await File.ReadAllBytesAsync(manifestPath, ct).ConfigureAwait(false);

        AuditBundleManifest? manifest;
        try
        {
            manifest = JsonSerializer.Deserialize<AuditBundleManifest>(manifestBytes, _jsonOptions);
        }
        catch (JsonException ex)
        {
            // Malformed JSON previously escaped to the generic catch in ReadAsync;
            // surface it as a manifest parse failure with the underlying reason.
            return new ManifestLoadResult(false, $"Failed to parse manifest: {ex.Message}", null, manifestBytes);
        }

        if (manifest is null)
        {
            return new ManifestLoadResult(false, "Failed to parse manifest", null, manifestBytes);
        }

        return new ManifestLoadResult(true, null, manifest, manifestBytes);
    }
}

View File

@@ -0,0 +1,72 @@
using StellaOps.AuditPack.Models;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleReader
{
    /// <summary>
    /// Recomputes the merkle root from the files listed in the manifest and compares
    /// it to the recorded root. Per-file digests are verified while loading entries;
    /// any exception is converted into a failed verification result.
    /// </summary>
    private static async Task<MerkleVerificationResult> VerifyMerkleRootAsync(
        string bundleDir,
        AuditBundleManifest manifest,
        CancellationToken ct)
    {
        try
        {
            var entriesResult = await LoadBundleEntriesAsync(bundleDir, manifest, ct).ConfigureAwait(false);
            if (!entriesResult.Success || entriesResult.Entries is null)
            {
                return new MerkleVerificationResult
                {
                    Verified = false,
                    Error = entriesResult.Error ?? "Failed to load bundle entries"
                };
            }
            var computedRoot = ComputeMerkleRoot(entriesResult.Entries);
            if (!string.Equals(computedRoot, manifest.MerkleRoot, StringComparison.Ordinal))
            {
                return new MerkleVerificationResult
                {
                    Verified = false,
                    Error = $"Merkle root mismatch: expected {manifest.MerkleRoot}, got {computedRoot}"
                };
            }
            return new MerkleVerificationResult { Verified = true };
        }
        catch (Exception ex)
        {
            return new MerkleVerificationResult
            {
                Verified = false,
                Error = ex.Message
            };
        }
    }

    /// <summary>
    /// Loads every file listed in the manifest, verifying each file's digest as it
    /// is read. Fails fast on the first missing file or digest mismatch.
    /// </summary>
    private static async Task<(bool Success, string? Error, List<BundleEntry>? Entries)> LoadBundleEntriesAsync(
        string bundleDir,
        AuditBundleManifest manifest,
        CancellationToken ct)
    {
        var entries = new List<BundleEntry>();
        foreach (var file in manifest.Files)
        {
            var filePath = GetBundlePath(bundleDir, file.RelativePath);
            if (!File.Exists(filePath))
            {
                return (false, $"Missing file: {file.RelativePath}", null);
            }
            var content = await File.ReadAllBytesAsync(filePath, ct).ConfigureAwait(false);
            var digest = ComputeSha256(content);
            if (!string.Equals(digest, file.Digest, StringComparison.Ordinal))
            {
                // Include both digests so a mismatch is diagnosable from the error alone.
                return (false, $"Digest mismatch for {file.RelativePath}: expected {file.Digest}, got {digest}", null);
            }
            entries.Add(new BundleEntry(file.RelativePath, digest, content.Length));
        }
        return (true, null, entries);
    }
}

View File

@@ -0,0 +1,40 @@
using StellaOps.AuditPack.Models;
using System.Collections.Immutable;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleReader
{
    /// <summary>A verified bundle file: archive-relative path, sha256 digest, and size.</summary>
    private sealed record BundleEntry(string Path, string Digest, long SizeBytes);

    /// <summary>
    /// Outcome of manifest signature verification. <c>Verified</c> is null when
    /// verification could not be concluded (e.g. no public key was provided).
    /// </summary>
    private sealed record SignatureVerificationResult
    {
        public bool? Verified { get; init; }
        public string? KeyId { get; init; }
        public string? Error { get; init; }
    }

    /// <summary>Outcome of merkle-root verification.</summary>
    private sealed record MerkleVerificationResult
    {
        public bool Verified { get; init; }
        public string? Error { get; init; }
    }

    /// <summary>Outcome of input-digest verification; all discrepancies are collected.</summary>
    private sealed record InputDigestVerificationResult
    {
        public bool Verified { get; init; }
        public ImmutableArray<string>? Errors { get; init; }
    }

    /// <summary>Outcome of loading manifest.json (parsed manifest plus its raw bytes).</summary>
    private sealed record ManifestLoadResult(
        bool Success,
        string? Error,
        AuditBundleManifest? Manifest,
        byte[]? ManifestBytes);

    /// <summary>
    /// Outcome of relocating the extracted bundle; <c>TempDir</c> is the directory
    /// the caller should continue working from.
    /// </summary>
    private sealed record ExtractionResult(
        bool Success,
        string? Error,
        AuditBundleReadResult Result,
        string TempDir);
}

View File

@@ -0,0 +1,10 @@
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleReader
{
    /// <summary>
    /// Maps an archive-relative path (forward-slash separated) onto the local
    /// filesystem beneath <paramref name="bundleDir"/>.
    /// </summary>
    private static string GetBundlePath(string bundleDir, string relativePath)
        => Path.Combine(bundleDir, relativePath.Replace('/', Path.DirectorySeparatorChar));
}

View File

@@ -0,0 +1,99 @@
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleReader
{
    /// <summary>
    /// Reads and verifies an audit bundle.
    /// Pipeline: extract to a temp directory, load the manifest, then run the
    /// signature / merkle / input-digest verification steps (each optional per the
    /// request, each able to stop the read when its "required" flag is set),
    /// optionally relocate the contents to <c>ExtractToPath</c>, and optionally load
    /// replay inputs into memory. Any unexpected exception becomes a failed result.
    /// </summary>
    public async Task<AuditBundleReadResult> ReadAsync(
        AuditBundleReadRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.BundlePath);
        if (!File.Exists(request.BundlePath))
        {
            return AuditBundleReadResult.Failed("Bundle file not found");
        }
        var tempDir = CreateTempDir("audit-read");
        try
        {
            await ExtractBundleAsync(request.BundlePath, tempDir, cancellationToken).ConfigureAwait(false);
            var manifestLoad = await LoadManifestAsync(tempDir, cancellationToken).ConfigureAwait(false);
            if (!manifestLoad.Success || manifestLoad.Manifest is null || manifestLoad.ManifestBytes is null)
            {
                return AuditBundleReadResult.Failed(manifestLoad.Error ?? "Failed to parse manifest");
            }
            // ExtractedPath initially points at the temp dir only when the caller did
            // not request extraction; HandleExtraction fills it in otherwise.
            var result = new AuditBundleReadResult
            {
                Success = true,
                Manifest = manifestLoad.Manifest,
                BundleDigest = await ComputeFileDigestAsync(request.BundlePath, cancellationToken).ConfigureAwait(false),
                ExtractedPath = request.ExtractToPath is not null ? null : tempDir
            };
            // Each Apply* step returns (Result, Stop); Stop short-circuits the read
            // when a required verification fails.
            var signature = await ApplySignatureVerificationAsync(
                result,
                request,
                manifestLoad.ManifestBytes,
                tempDir,
                cancellationToken)
                .ConfigureAwait(false);
            if (signature.Stop)
            {
                return signature.Result;
            }
            result = signature.Result;
            var merkle = await ApplyMerkleVerificationAsync(
                result,
                request,
                manifestLoad.Manifest,
                tempDir,
                cancellationToken)
                .ConfigureAwait(false);
            if (merkle.Stop)
            {
                return merkle.Result;
            }
            result = merkle.Result;
            var digests = await ApplyInputDigestVerificationAsync(
                result,
                request,
                manifestLoad.Manifest,
                tempDir,
                cancellationToken)
                .ConfigureAwait(false);
            if (digests.Stop)
            {
                return digests.Result;
            }
            result = digests.Result;
            // After a successful move, tempDir becomes the extraction target so
            // subsequent reads happen from the final location.
            var extraction = HandleExtraction(result, request, tempDir);
            if (!extraction.Success)
            {
                return extraction.Result with { Success = false, Error = extraction.Error };
            }
            result = extraction.Result;
            tempDir = extraction.TempDir;
            if (request.LoadReplayInputs)
            {
                var extractPath = result.ExtractedPath ?? tempDir;
                var inputs = await LoadReplayInputsAsync(extractPath, cancellationToken)
                    .ConfigureAwait(false);
                result = result with { ReplayInputs = inputs };
            }
            return result;
        }
        catch (Exception ex)
        {
            return AuditBundleReadResult.Failed($"Failed to read bundle: {ex.Message}");
        }
    }
}

View File

@@ -0,0 +1,62 @@
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleReader
{
    /// <summary>
    /// Loads replay inputs from the extracted bundle directory. SBOM, feeds
    /// snapshot, policy bundle, and verdict are required; the remaining inputs are
    /// optional and loaded only when present.
    /// </summary>
    /// <exception cref="FileNotFoundException">A required file is missing.</exception>
    private static async Task<ReplayInputs> LoadReplayInputsAsync(
        string bundleDir,
        CancellationToken ct)
    {
        var sbom = await ReadRequiredFileAsync(bundleDir, "sbom.json", "SBOM", ct).ConfigureAwait(false);
        var feeds = await ReadRequiredFileAsync(bundleDir, "feeds/feeds-snapshot.ndjson", "feeds", ct).ConfigureAwait(false);
        var policy = await ReadRequiredFileAsync(bundleDir, "policy/policy-bundle.tar.gz", "policy", ct).ConfigureAwait(false);
        var verdict = await ReadRequiredFileAsync(bundleDir, "verdict.json", "verdict", ct).ConfigureAwait(false);
        var vex = await ReadOptionalFileAsync(bundleDir, "vex/vex-statements.json", ct).ConfigureAwait(false);
        var proof = await ReadOptionalFileAsync(bundleDir, "proof/proof-bundle.json", ct).ConfigureAwait(false);
        var trustRoots = await ReadOptionalFileAsync(bundleDir, "trust/trust-roots.json", ct).ConfigureAwait(false);
        var scoring = await ReadOptionalFileAsync(bundleDir, "scoring-rules.json", ct).ConfigureAwait(false);
        var timeAnchor = await ReadOptionalFileAsync(bundleDir, "time-anchor.json", ct).ConfigureAwait(false);
        return new ReplayInputs
        {
            Sbom = sbom,
            FeedsSnapshot = feeds,
            PolicyBundle = policy,
            VexStatements = vex,
            Verdict = verdict,
            ProofBundle = proof,
            TrustRoots = trustRoots,
            ScoringRules = scoring,
            TimeAnchor = timeAnchor
        };
    }

    /// <summary>
    /// Reads a file that must exist, throwing <see cref="FileNotFoundException"/>
    /// (labelled for diagnostics) when it does not. Delegates the read itself to
    /// <see cref="ReadOptionalFileAsync"/> to keep a single read path.
    /// </summary>
    private static async Task<byte[]> ReadRequiredFileAsync(
        string bundleDir,
        string relativePath,
        string label,
        CancellationToken ct)
    {
        var content = await ReadOptionalFileAsync(bundleDir, relativePath, ct).ConfigureAwait(false);
        return content
            ?? throw new FileNotFoundException($"{label} file not found", GetBundlePath(bundleDir, relativePath));
    }

    /// <summary>Reads a file's bytes, or returns null when the file does not exist.</summary>
    private static async Task<byte[]?> ReadOptionalFileAsync(
        string bundleDir,
        string relativePath,
        CancellationToken ct)
    {
        var filePath = GetBundlePath(bundleDir, relativePath);
        if (!File.Exists(filePath))
        {
            return null;
        }
        return await File.ReadAllBytesAsync(filePath, ct).ConfigureAwait(false);
    }
}

View File

@@ -0,0 +1,75 @@
using System.Linq;
using System.Security.Cryptography;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleReader
{
    /// <summary>
    /// Verifies the bundle's signature envelope against the manifest bytes.
    /// Outcomes: Verified = false when the envelope fails, the signed payload digest
    /// does not match the manifest, or no signature verifies; Verified = null
    /// (inconclusive) when no public key was supplied; Verified = true otherwise.
    /// Exceptions are converted into a failed result rather than propagated.
    /// </summary>
    private static async Task<SignatureVerificationResult> VerifySignatureAsync(
        byte[] manifestBytes,
        byte[] signatureEnvelopeBytes,
        AsymmetricAlgorithm? publicKey,
        CancellationToken ct)
    {
        try
        {
            var signer = new AuditBundleSigner();
            var result = await signer.VerifyAsync(
                new AuditBundleVerificationRequest
                {
                    EnvelopeBytes = signatureEnvelopeBytes,
                    PublicKey = publicKey
                },
                ct)
                .ConfigureAwait(false);
            if (!result.Success)
            {
                return new SignatureVerificationResult
                {
                    Verified = false,
                    Error = result.Error
                };
            }
            // Bind the envelope to this manifest: the signed payload digest must
            // equal the digest of the manifest bytes we actually loaded.
            var manifestDigest = ComputeSha256(manifestBytes);
            if (!string.Equals(result.PayloadDigest, manifestDigest, StringComparison.Ordinal))
            {
                return new SignatureVerificationResult
                {
                    Verified = false,
                    Error = "Manifest digest does not match signed payload"
                };
            }
            var keyId = result.VerifiedSignatures?.FirstOrDefault()?.KeyId;
            // Without a public key the cryptographic check cannot be concluded:
            // report null (inconclusive) but still surface the key id.
            if (publicKey is null)
            {
                return new SignatureVerificationResult
                {
                    Verified = null,
                    KeyId = keyId,
                    Error = "No public key provided for verification"
                };
            }
            // At least one verified signature is sufficient; otherwise report the
            // first failing signature's error, if any.
            var verified = result.VerifiedSignatures?.Any(s => s.Verified) ?? false;
            var error = verified ? null : result.VerifiedSignatures?.FirstOrDefault(s => !s.Verified)?.Error;
            return new SignatureVerificationResult
            {
                Verified = verified,
                KeyId = keyId,
                Error = error
            };
        }
        catch (Exception ex)
        {
            return new SignatureVerificationResult
            {
                Verified = false,
                Error = ex.Message
            };
        }
    }
}

View File

@@ -0,0 +1,100 @@
using StellaOps.AuditPack.Models;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleReader
{
    /// <summary>
    /// Signature verification step. Returns the updated result plus a Stop flag:
    /// Stop is true when a required signature is missing or fails to verify.
    /// </summary>
    private static async Task<(AuditBundleReadResult Result, bool Stop)> ApplySignatureVerificationAsync(
        AuditBundleReadResult result,
        AuditBundleReadRequest request,
        byte[] manifestBytes,
        string tempDir,
        CancellationToken ct)
    {
        if (!request.VerifySignature)
        {
            return (result, Stop: false);
        }

        var signaturePath = Path.Combine(tempDir, "manifest.sig");
        if (!File.Exists(signaturePath))
        {
            if (request.RequireValidSignature)
            {
                return (AuditBundleReadResult.Failed("Signature file not found but signature is required"), Stop: true);
            }
            return (result, Stop: false);
        }

        var envelope = await File.ReadAllBytesAsync(signaturePath, ct).ConfigureAwait(false);
        var verification = await VerifySignatureAsync(manifestBytes, envelope, request.PublicKey, ct)
            .ConfigureAwait(false);

        var updated = result with
        {
            SignatureVerified = verification.Verified,
            SignatureKeyId = verification.KeyId,
            SignatureError = verification.Error
        };

        // Verified can be null (inconclusive); != true treats that as not verified.
        var failedRequirement = request.RequireValidSignature && verification.Verified != true;
        if (failedRequirement)
        {
            updated = updated with { Success = false, Error = $"Signature verification failed: {verification.Error}" };
        }
        return (updated, Stop: failedRequirement);
    }

    /// <summary>
    /// Merkle-root verification step; Stop is true when a required root check fails.
    /// </summary>
    private static async Task<(AuditBundleReadResult Result, bool Stop)> ApplyMerkleVerificationAsync(
        AuditBundleReadResult result,
        AuditBundleReadRequest request,
        AuditBundleManifest manifest,
        string tempDir,
        CancellationToken ct)
    {
        if (!request.VerifyMerkleRoot)
        {
            return (result, Stop: false);
        }

        var verification = await VerifyMerkleRootAsync(tempDir, manifest, ct).ConfigureAwait(false);
        var updated = result with
        {
            MerkleRootVerified = verification.Verified,
            MerkleRootError = verification.Error
        };

        var failedRequirement = request.RequireValidMerkleRoot && !verification.Verified;
        if (failedRequirement)
        {
            updated = updated with { Success = false, Error = $"Merkle root verification failed: {verification.Error}" };
        }
        return (updated, Stop: failedRequirement);
    }

    /// <summary>
    /// Input-digest verification step; Stop is true when required digests mismatch.
    /// </summary>
    private static async Task<(AuditBundleReadResult Result, bool Stop)> ApplyInputDigestVerificationAsync(
        AuditBundleReadResult result,
        AuditBundleReadRequest request,
        AuditBundleManifest manifest,
        string tempDir,
        CancellationToken ct)
    {
        if (!request.VerifyInputDigests)
        {
            return (result, Stop: false);
        }

        var verification = await VerifyInputDigestsAsync(tempDir, manifest, ct).ConfigureAwait(false);
        var updated = result with
        {
            InputDigestsVerified = verification.Verified,
            InputDigestErrors = verification.Errors
        };

        var failedRequirement = request.RequireValidInputDigests && !verification.Verified;
        if (failedRequirement)
        {
            var error = $"Input digest verification failed: {string.Join("; ", verification.Errors ?? [])}";
            updated = updated with { Success = false, Error = error };
        }
        return (updated, Stop: failedRequirement);
    }
}

View File

@@ -1,15 +1,3 @@
// -----------------------------------------------------------------------------
// AuditBundleReader.cs
// Sprint: SPRINT_4300_0001_0002 (One-Command Audit Replay CLI)
// Tasks: REPLAY-005, REPLAY-007 - AuditBundleReader with verification
// Description: Reads and verifies audit bundles for offline replay.
// -----------------------------------------------------------------------------
using StellaOps.AuditPack.Models;
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
namespace StellaOps.AuditPack.Services;
@@ -17,652 +5,10 @@ namespace StellaOps.AuditPack.Services;
/// <summary>
/// Reads and verifies audit bundles for deterministic offline replay.
/// </summary>
public sealed class AuditBundleReader : IAuditBundleReader
public sealed partial class AuditBundleReader : IAuditBundleReader
{
private static readonly JsonSerializerOptions JsonOptions = new()
private static readonly JsonSerializerOptions _jsonOptions = new()
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
/// <summary>
/// Reads and verifies an audit bundle.
/// </summary>
public async Task<AuditBundleReadResult> ReadAsync(
AuditBundleReadRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentException.ThrowIfNullOrWhiteSpace(request.BundlePath);
if (!File.Exists(request.BundlePath))
{
return AuditBundleReadResult.Failed("Bundle file not found");
}
var tempDir = Path.Combine(Path.GetTempPath(), $"audit-read-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
{
// Extract bundle
await ExtractBundleAsync(request.BundlePath, tempDir, cancellationToken);
// Read manifest
var manifestPath = Path.Combine(tempDir, "manifest.json");
if (!File.Exists(manifestPath))
{
return AuditBundleReadResult.Failed("Manifest not found in bundle");
}
var manifestBytes = await File.ReadAllBytesAsync(manifestPath, cancellationToken);
var manifest = JsonSerializer.Deserialize<AuditBundleManifest>(manifestBytes, JsonOptions);
if (manifest is null)
{
return AuditBundleReadResult.Failed("Failed to parse manifest");
}
var result = new AuditBundleReadResult
{
Success = true,
Manifest = manifest,
BundleDigest = await ComputeFileDigestAsync(request.BundlePath, cancellationToken),
ExtractedPath = request.ExtractToPath is not null ? null : tempDir
};
// Verify signature if requested
if (request.VerifySignature)
{
var signaturePath = Path.Combine(tempDir, "manifest.sig");
if (File.Exists(signaturePath))
{
var signatureBytes = await File.ReadAllBytesAsync(signaturePath, cancellationToken);
var signatureResult = await VerifySignatureAsync(
manifestBytes, signatureBytes, request.PublicKey, cancellationToken);
result = result with
{
SignatureVerified = signatureResult.Verified,
SignatureKeyId = signatureResult.KeyId,
SignatureError = signatureResult.Error
};
if (!signatureResult.Verified && request.RequireValidSignature)
{
return result with
{
Success = false,
Error = $"Signature verification failed: {signatureResult.Error}"
};
}
}
else if (request.RequireValidSignature)
{
return AuditBundleReadResult.Failed("Signature file not found but signature is required");
}
}
// Verify merkle root if requested
if (request.VerifyMerkleRoot)
{
var merkleResult = await VerifyMerkleRootAsync(tempDir, manifest, cancellationToken);
result = result with
{
MerkleRootVerified = merkleResult.Verified,
MerkleRootError = merkleResult.Error
};
if (!merkleResult.Verified && request.RequireValidMerkleRoot)
{
return result with
{
Success = false,
Error = $"Merkle root verification failed: {merkleResult.Error}"
};
}
}
// Verify input digests if requested
if (request.VerifyInputDigests)
{
var digestResult = await VerifyInputDigestsAsync(tempDir, manifest, cancellationToken);
result = result with
{
InputDigestsVerified = digestResult.Verified,
InputDigestErrors = digestResult.Errors
};
if (!digestResult.Verified && request.RequireValidInputDigests)
{
return result with
{
Success = false,
Error = $"Input digest verification failed: {string.Join("; ", digestResult.Errors ?? [])}"
};
}
}
// Extract contents if requested
if (request.ExtractToPath is not null)
{
if (Directory.Exists(request.ExtractToPath))
{
if (!request.OverwriteExisting)
{
return result with
{
Success = false,
Error = "Extract path already exists and overwrite is not enabled"
};
}
Directory.Delete(request.ExtractToPath, recursive: true);
}
Directory.Move(tempDir, request.ExtractToPath);
result = result with { ExtractedPath = request.ExtractToPath };
// Create a new temp dir for cleanup
tempDir = Path.Combine(Path.GetTempPath(), $"audit-read-empty-{Guid.NewGuid():N}");
}
// Load replay inputs if requested
if (request.LoadReplayInputs)
{
var extractPath = result.ExtractedPath ?? tempDir;
var inputs = await LoadReplayInputsAsync(extractPath, manifest, cancellationToken);
result = result with { ReplayInputs = inputs };
}
return result;
}
catch (Exception ex)
{
return AuditBundleReadResult.Failed($"Failed to read bundle: {ex.Message}");
}
finally
{
// Clean up temp directory
try
{
if (Directory.Exists(tempDir) && request.ExtractToPath is null)
{
// Only cleanup if we didn't move to extract path
Directory.Delete(tempDir, recursive: true);
}
}
catch
{
// Ignore cleanup errors
}
}
}
private static Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct)
=> ArchiveUtilities.ExtractTarGzAsync(bundlePath, targetDir, overwriteFiles: true, ct);
private static async Task<string> ComputeFileDigestAsync(string filePath, CancellationToken ct)
{
await using var stream = File.OpenRead(filePath);
var hash = await SHA256.HashDataAsync(stream, ct);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
private static async Task<SignatureVerificationResult> VerifySignatureAsync(
byte[] manifestBytes,
byte[] signatureEnvelopeBytes,
AsymmetricAlgorithm? publicKey,
CancellationToken cancellationToken)
{
try
{
var signer = new AuditBundleSigner();
var result = await signer.VerifyAsync(
new AuditBundleVerificationRequest
{
EnvelopeBytes = signatureEnvelopeBytes,
PublicKey = publicKey
},
cancellationToken);
if (!result.Success)
{
return new SignatureVerificationResult
{
Verified = false,
Error = result.Error
};
}
// Verify payload digest matches manifest
var manifestDigest = ComputeSha256(manifestBytes);
if (result.PayloadDigest != manifestDigest)
{
return new SignatureVerificationResult
{
Verified = false,
Error = "Manifest digest does not match signed payload"
};
}
var keyId = result.VerifiedSignatures?.FirstOrDefault()?.KeyId;
var anyVerified = publicKey is null || (result.VerifiedSignatures?.Any(s => s.Verified) ?? false);
return new SignatureVerificationResult
{
Verified = anyVerified,
KeyId = keyId
};
}
catch (Exception ex)
{
return new SignatureVerificationResult
{
Verified = false,
Error = ex.Message
};
}
}
private static async Task<MerkleVerificationResult> VerifyMerkleRootAsync(
    string bundleDir,
    AuditBundleManifest manifest,
    CancellationToken cancellationToken)
{
    // Re-hashes every file listed in the manifest and recomputes the merkle
    // root over those hashes; a missing file, a per-file digest mismatch or
    // a root mismatch all fail verification.
    static MerkleVerificationResult Fail(string error) =>
        new() { Verified = false, Error = error };

    try
    {
        var verifiedEntries = new List<BundleEntry>();

        foreach (var file in manifest.Files)
        {
            var onDiskPath = Path.Combine(
                bundleDir,
                file.RelativePath.Replace('/', Path.DirectorySeparatorChar));

            if (!File.Exists(onDiskPath))
            {
                return Fail($"Missing file: {file.RelativePath}");
            }

            var bytes = await File.ReadAllBytesAsync(onDiskPath, cancellationToken);
            var actualDigest = ComputeSha256(bytes);
            if (actualDigest != file.Digest)
            {
                return Fail($"Digest mismatch for {file.RelativePath}: expected {file.Digest}, got {actualDigest}");
            }

            verifiedEntries.Add(new BundleEntry(file.RelativePath, actualDigest, bytes.Length));
        }

        var recomputedRoot = ComputeMerkleRoot(verifiedEntries);
        return recomputedRoot == manifest.MerkleRoot
            ? new MerkleVerificationResult { Verified = true }
            : Fail($"Merkle root mismatch: expected {manifest.MerkleRoot}, got {recomputedRoot}");
    }
    catch (Exception ex)
    {
        return Fail(ex.Message);
    }
}
/// <summary>
/// Verifies that each replay input file on disk hashes to the digest
/// recorded in the manifest. Required inputs (SBOM, feeds, policy) must
/// exist; optional inputs (VEX, scoring rules, trust roots) are checked
/// only when the manifest records a digest for them. All problems are
/// accumulated rather than failing on the first one.
/// </summary>
private static async Task<InputDigestVerificationResult> VerifyInputDigestsAsync(
    string bundleDir,
    AuditBundleManifest manifest,
    CancellationToken cancellationToken)
{
    var errors = new List<string>();

    // Required inputs: always expected in a well-formed bundle.
    await VerifyDigestEntryAsync(
        Path.Combine(bundleDir, "sbom.json"),
        manifest.Inputs.SbomDigest,
        "SBOM",
        "SBOM file not found",
        errors,
        cancellationToken);
    await VerifyDigestEntryAsync(
        Path.Combine(bundleDir, "feeds", "feeds-snapshot.ndjson"),
        manifest.Inputs.FeedsDigest,
        "Feeds",
        "Feeds snapshot file not found",
        errors,
        cancellationToken);
    await VerifyDigestEntryAsync(
        Path.Combine(bundleDir, "policy", "policy-bundle.tar.gz"),
        manifest.Inputs.PolicyDigest,
        "Policy",
        "Policy bundle file not found",
        errors,
        cancellationToken);

    // Optional inputs: a recorded digest implies the file must be present.
    if (manifest.Inputs.VexDigest is not null)
    {
        await VerifyDigestEntryAsync(
            Path.Combine(bundleDir, "vex", "vex-statements.json"),
            manifest.Inputs.VexDigest,
            "VEX",
            "VEX file not found but digest specified in manifest",
            errors,
            cancellationToken);
    }

    if (manifest.Inputs.ScoringDigest is not null)
    {
        await VerifyDigestEntryAsync(
            Path.Combine(bundleDir, "scoring-rules.json"),
            manifest.Inputs.ScoringDigest,
            "Scoring rules",
            "Scoring rules file not found but digest specified in manifest",
            errors,
            cancellationToken);
    }

    if (manifest.Inputs.TrustRootsDigest is not null)
    {
        await VerifyDigestEntryAsync(
            Path.Combine(bundleDir, "trust", "trust-roots.json"),
            manifest.Inputs.TrustRootsDigest,
            "Trust roots",
            "Trust roots file not found but digest specified in manifest",
            errors,
            cancellationToken);
    }

    return new InputDigestVerificationResult
    {
        Verified = errors.Count == 0,
        Errors = errors.Count > 0 ? [.. errors] : null
    };
}

/// <summary>
/// Hashes <paramref name="filePath"/> and appends an error to
/// <paramref name="errors"/> when the file is missing or its SHA-256
/// does not match <paramref name="expectedDigest"/>.
/// </summary>
/// <param name="label">Human-readable input name used in mismatch messages.</param>
/// <param name="missingFileError">Exact error text to emit when the file is absent.</param>
private static async Task VerifyDigestEntryAsync(
    string filePath,
    string expectedDigest,
    string label,
    string missingFileError,
    List<string> errors,
    CancellationToken cancellationToken)
{
    if (!File.Exists(filePath))
    {
        errors.Add(missingFileError);
        return;
    }

    var content = await File.ReadAllBytesAsync(filePath, cancellationToken);
    var actualDigest = ComputeSha256(content);
    if (actualDigest != expectedDigest)
    {
        errors.Add($"{label} digest mismatch: expected {expectedDigest}, got {actualDigest}");
    }
}
private static async Task<ReplayInputs> LoadReplayInputsAsync(
    string bundleDir,
    AuditBundleManifest manifest,
    CancellationToken cancellationToken)
{
    // Loads each well-known bundle artifact into memory when present on
    // disk; an absent file simply leaves the matching property null.
    // NOTE(review): the manifest parameter is currently unused here.
    async Task<byte[]?> ReadOptionalAsync(string relativePath)
    {
        var fullPath = Path.Combine(bundleDir, relativePath);
        return File.Exists(fullPath)
            ? await File.ReadAllBytesAsync(fullPath, cancellationToken)
            : null;
    }

    return new ReplayInputs
    {
        Sbom = await ReadOptionalAsync("sbom.json"),
        FeedsSnapshot = await ReadOptionalAsync(Path.Combine("feeds", "feeds-snapshot.ndjson")),
        PolicyBundle = await ReadOptionalAsync(Path.Combine("policy", "policy-bundle.tar.gz")),
        VexStatements = await ReadOptionalAsync(Path.Combine("vex", "vex-statements.json")),
        Verdict = await ReadOptionalAsync("verdict.json")
    };
}
// Canonical digest representation used throughout the bundle: "sha256:<lowercase hex>".
private static string ComputeSha256(byte[] content)
{
    var digestHex = Convert.ToHexString(SHA256.HashData(content)).ToLowerInvariant();
    return "sha256:" + digestHex;
}
// Deterministic merkle root: leaves are SHA-256 over "path:digest", ordered
// by path with ordinal comparison so the root is stable across platforms
// and locales. An empty entry list yields an empty root.
private static string ComputeMerkleRoot(List<BundleEntry> entries)
{
    if (entries.Count == 0)
    {
        return string.Empty;
    }

    var level = entries
        .OrderBy(entry => entry.Path, StringComparer.Ordinal)
        .Select(entry => SHA256.HashData(Encoding.UTF8.GetBytes($"{entry.Path}:{entry.Digest}")))
        .ToArray();

    // Reduce pairwise until only the root hash remains.
    while (level.Length > 1)
    {
        level = [.. PairwiseHash(level)];
    }

    return $"sha256:{Convert.ToHexString(level[0]).ToLowerInvariant()}";
}
// Hashes adjacent node pairs into the next merkle level; an unpaired
// trailing node is promoted by re-hashing it on its own.
private static IEnumerable<byte[]> PairwiseHash(byte[][] nodes)
{
    for (var index = 0; index < nodes.Length; index += 2)
    {
        var left = nodes[index];
        if (index + 1 == nodes.Length)
        {
            // Odd node count: promote the last node via a single re-hash.
            yield return SHA256.HashData(left);
        }
        else
        {
            var right = nodes[index + 1];
            var joined = new byte[left.Length + right.Length];
            left.CopyTo(joined, 0);
            right.CopyTo(joined, left.Length);
            yield return SHA256.HashData(joined);
        }
    }
}
/// <summary>In-memory view of one bundle file (relative path, sha256 digest, byte size) used for merkle computation.</summary>
private sealed record BundleEntry(string Path, string Digest, long SizeBytes);
/// <summary>Outcome of DSSE signature verification over the bundle manifest.</summary>
private sealed record SignatureVerificationResult
{
    public bool Verified { get; init; }
    // Key id of the first signature in the envelope, when present.
    public string? KeyId { get; init; }
    public string? Error { get; init; }
}
/// <summary>Outcome of recomputing and comparing the manifest merkle root.</summary>
private sealed record MerkleVerificationResult
{
    public bool Verified { get; init; }
    public string? Error { get; init; }
}
/// <summary>Outcome of checking replay-input files against the digests in the manifest.</summary>
private sealed record InputDigestVerificationResult
{
    public bool Verified { get; init; }
    // Null when verification passed; otherwise all accumulated problems.
    public ImmutableArray<string>? Errors { get; init; }
}
}
/// <summary>
/// Interface for audit bundle reading.
/// </summary>
public interface IAuditBundleReader
{
    /// <summary>
    /// Reads an audit bundle, performing the verification and extraction
    /// steps selected by the request flags.
    /// </summary>
    Task<AuditBundleReadResult> ReadAsync(
        AuditBundleReadRequest request,
        CancellationToken cancellationToken = default);
}
#region Request and Result Models
/// <summary>
/// Request for reading an audit bundle.
/// </summary>
public sealed record AuditBundleReadRequest
{
    /// <summary>
    /// Path to the audit bundle archive on disk.
    /// </summary>
    public required string BundlePath { get; init; }
    /// <summary>
    /// Verify the manifest signature.
    /// </summary>
    public bool VerifySignature { get; init; } = true;
    /// <summary>
    /// Fail if signature is invalid.
    /// </summary>
    public bool RequireValidSignature { get; init; }
    /// <summary>
    /// Verify the merkle root.
    /// </summary>
    public bool VerifyMerkleRoot { get; init; } = true;
    /// <summary>
    /// Fail if merkle root is invalid.
    /// </summary>
    public bool RequireValidMerkleRoot { get; init; } = true;
    /// <summary>
    /// Verify input digests match manifest.
    /// </summary>
    public bool VerifyInputDigests { get; init; } = true;
    /// <summary>
    /// Fail if input digests are invalid.
    /// </summary>
    public bool RequireValidInputDigests { get; init; } = true;
    /// <summary>
    /// Extract bundle contents to this path.
    /// </summary>
    public string? ExtractToPath { get; init; }
    /// <summary>
    /// Overwrite existing extraction directory.
    /// </summary>
    public bool OverwriteExisting { get; init; }
    /// <summary>
    /// Load replay inputs into memory.
    /// </summary>
    public bool LoadReplayInputs { get; init; }
    /// <summary>
    /// Public key for signature verification. When null, signature checks
    /// are skipped rather than failed.
    /// </summary>
    public AsymmetricAlgorithm? PublicKey { get; init; }
}
/// <summary>
/// Result of reading an audit bundle.
/// </summary>
public sealed record AuditBundleReadResult
{
    /// <summary>Whether the bundle was read successfully.</summary>
    public bool Success { get; init; }
    /// <summary>Parsed bundle manifest, when available.</summary>
    public AuditBundleManifest? Manifest { get; init; }
    /// <summary>Digest of the bundle archive itself.</summary>
    public string? BundleDigest { get; init; }
    /// <summary>Directory contents were extracted to, when extraction was requested.</summary>
    public string? ExtractedPath { get; init; }
    public string? Error { get; init; }
    // Signature verification (null when verification was not attempted)
    public bool? SignatureVerified { get; init; }
    public string? SignatureKeyId { get; init; }
    public string? SignatureError { get; init; }
    // Merkle root verification
    public bool? MerkleRootVerified { get; init; }
    public string? MerkleRootError { get; init; }
    // Input digest verification
    public bool? InputDigestsVerified { get; init; }
    public ImmutableArray<string>? InputDigestErrors { get; init; }
    // Replay inputs (populated only when LoadReplayInputs was requested)
    public ReplayInputs? ReplayInputs { get; init; }
    /// <summary>Convenience factory for an unsuccessful read.</summary>
    public static AuditBundleReadResult Failed(string error) => new()
    {
        Success = false,
        Error = error
    };
}
/// <summary>
/// Loaded replay inputs from a bundle. Each property holds the raw bytes
/// of the corresponding bundle artifact, or null when the file was absent.
/// </summary>
public sealed record ReplayInputs
{
    public byte[]? Sbom { get; init; }
    public byte[]? FeedsSnapshot { get; init; }
    public byte[]? PolicyBundle { get; init; }
    public byte[]? VexStatements { get; init; }
    public byte[]? Verdict { get; init; }
}
#endregion

View File

@@ -0,0 +1,85 @@
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleSigner
{
    /// <summary>
    /// Builds the DSSE Pre-Authentication Encoding:
    /// "DSSEv1" SP len(type) SP type SP len(payload) SP payload,
    /// where lengths are ASCII decimal byte counts.
    /// </summary>
    private static byte[] CreatePae(string payloadType, byte[] payload)
    {
        const string prefix = "DSSEv1";
        var typeBytes = Encoding.UTF8.GetBytes(payloadType);
        using var ms = new MemoryStream();
        using var writer = new BinaryWriter(ms);
        writer.Write(Encoding.UTF8.GetBytes(prefix));
        writer.Write((byte)' ');
        writer.Write(Encoding.UTF8.GetBytes(typeBytes.Length.ToString()));
        writer.Write((byte)' ');
        writer.Write(typeBytes);
        writer.Write((byte)' ');
        writer.Write(Encoding.UTF8.GetBytes(payload.Length.ToString()));
        writer.Write((byte)' ');
        writer.Write(payload);
        return ms.ToArray();
    }

    /// <summary>
    /// Loads a PEM-encoded private key from disk, trying ECDSA (ES256)
    /// first and falling back to RSA (RS256). Throws when the PEM matches
    /// neither algorithm.
    /// </summary>
    private static async Task<(AsymmetricAlgorithm Key, string KeyId, string Algorithm)> LoadKeyFromFileAsync(
        string keyFilePath,
        string? password,
        CancellationToken ct)
    {
        var keyPem = await File.ReadAllTextAsync(keyFilePath, ct).ConfigureAwait(false);

        var ecdsa = ECDsa.Create();
        try
        {
            if (password is not null)
            {
                ecdsa.ImportFromEncryptedPem(keyPem, password);
            }
            else
            {
                ecdsa.ImportFromPem(keyPem);
            }
            return (ecdsa, $"file:{ComputeKeyId(ecdsa)}", "ES256");
        }
        catch
        {
            // Not an ECDSA key: dispose the unused instance (previously
            // leaked here) before falling back to RSA.
            ecdsa.Dispose();
        }

        var rsa = RSA.Create();
        try
        {
            if (password is not null)
            {
                rsa.ImportFromEncryptedPem(keyPem, password);
            }
            else
            {
                rsa.ImportFromPem(keyPem);
            }
            return (rsa, $"file:{ComputeKeyIdRsa(rsa)}", "RS256");
        }
        catch
        {
            // RSA import failed too: release the instance and surface the error.
            rsa.Dispose();
            throw;
        }
    }

    /// <summary>Short key id: first 8 bytes of SHA-256 over the SubjectPublicKeyInfo, lowercase hex.</summary>
    private static string ComputeKeyId(ECDsa ecdsa)
    {
        var publicKey = ecdsa.ExportSubjectPublicKeyInfo();
        var hash = SHA256.HashData(publicKey);
        return Convert.ToHexString(hash[..8]).ToLowerInvariant();
    }

    /// <summary>RSA variant of <see cref="ComputeKeyId"/>.</summary>
    private static string ComputeKeyIdRsa(RSA rsa)
    {
        var publicKey = rsa.ExportSubjectPublicKeyInfo();
        var hash = SHA256.HashData(publicKey);
        return Convert.ToHexString(hash[..8]).ToLowerInvariant();
    }

    /// <summary>Canonical digest representation: "sha256:&lt;lowercase hex&gt;".</summary>
    private static string ComputeSha256(byte[] content)
    {
        var hash = SHA256.HashData(content);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}

View File

@@ -0,0 +1,17 @@
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleSigner
{
    // JSON wire shape of a DSSE envelope (serialized with camelCase naming).
    // Properties are nullable because envelopes are deserialized from
    // untrusted bytes and validated afterwards.
    private sealed class DsseEnvelope
    {
        // Media type of the payload.
        public string? PayloadType { get; set; }
        // Base64-encoded payload bytes.
        public string? Payload { get; set; }
        public DsseSignature[]? Signatures { get; set; }
    }
    // One signature entry in the envelope.
    private sealed class DsseSignature
    {
        public string? KeyId { get; set; }
        // Base64-encoded signature over the PAE of payload type and payload.
        public string? Sig { get; set; }
    }
}

View File

@@ -0,0 +1,97 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleSigner
{
    // Cached: building JsonSerializerOptions per call re-creates serializer
    // metadata every time (CA1869).
    private static readonly JsonSerializerOptions EnvelopeSerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = true
    };

    /// <summary>
    /// Signs a manifest with a DSSE envelope. Uses the PEM key from the
    /// request when provided; otherwise generates an ephemeral P-256 key.
    /// </summary>
    /// <returns>
    /// A result carrying the serialized envelope, the key id, the signing
    /// algorithm (ES256/RS256) and the sha256 digest of the payload; a
    /// failed result on any error.
    /// </returns>
    public async Task<AuditBundleSigningResult> SignAsync(
        AuditBundleSigningRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentNullException.ThrowIfNull(request.ManifestBytes);
        try
        {
            AsymmetricAlgorithm key;
            string keyId;
            string algorithm;
            if (!string.IsNullOrEmpty(request.KeyFilePath))
            {
                (key, keyId, algorithm) = await LoadKeyFromFileAsync(
                    request.KeyFilePath,
                    request.KeyPassword,
                    cancellationToken)
                    .ConfigureAwait(false);
            }
            else
            {
                // No key supplied: sign with a throwaway ES256 key.
                var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
                key = ecdsa;
                keyId = $"ephemeral:{ComputeKeyId(ecdsa)}";
                algorithm = "ES256";
            }
            using (key)
            {
                // DSSE signs the PAE of payload type and payload, not the raw bytes.
                var pae = CreatePae(PayloadType, request.ManifestBytes);
                byte[] signature;
                if (key is ECDsa ecdsa)
                {
                    signature = ecdsa.SignData(pae, HashAlgorithmName.SHA256);
                }
                else if (key is RSA rsa)
                {
                    signature = rsa.SignData(pae, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
                    algorithm = "RS256";
                }
                else
                {
                    return AuditBundleSigningResult.Failed($"Unsupported key type: {key.GetType().Name}");
                }
                var envelope = new DsseEnvelope
                {
                    PayloadType = PayloadType,
                    Payload = Convert.ToBase64String(request.ManifestBytes),
                    Signatures =
                    [
                        new DsseSignature
                        {
                            KeyId = keyId,
                            Sig = Convert.ToBase64String(signature)
                        }
                    ]
                };
                var envelopeBytes = JsonSerializer.SerializeToUtf8Bytes(envelope, EnvelopeSerializerOptions);
                var payloadDigest = ComputeSha256(request.ManifestBytes);
                return new AuditBundleSigningResult
                {
                    Success = true,
                    Envelope = envelopeBytes,
                    KeyId = keyId,
                    Algorithm = algorithm,
                    PayloadDigest = payloadDigest
                };
            }
        }
        catch (Exception ex)
        {
            return AuditBundleSigningResult.Failed($"Signing failed: {ex.Message}");
        }
    }
}

View File

@@ -0,0 +1,100 @@
using System.Security.Cryptography;
using System.Text.Json;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleSigner
{
    // Cached: building JsonSerializerOptions per call re-creates serializer
    // metadata every time (CA1869).
    private static readonly JsonSerializerOptions EnvelopeDeserializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    /// <summary>
    /// Verifies a DSSE envelope signature. Returns a successful result with
    /// per-signature outcomes when the envelope parses; a failed result for
    /// structural problems (unparsable envelope, no payload, no signatures).
    /// Without a public key, signatures are reported unverified.
    /// </summary>
    public async Task<AuditBundleVerificationResult> VerifyAsync(
        AuditBundleVerificationRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentNullException.ThrowIfNull(request.EnvelopeBytes);
        try
        {
            var envelope = JsonSerializer.Deserialize<DsseEnvelope>(
                request.EnvelopeBytes,
                EnvelopeDeserializerOptions);
            if (envelope is null)
            {
                return AuditBundleVerificationResult.Failed("Failed to parse DSSE envelope");
            }
            if (string.IsNullOrEmpty(envelope.Payload))
            {
                return AuditBundleVerificationResult.Failed("Envelope has no payload");
            }
            var payloadBytes = Convert.FromBase64String(envelope.Payload);
            var payloadDigest = ComputeSha256(payloadBytes);
            if (envelope.Signatures is null || envelope.Signatures.Length == 0)
            {
                return AuditBundleVerificationResult.Failed("Envelope has no signatures");
            }
            // The PAE depends only on the payload type and payload, so it is
            // loop-invariant; compute it once for all signatures.
            var pae = CreatePae(envelope.PayloadType ?? PayloadType, payloadBytes);
            var verifiedSignatures = new List<VerifiedSignatureInfo>();
            foreach (var sig in envelope.Signatures)
            {
                if (string.IsNullOrEmpty(sig.Sig))
                {
                    verifiedSignatures.Add(new VerifiedSignatureInfo
                    {
                        KeyId = sig.KeyId,
                        Verified = false,
                        Error = "Empty signature"
                    });
                    continue;
                }
                byte[] signatureBytes;
                try
                {
                    signatureBytes = Convert.FromBase64String(sig.Sig);
                }
                catch (FormatException)
                {
                    // One malformed signature entry must not abort the
                    // verification of the remaining signatures (previously
                    // this escaped to the outer catch and failed everything).
                    verifiedSignatures.Add(new VerifiedSignatureInfo
                    {
                        KeyId = sig.KeyId,
                        Verified = false,
                        Error = "Signature is not valid base64"
                    });
                    continue;
                }
                bool verified = false;
                string? error = null;
                if (request.PublicKey is not null)
                {
                    try
                    {
                        if (request.PublicKey is ECDsa ecdsa)
                        {
                            verified = ecdsa.VerifyData(pae, signatureBytes, HashAlgorithmName.SHA256);
                        }
                        else if (request.PublicKey is RSA rsa)
                        {
                            verified = rsa.VerifyData(pae, signatureBytes, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
                        }
                        else
                        {
                            error = $"Unsupported key type: {request.PublicKey.GetType().Name}";
                        }
                    }
                    catch (CryptographicException ex)
                    {
                        error = ex.Message;
                    }
                }
                else
                {
                    error = "No public key provided for verification";
                }
                verifiedSignatures.Add(new VerifiedSignatureInfo
                {
                    KeyId = sig.KeyId,
                    Verified = verified,
                    Error = error
                });
            }
            return new AuditBundleVerificationResult
            {
                Success = true,
                PayloadDigest = payloadDigest,
                VerifiedSignatures = [.. verifiedSignatures]
            };
        }
        catch (Exception ex)
        {
            return AuditBundleVerificationResult.Failed($"Verification failed: {ex.Message}");
        }
    }
}

View File

@@ -1,381 +1,9 @@
// -----------------------------------------------------------------------------
// AuditBundleSigner.cs
// Sprint: SPRINT_4300_0001_0002 (One-Command Audit Replay CLI)
// Task: REPLAY-004 - Bundle signature (DSSE envelope)
// Description: Signs and verifies audit bundle manifests using DSSE.
// -----------------------------------------------------------------------------
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Signs and verifies audit bundle manifests using DSSE (Dead Simple Signing Envelope).
/// </summary>
public sealed class AuditBundleSigner
public sealed partial class AuditBundleSigner
{
private const string PayloadType = "application/vnd.stellaops.audit-bundle.manifest+json";
/// <summary>
/// Signs a manifest with DSSE envelope.
/// </summary>
public async Task<AuditBundleSigningResult> SignAsync(
AuditBundleSigningRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(request.ManifestBytes);
try
{
// Load or generate signing key
AsymmetricAlgorithm key;
string keyId;
string algorithm;
if (!string.IsNullOrEmpty(request.KeyFilePath))
{
(key, keyId, algorithm) = await LoadKeyFromFileAsync(
request.KeyFilePath, request.KeyPassword, cancellationToken);
}
else
{
// Generate ephemeral key
var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
key = ecdsa;
keyId = $"ephemeral:{ComputeKeyId(ecdsa)}";
algorithm = "ES256";
}
using (key)
{
// Create PAE (Pre-Authentication Encoding)
var pae = CreatePae(PayloadType, request.ManifestBytes);
// Sign
byte[] signature;
if (key is ECDsa ecdsa)
{
signature = ecdsa.SignData(pae, HashAlgorithmName.SHA256);
}
else if (key is RSA rsa)
{
signature = rsa.SignData(pae, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
algorithm = "RS256";
}
else
{
return AuditBundleSigningResult.Failed($"Unsupported key type: {key.GetType().Name}");
}
// Create DSSE envelope
var envelope = new DsseEnvelope
{
PayloadType = PayloadType,
Payload = Convert.ToBase64String(request.ManifestBytes),
Signatures =
[
new DsseSignature
{
KeyId = keyId,
Sig = Convert.ToBase64String(signature)
}
]
};
var envelopeBytes = JsonSerializer.SerializeToUtf8Bytes(envelope, new JsonSerializerOptions
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = true
});
var payloadDigest = ComputeSha256(request.ManifestBytes);
return new AuditBundleSigningResult
{
Success = true,
Envelope = envelopeBytes,
KeyId = keyId,
Algorithm = algorithm,
PayloadDigest = payloadDigest
};
}
}
catch (Exception ex)
{
return AuditBundleSigningResult.Failed($"Signing failed: {ex.Message}");
}
}
/// <summary>
/// Verifies a DSSE envelope signature.
/// </summary>
public async Task<AuditBundleVerificationResult> VerifyAsync(
AuditBundleVerificationRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentNullException.ThrowIfNull(request.EnvelopeBytes);
try
{
// Parse envelope
var envelope = JsonSerializer.Deserialize<DsseEnvelope>(
request.EnvelopeBytes,
new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase });
if (envelope is null)
{
return AuditBundleVerificationResult.Failed("Failed to parse DSSE envelope");
}
if (string.IsNullOrEmpty(envelope.Payload))
{
return AuditBundleVerificationResult.Failed("Envelope has no payload");
}
var payloadBytes = Convert.FromBase64String(envelope.Payload);
var payloadDigest = ComputeSha256(payloadBytes);
if (envelope.Signatures is null || envelope.Signatures.Length == 0)
{
return AuditBundleVerificationResult.Failed("Envelope has no signatures");
}
var verifiedSignatures = new List<VerifiedSignatureInfo>();
foreach (var sig in envelope.Signatures)
{
if (string.IsNullOrEmpty(sig.Sig))
{
verifiedSignatures.Add(new VerifiedSignatureInfo
{
KeyId = sig.KeyId,
Verified = false,
Error = "Empty signature"
});
continue;
}
var signatureBytes = Convert.FromBase64String(sig.Sig);
var pae = CreatePae(envelope.PayloadType ?? PayloadType, payloadBytes);
bool verified = false;
string? error = null;
if (request.PublicKey is not null)
{
try
{
if (request.PublicKey is ECDsa ecdsa)
{
verified = ecdsa.VerifyData(pae, signatureBytes, HashAlgorithmName.SHA256);
}
else if (request.PublicKey is RSA rsa)
{
verified = rsa.VerifyData(pae, signatureBytes, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
}
else
{
error = $"Unsupported key type: {request.PublicKey.GetType().Name}";
}
}
catch (CryptographicException ex)
{
error = ex.Message;
}
}
else
{
// No public key provided - cannot verify
error = "No public key provided for verification";
}
verifiedSignatures.Add(new VerifiedSignatureInfo
{
KeyId = sig.KeyId,
Verified = verified,
Error = error
});
}
return new AuditBundleVerificationResult
{
Success = true,
PayloadDigest = payloadDigest,
VerifiedSignatures = [.. verifiedSignatures]
};
}
catch (Exception ex)
{
return AuditBundleVerificationResult.Failed($"Verification failed: {ex.Message}");
}
}
private static byte[] CreatePae(string payloadType, byte[] payload)
{
// PAE(type, payload) = "DSSEv1" || SP || len(type) || SP || type || SP || len(payload) || SP || payload
const string prefix = "DSSEv1";
var typeBytes = Encoding.UTF8.GetBytes(payloadType);
using var ms = new MemoryStream();
using var writer = new BinaryWriter(ms);
writer.Write(Encoding.UTF8.GetBytes(prefix));
writer.Write((byte)' ');
writer.Write(Encoding.UTF8.GetBytes(typeBytes.Length.ToString()));
writer.Write((byte)' ');
writer.Write(typeBytes);
writer.Write((byte)' ');
writer.Write(Encoding.UTF8.GetBytes(payload.Length.ToString()));
writer.Write((byte)' ');
writer.Write(payload);
return ms.ToArray();
}
private static async Task<(AsymmetricAlgorithm Key, string KeyId, string Algorithm)> LoadKeyFromFileAsync(
string keyFilePath, string? password, CancellationToken ct)
{
var keyPem = await File.ReadAllTextAsync(keyFilePath, ct);
// Try ECDSA first
try
{
var ecdsa = ECDsa.Create();
if (password is not null)
{
ecdsa.ImportFromEncryptedPem(keyPem, password);
}
else
{
ecdsa.ImportFromPem(keyPem);
}
return (ecdsa, $"file:{ComputeKeyId(ecdsa)}", "ES256");
}
catch
{
// Not ECDSA, try RSA
}
var rsa = RSA.Create();
if (password is not null)
{
rsa.ImportFromEncryptedPem(keyPem, password);
}
else
{
rsa.ImportFromPem(keyPem);
}
return (rsa, $"file:{ComputeKeyIdRsa(rsa)}", "RS256");
}
private static string ComputeKeyId(ECDsa ecdsa)
{
var publicKey = ecdsa.ExportSubjectPublicKeyInfo();
var hash = SHA256.HashData(publicKey);
return Convert.ToHexString(hash[..8]).ToLowerInvariant();
}
private static string ComputeKeyIdRsa(RSA rsa)
{
var publicKey = rsa.ExportSubjectPublicKeyInfo();
var hash = SHA256.HashData(publicKey);
return Convert.ToHexString(hash[..8]).ToLowerInvariant();
}
private static string ComputeSha256(byte[] content)
{
var hash = SHA256.HashData(content);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
#region Internal Models
private sealed class DsseEnvelope
{
public string? PayloadType { get; set; }
public string? Payload { get; set; }
public DsseSignature[]? Signatures { get; set; }
}
private sealed class DsseSignature
{
public string? KeyId { get; set; }
public string? Sig { get; set; }
}
#endregion
}
#region Request and Result Models
/// <summary>
/// Request for signing an audit bundle manifest.
/// </summary>
public sealed record AuditBundleSigningRequest
{
public required byte[] ManifestBytes { get; init; }
public string? KeyFilePath { get; init; }
public string? KeyPassword { get; init; }
}
/// <summary>
/// Result of signing an audit bundle manifest.
/// </summary>
public sealed record AuditBundleSigningResult
{
public bool Success { get; init; }
public byte[]? Envelope { get; init; }
public string? KeyId { get; init; }
public string? Algorithm { get; init; }
public string? PayloadDigest { get; init; }
public string? Error { get; init; }
public static AuditBundleSigningResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
/// <summary>
/// Request for verifying an audit bundle envelope.
/// </summary>
public sealed record AuditBundleVerificationRequest
{
public required byte[] EnvelopeBytes { get; init; }
public AsymmetricAlgorithm? PublicKey { get; init; }
}
/// <summary>
/// Result of verifying an audit bundle envelope.
/// </summary>
public sealed record AuditBundleVerificationResult
{
public bool Success { get; init; }
public string? PayloadDigest { get; init; }
public VerifiedSignatureInfo[]? VerifiedSignatures { get; init; }
public string? Error { get; init; }
public static AuditBundleVerificationResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
/// <summary>
/// Information about a verified signature.
/// </summary>
public sealed record VerifiedSignatureInfo
{
public string? KeyId { get; init; }
public bool Verified { get; init; }
public string? Error { get; init; }
}
#endregion

View File

@@ -0,0 +1,11 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Request for signing an audit bundle manifest.
/// </summary>
public sealed record AuditBundleSigningRequest
{
    /// <summary>Manifest bytes to sign; they become the DSSE payload verbatim.</summary>
    public required byte[] ManifestBytes { get; init; }
    /// <summary>Optional path to a PEM private key; when absent an ephemeral key is generated.</summary>
    public string? KeyFilePath { get; init; }
    /// <summary>Password for an encrypted PEM key, if any.</summary>
    public string? KeyPassword { get; init; }
}

View File

@@ -0,0 +1,20 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Result of signing an audit bundle manifest.
/// </summary>
public sealed record AuditBundleSigningResult
{
    public bool Success { get; init; }
    /// <summary>Serialized DSSE envelope (JSON) containing payload and signatures.</summary>
    public byte[]? Envelope { get; init; }
    /// <summary>Identifier of the signing key.</summary>
    public string? KeyId { get; init; }
    /// <summary>Signing algorithm, e.g. "ES256" or "RS256".</summary>
    public string? Algorithm { get; init; }
    /// <summary>sha256 digest of the signed manifest payload.</summary>
    public string? PayloadDigest { get; init; }
    public string? Error { get; init; }
    /// <summary>Convenience factory for a failed signing attempt.</summary>
    public static AuditBundleSigningResult Failed(string error) => new()
    {
        Success = false,
        Error = error
    };
}

View File

@@ -0,0 +1,12 @@
using System.Security.Cryptography;
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Request for verifying an audit bundle envelope.
/// </summary>
public sealed record AuditBundleVerificationRequest
{
    /// <summary>Serialized DSSE envelope to verify.</summary>
    public required byte[] EnvelopeBytes { get; init; }
    /// <summary>Public key to check signatures against; when null, signatures are reported unverified.</summary>
    public AsymmetricAlgorithm? PublicKey { get; init; }
}

View File

@@ -0,0 +1,18 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Result of verifying an audit bundle envelope.
/// </summary>
public sealed record AuditBundleVerificationResult
{
    /// <summary>Whether the envelope was parsed and processed (individual signatures may still be unverified).</summary>
    public bool Success { get; init; }
    /// <summary>sha256 digest of the envelope payload bytes.</summary>
    public string? PayloadDigest { get; init; }
    /// <summary>Per-signature verification outcomes.</summary>
    public VerifiedSignatureInfo[]? VerifiedSignatures { get; init; }
    public string? Error { get; init; }
    /// <summary>Convenience factory for a structurally failed verification.</summary>
    public static AuditBundleVerificationResult Failed(string error) => new()
    {
        Success = false,
        Error = error
    };
}

View File

@@ -0,0 +1,100 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Request for creating an audit bundle.
/// </summary>
public sealed record AuditBundleWriteRequest
{
    /// <summary>
    /// Output path for the bundle (will add .tar.gz if not present).
    /// </summary>
    public required string OutputPath { get; init; }
    /// <summary>
    /// Unique bundle identifier (auto-generated if not provided).
    /// </summary>
    public string? BundleId { get; init; }
    /// <summary>
    /// Human-readable name for the bundle.
    /// </summary>
    public string? Name { get; init; }
    /// <summary>
    /// Scan ID this bundle was created from.
    /// </summary>
    public required string ScanId { get; init; }
    /// <summary>
    /// Image reference that was scanned.
    /// </summary>
    public required string ImageRef { get; init; }
    /// <summary>
    /// Image digest (sha256:...).
    /// </summary>
    public required string ImageDigest { get; init; }
    /// <summary>
    /// Decision from the verdict (pass, warn, block).
    /// </summary>
    public required string Decision { get; init; }
    /// <summary>
    /// SBOM document bytes (CycloneDX or SPDX JSON).
    /// </summary>
    public required byte[] Sbom { get; init; }
    /// <summary>
    /// Advisory feeds snapshot (NDJSON format).
    /// </summary>
    public required byte[] FeedsSnapshot { get; init; }
    /// <summary>
    /// Policy bundle (OPA tar.gz).
    /// </summary>
    public required byte[] PolicyBundle { get; init; }
    /// <summary>
    /// Verdict document bytes.
    /// </summary>
    public required byte[] Verdict { get; init; }
    /// <summary>
    /// VEX statements (OpenVEX JSON, optional).
    /// </summary>
    public byte[]? VexStatements { get; init; }
    /// <summary>
    /// Proof bundle bytes (optional).
    /// </summary>
    public byte[]? ProofBundle { get; init; }
    /// <summary>
    /// Trust roots document (optional).
    /// </summary>
    public byte[]? TrustRoots { get; init; }
    /// <summary>
    /// Scoring rules (optional).
    /// </summary>
    public byte[]? ScoringRules { get; init; }
    /// <summary>
    /// Time anchor for replay context (optional).
    /// </summary>
    public TimeAnchorInput? TimeAnchor { get; init; }
    /// <summary>
    /// Whether to sign the manifest. Defaults to true.
    /// </summary>
    public bool Sign { get; init; } = true;
    /// <summary>
    /// Path to signing key file (PEM format).
    /// NOTE(review): when absent, the signer presumably falls back to an
    /// ephemeral key (see AuditBundleSigner.SignAsync) — confirm.
    /// </summary>
    public string? SigningKeyPath { get; init; }
    /// <summary>
    /// Password for encrypted signing key.
    /// </summary>
    public string? SigningKeyPassword { get; init; }
}

View File

@@ -0,0 +1,38 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Result of creating an audit bundle.
/// </summary>
public sealed record AuditBundleWriteResult
{
    public bool Success { get; init; }
    /// <summary>Final path the bundle archive was written to.</summary>
    public string? OutputPath { get; init; }
    /// <summary>Identifier of the created bundle.</summary>
    public string? BundleId { get; init; }
    /// <summary>Merkle root computed over the bundle files.</summary>
    public string? MerkleRoot { get; init; }
    /// <summary>Digest of the bundle archive itself.</summary>
    public string? BundleDigest { get; init; }
    public long TotalSizeBytes { get; init; }
    public int FileCount { get; init; }
    public DateTimeOffset CreatedAt { get; init; }
    public string? Error { get; init; }
    /// <summary>
    /// Whether the manifest was signed.
    /// </summary>
    public bool Signed { get; init; }
    /// <summary>
    /// Key ID used for signing.
    /// </summary>
    public string? SigningKeyId { get; init; }
    /// <summary>
    /// Algorithm used for signing.
    /// </summary>
    public string? SigningAlgorithm { get; init; }
    /// <summary>Convenience factory for a failed write.</summary>
    public static AuditBundleWriteResult Failed(string error) => new()
    {
        Success = false,
        Error = error
    };
}

View File

@@ -0,0 +1,19 @@
using System.Security.Cryptography;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleWriter
{
    /// <summary>Formats the SHA-256 of <paramref name="content"/> as "sha256:&lt;lowercase hex&gt;".</summary>
    private static string ComputeSha256(byte[] content)
    {
        var digestHex = Convert.ToHexString(SHA256.HashData(content)).ToLowerInvariant();
        return "sha256:" + digestHex;
    }

    /// <summary>Streams a file through SHA-256 without loading it fully into memory.</summary>
    private static async Task<string> ComputeFileDigestAsync(string filePath, CancellationToken ct)
    {
        await using var stream = File.OpenRead(filePath);
        var digest = await SHA256.HashDataAsync(stream, ct).ConfigureAwait(false);
        return "sha256:" + Convert.ToHexString(digest).ToLowerInvariant();
    }
}

View File

@@ -0,0 +1,77 @@
using StellaOps.AuditPack.Models;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleWriter
{
    // Assembles the fixed on-disk layout of an audit bundle from the write
    // request. Required inputs (SBOM, feeds, policy, verdict) fail fast with
    // a descriptive message; optional inputs are added when present.
    // NOTE(review): on the failure paths the inputs slot is filled with
    // null! — callers presumably never read it when Success is false; confirm.
    private BundleBuildResult BuildBundleEntries(AuditBundleWriteRequest request)
    {
        var entries = new List<BundleEntry>();
        var files = new List<BundleFileEntry>();
        var archiveEntries = new List<ArchiveEntry>();
        var sbomDigest = AddRequiredEntry(entries, files, archiveEntries, "sbom.json", request.Sbom, BundleContentType.Sbom);
        if (sbomDigest is null)
        {
            return new BundleBuildResult(false, "SBOM is required for audit bundle", entries, files, archiveEntries, null!, null);
        }
        var feedsDigest = AddRequiredEntry(entries, files, archiveEntries, "feeds/feeds-snapshot.ndjson", request.FeedsSnapshot, BundleContentType.Feeds);
        if (feedsDigest is null)
        {
            return new BundleBuildResult(false, "Feeds snapshot is required for audit bundle", entries, files, archiveEntries, null!, null);
        }
        var policyDigest = AddRequiredEntry(entries, files, archiveEntries, "policy/policy-bundle.tar.gz", request.PolicyBundle, BundleContentType.Policy);
        if (policyDigest is null)
        {
            return new BundleBuildResult(false, "Policy bundle is required for audit bundle", entries, files, archiveEntries, null!, null);
        }
        var verdictDigest = AddRequiredEntry(entries, files, archiveEntries, "verdict.json", request.Verdict, BundleContentType.Verdict);
        if (verdictDigest is null)
        {
            return new BundleBuildResult(false, "Verdict is required for audit bundle", entries, files, archiveEntries, null!, null);
        }
        // Optional artifacts: digests feed into the manifest input-digest set
        // only where captured below; the proof bundle digest is not recorded.
        var vexDigest = AddOptionalEntry(entries, files, archiveEntries, "vex/vex-statements.json", request.VexStatements, BundleContentType.Vex);
        AddOptionalEntry(entries, files, archiveEntries, "proof/proof-bundle.json", request.ProofBundle, BundleContentType.ProofBundle);
        var trustRootsDigest = AddOptionalEntry(entries, files, archiveEntries, "trust/trust-roots.json", request.TrustRoots, BundleContentType.TrustRoot);
        var scoringDigest = AddOptionalEntry(entries, files, archiveEntries, "scoring-rules.json", request.ScoringRules, BundleContentType.Other);
        var timeAnchor = AddTimeAnchorEntry(entries, files, archiveEntries, request.TimeAnchor);
        var inputs = new BundleInputDigests(
            sbomDigest,
            feedsDigest,
            policyDigest,
            verdictDigest,
            vexDigest,
            scoringDigest,
            trustRootsDigest);
        return new BundleBuildResult(true, null, entries, files, archiveEntries, inputs, timeAnchor);
    }

    // Serializes the optional time anchor to canonical JSON, stores it as
    // time-anchor.json in the bundle, and records the entry digest as the
    // anchor's TokenDigest. Returns null when no anchor was supplied.
    private TimeAnchor? AddTimeAnchorEntry(
        List<BundleEntry> entries,
        List<BundleFileEntry> files,
        List<ArchiveEntry> archiveEntries,
        TimeAnchorInput? timeAnchor)
    {
        if (timeAnchor is null)
        {
            return null;
        }
        var timeAnchorBytes = CanonicalJson.Serialize(timeAnchor, _jsonOptions);
        var digest = AddEntry(entries, files, archiveEntries, "time-anchor.json", timeAnchorBytes, BundleContentType.TimeAnchor);
        return new TimeAnchor
        {
            Timestamp = timeAnchor.Timestamp,
            Source = timeAnchor.Source,
            TokenDigest = digest
        };
    }
}

View File

@@ -0,0 +1,53 @@
using StellaOps.AuditPack.Models;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleWriter
{
    /// <summary>
    /// Adds a required input to the bundle. The implementation is identical to
    /// <see cref="AddOptionalEntry"/> (previously duplicated verbatim); the distinct name
    /// lets call sites express intent — callers treat a null result from a required input
    /// as a validation failure.
    /// </summary>
    private static string? AddRequiredEntry(
        List<BundleEntry> entries,
        List<BundleFileEntry> files,
        List<ArchiveEntry> archiveEntries,
        string path,
        byte[]? content,
        BundleContentType type)
        => AddOptionalEntry(entries, files, archiveEntries, path, content, type);

    /// <summary>
    /// Adds an entry when <paramref name="content"/> is present and returns its digest;
    /// returns null when the content was not supplied.
    /// </summary>
    private static string? AddOptionalEntry(
        List<BundleEntry> entries,
        List<BundleFileEntry> files,
        List<ArchiveEntry> archiveEntries,
        string path,
        byte[]? content,
        BundleContentType type)
    {
        return content is null
            ? null
            : AddEntry(entries, files, archiveEntries, path, content, type);
    }

    /// <summary>
    /// Records one payload in all three tracking lists (Merkle entries, manifest file
    /// metadata, raw archive payloads) and returns its "sha256:..." digest.
    /// </summary>
    private static string AddEntry(
        List<BundleEntry> entries,
        List<BundleFileEntry> files,
        List<ArchiveEntry> archiveEntries,
        string path,
        byte[] content,
        BundleContentType type)
    {
        var digest = ComputeSha256(content);
        entries.Add(new BundleEntry(path, digest, content.Length));
        files.Add(new BundleFileEntry
        {
            RelativePath = path,
            Digest = digest,
            SizeBytes = content.Length,
            ContentType = type
        });
        archiveEntries.Add(new ArchiveEntry(path, content));
        return digest;
    }
}

View File

@@ -0,0 +1,45 @@
using StellaOps.AuditPack.Models;
using System.Linq;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleWriter
{
    /// <summary>
    /// Assembles the bundle manifest from the request metadata, the collected input digests,
    /// and the computed Merkle root. BundleId and Name fall back to generated defaults when
    /// the caller does not supply them; CreatedAt is stamped from the injected TimeProvider.
    /// </summary>
    private AuditBundleManifest BuildManifest(
        AuditBundleWriteRequest request,
        BundleBuildResult buildResult,
        string merkleRoot)
    {
        return new AuditBundleManifest
        {
            BundleId = request.BundleId ?? _idGenerator.NewBundleId(),
            Name = request.Name ?? $"audit-{request.ScanId}",
            CreatedAt = _timeProvider.GetUtcNow(),
            ScanId = request.ScanId,
            ImageRef = request.ImageRef,
            ImageDigest = request.ImageDigest,
            MerkleRoot = merkleRoot,
            Inputs = new InputDigests
            {
                SbomDigest = buildResult.Inputs.SbomDigest,
                FeedsDigest = buildResult.Inputs.FeedsDigest,
                PolicyDigest = buildResult.Inputs.PolicyDigest,
                VexDigest = buildResult.Inputs.VexDigest,
                ScoringDigest = buildResult.Inputs.ScoringDigest,
                TrustRootsDigest = buildResult.Inputs.TrustRootsDigest
            },
            VerdictDigest = buildResult.Inputs.VerdictDigest,
            Decision = request.Decision,
            Files = [.. buildResult.Files],
            // Sum of raw entry sizes, not the compressed archive size.
            TotalSizeBytes = buildResult.Entries.Sum(e => e.SizeBytes),
            TimeAnchor = buildResult.TimeAnchor
        };
    }

    /// <summary>
    /// Appends ".tar.gz" to the output path unless it already ends with it
    /// (case-insensitive comparison).
    /// </summary>
    private static string EnsureTarGzExtension(string outputPath)
    {
        return outputPath.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase)
            ? outputPath
            : $"{outputPath}.tar.gz";
    }
}

View File

@@ -0,0 +1,49 @@
using System.Security.Cryptography;
using System.Text;
using System.Linq;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleWriter
{
    /// <summary>
    /// Computes the Merkle root over all bundle entries for integrity verification.
    /// Leaves are SHA-256 of "path:digest" in ordinal path order; each level is built by
    /// pairwise SHA-256 hashing (an odd trailing node is re-hashed alone) until a single
    /// root remains, rendered as "sha256:&lt;lowercase hex&gt;". Empty input yields "".
    /// </summary>
    private static string ComputeMerkleRoot(List<BundleEntry> entries)
    {
        if (entries.Count == 0)
        {
            return string.Empty;
        }

        var level = entries
            .OrderBy(entry => entry.Path, StringComparer.Ordinal)
            .Select(entry => SHA256.HashData(Encoding.UTF8.GetBytes($"{entry.Path}:{entry.Digest}")))
            .ToArray();

        while (level.Length > 1)
        {
            level = PairwiseHash(level).ToArray();
        }

        return $"sha256:{Convert.ToHexString(level[0]).ToLowerInvariant()}";
    }

    /// <summary>
    /// Produces the next tree level: each adjacent pair is concatenated and hashed;
    /// a leftover odd node is promoted by hashing it alone.
    /// </summary>
    private static IEnumerable<byte[]> PairwiseHash(byte[][] nodes)
    {
        var next = new List<byte[]>((nodes.Length + 1) / 2);
        for (var i = 0; i + 1 < nodes.Length; i += 2)
        {
            var left = nodes[i];
            var right = nodes[i + 1];
            var combined = new byte[left.Length + right.Length];
            Buffer.BlockCopy(left, 0, combined, 0, left.Length);
            Buffer.BlockCopy(right, 0, combined, left.Length, right.Length);
            next.Add(SHA256.HashData(combined));
        }
        if (nodes.Length % 2 == 1)
        {
            // Odd trailing node: promote to the next level by hashing it alone.
            next.Add(SHA256.HashData(nodes[^1]));
        }
        return next;
    }
}

View File

@@ -0,0 +1,26 @@
using StellaOps.AuditPack.Models;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleWriter
{
    // One file recorded for Merkle-root computation: archive path, "sha256:..." digest, raw size.
    private sealed record BundleEntry(string Path, string Digest, long SizeBytes);

    // Digests of the replay inputs; required inputs are non-null, optional ones may be null.
    private sealed record BundleInputDigests(
        string SbomDigest,
        string FeedsDigest,
        string PolicyDigest,
        string VerdictDigest,
        string? VexDigest,
        string? ScoringDigest,
        string? TrustRootsDigest);

    // Outcome of BuildBundleEntries. On failure, Error is set and Inputs must not be read
    // (it is passed as null! on failure paths).
    private sealed record BundleBuildResult(
        bool Success,
        string? Error,
        List<BundleEntry> Entries,
        List<BundleFileEntry> Files,
        List<ArchiveEntry> ArchiveEntries,
        BundleInputDigests Inputs,
        TimeAnchor? TimeAnchor);
}

View File

@@ -0,0 +1,37 @@
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleWriter
{
    /// <summary>
    /// Signs the manifest bytes when <c>request.Sign</c> is set and, on success, appends the
    /// signature envelope to the archive as "manifest.sig".
    /// NOTE(review): a failed signing attempt is swallowed — the bundle is still written and
    /// simply reported as unsigned; the signer's error detail is discarded. Confirm this
    /// best-effort behavior is intentional.
    /// </summary>
    private static async Task<ManifestSigningResult> TrySignManifestAsync(
        AuditBundleWriteRequest request,
        byte[] manifestBytes,
        List<ArchiveEntry> archiveEntries,
        CancellationToken ct)
    {
        if (!request.Sign)
        {
            return new ManifestSigningResult(false, null, null);
        }
        var signer = new AuditBundleSigner();
        var signResult = await signer.SignAsync(
            new AuditBundleSigningRequest
            {
                ManifestBytes = manifestBytes,
                KeyFilePath = request.SigningKeyPath,
                KeyPassword = request.SigningKeyPassword
            },
            ct)
            .ConfigureAwait(false);
        if (signResult.Success && signResult.Envelope is not null)
        {
            archiveEntries.Add(new ArchiveEntry("manifest.sig", signResult.Envelope));
            return new ManifestSigningResult(true, signResult.KeyId, signResult.Algorithm);
        }
        return new ManifestSigningResult(false, null, null);
    }

    // Outcome of a signing attempt; KeyId/Algorithm are null when unsigned.
    private sealed record ManifestSigningResult(bool Signed, string? KeyId, string? Algorithm);
}

View File

@@ -0,0 +1,60 @@
using StellaOps.AuditPack.Models;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditBundleWriter
{
    /// <summary>
    /// Creates an audit bundle from the specified inputs: collects entries, computes the
    /// Merkle root, builds and (optionally) signs the manifest, writes the tar.gz archive,
    /// and returns a result describing the written bundle. All failures — including I/O
    /// exceptions — are reported as a failed result rather than thrown.
    /// </summary>
    public async Task<AuditBundleWriteResult> WriteAsync(
        AuditBundleWriteRequest request,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(request);
        ArgumentException.ThrowIfNullOrWhiteSpace(request.OutputPath);
        try
        {
            var buildResult = BuildBundleEntries(request);
            if (!buildResult.Success)
            {
                return AuditBundleWriteResult.Failed(buildResult.Error ?? "Bundle inputs are invalid.");
            }
            // Merkle root covers the collected entries; the manifest itself is added to the
            // archive afterwards and is therefore not part of the root.
            var merkleRoot = ComputeMerkleRoot(buildResult.Entries);
            var manifest = BuildManifest(request, buildResult, merkleRoot);
            var manifestBytes = CanonicalJson.Serialize(manifest, _jsonOptions);
            buildResult.ArchiveEntries.Add(new ArchiveEntry("manifest.json", manifestBytes));
            // Signing (when requested) may append "manifest.sig" before the archive is written.
            var signing = await TrySignManifestAsync(request, manifestBytes, buildResult.ArchiveEntries, cancellationToken)
                .ConfigureAwait(false);
            var outputPath = EnsureTarGzExtension(request.OutputPath);
            await ArchiveUtilities.WriteTarGzAsync(outputPath, buildResult.ArchiveEntries, cancellationToken)
                .ConfigureAwait(false);
            // Digest of the finished archive file, distinct from the per-entry digests.
            var bundleDigest = await ComputeFileDigestAsync(outputPath, cancellationToken).ConfigureAwait(false);
            return new AuditBundleWriteResult
            {
                Success = true,
                OutputPath = outputPath,
                BundleId = manifest.BundleId,
                MerkleRoot = merkleRoot,
                BundleDigest = bundleDigest,
                TotalSizeBytes = new FileInfo(outputPath).Length,
                FileCount = buildResult.Files.Count,
                CreatedAt = manifest.CreatedAt,
                Signed = signing.Signed,
                SigningKeyId = signing.KeyId,
                SigningAlgorithm = signing.Algorithm
            };
        }
        catch (Exception ex)
        {
            // Deliberate catch-all: callers consume a result object, not exceptions.
            return AuditBundleWriteResult.Failed($"Failed to write audit bundle: {ex.Message}");
        }
    }
}

View File

@@ -1,14 +1,3 @@
// -----------------------------------------------------------------------------
// AuditBundleWriter.cs
// Sprint: SPRINT_4300_0001_0002 (One-Command Audit Replay CLI)
// Tasks: REPLAY-002, REPLAY-003 - Create AuditBundleWriter with merkle root calculation
// Description: Writes self-contained audit bundles for offline replay.
// -----------------------------------------------------------------------------
using StellaOps.AuditPack.Models;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
namespace StellaOps.AuditPack.Services;
@@ -16,12 +5,12 @@ namespace StellaOps.AuditPack.Services;
/// <summary>
/// Writes self-contained audit bundles for deterministic offline replay.
/// </summary>
public sealed class AuditBundleWriter : IAuditBundleWriter
public sealed partial class AuditBundleWriter : IAuditBundleWriter
{
private readonly TimeProvider _timeProvider;
private readonly IAuditPackIdGenerator _idGenerator;
private static readonly JsonSerializerOptions JsonOptions = new()
private static readonly JsonSerializerOptions _jsonOptions = new()
{
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
@@ -32,498 +21,4 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
_timeProvider = timeProvider ?? TimeProvider.System;
_idGenerator = idGenerator ?? new GuidAuditPackIdGenerator();
}
/// <summary>
/// Creates an audit bundle from the specified inputs.
/// </summary>
public async Task<AuditBundleWriteResult> WriteAsync(
AuditBundleWriteRequest request,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(request);
ArgumentException.ThrowIfNullOrWhiteSpace(request.OutputPath);
try
{
var entries = new List<BundleEntry>();
var files = new List<BundleFileEntry>();
var archiveEntries = new List<ArchiveEntry>();
// Write SBOM
string sbomDigest;
if (request.Sbom is not null)
{
sbomDigest = ComputeSha256(request.Sbom);
entries.Add(new BundleEntry("sbom.json", sbomDigest, request.Sbom.Length));
files.Add(new BundleFileEntry
{
RelativePath = "sbom.json",
Digest = sbomDigest,
SizeBytes = request.Sbom.Length,
ContentType = BundleContentType.Sbom
});
archiveEntries.Add(new ArchiveEntry("sbom.json", request.Sbom));
}
else
{
return AuditBundleWriteResult.Failed("SBOM is required for audit bundle");
}
// Write feeds snapshot
string feedsDigest;
if (request.FeedsSnapshot is not null)
{
feedsDigest = ComputeSha256(request.FeedsSnapshot);
entries.Add(new BundleEntry("feeds/feeds-snapshot.ndjson", feedsDigest, request.FeedsSnapshot.Length));
files.Add(new BundleFileEntry
{
RelativePath = "feeds/feeds-snapshot.ndjson",
Digest = feedsDigest,
SizeBytes = request.FeedsSnapshot.Length,
ContentType = BundleContentType.Feeds
});
archiveEntries.Add(new ArchiveEntry("feeds/feeds-snapshot.ndjson", request.FeedsSnapshot));
}
else
{
return AuditBundleWriteResult.Failed("Feeds snapshot is required for audit bundle");
}
// Write policy bundle
string policyDigest;
if (request.PolicyBundle is not null)
{
policyDigest = ComputeSha256(request.PolicyBundle);
entries.Add(new BundleEntry("policy/policy-bundle.tar.gz", policyDigest, request.PolicyBundle.Length));
files.Add(new BundleFileEntry
{
RelativePath = "policy/policy-bundle.tar.gz",
Digest = policyDigest,
SizeBytes = request.PolicyBundle.Length,
ContentType = BundleContentType.Policy
});
archiveEntries.Add(new ArchiveEntry("policy/policy-bundle.tar.gz", request.PolicyBundle));
}
else
{
return AuditBundleWriteResult.Failed("Policy bundle is required for audit bundle");
}
// Write VEX (optional)
string? vexDigest = null;
if (request.VexStatements is not null)
{
vexDigest = ComputeSha256(request.VexStatements);
entries.Add(new BundleEntry("vex/vex-statements.json", vexDigest, request.VexStatements.Length));
files.Add(new BundleFileEntry
{
RelativePath = "vex/vex-statements.json",
Digest = vexDigest,
SizeBytes = request.VexStatements.Length,
ContentType = BundleContentType.Vex
});
archiveEntries.Add(new ArchiveEntry("vex/vex-statements.json", request.VexStatements));
}
// Write verdict
string verdictDigest;
if (request.Verdict is not null)
{
verdictDigest = ComputeSha256(request.Verdict);
entries.Add(new BundleEntry("verdict.json", verdictDigest, request.Verdict.Length));
files.Add(new BundleFileEntry
{
RelativePath = "verdict.json",
Digest = verdictDigest,
SizeBytes = request.Verdict.Length,
ContentType = BundleContentType.Verdict
});
archiveEntries.Add(new ArchiveEntry("verdict.json", request.Verdict));
}
else
{
return AuditBundleWriteResult.Failed("Verdict is required for audit bundle");
}
// Write proof bundle (optional)
if (request.ProofBundle is not null)
{
var proofDigest = ComputeSha256(request.ProofBundle);
entries.Add(new BundleEntry("proof/proof-bundle.json", proofDigest, request.ProofBundle.Length));
files.Add(new BundleFileEntry
{
RelativePath = "proof/proof-bundle.json",
Digest = proofDigest,
SizeBytes = request.ProofBundle.Length,
ContentType = BundleContentType.ProofBundle
});
archiveEntries.Add(new ArchiveEntry("proof/proof-bundle.json", request.ProofBundle));
}
// Write trust roots (optional)
string? trustRootsDigest = null;
if (request.TrustRoots is not null)
{
trustRootsDigest = ComputeSha256(request.TrustRoots);
entries.Add(new BundleEntry("trust/trust-roots.json", trustRootsDigest, request.TrustRoots.Length));
files.Add(new BundleFileEntry
{
RelativePath = "trust/trust-roots.json",
Digest = trustRootsDigest,
SizeBytes = request.TrustRoots.Length,
ContentType = BundleContentType.TrustRoot
});
archiveEntries.Add(new ArchiveEntry("trust/trust-roots.json", request.TrustRoots));
}
// Write scoring rules (optional)
string? scoringDigest = null;
if (request.ScoringRules is not null)
{
scoringDigest = ComputeSha256(request.ScoringRules);
entries.Add(new BundleEntry("scoring-rules.json", scoringDigest, request.ScoringRules.Length));
files.Add(new BundleFileEntry
{
RelativePath = "scoring-rules.json",
Digest = scoringDigest,
SizeBytes = request.ScoringRules.Length,
ContentType = BundleContentType.Other
});
archiveEntries.Add(new ArchiveEntry("scoring-rules.json", request.ScoringRules));
}
// Write time anchor (optional)
TimeAnchor? timeAnchor = null;
if (request.TimeAnchor is not null)
{
var timeAnchorBytes = CanonicalJson.Serialize(request.TimeAnchor, JsonOptions);
var timeAnchorDigest = ComputeSha256(timeAnchorBytes);
entries.Add(new BundleEntry("time-anchor.json", timeAnchorDigest, timeAnchorBytes.Length));
files.Add(new BundleFileEntry
{
RelativePath = "time-anchor.json",
Digest = timeAnchorDigest,
SizeBytes = timeAnchorBytes.Length,
ContentType = BundleContentType.TimeAnchor
});
archiveEntries.Add(new ArchiveEntry("time-anchor.json", timeAnchorBytes));
timeAnchor = new TimeAnchor
{
Timestamp = request.TimeAnchor.Timestamp,
Source = request.TimeAnchor.Source,
TokenDigest = timeAnchorDigest
};
}
// Compute merkle root
var merkleRoot = ComputeMerkleRoot(entries);
// Build manifest
var manifest = new AuditBundleManifest
{
BundleId = request.BundleId ?? _idGenerator.NewBundleId(),
Name = request.Name ?? $"audit-{request.ScanId}",
CreatedAt = _timeProvider.GetUtcNow(),
ScanId = request.ScanId,
ImageRef = request.ImageRef,
ImageDigest = request.ImageDigest,
MerkleRoot = merkleRoot,
Inputs = new InputDigests
{
SbomDigest = sbomDigest,
FeedsDigest = feedsDigest,
PolicyDigest = policyDigest,
VexDigest = vexDigest,
ScoringDigest = scoringDigest,
TrustRootsDigest = trustRootsDigest
},
VerdictDigest = verdictDigest,
Decision = request.Decision,
Files = [.. files],
TotalSizeBytes = entries.Sum(e => e.SizeBytes),
TimeAnchor = timeAnchor
};
// Write manifest
var manifestBytes = CanonicalJson.Serialize(manifest, JsonOptions);
archiveEntries.Add(new ArchiveEntry("manifest.json", manifestBytes));
// Sign manifest if requested
string? signingKeyId = null;
string? signingAlgorithm = null;
var signed = false;
if (request.Sign)
{
var signer = new AuditBundleSigner();
var signResult = await signer.SignAsync(
new AuditBundleSigningRequest
{
ManifestBytes = manifestBytes,
KeyFilePath = request.SigningKeyPath,
KeyPassword = request.SigningKeyPassword
},
cancellationToken);
if (signResult.Success && signResult.Envelope is not null)
{
archiveEntries.Add(new ArchiveEntry("manifest.sig", signResult.Envelope));
signingKeyId = signResult.KeyId;
signingAlgorithm = signResult.Algorithm;
signed = true;
}
}
// Create tar.gz bundle
var outputPath = request.OutputPath;
if (!outputPath.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase))
{
outputPath = $"{outputPath}.tar.gz";
}
await ArchiveUtilities.WriteTarGzAsync(outputPath, archiveEntries, cancellationToken);
var bundleDigest = await ComputeFileDigestAsync(outputPath, cancellationToken);
return new AuditBundleWriteResult
{
Success = true,
OutputPath = outputPath,
BundleId = manifest.BundleId,
MerkleRoot = merkleRoot,
BundleDigest = bundleDigest,
TotalSizeBytes = new FileInfo(outputPath).Length,
FileCount = files.Count,
CreatedAt = manifest.CreatedAt,
Signed = signed,
SigningKeyId = signingKeyId,
SigningAlgorithm = signingAlgorithm
};
}
catch (Exception ex)
{
return AuditBundleWriteResult.Failed($"Failed to write audit bundle: {ex.Message}");
}
}
private static string ComputeSha256(byte[] content)
{
var hash = SHA256.HashData(content);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
private static async Task<string> ComputeFileDigestAsync(string filePath, CancellationToken ct)
{
await using var stream = File.OpenRead(filePath);
var hash = await SHA256.HashDataAsync(stream, ct);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
/// <summary>
/// Computes merkle root over all bundle entries for integrity verification.
/// Uses a binary tree structure with SHA-256 hashing.
/// </summary>
private static string ComputeMerkleRoot(List<BundleEntry> entries)
{
if (entries.Count == 0)
{
return string.Empty;
}
// Create leaf nodes: hash of "path:digest" for each entry
var leaves = entries
.OrderBy(e => e.Path, StringComparer.Ordinal)
.Select(e => SHA256.HashData(Encoding.UTF8.GetBytes($"{e.Path}:{e.Digest}")))
.ToArray();
// Build merkle tree by pairwise hashing until we reach the root
while (leaves.Length > 1)
{
leaves = PairwiseHash(leaves).ToArray();
}
return $"sha256:{Convert.ToHexString(leaves[0]).ToLowerInvariant()}";
}
private static IEnumerable<byte[]> PairwiseHash(byte[][] nodes)
{
for (var i = 0; i < nodes.Length; i += 2)
{
if (i + 1 >= nodes.Length)
{
// Odd node: hash it alone (promotes to next level)
yield return SHA256.HashData(nodes[i]);
continue;
}
// Concatenate and hash pair
var combined = new byte[nodes[i].Length + nodes[i + 1].Length];
Buffer.BlockCopy(nodes[i], 0, combined, 0, nodes[i].Length);
Buffer.BlockCopy(nodes[i + 1], 0, combined, nodes[i].Length, nodes[i + 1].Length);
yield return SHA256.HashData(combined);
}
}
private sealed record BundleEntry(string Path, string Digest, long SizeBytes);
}
/// <summary>
/// Interface for audit bundle writing.
/// </summary>
public interface IAuditBundleWriter
{
Task<AuditBundleWriteResult> WriteAsync(
AuditBundleWriteRequest request,
CancellationToken cancellationToken = default);
}
#region Request and Result Models
/// <summary>
/// Request for creating an audit bundle.
/// </summary>
public sealed record AuditBundleWriteRequest
{
/// <summary>
/// Output path for the bundle (will add .tar.gz if not present).
/// </summary>
public required string OutputPath { get; init; }
/// <summary>
/// Unique bundle identifier (auto-generated if not provided).
/// </summary>
public string? BundleId { get; init; }
/// <summary>
/// Human-readable name for the bundle.
/// </summary>
public string? Name { get; init; }
/// <summary>
/// Scan ID this bundle was created from.
/// </summary>
public required string ScanId { get; init; }
/// <summary>
/// Image reference that was scanned.
/// </summary>
public required string ImageRef { get; init; }
/// <summary>
/// Image digest (sha256:...).
/// </summary>
public required string ImageDigest { get; init; }
/// <summary>
/// Decision from the verdict (pass, warn, block).
/// </summary>
public required string Decision { get; init; }
/// <summary>
/// SBOM document bytes (CycloneDX or SPDX JSON).
/// </summary>
public required byte[] Sbom { get; init; }
/// <summary>
/// Advisory feeds snapshot (NDJSON format).
/// </summary>
public required byte[] FeedsSnapshot { get; init; }
/// <summary>
/// Policy bundle (OPA tar.gz).
/// </summary>
public required byte[] PolicyBundle { get; init; }
/// <summary>
/// Verdict document bytes.
/// </summary>
public required byte[] Verdict { get; init; }
/// <summary>
/// VEX statements (OpenVEX JSON, optional).
/// </summary>
public byte[]? VexStatements { get; init; }
/// <summary>
/// Proof bundle bytes (optional).
/// </summary>
public byte[]? ProofBundle { get; init; }
/// <summary>
/// Trust roots document (optional).
/// </summary>
public byte[]? TrustRoots { get; init; }
/// <summary>
/// Scoring rules (optional).
/// </summary>
public byte[]? ScoringRules { get; init; }
/// <summary>
/// Time anchor for replay context (optional).
/// </summary>
public TimeAnchorInput? TimeAnchor { get; init; }
/// <summary>
/// Whether to sign the manifest.
/// </summary>
public bool Sign { get; init; } = true;
/// <summary>
/// Path to signing key file (PEM format).
/// </summary>
public string? SigningKeyPath { get; init; }
/// <summary>
/// Password for encrypted signing key.
/// </summary>
public string? SigningKeyPassword { get; init; }
}
/// <summary>
/// Time anchor input for bundle creation.
/// </summary>
public sealed record TimeAnchorInput
{
public required DateTimeOffset Timestamp { get; init; }
public required string Source { get; init; }
}
/// <summary>
/// Result of creating an audit bundle.
/// </summary>
public sealed record AuditBundleWriteResult
{
public bool Success { get; init; }
public string? OutputPath { get; init; }
public string? BundleId { get; init; }
public string? MerkleRoot { get; init; }
public string? BundleDigest { get; init; }
public long TotalSizeBytes { get; init; }
public int FileCount { get; init; }
public DateTimeOffset CreatedAt { get; init; }
public string? Error { get; init; }
/// <summary>
/// Whether the manifest was signed.
/// </summary>
public bool Signed { get; init; }
/// <summary>
/// Key ID used for signing.
/// </summary>
public string? SigningKeyId { get; init; }
/// <summary>
/// Algorithm used for signing.
/// </summary>
public string? SigningAlgorithm { get; init; }
public static AuditBundleWriteResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
#endregion

View File

@@ -0,0 +1,55 @@
using StellaOps.AuditPack.Models;
using System.Collections.Immutable;
using AuditPackRecord = StellaOps.AuditPack.Models.AuditPack;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditPackBuilder
{
    /// <summary>
    /// Builds an audit pack from a scan result: collects evidence (attestations, SBOMs,
    /// VEX documents, trust roots) and the minimal offline bundle, assembles the pack
    /// record, materializes its file listing, and stamps the canonical content digest.
    /// </summary>
    public async Task<AuditPackRecord> BuildAsync(
        ScanResult scanResult,
        AuditPackOptions options,
        CancellationToken ct = default)
    {
        var attestations = await CollectAttestationsAsync(scanResult, ct).ConfigureAwait(false);
        var sboms = CollectSboms(scanResult);
        var vexDocuments = CollectVexDocuments(scanResult);
        var trustRoots = await CollectTrustRootsAsync(options, ct).ConfigureAwait(false);
        var bundleManifest = await BuildMinimalBundleAsync(scanResult, ct).ConfigureAwait(false);
        var now = _timeProvider.GetUtcNow();
        var pack = new AuditPackRecord
        {
            PackId = _idGenerator.NewPackId(),
            SchemaVersion = "1.0.0",
            Name = options.Name ?? $"audit-pack-{scanResult.ScanId}",
            CreatedAt = now,
            RunManifest = new RunManifest(scanResult.ScanId, now),
            EvidenceIndex = new EvidenceIndex(ImmutableArray<string>.Empty),
            Verdict = new Verdict(scanResult.ScanId, "completed"),
            OfflineBundle = bundleManifest,
            Attestations = [.. attestations],
            Sboms = [.. sboms],
            VexDocuments = [.. vexDocuments],
            TrustRoots = [.. trustRoots],
            // Placeholder contents; replaced below once the real file listing is built.
            // (The previous version built this from an always-empty local list.)
            Contents = new PackContents
            {
                Files = [],
                TotalSizeBytes = 0,
                FileCount = 0
            }
        };
        // Materialize the concrete file listing from the pack itself, then seal with a digest.
        var fileResult = BuildPackFiles(pack);
        pack = pack with { Contents = fileResult.Contents };
        return WithDigest(pack);
    }
}

View File

@@ -0,0 +1,37 @@
using StellaOps.AuditPack.Models;
using System.Collections.Immutable;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditPackBuilder
{
    // All collectors below are stubs: they currently return empty/minimal values and are
    // marked TODO until the backing storage and configuration integrations are implemented.

    private static Task<ImmutableArray<Attestation>> CollectAttestationsAsync(ScanResult scanResult, CancellationToken ct)
    {
        // TODO: Collect attestations from storage
        return Task.FromResult(ImmutableArray<Attestation>.Empty);
    }

    private static ImmutableArray<SbomDocument> CollectSboms(ScanResult scanResult)
    {
        // TODO: Collect SBOMs
        return [];
    }

    private static ImmutableArray<VexDocument> CollectVexDocuments(ScanResult scanResult)
    {
        // TODO: Collect VEX documents
        return [];
    }

    private static Task<ImmutableArray<TrustRoot>> CollectTrustRootsAsync(AuditPackOptions options, CancellationToken ct)
    {
        // TODO: Load trust roots from configuration
        return Task.FromResult(ImmutableArray<TrustRoot>.Empty);
    }

    private static Task<BundleManifest> BuildMinimalBundleAsync(ScanResult scanResult, CancellationToken ct)
    {
        // TODO: Build minimal offline bundle
        return Task.FromResult(new BundleManifest("bundle-1", "1.0.0"));
    }
}

View File

@@ -0,0 +1,20 @@
using System.Security.Cryptography;
using AuditPackRecord = StellaOps.AuditPack.Models.AuditPack;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditPackBuilder
{
    /// <summary>
    /// Returns a copy of <paramref name="pack"/> whose PackDigest is the SHA-256 of its
    /// canonical JSON form, serialized with PackDigest and Signature cleared so the digest
    /// never covers itself.
    /// </summary>
    private static AuditPackRecord WithDigest(AuditPackRecord pack)
    {
        var canonical = pack with { PackDigest = null, Signature = null };
        var digest = ComputeDigest(CanonicalJson.Serialize(canonical));
        return pack with { PackDigest = digest };
    }

    /// <summary>
    /// SHA-256 of <paramref name="content"/> as bare lowercase hex (no "sha256:" prefix).
    /// </summary>
    private static string ComputeDigest(byte[] content) =>
        Convert.ToHexString(SHA256.HashData(content)).ToLowerInvariant();
}

View File

@@ -0,0 +1,54 @@
using StellaOps.AuditPack.Models;
using AuditPackRecord = StellaOps.AuditPack.Models.AuditPack;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditPackBuilder
{
    /// <summary>
    /// Exports an audit pack to a tar.gz archive at <paramref name="outputPath"/>.
    /// The file listing and pack digest are recomputed first so the archived manifest
    /// always matches the archived content, even if the caller modified the pack.
    /// </summary>
    public async Task ExportAsync(
        AuditPackRecord pack,
        string outputPath,
        ExportOptions options,
        CancellationToken ct = default)
    {
        var fileBuild = BuildPackFiles(pack);
        pack = pack with { Contents = fileBuild.Contents };
        pack = WithDigest(pack);
        var entries = fileBuild.Entries;
        var manifestBytes = CanonicalJson.Serialize(pack);
        // Manifest is placed first in the archive, ahead of the evidence files.
        entries.Insert(0, new ArchiveEntry("manifest.json", manifestBytes));
        // Signing happens only when both requested and a key path is provided;
        // otherwise the archive is written unsigned without error.
        if (options.Sign && !string.IsNullOrWhiteSpace(options.SigningKey))
        {
            var signature = await SignManifestAsync(manifestBytes, options.SigningKey, ct)
                .ConfigureAwait(false);
            entries.Add(new ArchiveEntry("manifest.sig", signature));
        }
        await ArchiveUtilities.WriteTarGzAsync(outputPath, entries, ct).ConfigureAwait(false);
    }

    /// <summary>
    /// Signs the manifest bytes with the key file at <paramref name="signingKey"/> and
    /// returns the signature envelope. Unlike the bundle writer's best-effort signing,
    /// this throws <see cref="InvalidOperationException"/> when signing fails.
    /// </summary>
    private static async Task<byte[]> SignManifestAsync(byte[] manifestBytes, string signingKey, CancellationToken ct)
    {
        var signer = new AuditBundleSigner();
        var result = await signer.SignAsync(
            new AuditBundleSigningRequest
            {
                ManifestBytes = manifestBytes,
                KeyFilePath = signingKey
            },
            ct)
            .ConfigureAwait(false);
        if (!result.Success || result.Envelope is null)
        {
            throw new InvalidOperationException(result.Error ?? "Failed to sign audit pack manifest.");
        }
        return result.Envelope;
    }
}

View File

@@ -0,0 +1,73 @@
using StellaOps.AuditPack.Models;
using System.Text;
using AuditPackRecord = StellaOps.AuditPack.Models.AuditPack;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditPackBuilder
{
    /// <summary>
    /// Materializes the pack's archive entries and file metadata: the three JSON documents
    /// (run manifest, evidence index, verdict) followed by per-item files for SBOMs,
    /// attestations, VEX documents, and trust roots. Returns both the raw archive entries
    /// and the PackContents summary describing them.
    /// </summary>
    private static PackFileBuildResult BuildPackFiles(AuditPackRecord pack)
    {
        var entries = new List<ArchiveEntry>();
        var files = new List<PackFile>();
        AddJsonEntry(entries, files, "run-manifest.json", pack.RunManifest, PackFileType.RunManifest);
        AddJsonEntry(entries, files, "evidence-index.json", pack.EvidenceIndex, PackFileType.EvidenceIndex);
        AddJsonEntry(entries, files, "verdict.json", pack.Verdict, PackFileType.Verdict);
        // Each evidence item is stored under a type-specific folder, named by its Id.
        foreach (var sbom in pack.Sboms)
        {
            AddTextEntry(entries, files, $"sboms/{sbom.Id}.json", sbom.Content, PackFileType.Sbom);
        }
        foreach (var attestation in pack.Attestations)
        {
            AddTextEntry(entries, files, $"attestations/{attestation.Id}.json", attestation.Envelope, PackFileType.Attestation);
        }
        foreach (var vex in pack.VexDocuments)
        {
            AddTextEntry(entries, files, $"vex/{vex.Id}.json", vex.Content, PackFileType.Vex);
        }
        foreach (var root in pack.TrustRoots)
        {
            AddTextEntry(entries, files, $"trust-roots/{root.Id}.pem", root.Content, PackFileType.TrustRoot);
        }
        var contents = new PackContents
        {
            Files = [.. files],
            TotalSizeBytes = files.Sum(f => f.SizeBytes),
            FileCount = files.Count
        };
        return new PackFileBuildResult(entries, contents);
    }

    /// <summary>
    /// Serializes <paramref name="payload"/> to canonical JSON and records it as one pack file.
    /// </summary>
    private static void AddJsonEntry<T>(
        List<ArchiveEntry> entries,
        List<PackFile> files,
        string path,
        T payload,
        PackFileType type)
    {
        var bytes = CanonicalJson.Serialize(payload);
        entries.Add(new ArchiveEntry(path, bytes));
        files.Add(new PackFile(path, ComputeDigest(bytes), bytes.Length, type));
    }

    /// <summary>
    /// Encodes <paramref name="content"/> as UTF-8 and records it as one pack file.
    /// </summary>
    private static void AddTextEntry(
        List<ArchiveEntry> entries,
        List<PackFile> files,
        string path,
        string content,
        PackFileType type)
    {
        var bytes = Encoding.UTF8.GetBytes(content);
        entries.Add(new ArchiveEntry(path, bytes));
        files.Add(new PackFile(path, ComputeDigest(bytes), bytes.Length, type));
    }

    // Archive entries plus the summary metadata that goes into the pack's Contents.
    private sealed record PackFileBuildResult(List<ArchiveEntry> Entries, PackContents Contents);
}

View File

@@ -1,16 +1,9 @@
using StellaOps.AuditPack.Models;
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using AuditPackRecord = StellaOps.AuditPack.Models.AuditPack;
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Builds audit packs from scan results.
/// </summary>
public sealed class AuditPackBuilder : IAuditPackBuilder
public sealed partial class AuditPackBuilder : IAuditPackBuilder
{
private readonly TimeProvider _timeProvider;
private readonly IAuditPackIdGenerator _idGenerator;
@@ -20,231 +13,4 @@ public sealed class AuditPackBuilder : IAuditPackBuilder
_timeProvider = timeProvider ?? TimeProvider.System;
_idGenerator = idGenerator ?? new GuidAuditPackIdGenerator();
}
/// <summary>
/// Builds an audit pack from a scan result.
/// </summary>
public async Task<AuditPackRecord> BuildAsync(
ScanResult scanResult,
AuditPackOptions options,
CancellationToken ct = default)
{
var files = new List<PackFile>();
// Collect all evidence
var attestations = await CollectAttestationsAsync(scanResult, ct);
var sboms = CollectSboms(scanResult);
var vexDocuments = CollectVexDocuments(scanResult);
var trustRoots = await CollectTrustRootsAsync(options, ct);
// Build offline bundle subset (only required feeds/policies)
var bundleManifest = await BuildMinimalBundleAsync(scanResult, ct);
// Create pack structure
var now = _timeProvider.GetUtcNow();
var pack = new AuditPackRecord
{
PackId = _idGenerator.NewPackId(),
SchemaVersion = "1.0.0",
Name = options.Name ?? $"audit-pack-{scanResult.ScanId}",
CreatedAt = now,
RunManifest = new RunManifest(scanResult.ScanId, now),
EvidenceIndex = new EvidenceIndex(Array.Empty<string>().ToImmutableArray()),
Verdict = new Verdict(scanResult.ScanId, "completed"),
OfflineBundle = bundleManifest,
Attestations = [.. attestations],
Sboms = [.. sboms],
VexDocuments = [.. vexDocuments],
TrustRoots = [.. trustRoots],
Contents = new PackContents
{
Files = [.. files],
TotalSizeBytes = files.Sum(f => f.SizeBytes),
FileCount = files.Count
}
};
var fileResult = BuildPackFiles(pack);
pack = pack with { Contents = fileResult.Contents };
return WithDigest(pack);
}
/// <summary>
/// Exports audit pack to archive file.
/// </summary>
public async Task ExportAsync(
AuditPackRecord pack,
string outputPath,
ExportOptions options,
CancellationToken ct = default)
{
var fileBuild = BuildPackFiles(pack);
pack = pack with { Contents = fileBuild.Contents };
pack = WithDigest(pack);
var entries = fileBuild.Entries;
var manifestBytes = CanonicalJson.Serialize(pack);
entries.Insert(0, new ArchiveEntry("manifest.json", manifestBytes));
if (options.Sign && !string.IsNullOrWhiteSpace(options.SigningKey))
{
var signature = await SignManifestAsync(manifestBytes, options.SigningKey, ct);
entries.Add(new ArchiveEntry("manifest.sig", signature));
}
await ArchiveUtilities.WriteTarGzAsync(outputPath, entries, ct);
}
private static AuditPackRecord WithDigest(AuditPackRecord pack)
{
var json = CanonicalJson.Serialize(pack with { PackDigest = null, Signature = null });
var digest = ComputeDigest(json);
return pack with { PackDigest = digest };
}
/// <summary>Lower-case hex SHA-256 digest of <paramref name="content"/>.</summary>
private static string ComputeDigest(byte[] content)
    => Convert.ToHexString(SHA256.HashData(content)).ToLowerInvariant();
private static Task<ImmutableArray<Attestation>> CollectAttestationsAsync(ScanResult scanResult, CancellationToken ct)
{
// TODO: Collect attestations from storage
return Task.FromResult(ImmutableArray<Attestation>.Empty);
}
private static ImmutableArray<SbomDocument> CollectSboms(ScanResult scanResult)
{
// TODO: Collect SBOMs
return [];
}
private static ImmutableArray<VexDocument> CollectVexDocuments(ScanResult scanResult)
{
// TODO: Collect VEX documents
return [];
}
private static Task<ImmutableArray<TrustRoot>> CollectTrustRootsAsync(AuditPackOptions options, CancellationToken ct)
{
// TODO: Load trust roots from configuration
return Task.FromResult(ImmutableArray<TrustRoot>.Empty);
}
private static Task<BundleManifest> BuildMinimalBundleAsync(ScanResult scanResult, CancellationToken ct)
{
// TODO: Build minimal offline bundle
return Task.FromResult(new BundleManifest("bundle-1", "1.0.0"));
}
private static async Task<byte[]> SignManifestAsync(byte[] manifestBytes, string signingKey, CancellationToken ct)
{
var signer = new AuditBundleSigner();
var result = await signer.SignAsync(
new AuditBundleSigningRequest
{
ManifestBytes = manifestBytes,
KeyFilePath = signingKey
},
ct);
if (!result.Success || result.Envelope is null)
{
throw new InvalidOperationException(result.Error ?? "Failed to sign audit pack manifest.");
}
return result.Envelope;
}
private static PackFileBuildResult BuildPackFiles(AuditPackRecord pack)
{
var entries = new List<ArchiveEntry>();
var files = new List<PackFile>();
AddJsonEntry(entries, files, "run-manifest.json", pack.RunManifest, PackFileType.RunManifest);
AddJsonEntry(entries, files, "evidence-index.json", pack.EvidenceIndex, PackFileType.EvidenceIndex);
AddJsonEntry(entries, files, "verdict.json", pack.Verdict, PackFileType.Verdict);
foreach (var sbom in pack.Sboms)
{
AddTextEntry(entries, files, $"sboms/{sbom.Id}.json", sbom.Content, PackFileType.Sbom);
}
foreach (var attestation in pack.Attestations)
{
AddTextEntry(entries, files, $"attestations/{attestation.Id}.json", attestation.Envelope, PackFileType.Attestation);
}
foreach (var vex in pack.VexDocuments)
{
AddTextEntry(entries, files, $"vex/{vex.Id}.json", vex.Content, PackFileType.Vex);
}
foreach (var root in pack.TrustRoots)
{
AddTextEntry(entries, files, $"trust-roots/{root.Id}.pem", root.Content, PackFileType.TrustRoot);
}
var contents = new PackContents
{
Files = [.. files],
TotalSizeBytes = files.Sum(f => f.SizeBytes),
FileCount = files.Count
};
return new PackFileBuildResult(entries, contents);
}
private static void AddJsonEntry<T>(
List<ArchiveEntry> entries,
List<PackFile> files,
string path,
T payload,
PackFileType type)
{
var bytes = CanonicalJson.Serialize(payload);
entries.Add(new ArchiveEntry(path, bytes));
files.Add(new PackFile(path, ComputeDigest(bytes), bytes.Length, type));
}
private static void AddTextEntry(
List<ArchiveEntry> entries,
List<PackFile> files,
string path,
string content,
PackFileType type)
{
var bytes = Encoding.UTF8.GetBytes(content);
entries.Add(new ArchiveEntry(path, bytes));
files.Add(new PackFile(path, ComputeDigest(bytes), bytes.Length, type));
}
private sealed record PackFileBuildResult(List<ArchiveEntry> Entries, PackContents Contents);
}
/// <summary>
/// Builds audit packs from scan results and exports them to archive files.
/// </summary>
public interface IAuditPackBuilder
{
    /// <summary>Builds an in-memory audit pack for the given scan result.</summary>
    Task<AuditPackRecord> BuildAsync(ScanResult scanResult, AuditPackOptions options, CancellationToken ct = default);
    /// <summary>Writes the pack to <paramref name="outputPath"/> as a tar.gz archive, optionally signed per <see cref="ExportOptions"/>.</summary>
    Task ExportAsync(AuditPackRecord pack, string outputPath, ExportOptions options, CancellationToken ct = default);
}
public sealed record AuditPackOptions
{
public string? Name { get; init; }
public bool IncludeFeeds { get; init; } = true;
public bool IncludePolicies { get; init; } = true;
public bool MinimizeSize { get; init; } = false;
}
/// <summary>Options controlling how an audit pack is exported to disk.</summary>
public sealed record ExportOptions
{
    /// <summary>Sign the manifest; a signature is only produced when <see cref="SigningKey"/> is also set.</summary>
    public bool Sign { get; init; } = true;
    /// <summary>Path to the signing key file used to produce manifest.sig.</summary>
    public string? SigningKey { get; init; }
    /// <summary>Compression flag. NOTE(review): not consulted by the visible export path (tar.gz is always written) — confirm intended use.</summary>
    public bool Compress { get; init; } = true;
}
// Placeholder for scan result
public sealed record ScanResult(string ScanId);

View File

@@ -0,0 +1,43 @@
using System.Text.Json;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditPackExportService
{
    /// <summary>
    /// Exports the audit pack as a DSSE envelope whose payload is the JSON export.
    /// </summary>
    private async Task<ExportResult> ExportAsDsseAsync(
        ExportRequest request,
        CancellationToken ct)
    {
        // The DSSE payload is the plain JSON export; propagate its failure as-is.
        var inner = await ExportAsJsonAsync(request, ct).ConfigureAwait(false);
        if (!inner.Success)
        {
            return inner;
        }

        var collectedSignatures = new List<DsseSignature>();
        if (_dsseSigner is not null)
        {
            collectedSignatures.Add(await _dsseSigner.SignAsync(inner.Data!, ct).ConfigureAwait(false));
        }

        var envelope = new DsseExportEnvelope
        {
            PayloadType = "application/vnd.stellaops.audit-pack+json",
            Payload = Convert.ToBase64String(inner.Data!),
            Signatures = collectedSignatures
        };

        var serialized = JsonSerializer.SerializeToUtf8Bytes(envelope, _jsonOptions);
        return new ExportResult
        {
            Success = true,
            Data = serialized,
            ContentType = "application/vnd.dsse+json",
            Filename = $"{request.Filename}.dsse.json",
            SizeBytes = serialized.Length
        };
    }
}

View File

@@ -0,0 +1,65 @@
using System.Globalization;
using System.Text.Json;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditPackExportService
{
    /// <summary>
    /// Exports the audit pack as a single JSON document containing the requested
    /// evidence segments plus optional attestations and proof chain.
    /// </summary>
    private async Task<ExportResult> ExportAsJsonAsync(
        ExportRequest request,
        CancellationToken ct)
    {
        var exportDoc = new Dictionary<string, object>
        {
            // Round-trip "O" format keeps the timestamp culture-invariant and sortable.
            ["exportedAt"] = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
            ["scanId"] = request.ScanId,
            ["format"] = "json",
            ["version"] = "1.0"
        };

        var segments = new Dictionary<string, object>();
        foreach (var segment in request.Segments)
        {
            var segmentData = await GetSegmentDataAsync(request.ScanId, segment, ct).ConfigureAwait(false);
            if (segmentData is not null)
            {
                try
                {
                    // FIX: dispose the JsonDocument and store a detached Clone() of the
                    // root element. A JsonElement taken from an undisposed document keeps
                    // the document's pooled buffers alive (the original leaked them), and
                    // one taken from a disposed document is invalid.
                    using var parsedDoc = JsonDocument.Parse(segmentData);
                    segments[segment.ToString().ToLowerInvariant()] = parsedDoc.RootElement.Clone();
                }
                catch
                {
                    // Segment payload is not valid JSON; embed it verbatim as base64.
                    segments[segment.ToString().ToLowerInvariant()] = Convert.ToBase64String(segmentData);
                }
            }
        }
        exportDoc["segments"] = segments;

        if (request.IncludeAttestations)
        {
            var attestations = await GetAttestationsAsync(request.ScanId, ct).ConfigureAwait(false);
            exportDoc["attestations"] = attestations;
        }

        if (request.IncludeProofChain)
        {
            var proofChain = await GetProofChainAsync(request.ScanId, ct).ConfigureAwait(false);
            if (proofChain is not null)
            {
                exportDoc["proofChain"] = proofChain;
            }
        }

        var json = JsonSerializer.SerializeToUtf8Bytes(exportDoc, _jsonOptions);
        return new ExportResult
        {
            Success = true,
            Data = json,
            ContentType = "application/json",
            Filename = $"{request.Filename}.json",
            SizeBytes = json.Length
        };
    }
}

View File

@@ -0,0 +1,35 @@
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditPackExportService
{
    /// <summary>
    /// Loads the raw bytes for one evidence segment; without a repository an empty
    /// JSON object placeholder is returned instead.
    /// </summary>
    private async Task<byte[]?> GetSegmentDataAsync(
        string scanId,
        ExportSegment segment,
        CancellationToken ct)
    {
        return _repository is null
            ? _emptySegmentPayload
            : await _repository.GetSegmentDataAsync(scanId, segment, ct).ConfigureAwait(false);
    }

    /// <summary>Fetches the attestations recorded for a scan; empty when no repository is wired up.</summary>
    private async Task<List<object>> GetAttestationsAsync(string scanId, CancellationToken ct)
    {
        if (_repository is null)
        {
            return [];
        }

        var fetched = await _repository.GetAttestationsAsync(scanId, ct).ConfigureAwait(false);
        return [.. fetched];
    }

    /// <summary>Fetches the proof chain for a scan; null when no repository is wired up.</summary>
    private async Task<object?> GetProofChainAsync(string scanId, CancellationToken ct)
    {
        if (_repository is null)
        {
            return null;
        }

        return await _repository.GetProofChainAsync(scanId, ct).ConfigureAwait(false);
    }
}

View File

@@ -0,0 +1,61 @@
using System.IO.Compression;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditPackExportService
{
    /// <summary>
    /// Exports the audit pack as a ZIP bundle: a manifest entry, the requested
    /// evidence segments, and optionally attestations and the proof chain.
    /// </summary>
    private async Task<ExportResult> ExportAsZipAsync(
        ExportRequest request,
        CancellationToken ct)
    {
        using var buffer = new MemoryStream();

        // The archive must be disposed before the buffer is read so the central
        // directory is flushed; leaveOpen keeps the MemoryStream usable afterwards.
        using (var archive = new ZipArchive(buffer, ZipArchiveMode.Create, leaveOpen: true))
        {
            await AddJsonToZipAsync(archive, "manifest.json", CreateManifest(request), ct).ConfigureAwait(false);

            foreach (var segment in request.Segments)
            {
                var payload = await GetSegmentDataAsync(request.ScanId, segment, ct).ConfigureAwait(false);
                if (payload is null)
                {
                    continue;
                }

                await AddBytesToZipAsync(archive, GetSegmentPath(segment), payload).ConfigureAwait(false);
            }

            if (request.IncludeAttestations)
            {
                var attestations = await GetAttestationsAsync(request.ScanId, ct).ConfigureAwait(false);
                if (attestations.Count > 0)
                {
                    await AddJsonToZipAsync(archive, "attestations/attestations.json", attestations, ct)
                        .ConfigureAwait(false);
                }
            }

            if (request.IncludeProofChain)
            {
                var proofChain = await GetProofChainAsync(request.ScanId, ct).ConfigureAwait(false);
                if (proofChain is not null)
                {
                    await AddJsonToZipAsync(archive, "proof/proof-chain.json", proofChain, ct)
                        .ConfigureAwait(false);
                }
            }
        }

        buffer.Position = 0;
        var bytes = buffer.ToArray();
        return new ExportResult
        {
            Success = true,
            Data = bytes,
            ContentType = "application/zip",
            Filename = $"{request.Filename}.zip",
            SizeBytes = bytes.Length
        };
    }
}

View File

@@ -0,0 +1,58 @@
using System.IO.Compression;
using System.Linq;
using System.Text.Json;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditPackExportService
{
    /// <summary>Builds the manifest record describing an export bundle's contents.</summary>
    private ExportManifest CreateManifest(ExportRequest request)
    {
        return new ExportManifest
        {
            ExportedAt = _timeProvider.GetUtcNow(),
            ScanId = request.ScanId,
            FindingIds = request.FindingIds,
            Format = request.Format.ToString(),
            Segments = [.. request.Segments.Select(s => s.ToString())],
            IncludesAttestations = request.IncludeAttestations,
            IncludesProofChain = request.IncludeProofChain,
            Version = "1.0"
        };
    }

    /// <summary>
    /// Maps a segment to its archive entry path; unknown segments fall back to
    /// "segments/{name}.json".
    /// </summary>
    private static string GetSegmentPath(ExportSegment segment)
    {
        return segment switch
        {
            ExportSegment.Sbom => "sbom/sbom.json",
            ExportSegment.Match => "match/vulnerability-match.json",
            ExportSegment.Reachability => "reachability/reachability-analysis.json",
            ExportSegment.Guards => "guards/guard-analysis.json",
            ExportSegment.Runtime => "runtime/runtime-signals.json",
            ExportSegment.Policy => "policy/policy-evaluation.json",
            _ => $"segments/{segment.ToString().ToLowerInvariant()}.json"
        };
    }

    /// <summary>Serializes <paramref name="data"/> as JSON into a new archive entry.</summary>
    private static async Task AddJsonToZipAsync<T>(
        ZipArchive archive,
        string path,
        T data,
        CancellationToken ct)
    {
        var entry = archive.CreateEntry(path, CompressionLevel.Optimal);
        await using var stream = entry.Open();
        await JsonSerializer.SerializeAsync(stream, data, _jsonOptions, ct).ConfigureAwait(false);
    }

    /// <summary>
    /// Copies raw bytes into a new archive entry.
    /// FIX: now accepts a cancellation token (optional, so existing call sites keep
    /// compiling) and forwards it to WriteAsync, for consistency with
    /// <see cref="AddJsonToZipAsync{T}"/> which already honors cancellation.
    /// </summary>
    private static async Task AddBytesToZipAsync(
        ZipArchive archive,
        string path,
        byte[] data,
        CancellationToken ct = default)
    {
        var entry = archive.CreateEntry(path, CompressionLevel.Optimal);
        await using var stream = entry.Open();
        await stream.WriteAsync(data, ct).ConfigureAwait(false);
    }
}

View File

@@ -1,13 +1,3 @@
// -----------------------------------------------------------------------------
// AuditPackExportService.cs
// Sprint: SPRINT_1227_0005_0003_FE_copy_audit_export
// Task: T5 — Backend export service for audit packs
// -----------------------------------------------------------------------------
using StellaOps.AuditPack.Models;
using System.Globalization;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
@@ -17,10 +7,10 @@ namespace StellaOps.AuditPack.Services;
/// Service for exporting audit packs in various formats.
/// Supports ZIP bundle, JSON, and DSSE envelope formats.
/// </summary>
public sealed class AuditPackExportService : IAuditPackExportService
public sealed partial class AuditPackExportService : IAuditPackExportService
{
private static readonly byte[] EmptySegmentPayload = Encoding.UTF8.GetBytes("{}");
private static readonly JsonSerializerOptions JsonOptions = new()
private static readonly byte[] _emptySegmentPayload = Encoding.UTF8.GetBytes("{}");
private static readonly JsonSerializerOptions _jsonOptions = new()
{
WriteIndented = true,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
@@ -57,375 +47,10 @@ public sealed class AuditPackExportService : IAuditPackExportService
return request.Format switch
{
ExportFormat.Zip => await ExportAsZipAsync(request, cancellationToken),
ExportFormat.Json => await ExportAsJsonAsync(request, cancellationToken),
ExportFormat.Dsse => await ExportAsDsseAsync(request, cancellationToken),
ExportFormat.Zip => await ExportAsZipAsync(request, cancellationToken).ConfigureAwait(false),
ExportFormat.Json => await ExportAsJsonAsync(request, cancellationToken).ConfigureAwait(false),
ExportFormat.Dsse => await ExportAsDsseAsync(request, cancellationToken).ConfigureAwait(false),
_ => ExportResult.Failed($"Unsupported export format: {request.Format}")
};
}
/// <summary>
/// Exports as a ZIP bundle containing all evidence segments.
/// </summary>
private async Task<ExportResult> ExportAsZipAsync(
ExportRequest request,
CancellationToken ct)
{
using var memoryStream = new MemoryStream();
using (var archive = new ZipArchive(memoryStream, ZipArchiveMode.Create, leaveOpen: true))
{
// Create manifest
var manifest = CreateManifest(request);
await AddJsonToZipAsync(archive, "manifest.json", manifest, ct);
// Add selected segments
foreach (var segment in request.Segments)
{
var segmentData = await GetSegmentDataAsync(request.ScanId, segment, ct);
if (segmentData is not null)
{
var path = GetSegmentPath(segment);
await AddBytesToZipAsync(archive, path, segmentData);
}
}
// Add attestations if requested
if (request.IncludeAttestations)
{
var attestations = await GetAttestationsAsync(request.ScanId, ct);
if (attestations.Count > 0)
{
await AddJsonToZipAsync(archive, "attestations/attestations.json", attestations, ct);
}
}
// Add proof chain if requested
if (request.IncludeProofChain)
{
var proofChain = await GetProofChainAsync(request.ScanId, ct);
if (proofChain is not null)
{
await AddJsonToZipAsync(archive, "proof/proof-chain.json", proofChain, ct);
}
}
}
memoryStream.Position = 0;
var bytes = memoryStream.ToArray();
return new ExportResult
{
Success = true,
Data = bytes,
ContentType = "application/zip",
Filename = $"{request.Filename}.zip",
SizeBytes = bytes.Length
};
}
/// <summary>
/// Exports as a single JSON document.
/// </summary>
private async Task<ExportResult> ExportAsJsonAsync(
ExportRequest request,
CancellationToken ct)
{
var exportDoc = new Dictionary<string, object>
{
["exportedAt"] = _timeProvider.GetUtcNow().ToString("O", CultureInfo.InvariantCulture),
["scanId"] = request.ScanId,
["format"] = "json",
["version"] = "1.0"
};
// Add segments
var segments = new Dictionary<string, object>();
foreach (var segment in request.Segments)
{
var segmentData = await GetSegmentDataAsync(request.ScanId, segment, ct);
if (segmentData is not null)
{
try
{
var parsedDoc = JsonDocument.Parse(segmentData);
segments[segment.ToString().ToLowerInvariant()] = parsedDoc.RootElement;
}
catch
{
segments[segment.ToString().ToLowerInvariant()] = Convert.ToBase64String(segmentData);
}
}
}
exportDoc["segments"] = segments;
// Add attestations
if (request.IncludeAttestations)
{
var attestations = await GetAttestationsAsync(request.ScanId, ct);
exportDoc["attestations"] = attestations;
}
// Add proof chain
if (request.IncludeProofChain)
{
var proofChain = await GetProofChainAsync(request.ScanId, ct);
if (proofChain is not null)
{
exportDoc["proofChain"] = proofChain;
}
}
var json = JsonSerializer.SerializeToUtf8Bytes(exportDoc, JsonOptions);
return new ExportResult
{
Success = true,
Data = json,
ContentType = "application/json",
Filename = $"{request.Filename}.json",
SizeBytes = json.Length
};
}
/// <summary>
/// Exports as a DSSE envelope with signature.
/// </summary>
private async Task<ExportResult> ExportAsDsseAsync(
ExportRequest request,
CancellationToken ct)
{
// First create the JSON payload
var jsonResult = await ExportAsJsonAsync(request, ct);
if (!jsonResult.Success)
{
return jsonResult;
}
// Create DSSE envelope structure
var payload = Convert.ToBase64String(jsonResult.Data!);
var signatures = new List<DsseSignature>();
if (_dsseSigner is not null)
{
var signature = await _dsseSigner.SignAsync(jsonResult.Data!, ct);
signatures.Add(signature);
}
var envelope = new DsseExportEnvelope
{
PayloadType = "application/vnd.stellaops.audit-pack+json",
Payload = payload,
Signatures = signatures
};
var envelopeBytes = JsonSerializer.SerializeToUtf8Bytes(envelope, JsonOptions);
return new ExportResult
{
Success = true,
Data = envelopeBytes,
ContentType = "application/vnd.dsse+json",
Filename = $"{request.Filename}.dsse.json",
SizeBytes = envelopeBytes.Length
};
}
private ExportManifest CreateManifest(ExportRequest request)
{
return new ExportManifest
{
ExportedAt = _timeProvider.GetUtcNow(),
ScanId = request.ScanId,
FindingIds = request.FindingIds,
Format = request.Format.ToString(),
Segments = [.. request.Segments.Select(s => s.ToString())],
IncludesAttestations = request.IncludeAttestations,
IncludesProofChain = request.IncludeProofChain,
Version = "1.0"
};
}
private static string GetSegmentPath(ExportSegment segment)
{
return segment switch
{
ExportSegment.Sbom => "sbom/sbom.json",
ExportSegment.Match => "match/vulnerability-match.json",
ExportSegment.Reachability => "reachability/reachability-analysis.json",
ExportSegment.Guards => "guards/guard-analysis.json",
ExportSegment.Runtime => "runtime/runtime-signals.json",
ExportSegment.Policy => "policy/policy-evaluation.json",
_ => $"segments/{segment.ToString().ToLowerInvariant()}.json"
};
}
private async Task<byte[]?> GetSegmentDataAsync(
string scanId,
ExportSegment segment,
CancellationToken ct)
{
if (_repository is null)
{
return EmptySegmentPayload;
}
return await _repository.GetSegmentDataAsync(scanId, segment, ct);
}
private async Task<List<object>> GetAttestationsAsync(string scanId, CancellationToken ct)
{
if (_repository is null)
{
return [];
}
var attestations = await _repository.GetAttestationsAsync(scanId, ct);
return [.. attestations];
}
private async Task<object?> GetProofChainAsync(string scanId, CancellationToken ct)
{
return _repository is null
? null
: await _repository.GetProofChainAsync(scanId, ct);
}
private static async Task AddJsonToZipAsync<T>(
ZipArchive archive,
string path,
T data,
CancellationToken ct)
{
var entry = archive.CreateEntry(path, CompressionLevel.Optimal);
await using var stream = entry.Open();
await JsonSerializer.SerializeAsync(stream, data, JsonOptions, ct);
}
private static async Task AddBytesToZipAsync(
ZipArchive archive,
string path,
byte[] data)
{
var entry = archive.CreateEntry(path, CompressionLevel.Optimal);
await using var stream = entry.Open();
await stream.WriteAsync(data);
}
}
/// <summary>
/// Interface for audit pack export service.
/// </summary>
public interface IAuditPackExportService
{
Task<ExportResult> ExportAsync(ExportRequest request, CancellationToken cancellationToken = default);
}
/// <summary>
/// Repository interface for accessing audit pack data.
/// </summary>
public interface IAuditPackRepository
{
Task<byte[]?> GetSegmentDataAsync(string scanId, ExportSegment segment, CancellationToken ct);
Task<IReadOnlyList<object>> GetAttestationsAsync(string scanId, CancellationToken ct);
Task<object?> GetProofChainAsync(string scanId, CancellationToken ct);
}
/// <summary>
/// DSSE signer for audit pack exports.
/// </summary>
public interface IAuditPackExportSigner
{
Task<DsseSignature> SignAsync(byte[] payload, CancellationToken ct);
}
#region Models
/// <summary>
/// Export format options.
/// </summary>
public enum ExportFormat
{
Zip,
Json,
Dsse
}
/// <summary>
/// Evidence segment types for export.
/// </summary>
public enum ExportSegment
{
Sbom,
Match,
Reachability,
Guards,
Runtime,
Policy
}
/// <summary>
/// Request for audit pack export.
/// </summary>
public sealed record ExportRequest
{
    /// <summary>Identifier of the scan whose evidence is exported.</summary>
    public required string ScanId { get; init; }
    /// <summary>Optional finding ids, recorded in the export manifest. NOTE(review): not used to filter segments in the visible code — confirm.</summary>
    public IReadOnlyList<string>? FindingIds { get; init; }
    /// <summary>Output format: ZIP bundle, JSON document, or DSSE envelope.</summary>
    public required ExportFormat Format { get; init; }
    /// <summary>Evidence segments to include in the export.</summary>
    public required IReadOnlyList<ExportSegment> Segments { get; init; }
    /// <summary>Include recorded attestations in the output.</summary>
    public bool IncludeAttestations { get; init; }
    /// <summary>Include the proof chain in the output.</summary>
    public bool IncludeProofChain { get; init; }
    /// <summary>Base output filename; the exporter appends the format-specific extension.</summary>
    public required string Filename { get; init; }
}
/// <summary>
/// Result of audit pack export.
/// </summary>
public sealed record ExportResult
{
public bool Success { get; init; }
public byte[]? Data { get; init; }
public string? ContentType { get; init; }
public string? Filename { get; init; }
public long SizeBytes { get; init; }
public string? Error { get; init; }
public static ExportResult Failed(string error) => new()
{
Success = false,
Error = error
};
}
/// <summary>
/// Export manifest included in ZIP bundles.
/// </summary>
public sealed record ExportManifest
{
public DateTimeOffset ExportedAt { get; init; }
public required string ScanId { get; init; }
public IReadOnlyList<string>? FindingIds { get; init; }
public required string Format { get; init; }
public required IReadOnlyList<string> Segments { get; init; }
public bool IncludesAttestations { get; init; }
public bool IncludesProofChain { get; init; }
public required string Version { get; init; }
}
/// <summary>
/// DSSE envelope for export.
/// </summary>
public sealed record DsseExportEnvelope
{
public required string PayloadType { get; init; }
public required string Payload { get; init; }
public required IReadOnlyList<DsseSignature> Signatures { get; init; }
}
/// <summary>
/// DSSE signature entry.
/// </summary>
public sealed record DsseSignature
{
public required string KeyId { get; init; }
public required string Sig { get; init; }
}
#endregion

View File

@@ -0,0 +1,49 @@
using System.Security.Cryptography;
using AuditPackRecord = StellaOps.AuditPack.Models.AuditPack;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditPackImporter
{
    /// <summary>
    /// Verifies the integrity of an extracted audit pack: every file listed in the
    /// manifest must exist on disk and match its recorded SHA-256 digest, and the
    /// overall pack digest (when present) must match a recomputation over the manifest.
    /// All problems are accumulated rather than failing fast.
    /// </summary>
    private static async Task<IntegrityResult> VerifyIntegrityAsync(
        AuditPackRecord pack,
        string extractDir,
        CancellationToken ct)
    {
        var errors = new List<string>();
        // Check each listed file: presence first, then digest.
        foreach (var file in pack.Contents.Files)
        {
            var filePath = Path.Combine(extractDir, file.RelativePath);
            if (!File.Exists(filePath))
            {
                errors.Add($"Missing file: {file.RelativePath}");
                continue;
            }
            var content = await File.ReadAllBytesAsync(filePath, ct).ConfigureAwait(false);
            // Digests are stored as lower-case hex; normalize both sides before comparing.
            var actualDigest = Convert.ToHexString(SHA256.HashData(content)).ToLowerInvariant();
            if (actualDigest != file.Digest.ToLowerInvariant())
            {
                errors.Add($"Digest mismatch for {file.RelativePath}: expected {file.Digest}, got {actualDigest}");
            }
        }
        // Recompute the pack-level digest over the manifest with the digest and
        // signature fields cleared (they are excluded from their own computation).
        if (!string.IsNullOrEmpty(pack.PackDigest))
        {
            var computed = ComputePackDigest(pack);
            if (computed != pack.PackDigest)
            {
                errors.Add($"Pack digest mismatch: expected {pack.PackDigest}, got {computed}");
            }
        }
        return new IntegrityResult(errors.Count == 0, errors);
    }

    /// <summary>
    /// Lower-case hex SHA-256 of the canonical-JSON form of the pack with
    /// PackDigest and Signature nulled out.
    /// </summary>
    private static string ComputePackDigest(AuditPackRecord pack)
    {
        var json = CanonicalJson.Serialize(pack with { PackDigest = null, Signature = null });
        return Convert.ToHexString(SHA256.HashData(json)).ToLowerInvariant();
    }
}

View File

@@ -0,0 +1,100 @@
using System.Linq;
using System.Security.Cryptography;
using AuditPackRecord = StellaOps.AuditPack.Models.AuditPack;
namespace StellaOps.AuditPack.Services;
public sealed partial class AuditPackImporter
{
    /// <summary>
    /// Verifies the pack's manifest signature (manifest.sig) against its embedded
    /// trust roots. A missing signature file is treated as valid ("No signature
    /// present"); when one exists, at least one trust root must both verify a
    /// signature in the envelope and match the digest of the manifest bytes that
    /// were actually read during import.
    /// </summary>
    private static async Task<SignatureResult> VerifySignaturesAsync(
        byte[] manifestBytes,
        AuditPackRecord pack,
        string extractDir,
        CancellationToken ct)
    {
        var errors = new List<string>();
        var signaturePath = Path.Combine(extractDir, "manifest.sig");
        if (!File.Exists(signaturePath))
        {
            // Unsigned packs are allowed; callers gate verification via ImportOptions.VerifySignatures.
            return new SignatureResult(true, [], "No signature present");
        }
        var signature = await File.ReadAllBytesAsync(signaturePath, ct).ConfigureAwait(false);
        var trustRoots = pack.TrustRoots;
        if (trustRoots.Length == 0)
        {
            // A present signature with no trust roots is unverifiable, hence a failure.
            errors.Add("No trust roots available for signature verification");
            return new SignatureResult(false, errors);
        }
        foreach (var root in trustRoots)
        {
            if (string.IsNullOrWhiteSpace(root.Content))
            {
                continue;
            }
            // Roots whose content does not parse as an EC or RSA PEM key are skipped silently.
            using var publicKey = TryLoadPublicKey(root.Content);
            if (publicKey is null)
            {
                continue;
            }
            var signer = new AuditBundleSigner();
            var result = await signer.VerifyAsync(
                new AuditBundleVerificationRequest
                {
                    EnvelopeBytes = signature,
                    PublicKey = publicKey
                },
                ct)
                .ConfigureAwait(false);
            if (!result.Success || result.VerifiedSignatures is null)
            {
                continue;
            }
            // Require both a verified signature AND a payload digest matching the
            // imported manifest bytes, tying the envelope to this exact manifest.
            if (result.VerifiedSignatures.Any(s => s.Verified)
                && string.Equals(result.PayloadDigest, ComputeSha256(manifestBytes), StringComparison.Ordinal))
            {
                return new SignatureResult(true, [], $"Verified with {root.Id}");
            }
        }
        errors.Add("Signature does not verify against any trust root");
        return new SignatureResult(false, errors);
    }

    /// <summary>"sha256:"-prefixed lower-case hex SHA-256 of <paramref name="content"/>.</summary>
    private static string ComputeSha256(byte[] content)
    {
        var hash = SHA256.HashData(content);
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }

    /// <summary>
    /// Attempts to parse a PEM public key, trying ECDsa first and falling back to RSA.
    /// Returns null when neither algorithm accepts the input.
    /// </summary>
    private static AsymmetricAlgorithm? TryLoadPublicKey(string pem)
    {
        try
        {
            var ecdsa = ECDsa.Create();
            ecdsa.ImportFromPem(pem);
            return ecdsa;
        }
        catch
        {
            // ignored — fall through and try RSA
        }
        try
        {
            var rsa = RSA.Create();
            rsa.ImportFromPem(pem);
            return rsa;
        }
        catch
        {
            return null;
        }
    }
}

View File

@@ -1,17 +1,15 @@
using StellaOps.AuditPack.Models;
using System.Security.Cryptography;
using System.Text.Json;
using AuditPackRecord = StellaOps.AuditPack.Models.AuditPack;
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Imports and validates audit packs.
/// </summary>
public sealed class AuditPackImporter : IAuditPackImporter
public sealed partial class AuditPackImporter : IAuditPackImporter
{
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
private static readonly JsonSerializerOptions _jsonOptions = new(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true
};
@@ -36,36 +34,34 @@ public sealed class AuditPackImporter : IAuditPackImporter
try
{
// Extract archive
await ArchiveUtilities.ExtractTarGzAsync(archivePath, extractDir, overwriteFiles: true, ct);
await ArchiveUtilities.ExtractTarGzAsync(archivePath, extractDir, overwriteFiles: true, ct)
.ConfigureAwait(false);
// Load manifest
var manifestPath = Path.Combine(extractDir, "manifest.json");
if (!File.Exists(manifestPath))
{
return ImportResult.Failed("Manifest file not found");
}
var manifestJson = await File.ReadAllBytesAsync(manifestPath, ct);
var pack = JsonSerializer.Deserialize<AuditPackRecord>(manifestJson, JsonOptions);
var manifestJson = await File.ReadAllBytesAsync(manifestPath, ct).ConfigureAwait(false);
var pack = JsonSerializer.Deserialize<AuditPackRecord>(manifestJson, _jsonOptions);
if (pack == null)
{
return ImportResult.Failed("Failed to deserialize manifest");
}
// Verify integrity
var integrityResult = await VerifyIntegrityAsync(pack, extractDir, ct);
var integrityResult = await VerifyIntegrityAsync(pack, extractDir, ct).ConfigureAwait(false);
if (!integrityResult.IsValid)
{
return ImportResult.Failed("Integrity verification failed", integrityResult.Errors);
}
// Verify signatures if present
SignatureResult? signatureResult = null;
if (options.VerifySignatures)
{
signatureResult = await VerifySignaturesAsync(manifestJson, pack, extractDir, ct);
signatureResult = await VerifySignaturesAsync(manifestJson, pack, extractDir, ct)
.ConfigureAwait(false);
if (!signatureResult.IsValid)
{
return ImportResult.Failed("Signature verification failed", signatureResult.Errors);
@@ -94,170 +90,4 @@ public sealed class AuditPackImporter : IAuditPackImporter
return ImportResult.Failed($"Import failed: {ex.Message}");
}
}
private static async Task<IntegrityResult> VerifyIntegrityAsync(
AuditPackRecord pack,
string extractDir,
CancellationToken ct)
{
var errors = new List<string>();
// Verify each file digest
foreach (var file in pack.Contents.Files)
{
var filePath = Path.Combine(extractDir, file.RelativePath);
if (!File.Exists(filePath))
{
errors.Add($"Missing file: {file.RelativePath}");
continue;
}
var content = await File.ReadAllBytesAsync(filePath, ct);
var actualDigest = Convert.ToHexString(SHA256.HashData(content)).ToLowerInvariant();
if (actualDigest != file.Digest.ToLowerInvariant())
{
errors.Add($"Digest mismatch for {file.RelativePath}: expected {file.Digest}, got {actualDigest}");
}
}
// Verify pack digest
if (!string.IsNullOrEmpty(pack.PackDigest))
{
var computed = ComputePackDigest(pack);
if (computed != pack.PackDigest)
{
errors.Add($"Pack digest mismatch: expected {pack.PackDigest}, got {computed}");
}
}
return new IntegrityResult(errors.Count == 0, errors);
}
private static async Task<SignatureResult> VerifySignaturesAsync(
byte[] manifestBytes,
AuditPackRecord pack,
string extractDir,
CancellationToken ct)
{
var errors = new List<string>();
var signaturePath = Path.Combine(extractDir, "manifest.sig");
if (!File.Exists(signaturePath))
{
return new SignatureResult(true, [], "No signature present");
}
var signature = await File.ReadAllBytesAsync(signaturePath, ct);
var trustRoots = pack.TrustRoots;
if (trustRoots.Length == 0)
{
errors.Add("No trust roots available for signature verification");
return new SignatureResult(false, errors);
}
foreach (var root in trustRoots)
{
if (string.IsNullOrWhiteSpace(root.Content))
{
continue;
}
using var publicKey = TryLoadPublicKey(root.Content);
if (publicKey is null)
{
continue;
}
var signer = new AuditBundleSigner();
var result = await signer.VerifyAsync(
new AuditBundleVerificationRequest
{
EnvelopeBytes = signature,
PublicKey = publicKey
},
ct);
if (!result.Success || result.VerifiedSignatures is null)
{
continue;
}
if (result.VerifiedSignatures.Any(s => s.Verified)
&& string.Equals(result.PayloadDigest, ComputeSha256(manifestBytes), StringComparison.Ordinal))
{
return new SignatureResult(true, [], $"Verified with {root.Id}");
}
}
errors.Add("Signature does not verify against any trust root");
return new SignatureResult(false, errors);
}
private static string ComputePackDigest(AuditPackRecord pack)
{
var json = CanonicalJson.Serialize(pack with { PackDigest = null, Signature = null });
return Convert.ToHexString(SHA256.HashData(json)).ToLowerInvariant();
}
private static string ComputeSha256(byte[] content)
{
var hash = SHA256.HashData(content);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
private static AsymmetricAlgorithm? TryLoadPublicKey(string pem)
{
try
{
var ecdsa = ECDsa.Create();
ecdsa.ImportFromPem(pem);
return ecdsa;
}
catch
{
// ignored
}
try
{
var rsa = RSA.Create();
rsa.ImportFromPem(pem);
return rsa;
}
catch
{
return null;
}
}
}
public interface IAuditPackImporter
{
Task<ImportResult> ImportAsync(string archivePath, ImportOptions options, CancellationToken ct = default);
}
/// <summary>
/// Options controlling how an audit pack archive is imported.
/// </summary>
public sealed record ImportOptions
{
    /// <summary>Directory to extract into; when null a default location is presumably chosen by the importer — confirm with the implementation.</summary>
    public string? ExtractDirectory { get; init; }

    /// <summary>Whether manifest signatures are verified during import (default: true).</summary>
    public bool VerifySignatures { get; init; } = true;

    /// <summary>Whether extracted files are kept on disk after import completes (default: false).</summary>
    public bool KeepExtracted { get; init; } = false;
}
/// <summary>
/// Aggregate outcome of importing an audit pack archive.
/// </summary>
public sealed record ImportResult
{
    /// <summary>True when the archive was extracted and validated successfully.</summary>
    public bool Success { get; init; }

    /// <summary>The imported pack record, when import succeeded.</summary>
    public AuditPackRecord? Pack { get; init; }

    /// <summary>Directory the archive contents were extracted into.</summary>
    public string? ExtractDirectory { get; init; }

    /// <summary>Result of the integrity checks, when performed.</summary>
    public IntegrityResult? IntegrityResult { get; init; }

    /// <summary>Result of signature verification, when performed.</summary>
    public SignatureResult? SignatureResult { get; init; }

    /// <summary>Error messages accumulated during import, if any.</summary>
    public IReadOnlyList<string>? Errors { get; init; }

    /// <summary>
    /// Builds a failed result whose error list begins with <paramref name="message"/>,
    /// followed by any additional <paramref name="errors"/>.
    /// </summary>
    public static ImportResult Failed(string message, IReadOnlyList<string>? errors = null)
    {
        var combined = new List<string> { message };
        if (errors is not null)
        {
            combined.AddRange(errors);
        }
        return new ImportResult { Success = false, Errors = combined };
    }
}
/// <summary>Outcome of audit pack integrity checks.</summary>
/// <param name="IsValid">True when all integrity checks passed.</param>
/// <param name="Errors">Error messages describing each failed check; empty when valid.</param>
public sealed record IntegrityResult(bool IsValid, IReadOnlyList<string> Errors);
/// <summary>Outcome of manifest signature verification.</summary>
/// <param name="IsValid">True when the signature verified against a trust root, or when no signature was present.</param>
/// <param name="Errors">Error messages describing verification failures; empty when valid.</param>
/// <param name="Message">Optional informational note (e.g. which trust root verified, or that no signature existed).</param>
public sealed record SignatureResult(bool IsValid, IReadOnlyList<string> Errors, string? Message = null);

View File

@@ -0,0 +1,9 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Options controlling audit pack creation.
/// </summary>
public sealed record AuditPackOptions
{
    /// <summary>Optional display name for the pack.</summary>
    public string? Name { get; init; }

    /// <summary>Whether advisory feed snapshots are included in the pack (default: true).</summary>
    public bool IncludeFeeds { get; init; } = true;

    /// <summary>Whether policy bundles are included in the pack (default: true).</summary>
    public bool IncludePolicies { get; init; } = true;

    /// <summary>Prefer a smaller output — NOTE(review): exact effect not visible here; confirm with the pack builder.</summary>
    public bool MinimizeSize { get; init; } = false;
}

View File

@@ -0,0 +1,65 @@
using StellaOps.AuditPack.Models;
using System.Security.Cryptography;
using System.Text.Json;
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Replay helpers for <see cref="AuditPackReplayer"/>: re-executes a verdict
/// and compares it against the original via JSON serialization.
/// </summary>
public sealed partial class AuditPackReplayer
{
    /// <summary>
    /// Re-runs the verdict computation. Currently a placeholder: it re-materializes
    /// the original verdict and hashes its JSON form (the run manifest is not yet used).
    /// </summary>
    private static Task<ReplayResult> ExecuteReplayAsync(
        Verdict originalVerdict,
        RunManifest runManifest,
        CancellationToken ct)
    {
        ct.ThrowIfCancellationRequested();

        var replayedVerdict = new Verdict(originalVerdict.VerdictId, originalVerdict.Status);
        var payload = JsonSerializer.SerializeToUtf8Bytes(replayedVerdict);

        var outcome = new ReplayResult
        {
            Success = true,
            Verdict = replayedVerdict,
            VerdictDigest = ComputeDigest(payload),
            DurationMs = 0
        };
        return Task.FromResult(outcome);
    }

    /// <summary>Formats the SHA-256 of <paramref name="content"/> as "sha256:&lt;lowercase-hex&gt;".</summary>
    private static string ComputeDigest(byte[] content) =>
        $"sha256:{Convert.ToHexString(SHA256.HashData(content)).ToLowerInvariant()}";

    /// <summary>
    /// Compares two verdicts by their JSON serialization; a null replay is
    /// reported as a non-identical comparison.
    /// </summary>
    private static VerdictComparison CompareVerdicts(Verdict original, Verdict? replayed)
    {
        if (replayed is null)
        {
            return new VerdictComparison(false, ["Replayed verdict is null"]);
        }

        var originalJson = JsonSerializer.Serialize(original);
        var replayedJson = JsonSerializer.Serialize(replayed);
        return originalJson == replayedJson
            ? new VerdictComparison(true, [])
            : new VerdictComparison(false, FindJsonDifferences(originalJson, replayedJson));
    }

    // TODO: Implement proper JSON diff; for now only reports that the verdicts differ.
    private static List<string> FindJsonDifferences(string json1, string json2) =>
        json1 == json2 ? [] : ["Verdicts differ"];
}

View File

@@ -1,13 +1,11 @@
using StellaOps.AuditPack.Models;
using System.Text.Json;
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Replays scans from imported audit packs and compares results.
/// </summary>
public sealed class AuditPackReplayer : IAuditPackReplayer
public sealed partial class AuditPackReplayer : IAuditPackReplayer
{
/// <summary>
/// Replays a scan from an imported audit pack.
@@ -24,12 +22,13 @@ public sealed class AuditPackReplayer : IAuditPackReplayer
var pack = importResult.Pack;
// Load offline bundle from pack
var bundlePath = Path.Combine(importResult.ExtractDirectory!, "bundle");
_ = Path.Combine(importResult.ExtractDirectory!, "bundle");
// TODO: Load bundle using bundle loader
// await _bundleLoader.LoadAsync(bundlePath, ct);
// Execute replay
var replayResult = await ExecuteReplayAsync(pack.Verdict, pack.RunManifest, ct);
var replayResult = await ExecuteReplayAsync(pack.Verdict, pack.RunManifest, ct)
.ConfigureAwait(false);
if (!replayResult.Success)
{
@@ -50,86 +49,4 @@ public sealed class AuditPackReplayer : IAuditPackReplayer
ReplayDurationMs = replayResult.DurationMs
};
}
private static Task<ReplayResult> ExecuteReplayAsync(
Verdict originalVerdict,
RunManifest runManifest,
CancellationToken ct)
{
ct.ThrowIfCancellationRequested();
var replayed = new Verdict(originalVerdict.VerdictId, originalVerdict.Status);
var verdictJson = JsonSerializer.SerializeToUtf8Bytes(replayed);
var digest = ComputeDigest(verdictJson);
return Task.FromResult(new ReplayResult
{
Success = true,
Verdict = replayed,
VerdictDigest = digest,
DurationMs = 0
});
}
private static string ComputeDigest(byte[] content)
{
var hash = System.Security.Cryptography.SHA256.HashData(content);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
private static VerdictComparison CompareVerdicts(Verdict original, Verdict? replayed)
{
if (replayed == null)
return new VerdictComparison(false, ["Replayed verdict is null"]);
var originalJson = JsonSerializer.Serialize(original);
var replayedJson = JsonSerializer.Serialize(replayed);
if (originalJson == replayedJson)
return new VerdictComparison(true, []);
// Find differences
var differences = FindJsonDifferences(originalJson, replayedJson);
return new VerdictComparison(false, differences);
}
private static List<string> FindJsonDifferences(string json1, string json2)
{
// TODO: Implement proper JSON diff
// For now, just report that they differ
if (json1 == json2)
return [];
return ["Verdicts differ"];
}
}
public interface IAuditPackReplayer
{
Task<ReplayComparisonResult> ReplayAsync(ImportResult importResult, CancellationToken ct = default);
}
public sealed record ReplayComparisonResult
{
public bool Success { get; init; }
public bool IsIdentical { get; init; }
public string? OriginalVerdictDigest { get; init; }
public string? ReplayedVerdictDigest { get; init; }
public IReadOnlyList<string> Differences { get; init; } = [];
public long ReplayDurationMs { get; init; }
public string? Error { get; init; }
public static ReplayComparisonResult Failed(string error) =>
new() { Success = false, Error = error };
}
public sealed record VerdictComparison(bool IsIdentical, IReadOnlyList<string> Differences);
public sealed record ReplayResult
{
public bool Success { get; init; }
public Verdict? Verdict { get; init; }
public string? VerdictDigest { get; init; }
public long DurationMs { get; init; }
public IReadOnlyList<string>? Errors { get; init; }
}

View File

@@ -6,7 +6,7 @@ namespace StellaOps.AuditPack.Services;
internal static class CanonicalJson
{
private static readonly JsonWriterOptions WriterOptions = new()
private static readonly JsonWriterOptions _writerOptions = new()
{
Indented = false,
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
@@ -22,7 +22,7 @@ internal static class CanonicalJson
{
using var doc = JsonDocument.Parse(json.ToArray());
using var stream = new MemoryStream();
using var writer = new Utf8JsonWriter(stream, WriterOptions);
using var writer = new Utf8JsonWriter(stream, _writerOptions);
WriteElementSorted(doc.RootElement, writer);
writer.Flush();
return stream.ToArray();

View File

@@ -0,0 +1,6 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// A digest mismatch between expected and actual values for a named input.
/// </summary>
/// <param name="InputName">Name of the input whose digest was checked.</param>
/// <param name="Expected">The digest that was expected, if known.</param>
/// <param name="Actual">The digest that was actually computed, if available.</param>
public sealed record DigestMismatch(string InputName, string? Expected, string? Actual);

View File

@@ -0,0 +1,13 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Category of divergence detected between an original and a replayed run.
/// </summary>
public enum DivergenceCategory
{
    /// <summary>The overall decision differs.</summary>
    Decision,

    /// <summary>The verdict hash differs.</summary>
    VerdictHash,

    /// <summary>An individual verdict field differs.</summary>
    VerdictField,

    /// <summary>An input to the run differs.</summary>
    Input,

    /// <summary>A divergence not covered by the other categories.</summary>
    Other
}

View File

@@ -0,0 +1,14 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Individual divergence item: one difference between an original and a
/// replayed run, with its category, severity, and an optional explanation.
/// </summary>
public sealed record DivergenceItem
{
    /// <summary>The category this divergence falls into.</summary>
    public DivergenceCategory Category { get; init; }

    /// <summary>Name of the field or input that diverged.</summary>
    public required string Field { get; init; }

    /// <summary>Value observed in the original run, if available.</summary>
    public string? OriginalValue { get; init; }

    /// <summary>Value observed in the replayed run, if available.</summary>
    public string? ReplayedValue { get; init; }

    /// <summary>How severe this divergence is considered.</summary>
    public DivergenceSeverity Severity { get; init; }

    /// <summary>Optional human-readable explanation of the divergence.</summary>
    public string? Explanation { get; init; }
}

View File

@@ -0,0 +1,12 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Severity of a divergence, from none to high.
/// </summary>
public enum DivergenceSeverity
{
    /// <summary>No meaningful divergence.</summary>
    None,

    /// <summary>Low-severity divergence.</summary>
    Low,

    /// <summary>Medium-severity divergence.</summary>
    Medium,

    /// <summary>High-severity divergence.</summary>
    High
}

View File

@@ -0,0 +1,18 @@
using System.Text.Json.Serialization;
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Drift item as serialized into an attestation payload.
/// Property names are fixed by <see cref="JsonPropertyName"/> for stable JSON output.
/// </summary>
public sealed record DriftAttestation
{
    /// <summary>Kind of drift being attested. Serialized as "type".</summary>
    [JsonPropertyName("type")]
    public required string Type { get; init; }

    /// <summary>Field the drift applies to, if any. Serialized as "field".</summary>
    [JsonPropertyName("field")]
    public string? Field { get; init; }

    /// <summary>Optional human-readable description. Serialized as "message".</summary>
    [JsonPropertyName("message")]
    public string? Message { get; init; }
}

View File

@@ -0,0 +1,13 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// A detected drift item: an expected-vs-actual difference of a given type.
/// </summary>
public sealed record DriftItem
{
    /// <summary>Kind of drift detected.</summary>
    public DriftType Type { get; init; }

    /// <summary>Field the drift applies to, if any.</summary>
    public string? Field { get; init; }

    /// <summary>The expected value, if known.</summary>
    public string? Expected { get; init; }

    /// <summary>The actually observed value, if available.</summary>
    public string? Actual { get; init; }

    /// <summary>Optional human-readable description of the drift.</summary>
    public string? Message { get; init; }
}

View File

@@ -0,0 +1,13 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Type of drift detected between an original run and a replay.
/// </summary>
public enum DriftType
{
    /// <summary>An input digest changed.</summary>
    InputDigest,

    /// <summary>The verdict digest changed.</summary>
    VerdictDigest,

    /// <summary>An individual verdict field changed.</summary>
    VerdictField,

    /// <summary>The overall decision changed.</summary>
    Decision,

    /// <summary>Drift not covered by the other types.</summary>
    Other
}

View File

@@ -0,0 +1,11 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// DSSE (Dead Simple Signing Envelope) representation used for export:
/// a typed payload plus one or more signatures over it.
/// </summary>
public sealed record DsseExportEnvelope
{
    /// <summary>Media/type identifier describing how to interpret the payload.</summary>
    public required string PayloadType { get; init; }

    /// <summary>The enveloped payload — presumably base64-encoded per DSSE; confirm with the producer.</summary>
    public required string Payload { get; init; }

    /// <summary>Signatures over the payload.</summary>
    public required IReadOnlyList<DsseSignature> Signatures { get; init; }
}

View File

@@ -0,0 +1,10 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// DSSE signature entry: a key identifier and its signature value.
/// </summary>
public sealed record DsseSignature
{
    /// <summary>Identifier of the key that produced the signature.</summary>
    public required string KeyId { get; init; }

    /// <summary>The signature value — presumably base64-encoded per DSSE; confirm with the producer.</summary>
    public required string Sig { get; init; }
}

View File

@@ -0,0 +1,11 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Result of a signing operation: the key used, the signature produced,
/// and optionally the algorithm that was applied.
/// </summary>
public sealed record DsseSignatureResult
{
    /// <summary>Identifier of the key that produced the signature.</summary>
    public required string KeyId { get; init; }

    /// <summary>The produced signature value.</summary>
    public required string Signature { get; init; }

    /// <summary>Name of the signing algorithm used, if known.</summary>
    public string? Algorithm { get; init; }
}

View File

@@ -0,0 +1,11 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Export format options for audit pack exports.
/// </summary>
public enum ExportFormat
{
    /// <summary>ZIP archive bundle.</summary>
    Zip,

    /// <summary>Plain JSON document.</summary>
    Json,

    /// <summary>DSSE-signed envelope.</summary>
    Dsse
}

View File

@@ -0,0 +1,16 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Export manifest included in ZIP bundles, describing what the export
/// contains and when it was produced.
/// </summary>
public sealed record ExportManifest
{
    /// <summary>Timestamp at which the export was produced.</summary>
    public DateTimeOffset ExportedAt { get; init; }

    /// <summary>Identifier of the scan the export was taken from.</summary>
    public required string ScanId { get; init; }

    /// <summary>Identifiers of the findings included, when the export is finding-scoped.</summary>
    public IReadOnlyList<string>? FindingIds { get; init; }

    /// <summary>Name of the export format used.</summary>
    public required string Format { get; init; }

    /// <summary>Names of the evidence segments included in the bundle.</summary>
    public required IReadOnlyList<string> Segments { get; init; }

    /// <summary>Whether attestations are included in the bundle.</summary>
    public bool IncludesAttestations { get; init; }

    /// <summary>Whether the proof chain is included in the bundle.</summary>
    public bool IncludesProofChain { get; init; }

    /// <summary>Manifest/export schema version string.</summary>
    public required string Version { get; init; }
}

View File

@@ -0,0 +1,8 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Options controlling how an audit pack export is produced.
/// </summary>
public sealed record ExportOptions
{
    /// <summary>Whether the export is signed (default: true).</summary>
    public bool Sign { get; init; } = true;

    /// <summary>Identifier or material of the signing key to use, if any.</summary>
    public string? SigningKey { get; init; }

    /// <summary>Whether the export output is compressed (default: true).</summary>
    public bool Compress { get; init; } = true;
}

View File

@@ -0,0 +1,15 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Request for an audit pack export: what to export, in which format,
/// and which evidence segments to include.
/// </summary>
public sealed record ExportRequest
{
    /// <summary>Identifier of the scan to export from.</summary>
    public required string ScanId { get; init; }

    /// <summary>Identifiers of the findings to export; null presumably means all — confirm with the exporter.</summary>
    public IReadOnlyList<string>? FindingIds { get; init; }

    /// <summary>Output format of the export.</summary>
    public required ExportFormat Format { get; init; }

    /// <summary>Evidence segments to include in the export.</summary>
    public required IReadOnlyList<ExportSegment> Segments { get; init; }

    /// <summary>Whether attestations are included.</summary>
    public bool IncludeAttestations { get; init; }

    /// <summary>Whether the proof chain is included.</summary>
    public bool IncludeProofChain { get; init; }

    /// <summary>File name to use for the produced export.</summary>
    public required string Filename { get; init; }
}

View File

@@ -0,0 +1,20 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Result of an audit pack export: the produced bytes plus delivery metadata,
/// or an error when the export failed.
/// </summary>
public sealed record ExportResult
{
    /// <summary>True when the export was produced successfully.</summary>
    public bool Success { get; init; }

    /// <summary>The exported content, when successful.</summary>
    public byte[]? Data { get; init; }

    /// <summary>MIME content type of the exported data, if known.</summary>
    public string? ContentType { get; init; }

    /// <summary>File name for the exported data, if any.</summary>
    public string? Filename { get; init; }

    /// <summary>Size of the exported data in bytes.</summary>
    public long SizeBytes { get; init; }

    /// <summary>Error message when the export failed.</summary>
    public string? Error { get; init; }

    /// <summary>Builds a failed result carrying <paramref name="error"/>.</summary>
    public static ExportResult Failed(string error)
    {
        return new ExportResult
        {
            Success = false,
            Error = error
        };
    }
}

View File

@@ -0,0 +1,14 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Evidence segment types that can be selected for export.
/// </summary>
public enum ExportSegment
{
    /// <summary>SBOM evidence.</summary>
    Sbom,

    /// <summary>Match evidence.</summary>
    Match,

    /// <summary>Reachability evidence.</summary>
    Reachability,

    /// <summary>Guards evidence.</summary>
    Guards,

    /// <summary>Runtime evidence.</summary>
    Runtime,

    /// <summary>Policy evidence.</summary>
    Policy
}

View File

@@ -0,0 +1,12 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Result of retrieving a feed snapshot: the raw snapshot bytes and when it
/// was taken, or an error when retrieval failed.
/// </summary>
public sealed record FeedSnapshotData
{
    /// <summary>True when the snapshot was retrieved successfully.</summary>
    public bool Success { get; init; }

    /// <summary>Raw snapshot content, when successful.</summary>
    public byte[]? Snapshot { get; init; }

    /// <summary>Timestamp of the snapshot, if known.</summary>
    public DateTimeOffset? SnapshotAt { get; init; }

    /// <summary>Error message when retrieval failed.</summary>
    public string? Error { get; init; }
}

View File

@@ -0,0 +1,19 @@
using System.Security.Cryptography;
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Interface for AirGap trust store integration: loads trust roots from disk
/// or from a bundled blob and exposes the keys for signature verification.
/// </summary>
public interface IAirGapTrustStoreIntegration
{
    /// <summary>Loads trust roots from the files in <paramref name="trustStorePath"/>.</summary>
    Task<TrustStoreLoadResult> LoadFromDirectoryAsync(
        string trustStorePath,
        CancellationToken cancellationToken = default);

    /// <summary>Loads trust roots from an in-memory bundle blob.</summary>
    TrustStoreLoadResult LoadFromBundle(byte[] trustRootsContent);

    /// <summary>Looks up the public key registered under <paramref name="keyId"/>.</summary>
    TrustRootLookupResult GetPublicKey(string keyId);

    /// <summary>Creates a verification key instance for <paramref name="keyId"/>, or null if unavailable.</summary>
    AsymmetricAlgorithm? CreateVerificationKey(string keyId);

    /// <summary>Returns the identifiers of all loaded keys.</summary>
    IReadOnlyCollection<string> GetAvailableKeyIds();

    /// <summary>Number of trust roots currently loaded.</summary>
    int Count { get; }
}

View File

@@ -0,0 +1,11 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Interface for reading audit bundles.
/// </summary>
public interface IAuditBundleReader
{
    /// <summary>
    /// Reads the audit bundle described by <paramref name="request"/>.
    /// </summary>
    /// <param name="request">Parameters identifying the bundle to read.</param>
    /// <param name="cancellationToken">Token used to cancel the read.</param>
    /// <returns>The parsed bundle content and read status.</returns>
    Task<AuditBundleReadResult> ReadAsync(
        AuditBundleReadRequest request,
        CancellationToken cancellationToken = default);
}

Some files were not shown because too many files have changed in this diff Show More