sprints and audit work
This commit is contained in:
@@ -0,0 +1,209 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ChecksumFileWriter.cs
|
||||
// Sprint: SPRINT_20260106_003_003_EVIDENCE_export_bundle
|
||||
// Task: T004
|
||||
// Description: Writes checksums.sha256 file in standard format.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Text;
|
||||
using StellaOps.EvidenceLocker.Export.Models;
|
||||
|
||||
namespace StellaOps.EvidenceLocker.Export;
|
||||
|
||||
/// <summary>
/// Writes and parses checksums.sha256 files in BSD-style format.
/// Format: SHA256 (filename) = hexdigest
/// </summary>
public static class ChecksumFileWriter
{
    /// <summary>
    /// Generates checksum file content from a bundle manifest.
    /// </summary>
    /// <param name="manifest">Bundle manifest with artifact entries.</param>
    /// <returns>Checksums file content in BSD format.</returns>
    public static string Generate(BundleManifest manifest)
    {
        ArgumentNullException.ThrowIfNull(manifest);

        var sb = new StringBuilder();
        sb.AppendLine("# Evidence Bundle Checksums");
        sb.AppendLine($"# Bundle ID: {manifest.BundleId}");
        sb.AppendLine($"# Generated: {manifest.CreatedAt:O}");
        sb.AppendLine();

        // manifest.json itself is intentionally absent here: its digest can only
        // be computed after the manifest is serialized, during archive creation.

        // Emit artifacts in ordinal path order so output is deterministic.
        foreach (var artifact in manifest.AllArtifacts.OrderBy(a => a.Path, StringComparer.Ordinal))
        {
            sb.AppendLine(FormatEntry(artifact.Path, artifact.Digest));
        }

        // Public keys are listed as comments only — their digests are computed separately.
        foreach (var key in manifest.PublicKeys.OrderBy(k => k.Path, StringComparer.Ordinal))
        {
            sb.AppendLine($"# Key: {key.Path} (KeyId: {key.KeyId})");
        }

        return sb.ToString();
    }

    /// <summary>
    /// Generates checksum entries from a list of file digests.
    /// </summary>
    /// <param name="entries">File path and digest pairs.</param>
    /// <returns>Checksums file content.</returns>
    public static string Generate(IEnumerable<(string Path, string Digest)> entries)
    {
        ArgumentNullException.ThrowIfNull(entries);

        var sb = new StringBuilder();
        // Ordinal path ordering keeps output deterministic regardless of input order.
        foreach (var (path, digest) in entries.OrderBy(e => e.Path, StringComparer.Ordinal))
        {
            sb.AppendLine(FormatEntry(path, digest));
        }
        return sb.ToString();
    }

    /// <summary>
    /// Formats a single checksum entry in BSD format.
    /// </summary>
    /// <param name="path">File path (relative to bundle root).</param>
    /// <param name="digest">SHA256 hex digest.</param>
    /// <returns>Formatted checksum line.</returns>
    public static string FormatEntry(string path, string digest)
    {
        ArgumentNullException.ThrowIfNull(path);
        ArgumentNullException.ThrowIfNull(digest);

        // BSD format: SHA256 (filename) = hexdigest
        // Normalize path separators to forward slash for cross-platform stability.
        var normalizedPath = path.Replace('\\', '/');
        return $"SHA256 ({normalizedPath}) = {digest.ToLowerInvariant()}";
    }

    /// <summary>
    /// Parses a checksums file and returns path-digest pairs.
    /// Comment lines (starting with '#') and blank lines are skipped;
    /// unparseable lines are silently ignored.
    /// </summary>
    /// <param name="content">Checksums file content.</param>
    /// <returns>Parsed entries.</returns>
    public static IReadOnlyList<ChecksumEntry> Parse(string content)
    {
        ArgumentNullException.ThrowIfNull(content);

        var entries = new List<ChecksumEntry>();
        // Split on '\n'; any trailing '\r' from CRLF files is removed by Trim below.
        var lines = content.Split('\n', StringSplitOptions.RemoveEmptyEntries);

        foreach (var line in lines)
        {
            var trimmed = line.Trim();
            if (string.IsNullOrEmpty(trimmed) || trimmed.StartsWith('#'))
            {
                continue;
            }

            var entry = ParseEntry(trimmed);
            if (entry is not null)
            {
                entries.Add(entry);
            }
        }

        return entries.AsReadOnly();
    }

    /// <summary>
    /// Parses a single checksum entry line.
    /// Supports BSD format ("SHA256 (filename) = hexdigest") and
    /// GNU format ("hexdigest  filename").
    /// </summary>
    /// <param name="line">Line in BSD or GNU format.</param>
    /// <returns>Parsed entry, or null if the line is not a valid entry.</returns>
    public static ChecksumEntry? ParseEntry(string line)
    {
        if (string.IsNullOrWhiteSpace(line))
        {
            return null;
        }

        // Try BSD format first: SHA256 (filename) = hexdigest
        if (line.StartsWith("SHA256 (", StringComparison.OrdinalIgnoreCase))
        {
            // Scan from the RIGHT so filenames that themselves contain ')' or '='
            // parse correctly: the digest follows the last '=', and the path is
            // closed by the last ')' before it. (Scanning left-to-right with
            // IndexOf would truncate such paths.)
            var equalsIndex = line.LastIndexOf('=');
            if (equalsIndex > 8)
            {
                var closeParenIndex = line.LastIndexOf(')', equalsIndex - 1);
                if (closeParenIndex > 8)
                {
                    var path = line[8..closeParenIndex];
                    var digest = line[(equalsIndex + 1)..].Trim();
                    if (IsSha256Hex(digest))
                    {
                        return new ChecksumEntry(path, digest, ChecksumAlgorithm.SHA256);
                    }
                }
            }

            // A line starting with "SHA256 (" can never be a valid GNU entry.
            return null;
        }

        // Try GNU format: hexdigest filename (canonically two spaces; the second
        // split part is trimmed so one or more separator spaces are accepted).
        var parts = line.Split(' ', 2, StringSplitOptions.None);
        if (parts.Length == 2 && IsSha256Hex(parts[0]))
        {
            return new ChecksumEntry(parts[1].Trim(), parts[0], ChecksumAlgorithm.SHA256);
        }

        return null;
    }

    // True when the candidate is exactly 64 hexadecimal characters (a SHA-256 digest).
    private static bool IsSha256Hex(string candidate) =>
        candidate.Length == 64 && candidate.All(Uri.IsHexDigit);

    /// <summary>
    /// Verifies all checksums in a file against computed digests.
    /// </summary>
    /// <param name="entries">Parsed checksum entries.</param>
    /// <param name="computeDigest">Function to compute the digest for a path;
    /// returns null when the file does not exist.</param>
    /// <returns>Verification results, one per entry, in input order.</returns>
    public static IReadOnlyList<ChecksumVerification> Verify(
        IEnumerable<ChecksumEntry> entries,
        Func<string, string?> computeDigest)
    {
        ArgumentNullException.ThrowIfNull(entries);
        ArgumentNullException.ThrowIfNull(computeDigest);

        var results = new List<ChecksumVerification>();

        foreach (var entry in entries)
        {
            var computed = computeDigest(entry.Path);
            if (computed is null)
            {
                results.Add(new ChecksumVerification(entry.Path, false, "File not found"));
            }
            else if (string.Equals(computed, entry.Digest, StringComparison.OrdinalIgnoreCase))
            {
                results.Add(new ChecksumVerification(entry.Path, true, null));
            }
            else
            {
                results.Add(new ChecksumVerification(entry.Path, false, $"Digest mismatch: expected {entry.Digest}, got {computed}"));
            }
        }

        return results.AsReadOnly();
    }
}
|
||||
|
||||
/// <summary>
/// A parsed checksum entry: a bundle-relative file path, its hex digest,
/// and the algorithm that produced the digest.
/// </summary>
public sealed record ChecksumEntry(string Path, string Digest, ChecksumAlgorithm Algorithm);
|
||||
|
||||
/// <summary>
/// Result of verifying a single checksum. <paramref name="Error"/> is null
/// when <paramref name="Valid"/> is true; otherwise it describes the failure
/// (missing file or digest mismatch).
/// </summary>
public sealed record ChecksumVerification(string Path, bool Valid, string? Error);
|
||||
|
||||
/// <summary>
/// Supported checksum algorithms.
/// NOTE(review): only SHA256 is currently produced/parsed by <see cref="ChecksumFileWriter"/>;
/// SHA384/SHA512 are reserved for future use.
/// </summary>
public enum ChecksumAlgorithm
{
    /// <summary>SHA-256 (64 hex characters).</summary>
    SHA256,

    /// <summary>SHA-384 (96 hex characters).</summary>
    SHA384,

    /// <summary>SHA-512 (128 hex characters).</summary>
    SHA512
}
|
||||
@@ -0,0 +1,43 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// DependencyInjectionRoutine.cs
|
||||
// Sprint: SPRINT_20260106_003_003_EVIDENCE_export_bundle
|
||||
// Task: T007
|
||||
// Description: Dependency injection registration for export services.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.DependencyInjection;
|
||||
|
||||
namespace StellaOps.EvidenceLocker.Export;
|
||||
|
||||
/// <summary>
/// Service-collection extension methods that wire up the evidence bundle
/// export pipeline.
/// </summary>
public static class DependencyInjectionRoutine
{
    /// <summary>
    /// Registers the evidence bundle export services: the system clock and the
    /// tar.gz bundle exporter.
    /// </summary>
    /// <param name="services">Service collection to register into.</param>
    /// <returns>The same service collection, for chaining.</returns>
    public static IServiceCollection AddEvidenceBundleExport(this IServiceCollection services) =>
        services
            .AddSingleton(TimeProvider.System)
            .AddScoped<IEvidenceBundleExporter, TarGzBundleExporter>();

    /// <summary>
    /// Registers the evidence bundle export services together with a custom
    /// bundle data provider implementation.
    /// </summary>
    /// <typeparam name="TProvider">Data provider implementation type.</typeparam>
    /// <param name="services">Service collection to register into.</param>
    /// <returns>The same service collection, for chaining.</returns>
    public static IServiceCollection AddEvidenceBundleExport<TProvider>(this IServiceCollection services)
        where TProvider : class, IBundleDataProvider =>
        services
            .AddSingleton(TimeProvider.System)
            .AddScoped<IBundleDataProvider, TProvider>()
            .AddScoped<IEvidenceBundleExporter, TarGzBundleExporter>();
}
|
||||
@@ -0,0 +1,138 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IBundleDataProvider.cs
|
||||
// Sprint: SPRINT_20260106_003_003_EVIDENCE_export_bundle
|
||||
// Task: T008, T009, T010, T011
|
||||
// Description: Interface for loading bundle data from storage.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.EvidenceLocker.Export.Models;
|
||||
|
||||
namespace StellaOps.EvidenceLocker.Export;
|
||||
|
||||
/// <summary>
/// Provides access to bundle data from the evidence locker storage.
/// Implementations are registered via the export dependency-injection extensions.
/// </summary>
public interface IBundleDataProvider
{
    /// <summary>
    /// Loads all data for a bundle.
    /// </summary>
    /// <param name="bundleId">Bundle ID.</param>
    /// <param name="tenantId">Optional tenant ID for access control
    /// (NOTE(review): presumably null disables tenant scoping — confirm with implementations).</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Bundle data or null if not found.</returns>
    Task<BundleData?> LoadBundleDataAsync(string bundleId, string? tenantId, CancellationToken cancellationToken);
}
|
||||
|
||||
/// <summary>
/// Complete data for a bundle export. All artifact lists default to empty,
/// so only the categories actually present need to be populated.
/// </summary>
public sealed record BundleData
{
    /// <summary>
    /// Bundle metadata (required).
    /// </summary>
    public required BundleMetadata Metadata { get; init; }

    /// <summary>
    /// SBOM artifacts.
    /// </summary>
    public IReadOnlyList<BundleArtifact> Sboms { get; init; } = [];

    /// <summary>
    /// VEX statement artifacts.
    /// </summary>
    public IReadOnlyList<BundleArtifact> VexStatements { get; init; } = [];

    /// <summary>
    /// Attestation artifacts.
    /// </summary>
    public IReadOnlyList<BundleArtifact> Attestations { get; init; } = [];

    /// <summary>
    /// Policy verdict artifacts.
    /// </summary>
    public IReadOnlyList<BundleArtifact> PolicyVerdicts { get; init; } = [];

    /// <summary>
    /// Scan result artifacts.
    /// </summary>
    public IReadOnlyList<BundleArtifact> ScanResults { get; init; } = [];

    /// <summary>
    /// Public keys for verification.
    /// </summary>
    public IReadOnlyList<BundleKeyData> PublicKeys { get; init; } = [];
}
|
||||
|
||||
/// <summary>
/// An artifact to include in the bundle: raw content bytes plus the metadata
/// needed to place and describe it in the archive.
/// </summary>
public sealed record BundleArtifact
{
    /// <summary>
    /// File name within the category directory (not a full path).
    /// </summary>
    public required string FileName { get; init; }

    /// <summary>
    /// Artifact content bytes.
    /// </summary>
    public required byte[] Content { get; init; }

    /// <summary>
    /// MIME type of the content.
    /// </summary>
    public required string MediaType { get; init; }

    /// <summary>
    /// Format version (e.g., "cyclonedx-1.7"); null when unknown.
    /// </summary>
    public string? Format { get; init; }

    /// <summary>
    /// Subject of the artifact (e.g., an image digest or CVE identifier); optional.
    /// </summary>
    public string? Subject { get; init; }
}
|
||||
|
||||
/// <summary>
/// Public key data for bundle export. Mirrors the manifest's key entry but
/// carries the actual PEM material to be written into the keys directory.
/// </summary>
public sealed record BundleKeyData
{
    /// <summary>
    /// File name for the key within the keys directory.
    /// </summary>
    public required string FileName { get; init; }

    /// <summary>
    /// PEM-encoded public key.
    /// </summary>
    public required string PublicKeyPem { get; init; }

    /// <summary>
    /// Key identifier (fingerprint or key ID).
    /// </summary>
    public required string KeyId { get; init; }

    /// <summary>
    /// Key algorithm (e.g., "ecdsa-p256", "ed25519").
    /// </summary>
    public required string Algorithm { get; init; }

    /// <summary>
    /// Key purpose; defaults to "signing".
    /// </summary>
    public string Purpose { get; init; } = "signing";

    /// <summary>
    /// Key issuer; optional.
    /// </summary>
    public string? Issuer { get; init; }

    /// <summary>
    /// Key expiration; null when the key does not expire.
    /// </summary>
    public DateTimeOffset? ExpiresAt { get; init; }
}
|
||||
@@ -0,0 +1,158 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// IEvidenceBundleExporter.cs
|
||||
// Sprint: SPRINT_20260106_003_003_EVIDENCE_export_bundle
|
||||
// Task: T006
|
||||
// Description: Interface for exporting evidence bundles in tar.gz format.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.EvidenceLocker.Export.Models;
|
||||
|
||||
namespace StellaOps.EvidenceLocker.Export;
|
||||
|
||||
/// <summary>
/// Interface for exporting evidence bundles to tar.gz archives.
/// </summary>
public interface IEvidenceBundleExporter
{
    /// <summary>
    /// Exports an evidence bundle to a tar.gz file on disk.
    /// </summary>
    /// <param name="request">Export request with bundle details.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Result with the path to the exported file; failures are reported
    /// via <see cref="ExportResult"/> rather than exceptions
    /// (NOTE(review): inferred from the Success/ErrorCode shape — confirm with implementations).</returns>
    Task<ExportResult> ExportAsync(ExportRequest request, CancellationToken cancellationToken = default);

    /// <summary>
    /// Exports an evidence bundle directly to a caller-supplied stream.
    /// </summary>
    /// <param name="request">Export request with bundle details.</param>
    /// <param name="outputStream">Stream to write the archive to.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Result with export details.</returns>
    Task<ExportResult> ExportToStreamAsync(ExportRequest request, Stream outputStream, CancellationToken cancellationToken = default);
}
|
||||
|
||||
/// <summary>
/// Request to export an evidence bundle. Only <see cref="BundleId"/> is
/// required; all other settings are optional.
/// </summary>
public sealed record ExportRequest
{
    /// <summary>
    /// Evidence locker bundle ID to export.
    /// </summary>
    public required string BundleId { get; init; }

    /// <summary>
    /// Output directory for the exported file (ignored when streaming).
    /// </summary>
    public string? OutputDirectory { get; init; }

    /// <summary>
    /// Optional custom filename (defaults to evidence-bundle-{id}.tar.gz).
    /// </summary>
    public string? FileName { get; init; }

    /// <summary>
    /// Export configuration options; null means exporter defaults.
    /// </summary>
    public ExportConfiguration? Configuration { get; init; }

    /// <summary>
    /// Tenant ID for access control; optional.
    /// </summary>
    public string? TenantId { get; init; }

    /// <summary>
    /// User or service account requesting the export (for auditing); optional.
    /// </summary>
    public string? RequestedBy { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of an export operation. Construct via <see cref="Succeeded"/> or
/// <see cref="Failed"/> to keep the success flag and error fields consistent.
/// </summary>
public sealed record ExportResult
{
    /// <summary>
    /// Whether the export succeeded.
    /// </summary>
    public required bool Success { get; init; }

    /// <summary>
    /// Path to the exported file; null when streamed or failed.
    /// </summary>
    public string? FilePath { get; init; }

    /// <summary>
    /// Size of the exported archive in bytes.
    /// </summary>
    public long SizeBytes { get; init; }

    /// <summary>
    /// SHA256 digest of the exported archive.
    /// </summary>
    public string? ArchiveDigest { get; init; }

    /// <summary>
    /// Bundle manifest included in the export.
    /// </summary>
    public BundleManifest? Manifest { get; init; }

    /// <summary>
    /// Error message when the export failed; null on success.
    /// </summary>
    public string? ErrorMessage { get; init; }

    /// <summary>
    /// Machine-readable error code when the export failed; null on success.
    /// </summary>
    public string? ErrorCode { get; init; }

    /// <summary>
    /// Duration of the export operation.
    /// </summary>
    public TimeSpan Duration { get; init; }

    /// <summary>
    /// Builds a successful result carrying the archive details.
    /// </summary>
    public static ExportResult Succeeded(
        string? filePath,
        long sizeBytes,
        string? archiveDigest,
        BundleManifest manifest,
        TimeSpan duration)
    {
        var result = new ExportResult
        {
            Success = true,
            FilePath = filePath,
            SizeBytes = sizeBytes,
            ArchiveDigest = archiveDigest,
            Manifest = manifest,
            Duration = duration,
        };
        return result;
    }

    /// <summary>
    /// Builds a failed result carrying the error code and message.
    /// </summary>
    public static ExportResult Failed(string errorCode, string errorMessage, TimeSpan duration)
    {
        var result = new ExportResult
        {
            Success = false,
            ErrorCode = errorCode,
            ErrorMessage = errorMessage,
            Duration = duration,
        };
        return result;
    }
}
|
||||
|
||||
/// <summary>
/// Machine-readable error codes for export operations, carried in
/// <c>ExportResult.ErrorCode</c>.
/// </summary>
public static class ExportErrorCodes
{
    /// <summary>The requested bundle ID does not exist.</summary>
    public const string BundleNotFound = "BUNDLE_NOT_FOUND";

    /// <summary>The caller is not allowed to export this bundle.</summary>
    public const string AccessDenied = "ACCESS_DENIED";

    /// <summary>A referenced artifact could not be loaded.</summary>
    public const string ArtifactMissing = "ARTIFACT_MISSING";

    /// <summary>A file-system or stream I/O failure occurred.</summary>
    public const string IoError = "IO_ERROR";

    /// <summary>Building the tar.gz archive failed.</summary>
    public const string CompressionError = "COMPRESSION_ERROR";

    /// <summary>Public verification keys could not be retrieved.</summary>
    public const string KeysNotAvailable = "KEYS_NOT_AVAILABLE";

    /// <summary>The export request or configuration is invalid.</summary>
    public const string InvalidConfiguration = "INVALID_CONFIGURATION";
}
|
||||
@@ -0,0 +1,193 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// MerkleTreeBuilder.cs
|
||||
// Sprint: SPRINT_20260106_003_003_EVIDENCE_export_bundle
|
||||
// Task: T012
|
||||
// Description: Merkle tree builder for bundle integrity verification.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
|
||||
namespace StellaOps.EvidenceLocker.Export;
|
||||
|
||||
/// <summary>
/// Builds Merkle trees for bundle integrity verification.
/// Leaves are SHA-256 digests; each parent is SHA256(left || right). Leaves are
/// always ordered by ordinal string comparison before the tree is built, so the
/// resulting root is deterministic regardless of input order.
/// </summary>
public static class MerkleTreeBuilder
{
    /// <summary>
    /// Computes the Merkle root hash from a list of leaf digests.
    /// </summary>
    /// <param name="leafDigests">Leaf node digests (SHA-256 hex strings, with or without "sha256:" prefix).</param>
    /// <returns>Root hash as a "sha256:hex" string, or null if the list is empty.</returns>
    public static string? ComputeRoot(IReadOnlyList<string> leafDigests)
    {
        ArgumentNullException.ThrowIfNull(leafDigests);

        if (leafDigests.Count == 0)
        {
            return null;
        }

        // Convert hex strings to byte arrays, in deterministic (ordinal) order.
        var nodes = leafDigests
            .OrderBy(d => d, StringComparer.Ordinal)
            .Select(ParseDigest)
            .ToList();

        // Collapse the tree bottom-up until only the root remains.
        while (nodes.Count > 1)
        {
            nodes = BuildNextLevel(nodes);
        }

        return $"sha256:{Convert.ToHexStringLower(nodes[0])}";
    }

    /// <summary>
    /// Computes the Merkle root from artifact entries.
    /// </summary>
    /// <param name="artifacts">Artifact entries with digests.</param>
    /// <returns>Root hash as a "sha256:hex" string, or null if there are no artifacts.</returns>
    public static string? ComputeRootFromArtifacts(IEnumerable<Models.ArtifactEntry> artifacts)
    {
        ArgumentNullException.ThrowIfNull(artifacts);

        var digests = artifacts
            .Select(a => NormalizeDigest(a.Digest))
            .ToList();

        return ComputeRoot(digests);
    }

    /// <summary>
    /// Verifies that a leaf is included in the tree given an inclusion proof.
    /// </summary>
    /// <param name="leafDigest">Leaf digest to verify.</param>
    /// <param name="proof">Inclusion proof (sibling hashes from leaf to root).</param>
    /// <param name="leafIndex">Index of the leaf in the SORTED tree — the value
    /// implied by the proof from <see cref="GenerateInclusionProof"/>.</param>
    /// <param name="expectedRoot">Expected root hash ("sha256:hex").</param>
    /// <returns>True if the proof is valid.</returns>
    public static bool VerifyInclusion(
        string leafDigest,
        IReadOnlyList<string> proof,
        int leafIndex,
        string expectedRoot)
    {
        ArgumentNullException.ThrowIfNull(leafDigest);
        ArgumentNullException.ThrowIfNull(proof);
        ArgumentNullException.ThrowIfNull(expectedRoot);

        var current = ParseDigest(NormalizeDigest(leafDigest));
        var index = leafIndex;

        foreach (var siblingHex in proof)
        {
            var sibling = ParseDigest(NormalizeDigest(siblingHex));

            // Even index = we are the left child; odd index = the right child.
            current = (index % 2 == 0)
                ? HashPair(current, sibling)
                : HashPair(sibling, current);

            index /= 2;
        }

        var computedRoot = $"sha256:{Convert.ToHexStringLower(current)}";
        return string.Equals(computedRoot, expectedRoot, StringComparison.OrdinalIgnoreCase);
    }

    /// <summary>
    /// Generates an inclusion proof for the leaf at the given index of
    /// <paramref name="leafDigests"/>.
    /// </summary>
    /// <param name="leafDigests">All leaf digests (any order).</param>
    /// <param name="leafIndex">Index of the leaf to prove, into <paramref name="leafDigests"/> as given.</param>
    /// <returns>Inclusion proof as a list of "sha256:hex" sibling hashes, leaf to root;
    /// empty when the input is empty or the index is out of range.</returns>
    public static IReadOnlyList<string> GenerateInclusionProof(
        IReadOnlyList<string> leafDigests,
        int leafIndex)
    {
        ArgumentNullException.ThrowIfNull(leafDigests);

        if (leafDigests.Count == 0 || leafIndex < 0 || leafIndex >= leafDigests.Count)
        {
            return [];
        }

        // The tree is built over the ordinal-sorted digests (same as ComputeRoot),
        // so the caller's index into the unsorted input must be translated to the
        // leaf's position in sorted order. (Previously leafIndex was used directly
        // against the sorted list, yielding a proof for the wrong leaf whenever
        // sorting reordered the input.)
        var orderedDigests = leafDigests
            .OrderBy(d => d, StringComparer.Ordinal)
            .ToList();
        var index = orderedDigests.IndexOf(leafDigests[leafIndex]);

        var proof = new List<string>();
        var nodes = orderedDigests.Select(ParseDigest).ToList();

        while (nodes.Count > 1)
        {
            var siblingIndex = (index % 2 == 0) ? index + 1 : index - 1;

            if (siblingIndex >= 0 && siblingIndex < nodes.Count)
            {
                proof.Add($"sha256:{Convert.ToHexStringLower(nodes[siblingIndex])}");
            }
            else
            {
                // Unpaired node at the end of an odd-sized level: its sibling is itself,
                // matching BuildNextLevel's self-pairing.
                proof.Add($"sha256:{Convert.ToHexStringLower(nodes[index])}");
            }

            nodes = BuildNextLevel(nodes);
            index /= 2;
        }

        return proof.AsReadOnly();
    }

    // Combines each adjacent pair of nodes into a parent hash; an unpaired
    // trailing node on an odd-sized level is hashed with itself.
    private static List<byte[]> BuildNextLevel(List<byte[]> nodes)
    {
        var nextLevel = new List<byte[]>();
        for (var i = 0; i < nodes.Count; i += 2)
        {
            var right = (i + 1 < nodes.Count) ? nodes[i + 1] : nodes[i];
            nextLevel.Add(HashPair(nodes[i], right));
        }
        return nextLevel;
    }

    // Parent hash: SHA256(left || right).
    private static byte[] HashPair(byte[] left, byte[] right)
    {
        var combined = new byte[left.Length + right.Length];
        Buffer.BlockCopy(left, 0, combined, 0, left.Length);
        Buffer.BlockCopy(right, 0, combined, left.Length, right.Length);
        return SHA256.HashData(combined);
    }

    // Decodes a (possibly "sha256:"-prefixed) hex digest into raw bytes.
    private static byte[] ParseDigest(string digest)
    {
        var normalized = NormalizeDigest(digest);
        return Convert.FromHexString(normalized);
    }

    // Strips an optional "sha256:" prefix and lowercases the hex string.
    private static string NormalizeDigest(string digest)
    {
        if (digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
        {
            return digest.Substring(7).ToLowerInvariant();
        }
        return digest.ToLowerInvariant();
    }
}
|
||||
@@ -0,0 +1,252 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// BundleManifest.cs
|
||||
// Sprint: SPRINT_20260106_003_003_EVIDENCE_export_bundle
|
||||
// Task: T001, T002
|
||||
// Description: Bundle directory structure and manifest model for evidence export.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.EvidenceLocker.Export.Models;
|
||||
|
||||
/// <summary>
/// Manifest for an evidence bundle, indexing all artifacts included.
/// Defines the standard bundle directory structure. JSON property order is
/// pinned via <see cref="JsonPropertyOrderAttribute"/> for reproducible output.
/// </summary>
public sealed record BundleManifest
{
    /// <summary>
    /// Manifest schema version.
    /// </summary>
    [JsonPropertyName("schemaVersion")]
    [JsonPropertyOrder(0)]
    public string SchemaVersion { get; init; } = "1.0.0";

    /// <summary>
    /// Unique bundle identifier.
    /// </summary>
    [JsonPropertyName("bundleId")]
    [JsonPropertyOrder(1)]
    public required string BundleId { get; init; }

    /// <summary>
    /// When the bundle was created (UTC ISO-8601).
    /// </summary>
    [JsonPropertyName("createdAt")]
    [JsonPropertyOrder(2)]
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>
    /// Bundle metadata.
    /// </summary>
    [JsonPropertyName("metadata")]
    [JsonPropertyOrder(3)]
    public required BundleMetadata Metadata { get; init; }

    /// <summary>
    /// SBOM artifacts included in the bundle.
    /// </summary>
    [JsonPropertyName("sboms")]
    [JsonPropertyOrder(4)]
    public ImmutableArray<ArtifactEntry> Sboms { get; init; } = ImmutableArray<ArtifactEntry>.Empty;

    /// <summary>
    /// VEX statement artifacts included in the bundle.
    /// </summary>
    [JsonPropertyName("vexStatements")]
    [JsonPropertyOrder(5)]
    public ImmutableArray<ArtifactEntry> VexStatements { get; init; } = ImmutableArray<ArtifactEntry>.Empty;

    /// <summary>
    /// Attestation artifacts (DSSE envelopes) included in the bundle.
    /// </summary>
    [JsonPropertyName("attestations")]
    [JsonPropertyOrder(6)]
    public ImmutableArray<ArtifactEntry> Attestations { get; init; } = ImmutableArray<ArtifactEntry>.Empty;

    /// <summary>
    /// Policy verdict artifacts included in the bundle.
    /// </summary>
    [JsonPropertyName("policyVerdicts")]
    [JsonPropertyOrder(7)]
    public ImmutableArray<ArtifactEntry> PolicyVerdicts { get; init; } = ImmutableArray<ArtifactEntry>.Empty;

    /// <summary>
    /// Scan results included in the bundle.
    /// </summary>
    [JsonPropertyName("scanResults")]
    [JsonPropertyOrder(8)]
    public ImmutableArray<ArtifactEntry> ScanResults { get; init; } = ImmutableArray<ArtifactEntry>.Empty;

    /// <summary>
    /// Public keys for verification.
    /// </summary>
    [JsonPropertyName("publicKeys")]
    [JsonPropertyOrder(9)]
    public ImmutableArray<KeyEntry> PublicKeys { get; init; } = ImmutableArray<KeyEntry>.Empty;

    /// <summary>
    /// Merkle root hash of all artifacts for integrity verification;
    /// omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("merkleRoot")]
    [JsonPropertyOrder(10)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? MerkleRoot { get; init; }

    /// <summary>
    /// Gets all artifact entries in the bundle, category by category
    /// (sboms, vex, attestations, policy verdicts, scan results).
    /// Lazily concatenated — not serialized.
    /// </summary>
    [JsonIgnore]
    public IEnumerable<ArtifactEntry> AllArtifacts =>
        Sboms.Concat(VexStatements).Concat(Attestations).Concat(PolicyVerdicts).Concat(ScanResults);

    /// <summary>
    /// Total count of artifacts in the bundle. Computed from the category
    /// lengths; serialized for convenience but ignored on deserialization
    /// (get-only property).
    /// </summary>
    [JsonPropertyName("totalArtifacts")]
    [JsonPropertyOrder(11)]
    public int TotalArtifacts => Sboms.Length + VexStatements.Length + Attestations.Length +
                                 PolicyVerdicts.Length + ScanResults.Length;
}
|
||||
|
||||
/// <summary>
/// Entry for an artifact in the bundle manifest: where the file lives inside
/// the archive and how to verify/identify it.
/// </summary>
public sealed record ArtifactEntry
{
    /// <summary>
    /// Relative path within the bundle.
    /// </summary>
    [JsonPropertyName("path")]
    [JsonPropertyOrder(0)]
    public required string Path { get; init; }

    /// <summary>
    /// SHA256 digest of the artifact content.
    /// </summary>
    [JsonPropertyName("digest")]
    [JsonPropertyOrder(1)]
    public required string Digest { get; init; }

    /// <summary>
    /// MIME type of the artifact.
    /// </summary>
    [JsonPropertyName("mediaType")]
    [JsonPropertyOrder(2)]
    public required string MediaType { get; init; }

    /// <summary>
    /// Size in bytes.
    /// </summary>
    [JsonPropertyName("size")]
    [JsonPropertyOrder(3)]
    public long Size { get; init; }

    /// <summary>
    /// Artifact type (sbom, vex, attestation, policy, scan).
    /// </summary>
    [JsonPropertyName("type")]
    [JsonPropertyOrder(4)]
    public required string Type { get; init; }

    /// <summary>
    /// Format version (e.g., "cyclonedx-1.7", "spdx-3.0.1", "openvex-1.0");
    /// omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("format")]
    [JsonPropertyOrder(5)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Format { get; init; }

    /// <summary>
    /// Subject of the artifact (e.g., image digest, CVE); omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("subject")]
    [JsonPropertyOrder(6)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Subject { get; init; }
}
|
||||
|
||||
/// <summary>
/// Entry for a public key in the bundle manifest. The key material itself is
/// stored as a separate file at <see cref="Path"/>.
/// </summary>
public sealed record KeyEntry
{
    /// <summary>
    /// Relative path to the key file.
    /// </summary>
    [JsonPropertyName("path")]
    [JsonPropertyOrder(0)]
    public required string Path { get; init; }

    /// <summary>
    /// Key identifier (fingerprint or key ID).
    /// </summary>
    [JsonPropertyName("keyId")]
    [JsonPropertyOrder(1)]
    public required string KeyId { get; init; }

    /// <summary>
    /// Key algorithm (e.g., "ecdsa-p256", "rsa-4096", "ed25519").
    /// </summary>
    [JsonPropertyName("algorithm")]
    [JsonPropertyOrder(2)]
    public required string Algorithm { get; init; }

    /// <summary>
    /// Key purpose (signing, encryption); defaults to "signing".
    /// </summary>
    [JsonPropertyName("purpose")]
    [JsonPropertyOrder(3)]
    public string Purpose { get; init; } = "signing";

    /// <summary>
    /// Issuer or owner of the key; omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("issuer")]
    [JsonPropertyOrder(4)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Issuer { get; init; }

    /// <summary>
    /// Expiration date of the key; omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("expiresAt")]
    [JsonPropertyOrder(5)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public DateTimeOffset? ExpiresAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// Standard paths within the bundle. All paths are relative to the archive root.
/// </summary>
public static class BundlePaths
{
    /// <summary>Bundle manifest with the artifact index.</summary>
    public const string ManifestFile = "manifest.json";

    /// <summary>Bundle metadata and provenance.</summary>
    public const string MetadataFile = "metadata.json";

    /// <summary>Human-readable bundle description.</summary>
    public const string ReadmeFile = "README.md";

    /// <summary>Checksum verification script for Unix-like systems.</summary>
    public const string VerifyShFile = "verify.sh";

    /// <summary>Checksum verification script for Windows PowerShell.</summary>
    public const string VerifyPs1File = "verify.ps1";

    /// <summary>BSD-style SHA-256 checksum list for bundle files.</summary>
    public const string ChecksumsFile = "checksums.sha256";

    /// <summary>Directory holding public keys for offline verification.</summary>
    public const string KeysDirectory = "keys";

    /// <summary>Directory holding SBOM artifacts.</summary>
    public const string SbomsDirectory = "sboms";

    /// <summary>Directory holding VEX statements.</summary>
    public const string VexDirectory = "vex";

    /// <summary>Directory holding DSSE attestation envelopes.</summary>
    public const string AttestationsDirectory = "attestations";

    /// <summary>Directory holding policy verdicts.</summary>
    public const string PolicyDirectory = "policy";

    /// <summary>Directory holding scan results.</summary>
    public const string ScansDirectory = "scans";
}
|
||||
|
||||
/// <summary>
/// Media types for bundle artifacts.
/// </summary>
public static class BundleMediaTypes
{
    /// <summary>CycloneDX SBOM in JSON form.</summary>
    public const string SbomCycloneDx = "application/vnd.cyclonedx+json";

    /// <summary>SPDX SBOM in JSON form.</summary>
    public const string SbomSpdx = "application/spdx+json";

    /// <summary>OpenVEX statement.</summary>
    public const string VexOpenVex = "application/vnd.openvex+json";

    /// <summary>
    /// CSAF VEX document. NOTE(review): this is the generic JSON media type,
    /// identical to <see cref="PolicyVerdict"/> and <see cref="ScanResult"/>,
    /// so artifacts of these kinds are not distinguishable by media type alone.
    /// </summary>
    public const string VexCsaf = "application/json";

    /// <summary>DSSE attestation envelope.</summary>
    public const string DsseEnvelope = "application/vnd.dsse.envelope+json";

    /// <summary>Policy verdict document (generic JSON).</summary>
    public const string PolicyVerdict = "application/json";

    /// <summary>Scan result document (generic JSON).</summary>
    public const string ScanResult = "application/json";

    /// <summary>PEM-encoded public key file.</summary>
    public const string PublicKeyPem = "application/x-pem-file";
}
|
||||
@@ -0,0 +1,370 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// BundleMetadata.cs
|
||||
// Sprint: SPRINT_20260106_003_003_EVIDENCE_export_bundle
|
||||
// Task: T003
|
||||
// Description: Metadata model for evidence bundles (provenance, timestamps, subject).
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.EvidenceLocker.Export.Models;
|
||||
|
||||
/// <summary>
/// Metadata for an evidence bundle, capturing provenance and context.
/// Serialized into metadata.json with a fixed property order.
/// </summary>
public sealed record BundleMetadata
{
    /// <summary>
    /// Schema version for metadata format. Defaults to "1.0.0".
    /// </summary>
    [JsonPropertyName("schemaVersion")]
    [JsonPropertyOrder(0)]
    public string SchemaVersion { get; init; } = "1.0.0";

    /// <summary>
    /// Primary subject of the bundle (e.g., container image digest).
    /// </summary>
    [JsonPropertyName("subject")]
    [JsonPropertyOrder(1)]
    public required BundleSubject Subject { get; init; }

    /// <summary>
    /// Provenance information for the bundle (creator, export time, pipeline).
    /// </summary>
    [JsonPropertyName("provenance")]
    [JsonPropertyOrder(2)]
    public required BundleProvenance Provenance { get; init; }

    /// <summary>
    /// Time window covered by the evidence in this bundle.
    /// </summary>
    [JsonPropertyName("timeWindow")]
    [JsonPropertyOrder(3)]
    public required TimeWindow TimeWindow { get; init; }

    /// <summary>
    /// Tenant that owns this bundle. Omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("tenant")]
    [JsonPropertyOrder(4)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Tenant { get; init; }

    /// <summary>
    /// Export configuration used to create this bundle. Omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("exportConfig")]
    [JsonPropertyOrder(5)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ExportConfiguration? ExportConfig { get; init; }

    /// <summary>
    /// Additional custom labels. Omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("labels")]
    [JsonPropertyOrder(6)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ImmutableDictionary<string, string>? Labels { get; init; }

    /// <summary>
    /// Compliance standards this bundle is intended to support. Omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("compliance")]
    [JsonPropertyOrder(7)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public ImmutableArray<string>? Compliance { get; init; }
}
|
||||
|
||||
/// <summary>
/// The primary subject of the evidence bundle.
/// </summary>
public sealed record BundleSubject
{
    /// <summary>
    /// Subject type (container_image, source_repo, artifact); see <see cref="SubjectTypes"/>.
    /// </summary>
    [JsonPropertyName("type")]
    [JsonPropertyOrder(0)]
    public required string Type { get; init; }

    /// <summary>
    /// Primary identifier (digest for images, commit SHA for repos).
    /// </summary>
    [JsonPropertyName("digest")]
    [JsonPropertyOrder(1)]
    public required string Digest { get; init; }

    /// <summary>
    /// Human-readable name (image reference, repo URL). Omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("name")]
    [JsonPropertyOrder(2)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Name { get; init; }

    /// <summary>
    /// Tag or version if applicable. Omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("tag")]
    [JsonPropertyOrder(3)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Tag { get; init; }

    /// <summary>
    /// Platform/architecture if applicable. Omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("platform")]
    [JsonPropertyOrder(4)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Platform { get; init; }

    /// <summary>
    /// Registry or repository host. Omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("registry")]
    [JsonPropertyOrder(5)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Registry { get; init; }
}
|
||||
|
||||
/// <summary>
/// Provenance information for the bundle: who created it, when, and from where.
/// </summary>
public sealed record BundleProvenance
{
    /// <summary>
    /// Tool that created this bundle.
    /// </summary>
    [JsonPropertyName("creator")]
    [JsonPropertyOrder(0)]
    public required CreatorInfo Creator { get; init; }

    /// <summary>
    /// When the bundle was exported.
    /// </summary>
    [JsonPropertyName("exportedAt")]
    [JsonPropertyOrder(1)]
    public required DateTimeOffset ExportedAt { get; init; }

    /// <summary>
    /// Original scan ID if this bundle is from a scan. Omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("scanId")]
    [JsonPropertyOrder(2)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? ScanId { get; init; }

    /// <summary>
    /// Evidence locker bundle ID. Omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("evidenceLockerId")]
    [JsonPropertyOrder(3)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? EvidenceLockerId { get; init; }

    /// <summary>
    /// CI/CD pipeline information if available. Omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("pipeline")]
    [JsonPropertyOrder(4)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public PipelineInfo? Pipeline { get; init; }

    /// <summary>
    /// User or service account that requested the export. Omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("exportedBy")]
    [JsonPropertyOrder(5)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? ExportedBy { get; init; }
}
|
||||
|
||||
/// <summary>
/// Information about the tool that created the bundle.
/// </summary>
public sealed record CreatorInfo
{
    /// <summary>
    /// Tool name (e.g., "StellaOps EvidenceLocker").
    /// </summary>
    [JsonPropertyName("name")]
    [JsonPropertyOrder(0)]
    public required string Name { get; init; }

    /// <summary>
    /// Tool version.
    /// </summary>
    [JsonPropertyName("version")]
    [JsonPropertyOrder(1)]
    public required string Version { get; init; }

    /// <summary>
    /// Vendor/organization. Omitted from JSON when null.
    /// </summary>
    [JsonPropertyName("vendor")]
    [JsonPropertyOrder(2)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Vendor { get; init; }
}
|
||||
|
||||
/// <summary>
/// CI/CD pipeline information captured at export time. All fields except
/// <see cref="System"/> are optional and omitted from JSON when null.
/// </summary>
public sealed record PipelineInfo
{
    /// <summary>
    /// CI/CD system name (e.g., "GitLab CI", "GitHub Actions").
    /// </summary>
    [JsonPropertyName("system")]
    [JsonPropertyOrder(0)]
    public required string System { get; init; }

    /// <summary>
    /// Pipeline/workflow ID.
    /// </summary>
    [JsonPropertyName("pipelineId")]
    [JsonPropertyOrder(1)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? PipelineId { get; init; }

    /// <summary>
    /// Job ID within the pipeline.
    /// </summary>
    [JsonPropertyName("jobId")]
    [JsonPropertyOrder(2)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? JobId { get; init; }

    /// <summary>
    /// URL to the pipeline run.
    /// </summary>
    [JsonPropertyName("url")]
    [JsonPropertyOrder(3)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Url { get; init; }

    /// <summary>
    /// Source repository.
    /// </summary>
    [JsonPropertyName("repository")]
    [JsonPropertyOrder(4)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Repository { get; init; }

    /// <summary>
    /// Git commit SHA.
    /// </summary>
    [JsonPropertyName("commitSha")]
    [JsonPropertyOrder(5)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? CommitSha { get; init; }

    /// <summary>
    /// Git branch.
    /// </summary>
    [JsonPropertyName("branch")]
    [JsonPropertyOrder(6)]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Branch { get; init; }
}
|
||||
|
||||
/// <summary>
/// Time window covered by evidence in the bundle.
/// NOTE(review): nothing in this type enforces Earliest &lt;= Latest — callers
/// are assumed to supply a valid ordering; confirm upstream validation.
/// </summary>
public sealed record TimeWindow
{
    /// <summary>
    /// Earliest evidence timestamp.
    /// </summary>
    [JsonPropertyName("earliest")]
    [JsonPropertyOrder(0)]
    public required DateTimeOffset Earliest { get; init; }

    /// <summary>
    /// Latest evidence timestamp.
    /// </summary>
    [JsonPropertyName("latest")]
    [JsonPropertyOrder(1)]
    public required DateTimeOffset Latest { get; init; }
}
|
||||
|
||||
/// <summary>
/// Export configuration options. All content categories are included by default.
/// </summary>
public sealed record ExportConfiguration
{
    /// <summary>
    /// Include SBOMs in export. Defaults to true.
    /// </summary>
    [JsonPropertyName("includeSboms")]
    [JsonPropertyOrder(0)]
    public bool IncludeSboms { get; init; } = true;

    /// <summary>
    /// Include VEX statements in export. Defaults to true.
    /// </summary>
    [JsonPropertyName("includeVex")]
    [JsonPropertyOrder(1)]
    public bool IncludeVex { get; init; } = true;

    /// <summary>
    /// Include attestations in export. Defaults to true.
    /// </summary>
    [JsonPropertyName("includeAttestations")]
    [JsonPropertyOrder(2)]
    public bool IncludeAttestations { get; init; } = true;

    /// <summary>
    /// Include policy verdicts in export. Defaults to true.
    /// </summary>
    [JsonPropertyName("includePolicyVerdicts")]
    [JsonPropertyOrder(3)]
    public bool IncludePolicyVerdicts { get; init; } = true;

    /// <summary>
    /// Include scan results in export. Defaults to true.
    /// </summary>
    [JsonPropertyName("includeScanResults")]
    [JsonPropertyOrder(4)]
    public bool IncludeScanResults { get; init; } = true;

    /// <summary>
    /// Include public keys for offline verification. Defaults to true.
    /// </summary>
    [JsonPropertyName("includeKeys")]
    [JsonPropertyOrder(5)]
    public bool IncludeKeys { get; init; } = true;

    /// <summary>
    /// Include verification scripts (verify.sh / verify.ps1). Defaults to true.
    /// </summary>
    [JsonPropertyName("includeVerifyScripts")]
    [JsonPropertyOrder(6)]
    public bool IncludeVerifyScripts { get; init; } = true;

    /// <summary>
    /// Compression algorithm (gzip, brotli, none). Defaults to "gzip".
    /// NOTE(review): the tar.gz exporter in this module always emits gzip —
    /// confirm whether other values are honored elsewhere.
    /// </summary>
    [JsonPropertyName("compression")]
    [JsonPropertyOrder(7)]
    public string Compression { get; init; } = "gzip";

    /// <summary>
    /// Compression level (1-9), defaults to 6; the exporter maps this onto
    /// coarse framework compression buckets.
    /// </summary>
    [JsonPropertyName("compressionLevel")]
    [JsonPropertyOrder(8)]
    public int CompressionLevel { get; init; } = 6;
}
|
||||
|
||||
/// <summary>
/// Subject types for evidence bundles (values for <see cref="BundleSubject.Type"/>).
/// </summary>
public static class SubjectTypes
{
    /// <summary>A container image, identified by its digest.</summary>
    public const string ContainerImage = "container_image";

    /// <summary>A source repository, identified by a commit SHA.</summary>
    public const string SourceRepository = "source_repo";

    /// <summary>A generic build artifact.</summary>
    public const string Artifact = "artifact";

    /// <summary>A software package.</summary>
    public const string Package = "package";
}
|
||||
@@ -0,0 +1,16 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<Nullable>enable</Nullable>
|
||||
<RootNamespace>StellaOps.EvidenceLocker.Export</RootNamespace>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
<Description>Evidence bundle export library for offline verification</Description>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
|
||||
</ItemGroup>
|
||||
|
||||
</Project>
|
||||
@@ -0,0 +1,545 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// TarGzBundleExporter.cs
|
||||
// Sprint: SPRINT_20260106_003_003_EVIDENCE_export_bundle
|
||||
// Task: T007
|
||||
// Description: Implementation of tar.gz bundle export with streaming support.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Diagnostics;
|
||||
using System.Formats.Tar;
|
||||
using System.IO.Compression;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using Microsoft.Extensions.Logging;
|
||||
using StellaOps.EvidenceLocker.Export.Models;
|
||||
|
||||
namespace StellaOps.EvidenceLocker.Export;
|
||||
|
||||
/// <summary>
|
||||
/// Exports evidence bundles to tar.gz archives.
|
||||
/// </summary>
|
||||
public sealed class TarGzBundleExporter : IEvidenceBundleExporter
|
||||
{
|
||||
    // Collaborators injected via the constructor.
    private readonly ILogger<TarGzBundleExporter> _logger;
    private readonly IBundleDataProvider _dataProvider;
    private readonly TimeProvider _timeProvider;

    // Shared serializer settings for manifest.json / metadata.json output.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = true,
        PropertyNamingPolicy = null // Use explicit JsonPropertyName
    };
|
||||
|
||||
public TarGzBundleExporter(
|
||||
ILogger<TarGzBundleExporter> logger,
|
||||
IBundleDataProvider dataProvider,
|
||||
TimeProvider timeProvider)
|
||||
{
|
||||
_logger = logger;
|
||||
_dataProvider = dataProvider;
|
||||
_timeProvider = timeProvider;
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public async Task<ExportResult> ExportAsync(ExportRequest request, CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
var outputDir = request.OutputDirectory ?? Path.GetTempPath();
|
||||
var fileName = request.FileName ?? $"evidence-bundle-{request.BundleId}.tar.gz";
|
||||
var filePath = Path.Combine(outputDir, fileName);
|
||||
|
||||
_logger.LogInformation("Exporting bundle {BundleId} to {FilePath}", request.BundleId, filePath);
|
||||
|
||||
try
|
||||
{
|
||||
await using var fileStream = new FileStream(filePath, FileMode.Create, FileAccess.Write, FileShare.None);
|
||||
var result = await ExportToStreamInternalAsync(request, fileStream, filePath, cancellationToken);
|
||||
return result with { Duration = stopwatch.Elapsed };
|
||||
}
|
||||
catch (Exception ex) when (ex is not OperationCanceledException)
|
||||
{
|
||||
_logger.LogError(ex, "Failed to export bundle {BundleId}", request.BundleId);
|
||||
return ExportResult.Failed(
|
||||
ExportErrorCodes.IoError,
|
||||
$"Failed to export bundle: {ex.Message}",
|
||||
stopwatch.Elapsed);
|
||||
}
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public async Task<ExportResult> ExportToStreamAsync(
|
||||
ExportRequest request,
|
||||
Stream outputStream,
|
||||
CancellationToken cancellationToken = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(request);
|
||||
ArgumentNullException.ThrowIfNull(outputStream);
|
||||
|
||||
var stopwatch = Stopwatch.StartNew();
|
||||
var result = await ExportToStreamInternalAsync(request, outputStream, null, cancellationToken);
|
||||
return result with { Duration = stopwatch.Elapsed };
|
||||
}
|
||||
|
||||
private async Task<ExportResult> ExportToStreamInternalAsync(
|
||||
ExportRequest request,
|
||||
Stream outputStream,
|
||||
string? filePath,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
// Load bundle data
|
||||
var bundleData = await _dataProvider.LoadBundleDataAsync(request.BundleId, request.TenantId, cancellationToken);
|
||||
if (bundleData is null)
|
||||
{
|
||||
return ExportResult.Failed(ExportErrorCodes.BundleNotFound, $"Bundle {request.BundleId} not found", TimeSpan.Zero);
|
||||
}
|
||||
|
||||
var config = request.Configuration ?? new ExportConfiguration();
|
||||
var now = _timeProvider.GetUtcNow();
|
||||
var checksumEntries = new List<(string Path, string Digest)>();
|
||||
|
||||
// Create manifest builder
|
||||
var manifestBuilder = new BundleManifestBuilder(request.BundleId, now);
|
||||
manifestBuilder.SetMetadata(bundleData.Metadata);
|
||||
|
||||
// We need to build the tar in memory first to compute checksums
|
||||
using var tarStream = new MemoryStream();
|
||||
|
||||
await using (var tarWriter = new TarWriter(tarStream, leaveOpen: true))
|
||||
{
|
||||
// Add SBOMs
|
||||
if (config.IncludeSboms)
|
||||
{
|
||||
foreach (var sbom in bundleData.Sboms)
|
||||
{
|
||||
var entry = await AddArtifactAsync(tarWriter, sbom, BundlePaths.SbomsDirectory, "sbom", cancellationToken);
|
||||
manifestBuilder.AddSbom(entry);
|
||||
checksumEntries.Add((entry.Path, entry.Digest));
|
||||
}
|
||||
}
|
||||
|
||||
// Add VEX statements
|
||||
if (config.IncludeVex)
|
||||
{
|
||||
foreach (var vex in bundleData.VexStatements)
|
||||
{
|
||||
var entry = await AddArtifactAsync(tarWriter, vex, BundlePaths.VexDirectory, "vex", cancellationToken);
|
||||
manifestBuilder.AddVex(entry);
|
||||
checksumEntries.Add((entry.Path, entry.Digest));
|
||||
}
|
||||
}
|
||||
|
||||
// Add attestations
|
||||
if (config.IncludeAttestations)
|
||||
{
|
||||
foreach (var attestation in bundleData.Attestations)
|
||||
{
|
||||
var entry = await AddArtifactAsync(tarWriter, attestation, BundlePaths.AttestationsDirectory, "attestation", cancellationToken);
|
||||
manifestBuilder.AddAttestation(entry);
|
||||
checksumEntries.Add((entry.Path, entry.Digest));
|
||||
}
|
||||
}
|
||||
|
||||
// Add policy verdicts
|
||||
if (config.IncludePolicyVerdicts)
|
||||
{
|
||||
foreach (var verdict in bundleData.PolicyVerdicts)
|
||||
{
|
||||
var entry = await AddArtifactAsync(tarWriter, verdict, BundlePaths.PolicyDirectory, "policy", cancellationToken);
|
||||
manifestBuilder.AddPolicyVerdict(entry);
|
||||
checksumEntries.Add((entry.Path, entry.Digest));
|
||||
}
|
||||
}
|
||||
|
||||
// Add scan results
|
||||
if (config.IncludeScanResults)
|
||||
{
|
||||
foreach (var scan in bundleData.ScanResults)
|
||||
{
|
||||
var entry = await AddArtifactAsync(tarWriter, scan, BundlePaths.ScansDirectory, "scan", cancellationToken);
|
||||
manifestBuilder.AddScanResult(entry);
|
||||
checksumEntries.Add((entry.Path, entry.Digest));
|
||||
}
|
||||
}
|
||||
|
||||
// Add public keys
|
||||
if (config.IncludeKeys)
|
||||
{
|
||||
foreach (var key in bundleData.PublicKeys)
|
||||
{
|
||||
var keyEntry = await AddKeyAsync(tarWriter, key, cancellationToken);
|
||||
manifestBuilder.AddPublicKey(keyEntry);
|
||||
}
|
||||
}
|
||||
|
||||
// Build manifest
|
||||
var manifest = manifestBuilder.Build();
|
||||
|
||||
// Add metadata.json
|
||||
var metadataJson = JsonSerializer.Serialize(manifest.Metadata, JsonOptions);
|
||||
var metadataDigest = await AddTextFileAsync(tarWriter, BundlePaths.MetadataFile, metadataJson, cancellationToken);
|
||||
checksumEntries.Add((BundlePaths.MetadataFile, metadataDigest));
|
||||
|
||||
// Add checksums.sha256
|
||||
var checksumsContent = ChecksumFileWriter.Generate(checksumEntries);
|
||||
var checksumsDigest = await AddTextFileAsync(tarWriter, BundlePaths.ChecksumsFile, checksumsContent, cancellationToken);
|
||||
|
||||
// Add manifest.json (after checksums so it can reference checksum file)
|
||||
var manifestJson = JsonSerializer.Serialize(manifest, JsonOptions);
|
||||
await AddTextFileAsync(tarWriter, BundlePaths.ManifestFile, manifestJson, cancellationToken);
|
||||
|
||||
// Add verify scripts if requested
|
||||
if (config.IncludeVerifyScripts)
|
||||
{
|
||||
await AddTextFileAsync(tarWriter, BundlePaths.VerifyShFile, GenerateVerifyShScript(), cancellationToken);
|
||||
await AddTextFileAsync(tarWriter, BundlePaths.VerifyPs1File, GenerateVerifyPs1Script(), cancellationToken);
|
||||
}
|
||||
|
||||
// Add README
|
||||
await AddTextFileAsync(tarWriter, BundlePaths.ReadmeFile, GenerateReadme(manifest), cancellationToken);
|
||||
|
||||
// Compress to gzip
|
||||
tarStream.Position = 0;
|
||||
string archiveDigest;
|
||||
|
||||
if (filePath is not null)
|
||||
{
|
||||
// Reset file stream position
|
||||
outputStream.Position = 0;
|
||||
}
|
||||
|
||||
await using (var gzipStream = new GZipStream(outputStream, GetCompressionLevel(config.CompressionLevel), leaveOpen: true))
|
||||
{
|
||||
await tarStream.CopyToAsync(gzipStream, cancellationToken);
|
||||
}
|
||||
|
||||
// Compute archive digest
|
||||
outputStream.Position = 0;
|
||||
archiveDigest = await ComputeSha256Async(outputStream, cancellationToken);
|
||||
|
||||
var archiveSize = outputStream.Length;
|
||||
|
||||
_logger.LogInformation(
|
||||
"Exported bundle {BundleId}: {Size} bytes, {ArtifactCount} artifacts",
|
||||
request.BundleId, archiveSize, manifest.TotalArtifacts);
|
||||
|
||||
return ExportResult.Succeeded(
|
||||
filePath,
|
||||
archiveSize,
|
||||
$"sha256:{archiveDigest}",
|
||||
manifest,
|
||||
TimeSpan.Zero);
|
||||
}
|
||||
}
|
||||
|
||||
private async Task<ArtifactEntry> AddArtifactAsync(
|
||||
TarWriter tarWriter,
|
||||
BundleArtifact artifact,
|
||||
string directory,
|
||||
string type,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var path = $"{directory}/{artifact.FileName}";
|
||||
var content = artifact.Content;
|
||||
var digest = await ComputeSha256FromBytesAsync(content);
|
||||
|
||||
var tarEntry = new PaxTarEntry(TarEntryType.RegularFile, path)
|
||||
{
|
||||
DataStream = new MemoryStream(content)
|
||||
};
|
||||
|
||||
await tarWriter.WriteEntryAsync(tarEntry, cancellationToken);
|
||||
|
||||
return new ArtifactEntry
|
||||
{
|
||||
Path = path,
|
||||
Digest = $"sha256:{digest}",
|
||||
MediaType = artifact.MediaType,
|
||||
Size = content.Length,
|
||||
Type = type,
|
||||
Format = artifact.Format,
|
||||
Subject = artifact.Subject
|
||||
};
|
||||
}
|
||||
|
||||
private async Task<KeyEntry> AddKeyAsync(
|
||||
TarWriter tarWriter,
|
||||
BundleKeyData key,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var path = $"{BundlePaths.KeysDirectory}/{key.FileName}";
|
||||
var content = Encoding.UTF8.GetBytes(key.PublicKeyPem);
|
||||
|
||||
var tarEntry = new PaxTarEntry(TarEntryType.RegularFile, path)
|
||||
{
|
||||
DataStream = new MemoryStream(content)
|
||||
};
|
||||
|
||||
await tarWriter.WriteEntryAsync(tarEntry, cancellationToken);
|
||||
|
||||
return new KeyEntry
|
||||
{
|
||||
Path = path,
|
||||
KeyId = key.KeyId,
|
||||
Algorithm = key.Algorithm,
|
||||
Purpose = key.Purpose,
|
||||
Issuer = key.Issuer,
|
||||
ExpiresAt = key.ExpiresAt
|
||||
};
|
||||
}
|
||||
|
||||
private async Task<string> AddTextFileAsync(
|
||||
TarWriter tarWriter,
|
||||
string path,
|
||||
string content,
|
||||
CancellationToken cancellationToken)
|
||||
{
|
||||
var bytes = Encoding.UTF8.GetBytes(content);
|
||||
var digest = await ComputeSha256FromBytesAsync(bytes);
|
||||
|
||||
var tarEntry = new PaxTarEntry(TarEntryType.RegularFile, path)
|
||||
{
|
||||
DataStream = new MemoryStream(bytes)
|
||||
};
|
||||
|
||||
await tarWriter.WriteEntryAsync(tarEntry, cancellationToken);
|
||||
return digest;
|
||||
}
|
||||
|
||||
private static async Task<string> ComputeSha256Async(Stream stream, CancellationToken cancellationToken)
|
||||
{
|
||||
using var sha256 = SHA256.Create();
|
||||
var hash = await sha256.ComputeHashAsync(stream, cancellationToken);
|
||||
return Convert.ToHexStringLower(hash);
|
||||
}
|
||||
|
||||
private static Task<string> ComputeSha256FromBytesAsync(byte[] bytes)
|
||||
{
|
||||
var hash = SHA256.HashData(bytes);
|
||||
return Task.FromResult(Convert.ToHexStringLower(hash));
|
||||
}
|
||||
|
||||
private static CompressionLevel GetCompressionLevel(int level) => level switch
|
||||
{
|
||||
<= 1 => CompressionLevel.Fastest,
|
||||
>= 9 => CompressionLevel.SmallestSize,
|
||||
_ => CompressionLevel.Optimal
|
||||
};
|
||||
|
||||
    /// <summary>
    /// Produces the embedded POSIX shell verification script (verify.sh):
    /// it re-hashes every file listed in checksums.sha256 in BSD format
    /// ("SHA256 (file) = hex") and exits non-zero on the first missing file
    /// or digest mismatch.
    /// </summary>
    // NOTE(review): the script shells out to `sha256sum`, which is not present
    // on stock macOS (`shasum -a 256` would be needed there) — confirm target
    // platforms. Lines that do not match the lowercase-hex BSD pattern are
    // silently skipped by the regex guard.
    private static string GenerateVerifyShScript() => """
        #!/bin/bash
        # Evidence Bundle Verification Script
        # Verifies checksums and signature (if present)

        set -e

        SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
        cd "$SCRIPT_DIR"

        echo "Verifying evidence bundle checksums..."

        if [ ! -f "checksums.sha256" ]; then
            echo "ERROR: checksums.sha256 not found"
            exit 1
        fi

        # Verify all checksums
        while IFS= read -r line; do
            # Skip comments and empty lines
            [[ "$line" =~ ^#.*$ ]] && continue
            [[ -z "$line" ]] && continue

            # Parse BSD format: SHA256 (filename) = digest
            if [[ "$line" =~ ^SHA256\ \(([^)]+)\)\ =\ ([a-f0-9]+)$ ]]; then
                file="${BASH_REMATCH[1]}"
                expected="${BASH_REMATCH[2]}"

                if [ ! -f "$file" ]; then
                    echo "MISSING: $file"
                    exit 1
                fi

                actual=$(sha256sum "$file" | awk '{print $1}')
                if [ "$actual" != "$expected" ]; then
                    echo "FAILED: $file"
                    echo " Expected: $expected"
                    echo " Actual: $actual"
                    exit 1
                fi
                echo "OK: $file"
            fi
        done < checksums.sha256

        echo ""
        echo "All checksums verified successfully."
        exit 0
        """;
|
||||
|
||||
    /// <summary>
    /// Produces the embedded PowerShell verification script (verify.ps1):
    /// it re-hashes every file listed in checksums.sha256 in BSD format
    /// ("SHA256 (file) = hex") and exits non-zero if any file is missing or
    /// mismatched. Unlike verify.sh, it reports ALL failures before exiting
    /// rather than stopping at the first one.
    /// </summary>
    // NOTE(review): as in verify.sh, lines not matching the lowercase-hex BSD
    // pattern are silently skipped by the regex guard.
    private static string GenerateVerifyPs1Script() => """
        # Evidence Bundle Verification Script (PowerShell)
        # Verifies checksums and signature (if present)

        $ErrorActionPreference = "Stop"
        $ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
        Set-Location $ScriptDir

        Write-Host "Verifying evidence bundle checksums..."

        $ChecksumFile = "checksums.sha256"
        if (-not (Test-Path $ChecksumFile)) {
            Write-Error "checksums.sha256 not found"
            exit 1
        }

        $Lines = Get-Content $ChecksumFile
        $FailedCount = 0

        foreach ($Line in $Lines) {
            # Skip comments and empty lines
            if ($Line -match "^#" -or [string]::IsNullOrWhiteSpace($Line)) {
                continue
            }

            # Parse BSD format: SHA256 (filename) = digest
            if ($Line -match "^SHA256 \(([^)]+)\) = ([a-f0-9]+)$") {
                $File = $Matches[1]
                $Expected = $Matches[2]

                if (-not (Test-Path $File)) {
                    Write-Host "MISSING: $File" -ForegroundColor Red
                    $FailedCount++
                    continue
                }

                $Hash = (Get-FileHash -Path $File -Algorithm SHA256).Hash.ToLower()
                if ($Hash -ne $Expected) {
                    Write-Host "FAILED: $File" -ForegroundColor Red
                    Write-Host " Expected: $Expected"
                    Write-Host " Actual: $Hash"
                    $FailedCount++
                } else {
                    Write-Host "OK: $File" -ForegroundColor Green
                }
            }
        }

        if ($FailedCount -gt 0) {
            Write-Error "$FailedCount file(s) failed verification"
            exit 1
        }

        Write-Host ""
        Write-Host "All checksums verified successfully." -ForegroundColor Green
        exit 0
        """;
|
||||
|
||||
    /// <summary>
    /// Builds the bundle's README.md: contents summary, directory layout,
    /// verification instructions, subject, and provenance, rendered from the
    /// manifest via an interpolated raw string.
    /// </summary>
    /// <param name="manifest">Bundle manifest supplying counts and metadata.
    /// Assumed non-null here (private helper) — presumably validated by the
    /// public entry point; TODO confirm.</param>
    /// <returns>README content as Markdown.</returns>
    private static string GenerateReadme(BundleManifest manifest) => $"""
        # Evidence Bundle

        Bundle ID: {manifest.BundleId}
        Created: {manifest.CreatedAt:O}
        Schema Version: {manifest.SchemaVersion}

        ## Contents

        - SBOMs: {manifest.Sboms.Length}
        - VEX Statements: {manifest.VexStatements.Length}
        - Attestations: {manifest.Attestations.Length}
        - Policy Verdicts: {manifest.PolicyVerdicts.Length}
        - Scan Results: {manifest.ScanResults.Length}
        - Public Keys: {manifest.PublicKeys.Length}

        Total Artifacts: {manifest.TotalArtifacts}

        ## Directory Structure

        ```
        /
        +-- manifest.json        # Bundle manifest with artifact index
        +-- metadata.json        # Bundle metadata and provenance
        +-- checksums.sha256     # SHA-256 checksums for all files
        +-- verify.sh            # Verification script (Unix)
        +-- verify.ps1           # Verification script (Windows)
        +-- README.md            # This file
        +-- sboms/               # SBOM artifacts
        +-- vex/                 # VEX statements
        +-- attestations/        # DSSE attestation envelopes
        +-- policy/              # Policy verdicts
        +-- scans/               # Scan results
        +-- keys/                # Public keys for verification
        ```

        ## Verification

        ### Unix/Linux/macOS
        ```bash
        chmod +x verify.sh
        ./verify.sh
        ```

        ### Windows PowerShell
        ```powershell
        .\verify.ps1
        ```

        ## Subject

        Type: {manifest.Metadata.Subject.Type}
        Digest: {manifest.Metadata.Subject.Digest}
        {(manifest.Metadata.Subject.Name is not null ? $"Name: {manifest.Metadata.Subject.Name}" : "")}

        ## Provenance

        Creator: {manifest.Metadata.Provenance.Creator.Name} v{manifest.Metadata.Provenance.Creator.Version}
        Exported: {manifest.Metadata.Provenance.ExportedAt:O}
        {(manifest.Metadata.Provenance.ScanId is not null ? $"Scan ID: {manifest.Metadata.Provenance.ScanId}" : "")}

        ---
        Generated by StellaOps EvidenceLocker
        """;
|
||||
}
|
||||
|
||||
/// <summary>
/// Builder for constructing bundle manifests. Accumulates artifact entries per
/// category and produces an immutable <see cref="BundleManifest"/> snapshot.
/// </summary>
internal sealed class BundleManifestBuilder
{
    private readonly string _bundleId;
    private readonly DateTimeOffset _createdAt;
    // Required before Build(); null until SetMetadata is called.
    private BundleMetadata? _metadata;
    private readonly List<ArtifactEntry> _sboms = [];
    private readonly List<ArtifactEntry> _vexStatements = [];
    private readonly List<ArtifactEntry> _attestations = [];
    private readonly List<ArtifactEntry> _policyVerdicts = [];
    private readonly List<ArtifactEntry> _scanResults = [];
    private readonly List<KeyEntry> _publicKeys = [];

    /// <summary>
    /// Creates a builder for the given bundle identity.
    /// </summary>
    /// <param name="bundleId">Unique bundle identifier; must be non-empty.</param>
    /// <param name="createdAt">Bundle creation timestamp.</param>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="bundleId"/> is null.</exception>
    /// <exception cref="ArgumentException">Thrown when <paramref name="bundleId"/> is empty or whitespace.</exception>
    public BundleManifestBuilder(string bundleId, DateTimeOffset createdAt)
    {
        // Fail fast instead of letting a blank id silently flow into the manifest;
        // consistent with ChecksumFileWriter.Generate's argument validation.
        ArgumentException.ThrowIfNullOrWhiteSpace(bundleId);
        _bundleId = bundleId;
        _createdAt = createdAt;
    }

    /// <summary>Sets the bundle metadata; required before <see cref="Build"/>.</summary>
    public void SetMetadata(BundleMetadata metadata) => _metadata = metadata;

    /// <summary>Adds an SBOM artifact entry.</summary>
    public void AddSbom(ArtifactEntry entry) => _sboms.Add(entry);

    /// <summary>Adds a VEX statement artifact entry.</summary>
    public void AddVex(ArtifactEntry entry) => _vexStatements.Add(entry);

    /// <summary>Adds a DSSE attestation artifact entry.</summary>
    public void AddAttestation(ArtifactEntry entry) => _attestations.Add(entry);

    /// <summary>Adds a policy verdict artifact entry.</summary>
    public void AddPolicyVerdict(ArtifactEntry entry) => _policyVerdicts.Add(entry);

    /// <summary>Adds a scan result artifact entry.</summary>
    public void AddScanResult(ArtifactEntry entry) => _scanResults.Add(entry);

    /// <summary>Adds a public key entry used for verification.</summary>
    public void AddPublicKey(KeyEntry entry) => _publicKeys.Add(entry);

    /// <summary>
    /// Builds the manifest. Copies every accumulated list so later mutation of
    /// this builder cannot affect the returned manifest.
    /// </summary>
    /// <returns>The assembled <see cref="BundleManifest"/>.</returns>
    /// <exception cref="InvalidOperationException">Thrown when <see cref="SetMetadata"/> was never called.</exception>
    public BundleManifest Build() => new()
    {
        BundleId = _bundleId,
        CreatedAt = _createdAt,
        Metadata = _metadata ?? throw new InvalidOperationException("Metadata not set"),
        Sboms = [.. _sboms],
        VexStatements = [.. _vexStatements],
        Attestations = [.. _attestations],
        PolicyVerdicts = [.. _policyVerdicts],
        ScanResults = [.. _scanResults],
        PublicKeys = [.. _publicKeys]
    };
}
|
||||
@@ -0,0 +1,430 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// VerifyScriptGenerator.cs
|
||||
// Sprint: SPRINT_20260106_003_003_EVIDENCE_export_bundle
|
||||
// Task: T014, T015, T016, T017
|
||||
// Description: Generates verification scripts for evidence bundles.
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using StellaOps.EvidenceLocker.Export.Models;
|
||||
|
||||
namespace StellaOps.EvidenceLocker.Export;
|
||||
|
||||
/// <summary>
|
||||
/// Generates verification scripts for evidence bundles.
|
||||
/// </summary>
|
||||
public static class VerifyScriptGenerator
|
||||
{
|
||||
/// <summary>
|
||||
/// Generates a Unix shell verification script.
|
||||
/// </summary>
|
||||
/// <returns>Shell script content.</returns>
|
||||
public static string GenerateShellScript() => """
|
||||
#!/bin/bash
|
||||
# Evidence Bundle Verification Script
|
||||
# Verifies checksums and signature (if present)
|
||||
|
||||
set -e
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
cd "$SCRIPT_DIR"
|
||||
|
||||
echo "=============================================="
|
||||
echo " Evidence Bundle Verification"
|
||||
echo "=============================================="
|
||||
echo ""
|
||||
|
||||
# Check for required files
|
||||
if [ ! -f "checksums.sha256" ]; then
|
||||
echo "ERROR: checksums.sha256 not found"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ ! -f "manifest.json" ]; then
|
||||
echo "ERROR: manifest.json not found"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Verifying checksums..."
|
||||
echo ""
|
||||
|
||||
PASS_COUNT=0
|
||||
FAIL_COUNT=0
|
||||
|
||||
# Verify all checksums
|
||||
while IFS= read -r line; do
|
||||
# Skip comments and empty lines
|
||||
[[ "$line" =~ ^#.*$ ]] && continue
|
||||
[[ -z "$line" ]] && continue
|
||||
|
||||
# Parse BSD format: SHA256 (filename) = digest
|
||||
if [[ "$line" =~ ^SHA256\ \(([^)]+)\)\ =\ ([a-f0-9]+)$ ]]; then
|
||||
file="${BASH_REMATCH[1]}"
|
||||
expected="${BASH_REMATCH[2]}"
|
||||
|
||||
if [ ! -f "$file" ]; then
|
||||
echo "MISSING: $file"
|
||||
FAIL_COUNT=$((FAIL_COUNT + 1))
|
||||
continue
|
||||
fi
|
||||
|
||||
actual=$(sha256sum "$file" | awk '{print $1}')
|
||||
if [ "$actual" != "$expected" ]; then
|
||||
echo "FAILED: $file"
|
||||
echo " Expected: $expected"
|
||||
echo " Actual: $actual"
|
||||
FAIL_COUNT=$((FAIL_COUNT + 1))
|
||||
else
|
||||
echo "OK: $file"
|
||||
PASS_COUNT=$((PASS_COUNT + 1))
|
||||
fi
|
||||
fi
|
||||
done < checksums.sha256
|
||||
|
||||
echo ""
|
||||
echo "=============================================="
|
||||
echo " Verification Summary"
|
||||
echo "=============================================="
|
||||
echo "Passed: $PASS_COUNT"
|
||||
echo "Failed: $FAIL_COUNT"
|
||||
echo ""
|
||||
|
||||
if [ $FAIL_COUNT -gt 0 ]; then
|
||||
echo "VERIFICATION FAILED"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "ALL CHECKSUMS VERIFIED SUCCESSFULLY"
|
||||
exit 0
|
||||
""";
|
||||
|
||||
    /// <summary>
    /// Generates a PowerShell verification script. The script recomputes the
    /// SHA-256 digest (via Get-FileHash) of every file listed in
    /// checksums.sha256 (BSD format: "SHA256 (file) = digest") and reports a
    /// pass/fail summary.
    /// </summary>
    /// <returns>PowerShell script content; exits 0 when all checksums match, 1 otherwise.</returns>
    // NOTE(review): with $ErrorActionPreference = "Stop", the Write-Error calls
    // below terminate the script before the following `exit 1` statements run;
    // under `powershell -File` the process still exits non-zero — confirm if a
    // specific exit code is contractually required.
    public static string GeneratePowerShellScript() => """
        # Evidence Bundle Verification Script (PowerShell)
        # Verifies checksums and signature (if present)

        $ErrorActionPreference = "Stop"
        $ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Path
        Set-Location $ScriptDir

        Write-Host "=============================================="
        Write-Host "  Evidence Bundle Verification"
        Write-Host "=============================================="
        Write-Host ""

        # Check for required files
        $ChecksumFile = "checksums.sha256"
        if (-not (Test-Path $ChecksumFile)) {
            Write-Error "checksums.sha256 not found"
            exit 1
        }

        if (-not (Test-Path "manifest.json")) {
            Write-Error "manifest.json not found"
            exit 1
        }

        Write-Host "Verifying checksums..."
        Write-Host ""

        $Lines = Get-Content $ChecksumFile
        $PassCount = 0
        $FailCount = 0

        foreach ($Line in $Lines) {
            # Skip comments and empty lines
            if ($Line -match "^#" -or [string]::IsNullOrWhiteSpace($Line)) {
                continue
            }

            # Parse BSD format: SHA256 (filename) = digest
            if ($Line -match "^SHA256 \(([^)]+)\) = ([a-f0-9]+)$") {
                $File = $Matches[1]
                $Expected = $Matches[2]

                if (-not (Test-Path $File)) {
                    Write-Host "MISSING: $File" -ForegroundColor Red
                    $FailCount++
                    continue
                }

                $Hash = (Get-FileHash -Path $File -Algorithm SHA256).Hash.ToLower()
                if ($Hash -ne $Expected) {
                    Write-Host "FAILED: $File" -ForegroundColor Red
                    Write-Host "  Expected: $Expected"
                    Write-Host "  Actual:   $Hash"
                    $FailCount++
                } else {
                    Write-Host "OK: $File" -ForegroundColor Green
                    $PassCount++
                }
            }
        }

        Write-Host ""
        Write-Host "=============================================="
        Write-Host "  Verification Summary"
        Write-Host "=============================================="
        Write-Host "Passed: $PassCount"
        Write-Host "Failed: $FailCount"
        Write-Host ""

        if ($FailCount -gt 0) {
            Write-Error "VERIFICATION FAILED"
            exit 1
        }

        Write-Host "ALL CHECKSUMS VERIFIED SUCCESSFULLY" -ForegroundColor Green
        exit 0
        """;
|
||||
|
||||
    /// <summary>
    /// Generates a Python verification script. The script recomputes the
    /// SHA-256 digest of every file listed in checksums.sha256 (BSD format:
    /// "SHA256 (file) = digest") and reports a pass/fail summary; it accepts
    /// an optional bundle-directory argument, defaulting to the script's own
    /// directory.
    /// </summary>
    /// <returns>Python script content; exits 0 when all checksums match, 1 otherwise.</returns>
    public static string GeneratePythonScript()
    {
        // Using regular string because Python uses triple quotes which conflict with C# raw strings.
        // In a verbatim @"..." string every literal quote is doubled, so "" emits "
        // and """""" emits Python's """ docstring delimiter.
        return @"#!/usr/bin/env python3
# Evidence Bundle Verification Script (Python)
# Verifies checksums and signature (if present)
# Requires Python 3.6+

import hashlib
import json
import os
import re
import sys
from pathlib import Path


def compute_sha256(filepath):
    """"""Compute SHA-256 hash of a file.""""""
    sha256_hash = hashlib.sha256()
    with open(filepath, ""rb"") as f:
        for chunk in iter(lambda: f.read(8192), b""""):
            sha256_hash.update(chunk)
    return sha256_hash.hexdigest()


def parse_checksum_line(line):
    """"""Parse a BSD-format checksum line.""""""
    # BSD format: SHA256 (filename) = digest
    match = re.match(r'^SHA256 \(([^)]+)\) = ([a-f0-9]+)$', line.strip())
    if match:
        return match.group(1), match.group(2)
    return None


def verify_bundle(bundle_dir):
    """"""Verify all checksums in the bundle.""""""
    os.chdir(bundle_dir)

    print(""=============================================="")
    print(""  Evidence Bundle Verification"")
    print(""=============================================="")
    print()

    checksum_file = Path(""checksums.sha256"")
    if not checksum_file.exists():
        print(""ERROR: checksums.sha256 not found"")
        return False

    manifest_file = Path(""manifest.json"")
    if not manifest_file.exists():
        print(""ERROR: manifest.json not found"")
        return False

    print(""Verifying checksums..."")
    print()

    pass_count = 0
    fail_count = 0

    with open(checksum_file, ""r"") as f:
        for line in f:
            # Skip comments and empty lines
            line = line.strip()
            if not line or line.startswith(""#""):
                continue

            parsed = parse_checksum_line(line)
            if not parsed:
                continue

            filepath, expected = parsed
            file_path = Path(filepath)

            if not file_path.exists():
                print(f""MISSING: {filepath}"")
                fail_count += 1
                continue

            actual = compute_sha256(file_path)
            if actual != expected:
                print(f""FAILED: {filepath}"")
                print(f""  Expected: {expected}"")
                print(f""  Actual:   {actual}"")
                fail_count += 1
            else:
                print(f""OK: {filepath}"")
                pass_count += 1

    print()
    print(""=============================================="")
    print(""  Verification Summary"")
    print(""=============================================="")
    print(f""Passed: {pass_count}"")
    print(f""Failed: {fail_count}"")
    print()

    if fail_count > 0:
        print(""VERIFICATION FAILED"")
        return False

    print(""ALL CHECKSUMS VERIFIED SUCCESSFULLY"")
    return True


def main():
    if len(sys.argv) > 1:
        bundle_dir = Path(sys.argv[1])
    else:
        bundle_dir = Path(__file__).parent

    if not bundle_dir.is_dir():
        print(f""ERROR: {bundle_dir} is not a directory"")
        sys.exit(1)

    success = verify_bundle(bundle_dir)
    sys.exit(0 if success else 1)


if __name__ == ""__main__"":
    main()
";
    }
|
||||
|
||||
/// <summary>
|
||||
/// Generates a README with verification instructions.
|
||||
/// </summary>
|
||||
/// <param name="manifest">Bundle manifest.</param>
|
||||
/// <returns>README content.</returns>
|
||||
public static string GenerateReadme(BundleManifest manifest)
|
||||
{
|
||||
var subjectName = manifest.Metadata.Subject.Name is not null
|
||||
? $"| Name | {manifest.Metadata.Subject.Name} |"
|
||||
: "";
|
||||
var subjectTag = manifest.Metadata.Subject.Tag is not null
|
||||
? $"| Tag | {manifest.Metadata.Subject.Tag} |"
|
||||
: "";
|
||||
var scanId = manifest.Metadata.Provenance.ScanId is not null
|
||||
? $"| Scan ID | {manifest.Metadata.Provenance.ScanId} |"
|
||||
: "";
|
||||
var lockerId = manifest.Metadata.Provenance.EvidenceLockerId is not null
|
||||
? $"| Evidence Locker ID | {manifest.Metadata.Provenance.EvidenceLockerId} |"
|
||||
: "";
|
||||
|
||||
return $"""
|
||||
# Evidence Bundle
|
||||
|
||||
Bundle ID: {manifest.BundleId}
|
||||
Created: {manifest.CreatedAt:O}
|
||||
Schema Version: {manifest.SchemaVersion}
|
||||
|
||||
## Contents
|
||||
|
||||
| Category | Count |
|
||||
|----------|-------|
|
||||
| SBOMs | {manifest.Sboms.Length} |
|
||||
| VEX Statements | {manifest.VexStatements.Length} |
|
||||
| Attestations | {manifest.Attestations.Length} |
|
||||
| Policy Verdicts | {manifest.PolicyVerdicts.Length} |
|
||||
| Scan Results | {manifest.ScanResults.Length} |
|
||||
| Public Keys | {manifest.PublicKeys.Length} |
|
||||
| **Total Artifacts** | **{manifest.TotalArtifacts}** |
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
/
|
||||
+-- manifest.json # Bundle manifest with artifact index
|
||||
+-- metadata.json # Bundle metadata and provenance
|
||||
+-- checksums.sha256 # SHA-256 checksums for all files
|
||||
+-- verify.sh # Verification script (Unix)
|
||||
+-- verify.ps1 # Verification script (Windows)
|
||||
+-- verify.py # Verification script (Python)
|
||||
+-- README.md # This file
|
||||
+-- sboms/ # SBOM artifacts
|
||||
+-- vex/ # VEX statements
|
||||
+-- attestations/ # DSSE attestation envelopes
|
||||
+-- policy/ # Policy verdicts
|
||||
+-- scans/ # Scan results
|
||||
+-- keys/ # Public keys for verification
|
||||
```
|
||||
|
||||
## Verification
|
||||
|
||||
This bundle includes verification scripts to ensure integrity. Choose your platform:
|
||||
|
||||
### Unix/Linux/macOS (Bash)
|
||||
|
||||
```bash
|
||||
chmod +x verify.sh
|
||||
./verify.sh
|
||||
```
|
||||
|
||||
**Requirements:** `sha256sum` (installed by default on most systems)
|
||||
|
||||
### Windows (PowerShell)
|
||||
|
||||
```powershell
|
||||
# May need to adjust execution policy
|
||||
Set-ExecutionPolicy -ExecutionPolicy Bypass -Scope Process
|
||||
.\verify.ps1
|
||||
```
|
||||
|
||||
**Requirements:** PowerShell 5.1 or later (included in Windows 10+)
|
||||
|
||||
### Cross-Platform (Python)
|
||||
|
||||
```bash
|
||||
python3 verify.py
|
||||
```
|
||||
|
||||
**Requirements:** Python 3.6 or later
|
||||
|
||||
### Manual Verification
|
||||
|
||||
You can also manually verify checksums using standard tools:
|
||||
|
||||
```bash
|
||||
# On Linux/macOS
|
||||
sha256sum -c checksums.sha256
|
||||
```
|
||||
|
||||
## Subject
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| Type | {manifest.Metadata.Subject.Type} |
|
||||
| Digest | {manifest.Metadata.Subject.Digest} |
|
||||
{subjectName}
|
||||
{subjectTag}
|
||||
|
||||
## Provenance
|
||||
|
||||
| Field | Value |
|
||||
|-------|-------|
|
||||
| Creator | {manifest.Metadata.Provenance.Creator.Name} v{manifest.Metadata.Provenance.Creator.Version} |
|
||||
| Exported | {manifest.Metadata.Provenance.ExportedAt:O} |
|
||||
{scanId}
|
||||
{lockerId}
|
||||
|
||||
---
|
||||
Generated by StellaOps EvidenceLocker
|
||||
""";
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user