save progress
@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;

[assembly: InternalsVisibleTo("StellaOps.AuditPack.Tests")]
146 src/__Libraries/StellaOps.AuditPack/Services/ArchiveUtilities.cs (Normal file)
@@ -0,0 +1,146 @@
using System.Buffers.Binary;
using System.Formats.Tar;
using System.IO.Compression;

namespace StellaOps.AuditPack.Services;

internal static class ArchiveUtilities
{
    internal static readonly DateTimeOffset FixedTimestamp = DateTimeOffset.UnixEpoch;
    private const UnixFileMode DefaultFileMode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead;

    public static async Task WriteTarGzAsync(
        string outputPath,
        IReadOnlyList<ArchiveEntry> entries,
        CancellationToken ct)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(outputPath);
        ArgumentNullException.ThrowIfNull(entries);

        var outputDir = Path.GetDirectoryName(outputPath);
        if (!string.IsNullOrWhiteSpace(outputDir))
        {
            Directory.CreateDirectory(outputDir);
        }

        await using (var fileStream = File.Create(outputPath))
        await using (var gzip = new GZipStream(fileStream, CompressionLevel.Optimal, leaveOpen: true))
        await using (var tarWriter = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: true))
        {
            foreach (var entry in entries.OrderBy(static e => e.Path, StringComparer.Ordinal))
            {
                ct.ThrowIfCancellationRequested();
                var tarEntry = new PaxTarEntry(TarEntryType.RegularFile, entry.Path)
                {
                    Mode = entry.Mode ?? DefaultFileMode,
                    ModificationTime = FixedTimestamp,
                    Uid = 0,
                    Gid = 0,
                    UserName = string.Empty,
                    GroupName = string.Empty
                };
                tarEntry.DataStream = new MemoryStream(entry.Content, writable: false);
                tarWriter.WriteEntry(tarEntry);
            }
        }

        ApplyDeterministicGzipHeader(outputPath, FixedTimestamp);
    }

    public static async Task ExtractTarGzAsync(
        string archivePath,
        string targetDir,
        bool overwriteFiles,
        CancellationToken ct)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(archivePath);
        ArgumentException.ThrowIfNullOrWhiteSpace(targetDir);

        Directory.CreateDirectory(targetDir);
        var fullTarget = Path.GetFullPath(targetDir);

        await using var fileStream = File.OpenRead(archivePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
        using var tarReader = new TarReader(gzipStream, leaveOpen: false);

        TarEntry? entry;
        while ((entry = await tarReader.GetNextEntryAsync(cancellationToken: ct).ConfigureAwait(false)) is not null)
        {
            ct.ThrowIfCancellationRequested();

            if (entry.EntryType != TarEntryType.RegularFile || entry.DataStream is null)
            {
                continue;
            }

            var safePath = NormalizeTarEntryPath(entry.Name);
            var destinationPath = Path.GetFullPath(Path.Combine(fullTarget, safePath));

            if (!destinationPath.StartsWith(fullTarget, StringComparison.Ordinal))
            {
                throw new InvalidOperationException($"Tar entry '{entry.Name}' escapes the target directory.");
            }

            var destinationDir = Path.GetDirectoryName(destinationPath);
            if (!string.IsNullOrWhiteSpace(destinationDir))
            {
                Directory.CreateDirectory(destinationDir);
            }

            if (File.Exists(destinationPath) && !overwriteFiles)
            {
                throw new IOException($"Target file already exists: {destinationPath}");
            }

            await using var outputStream = File.Create(destinationPath);
            await entry.DataStream.CopyToAsync(outputStream, ct).ConfigureAwait(false);
        }
    }

    private static string NormalizeTarEntryPath(string entryName)
    {
        if (string.IsNullOrWhiteSpace(entryName))
        {
            throw new InvalidOperationException("Tar entry name is empty.");
        }

        var normalized = entryName.Replace('\\', '/');
        if (normalized.StartsWith("/", StringComparison.Ordinal))
        {
            normalized = normalized.TrimStart('/');
        }

        if (Path.IsPathRooted(normalized))
        {
            throw new InvalidOperationException($"Tar entry '{entryName}' is rooted.");
        }

        foreach (var segment in normalized.Split('/', StringSplitOptions.RemoveEmptyEntries))
        {
            if (segment == "." || segment == "..")
            {
                throw new InvalidOperationException($"Tar entry '{entryName}' contains parent traversal.");
            }
        }

        return normalized;
    }

    private static void ApplyDeterministicGzipHeader(string outputPath, DateTimeOffset timestamp)
    {
        using var stream = new FileStream(outputPath, FileMode.Open, FileAccess.ReadWrite, FileShare.Read);
        if (stream.Length < 10)
        {
            throw new InvalidOperationException("GZip header not fully written for archive.");
        }

        var seconds = checked((int)(timestamp - DateTimeOffset.UnixEpoch).TotalSeconds);
        Span<byte> buffer = stackalloc byte[4];
        BinaryPrimitives.WriteInt32LittleEndian(buffer, seconds);

        stream.Position = 4;
        stream.Write(buffer);
    }
}

internal sealed record ArchiveEntry(string Path, byte[] Content, UnixFileMode? Mode = null);
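Note: the writer above pins the tar metadata (mtime, uid/gid, owner names), orders entries, and overwrites the gzip MTIME field (bytes 4-7 of the gzip header per RFC 1952), so identical inputs should produce byte-identical archives. A minimal sketch of how that property could be checked; the helper name and sample entries are illustrative, only ArchiveUtilities and ArchiveEntry come from this commit:

using System.Security.Cryptography;

internal static class ArchiveDeterminismCheck
{
    // Writes the same entry set twice and compares SHA-256 digests of the two outputs.
    public static async Task<bool> VerifyDeterminismAsync(CancellationToken ct)
    {
        var entries = new List<ArchiveEntry>
        {
            new("manifest.json", "{\"schema\":\"1.0\"}"u8.ToArray()),
            new("sbom.json", "{}"u8.ToArray())
        };

        var first = Path.Combine(Path.GetTempPath(), "bundle-a.tar.gz");
        var second = Path.Combine(Path.GetTempPath(), "bundle-b.tar.gz");

        await ArchiveUtilities.WriteTarGzAsync(first, entries, ct);
        await ArchiveUtilities.WriteTarGzAsync(second, entries, ct);

        // Identical inputs should yield identical bytes once the gzip MTIME is pinned.
        var digestA = Convert.ToHexString(SHA256.HashData(await File.ReadAllBytesAsync(first, ct)));
        var digestB = Convert.ToHexString(SHA256.HashData(await File.ReadAllBytesAsync(second, ct)));
        return digestA == digestB;
    }
}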
@@ -6,8 +6,6 @@
// -----------------------------------------------------------------------------

using System.Collections.Immutable;
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -197,12 +195,8 @@ public sealed class AuditBundleReader : IAuditBundleReader
}
}

private static async Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct)
{
await using var fileStream = File.OpenRead(bundlePath);
await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
await TarFile.ExtractToDirectoryAsync(gzipStream, targetDir, overwriteFiles: true, ct);
}
private static Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct)
    => ArchiveUtilities.ExtractTarGzAsync(bundlePath, targetDir, overwriteFiles: true, ct);

private static async Task<string> ComputeFileDigestAsync(string filePath, CancellationToken ct)
{
@@ -5,9 +5,6 @@
// Description: Writes self-contained audit bundles for offline replay.
// -----------------------------------------------------------------------------

using System.Collections.Immutable;
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -20,12 +17,21 @@ namespace StellaOps.AuditPack.Services;
/// </summary>
public sealed class AuditBundleWriter : IAuditBundleWriter
{
private readonly TimeProvider _timeProvider;
private readonly IAuditPackIdGenerator _idGenerator;

private static readonly JsonSerializerOptions JsonOptions = new()
{
WriteIndented = true,
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};

public AuditBundleWriter(TimeProvider? timeProvider = null, IAuditPackIdGenerator? idGenerator = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
_idGenerator = idGenerator ?? new GuidAuditPackIdGenerator();
}

/// <summary>
/// Creates an audit bundle from the specified inputs.
/// </summary>
@@ -36,20 +42,16 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
ArgumentNullException.ThrowIfNull(request);
ArgumentException.ThrowIfNullOrWhiteSpace(request.OutputPath);

var tempDir = Path.Combine(Path.GetTempPath(), $"audit-bundle-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);

try
{
var entries = new List<BundleEntry>();
var files = new List<BundleFileEntry>();
var archiveEntries = new List<ArchiveEntry>();

// Write SBOM
string sbomDigest;
if (request.Sbom is not null)
{
var sbomPath = Path.Combine(tempDir, "sbom.json");
await File.WriteAllBytesAsync(sbomPath, request.Sbom, cancellationToken);
sbomDigest = ComputeSha256(request.Sbom);
entries.Add(new BundleEntry("sbom.json", sbomDigest, request.Sbom.Length));
files.Add(new BundleFileEntry
@@ -59,6 +61,7 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.Sbom.Length,
ContentType = BundleContentType.Sbom
});
archiveEntries.Add(new ArchiveEntry("sbom.json", request.Sbom));
}
else
{
@@ -69,10 +72,6 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
string feedsDigest;
if (request.FeedsSnapshot is not null)
{
var feedsDir = Path.Combine(tempDir, "feeds");
Directory.CreateDirectory(feedsDir);
var feedsPath = Path.Combine(feedsDir, "feeds-snapshot.ndjson");
await File.WriteAllBytesAsync(feedsPath, request.FeedsSnapshot, cancellationToken);
feedsDigest = ComputeSha256(request.FeedsSnapshot);
entries.Add(new BundleEntry("feeds/feeds-snapshot.ndjson", feedsDigest, request.FeedsSnapshot.Length));
files.Add(new BundleFileEntry
@@ -82,6 +81,7 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.FeedsSnapshot.Length,
ContentType = BundleContentType.Feeds
});
archiveEntries.Add(new ArchiveEntry("feeds/feeds-snapshot.ndjson", request.FeedsSnapshot));
}
else
{
@@ -92,10 +92,6 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
string policyDigest;
if (request.PolicyBundle is not null)
{
var policyDir = Path.Combine(tempDir, "policy");
Directory.CreateDirectory(policyDir);
var policyPath = Path.Combine(policyDir, "policy-bundle.tar.gz");
await File.WriteAllBytesAsync(policyPath, request.PolicyBundle, cancellationToken);
policyDigest = ComputeSha256(request.PolicyBundle);
entries.Add(new BundleEntry("policy/policy-bundle.tar.gz", policyDigest, request.PolicyBundle.Length));
files.Add(new BundleFileEntry
@@ -105,6 +101,7 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.PolicyBundle.Length,
ContentType = BundleContentType.Policy
});
archiveEntries.Add(new ArchiveEntry("policy/policy-bundle.tar.gz", request.PolicyBundle));
}
else
{
@@ -115,10 +112,6 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
string? vexDigest = null;
if (request.VexStatements is not null)
{
var vexDir = Path.Combine(tempDir, "vex");
Directory.CreateDirectory(vexDir);
var vexPath = Path.Combine(vexDir, "vex-statements.json");
await File.WriteAllBytesAsync(vexPath, request.VexStatements, cancellationToken);
vexDigest = ComputeSha256(request.VexStatements);
entries.Add(new BundleEntry("vex/vex-statements.json", vexDigest, request.VexStatements.Length));
files.Add(new BundleFileEntry
@@ -128,14 +121,13 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.VexStatements.Length,
ContentType = BundleContentType.Vex
});
archiveEntries.Add(new ArchiveEntry("vex/vex-statements.json", request.VexStatements));
}

// Write verdict
string verdictDigest;
if (request.Verdict is not null)
{
var verdictPath = Path.Combine(tempDir, "verdict.json");
await File.WriteAllBytesAsync(verdictPath, request.Verdict, cancellationToken);
verdictDigest = ComputeSha256(request.Verdict);
entries.Add(new BundleEntry("verdict.json", verdictDigest, request.Verdict.Length));
files.Add(new BundleFileEntry
@@ -145,6 +137,7 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.Verdict.Length,
ContentType = BundleContentType.Verdict
});
archiveEntries.Add(new ArchiveEntry("verdict.json", request.Verdict));
}
else
{
@@ -154,10 +147,6 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
// Write proof bundle (optional)
if (request.ProofBundle is not null)
{
var proofDir = Path.Combine(tempDir, "proof");
Directory.CreateDirectory(proofDir);
var proofPath = Path.Combine(proofDir, "proof-bundle.json");
await File.WriteAllBytesAsync(proofPath, request.ProofBundle, cancellationToken);
var proofDigest = ComputeSha256(request.ProofBundle);
entries.Add(new BundleEntry("proof/proof-bundle.json", proofDigest, request.ProofBundle.Length));
files.Add(new BundleFileEntry
@@ -167,16 +156,13 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.ProofBundle.Length,
ContentType = BundleContentType.ProofBundle
});
archiveEntries.Add(new ArchiveEntry("proof/proof-bundle.json", request.ProofBundle));
}

// Write trust roots (optional)
string? trustRootsDigest = null;
if (request.TrustRoots is not null)
{
var trustDir = Path.Combine(tempDir, "trust");
Directory.CreateDirectory(trustDir);
var trustPath = Path.Combine(trustDir, "trust-roots.json");
await File.WriteAllBytesAsync(trustPath, request.TrustRoots, cancellationToken);
trustRootsDigest = ComputeSha256(request.TrustRoots);
entries.Add(new BundleEntry("trust/trust-roots.json", trustRootsDigest, request.TrustRoots.Length));
files.Add(new BundleFileEntry
@@ -186,14 +172,13 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.TrustRoots.Length,
ContentType = BundleContentType.TrustRoot
});
archiveEntries.Add(new ArchiveEntry("trust/trust-roots.json", request.TrustRoots));
}

// Write scoring rules (optional)
string? scoringDigest = null;
if (request.ScoringRules is not null)
{
var scoringPath = Path.Combine(tempDir, "scoring-rules.json");
await File.WriteAllBytesAsync(scoringPath, request.ScoringRules, cancellationToken);
scoringDigest = ComputeSha256(request.ScoringRules);
entries.Add(new BundleEntry("scoring-rules.json", scoringDigest, request.ScoringRules.Length));
files.Add(new BundleFileEntry
@@ -203,15 +188,14 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.ScoringRules.Length,
ContentType = BundleContentType.Other
});
archiveEntries.Add(new ArchiveEntry("scoring-rules.json", request.ScoringRules));
}

// Write time anchor (optional)
TimeAnchor? timeAnchor = null;
if (request.TimeAnchor is not null)
{
var timeAnchorPath = Path.Combine(tempDir, "time-anchor.json");
var timeAnchorBytes = JsonSerializer.SerializeToUtf8Bytes(request.TimeAnchor, JsonOptions);
await File.WriteAllBytesAsync(timeAnchorPath, timeAnchorBytes, cancellationToken);
var timeAnchorBytes = CanonicalJson.Serialize(request.TimeAnchor, JsonOptions);
var timeAnchorDigest = ComputeSha256(timeAnchorBytes);
entries.Add(new BundleEntry("time-anchor.json", timeAnchorDigest, timeAnchorBytes.Length));
files.Add(new BundleFileEntry
@@ -221,6 +205,7 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = timeAnchorBytes.Length,
ContentType = BundleContentType.TimeAnchor
});
archiveEntries.Add(new ArchiveEntry("time-anchor.json", timeAnchorBytes));
timeAnchor = new TimeAnchor
{
Timestamp = request.TimeAnchor.Timestamp,
@@ -235,9 +220,9 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
// Build manifest
var manifest = new AuditBundleManifest
{
BundleId = request.BundleId ?? Guid.NewGuid().ToString("N"),
BundleId = request.BundleId ?? _idGenerator.NewBundleId(),
Name = request.Name ?? $"audit-{request.ScanId}",
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = _timeProvider.GetUtcNow(),
ScanId = request.ScanId,
ImageRef = request.ImageRef,
ImageDigest = request.ImageDigest,
@@ -259,9 +244,8 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
};

// Write manifest
var manifestBytes = JsonSerializer.SerializeToUtf8Bytes(manifest, JsonOptions);
var manifestPath = Path.Combine(tempDir, "manifest.json");
await File.WriteAllBytesAsync(manifestPath, manifestBytes, cancellationToken);
var manifestBytes = CanonicalJson.Serialize(manifest, JsonOptions);
archiveEntries.Add(new ArchiveEntry("manifest.json", manifestBytes));

// Sign manifest if requested
string? signingKeyId = null;
@@ -282,8 +266,7 @@ public sealed class AuditBundleWriter : IAuditBundleWriter

if (signResult.Success && signResult.Envelope is not null)
{
var signaturePath = Path.Combine(tempDir, "manifest.sig");
await File.WriteAllBytesAsync(signaturePath, signResult.Envelope, cancellationToken);
archiveEntries.Add(new ArchiveEntry("manifest.sig", signResult.Envelope));
signingKeyId = signResult.KeyId;
signingAlgorithm = signResult.Algorithm;
signed = true;
@@ -297,7 +280,7 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
outputPath = $"{outputPath}.tar.gz";
}

await CreateTarGzAsync(tempDir, outputPath, cancellationToken);
await ArchiveUtilities.WriteTarGzAsync(outputPath, archiveEntries, cancellationToken);

var bundleDigest = await ComputeFileDigestAsync(outputPath, cancellationToken);

@@ -320,21 +303,6 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
{
return AuditBundleWriteResult.Failed($"Failed to write audit bundle: {ex.Message}");
}
finally
{
// Clean up temp directory
try
{
if (Directory.Exists(tempDir))
{
Directory.Delete(tempDir, recursive: true);
}
}
catch
{
// Ignore cleanup errors
}
}
}

private static string ComputeSha256(byte[] content)
@@ -395,19 +363,6 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
}
}

private static async Task CreateTarGzAsync(string sourceDir, string outputPath, CancellationToken ct)
{
var outputDir = Path.GetDirectoryName(outputPath);
if (!string.IsNullOrEmpty(outputDir) && !Directory.Exists(outputDir))
{
Directory.CreateDirectory(outputDir);
}

await using var fileStream = File.Create(outputPath);
await using var gzipStream = new GZipStream(fileStream, CompressionLevel.Optimal);
await TarFile.CreateFromDirectoryAsync(sourceDir, gzipStream, includeBaseDirectory: false, ct);
}

private sealed record BundleEntry(string Path, string Digest, long SizeBytes);
}
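Because the writer now takes a TimeProvider and an IAuditPackIdGenerator, a fully reproducible bundle can be produced in tests by injecting a fixed clock and fixed IDs. A rough sketch; FakeTimeProvider is assumed to come from the Microsoft.Extensions.TimeProvider.Testing package, and DeterministicIds is a hypothetical test double (a sketch of it follows the AuditPackIds.cs file below):

using Microsoft.Extensions.Time.Testing; // assumed test-support package providing FakeTimeProvider

// Sketch: pin CreatedAt and BundleId so CreateAsync output can be compared against a golden file.
var clock = new FakeTimeProvider(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero));
var writer = new AuditBundleWriter(clock, new DeterministicIds());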
@@ -2,17 +2,23 @@ namespace StellaOps.AuditPack.Services;

using StellaOps.AuditPack.Models;
using System.Collections.Immutable;
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;

/// <summary>
/// Builds audit packs from scan results.
/// </summary>
public sealed class AuditPackBuilder : IAuditPackBuilder
{
private readonly TimeProvider _timeProvider;
private readonly IAuditPackIdGenerator _idGenerator;

public AuditPackBuilder(TimeProvider? timeProvider = null, IAuditPackIdGenerator? idGenerator = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
_idGenerator = idGenerator ?? new GuidAuditPackIdGenerator();
}

/// <summary>
/// Builds an audit pack from a scan result.
/// </summary>
@@ -33,14 +39,16 @@ public sealed class AuditPackBuilder : IAuditPackBuilder
var bundleManifest = await BuildMinimalBundleAsync(scanResult, ct);

// Create pack structure
var now = _timeProvider.GetUtcNow();

var pack = new AuditPack
{
PackId = Guid.NewGuid().ToString(),
PackId = _idGenerator.NewPackId(),
SchemaVersion = "1.0.0",
Name = options.Name ?? $"audit-pack-{scanResult.ScanId}",
CreatedAt = DateTimeOffset.UtcNow,
RunManifest = new RunManifest(scanResult.ScanId, DateTimeOffset.UtcNow),
EvidenceIndex = new EvidenceIndex([]),
CreatedAt = now,
RunManifest = new RunManifest(scanResult.ScanId, now),
EvidenceIndex = new EvidenceIndex(Array.Empty<string>().ToImmutableArray()),
Verdict = new Verdict(scanResult.ScanId, "completed"),
OfflineBundle = bundleManifest,
Attestations = [.. attestations],
@@ -55,6 +63,9 @@ public sealed class AuditPackBuilder : IAuditPackBuilder
}
};

var fileResult = BuildPackFiles(pack);
pack = pack with { Contents = fileResult.Contents };

return WithDigest(pack);
}

@@ -67,126 +78,36 @@ public sealed class AuditPackBuilder : IAuditPackBuilder
ExportOptions options,
CancellationToken ct = default)
{
var tempDir = Path.Combine(Path.GetTempPath(), $"audit-pack-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);
var fileBuild = BuildPackFiles(pack);
pack = pack with { Contents = fileBuild.Contents };
pack = WithDigest(pack);
var entries = fileBuild.Entries;

try
var manifestBytes = CanonicalJson.Serialize(pack);
entries.Insert(0, new ArchiveEntry("manifest.json", manifestBytes));

if (options.Sign && !string.IsNullOrWhiteSpace(options.SigningKey))
{
// Write pack manifest
var manifestJson = JsonSerializer.Serialize(pack, new JsonSerializerOptions
{
WriteIndented = true
});
await File.WriteAllTextAsync(Path.Combine(tempDir, "manifest.json"), manifestJson, ct);

// Write run manifest
var runManifestJson = JsonSerializer.Serialize(pack.RunManifest);
await File.WriteAllTextAsync(Path.Combine(tempDir, "run-manifest.json"), runManifestJson, ct);

// Write evidence index
var evidenceJson = JsonSerializer.Serialize(pack.EvidenceIndex);
await File.WriteAllTextAsync(Path.Combine(tempDir, "evidence-index.json"), evidenceJson, ct);

// Write verdict
var verdictJson = JsonSerializer.Serialize(pack.Verdict);
await File.WriteAllTextAsync(Path.Combine(tempDir, "verdict.json"), verdictJson, ct);

// Write SBOMs
var sbomsDir = Path.Combine(tempDir, "sboms");
Directory.CreateDirectory(sbomsDir);
foreach (var sbom in pack.Sboms)
{
await File.WriteAllTextAsync(
Path.Combine(sbomsDir, $"{sbom.Id}.json"),
sbom.Content,
ct);
}

// Write attestations
var attestationsDir = Path.Combine(tempDir, "attestations");
Directory.CreateDirectory(attestationsDir);
foreach (var att in pack.Attestations)
{
await File.WriteAllTextAsync(
Path.Combine(attestationsDir, $"{att.Id}.json"),
att.Envelope,
ct);
}

// Write VEX documents
if (pack.VexDocuments.Length > 0)
{
var vexDir = Path.Combine(tempDir, "vex");
Directory.CreateDirectory(vexDir);
foreach (var vex in pack.VexDocuments)
{
await File.WriteAllTextAsync(
Path.Combine(vexDir, $"{vex.Id}.json"),
vex.Content,
ct);
}
}

// Write trust roots
var certsDir = Path.Combine(tempDir, "trust-roots");
Directory.CreateDirectory(certsDir);
foreach (var root in pack.TrustRoots)
{
await File.WriteAllTextAsync(
Path.Combine(certsDir, $"{root.Id}.pem"),
root.Content,
ct);
}

// Create tar.gz archive
await CreateTarGzAsync(tempDir, outputPath, ct);

// Sign if requested
if (options.Sign && !string.IsNullOrEmpty(options.SigningKey))
{
await SignPackAsync(outputPath, options.SigningKey, ct);
}
}
finally
{
if (Directory.Exists(tempDir))
Directory.Delete(tempDir, recursive: true);
var signature = await SignManifestAsync(manifestBytes, options.SigningKey, ct);
entries.Add(new ArchiveEntry("manifest.sig", signature));
}

await ArchiveUtilities.WriteTarGzAsync(outputPath, entries, ct);
}

private static AuditPack WithDigest(AuditPack pack)
{
var json = JsonSerializer.Serialize(pack with { PackDigest = null, Signature = null });
var json = CanonicalJson.Serialize(pack with { PackDigest = null, Signature = null });
var digest = ComputeDigest(json);
return pack with { PackDigest = digest };
}

private static string ComputeDigest(string content)
private static string ComputeDigest(byte[] content)
{
var bytes = Encoding.UTF8.GetBytes(content);
var hash = SHA256.HashData(bytes);
var hash = SHA256.HashData(content);
return Convert.ToHexString(hash).ToLowerInvariant();
}

private static async Task CreateTarGzAsync(string sourceDir, string outputPath, CancellationToken ct)
{
var tarPath = outputPath.Replace(".tar.gz", ".tar");

// Create tar
await TarFile.CreateFromDirectoryAsync(sourceDir, tarPath, includeBaseDirectory: false, ct);

// Compress to tar.gz
using (var tarStream = File.OpenRead(tarPath))
using (var gzStream = File.Create(outputPath))
using (var gzip = new GZipStream(gzStream, CompressionLevel.Optimal))
{
await tarStream.CopyToAsync(gzip, ct);
}

// Clean up uncompressed tar after streams are closed.
File.Delete(tarPath);
}

private static Task<ImmutableArray<Attestation>> CollectAttestationsAsync(ScanResult scanResult, CancellationToken ct)
{
// TODO: Collect attestations from storage
@@ -217,11 +138,89 @@ public sealed class AuditPackBuilder : IAuditPackBuilder
return Task.FromResult(new BundleManifest("bundle-1", "1.0.0"));
}

private static Task SignPackAsync(string packPath, string signingKey, CancellationToken ct)
private static async Task<byte[]> SignManifestAsync(byte[] manifestBytes, string signingKey, CancellationToken ct)
{
// TODO: Sign pack with key
return Task.CompletedTask;
var signer = new AuditBundleSigner();
var result = await signer.SignAsync(
new AuditBundleSigningRequest
{
ManifestBytes = manifestBytes,
KeyFilePath = signingKey
},
ct);

if (!result.Success || result.Envelope is null)
{
throw new InvalidOperationException(result.Error ?? "Failed to sign audit pack manifest.");
}

return result.Envelope;
}

private static PackFileBuildResult BuildPackFiles(AuditPack pack)
{
var entries = new List<ArchiveEntry>();
var files = new List<PackFile>();

AddJsonEntry(entries, files, "run-manifest.json", pack.RunManifest, PackFileType.RunManifest);
AddJsonEntry(entries, files, "evidence-index.json", pack.EvidenceIndex, PackFileType.EvidenceIndex);
AddJsonEntry(entries, files, "verdict.json", pack.Verdict, PackFileType.Verdict);

foreach (var sbom in pack.Sboms)
{
AddTextEntry(entries, files, $"sboms/{sbom.Id}.json", sbom.Content, PackFileType.Sbom);
}

foreach (var attestation in pack.Attestations)
{
AddTextEntry(entries, files, $"attestations/{attestation.Id}.json", attestation.Envelope, PackFileType.Attestation);
}

foreach (var vex in pack.VexDocuments)
{
AddTextEntry(entries, files, $"vex/{vex.Id}.json", vex.Content, PackFileType.Vex);
}

foreach (var root in pack.TrustRoots)
{
AddTextEntry(entries, files, $"trust-roots/{root.Id}.pem", root.Content, PackFileType.TrustRoot);
}

var contents = new PackContents
{
Files = [.. files],
TotalSizeBytes = files.Sum(f => f.SizeBytes),
FileCount = files.Count
};

return new PackFileBuildResult(entries, contents);
}

private static void AddJsonEntry<T>(
List<ArchiveEntry> entries,
List<PackFile> files,
string path,
T payload,
PackFileType type)
{
var bytes = CanonicalJson.Serialize(payload);
entries.Add(new ArchiveEntry(path, bytes));
files.Add(new PackFile(path, ComputeDigest(bytes), bytes.Length, type));
}

private static void AddTextEntry(
List<ArchiveEntry> entries,
List<PackFile> files,
string path,
string content,
PackFileType type)
{
var bytes = Encoding.UTF8.GetBytes(content);
entries.Add(new ArchiveEntry(path, bytes));
files.Add(new PackFile(path, ComputeDigest(bytes), bytes.Length, type));
}

private sealed record PackFileBuildResult(List<ArchiveEntry> Entries, PackContents Contents);
}

public interface IAuditPackBuilder
@@ -5,7 +5,6 @@
// -----------------------------------------------------------------------------

using System.IO.Compression;
using System.Text;
using System.Text.Json;
using StellaOps.AuditPack.Models;

@@ -25,13 +24,19 @@ public sealed class AuditPackExportService : IAuditPackExportService

private readonly IAuditBundleWriter _bundleWriter;
private readonly IAuditPackRepository? _repository;
private readonly TimeProvider _timeProvider;
private readonly IAuditPackExportSigner? _dsseSigner;

public AuditPackExportService(
IAuditBundleWriter bundleWriter,
IAuditPackRepository? repository = null)
IAuditPackRepository? repository = null,
TimeProvider? timeProvider = null,
IAuditPackExportSigner? dsseSigner = null)
{
_bundleWriter = bundleWriter;
_repository = repository;
_timeProvider = timeProvider ?? TimeProvider.System;
_dsseSigner = dsseSigner;
}

/// <summary>
@@ -43,6 +48,13 @@ public sealed class AuditPackExportService : IAuditPackExportService
{
ArgumentNullException.ThrowIfNull(request);

_ = _bundleWriter;

if (_repository is null)
{
return ExportResult.Failed("Audit pack repository is required for export.");
}

return request.Format switch
{
ExportFormat.Zip => await ExportAsZipAsync(request, cancellationToken),
@@ -120,7 +132,7 @@ public sealed class AuditPackExportService : IAuditPackExportService
{
var exportDoc = new Dictionary<string, object>
{
["exportedAt"] = DateTimeOffset.UtcNow.ToString("O"),
["exportedAt"] = _timeProvider.GetUtcNow().ToString("O"),
["scanId"] = request.ScanId,
["format"] = "json",
["version"] = "1.0"
@@ -182,6 +194,11 @@ public sealed class AuditPackExportService : IAuditPackExportService
ExportRequest request,
CancellationToken ct)
{
if (_dsseSigner is null)
{
return ExportResult.Failed("DSSE export requires a signing provider.");
}

// First create the JSON payload
var jsonResult = await ExportAsJsonAsync(request, ct);
if (!jsonResult.Success)
@@ -191,11 +208,12 @@ public sealed class AuditPackExportService : IAuditPackExportService

// Create DSSE envelope structure
var payload = Convert.ToBase64String(jsonResult.Data!);
var signature = await _dsseSigner.SignAsync(jsonResult.Data!, ct);
var envelope = new DsseExportEnvelope
{
PayloadType = "application/vnd.stellaops.audit-pack+json",
Payload = payload,
Signatures = [] // Would be populated by actual signing in production
Signatures = [signature]
};

var envelopeBytes = JsonSerializer.SerializeToUtf8Bytes(envelope, JsonOptions);
@@ -210,11 +228,11 @@ public sealed class AuditPackExportService : IAuditPackExportService
};
}

private static ExportManifest CreateManifest(ExportRequest request)
private ExportManifest CreateManifest(ExportRequest request)
{
return new ExportManifest
{
ExportedAt = DateTimeOffset.UtcNow,
ExportedAt = _timeProvider.GetUtcNow(),
ScanId = request.ScanId,
FindingIds = request.FindingIds,
Format = request.Format.ToString(),
@@ -244,46 +262,25 @@ public sealed class AuditPackExportService : IAuditPackExportService
ExportSegment segment,
CancellationToken ct)
{
if (_repository is null)
{
// Return mock data for testing
return CreateMockSegmentData(segment);
}

return await _repository.GetSegmentDataAsync(scanId, segment, ct);
var repository = RequireRepository();
return await repository.GetSegmentDataAsync(scanId, segment, ct);
}

private async Task<List<object>> GetAttestationsAsync(string scanId, CancellationToken ct)
{
if (_repository is null)
{
return [];
}

var attestations = await _repository.GetAttestationsAsync(scanId, ct);
var repository = RequireRepository();
var attestations = await repository.GetAttestationsAsync(scanId, ct);
return [.. attestations];
}

private async Task<object?> GetProofChainAsync(string scanId, CancellationToken ct)
{
if (_repository is null)
{
return null;
}

return await _repository.GetProofChainAsync(scanId, ct);
var repository = RequireRepository();
return await repository.GetProofChainAsync(scanId, ct);
}

private static byte[] CreateMockSegmentData(ExportSegment segment)
{
var mockData = new Dictionary<string, object>
{
["segment"] = segment.ToString(),
["generatedAt"] = DateTimeOffset.UtcNow.ToString("O"),
["data"] = new { placeholder = true }
};
return JsonSerializer.SerializeToUtf8Bytes(mockData, JsonOptions);
}
private IAuditPackRepository RequireRepository()
    => _repository ?? throw new InvalidOperationException("Audit pack repository is required for export.");

private static async Task AddJsonToZipAsync<T>(
ZipArchive archive,
@@ -325,6 +322,14 @@ public interface IAuditPackRepository
Task<object?> GetProofChainAsync(string scanId, CancellationToken ct);
}

/// <summary>
/// DSSE signer for audit pack exports.
/// </summary>
public interface IAuditPackExportSigner
{
Task<DsseSignature> SignAsync(byte[] payload, CancellationToken ct);
}

#region Models

/// <summary>
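For context on what an IAuditPackExportSigner implementation is expected to sign: the DSSE specification signs a pre-authentication encoding (PAE) over the payload type and payload bytes rather than the raw payload. A minimal sketch of PAE, not code from this commit and not the signer interface itself:

using System.Text;

internal static class DssePae
{
    // PAE(type, body) = "DSSEv1" SP len(type) SP type SP len(body) SP body
    public static byte[] Encode(string payloadType, byte[] payload)
    {
        var typeBytes = Encoding.UTF8.GetBytes(payloadType);
        var header = Encoding.UTF8.GetBytes(
            $"DSSEv1 {typeBytes.Length} {payloadType} {payload.Length} ");
        return [.. header, .. payload];
    }
}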
17 src/__Libraries/StellaOps.AuditPack/Services/AuditPackIds.cs (Normal file)
@@ -0,0 +1,17 @@
namespace StellaOps.AuditPack.Services;

public interface IAuditPackIdGenerator
{
    string NewPackId();
    string NewBundleId();
    string NewAttestationId();
    string NewTempId();
}

public sealed class GuidAuditPackIdGenerator : IAuditPackIdGenerator
{
    public string NewPackId() => Guid.NewGuid().ToString();
    public string NewBundleId() => Guid.NewGuid().ToString("N");
    public string NewAttestationId() => Guid.NewGuid().ToString("N");
    public string NewTempId() => Guid.NewGuid().ToString("N");
}
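Since every generated ID flows through this interface, tests can swap in a counter-based generator to get stable pack, bundle, and attestation IDs. The implementation below is a hypothetical test double, not part of the commit:

// Sketch: deterministic IDs for golden-file and snapshot tests.
public sealed class DeterministicIds : IAuditPackIdGenerator
{
    private int _next;

    private string Next(string prefix) =>
        $"{prefix}-{Interlocked.Increment(ref _next):D4}";

    public string NewPackId() => Next("pack");
    public string NewBundleId() => Next("bundle");
    public string NewAttestationId() => Next("attestation");
    public string NewTempId() => Next("tmp");
}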
@@ -1,8 +1,6 @@
namespace StellaOps.AuditPack.Services;

using StellaOps.AuditPack.Models;
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text.Json;

@@ -11,6 +9,13 @@ using System.Text.Json;
/// </summary>
public sealed class AuditPackImporter : IAuditPackImporter
{
private readonly IAuditPackIdGenerator _idGenerator;

public AuditPackImporter(IAuditPackIdGenerator? idGenerator = null)
{
_idGenerator = idGenerator ?? new GuidAuditPackIdGenerator();
}

/// <summary>
/// Imports an audit pack from archive.
/// </summary>
@@ -20,12 +25,12 @@ public sealed class AuditPackImporter : IAuditPackImporter
CancellationToken ct = default)
{
var extractDir = options.ExtractDirectory ??
Path.Combine(Path.GetTempPath(), $"audit-pack-{Guid.NewGuid():N}");
Path.Combine(Path.GetTempPath(), $"audit-pack-{_idGenerator.NewTempId()}");

try
{
// Extract archive
await ExtractTarGzAsync(archivePath, extractDir, ct);
await ArchiveUtilities.ExtractTarGzAsync(archivePath, extractDir, overwriteFiles: true, ct);

// Load manifest
var manifestPath = Path.Combine(extractDir, "manifest.json");
@@ -34,7 +39,7 @@ public sealed class AuditPackImporter : IAuditPackImporter
return ImportResult.Failed("Manifest file not found");
}

var manifestJson = await File.ReadAllTextAsync(manifestPath, ct);
var manifestJson = await File.ReadAllBytesAsync(manifestPath, ct);
var pack = JsonSerializer.Deserialize<AuditPack>(manifestJson);

if (pack == null)
@@ -53,14 +58,14 @@ public sealed class AuditPackImporter : IAuditPackImporter
SignatureResult? signatureResult = null;
if (options.VerifySignatures)
{
signatureResult = await VerifySignaturesAsync(pack, extractDir, ct);
signatureResult = await VerifySignaturesAsync(manifestJson, pack, extractDir, ct);
if (!signatureResult.IsValid)
{
return ImportResult.Failed("Signature verification failed", signatureResult.Errors);
}
}

return new ImportResult
var result = new ImportResult
{
Success = true,
Pack = pack,
@@ -68,6 +73,14 @@ public sealed class AuditPackImporter : IAuditPackImporter
IntegrityResult = integrityResult,
SignatureResult = signatureResult
};

if (!options.KeepExtracted && options.ExtractDirectory is null)
{
Directory.Delete(extractDir, recursive: true);
result = result with { ExtractDirectory = null };
}

return result;
}
catch (Exception ex)
{
@@ -75,27 +88,6 @@ public sealed class AuditPackImporter : IAuditPackImporter
}
}

private static async Task ExtractTarGzAsync(string archivePath, string extractDir, CancellationToken ct)
{
Directory.CreateDirectory(extractDir);

var tarPath = archivePath.Replace(".tar.gz", ".tar");

// Decompress gz
using (var gzStream = File.OpenRead(archivePath))
using (var gzip = new GZipStream(gzStream, CompressionMode.Decompress))
using (var tarStream = File.Create(tarPath))
{
await gzip.CopyToAsync(tarStream, ct);
}

// Extract tar
await TarFile.ExtractToDirectoryAsync(tarPath, extractDir, overwriteFiles: true, ct);

// Clean up tar
File.Delete(tarPath);
}

private static async Task<IntegrityResult> VerifyIntegrityAsync(
AuditPack pack,
string extractDir,
@@ -136,27 +128,57 @@ public sealed class AuditPackImporter : IAuditPackImporter
}

private static async Task<SignatureResult> VerifySignaturesAsync(
byte[] manifestBytes,
AuditPack pack,
string extractDir,
CancellationToken ct)
{
var errors = new List<string>();

// Load signature
var signaturePath = Path.Combine(extractDir, "signature.sig");
var signaturePath = Path.Combine(extractDir, "manifest.sig");
if (!File.Exists(signaturePath))
{
return new SignatureResult(true, [], "No signature present");
}

var signature = await File.ReadAllTextAsync(signaturePath, ct);
var signature = await File.ReadAllBytesAsync(signaturePath, ct);
var trustRoots = pack.TrustRoots;

// Verify against trust roots
foreach (var root in pack.TrustRoots)
if (trustRoots.Length == 0)
{
// TODO: Implement actual signature verification
// For now, just check that trust root exists
if (!string.IsNullOrEmpty(root.Content))
errors.Add("No trust roots available for signature verification");
return new SignatureResult(false, errors);
}

foreach (var root in trustRoots)
{
if (string.IsNullOrWhiteSpace(root.Content))
{
continue;
}

using var publicKey = TryLoadPublicKey(root.Content);
if (publicKey is null)
{
continue;
}

var signer = new AuditBundleSigner();
var result = await signer.VerifyAsync(
new AuditBundleVerificationRequest
{
EnvelopeBytes = signature,
PublicKey = publicKey
},
ct);

if (!result.Success || result.VerifiedSignatures is null)
{
continue;
}

if (result.VerifiedSignatures.Any(s => s.Verified)
    && string.Equals(result.PayloadDigest, ComputeSha256(manifestBytes), StringComparison.Ordinal))
{
return new SignatureResult(true, [], $"Verified with {root.Id}");
}
@@ -168,10 +190,39 @@ public sealed class AuditPackImporter : IAuditPackImporter

private static string ComputePackDigest(AuditPack pack)
{
var json = JsonSerializer.Serialize(pack with { PackDigest = null, Signature = null });
var bytes = System.Text.Encoding.UTF8.GetBytes(json);
var hash = SHA256.HashData(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
var json = CanonicalJson.Serialize(pack with { PackDigest = null, Signature = null });
return Convert.ToHexString(SHA256.HashData(json)).ToLowerInvariant();
}

private static string ComputeSha256(byte[] content)
{
var hash = SHA256.HashData(content);
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}

private static AsymmetricAlgorithm? TryLoadPublicKey(string pem)
{
try
{
var ecdsa = ECDsa.Create();
ecdsa.ImportFromPem(pem);
return ecdsa;
}
catch
{
// ignored
}

try
{
var rsa = RSA.Create();
rsa.ImportFromPem(pem);
return rsa;
}
catch
{
return null;
}
}
}
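TryLoadPublicKey accepts either an ECDSA or an RSA public key in PEM form, so a round-trip test can generate a key pair, sign the manifest with the private half, and ship only the public half as a trust root. A rough sketch, assuming the trust-root record simply carries the PEM text in its Content property:

using System.Security.Cryptography;

// Sketch: produce a trust-root PEM that TryLoadPublicKey can import.
using var key = ECDsa.Create(ECCurve.NamedCurves.nistP256);
string publicPem = key.ExportSubjectPublicKeyInfoPem();   // goes into the pack's trust roots
string privatePem = key.ExportPkcs8PrivateKeyPem();       // handed to the signer under test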
@@ -54,16 +54,11 @@ public sealed class AuditPackReplayer : IAuditPackReplayer
RunManifest runManifest,
CancellationToken ct)
{
// TODO: Implement actual replay execution
// This would call the scanner with frozen time and offline bundle
await Task.CompletedTask;

return new ReplayResult
{
Success = true,
Verdict = new Verdict("replayed-verdict", "completed"),
VerdictDigest = "placeholder-digest",
DurationMs = 1000
Success = false,
Errors = ["Replay execution is not implemented."]
};
}
@@ -0,0 +1,61 @@
|
||||
using System.Text.Encodings.Web;
|
||||
using System.Text.Json;
|
||||
|
||||
namespace StellaOps.AuditPack.Services;
|
||||
|
||||
internal static class CanonicalJson
|
||||
{
|
||||
private static readonly JsonWriterOptions WriterOptions = new()
|
||||
{
|
||||
Indented = false,
|
||||
Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
|
||||
};
|
||||
|
||||
public static byte[] Serialize<T>(T value, JsonSerializerOptions? options = null)
|
||||
{
|
||||
var json = JsonSerializer.SerializeToUtf8Bytes(value, options ?? DefaultOptions);
|
||||
return Canonicalize(json);
|
||||
}
|
||||
|
||||
public static byte[] Canonicalize(ReadOnlySpan<byte> json)
|
||||
{
|
||||
using var doc = JsonDocument.Parse(json.ToArray());
|
||||
using var stream = new MemoryStream();
|
||||
using var writer = new Utf8JsonWriter(stream, WriterOptions);
|
||||
WriteElementSorted(doc.RootElement, writer);
|
||||
writer.Flush();
|
||||
return stream.ToArray();
|
||||
}
|
||||
|
||||
private static void WriteElementSorted(JsonElement element, Utf8JsonWriter writer)
|
||||
{
|
||||
switch (element.ValueKind)
|
||||
{
|
||||
case JsonValueKind.Object:
|
||||
writer.WriteStartObject();
|
||||
foreach (var property in element.EnumerateObject().OrderBy(p => p.Name, StringComparer.Ordinal))
|
||||
{
|
||||
writer.WritePropertyName(property.Name);
|
||||
WriteElementSorted(property.Value, writer);
|
||||
}
|
||||
writer.WriteEndObject();
|
||||
break;
|
||||
case JsonValueKind.Array:
|
||||
writer.WriteStartArray();
|
||||
foreach (var item in element.EnumerateArray())
|
||||
{
|
||||
WriteElementSorted(item, writer);
|
||||
}
|
||||
writer.WriteEndArray();
|
||||
break;
|
||||
default:
|
||||
element.WriteTo(writer);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
public static readonly JsonSerializerOptions DefaultOptions = new(JsonSerializerDefaults.Web)
|
||||
{
|
||||
WriteIndented = false
|
||||
};
|
||||
}
|
||||
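The canonicalizer sorts object properties with ordinal ordering at every depth, so two JSON documents that differ only in key order should reduce to the same bytes and therefore the same digest. A small illustration (the inputs are made up; the snippet assumes it runs inside the assembly, since CanonicalJson is internal):

using System.Text;

// Sketch: key order should not affect the canonical form.
var a = CanonicalJson.Canonicalize("{\"b\":1,\"a\":{\"y\":2,\"x\":3}}"u8);
var b = CanonicalJson.Canonicalize("{\"a\":{\"x\":3,\"y\":2},\"b\":1}"u8);
Console.WriteLine(Encoding.UTF8.GetString(a));   // {"a":{"x":3,"y":2},"b":1}
Console.WriteLine(a.AsSpan().SequenceEqual(b));   // True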
@@ -25,17 +25,24 @@ public sealed class IsolatedReplayContext : IIsolatedReplayContext, IDisposable

private readonly string _workingDirectory;
private readonly bool _cleanupOnDispose;
private readonly TimeProvider _timeProvider;
private bool _disposed;

/// <summary>
/// Creates a new isolated replay context.
/// </summary>
public IsolatedReplayContext(IsolatedReplayContextOptions options)
public IsolatedReplayContext(IsolatedReplayContextOptions options, TimeProvider? timeProvider = null)
{
ArgumentNullException.ThrowIfNull(options);

Options = options;
_cleanupOnDispose = options.CleanupOnDispose;
_timeProvider = timeProvider ?? TimeProvider.System;

if (options.EnforceOffline && IsNetworkPath(options.WorkingDirectory))
{
throw new InvalidOperationException("WorkingDirectory cannot be a network path when offline enforcement is enabled.");
}

// Create isolated working directory
_workingDirectory = options.WorkingDirectory
@@ -44,7 +51,7 @@ public sealed class IsolatedReplayContext : IIsolatedReplayContext, IDisposable

// Initialize context state
IsInitialized = false;
EvaluationTime = options.EvaluationTime ?? DateTimeOffset.UtcNow;
EvaluationTime = options.EvaluationTime ?? _timeProvider.GetUtcNow();
}

public IsolatedReplayContextOptions Options { get; }
@@ -237,6 +244,16 @@ public sealed class IsolatedReplayContext : IIsolatedReplayContext, IDisposable
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}

private static bool IsNetworkPath(string? path)
{
if (string.IsNullOrWhiteSpace(path))
{
return false;
}

return path.StartsWith("\\\\", StringComparison.Ordinal) || path.StartsWith("//", StringComparison.Ordinal);
}

public void Dispose()
{
if (_disposed) return;
@@ -30,10 +30,20 @@ public sealed class ReplayAttestationService : IReplayAttestationService
};

private readonly IReplayAttestationSigner? _signer;
private readonly IReplayAttestationSignatureVerifier? _verifier;
private readonly TimeProvider _timeProvider;
private readonly IAuditPackIdGenerator _idGenerator;

public ReplayAttestationService(IReplayAttestationSigner? signer = null)
public ReplayAttestationService(
IReplayAttestationSigner? signer = null,
IReplayAttestationSignatureVerifier? verifier = null,
TimeProvider? timeProvider = null,
IAuditPackIdGenerator? idGenerator = null)
{
_signer = signer;
_verifier = verifier;
_timeProvider = timeProvider ?? TimeProvider.System;
_idGenerator = idGenerator ?? new GuidAuditPackIdGenerator();
}

/// <summary>
@@ -51,7 +61,7 @@ public sealed class ReplayAttestationService : IReplayAttestationService
var statement = CreateInTotoStatement(manifest, replayResult);

// Serialize to canonical JSON
var statementBytes = JsonSerializer.SerializeToUtf8Bytes(statement, JsonOptions);
var statementBytes = CanonicalJson.Serialize(statement, JsonOptions);
var statementDigest = ComputeSha256Digest(statementBytes);

// Create DSSE envelope
@@ -59,9 +69,9 @@ public sealed class ReplayAttestationService : IReplayAttestationService

return new ReplayAttestation
{
AttestationId = Guid.NewGuid().ToString("N"),
AttestationId = _idGenerator.NewAttestationId(),
ManifestId = manifest.BundleId,
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = _timeProvider.GetUtcNow(),
Statement = statement,
StatementDigest = statementDigest,
Envelope = envelope,
@@ -73,7 +83,7 @@ public sealed class ReplayAttestationService : IReplayAttestationService
/// <summary>
/// Verifies a replay attestation's integrity.
/// </summary>
public Task<AttestationVerificationResult> VerifyAsync(
public async Task<AttestationVerificationResult> VerifyAsync(
ReplayAttestation attestation,
CancellationToken cancellationToken = default)
{
@@ -82,7 +92,7 @@ public sealed class ReplayAttestationService : IReplayAttestationService
var errors = new List<string>();

// Verify statement digest
var statementBytes = JsonSerializer.SerializeToUtf8Bytes(attestation.Statement, JsonOptions);
var statementBytes = CanonicalJson.Serialize(attestation.Statement, JsonOptions);
var computedDigest = ComputeSha256Digest(statementBytes);

if (computedDigest != attestation.StatementDigest)
@@ -109,16 +119,36 @@ public sealed class ReplayAttestationService : IReplayAttestationService
}
}

// Verify signatures if signer is available
var signatureValid = attestation.Envelope?.Signatures.Count > 0;
var signatureVerified = false;
if (attestation.Envelope is not null)
{
if (attestation.Envelope.Signatures.Count == 0)
{
errors.Add("Envelope contains no signatures");
}
else if (_verifier is null)
{
errors.Add("Signature verifier is not configured");
}
else
{
var payloadBytes = Convert.FromBase64String(attestation.Envelope.Payload);
var verification = await _verifier.VerifyAsync(attestation.Envelope, payloadBytes, cancellationToken);
signatureVerified = verification.Verified;
if (!verification.Verified)
{
errors.Add(verification.Error ?? "Signature verification failed");
}
}
}

return Task.FromResult(new AttestationVerificationResult
return new AttestationVerificationResult
{
IsValid = errors.Count == 0,
Errors = [.. errors],
SignatureVerified = signatureValid,
VerifiedAt = DateTimeOffset.UtcNow
});
SignatureVerified = signatureVerified,
VerifiedAt = _timeProvider.GetUtcNow()
};
}

/// <summary>
@@ -180,7 +210,7 @@ public sealed class ReplayAttestationService : IReplayAttestationService
Message = d.Message
}).ToList(),
EvaluatedAt = replayResult.EvaluatedAt,
ReplayedAt = DateTimeOffset.UtcNow,
ReplayedAt = _timeProvider.GetUtcNow(),
DurationMs = replayResult.DurationMs
}
};
@@ -253,6 +283,17 @@ public interface IReplayAttestationSigner
Task<DsseSignatureResult> SignAsync(byte[] payload, CancellationToken cancellationToken = default);
}

/// <summary>
/// Interface for verifying replay attestation signatures.
/// </summary>
public interface IReplayAttestationSignatureVerifier
{
Task<ReplayAttestationSignatureVerification> VerifyAsync(
ReplayDsseEnvelope envelope,
byte[] payload,
CancellationToken cancellationToken = default);
}

#region Models

/// <summary>
@@ -406,6 +447,15 @@ public sealed record DsseSignatureResult
public string? Algorithm { get; init; }
}

/// <summary>
/// Result of signature verification.
/// </summary>
public sealed record ReplayAttestationSignatureVerification
{
public bool Verified { get; init; }
public string? Error { get; init; }
}

/// <summary>
/// Result of attestation verification.
/// </summary>
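Because signature checks are now delegated to IReplayAttestationSignatureVerifier, tests that only exercise the digest validation can plug in a trivial verifier. A hypothetical stub; it assumes nothing about ReplayDsseEnvelope beyond the Signatures collection used by the service above:

// Sketch: a permissive verifier for unit tests that cover digest validation only.
internal sealed class AcceptAllVerifier : IReplayAttestationSignatureVerifier
{
    public Task<ReplayAttestationSignatureVerification> VerifyAsync(
        ReplayDsseEnvelope envelope,
        byte[] payload,
        CancellationToken cancellationToken = default)
        => Task.FromResult(new ReplayAttestationSignatureVerification
        {
            Verified = envelope.Signatures.Count > 0,
            Error = envelope.Signatures.Count > 0 ? null : "Envelope contains no signatures"
        });
}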
@@ -5,9 +5,6 @@
// Description: Fetches scan data and snapshots required for audit bundle creation.
// -----------------------------------------------------------------------------

using System.Text;
using System.Text.Json;

namespace StellaOps.AuditPack.Services;

/// <summary>
@@ -15,24 +12,21 @@ namespace StellaOps.AuditPack.Services;
/// </summary>
public sealed class ScanSnapshotFetcher : IScanSnapshotFetcher
{
private static readonly JsonSerializerOptions JsonOptions = new()
{
WriteIndented = true,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};

private readonly IScanDataProvider? _scanDataProvider;
private readonly IFeedSnapshotProvider? _feedProvider;
private readonly IPolicySnapshotProvider? _policyProvider;
private readonly IVexSnapshotProvider? _vexProvider;

public ScanSnapshotFetcher(
IScanDataProvider? scanDataProvider = null,
IFeedSnapshotProvider? feedProvider = null,
IPolicySnapshotProvider? policyProvider = null)
IPolicySnapshotProvider? policyProvider = null,
IVexSnapshotProvider? vexProvider = null)
{
_scanDataProvider = scanDataProvider;
_feedProvider = feedProvider;
_policyProvider = policyProvider;
_vexProvider = vexProvider;
}

/// <summary>
@@ -81,6 +75,10 @@ public sealed class ScanSnapshotFetcher : IScanSnapshotFetcher
if (request.IncludeVex)
{
vexData = await FetchVexSnapshotAsync(request.ScanId, cancellationToken);
if (!vexData.Success)
{
return ScanSnapshotResult.Failed($"Failed to fetch VEX: {vexData.Error}");
}
}

return new ScanSnapshotResult
@@ -115,30 +113,11 @@ public sealed class ScanSnapshotFetcher : IScanSnapshotFetcher
return await _scanDataProvider.GetScanDataAsync(scanId, ct);
}

// Default implementation - return placeholder data
// In production, this would fetch from Scanner service
return new ScanData
{
Success = true,
Success = false,
ScanId = scanId,
ImageRef = $"scan-image-{scanId}",
ImageDigest = $"sha256:{scanId}",
Sbom = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(new
{
bomFormat = "CycloneDX",
specVersion = "1.6",
version = 1,
metadata = new { timestamp = DateTimeOffset.UtcNow },
components = Array.Empty<object>()
}, JsonOptions)),
Verdict = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(new
{
scanId,
decision = "pass",
evaluatedAt = DateTimeOffset.UtcNow
}, JsonOptions)),
Decision = "pass",
EvaluatedAt = DateTimeOffset.UtcNow
Error = "Scan data provider is not configured."
};
}

@@ -152,23 +131,10 @@ public sealed class ScanSnapshotFetcher : IScanSnapshotFetcher
return await _feedProvider.GetFeedSnapshotAsync(scanId, asOf, ct);
}

// Default implementation - return placeholder feeds
// In production, this would fetch from Concelier
var snapshotAt = asOf ?? DateTimeOffset.UtcNow;
var feeds = new StringBuilder();
feeds.AppendLine(JsonSerializer.Serialize(new
{
type = "advisory-feed-snapshot",
snapshotAt,
feedId = "nvd",
recordCount = 0
}));

return new FeedSnapshotData
{
Success = true,
Snapshot = Encoding.UTF8.GetBytes(feeds.ToString()),
SnapshotAt = snapshotAt
Success = false,
Error = "Feed snapshot provider is not configured."
};
}

@@ -182,47 +148,26 @@ public sealed class ScanSnapshotFetcher : IScanSnapshotFetcher
return await _policyProvider.GetPolicySnapshotAsync(scanId, version, ct);
}

// Default implementation - return placeholder policy bundle
// In production, this would fetch from Policy service
return new PolicySnapshotData
{
Success = true,
Bundle = CreatePlaceholderPolicyBundle(),
Version = version ?? "1.0.0"
Success = false,
Error = "Policy snapshot provider is not configured."
};
}

private async Task<VexSnapshotData> FetchVexSnapshotAsync(string scanId, CancellationToken ct)
{
// Default implementation - return empty VEX
if (_vexProvider is not null)
{
return await _vexProvider.GetVexSnapshotAsync(scanId, ct);
}

return await Task.FromResult(new VexSnapshotData
{
Success = true,
Statements = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(new
{
type = "https://openvex.dev/ns/v0.2.0",
statements = Array.Empty<object>()
}, JsonOptions))
Success = false,
Error = "VEX snapshot provider is not configured."
});
}

private static byte[] CreatePlaceholderPolicyBundle()
{
// Create a minimal tar.gz bundle
using var ms = new MemoryStream();
using (var gzip = new System.IO.Compression.GZipStream(ms, System.IO.Compression.CompressionLevel.Optimal, leaveOpen: true))
using (var writer = new BinaryWriter(gzip))
{
// Write minimal tar header for empty bundle
var header = new byte[512];
var name = "policy/empty.rego"u8;
name.CopyTo(header);
header[156] = (byte)'0'; // Regular file
writer.Write(header);
writer.Write(new byte[512]); // End of archive marker
}
return ms.ToArray();
}
}

/// <summary>
@@ -259,6 +204,14 @@ public interface IPolicySnapshotProvider
Task<PolicySnapshotData> GetPolicySnapshotAsync(string scanId, string? version, CancellationToken ct);
}

/// <summary>
/// Provider interface for VEX snapshots.
/// </summary>
public interface IVexSnapshotProvider
{
Task<VexSnapshotData> GetVexSnapshotAsync(string scanId, CancellationToken ct);
}

#region Request and Result Models

/// <summary>
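With the new IVexSnapshotProvider hook, an in-memory provider is enough to drive the VEX path in tests. A hypothetical example; only the interface and the Success/Statements members of VexSnapshotData are taken from the code above:

using System.Text;

// Sketch: a fixed-content VEX provider for exercising FetchVexSnapshotAsync.
internal sealed class InMemoryVexSnapshotProvider : IVexSnapshotProvider
{
    public Task<VexSnapshotData> GetVexSnapshotAsync(string scanId, CancellationToken ct)
        => Task.FromResult(new VexSnapshotData
        {
            Success = true,
            Statements = Encoding.UTF8.GetBytes("{\"statements\":[]}")
        });
}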
@@ -5,6 +5,7 @@
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>

  <ItemGroup>
@@ -7,4 +7,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0075-M | DONE | Maintainability audit for StellaOps.AuditPack. |
| AUDIT-0075-T | DONE | Test coverage audit for StellaOps.AuditPack. |
| AUDIT-0075-A | TODO | Pending approval for changes. |
| AUDIT-0075-A | DONE | Deterministic archive/export + signature verification + tests. |