save progress

This commit is contained in:
StellaOps Bot
2026-01-02 21:06:27 +02:00
parent f46bde5575
commit 3f197814c5
441 changed files with 21545 additions and 4306 deletions

View File

@@ -4,15 +4,21 @@ Deterministic replay token generation used to make triage decisions and scoring
## Token format
`replay:v<version>:<algorithm>:<sha256_hex>`
v1 (no expiration):
`replay:v1.0:<algorithm>:<sha256_hex>`
Example:
`replay:v1.0:SHA-256:0123abcd...`
v2 (includes expiration):
`replay:v2.0:<algorithm>:<sha256_hex>:<expires_unix_seconds>`
## Usage
- Create a `ReplayTokenRequest` with feed/rules/policy/input digests.
- Call `IReplayTokenGenerator.Generate(request)` to get a stable token value.
- Store the tokens `Canonical` string alongside immutable decision events.
- Store the token's `Canonical` string alongside immutable decision events.
- `ReplayToken.Parse` uses `DateTimeOffset.UnixEpoch` for `GeneratedAt` because the canonical format does not include generation time.

View File

@@ -22,21 +22,25 @@ public sealed class ReplayCliSnippetGenerator
"stellaops",
"replay",
"decision",
$"--token {token.Value}",
$"--alert-id {alertId}"
"--token",
QuoteArgument(token.Value),
"--alert-id",
QuoteArgument(alertId)
};
if (!string.IsNullOrWhiteSpace(feedManifestUri))
{
parts.Add($"--feed-manifest {feedManifestUri.Trim()}");
parts.Add("--feed-manifest");
parts.Add(QuoteArgument(feedManifestUri.Trim()));
}
if (!string.IsNullOrWhiteSpace(policyVersion))
{
parts.Add($"--policy-version {policyVersion.Trim()}");
parts.Add("--policy-version");
parts.Add(QuoteArgument(policyVersion.Trim()));
}
return string.Join(" \\\n+ ", parts);
return string.Join(" \\\n ", parts);
}
/// <summary>
@@ -55,15 +59,28 @@ public sealed class ReplayCliSnippetGenerator
"stellaops",
"replay",
"scoring",
$"--token {token.Value}",
$"--subject {subjectKey}"
"--token",
QuoteArgument(token.Value),
"--subject",
QuoteArgument(subjectKey)
};
if (!string.IsNullOrWhiteSpace(configVersion))
{
parts.Add($"--config-version {configVersion.Trim()}");
parts.Add("--config-version");
parts.Add(QuoteArgument(configVersion.Trim()));
}
return string.Join(" \\\n+ ", parts);
return string.Join(" \\\n ", parts);
}
/// <summary>
/// Wraps <paramref name="value"/> in single quotes for safe inclusion in a POSIX
/// shell snippet, escaping embedded single quotes with the '"'"' idiom.
/// Returns a pair of empty quotes for a null/empty value.
/// </summary>
private static string QuoteArgument(string value)
{
    if (string.IsNullOrEmpty(value))
    {
        return "''";
    }

    var escaped = value.Replace("'", "'\"'\"'", StringComparison.Ordinal);
    return string.Concat("'", escaped, "'");
}
}

View File

@@ -122,6 +122,7 @@ public sealed class ReplayToken : IEquatable<ReplayToken>
/// <summary>
/// Parse a canonical token string.
/// Supports both v1.0 format (4 parts) and v2.0 format with expiration (5 parts).
/// GeneratedAt is set to UnixEpoch because the canonical format does not include it.
/// </summary>
public static ReplayToken Parse(string canonical)
{

View File

@@ -29,8 +29,7 @@ public sealed class Sha256ReplayTokenGenerator : IReplayTokenGenerator
{
ArgumentNullException.ThrowIfNull(request);
var canonical = Canonicalize(request);
var hashHex = ComputeHash(canonical);
var hashHex = ComputeTokenValue(request, ReplayToken.DefaultVersion);
return new ReplayToken(hashHex, _timeProvider.GetUtcNow());
}
@@ -39,11 +38,16 @@ public sealed class Sha256ReplayTokenGenerator : IReplayTokenGenerator
{
ArgumentNullException.ThrowIfNull(request);
var canonical = Canonicalize(request);
var hashHex = ComputeHash(canonical);
var effectiveExpiration = expiration ?? ReplayToken.DefaultExpiration;
if (effectiveExpiration <= TimeSpan.Zero)
{
throw new ArgumentOutOfRangeException(nameof(expiration), "Expiration must be positive.");
}
var hashHex = ComputeTokenValue(request, ReplayToken.VersionWithExpiration);
var now = _timeProvider.GetUtcNow();
var expiresAt = now + (expiration ?? ReplayToken.DefaultExpiration);
var expiresAt = now + effectiveExpiration;
return new ReplayToken(hashHex, now, expiresAt, ReplayToken.DefaultAlgorithm, ReplayToken.VersionWithExpiration);
}
@@ -53,8 +57,8 @@ public sealed class Sha256ReplayTokenGenerator : IReplayTokenGenerator
ArgumentNullException.ThrowIfNull(token);
ArgumentNullException.ThrowIfNull(request);
var computed = Generate(request);
return string.Equals(token.Value, computed.Value, StringComparison.OrdinalIgnoreCase);
var computed = ComputeTokenValue(request, token.Version);
return string.Equals(token.Value, computed, StringComparison.OrdinalIgnoreCase);
}
public ReplayTokenVerificationResult VerifyWithExpiration(ReplayToken token, ReplayTokenRequest request)
@@ -63,8 +67,8 @@ public sealed class Sha256ReplayTokenGenerator : IReplayTokenGenerator
ArgumentNullException.ThrowIfNull(request);
// Check hash first
var computed = Generate(request);
if (!string.Equals(token.Value, computed.Value, StringComparison.OrdinalIgnoreCase))
var computed = ComputeTokenValue(request, token.Version);
if (!string.Equals(token.Value, computed, StringComparison.OrdinalIgnoreCase))
{
return ReplayTokenVerificationResult.Invalid;
}
@@ -84,6 +88,12 @@ public sealed class Sha256ReplayTokenGenerator : IReplayTokenGenerator
return _cryptoHash.ComputeHashHex(bytes, HashAlgorithms.Sha256);
}
/// <summary>
/// Produces the token hash for <paramref name="request"/> by canonicalizing it
/// under the given format <paramref name="version"/> and hashing the result.
/// </summary>
private string ComputeTokenValue(ReplayTokenRequest request, string version)
    => ComputeHash(Canonicalize(request, version));
private static string? NormalizeValue(string? value)
{
if (string.IsNullOrWhiteSpace(value))
@@ -117,23 +127,40 @@ public sealed class Sha256ReplayTokenGenerator : IReplayTokenGenerator
return new Dictionary<string, string>();
}
var normalized = values
.Where(static kvp => !string.IsNullOrWhiteSpace(kvp.Key))
.Select(static kvp => new KeyValuePair<string, string>(kvp.Key.Trim(), kvp.Value?.Trim() ?? string.Empty))
var normalized = new List<KeyValuePair<string, string>>(values.Count);
var seen = new HashSet<string>(StringComparer.Ordinal);
foreach (var kvp in values)
{
if (string.IsNullOrWhiteSpace(kvp.Key))
{
continue;
}
var key = kvp.Key.Trim();
if (!seen.Add(key))
{
throw new ArgumentException($"AdditionalContext contains duplicate key after normalization: '{key}'.", nameof(values));
}
normalized.Add(new KeyValuePair<string, string>(key, kvp.Value?.Trim() ?? string.Empty));
}
var ordered = normalized
.OrderBy(static kvp => kvp.Key, StringComparer.Ordinal)
.ToDictionary(static kvp => kvp.Key, static kvp => kvp.Value, StringComparer.Ordinal);
return normalized;
return ordered;
}
/// <summary>
/// Produces deterministic canonical representation of inputs.
/// </summary>
private static string Canonicalize(ReplayTokenRequest request)
private static string Canonicalize(ReplayTokenRequest request, string version)
{
var canonical = new CanonicalReplayInput
{
Version = ReplayToken.DefaultVersion,
Version = version,
FeedManifests = NormalizeSortedList(request.FeedManifests),
RulesVersion = NormalizeValue(request.RulesVersion),
RulesHash = NormalizeValue(request.RulesHash),

View File

@@ -5,7 +5,7 @@
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<RootNamespace>StellaOps.Audit.ReplayToken</RootNamespace>
<Description>Deterministic replay token generation for audit and reproducibility</Description>
</PropertyGroup>

View File

@@ -7,4 +7,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0073-M | DONE | Maintainability audit for StellaOps.Audit.ReplayToken. |
| AUDIT-0073-T | DONE | Test coverage audit for StellaOps.Audit.ReplayToken. |
| AUDIT-0073-A | TODO | Pending approval for changes. |
| AUDIT-0073-A | DONE | Applied library changes + coverage updates. |

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.AuditPack.Tests")]

View File

@@ -0,0 +1,146 @@
using System.Buffers.Binary;
using System.Formats.Tar;
using System.IO.Compression;
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Deterministic tar.gz helpers shared by the audit pack/bundle writers: entries are
/// sorted ordinally by path, timestamps/uid/gid are pinned, and the gzip header is
/// normalized so identical inputs always produce byte-identical archives.
/// </summary>
internal static class ArchiveUtilities
{
    // All tar entries and the gzip MTIME use this fixed timestamp for reproducibility.
    internal static readonly DateTimeOffset FixedTimestamp = DateTimeOffset.UnixEpoch;

    // rw-r--r-- : deterministic default when an entry does not specify a mode.
    private const UnixFileMode DefaultFileMode = UnixFileMode.UserRead | UnixFileMode.UserWrite | UnixFileMode.GroupRead | UnixFileMode.OtherRead;

    /// <summary>
    /// Writes <paramref name="entries"/> (sorted ordinally by path) to a deterministic
    /// PAX tar.gz archive at <paramref name="outputPath"/>, creating the parent directory
    /// if needed, then normalizes the gzip header for byte-identical output.
    /// </summary>
    public static async Task WriteTarGzAsync(
        string outputPath,
        IReadOnlyList<ArchiveEntry> entries,
        CancellationToken ct)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(outputPath);
        ArgumentNullException.ThrowIfNull(entries);

        var outputDir = Path.GetDirectoryName(outputPath);
        if (!string.IsNullOrWhiteSpace(outputDir))
        {
            Directory.CreateDirectory(outputDir);
        }

        await using (var fileStream = File.Create(outputPath))
        await using (var gzip = new GZipStream(fileStream, CompressionLevel.Optimal, leaveOpen: true))
        await using (var tarWriter = new TarWriter(gzip, TarEntryFormat.Pax, leaveOpen: true))
        {
            // Ordinal sort makes entry order independent of caller ordering and culture.
            foreach (var entry in entries.OrderBy(static e => e.Path, StringComparer.Ordinal))
            {
                ct.ThrowIfCancellationRequested();
                var tarEntry = new PaxTarEntry(TarEntryType.RegularFile, entry.Path)
                {
                    Mode = entry.Mode ?? DefaultFileMode,
                    ModificationTime = FixedTimestamp,
                    Uid = 0,
                    Gid = 0,
                    UserName = string.Empty,
                    GroupName = string.Empty
                };
                tarEntry.DataStream = new MemoryStream(entry.Content, writable: false);
                // Fix: use the async overload so the cancellation token is honored mid-write.
                await tarWriter.WriteEntryAsync(tarEntry, ct).ConfigureAwait(false);
            }
        }

        ApplyDeterministicGzipHeader(outputPath, FixedTimestamp);
    }

    /// <summary>
    /// Extracts regular-file entries from a tar.gz archive into <paramref name="targetDir"/>,
    /// rejecting entries that are rooted, contain traversal segments, or resolve outside
    /// the target directory. Non-regular entries (dirs, links) are skipped.
    /// </summary>
    /// <exception cref="IOException">A target file exists and <paramref name="overwriteFiles"/> is false.</exception>
    /// <exception cref="InvalidOperationException">An entry name is unsafe.</exception>
    public static async Task ExtractTarGzAsync(
        string archivePath,
        string targetDir,
        bool overwriteFiles,
        CancellationToken ct)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(archivePath);
        ArgumentException.ThrowIfNullOrWhiteSpace(targetDir);

        Directory.CreateDirectory(targetDir);
        var fullTarget = Path.GetFullPath(targetDir);
        // Fix: compare against the root *with* a trailing separator. A bare prefix check
        // would accept a sibling directory such as "/target-evil" for root "/target".
        var rootPrefix = fullTarget.EndsWith(Path.DirectorySeparatorChar)
            ? fullTarget
            : fullTarget + Path.DirectorySeparatorChar;

        await using var fileStream = File.OpenRead(archivePath);
        await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
        using var tarReader = new TarReader(gzipStream, leaveOpen: false);

        TarEntry? entry;
        while ((entry = await tarReader.GetNextEntryAsync(cancellationToken: ct).ConfigureAwait(false)) is not null)
        {
            ct.ThrowIfCancellationRequested();
            if (entry.EntryType != TarEntryType.RegularFile || entry.DataStream is null)
            {
                continue;
            }

            var safePath = NormalizeTarEntryPath(entry.Name);
            var destinationPath = Path.GetFullPath(Path.Combine(fullTarget, safePath));
            if (!destinationPath.StartsWith(rootPrefix, StringComparison.Ordinal))
            {
                throw new InvalidOperationException($"Tar entry '{entry.Name}' escapes the target directory.");
            }

            var destinationDir = Path.GetDirectoryName(destinationPath);
            if (!string.IsNullOrWhiteSpace(destinationDir))
            {
                Directory.CreateDirectory(destinationDir);
            }

            if (File.Exists(destinationPath) && !overwriteFiles)
            {
                throw new IOException($"Target file already exists: {destinationPath}");
            }

            await using var outputStream = File.Create(destinationPath);
            await entry.DataStream.CopyToAsync(outputStream, ct).ConfigureAwait(false);
        }
    }

    /// <summary>
    /// Normalizes a tar entry name to forward slashes and validates that it is relative
    /// and contains no "." or ".." segments; throws <see cref="InvalidOperationException"/> otherwise.
    /// </summary>
    private static string NormalizeTarEntryPath(string entryName)
    {
        if (string.IsNullOrWhiteSpace(entryName))
        {
            throw new InvalidOperationException("Tar entry name is empty.");
        }

        var normalized = entryName.Replace('\\', '/');
        if (normalized.StartsWith("/", StringComparison.Ordinal))
        {
            normalized = normalized.TrimStart('/');
        }

        // Catches Windows-style roots (e.g. "C:\") that survive the slash normalization.
        if (Path.IsPathRooted(normalized))
        {
            throw new InvalidOperationException($"Tar entry '{entryName}' is rooted.");
        }

        foreach (var segment in normalized.Split('/', StringSplitOptions.RemoveEmptyEntries))
        {
            if (segment == "." || segment == "..")
            {
                throw new InvalidOperationException($"Tar entry '{entryName}' contains parent traversal.");
            }
        }

        return normalized;
    }

    /// <summary>
    /// Rewrites the header of a finished gzip file so the MTIME field (bytes 4-7,
    /// little-endian per RFC 1952) holds the fixed timestamp instead of the write time,
    /// and pins the OS byte (offset 9) so output does not vary across platforms.
    /// </summary>
    private static void ApplyDeterministicGzipHeader(string outputPath, DateTimeOffset timestamp)
    {
        using var stream = new FileStream(outputPath, FileMode.Open, FileAccess.ReadWrite, FileShare.Read);
        if (stream.Length < 10)
        {
            throw new InvalidOperationException("GZip header not fully written for archive.");
        }

        var seconds = checked((int)(timestamp - DateTimeOffset.UnixEpoch).TotalSeconds);
        Span<byte> buffer = stackalloc byte[4];
        BinaryPrimitives.WriteInt32LittleEndian(buffer, seconds);
        stream.Position = 4;
        stream.Write(buffer);

        // Fix: 0xFF = "unknown" OS per RFC 1952. The value GZipStream writes here can
        // differ between platforms, which would break byte-identical output.
        stream.Position = 9;
        stream.WriteByte(0xFF);
    }
}

/// <summary>A single file to embed in an archive; a null Mode falls back to rw-r--r--.</summary>
internal sealed record ArchiveEntry(string Path, byte[] Content, UnixFileMode? Mode = null);

View File

@@ -6,8 +6,6 @@
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -197,12 +195,8 @@ public sealed class AuditBundleReader : IAuditBundleReader
}
}
private static async Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct)
{
await using var fileStream = File.OpenRead(bundlePath);
await using var gzipStream = new GZipStream(fileStream, CompressionMode.Decompress);
await TarFile.ExtractToDirectoryAsync(gzipStream, targetDir, overwriteFiles: true, ct);
}
// Thin passthrough to the shared deterministic archive helper; always overwrites
// existing files in the target directory.
private static Task ExtractBundleAsync(string bundlePath, string targetDir, CancellationToken ct)
=> ArchiveUtilities.ExtractTarGzAsync(bundlePath, targetDir, overwriteFiles: true, ct);
private static async Task<string> ComputeFileDigestAsync(string filePath, CancellationToken ct)
{

View File

@@ -5,9 +5,6 @@
// Description: Writes self-contained audit bundles for offline replay.
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -20,12 +17,21 @@ namespace StellaOps.AuditPack.Services;
/// </summary>
public sealed class AuditBundleWriter : IAuditBundleWriter
{
private readonly TimeProvider _timeProvider;
private readonly IAuditPackIdGenerator _idGenerator;
private static readonly JsonSerializerOptions JsonOptions = new()
{
WriteIndented = true,
WriteIndented = false,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
public AuditBundleWriter(TimeProvider? timeProvider = null, IAuditPackIdGenerator? idGenerator = null)
{
_timeProvider = timeProvider ?? TimeProvider.System;
_idGenerator = idGenerator ?? new GuidAuditPackIdGenerator();
}
/// <summary>
/// Creates an audit bundle from the specified inputs.
/// </summary>
@@ -36,20 +42,16 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
ArgumentNullException.ThrowIfNull(request);
ArgumentException.ThrowIfNullOrWhiteSpace(request.OutputPath);
var tempDir = Path.Combine(Path.GetTempPath(), $"audit-bundle-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);
try
{
var entries = new List<BundleEntry>();
var files = new List<BundleFileEntry>();
var archiveEntries = new List<ArchiveEntry>();
// Write SBOM
string sbomDigest;
if (request.Sbom is not null)
{
var sbomPath = Path.Combine(tempDir, "sbom.json");
await File.WriteAllBytesAsync(sbomPath, request.Sbom, cancellationToken);
sbomDigest = ComputeSha256(request.Sbom);
entries.Add(new BundleEntry("sbom.json", sbomDigest, request.Sbom.Length));
files.Add(new BundleFileEntry
@@ -59,6 +61,7 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.Sbom.Length,
ContentType = BundleContentType.Sbom
});
archiveEntries.Add(new ArchiveEntry("sbom.json", request.Sbom));
}
else
{
@@ -69,10 +72,6 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
string feedsDigest;
if (request.FeedsSnapshot is not null)
{
var feedsDir = Path.Combine(tempDir, "feeds");
Directory.CreateDirectory(feedsDir);
var feedsPath = Path.Combine(feedsDir, "feeds-snapshot.ndjson");
await File.WriteAllBytesAsync(feedsPath, request.FeedsSnapshot, cancellationToken);
feedsDigest = ComputeSha256(request.FeedsSnapshot);
entries.Add(new BundleEntry("feeds/feeds-snapshot.ndjson", feedsDigest, request.FeedsSnapshot.Length));
files.Add(new BundleFileEntry
@@ -82,6 +81,7 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.FeedsSnapshot.Length,
ContentType = BundleContentType.Feeds
});
archiveEntries.Add(new ArchiveEntry("feeds/feeds-snapshot.ndjson", request.FeedsSnapshot));
}
else
{
@@ -92,10 +92,6 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
string policyDigest;
if (request.PolicyBundle is not null)
{
var policyDir = Path.Combine(tempDir, "policy");
Directory.CreateDirectory(policyDir);
var policyPath = Path.Combine(policyDir, "policy-bundle.tar.gz");
await File.WriteAllBytesAsync(policyPath, request.PolicyBundle, cancellationToken);
policyDigest = ComputeSha256(request.PolicyBundle);
entries.Add(new BundleEntry("policy/policy-bundle.tar.gz", policyDigest, request.PolicyBundle.Length));
files.Add(new BundleFileEntry
@@ -105,6 +101,7 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.PolicyBundle.Length,
ContentType = BundleContentType.Policy
});
archiveEntries.Add(new ArchiveEntry("policy/policy-bundle.tar.gz", request.PolicyBundle));
}
else
{
@@ -115,10 +112,6 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
string? vexDigest = null;
if (request.VexStatements is not null)
{
var vexDir = Path.Combine(tempDir, "vex");
Directory.CreateDirectory(vexDir);
var vexPath = Path.Combine(vexDir, "vex-statements.json");
await File.WriteAllBytesAsync(vexPath, request.VexStatements, cancellationToken);
vexDigest = ComputeSha256(request.VexStatements);
entries.Add(new BundleEntry("vex/vex-statements.json", vexDigest, request.VexStatements.Length));
files.Add(new BundleFileEntry
@@ -128,14 +121,13 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.VexStatements.Length,
ContentType = BundleContentType.Vex
});
archiveEntries.Add(new ArchiveEntry("vex/vex-statements.json", request.VexStatements));
}
// Write verdict
string verdictDigest;
if (request.Verdict is not null)
{
var verdictPath = Path.Combine(tempDir, "verdict.json");
await File.WriteAllBytesAsync(verdictPath, request.Verdict, cancellationToken);
verdictDigest = ComputeSha256(request.Verdict);
entries.Add(new BundleEntry("verdict.json", verdictDigest, request.Verdict.Length));
files.Add(new BundleFileEntry
@@ -145,6 +137,7 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.Verdict.Length,
ContentType = BundleContentType.Verdict
});
archiveEntries.Add(new ArchiveEntry("verdict.json", request.Verdict));
}
else
{
@@ -154,10 +147,6 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
// Write proof bundle (optional)
if (request.ProofBundle is not null)
{
var proofDir = Path.Combine(tempDir, "proof");
Directory.CreateDirectory(proofDir);
var proofPath = Path.Combine(proofDir, "proof-bundle.json");
await File.WriteAllBytesAsync(proofPath, request.ProofBundle, cancellationToken);
var proofDigest = ComputeSha256(request.ProofBundle);
entries.Add(new BundleEntry("proof/proof-bundle.json", proofDigest, request.ProofBundle.Length));
files.Add(new BundleFileEntry
@@ -167,16 +156,13 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.ProofBundle.Length,
ContentType = BundleContentType.ProofBundle
});
archiveEntries.Add(new ArchiveEntry("proof/proof-bundle.json", request.ProofBundle));
}
// Write trust roots (optional)
string? trustRootsDigest = null;
if (request.TrustRoots is not null)
{
var trustDir = Path.Combine(tempDir, "trust");
Directory.CreateDirectory(trustDir);
var trustPath = Path.Combine(trustDir, "trust-roots.json");
await File.WriteAllBytesAsync(trustPath, request.TrustRoots, cancellationToken);
trustRootsDigest = ComputeSha256(request.TrustRoots);
entries.Add(new BundleEntry("trust/trust-roots.json", trustRootsDigest, request.TrustRoots.Length));
files.Add(new BundleFileEntry
@@ -186,14 +172,13 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.TrustRoots.Length,
ContentType = BundleContentType.TrustRoot
});
archiveEntries.Add(new ArchiveEntry("trust/trust-roots.json", request.TrustRoots));
}
// Write scoring rules (optional)
string? scoringDigest = null;
if (request.ScoringRules is not null)
{
var scoringPath = Path.Combine(tempDir, "scoring-rules.json");
await File.WriteAllBytesAsync(scoringPath, request.ScoringRules, cancellationToken);
scoringDigest = ComputeSha256(request.ScoringRules);
entries.Add(new BundleEntry("scoring-rules.json", scoringDigest, request.ScoringRules.Length));
files.Add(new BundleFileEntry
@@ -203,15 +188,14 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = request.ScoringRules.Length,
ContentType = BundleContentType.Other
});
archiveEntries.Add(new ArchiveEntry("scoring-rules.json", request.ScoringRules));
}
// Write time anchor (optional)
TimeAnchor? timeAnchor = null;
if (request.TimeAnchor is not null)
{
var timeAnchorPath = Path.Combine(tempDir, "time-anchor.json");
var timeAnchorBytes = JsonSerializer.SerializeToUtf8Bytes(request.TimeAnchor, JsonOptions);
await File.WriteAllBytesAsync(timeAnchorPath, timeAnchorBytes, cancellationToken);
var timeAnchorBytes = CanonicalJson.Serialize(request.TimeAnchor, JsonOptions);
var timeAnchorDigest = ComputeSha256(timeAnchorBytes);
entries.Add(new BundleEntry("time-anchor.json", timeAnchorDigest, timeAnchorBytes.Length));
files.Add(new BundleFileEntry
@@ -221,6 +205,7 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
SizeBytes = timeAnchorBytes.Length,
ContentType = BundleContentType.TimeAnchor
});
archiveEntries.Add(new ArchiveEntry("time-anchor.json", timeAnchorBytes));
timeAnchor = new TimeAnchor
{
Timestamp = request.TimeAnchor.Timestamp,
@@ -235,9 +220,9 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
// Build manifest
var manifest = new AuditBundleManifest
{
BundleId = request.BundleId ?? Guid.NewGuid().ToString("N"),
BundleId = request.BundleId ?? _idGenerator.NewBundleId(),
Name = request.Name ?? $"audit-{request.ScanId}",
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = _timeProvider.GetUtcNow(),
ScanId = request.ScanId,
ImageRef = request.ImageRef,
ImageDigest = request.ImageDigest,
@@ -259,9 +244,8 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
};
// Write manifest
var manifestBytes = JsonSerializer.SerializeToUtf8Bytes(manifest, JsonOptions);
var manifestPath = Path.Combine(tempDir, "manifest.json");
await File.WriteAllBytesAsync(manifestPath, manifestBytes, cancellationToken);
var manifestBytes = CanonicalJson.Serialize(manifest, JsonOptions);
archiveEntries.Add(new ArchiveEntry("manifest.json", manifestBytes));
// Sign manifest if requested
string? signingKeyId = null;
@@ -282,8 +266,7 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
if (signResult.Success && signResult.Envelope is not null)
{
var signaturePath = Path.Combine(tempDir, "manifest.sig");
await File.WriteAllBytesAsync(signaturePath, signResult.Envelope, cancellationToken);
archiveEntries.Add(new ArchiveEntry("manifest.sig", signResult.Envelope));
signingKeyId = signResult.KeyId;
signingAlgorithm = signResult.Algorithm;
signed = true;
@@ -297,7 +280,7 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
outputPath = $"{outputPath}.tar.gz";
}
await CreateTarGzAsync(tempDir, outputPath, cancellationToken);
await ArchiveUtilities.WriteTarGzAsync(outputPath, archiveEntries, cancellationToken);
var bundleDigest = await ComputeFileDigestAsync(outputPath, cancellationToken);
@@ -320,21 +303,6 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
{
return AuditBundleWriteResult.Failed($"Failed to write audit bundle: {ex.Message}");
}
finally
{
// Clean up temp directory
try
{
if (Directory.Exists(tempDir))
{
Directory.Delete(tempDir, recursive: true);
}
}
catch
{
// Ignore cleanup errors
}
}
}
private static string ComputeSha256(byte[] content)
@@ -395,19 +363,6 @@ public sealed class AuditBundleWriter : IAuditBundleWriter
}
}
private static async Task CreateTarGzAsync(string sourceDir, string outputPath, CancellationToken ct)
{
var outputDir = Path.GetDirectoryName(outputPath);
if (!string.IsNullOrEmpty(outputDir) && !Directory.Exists(outputDir))
{
Directory.CreateDirectory(outputDir);
}
await using var fileStream = File.Create(outputPath);
await using var gzipStream = new GZipStream(fileStream, CompressionLevel.Optimal);
await TarFile.CreateFromDirectoryAsync(sourceDir, gzipStream, includeBaseDirectory: false, ct);
}
private sealed record BundleEntry(string Path, string Digest, long SizeBytes);
}

View File

@@ -2,17 +2,23 @@ namespace StellaOps.AuditPack.Services;
using StellaOps.AuditPack.Models;
using System.Collections.Immutable;
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
/// <summary>
/// Builds audit packs from scan results.
/// </summary>
public sealed class AuditPackBuilder : IAuditPackBuilder
{
private readonly TimeProvider _timeProvider;
private readonly IAuditPackIdGenerator _idGenerator;
/// <summary>
/// Creates a builder. When no <see cref="TimeProvider"/> or id generator is supplied,
/// falls back to the system clock and GUID-based pack identifiers.
/// </summary>
public AuditPackBuilder(TimeProvider? timeProvider = null, IAuditPackIdGenerator? idGenerator = null)
{
    _idGenerator = idGenerator ?? new GuidAuditPackIdGenerator();
    _timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Builds an audit pack from a scan result.
/// </summary>
@@ -33,14 +39,16 @@ public sealed class AuditPackBuilder : IAuditPackBuilder
var bundleManifest = await BuildMinimalBundleAsync(scanResult, ct);
// Create pack structure
var now = _timeProvider.GetUtcNow();
var pack = new AuditPack
{
PackId = Guid.NewGuid().ToString(),
PackId = _idGenerator.NewPackId(),
SchemaVersion = "1.0.0",
Name = options.Name ?? $"audit-pack-{scanResult.ScanId}",
CreatedAt = DateTimeOffset.UtcNow,
RunManifest = new RunManifest(scanResult.ScanId, DateTimeOffset.UtcNow),
EvidenceIndex = new EvidenceIndex([]),
CreatedAt = now,
RunManifest = new RunManifest(scanResult.ScanId, now),
EvidenceIndex = new EvidenceIndex(Array.Empty<string>().ToImmutableArray()),
Verdict = new Verdict(scanResult.ScanId, "completed"),
OfflineBundle = bundleManifest,
Attestations = [.. attestations],
@@ -55,6 +63,9 @@ public sealed class AuditPackBuilder : IAuditPackBuilder
}
};
var fileResult = BuildPackFiles(pack);
pack = pack with { Contents = fileResult.Contents };
return WithDigest(pack);
}
@@ -67,126 +78,36 @@ public sealed class AuditPackBuilder : IAuditPackBuilder
ExportOptions options,
CancellationToken ct = default)
{
var tempDir = Path.Combine(Path.GetTempPath(), $"audit-pack-{Guid.NewGuid():N}");
Directory.CreateDirectory(tempDir);
var fileBuild = BuildPackFiles(pack);
pack = pack with { Contents = fileBuild.Contents };
pack = WithDigest(pack);
var entries = fileBuild.Entries;
try
var manifestBytes = CanonicalJson.Serialize(pack);
entries.Insert(0, new ArchiveEntry("manifest.json", manifestBytes));
if (options.Sign && !string.IsNullOrWhiteSpace(options.SigningKey))
{
// Write pack manifest
var manifestJson = JsonSerializer.Serialize(pack, new JsonSerializerOptions
{
WriteIndented = true
});
await File.WriteAllTextAsync(Path.Combine(tempDir, "manifest.json"), manifestJson, ct);
// Write run manifest
var runManifestJson = JsonSerializer.Serialize(pack.RunManifest);
await File.WriteAllTextAsync(Path.Combine(tempDir, "run-manifest.json"), runManifestJson, ct);
// Write evidence index
var evidenceJson = JsonSerializer.Serialize(pack.EvidenceIndex);
await File.WriteAllTextAsync(Path.Combine(tempDir, "evidence-index.json"), evidenceJson, ct);
// Write verdict
var verdictJson = JsonSerializer.Serialize(pack.Verdict);
await File.WriteAllTextAsync(Path.Combine(tempDir, "verdict.json"), verdictJson, ct);
// Write SBOMs
var sbomsDir = Path.Combine(tempDir, "sboms");
Directory.CreateDirectory(sbomsDir);
foreach (var sbom in pack.Sboms)
{
await File.WriteAllTextAsync(
Path.Combine(sbomsDir, $"{sbom.Id}.json"),
sbom.Content,
ct);
}
// Write attestations
var attestationsDir = Path.Combine(tempDir, "attestations");
Directory.CreateDirectory(attestationsDir);
foreach (var att in pack.Attestations)
{
await File.WriteAllTextAsync(
Path.Combine(attestationsDir, $"{att.Id}.json"),
att.Envelope,
ct);
}
// Write VEX documents
if (pack.VexDocuments.Length > 0)
{
var vexDir = Path.Combine(tempDir, "vex");
Directory.CreateDirectory(vexDir);
foreach (var vex in pack.VexDocuments)
{
await File.WriteAllTextAsync(
Path.Combine(vexDir, $"{vex.Id}.json"),
vex.Content,
ct);
}
}
// Write trust roots
var certsDir = Path.Combine(tempDir, "trust-roots");
Directory.CreateDirectory(certsDir);
foreach (var root in pack.TrustRoots)
{
await File.WriteAllTextAsync(
Path.Combine(certsDir, $"{root.Id}.pem"),
root.Content,
ct);
}
// Create tar.gz archive
await CreateTarGzAsync(tempDir, outputPath, ct);
// Sign if requested
if (options.Sign && !string.IsNullOrEmpty(options.SigningKey))
{
await SignPackAsync(outputPath, options.SigningKey, ct);
}
}
finally
{
if (Directory.Exists(tempDir))
Directory.Delete(tempDir, recursive: true);
var signature = await SignManifestAsync(manifestBytes, options.SigningKey, ct);
entries.Add(new ArchiveEntry("manifest.sig", signature));
}
await ArchiveUtilities.WriteTarGzAsync(outputPath, entries, ct);
}
private static AuditPack WithDigest(AuditPack pack)
{
var json = JsonSerializer.Serialize(pack with { PackDigest = null, Signature = null });
var json = CanonicalJson.Serialize(pack with { PackDigest = null, Signature = null });
var digest = ComputeDigest(json);
return pack with { PackDigest = digest };
}
private static string ComputeDigest(string content)
private static string ComputeDigest(byte[] content)
{
var bytes = Encoding.UTF8.GetBytes(content);
var hash = SHA256.HashData(bytes);
var hash = SHA256.HashData(content);
return Convert.ToHexString(hash).ToLowerInvariant();
}
/// <summary>
/// Packs <paramref name="sourceDir"/> into a gzip-compressed tar archive at
/// <paramref name="outputPath"/>, streaming the tar directly into the gzip stream.
/// Fix: the previous implementation wrote an intermediate ".tar" file derived via
/// Replace(".tar.gz", ".tar") — which collided with outputPath when the path did not
/// end in ".tar.gz" — and leaked that temp file if an exception occurred before the
/// final delete. Streaming removes the temp file entirely.
/// </summary>
private static async Task CreateTarGzAsync(string sourceDir, string outputPath, CancellationToken ct)
{
    await using var fileStream = File.Create(outputPath);
    await using var gzip = new GZipStream(fileStream, CompressionLevel.Optimal);
    await TarFile.CreateFromDirectoryAsync(sourceDir, gzip, includeBaseDirectory: false, ct);
}
private static Task<ImmutableArray<Attestation>> CollectAttestationsAsync(ScanResult scanResult, CancellationToken ct)
{
// TODO: Collect attestations from storage
@@ -217,11 +138,89 @@ public sealed class AuditPackBuilder : IAuditPackBuilder
return Task.FromResult(new BundleManifest("bundle-1", "1.0.0"));
}
private static Task SignPackAsync(string packPath, string signingKey, CancellationToken ct)
private static async Task<byte[]> SignManifestAsync(byte[] manifestBytes, string signingKey, CancellationToken ct)
{
// TODO: Sign pack with key
return Task.CompletedTask;
var signer = new AuditBundleSigner();
var result = await signer.SignAsync(
new AuditBundleSigningRequest
{
ManifestBytes = manifestBytes,
KeyFilePath = signingKey
},
ct);
if (!result.Success || result.Envelope is null)
{
throw new InvalidOperationException(result.Error ?? "Failed to sign audit pack manifest.");
}
return result.Envelope;
}
/// <summary>
/// Materializes the pack's JSON documents and embedded texts as archive entries plus
/// a PackContents summary (per-file digest/size and aggregate totals).
/// NOTE(review): the order of the Add* calls fixes the order of PackContents.Files —
/// presumably this feeds the pack digest, so do not reorder without confirming.
/// </summary>
private static PackFileBuildResult BuildPackFiles(AuditPack pack)
{
var entries = new List<ArchiveEntry>();
var files = new List<PackFile>();
// Core manifests are serialized to canonical JSON so digests are stable.
AddJsonEntry(entries, files, "run-manifest.json", pack.RunManifest, PackFileType.RunManifest);
AddJsonEntry(entries, files, "evidence-index.json", pack.EvidenceIndex, PackFileType.EvidenceIndex);
AddJsonEntry(entries, files, "verdict.json", pack.Verdict, PackFileType.Verdict);
// Pre-rendered documents are embedded verbatim, keyed by their ids.
foreach (var sbom in pack.Sboms)
{
AddTextEntry(entries, files, $"sboms/{sbom.Id}.json", sbom.Content, PackFileType.Sbom);
}
foreach (var attestation in pack.Attestations)
{
AddTextEntry(entries, files, $"attestations/{attestation.Id}.json", attestation.Envelope, PackFileType.Attestation);
}
foreach (var vex in pack.VexDocuments)
{
AddTextEntry(entries, files, $"vex/{vex.Id}.json", vex.Content, PackFileType.Vex);
}
foreach (var root in pack.TrustRoots)
{
AddTextEntry(entries, files, $"trust-roots/{root.Id}.pem", root.Content, PackFileType.TrustRoot);
}
// Summary over every file added above.
var contents = new PackContents
{
Files = [.. files],
TotalSizeBytes = files.Sum(f => f.SizeBytes),
FileCount = files.Count
};
return new PackFileBuildResult(entries, contents);
}
/// <summary>
/// Serializes <paramref name="payload"/> to canonical JSON and records it both
/// as an archive entry and as a digested <c>PackFile</c> manifest row.
/// </summary>
/// <remarks>Canonical JSON keeps digests stable across serialization runs.</remarks>
private static void AddJsonEntry<T>(
    List<ArchiveEntry> entries,
    List<PackFile> files,
    string path,
    T payload,
    PackFileType type)
{
    var serialized = CanonicalJson.Serialize(payload);
    entries.Add(new ArchiveEntry(path, serialized));
    files.Add(new PackFile(path, ComputeDigest(serialized), serialized.Length, type));
}
/// <summary>
/// Encodes <paramref name="content"/> as UTF-8 and records it both as an
/// archive entry and as a digested <c>PackFile</c> manifest row.
/// </summary>
private static void AddTextEntry(
    List<ArchiveEntry> entries,
    List<PackFile> files,
    string path,
    string content,
    PackFileType type)
{
    var encoded = Encoding.UTF8.GetBytes(content);
    entries.Add(new ArchiveEntry(path, encoded));
    files.Add(new PackFile(path, ComputeDigest(encoded), encoded.Length, type));
}
/// <summary>Pairs the raw archive entries with the manifest metadata describing them.</summary>
private sealed record PackFileBuildResult(List<ArchiveEntry> Entries, PackContents Contents);
}
public interface IAuditPackBuilder

View File

@@ -5,7 +5,6 @@
// -----------------------------------------------------------------------------
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using StellaOps.AuditPack.Models;
@@ -25,13 +24,19 @@ public sealed class AuditPackExportService : IAuditPackExportService
private readonly IAuditBundleWriter _bundleWriter;
private readonly IAuditPackRepository? _repository;
private readonly TimeProvider _timeProvider;
private readonly IAuditPackExportSigner? _dsseSigner;
public AuditPackExportService(
IAuditBundleWriter bundleWriter,
IAuditPackRepository? repository = null)
IAuditPackRepository? repository = null,
TimeProvider? timeProvider = null,
IAuditPackExportSigner? dsseSigner = null)
{
_bundleWriter = bundleWriter;
_repository = repository;
_timeProvider = timeProvider ?? TimeProvider.System;
_dsseSigner = dsseSigner;
}
/// <summary>
@@ -43,6 +48,13 @@ public sealed class AuditPackExportService : IAuditPackExportService
{
ArgumentNullException.ThrowIfNull(request);
_ = _bundleWriter;
if (_repository is null)
{
return ExportResult.Failed("Audit pack repository is required for export.");
}
return request.Format switch
{
ExportFormat.Zip => await ExportAsZipAsync(request, cancellationToken),
@@ -120,7 +132,7 @@ public sealed class AuditPackExportService : IAuditPackExportService
{
var exportDoc = new Dictionary<string, object>
{
["exportedAt"] = DateTimeOffset.UtcNow.ToString("O"),
["exportedAt"] = _timeProvider.GetUtcNow().ToString("O"),
["scanId"] = request.ScanId,
["format"] = "json",
["version"] = "1.0"
@@ -182,6 +194,11 @@ public sealed class AuditPackExportService : IAuditPackExportService
ExportRequest request,
CancellationToken ct)
{
if (_dsseSigner is null)
{
return ExportResult.Failed("DSSE export requires a signing provider.");
}
// First create the JSON payload
var jsonResult = await ExportAsJsonAsync(request, ct);
if (!jsonResult.Success)
@@ -191,11 +208,12 @@ public sealed class AuditPackExportService : IAuditPackExportService
// Create DSSE envelope structure
var payload = Convert.ToBase64String(jsonResult.Data!);
var signature = await _dsseSigner.SignAsync(jsonResult.Data!, ct);
var envelope = new DsseExportEnvelope
{
PayloadType = "application/vnd.stellaops.audit-pack+json",
Payload = payload,
Signatures = [] // Would be populated by actual signing in production
Signatures = [signature]
};
var envelopeBytes = JsonSerializer.SerializeToUtf8Bytes(envelope, JsonOptions);
@@ -210,11 +228,11 @@ public sealed class AuditPackExportService : IAuditPackExportService
};
}
private static ExportManifest CreateManifest(ExportRequest request)
private ExportManifest CreateManifest(ExportRequest request)
{
return new ExportManifest
{
ExportedAt = DateTimeOffset.UtcNow,
ExportedAt = _timeProvider.GetUtcNow(),
ScanId = request.ScanId,
FindingIds = request.FindingIds,
Format = request.Format.ToString(),
@@ -244,46 +262,25 @@ public sealed class AuditPackExportService : IAuditPackExportService
ExportSegment segment,
CancellationToken ct)
{
if (_repository is null)
{
// Return mock data for testing
return CreateMockSegmentData(segment);
}
return await _repository.GetSegmentDataAsync(scanId, segment, ct);
var repository = RequireRepository();
return await repository.GetSegmentDataAsync(scanId, segment, ct);
}
private async Task<List<object>> GetAttestationsAsync(string scanId, CancellationToken ct)
{
if (_repository is null)
{
return [];
}
var attestations = await _repository.GetAttestationsAsync(scanId, ct);
var repository = RequireRepository();
var attestations = await repository.GetAttestationsAsync(scanId, ct);
return [.. attestations];
}
private async Task<object?> GetProofChainAsync(string scanId, CancellationToken ct)
{
if (_repository is null)
{
return null;
}
return await _repository.GetProofChainAsync(scanId, ct);
var repository = RequireRepository();
return await repository.GetProofChainAsync(scanId, ct);
}
private static byte[] CreateMockSegmentData(ExportSegment segment)
{
var mockData = new Dictionary<string, object>
{
["segment"] = segment.ToString(),
["generatedAt"] = DateTimeOffset.UtcNow.ToString("O"),
["data"] = new { placeholder = true }
};
return JsonSerializer.SerializeToUtf8Bytes(mockData, JsonOptions);
}
private IAuditPackRepository RequireRepository()
=> _repository ?? throw new InvalidOperationException("Audit pack repository is required for export.");
private static async Task AddJsonToZipAsync<T>(
ZipArchive archive,
@@ -325,6 +322,14 @@ public interface IAuditPackRepository
Task<object?> GetProofChainAsync(string scanId, CancellationToken ct);
}
/// <summary>
/// DSSE signer for audit pack exports.
/// </summary>
public interface IAuditPackExportSigner
{
Task<DsseSignature> SignAsync(byte[] payload, CancellationToken ct);
}
#region Models
/// <summary>

View File

@@ -0,0 +1,17 @@
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Abstraction over identifier generation so callers can inject deterministic
/// ids in tests instead of relying on random GUIDs.
/// </summary>
public interface IAuditPackIdGenerator
{
    /// <summary>New identifier for an audit pack.</summary>
    string NewPackId();

    /// <summary>New identifier for a bundle.</summary>
    string NewBundleId();

    /// <summary>New identifier for an attestation.</summary>
    string NewAttestationId();

    /// <summary>New identifier suitable for temporary paths/scratch names.</summary>
    string NewTempId();
}
/// <summary>
/// Default <see cref="IAuditPackIdGenerator"/> backed by random GUIDs.
/// Pack ids keep the dashed ("D") form; all other ids use the compact 32-char
/// hex ("N") form.
/// </summary>
public sealed class GuidAuditPackIdGenerator : IAuditPackIdGenerator
{
    public string NewPackId() => Guid.NewGuid().ToString("D");

    public string NewBundleId() => CompactGuid();

    public string NewAttestationId() => CompactGuid();

    public string NewTempId() => CompactGuid();

    // 32 lowercase hex characters, no dashes.
    private static string CompactGuid() => Guid.NewGuid().ToString("N");
}

View File

@@ -1,8 +1,6 @@
namespace StellaOps.AuditPack.Services;
using StellaOps.AuditPack.Models;
using System.Formats.Tar;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Text.Json;
@@ -11,6 +9,13 @@ using System.Text.Json;
/// </summary>
public sealed class AuditPackImporter : IAuditPackImporter
{
private readonly IAuditPackIdGenerator _idGenerator;
public AuditPackImporter(IAuditPackIdGenerator? idGenerator = null)
{
_idGenerator = idGenerator ?? new GuidAuditPackIdGenerator();
}
/// <summary>
/// Imports an audit pack from archive.
/// </summary>
@@ -20,12 +25,12 @@ public sealed class AuditPackImporter : IAuditPackImporter
CancellationToken ct = default)
{
var extractDir = options.ExtractDirectory ??
Path.Combine(Path.GetTempPath(), $"audit-pack-{Guid.NewGuid():N}");
Path.Combine(Path.GetTempPath(), $"audit-pack-{_idGenerator.NewTempId()}");
try
{
// Extract archive
await ExtractTarGzAsync(archivePath, extractDir, ct);
await ArchiveUtilities.ExtractTarGzAsync(archivePath, extractDir, overwriteFiles: true, ct);
// Load manifest
var manifestPath = Path.Combine(extractDir, "manifest.json");
@@ -34,7 +39,7 @@ public sealed class AuditPackImporter : IAuditPackImporter
return ImportResult.Failed("Manifest file not found");
}
var manifestJson = await File.ReadAllTextAsync(manifestPath, ct);
var manifestJson = await File.ReadAllBytesAsync(manifestPath, ct);
var pack = JsonSerializer.Deserialize<AuditPack>(manifestJson);
if (pack == null)
@@ -53,14 +58,14 @@ public sealed class AuditPackImporter : IAuditPackImporter
SignatureResult? signatureResult = null;
if (options.VerifySignatures)
{
signatureResult = await VerifySignaturesAsync(pack, extractDir, ct);
signatureResult = await VerifySignaturesAsync(manifestJson, pack, extractDir, ct);
if (!signatureResult.IsValid)
{
return ImportResult.Failed("Signature verification failed", signatureResult.Errors);
}
}
return new ImportResult
var result = new ImportResult
{
Success = true,
Pack = pack,
@@ -68,6 +73,14 @@ public sealed class AuditPackImporter : IAuditPackImporter
IntegrityResult = integrityResult,
SignatureResult = signatureResult
};
if (!options.KeepExtracted && options.ExtractDirectory is null)
{
Directory.Delete(extractDir, recursive: true);
result = result with { ExtractDirectory = null };
}
return result;
}
catch (Exception ex)
{
@@ -75,27 +88,6 @@ public sealed class AuditPackImporter : IAuditPackImporter
}
}
private static async Task ExtractTarGzAsync(string archivePath, string extractDir, CancellationToken ct)
{
Directory.CreateDirectory(extractDir);
var tarPath = archivePath.Replace(".tar.gz", ".tar");
// Decompress gz
using (var gzStream = File.OpenRead(archivePath))
using (var gzip = new GZipStream(gzStream, CompressionMode.Decompress))
using (var tarStream = File.Create(tarPath))
{
await gzip.CopyToAsync(tarStream, ct);
}
// Extract tar
await TarFile.ExtractToDirectoryAsync(tarPath, extractDir, overwriteFiles: true, ct);
// Clean up tar
File.Delete(tarPath);
}
private static async Task<IntegrityResult> VerifyIntegrityAsync(
AuditPack pack,
string extractDir,
@@ -136,27 +128,57 @@ public sealed class AuditPackImporter : IAuditPackImporter
}
private static async Task<SignatureResult> VerifySignaturesAsync(
byte[] manifestBytes,
AuditPack pack,
string extractDir,
CancellationToken ct)
{
var errors = new List<string>();
// Load signature
var signaturePath = Path.Combine(extractDir, "signature.sig");
var signaturePath = Path.Combine(extractDir, "manifest.sig");
if (!File.Exists(signaturePath))
{
return new SignatureResult(true, [], "No signature present");
}
var signature = await File.ReadAllTextAsync(signaturePath, ct);
var signature = await File.ReadAllBytesAsync(signaturePath, ct);
var trustRoots = pack.TrustRoots;
// Verify against trust roots
foreach (var root in pack.TrustRoots)
if (trustRoots.Length == 0)
{
// TODO: Implement actual signature verification
// For now, just check that trust root exists
if (!string.IsNullOrEmpty(root.Content))
errors.Add("No trust roots available for signature verification");
return new SignatureResult(false, errors);
}
foreach (var root in trustRoots)
{
if (string.IsNullOrWhiteSpace(root.Content))
{
continue;
}
using var publicKey = TryLoadPublicKey(root.Content);
if (publicKey is null)
{
continue;
}
var signer = new AuditBundleSigner();
var result = await signer.VerifyAsync(
new AuditBundleVerificationRequest
{
EnvelopeBytes = signature,
PublicKey = publicKey
},
ct);
if (!result.Success || result.VerifiedSignatures is null)
{
continue;
}
if (result.VerifiedSignatures.Any(s => s.Verified)
&& string.Equals(result.PayloadDigest, ComputeSha256(manifestBytes), StringComparison.Ordinal))
{
return new SignatureResult(true, [], $"Verified with {root.Id}");
}
@@ -168,10 +190,39 @@ public sealed class AuditPackImporter : IAuditPackImporter
private static string ComputePackDigest(AuditPack pack)
{
var json = JsonSerializer.Serialize(pack with { PackDigest = null, Signature = null });
var bytes = System.Text.Encoding.UTF8.GetBytes(json);
var hash = SHA256.HashData(bytes);
return Convert.ToHexString(hash).ToLowerInvariant();
var json = CanonicalJson.Serialize(pack with { PackDigest = null, Signature = null });
return Convert.ToHexString(SHA256.HashData(json)).ToLowerInvariant();
}
/// <summary>
/// Returns the SHA-256 digest of <paramref name="content"/> in the
/// "sha256:&lt;lowercase-hex&gt;" form used throughout the pack manifests.
/// </summary>
private static string ComputeSha256(byte[] content)
    => $"sha256:{Convert.ToHexString(SHA256.HashData(content)).ToLowerInvariant()}";
/// <summary>
/// Attempts to load a public key from PEM, trying ECDSA first and then RSA.
/// </summary>
/// <param name="pem">PEM-encoded public key material.</param>
/// <returns>
/// A live <see cref="AsymmetricAlgorithm"/> the caller must dispose, or
/// <c>null</c> when the PEM cannot be parsed as either key type.
/// </returns>
private static AsymmetricAlgorithm? TryLoadPublicKey(string pem)
{
    // Fix: the original leaked the freshly created ECDsa/RSA instances when
    // ImportFromPem threw; dispose them on the failure path.
    var ecdsa = ECDsa.Create();
    try
    {
        ecdsa.ImportFromPem(pem);
        return ecdsa;
    }
    catch
    {
        ecdsa.Dispose();
    }

    var rsa = RSA.Create();
    try
    {
        rsa.ImportFromPem(pem);
        return rsa;
    }
    catch
    {
        rsa.Dispose();
        return null;
    }
}
}

View File

@@ -54,16 +54,11 @@ public sealed class AuditPackReplayer : IAuditPackReplayer
RunManifest runManifest,
CancellationToken ct)
{
// TODO: Implement actual replay execution
// This would call the scanner with frozen time and offline bundle
await Task.CompletedTask;
return new ReplayResult
{
Success = true,
Verdict = new Verdict("replayed-verdict", "completed"),
VerdictDigest = "placeholder-digest",
DurationMs = 1000
Success = false,
Errors = ["Replay execution is not implemented."]
};
}

View File

@@ -0,0 +1,61 @@
using System.Text.Encodings.Web;
using System.Text.Json;
namespace StellaOps.AuditPack.Services;
/// <summary>
/// Deterministic (canonical) JSON serialization: compact output with object
/// properties sorted by ordinal name at every nesting level, so byte-identical
/// input values always produce byte-identical JSON (stable for digesting).
/// </summary>
internal static class CanonicalJson
{
    /// <summary>Default serializer settings: web (camelCase) naming, compact output.</summary>
    public static readonly JsonSerializerOptions DefaultOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = false
    };

    // Unsafe-relaxed escaping keeps non-ASCII characters literal instead of \uXXXX.
    private static readonly JsonWriterOptions WriterOptions = new()
    {
        Indented = false,
        Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping
    };

    /// <summary>
    /// Serializes <paramref name="value"/> with <paramref name="options"/>
    /// (or <see cref="DefaultOptions"/>) and canonicalizes the result.
    /// </summary>
    public static byte[] Serialize<T>(T value, JsonSerializerOptions? options = null)
    {
        var json = JsonSerializer.SerializeToUtf8Bytes(value, options ?? DefaultOptions);
        return Canonicalize(json);
    }

    /// <summary>
    /// Rewrites arbitrary UTF-8 JSON into canonical form (sorted keys, compact).
    /// </summary>
    public static byte[] Canonicalize(ReadOnlySpan<byte> json)
    {
        // Fix: parse straight from the span via ParseValue instead of the
        // original json.ToArray() copy fed to JsonDocument.Parse.
        var reader = new Utf8JsonReader(json);
        using var doc = JsonDocument.ParseValue(ref reader);

        using var stream = new MemoryStream();
        using (var writer = new Utf8JsonWriter(stream, WriterOptions))
        {
            WriteElementSorted(doc.RootElement, writer);
        }

        return stream.ToArray();
    }

    // Recursive rewrite: objects get ordinally sorted property names, arrays
    // preserve element order, scalars are copied through verbatim.
    private static void WriteElementSorted(JsonElement element, Utf8JsonWriter writer)
    {
        switch (element.ValueKind)
        {
            case JsonValueKind.Object:
                writer.WriteStartObject();
                foreach (var property in element.EnumerateObject().OrderBy(p => p.Name, StringComparer.Ordinal))
                {
                    writer.WritePropertyName(property.Name);
                    WriteElementSorted(property.Value, writer);
                }
                writer.WriteEndObject();
                break;

            case JsonValueKind.Array:
                writer.WriteStartArray();
                foreach (var item in element.EnumerateArray())
                {
                    WriteElementSorted(item, writer);
                }
                writer.WriteEndArray();
                break;

            default:
                element.WriteTo(writer);
                break;
        }
    }
}

View File

@@ -25,17 +25,24 @@ public sealed class IsolatedReplayContext : IIsolatedReplayContext, IDisposable
private readonly string _workingDirectory;
private readonly bool _cleanupOnDispose;
private readonly TimeProvider _timeProvider;
private bool _disposed;
/// <summary>
/// Creates a new isolated replay context.
/// </summary>
public IsolatedReplayContext(IsolatedReplayContextOptions options)
public IsolatedReplayContext(IsolatedReplayContextOptions options, TimeProvider? timeProvider = null)
{
ArgumentNullException.ThrowIfNull(options);
Options = options;
_cleanupOnDispose = options.CleanupOnDispose;
_timeProvider = timeProvider ?? TimeProvider.System;
if (options.EnforceOffline && IsNetworkPath(options.WorkingDirectory))
{
throw new InvalidOperationException("WorkingDirectory cannot be a network path when offline enforcement is enabled.");
}
// Create isolated working directory
_workingDirectory = options.WorkingDirectory
@@ -44,7 +51,7 @@ public sealed class IsolatedReplayContext : IIsolatedReplayContext, IDisposable
// Initialize context state
IsInitialized = false;
EvaluationTime = options.EvaluationTime ?? DateTimeOffset.UtcNow;
EvaluationTime = options.EvaluationTime ?? _timeProvider.GetUtcNow();
}
public IsolatedReplayContextOptions Options { get; }
@@ -237,6 +244,16 @@ public sealed class IsolatedReplayContext : IIsolatedReplayContext, IDisposable
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
}
private static bool IsNetworkPath(string? path)
{
if (string.IsNullOrWhiteSpace(path))
{
return false;
}
return path.StartsWith("\\\\", StringComparison.Ordinal) || path.StartsWith("//", StringComparison.Ordinal);
}
public void Dispose()
{
if (_disposed) return;

View File

@@ -30,10 +30,20 @@ public sealed class ReplayAttestationService : IReplayAttestationService
};
private readonly IReplayAttestationSigner? _signer;
private readonly IReplayAttestationSignatureVerifier? _verifier;
private readonly TimeProvider _timeProvider;
private readonly IAuditPackIdGenerator _idGenerator;
public ReplayAttestationService(IReplayAttestationSigner? signer = null)
public ReplayAttestationService(
IReplayAttestationSigner? signer = null,
IReplayAttestationSignatureVerifier? verifier = null,
TimeProvider? timeProvider = null,
IAuditPackIdGenerator? idGenerator = null)
{
_signer = signer;
_verifier = verifier;
_timeProvider = timeProvider ?? TimeProvider.System;
_idGenerator = idGenerator ?? new GuidAuditPackIdGenerator();
}
/// <summary>
@@ -51,7 +61,7 @@ public sealed class ReplayAttestationService : IReplayAttestationService
var statement = CreateInTotoStatement(manifest, replayResult);
// Serialize to canonical JSON
var statementBytes = JsonSerializer.SerializeToUtf8Bytes(statement, JsonOptions);
var statementBytes = CanonicalJson.Serialize(statement, JsonOptions);
var statementDigest = ComputeSha256Digest(statementBytes);
// Create DSSE envelope
@@ -59,9 +69,9 @@ public sealed class ReplayAttestationService : IReplayAttestationService
return new ReplayAttestation
{
AttestationId = Guid.NewGuid().ToString("N"),
AttestationId = _idGenerator.NewAttestationId(),
ManifestId = manifest.BundleId,
CreatedAt = DateTimeOffset.UtcNow,
CreatedAt = _timeProvider.GetUtcNow(),
Statement = statement,
StatementDigest = statementDigest,
Envelope = envelope,
@@ -73,7 +83,7 @@ public sealed class ReplayAttestationService : IReplayAttestationService
/// <summary>
/// Verifies a replay attestation's integrity.
/// </summary>
public Task<AttestationVerificationResult> VerifyAsync(
public async Task<AttestationVerificationResult> VerifyAsync(
ReplayAttestation attestation,
CancellationToken cancellationToken = default)
{
@@ -82,7 +92,7 @@ public sealed class ReplayAttestationService : IReplayAttestationService
var errors = new List<string>();
// Verify statement digest
var statementBytes = JsonSerializer.SerializeToUtf8Bytes(attestation.Statement, JsonOptions);
var statementBytes = CanonicalJson.Serialize(attestation.Statement, JsonOptions);
var computedDigest = ComputeSha256Digest(statementBytes);
if (computedDigest != attestation.StatementDigest)
@@ -109,16 +119,36 @@ public sealed class ReplayAttestationService : IReplayAttestationService
}
}
// Verify signatures if signer is available
var signatureValid = attestation.Envelope?.Signatures.Count > 0;
var signatureVerified = false;
if (attestation.Envelope is not null)
{
if (attestation.Envelope.Signatures.Count == 0)
{
errors.Add("Envelope contains no signatures");
}
else if (_verifier is null)
{
errors.Add("Signature verifier is not configured");
}
else
{
var payloadBytes = Convert.FromBase64String(attestation.Envelope.Payload);
var verification = await _verifier.VerifyAsync(attestation.Envelope, payloadBytes, cancellationToken);
signatureVerified = verification.Verified;
if (!verification.Verified)
{
errors.Add(verification.Error ?? "Signature verification failed");
}
}
}
return Task.FromResult(new AttestationVerificationResult
return new AttestationVerificationResult
{
IsValid = errors.Count == 0,
Errors = [.. errors],
SignatureVerified = signatureValid,
VerifiedAt = DateTimeOffset.UtcNow
});
SignatureVerified = signatureVerified,
VerifiedAt = _timeProvider.GetUtcNow()
};
}
/// <summary>
@@ -180,7 +210,7 @@ public sealed class ReplayAttestationService : IReplayAttestationService
Message = d.Message
}).ToList(),
EvaluatedAt = replayResult.EvaluatedAt,
ReplayedAt = DateTimeOffset.UtcNow,
ReplayedAt = _timeProvider.GetUtcNow(),
DurationMs = replayResult.DurationMs
}
};
@@ -253,6 +283,17 @@ public interface IReplayAttestationSigner
Task<DsseSignatureResult> SignAsync(byte[] payload, CancellationToken cancellationToken = default);
}
/// <summary>
/// Interface for verifying replay attestation signatures.
/// </summary>
public interface IReplayAttestationSignatureVerifier
{
Task<ReplayAttestationSignatureVerification> VerifyAsync(
ReplayDsseEnvelope envelope,
byte[] payload,
CancellationToken cancellationToken = default);
}
#region Models
/// <summary>
@@ -406,6 +447,15 @@ public sealed record DsseSignatureResult
public string? Algorithm { get; init; }
}
/// <summary>
/// Result of signature verification.
/// </summary>
public sealed record ReplayAttestationSignatureVerification
{
public bool Verified { get; init; }
public string? Error { get; init; }
}
/// <summary>
/// Result of attestation verification.
/// </summary>

View File

@@ -5,9 +5,6 @@
// Description: Fetches scan data and snapshots required for audit bundle creation.
// -----------------------------------------------------------------------------
using System.Text;
using System.Text.Json;
namespace StellaOps.AuditPack.Services;
/// <summary>
@@ -15,24 +12,21 @@ namespace StellaOps.AuditPack.Services;
/// </summary>
public sealed class ScanSnapshotFetcher : IScanSnapshotFetcher
{
private static readonly JsonSerializerOptions JsonOptions = new()
{
WriteIndented = true,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
private readonly IScanDataProvider? _scanDataProvider;
private readonly IFeedSnapshotProvider? _feedProvider;
private readonly IPolicySnapshotProvider? _policyProvider;
private readonly IVexSnapshotProvider? _vexProvider;
public ScanSnapshotFetcher(
IScanDataProvider? scanDataProvider = null,
IFeedSnapshotProvider? feedProvider = null,
IPolicySnapshotProvider? policyProvider = null)
IPolicySnapshotProvider? policyProvider = null,
IVexSnapshotProvider? vexProvider = null)
{
_scanDataProvider = scanDataProvider;
_feedProvider = feedProvider;
_policyProvider = policyProvider;
_vexProvider = vexProvider;
}
/// <summary>
@@ -81,6 +75,10 @@ public sealed class ScanSnapshotFetcher : IScanSnapshotFetcher
if (request.IncludeVex)
{
vexData = await FetchVexSnapshotAsync(request.ScanId, cancellationToken);
if (!vexData.Success)
{
return ScanSnapshotResult.Failed($"Failed to fetch VEX: {vexData.Error}");
}
}
return new ScanSnapshotResult
@@ -115,30 +113,11 @@ public sealed class ScanSnapshotFetcher : IScanSnapshotFetcher
return await _scanDataProvider.GetScanDataAsync(scanId, ct);
}
// Default implementation - return placeholder data
// In production, this would fetch from Scanner service
return new ScanData
{
Success = true,
Success = false,
ScanId = scanId,
ImageRef = $"scan-image-{scanId}",
ImageDigest = $"sha256:{scanId}",
Sbom = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(new
{
bomFormat = "CycloneDX",
specVersion = "1.6",
version = 1,
metadata = new { timestamp = DateTimeOffset.UtcNow },
components = Array.Empty<object>()
}, JsonOptions)),
Verdict = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(new
{
scanId,
decision = "pass",
evaluatedAt = DateTimeOffset.UtcNow
}, JsonOptions)),
Decision = "pass",
EvaluatedAt = DateTimeOffset.UtcNow
Error = "Scan data provider is not configured."
};
}
@@ -152,23 +131,10 @@ public sealed class ScanSnapshotFetcher : IScanSnapshotFetcher
return await _feedProvider.GetFeedSnapshotAsync(scanId, asOf, ct);
}
// Default implementation - return placeholder feeds
// In production, this would fetch from Concelier
var snapshotAt = asOf ?? DateTimeOffset.UtcNow;
var feeds = new StringBuilder();
feeds.AppendLine(JsonSerializer.Serialize(new
{
type = "advisory-feed-snapshot",
snapshotAt,
feedId = "nvd",
recordCount = 0
}));
return new FeedSnapshotData
{
Success = true,
Snapshot = Encoding.UTF8.GetBytes(feeds.ToString()),
SnapshotAt = snapshotAt
Success = false,
Error = "Feed snapshot provider is not configured."
};
}
@@ -182,47 +148,26 @@ public sealed class ScanSnapshotFetcher : IScanSnapshotFetcher
return await _policyProvider.GetPolicySnapshotAsync(scanId, version, ct);
}
// Default implementation - return placeholder policy bundle
// In production, this would fetch from Policy service
return new PolicySnapshotData
{
Success = true,
Bundle = CreatePlaceholderPolicyBundle(),
Version = version ?? "1.0.0"
Success = false,
Error = "Policy snapshot provider is not configured."
};
}
private async Task<VexSnapshotData> FetchVexSnapshotAsync(string scanId, CancellationToken ct)
{
// Default implementation - return empty VEX
if (_vexProvider is not null)
{
return await _vexProvider.GetVexSnapshotAsync(scanId, ct);
}
return await Task.FromResult(new VexSnapshotData
{
Success = true,
Statements = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(new
{
type = "https://openvex.dev/ns/v0.2.0",
statements = Array.Empty<object>()
}, JsonOptions))
Success = false,
Error = "VEX snapshot provider is not configured."
});
}
private static byte[] CreatePlaceholderPolicyBundle()
{
// Create a minimal tar.gz bundle
using var ms = new MemoryStream();
using (var gzip = new System.IO.Compression.GZipStream(ms, System.IO.Compression.CompressionLevel.Optimal, leaveOpen: true))
using (var writer = new BinaryWriter(gzip))
{
// Write minimal tar header for empty bundle
var header = new byte[512];
var name = "policy/empty.rego"u8;
name.CopyTo(header);
header[156] = (byte)'0'; // Regular file
writer.Write(header);
writer.Write(new byte[512]); // End of archive marker
}
return ms.ToArray();
}
}
/// <summary>
@@ -259,6 +204,14 @@ public interface IPolicySnapshotProvider
Task<PolicySnapshotData> GetPolicySnapshotAsync(string scanId, string? version, CancellationToken ct);
}
/// <summary>
/// Provider interface for VEX snapshots.
/// </summary>
public interface IVexSnapshotProvider
{
Task<VexSnapshotData> GetVexSnapshotAsync(string scanId, CancellationToken ct);
}
#region Request and Result Models
/// <summary>

View File

@@ -5,6 +5,7 @@
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>

View File

@@ -7,4 +7,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0075-M | DONE | Maintainability audit for StellaOps.AuditPack. |
| AUDIT-0075-T | DONE | Test coverage audit for StellaOps.AuditPack. |
| AUDIT-0075-A | TODO | Pending approval for changes. |
| AUDIT-0075-A | DONE | Deterministic archive/export + signature verification + tests. |

View File

@@ -35,9 +35,13 @@ internal static class DpopNonceUtilities
ArgumentException.ThrowIfNullOrWhiteSpace(clientId);
ArgumentException.ThrowIfNullOrWhiteSpace(keyThumbprint);
var normalizedAudience = audience.Trim().ToLowerInvariant();
var normalizedClientId = clientId.Trim().ToLowerInvariant();
var normalizedThumbprint = keyThumbprint.Trim().ToLowerInvariant();
return string.Create(
"dpop-nonce:".Length + audience.Length + clientId.Length + keyThumbprint.Length + 2,
(audience.Trim(), clientId.Trim(), keyThumbprint.Trim()),
"dpop-nonce:".Length + normalizedAudience.Length + normalizedClientId.Length + normalizedThumbprint.Length + 2,
(normalizedAudience, normalizedClientId, normalizedThumbprint),
static (span, parts) =>
{
var index = 0;

View File

@@ -27,10 +27,8 @@ public sealed class DpopProofValidator : IDpopProofValidator
{
ArgumentNullException.ThrowIfNull(options);
var cloned = options.Value ?? throw new InvalidOperationException("DPoP options must be provided.");
cloned.Validate();
this.options = cloned;
var snapshot = options.Value ?? throw new InvalidOperationException("DPoP options must be provided.");
this.options = snapshot.Snapshot();
this.replayCache = replayCache ?? NullReplayCache.Instance;
this.timeProvider = timeProvider ?? TimeProvider.System;
this.logger = logger;
@@ -50,12 +48,14 @@ public sealed class DpopProofValidator : IDpopProofValidator
return DpopValidationResult.Failure("invalid_header", headerError ?? "Unable to decode header.");
}
if (!headerElement.TryGetProperty("typ", out var typElement) || !string.Equals(typElement.GetString(), ProofType, StringComparison.OrdinalIgnoreCase))
if (!headerElement.TryGetProperty("typ", out var typElement) ||
typElement.ValueKind != JsonValueKind.String ||
!string.Equals(typElement.GetString(), ProofType, StringComparison.OrdinalIgnoreCase))
{
return DpopValidationResult.Failure("invalid_header", "DPoP proof missing typ=dpop+jwt header.");
}
if (!headerElement.TryGetProperty("alg", out var algElement))
if (!headerElement.TryGetProperty("alg", out var algElement) || algElement.ValueKind != JsonValueKind.String)
{
return DpopValidationResult.Failure("invalid_header", "DPoP proof missing alg header.");
}
@@ -88,7 +88,7 @@ public sealed class DpopProofValidator : IDpopProofValidator
return DpopValidationResult.Failure("invalid_payload", payloadError ?? "Unable to decode payload.");
}
if (!payloadElement.TryGetProperty("htm", out var htmElement))
if (!payloadElement.TryGetProperty("htm", out var htmElement) || htmElement.ValueKind != JsonValueKind.String)
{
return DpopValidationResult.Failure("invalid_payload", "DPoP proof missing htm claim.");
}
@@ -99,7 +99,7 @@ public sealed class DpopProofValidator : IDpopProofValidator
return DpopValidationResult.Failure("invalid_payload", "DPoP htm does not match request method.");
}
if (!payloadElement.TryGetProperty("htu", out var htuElement))
if (!payloadElement.TryGetProperty("htu", out var htuElement) || htuElement.ValueKind != JsonValueKind.String)
{
return DpopValidationResult.Failure("invalid_payload", "DPoP proof missing htu claim.");
}

View File

@@ -42,6 +42,25 @@ public sealed class DpopValidationOptions
/// </summary>
public IReadOnlySet<string> NormalizedAlgorithms { get; private set; } = ImmutableHashSet<string>.Empty;
internal DpopValidationOptions Snapshot()
{
var clone = new DpopValidationOptions
{
ProofLifetime = ProofLifetime,
AllowedClockSkew = AllowedClockSkew,
ReplayWindow = ReplayWindow
};
clone.allowedAlgorithms.Clear();
foreach (var algorithm in allowedAlgorithms)
{
clone.allowedAlgorithms.Add(algorithm);
}
clone.Validate();
return clone;
}
public void Validate()
{
if (ProofLifetime <= TimeSpan.Zero)

View File

@@ -4,7 +4,7 @@
<LangVersion>preview</LangVersion>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<PropertyGroup>
<Description>Sender-constrained authentication primitives (DPoP, mTLS) shared across StellaOps services.</Description>
@@ -35,6 +35,11 @@
<ItemGroup>
<None Include="README.md" Pack="true" PackagePath="" />
</ItemGroup>
<ItemGroup>
<AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleTo">
<_Parameter1>StellaOps.Auth.Security.Tests</_Parameter1>
</AssemblyAttribute>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\..\Router\__Libraries\StellaOps.Messaging\StellaOps.Messaging.csproj" />
</ItemGroup>

View File

@@ -7,4 +7,4 @@ Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.
| --- | --- | --- |
| AUDIT-0082-M | DONE | Maintainability audit for StellaOps.Auth.Security. |
| AUDIT-0082-T | DONE | Test coverage audit for StellaOps.Auth.Security. |
| AUDIT-0082-A | TODO | Pending approval for changes. |
| AUDIT-0082-A | DONE | DPoP validation hardening, nonce normalization, and tests added. |

View File

@@ -0,0 +1,185 @@
// -----------------------------------------------------------------------------
// DistroDerivative.cs
// Sprint: SPRINT_20251230_001_BE_backport_resolver (BP-301, BP-302, BP-303, BP-304)
// Task: Create DistroDerivativeMapping model for cross-distro OVAL/CSAF evidence
// -----------------------------------------------------------------------------
using System.Collections.Frozen;
using System.Collections.Immutable;
namespace StellaOps.DistroIntel;
/// <summary>
/// Confidence level for derivative distro mappings.
/// Determines the confidence penalty applied when using evidence from a derivative.
/// </summary>
/// <remarks>
/// NOTE(review): the multiplier values quoted below (0.95 / 0.80) are applied by
/// the consuming resolver, not by this enum — confirm they stay in sync with the
/// scoring code.
/// </remarks>
public enum DerivativeConfidence
{
    /// <summary>
    /// High confidence - ABI-compatible rebuilds.
    /// Examples: AlmaLinux/Rocky → RHEL, CentOS → RHEL
    /// Confidence multiplier: 0.95
    /// </summary>
    High,

    /// <summary>
    /// Medium confidence - Modified derivatives with some customizations.
    /// Examples: Linux Mint → Ubuntu, Ubuntu → Debian
    /// Confidence multiplier: 0.80
    /// </summary>
    Medium
}

/// <summary>
/// Represents a relationship between a canonical (parent) distro and a derivative.
/// Used for Tier 1 evidence fallback when native OVAL/CSAF is unavailable.
/// Mapping direction is parent → derivative: evidence published for
/// <paramref name="CanonicalDistro"/> may be applied to <paramref name="DerivativeDistro"/>.
/// </summary>
/// <param name="CanonicalDistro">The parent/upstream distro identifier (e.g., "rhel", "debian").</param>
/// <param name="DerivativeDistro">The derivative distro identifier (e.g., "almalinux", "ubuntu").</param>
/// <param name="MajorRelease">The major release version for which this mapping applies.</param>
/// <param name="Confidence">Confidence level of the derivative relationship.</param>
public sealed record DistroDerivative(
    string CanonicalDistro,
    string DerivativeDistro,
    int MajorRelease,
    DerivativeConfidence Confidence);
/// <summary>
/// Static registry of distro derivative mappings for cross-distro evidence sharing.
/// Lookup methods normalize common distro aliases (see <see cref="NormalizeDistroName"/>)
/// before matching, so "redhat" resolves the same as "rhel".
/// </summary>
public static class DistroMappings
{
    /// <summary>
    /// All known distro derivative relationships.
    /// Maps parent distros to their derivatives for OVAL/CSAF fallback.
    /// </summary>
    public static readonly ImmutableArray<DistroDerivative> Derivatives =
    [
        // RHEL family - High confidence (ABI-compatible rebuilds)
        new DistroDerivative("rhel", "almalinux", 8, DerivativeConfidence.High),
        new DistroDerivative("rhel", "almalinux", 9, DerivativeConfidence.High),
        new DistroDerivative("rhel", "almalinux", 10, DerivativeConfidence.High),
        new DistroDerivative("rhel", "rocky", 8, DerivativeConfidence.High),
        new DistroDerivative("rhel", "rocky", 9, DerivativeConfidence.High),
        new DistroDerivative("rhel", "rocky", 10, DerivativeConfidence.High),
        new DistroDerivative("rhel", "centos", 7, DerivativeConfidence.High),
        new DistroDerivative("rhel", "centos", 8, DerivativeConfidence.High), // CentOS 8 (EOL)
        new DistroDerivative("rhel", "oracle", 7, DerivativeConfidence.High),
        new DistroDerivative("rhel", "oracle", 8, DerivativeConfidence.High),
        new DistroDerivative("rhel", "oracle", 9, DerivativeConfidence.High),

        // Debian family - Medium confidence (modified derivatives).
        // NOTE(review): MajorRelease here appears to be the Debian release, not
        // the Ubuntu release - confirm against the resolver's lookup key.
        new DistroDerivative("debian", "ubuntu", 10, DerivativeConfidence.Medium), // Debian 10 (Buster)
        new DistroDerivative("debian", "ubuntu", 11, DerivativeConfidence.Medium), // Debian 11 (Bullseye)
        new DistroDerivative("debian", "ubuntu", 12, DerivativeConfidence.Medium), // Debian 12 (Bookworm)

        // Ubuntu derivatives - Medium confidence.
        // NOTE(review): Mint majors track Ubuntu LTS bases (Mint 20 -> 20.04,
        // Mint 21 -> 22.04); the original "Ubuntu 21.04 base" note was wrong.
        new DistroDerivative("ubuntu", "linuxmint", 20, DerivativeConfidence.Medium),
        new DistroDerivative("ubuntu", "linuxmint", 21, DerivativeConfidence.Medium),
        new DistroDerivative("ubuntu", "linuxmint", 22, DerivativeConfidence.Medium),
        new DistroDerivative("ubuntu", "pop", 20, DerivativeConfidence.Medium), // Pop!_OS
        new DistroDerivative("ubuntu", "pop", 22, DerivativeConfidence.Medium),

        // SUSE family
        new DistroDerivative("sles", "opensuse-leap", 15, DerivativeConfidence.High),
    ];

    // (canonical, major) -> all derivatives sharing that parent/release.
    private static readonly FrozenDictionary<(string, int), ImmutableArray<DistroDerivative>> _byCanonicalIndex =
        Derivatives
            .GroupBy(d => (d.CanonicalDistro.ToLowerInvariant(), d.MajorRelease))
            .ToFrozenDictionary(
                g => g.Key,
                g => g.ToImmutableArray());

    // (derivative, major) -> its single canonical mapping.
    // NOTE: ToFrozenDictionary throws at type initialization if a
    // (derivative, major) pair ever appears twice in Derivatives; keep the
    // table free of duplicate derivative keys.
    private static readonly FrozenDictionary<(string, int), DistroDerivative?> _byDerivativeIndex =
        Derivatives
            .ToFrozenDictionary(
                d => (d.DerivativeDistro.ToLowerInvariant(), d.MajorRelease),
                d => (DistroDerivative?)d);

    /// <summary>
    /// Finds derivatives for a canonical (parent) distro at a specific major release.
    /// Use this to find alternative evidence sources when native OVAL/CSAF is unavailable.
    /// The input name is normalized, so aliases such as "redhat" also resolve.
    /// </summary>
    /// <param name="canonicalDistro">The canonical distro identifier (e.g., "rhel").</param>
    /// <param name="majorRelease">The major release version.</param>
    /// <returns>Matching derivative mappings, ordered by confidence (High first).</returns>
    /// <example>
    /// var derivatives = DistroMappings.FindDerivativesFor("rhel", 9);
    /// // Returns: [(rhel, almalinux, 9, High), (rhel, rocky, 9, High), (rhel, oracle, 9, High)]
    /// </example>
    public static IEnumerable<DistroDerivative> FindDerivativesFor(string canonicalDistro, int majorRelease)
    {
        var key = (NormalizeDistroName(canonicalDistro), majorRelease);
        if (_byCanonicalIndex.TryGetValue(key, out var derivatives))
        {
            // High = 0 and Medium = 1, so ascending enum order puts High first.
            // (The previous OrderByDescending sorted Medium ahead of High,
            // contradicting the documented contract.)
            return derivatives.OrderBy(d => d.Confidence);
        }
        return [];
    }

    /// <summary>
    /// Finds the canonical (parent) distro for a derivative distro.
    /// Use this to map a derivative back to its upstream source.
    /// The input name is normalized, so aliases such as "rockylinux" also resolve.
    /// </summary>
    /// <param name="derivativeDistro">The derivative distro identifier (e.g., "almalinux").</param>
    /// <param name="majorRelease">The major release version.</param>
    /// <returns>The canonical mapping if found, null otherwise.</returns>
    /// <example>
    /// var canonical = DistroMappings.FindCanonicalFor("almalinux", 9);
    /// // Returns: (rhel, almalinux, 9, High)
    /// </example>
    public static DistroDerivative? FindCanonicalFor(string derivativeDistro, int majorRelease)
    {
        var key = (NormalizeDistroName(derivativeDistro), majorRelease);
        return _byDerivativeIndex.GetValueOrDefault(key);
    }

    /// <summary>
    /// Gets the confidence multiplier for a derivative relationship.
    /// Apply this to the base confidence when using derivative evidence.
    /// </summary>
    /// <param name="confidence">The derivative confidence level.</param>
    /// <returns>Multiplier value (0.95 for High, 0.80 for Medium).</returns>
    public static decimal GetConfidenceMultiplier(DerivativeConfidence confidence)
    {
        return confidence switch
        {
            DerivativeConfidence.High => 0.95m,
            DerivativeConfidence.Medium => 0.80m,
            _ => 0.70m // Unknown - conservative
        };
    }

    /// <summary>
    /// Checks if a distro is a known canonical (parent) distro.
    /// The input name is normalized first, so aliases such as "redhat" count.
    /// </summary>
    /// <param name="distro">The distro identifier to check.</param>
    /// <returns>True if the distro is a known canonical distro.</returns>
    public static bool IsCanonicalDistro(string distro)
    {
        var normalized = NormalizeDistroName(distro);
        return normalized is "rhel" or "debian" or "ubuntu" or "sles" or "alpine";
    }

    /// <summary>
    /// Normalizes a distro name to its canonical form.
    /// </summary>
    /// <param name="distro">The distro name to normalize.</param>
    /// <returns>Lowercase canonical form.</returns>
    public static string NormalizeDistroName(string distro)
    {
        var lower = distro.ToLowerInvariant();
        return lower switch
        {
            "redhat" or "red hat" or "red-hat" => "rhel",
            "alma" or "almalinux-os" => "almalinux",
            "rockylinux" or "rocky-linux" => "rocky",
            "oracle linux" or "oraclelinux" => "oracle",
            "opensuse" or "opensuse-tumbleweed" => "opensuse-leap",
            "mint" => "linuxmint",
            "popos" or "pop_os" => "pop",
            _ => lower
        };
    }
}

View File

@@ -0,0 +1,11 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<LangVersion>preview</LangVersion>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<TreatWarningsAsErrors>false</TreatWarningsAsErrors>
<RootNamespace>StellaOps.DistroIntel</RootNamespace>
</PropertyGroup>
</Project>

View File

@@ -0,0 +1,24 @@
# Infrastructure EfCore Agent Charter
## Mission
- Provide deterministic, tenant-safe EF Core infrastructure shared across modules.
## Responsibilities
- Keep DbContext wiring secure and consistent (schema, tenancy, UTC session).
- Validate configuration inputs and avoid production-only diagnostics in common paths.
- Maintain tests for interceptors, context options, and tenant accessors.
## Required Reading
- docs/modules/platform/architecture-overview.md
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
## Definition of Done
- Connection/session configuration is deterministic and safe.
- Configuration validation is explicit and tested.
- Tests cover tenant context, session settings, and schema wiring.
## Working Agreement
1. Update sprint status in docs/implplan/SPRINT_*.md and local TASKS.md.
2. Review required docs before changes.
3. Keep session configuration deterministic (UTC, schema, tenant).
4. Avoid enabling detailed errors in production by default.

View File

@@ -0,0 +1,10 @@
# Infrastructure EfCore Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0357-M | DONE | Maintainability audit for Infrastructure.EfCore. |
| AUDIT-0357-T | DONE | Test coverage audit for Infrastructure.EfCore. |
| AUDIT-0357-A | TODO | Pending approval (non-test project). |

View File

@@ -0,0 +1,10 @@
# StellaOps.Infrastructure.Postgres Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0358-M | DONE | Maintainability audit for Infrastructure.Postgres. |
| AUDIT-0358-T | DONE | Test coverage audit for Infrastructure.Postgres. |
| AUDIT-0358-A | TODO | Pending approval (non-test project). |

View File

@@ -0,0 +1,23 @@
# Ingestion Telemetry Agent Charter
## Mission
- Provide consistent, low-cardinality telemetry for ingestion flows.
## Responsibilities
- Maintain metric and activity naming stability across releases.
- Keep tag keys consistent and bounded to avoid cardinality blowups.
- Ensure instrumentation stays deterministic and offline-safe.
## Required Reading
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
## Working Directory & Scope
- Primary: src/__Libraries/StellaOps.Ingestion.Telemetry
## Testing Expectations
- Add tests for activity/metric tags and phase/result validation using ActivityListener and MeterListener.
## Working Agreement
- Update sprint status in docs/implplan/SPRINT_*.md and local TASKS.md.
- Keep telemetry tags ASCII and stable.

View File

@@ -0,0 +1,10 @@
# StellaOps.Ingestion.Telemetry Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0361-M | DONE | Maintainability audit for Ingestion.Telemetry. |
| AUDIT-0361-T | DONE | Test coverage audit for Ingestion.Telemetry. |
| AUDIT-0361-A | TODO | Pending approval (non-test project). |

View File

@@ -0,0 +1,25 @@
# Interop Library Agent Charter
## Mission
- Provide consistent tool discovery and execution for offline-safe interop workflows.
## Responsibilities
- Keep process execution deterministic and cancellable.
- Avoid environment-specific assumptions; prefer explicit tool paths.
- Maintain cross-platform tool resolution behavior.
## Required Reading
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/platform/architecture-overview.md
## Working Directory & Scope
- Primary: src/__Libraries/StellaOps.Interop
- Allowed shared projects: src/__Tests/interop/StellaOps.Interop.Tests
## Testing Expectations
- Add unit tests for path resolution and process execution outcomes.
- Avoid reliance on external network or installed tools in unit tests; use stubs.
## Working Agreement
- Update sprint status in docs/implplan/SPRINT_*.md and local TASKS.md.
- Keep outputs deterministic and avoid non-ASCII logs.

View File

@@ -0,0 +1,10 @@
# StellaOps.Interop Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0370-M | DONE | Maintainability audit for StellaOps.Interop. |
| AUDIT-0370-T | DONE | Test coverage audit for StellaOps.Interop. |
| AUDIT-0370-A | TODO | Pending approval. |

View File

@@ -0,0 +1,26 @@
# IssuerDirectory Client Agent Charter
## Mission
- Provide a reliable HTTP client for issuer key and trust lookups with deterministic caching.
## Responsibilities
- Validate options early and normalize tenant/issuer identifiers consistently.
- Keep cache keys stable and invalidation behavior correct.
- Emit actionable error context for remote failures.
## Required Reading
- docs/07_HIGH_LEVEL_ARCHITECTURE.md
- docs/modules/issuer-directory/architecture.md
- docs/modules/platform/architecture-overview.md
## Working Directory & Scope
- Primary: src/__Libraries/StellaOps.IssuerDirectory.Client
- Allowed shared projects: src/IssuerDirectory
## Testing Expectations
- Add unit tests using stubbed HttpMessageHandler to validate headers and paths.
- Cover cache key normalization and invalidation across includeGlobal variants.
## Working Agreement
- Update sprint status in docs/implplan/SPRINT_*.md and local TASKS.md.
- Keep outputs deterministic and avoid non-ASCII logs.

View File

@@ -0,0 +1,10 @@
# StellaOps.IssuerDirectory.Client Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0372-M | DONE | Maintainability audit for IssuerDirectory.Client. |
| AUDIT-0372-T | DONE | Test coverage audit for IssuerDirectory.Client. |
| AUDIT-0372-A | TODO | Pending approval. |

View File

@@ -0,0 +1,68 @@
// -----------------------------------------------------------------------------
// StringVersionComparer.cs
// Sprint: SPRINT_20251230_001_BE_backport_resolver (BP-101)
// Task: Create fallback string version comparer
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
namespace StellaOps.VersionComparison.Comparers;
/// <summary>
/// Fallback version comparer that uses ordinal string comparison.
/// Used when the package ecosystem is unknown or no specific comparator exists.
/// </summary>
public sealed class StringVersionComparer : IVersionComparator, IComparer<string>
{
    /// <summary>
    /// Singleton instance.
    /// </summary>
    public static StringVersionComparer Instance { get; } = new();

    private StringVersionComparer()
    {
    }

    /// <inheritdoc />
    public ComparatorType ComparatorType => ComparatorType.SemVer;

    /// <inheritdoc />
    public int Compare(string? x, string? y)
    {
        // Nulls sort before everything; otherwise plain ordinal comparison.
        if (ReferenceEquals(x, y))
        {
            return 0;
        }

        return (x, y) switch
        {
            (null, _) => -1,
            (_, null) => 1,
            _ => string.CompareOrdinal(x, y)
        };
    }

    /// <inheritdoc />
    public VersionComparisonResult CompareWithProof(string? left, string? right)
    {
        // Null handling mirrors Compare(): null < non-null, null == null.
        if (left is null && right is null)
        {
            return new VersionComparisonResult(0, ["Both versions are null: equal"], ComparatorType.SemVer);
        }

        if (left is null)
        {
            return new VersionComparisonResult(-1, ["Left version is null: less than right"], ComparatorType.SemVer);
        }

        if (right is null)
        {
            return new VersionComparisonResult(1, ["Right version is null: left is greater"], ComparatorType.SemVer);
        }

        var outcome = string.CompareOrdinal(left, right);
        var description = outcome switch
        {
            < 0 => "left is older",
            > 0 => "left is newer",
            _ => "equal"
        };
        var op = outcome switch
        {
            < 0 => "<",
            > 0 => ">",
            _ => "=="
        };

        return new VersionComparisonResult(
            outcome,
            [$"String comparison (fallback): '{left}' {op} '{right}' ({description})"],
            ComparatorType.SemVer);
    }
}

View File

@@ -5,6 +5,7 @@
// -----------------------------------------------------------------------------
using System.IO.Compression;
using System.Linq;
using System.Text.Json;
using FluentAssertions;
using StellaOps.AuditPack.Services;
@@ -25,7 +26,10 @@ public class AuditPackExportServiceIntegrationTests
public AuditPackExportServiceIntegrationTests()
{
var mockWriter = new MockAuditBundleWriter();
_service = new AuditPackExportService(mockWriter);
var repository = new FakeAuditPackRepository();
var timeProvider = new FixedTimeProvider(new DateTimeOffset(2025, 6, 1, 12, 0, 0, TimeSpan.Zero));
var dsseSigner = new FakeDsseSigner();
_service = new AuditPackExportService(mockWriter, repository, timeProvider, dsseSigner);
}
#region ZIP Export Tests
@@ -94,8 +98,7 @@ public class AuditPackExportServiceIntegrationTests
using var memoryStream = new MemoryStream(result.Data!);
using var archive = new ZipArchive(memoryStream, ZipArchiveMode.Read);
// Note: Attestations entry may be empty without repository
archive.Entries.Should().Contain(e => e.FullName.Contains("manifest.json"));
archive.GetEntry("attestations/attestations.json").Should().NotBeNull();
}
[Fact(DisplayName = "ZIP export includes proof chain when requested")]
@@ -117,6 +120,10 @@ public class AuditPackExportServiceIntegrationTests
// Assert
result.Success.Should().BeTrue();
using var memoryStream = new MemoryStream(result.Data!);
using var archive = new ZipArchive(memoryStream, ZipArchiveMode.Read);
archive.GetEntry("proof/proof-chain.json").Should().NotBeNull();
}
[Fact(DisplayName = "ZIP manifest contains export metadata")]
@@ -202,11 +209,10 @@ public class AuditPackExportServiceIntegrationTests
{
// Arrange
var request = CreateTestRequest(ExportFormat.Json);
var beforeExport = DateTimeOffset.UtcNow;
var expected = new DateTimeOffset(2025, 6, 1, 12, 0, 0, TimeSpan.Zero);
// Act
var result = await _service.ExportAsync(request);
var afterExport = DateTimeOffset.UtcNow;
// Assert
result.Success.Should().BeTrue();
@@ -214,8 +220,7 @@ public class AuditPackExportServiceIntegrationTests
var doc = JsonDocument.Parse(result.Data!);
var exportedAt = DateTimeOffset.Parse(doc.RootElement.GetProperty("exportedAt").GetString()!);
exportedAt.Should().BeOnOrAfter(beforeExport);
exportedAt.Should().BeOnOrBefore(afterExport);
exportedAt.Should().Be(expected);
}
#endregion
@@ -409,3 +414,39 @@ internal class MockAuditBundleWriter : Services.IAuditBundleWriter
});
}
}
// Test double returning small, deterministic JSON payloads for every repository call.
internal sealed class FakeAuditPackRepository : IAuditPackRepository
{
    // Echoes the requested segment and scan id back as a serialized dictionary.
    public Task<byte[]?> GetSegmentDataAsync(string scanId, ExportSegment segment, CancellationToken ct)
        => Task.FromResult<byte[]?>(JsonSerializer.SerializeToUtf8Bytes(
            new Dictionary<string, object>
            {
                ["segment"] = segment.ToString(),
                ["scanId"] = scanId
            }));

    // Single canned attestation entry for the scan.
    public Task<IReadOnlyList<object>> GetAttestationsAsync(string scanId, CancellationToken ct)
    {
        IReadOnlyList<object> attestations = new[] { new { attestationId = "att-1", scanId } };
        return Task.FromResult(attestations);
    }

    // Canned proof-chain object for the scan.
    public Task<object?> GetProofChainAsync(string scanId, CancellationToken ct)
    {
        object? chain = new { proof = "chain", scanId };
        return Task.FromResult(chain);
    }
}
// Deterministic TimeProvider pinned to a single instant for reproducible tests.
internal sealed class FixedTimeProvider : TimeProvider
{
    private readonly DateTimeOffset _now;

    public FixedTimeProvider(DateTimeOffset now) => _now = now;

    // Always reports the same instant regardless of wall-clock time.
    public override DateTimeOffset GetUtcNow() => _now;
}
// Test double that "signs" by base64-encoding the first four payload bytes.
internal sealed class FakeDsseSigner : IAuditPackExportSigner
{
    public Task<DsseSignature> SignAsync(byte[] payload, CancellationToken ct)
    {
        // Take(4) in the original tolerates payloads shorter than four bytes;
        // Math.Min preserves that behavior without the intermediate array.
        var prefixLength = Math.Min(4, payload.Length);
        var signature = new DsseSignature
        {
            KeyId = "test-key",
            Sig = Convert.ToBase64String(payload, 0, prefixLength)
        };
        return Task.FromResult(signature);
    }
}

View File

@@ -0,0 +1,100 @@
using System.Collections.Immutable;
using System.IO.Compression;
using System.Text.Json;
using FluentAssertions;
using StellaOps.AuditPack.Models;
using StellaOps.AuditPack.Services;
namespace StellaOps.AuditPack.Tests;
/// <summary>
/// Unit tests for the audit-pack importer covering extraction cleanup,
/// path-traversal rejection, and signature verification failure paths.
/// Temp archives created under <see cref="Path.GetTempPath"/> are deleted in
/// finally blocks so repeated runs do not leak files (the original leaked them).
/// </summary>
[Trait("Category", "Unit")]
public sealed class AuditPackImporterTests
{
    [Fact]
    public async Task ImportAsync_DeletesTempDirectory_WhenKeepExtractedIsFalse()
    {
        var archivePath = CreateArchiveWithManifest();
        try
        {
            var importer = new AuditPackImporter(new GuidAuditPackIdGenerator());

            var result = await importer.ImportAsync(archivePath, new ImportOptions { KeepExtracted = false });

            result.Success.Should().BeTrue();
            // KeepExtracted = false must not surface an extraction directory.
            result.ExtractDirectory.Should().BeNull();
        }
        finally
        {
            File.Delete(archivePath);
        }
    }

    [Fact]
    public async Task ImportAsync_FailsOnPathTraversalEntries()
    {
        // An entry escaping the extraction root ("../evil.txt") must abort the import.
        var archivePath = CreateArchiveWithEntries(
            new ArchivePayload("manifest.json", CreateManifestBytes()),
            new ArchivePayload("../evil.txt", new byte[] { 1, 2, 3 }));
        try
        {
            var importer = new AuditPackImporter(new GuidAuditPackIdGenerator());

            var result = await importer.ImportAsync(archivePath, new ImportOptions());

            result.Success.Should().BeFalse();
            result.Errors.Should().NotBeNull();
        }
        finally
        {
            File.Delete(archivePath);
        }
    }

    [Fact]
    public async Task ImportAsync_FailsWhenSignaturePresentWithoutTrustRoots()
    {
        // A manifest signature with no configured trust roots cannot be verified.
        var archivePath = CreateArchiveWithEntries(
            new ArchivePayload("manifest.json", CreateManifestBytes()),
            new ArchivePayload("manifest.sig", new byte[] { 1, 2, 3 }));
        try
        {
            var importer = new AuditPackImporter(new GuidAuditPackIdGenerator());

            var result = await importer.ImportAsync(archivePath, new ImportOptions { VerifySignatures = true });

            result.Success.Should().BeFalse();
            result.Errors.Should().Contain(e => e.Contains("Signature verification failed", StringComparison.Ordinal));
        }
        finally
        {
            File.Delete(archivePath);
        }
    }

    // Creates a tar.gz containing only a minimal valid manifest.json.
    private static string CreateArchiveWithManifest()
        => CreateArchiveWithEntries(new ArchivePayload("manifest.json", CreateManifestBytes()));

    // Writes the given payloads into a PAX tar.gz under the system temp directory
    // and returns the archive path; the caller is responsible for deleting it.
    private static string CreateArchiveWithEntries(params ArchivePayload[] payloads)
    {
        var outputPath = Path.Combine(Path.GetTempPath(), $"audit-pack-test-{Guid.NewGuid():N}.tar.gz");
        using (var fileStream = File.Create(outputPath))
        using (var gzip = new GZipStream(fileStream, CompressionLevel.Optimal, leaveOpen: false))
        using (var tarWriter = new System.Formats.Tar.TarWriter(gzip, System.Formats.Tar.TarEntryFormat.Pax, leaveOpen: false))
        {
            foreach (var payload in payloads)
            {
                var entry = new System.Formats.Tar.PaxTarEntry(System.Formats.Tar.TarEntryType.RegularFile, payload.Path)
                {
                    DataStream = new MemoryStream(payload.Content, writable: false)
                };
                tarWriter.WriteEntry(entry);
            }
        }
        return outputPath;
    }

    // Serializes a minimal AuditPack manifest with deterministic timestamps.
    private static byte[] CreateManifestBytes()
    {
        var pack = new StellaOps.AuditPack.Models.AuditPack
        {
            PackId = "pack-1",
            Name = "pack",
            CreatedAt = DateTimeOffset.UnixEpoch,
            RunManifest = new RunManifest("scan-1", DateTimeOffset.UnixEpoch),
            EvidenceIndex = new EvidenceIndex(Array.Empty<string>().ToImmutableArray()),
            Verdict = new Verdict("verdict-1", "completed"),
            OfflineBundle = new BundleManifest("bundle-1", "1.0"),
            Contents = new PackContents
            {
                Files = Array.Empty<PackFile>().ToImmutableArray(),
                TotalSizeBytes = 0,
                FileCount = 0
            }
        };
        return JsonSerializer.SerializeToUtf8Bytes(pack);
    }

    // Named file entry for test archives.
    private sealed record ArchivePayload(string Path, byte[] Content);
}

View File

@@ -0,0 +1,130 @@
using System.Text.Json;
using StellaOps.AuditPack.Models;
using StellaOps.AuditPack.Services;
namespace StellaOps.AuditPack.Tests;
// Unit tests for ReplayAttestationService: generation without a signer yields an
// unverifiable envelope, while a custom verifier accepting the DSSE signature
// plus a payload digest that matches the statement lets verification succeed.
[Trait("Category", "Unit")]
public sealed class ReplayAttestationServiceTests
{
    [Fact]
    public async Task VerifyAsync_Fails_WhenEnvelopeHasNoSignatures()
    {
        // Service built without a signer/verifier; time pinned for determinism.
        var service = new ReplayAttestationService(timeProvider: new FixedTimeProvider(DateTimeOffset.UnixEpoch));
        // Minimal fully-populated manifest + matching replay result ("Match" path).
        var attestation = await service.GenerateAsync(
            new AuditBundleManifest
            {
                BundleId = "bundle-1",
                Name = "bundle",
                CreatedAt = DateTimeOffset.UnixEpoch,
                ScanId = "scan-1",
                ImageRef = "image",
                ImageDigest = "sha256:abc",
                MerkleRoot = "sha256:root",
                Inputs = new InputDigests
                {
                    SbomDigest = "sha256:sbom",
                    FeedsDigest = "sha256:feeds",
                    PolicyDigest = "sha256:policy"
                },
                VerdictDigest = "sha256:verdict",
                Decision = "pass",
                Files = [],
                TotalSizeBytes = 0
            },
            new ReplayExecutionResult
            {
                Success = true,
                Status = ReplayStatus.Match,
                InputsVerified = true,
                VerdictMatches = true,
                DecisionMatches = true,
                OriginalVerdictDigest = "sha256:verdict",
                ReplayedVerdictDigest = "sha256:verdict",
                OriginalDecision = "pass",
                ReplayedDecision = "pass",
                Drifts = [],
                Errors = [],
                DurationMs = 0,
                EvaluatedAt = DateTimeOffset.UnixEpoch
            });
        var result = await service.VerifyAsync(attestation);
        // Without signatures the attestation must be rejected with a
        // signature-related error message.
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Contains("signatures", StringComparison.OrdinalIgnoreCase));
    }

    [Fact]
    public async Task VerifyAsync_Succeeds_WithVerifierAndValidPayload()
    {
        // Verifier stub accepts any envelope, so only payload consistency is exercised.
        var verifier = new AcceptAllVerifier();
        var service = new ReplayAttestationService(verifier: verifier);
        // Canonical in-toto statement; its bytes are reused for both the DSSE
        // payload and the statement digest so the two stay consistent.
        var payload = CanonicalJson.Serialize(new InTotoStatement
        {
            Type = "https://in-toto.io/Statement/v1",
            Subject = [new InTotoSubject
            {
                Name = "verdict:bundle-1",
                Digest = new Dictionary<string, string> { ["sha256"] = "abc" }
            }],
            PredicateType = "https://stellaops.io/attestation/verdict-replay/v1",
            Predicate = new VerdictReplayAttestation
            {
                ManifestId = "bundle-1",
                ScanId = "scan-1",
                ImageRef = "image",
                ImageDigest = "sha256:abc",
                InputsDigest = "sha256:inputs",
                OriginalVerdictDigest = "sha256:verdict",
                OriginalDecision = "pass",
                Match = true,
                Status = "Match",
                DriftCount = 0,
                EvaluatedAt = DateTimeOffset.UnixEpoch,
                ReplayedAt = DateTimeOffset.UnixEpoch,
                DurationMs = 0
            }
        });
        var attestation = new ReplayAttestation
        {
            AttestationId = "att-1",
            ManifestId = "bundle-1",
            CreatedAt = DateTimeOffset.UnixEpoch,
            // Round-trip through JSON so the statement matches the payload bytes.
            Statement = JsonSerializer.Deserialize<InTotoStatement>(payload)!,
            // Digest computed over the exact payload bytes ("sha256:" + lowercase hex).
            StatementDigest = "sha256:" + Convert.ToHexString(System.Security.Cryptography.SHA256.HashData(payload)).ToLowerInvariant(),
            Envelope = new ReplayDsseEnvelope
            {
                PayloadType = "application/vnd.in-toto+json",
                Payload = Convert.ToBase64String(payload),
                Signatures = [new ReplayDsseSignature { KeyId = "key", Sig = "sig" }]
            },
            Match = true,
            ReplayStatus = "Match"
        };
        var result = await service.VerifyAsync(attestation);
        Assert.True(result.IsValid);
        Assert.True(result.SignatureVerified);
    }

    // Signature verifier stub that reports every envelope as verified.
    private sealed class AcceptAllVerifier : IReplayAttestationSignatureVerifier
    {
        public Task<ReplayAttestationSignatureVerification> VerifyAsync(
            ReplayDsseEnvelope envelope,
            byte[] payload,
            CancellationToken cancellationToken = default)
        {
            return Task.FromResult(new ReplayAttestationSignatureVerification { Verified = true });
        }
    }

    // Deterministic TimeProvider pinned to a single instant.
    private sealed class FixedTimeProvider(DateTimeOffset now) : TimeProvider
    {
        public override DateTimeOffset GetUtcNow() => now;
    }
}

View File

@@ -0,0 +1,22 @@
# Auth Security Tests AGENTS
## Purpose & Scope
- Working directory: `src/__Libraries/__Tests/StellaOps.Auth.Security.Tests/`.
- Roles: QA automation, backend engineer.
- Focus: DPoP proof validation, nonce/replay caches, and edge-case coverage.
## Required Reading (treat as read before DOING)
- `docs/README.md`
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
- `docs/modules/platform/architecture-overview.md`
- `docs/modules/authority/architecture.md`
- Relevant sprint files.
## Working Agreements
- Keep tests deterministic (fixed time/IDs, stable ordering).
- Avoid live network calls and nondeterministic RNG.
- Update `docs/implplan/SPRINT_*.md` and local `TASKS.md` when starting or completing work.
## Testing
- Use xUnit + FluentAssertions + TestKit.
- Cover validator error paths, nonce stores, and replay cache semantics.

View File

@@ -0,0 +1,17 @@
using StellaOps.Auth.Security.Dpop;
using Xunit;
using StellaOps.TestKit;
namespace StellaOps.Auth.Security.Tests;
// Verifies that DPoP nonce storage keys are case-normalized.
public class DpopNonceUtilitiesTests
{
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public void ComputeStorageKey_NormalizesToLowerInvariant()
    {
        const string expected = "dpop-nonce:api:client-id:thumbprint";

        var actual = DpopNonceUtilities.ComputeStorageKey("API", "Client-Id", "ThumbPrint");

        Assert.Equal(expected, actual);
    }
}

View File

@@ -0,0 +1,226 @@
using System;
using System.IdentityModel.Tokens.Jwt;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using Microsoft.IdentityModel.Tokens;
using StellaOps.Auth.Security.Dpop;
using Xunit;
using StellaOps.TestKit;
namespace StellaOps.Auth.Security.Tests;
public class DpopProofValidatorTests
{
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ValidateAsync_ReturnsFailure_ForNonStringTyp()
{
var proof = BuildUnsignedToken(
new { typ = 123, alg = "ES256" },
new { htm = "GET", htu = "https://api.test/resource", iat = 0, jti = "1" });
var validator = CreateValidator();
var result = await validator.ValidateAsync(proof, "GET", new Uri("https://api.test/resource"));
Assert.False(result.IsValid);
Assert.Equal("invalid_header", result.ErrorCode);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ValidateAsync_ReturnsFailure_ForNonStringAlg()
{
var proof = BuildUnsignedToken(
new { typ = "dpop+jwt", alg = 55 },
new { htm = "GET", htu = "https://api.test/resource", iat = 0, jti = "1" });
var validator = CreateValidator();
var result = await validator.ValidateAsync(proof, "GET", new Uri("https://api.test/resource"));
Assert.False(result.IsValid);
Assert.Equal("invalid_header", result.ErrorCode);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ValidateAsync_ReturnsFailure_ForNonStringHtm()
{
var now = DateTimeOffset.Parse("2025-01-01T00:00:00Z");
var (proof, _) = CreateSignedProof(now, payloadMutator: payload => payload["htm"] = 123);
var validator = CreateValidator(now);
var result = await validator.ValidateAsync(proof, "GET", new Uri("https://api.test/resource"));
Assert.False(result.IsValid);
Assert.Equal("invalid_payload", result.ErrorCode);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ValidateAsync_ReturnsFailure_ForNonStringHtu()
{
var now = DateTimeOffset.Parse("2025-01-01T00:00:00Z");
var (proof, _) = CreateSignedProof(now, payloadMutator: payload => payload["htu"] = 123);
var validator = CreateValidator(now);
var result = await validator.ValidateAsync(proof, "GET", new Uri("https://api.test/resource"));
Assert.False(result.IsValid);
Assert.Equal("invalid_payload", result.ErrorCode);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ValidateAsync_ReturnsFailure_ForNonStringNonce()
{
var now = DateTimeOffset.Parse("2025-01-01T00:00:00Z");
var (proof, _) = CreateSignedProof(now, payloadMutator: payload => payload["nonce"] = 999);
var validator = CreateValidator(now);
var result = await validator.ValidateAsync(proof, "GET", new Uri("https://api.test/resource"), nonce: "nonce-1");
Assert.False(result.IsValid);
Assert.Equal("invalid_token", result.ErrorCode);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ValidateAsync_RejectsProofIssuedInFuture()
{
var now = DateTimeOffset.Parse("2025-01-01T00:00:00Z");
var issuedAt = now.AddMinutes(2);
var (proof, _) = CreateSignedProof(issuedAt);
var validator = CreateValidator(now, options => options.AllowedClockSkew = TimeSpan.FromSeconds(5));
var result = await validator.ValidateAsync(proof, "GET", new Uri("https://api.test/resource"));
Assert.False(result.IsValid);
Assert.Equal("invalid_token", result.ErrorCode);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ValidateAsync_RejectsExpiredProofs()
{
var now = DateTimeOffset.Parse("2025-01-01T00:00:00Z");
var issuedAt = now.AddMinutes(-10);
var (proof, _) = CreateSignedProof(issuedAt);
var validator = CreateValidator(now, options =>
{
options.ProofLifetime = TimeSpan.FromMinutes(1);
options.AllowedClockSkew = TimeSpan.FromSeconds(5);
});
var result = await validator.ValidateAsync(proof, "GET", new Uri("https://api.test/resource"));
Assert.False(result.IsValid);
Assert.Equal("invalid_token", result.ErrorCode);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ValidateAsync_RejectsReplayTokens()
{
var now = DateTimeOffset.Parse("2025-01-01T00:00:00Z");
var jwtId = "jwt-1";
var (proof, _) = CreateSignedProof(now, jti: jwtId);
var timeProvider = new FakeTimeProvider(now);
var replayCache = new InMemoryDpopReplayCache(timeProvider);
var validator = CreateValidator(timeProvider, replayCache);
var first = await validator.ValidateAsync(proof, "GET", new Uri("https://api.test/resource"));
var second = await validator.ValidateAsync(proof, "GET", new Uri("https://api.test/resource"));
Assert.True(first.IsValid);
Assert.False(second.IsValid);
Assert.Equal("replay", second.ErrorCode);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ValidateAsync_UsesSnapshotOfOptions()
{
var now = DateTimeOffset.Parse("2025-01-01T00:00:00Z");
var (proof, _) = CreateSignedProof(now);
var options = new DpopValidationOptions();
var timeProvider = new FakeTimeProvider(now);
var validator = new DpopProofValidator(Options.Create(options), new InMemoryDpopReplayCache(timeProvider), timeProvider);
options.AllowedAlgorithms.Clear();
options.AllowedAlgorithms.Add("ES512");
var result = await validator.ValidateAsync(proof, "GET", new Uri("https://api.test/resource"));
Assert.True(result.IsValid);
}
private static DpopProofValidator CreateValidator(DateTimeOffset now, Action<DpopValidationOptions>? configure = null)
{
var timeProvider = new FakeTimeProvider(now);
return CreateValidator(timeProvider, null, configure);
}
/// <summary>
/// Builds a <see cref="DpopProofValidator"/> over the supplied clock, an optional replay
/// cache, and an optional callback that customizes the validation options before use.
/// </summary>
private static DpopProofValidator CreateValidator(TimeProvider timeProvider, IDpopReplayCache? replayCache = null, Action<DpopValidationOptions>? configure = null)
{
    var validationOptions = new DpopValidationOptions();
    configure?.Invoke(validationOptions);
    return new DpopProofValidator(Options.Create(validationOptions), replayCache, timeProvider);
}
/// <summary>
/// Creates an ES256-signed DPoP proof JWT and returns it together with the public JWK
/// embedded in its header. The optional mutators let individual tests tamper with the
/// header or payload (e.g. wrong typ, missing claims) before the token is written.
/// </summary>
private static (string Token, JsonWebKey Jwk) CreateSignedProof(
    DateTimeOffset issuedAt,
    string? method = "GET",
    Uri? httpUri = null,
    string? nonce = null,
    string? jti = null,
    Action<JwtHeader>? headerMutator = null,
    Action<JwtPayload>? payloadMutator = null)
{
    var targetUri = httpUri ?? new Uri("https://api.test/resource");

    // Fresh P-256 key per proof; the public half travels in the "jwk" header parameter.
    using var signingKey = ECDsa.Create(ECCurve.NamedCurves.nistP256);
    var ecdsaSecurityKey = new ECDsaSecurityKey(signingKey) { KeyId = Guid.NewGuid().ToString("N") };
    var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(ecdsaSecurityKey);

    var header = new JwtHeader(new SigningCredentials(ecdsaSecurityKey, SecurityAlgorithms.EcdsaSha256))
    {
        { "typ", "dpop+jwt" },
        { "jwk", jwk }
    };
    headerMutator?.Invoke(header);

    var payload = new JwtPayload
    {
        { "htm", method ?? "GET" },
        { "htu", targetUri.ToString() },
        { "iat", issuedAt.ToUnixTimeSeconds() },
        { "jti", jti ?? Guid.NewGuid().ToString("N") }
    };
    if (nonce is not null)
    {
        payload["nonce"] = nonce;
    }
    payloadMutator?.Invoke(payload);

    // The token is fully serialized before the signing key is disposed.
    var serialized = new JwtSecurityTokenHandler().WriteToken(new JwtSecurityToken(header, payload));
    return (serialized, jwk);
}
/// <summary>
/// Produces a structurally valid JWT string ("header.payload.signature") whose
/// signature segment is the literal placeholder "signature", for exercising
/// signature-validation failure paths.
/// </summary>
private static string BuildUnsignedToken(object header, object payload)
{
    // Each JOSE segment is the base64url encoding of the UTF-8 JSON serialization.
    static string EncodeSegment(object value)
        => Base64UrlEncoder.Encode(Encoding.UTF8.GetBytes(JsonSerializer.Serialize(value)));

    return $"{EncodeSegment(header)}.{EncodeSegment(payload)}.signature";
}
}

View File

@@ -0,0 +1,110 @@
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Auth.Security.Dpop;
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
using Xunit;
using StellaOps.TestKit;
namespace StellaOps.Auth.Security.Tests;
/// <summary>
/// Exercises both DPoP replay-cache implementations: a jti may be stored once,
/// is rejected while its entry is unexpired, and becomes storable again once
/// the clock advances past the expiry.
/// </summary>
public class DpopReplayCacheTests
{
    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task InMemoryReplayCache_RejectsDuplicatesUntilExpiry()
    {
        var clock = new FakeTimeProvider(DateTimeOffset.Parse("2025-01-01T00:00:00Z"));
        var cache = new InMemoryDpopReplayCache(clock);

        var expiry = clock.GetUtcNow().AddMinutes(1);
        Assert.True(await cache.TryStoreAsync("jti-1", expiry));   // first store wins
        Assert.False(await cache.TryStoreAsync("jti-1", expiry));  // replay within window

        clock.Advance(TimeSpan.FromMinutes(2));                    // move past expiry
        Assert.True(await cache.TryStoreAsync("jti-1", clock.GetUtcNow().AddMinutes(1)));
    }

    [Trait("Category", TestCategories.Unit)]
    [Fact]
    public async Task MessagingReplayCache_RejectsDuplicatesUntilExpiry()
    {
        var clock = new FakeTimeProvider(DateTimeOffset.Parse("2025-01-01T00:00:00Z"));
        var cache = new MessagingDpopReplayCache(new FakeIdempotencyStoreFactory(clock), clock);

        var expiry = clock.GetUtcNow().AddMinutes(1);
        Assert.True(await cache.TryStoreAsync("jti-1", expiry));   // first store wins
        Assert.False(await cache.TryStoreAsync("jti-1", expiry));  // replay within window

        clock.Advance(TimeSpan.FromMinutes(2));                    // move past expiry
        Assert.True(await cache.TryStoreAsync("jti-1", clock.GetUtcNow().AddMinutes(1)));
    }

    /// <summary>Factory that hands out the same in-memory fake store regardless of name.</summary>
    private sealed class FakeIdempotencyStoreFactory : IIdempotencyStoreFactory
    {
        private readonly FakeIdempotencyStore sharedStore;

        public FakeIdempotencyStoreFactory(TimeProvider timeProvider)
            => sharedStore = new FakeIdempotencyStore(timeProvider);

        public string ProviderName => "fake";

        public IIdempotencyStore Create(string name) => sharedStore;
    }

    /// <summary>
    /// Minimal dictionary-backed <see cref="IIdempotencyStore"/>. Entries expire against
    /// the injected clock but are never proactively evicted; expired entries are simply
    /// treated as absent (and overwritten on the next claim).
    /// </summary>
    private sealed class FakeIdempotencyStore : IIdempotencyStore
    {
        private readonly record struct Entry(string Value, DateTimeOffset ExpiresAt);

        private readonly Dictionary<string, Entry> claims = new(StringComparer.Ordinal);
        private readonly TimeProvider clock;

        public FakeIdempotencyStore(TimeProvider timeProvider) => clock = timeProvider;

        public string ProviderName => "fake";

        public ValueTask<IdempotencyResult> TryClaimAsync(string key, string value, TimeSpan window, CancellationToken cancellationToken = default)
        {
            var now = clock.GetUtcNow();
            if (claims.TryGetValue(key, out var existing) && existing.ExpiresAt > now)
            {
                // A live entry already holds this key: surface its value as a duplicate.
                return ValueTask.FromResult(IdempotencyResult.Duplicate(existing.Value));
            }

            claims[key] = new Entry(value, now.Add(window));
            return ValueTask.FromResult(IdempotencyResult.Claimed());
        }

        public ValueTask<bool> ExistsAsync(string key, CancellationToken cancellationToken = default)
            => ValueTask.FromResult(IsLive(key));

        public ValueTask<string?> GetAsync(string key, CancellationToken cancellationToken = default)
            => ValueTask.FromResult(claims.TryGetValue(key, out var entry) && entry.ExpiresAt > clock.GetUtcNow() ? entry.Value : null);

        public ValueTask<bool> ReleaseAsync(string key, CancellationToken cancellationToken = default)
            => ValueTask.FromResult(claims.Remove(key));

        public ValueTask<bool> ExtendAsync(string key, TimeSpan extension, CancellationToken cancellationToken = default)
        {
            if (!claims.TryGetValue(key, out var entry))
            {
                return ValueTask.FromResult(false);
            }

            claims[key] = entry with { ExpiresAt = entry.ExpiresAt.Add(extension) };
            return ValueTask.FromResult(true);
        }

        // An entry counts as live only while its expiry is strictly in the future.
        private bool IsLive(string key)
            => claims.TryGetValue(key, out var entry) && entry.ExpiresAt > clock.GetUtcNow();
    }
}

View File

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
<ProjectReference Include="../../StellaOps.TestKit/StellaOps.TestKit.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,8 @@
# Auth Security Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0082-A | DONE | Test coverage for DPoP validation, nonce stores, and replay cache. |

View File

@@ -0,0 +1,26 @@
# Infrastructure Postgres Tests Agent Charter
## Mission
- Validate Infrastructure.Postgres migration and fixture behaviors with deterministic integration tests.
## Responsibilities
- Maintain Testcontainers-based coverage for migrations and fixtures.
- Keep tests categorized for CI selection and handle Docker availability gracefully.
- Ensure test data and schema naming remain deterministic and cleaned up.
## Required Reading
- src/__Libraries/StellaOps.Infrastructure.Postgres/AGENTS.md
- src/__Tests/__Libraries/StellaOps.Infrastructure.Postgres.Testing/AGENTS.md
- docs/modules/platform/architecture-overview.md
## Working Directory & Scope
- Primary: src/__Libraries/__Tests/StellaOps.Infrastructure.Postgres.Tests
- Allowed shared libs/tests: src/__Libraries/StellaOps.Infrastructure.Postgres, src/__Tests/__Libraries/StellaOps.Infrastructure.Postgres.Testing
## Testing Expectations
- Integration tests must be tagged as Integration and isolated per schema.
- Skip or gate tests when Docker/Testcontainers is unavailable.
## Working Agreement
- Update sprint status in docs/implplan/SPRINT_*.md and local TASKS.md.
- Keep test artifacts deterministic and clean up schemas after runs.

View File

@@ -0,0 +1,10 @@
# StellaOps.Infrastructure.Postgres.Tests Task Board
This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
| Task ID | Status | Notes |
| --- | --- | --- |
| AUDIT-0360-M | DONE | Maintainability audit for Infrastructure.Postgres.Tests. |
| AUDIT-0360-T | DONE | Test coverage audit for Infrastructure.Postgres.Tests. |
| AUDIT-0360-A | DONE | Waived (test project). |