Add bundle-based verdict replay (CLI `verify --bundle`) — sprints and audit work

This commit is contained in:
StellaOps Bot
2026-01-07 09:36:16 +02:00
parent 05833e0af2
commit ab364c6032
377 changed files with 64534 additions and 1627 deletions

View File

@@ -47,6 +47,17 @@ public interface IVerdictBuilder
string fromCgs,
string toCgs,
CancellationToken ct = default);
/// <summary>
/// Replay a verdict from bundle inputs (frozen files).
/// Used by the CLI verify --bundle command for deterministic replay.
/// </summary>
/// <param name="request">Request containing paths to the frozen inputs (SBOM path required; feeds, VEX, and policy paths optional).</param>
/// <param name="ct">Cancellation token.</param>
/// <returns>Replay result carrying the computed verdict hash on success, or an error description on failure.</returns>
ValueTask<VerdictReplayResult> ReplayFromBundleAsync(
VerdictReplayRequest request,
CancellationToken ct = default);
}
/// <summary>
@@ -160,3 +171,76 @@ public enum CgsVerdictStatus
Fixed,
UnderInvestigation
}
/// <summary>
/// Request for replaying a verdict from a replay bundle.
/// Used by the CLI verify --bundle command. Paths point at frozen files
/// captured at bundle-creation time; digests pin the inputs for determinism.
/// </summary>
public sealed record VerdictReplayRequest
{
/// <summary>
/// Path to the SBOM file in the bundle. Must exist on disk at replay time.
/// </summary>
public required string SbomPath { get; init; }
/// <summary>
/// Path to the feeds snapshot directory in the bundle (optional).
/// </summary>
public string? FeedsPath { get; init; }
/// <summary>
/// Path to the VEX documents directory in the bundle (optional).
/// When present, *.json files under it (recursively) are treated as VEX documents.
/// </summary>
public string? VexPath { get; init; }
/// <summary>
/// Path to the policy bundle in the bundle (optional).
/// When absent, a default policy lock is synthesized from <see cref="PolicyDigest"/>.
/// </summary>
public string? PolicyPath { get; init; }
/// <summary>
/// Image digest (sha256:...) being evaluated.
/// </summary>
public required string ImageDigest { get; init; }
/// <summary>
/// Policy version digest for determinism.
/// NOTE(review): presumably the same sha256:... form as ImageDigest — confirm against the bundle producer.
/// </summary>
public required string PolicyDigest { get; init; }
/// <summary>
/// Feed snapshot digest for determinism; flows unchanged into the evidence pack.
/// </summary>
public required string FeedSnapshotDigest { get; init; }
}
/// <summary>
/// Result of a bundle-based verdict replay.
/// Exactly one of <see cref="VerdictHash"/> (on success) or <see cref="Error"/>
/// (on failure) is expected to be populated.
/// </summary>
public sealed record VerdictReplayResult
{
/// <summary>
/// Whether the replay completed successfully.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Computed verdict hash from replay; null when <see cref="Success"/> is false.
/// </summary>
public string? VerdictHash { get; init; }
/// <summary>
/// Error message if replay failed; null on success.
/// </summary>
public string? Error { get; init; }
/// <summary>
/// Duration of replay in milliseconds (wall-clock, measured by the replaying engine).
/// </summary>
public long DurationMs { get; init; }
/// <summary>
/// Engine version that performed the replay.
/// </summary>
public string? EngineVersion { get; init; }
}

View File

@@ -121,6 +121,140 @@ public sealed class VerdictBuilderService : IVerdictBuilder
);
}
/// <inheritdoc/>
public async ValueTask<VerdictReplayResult> ReplayFromBundleAsync(
    VerdictReplayRequest request,
    CancellationToken ct = default)
{
    ArgumentNullException.ThrowIfNull(request);

    var sw = System.Diagnostics.Stopwatch.StartNew();
    const string engineVersion = "1.0.0";

    try
    {
        _logger.LogInformation(
            "Starting bundle replay for image={ImageDigest}, policy={PolicyDigest}",
            request.ImageDigest,
            request.PolicyDigest);

        // 1. Load and validate SBOM.
        if (!File.Exists(request.SbomPath))
        {
            // Stop the stopwatch before reporting duration, consistent with
            // every other exit path from this method.
            sw.Stop();
            return new VerdictReplayResult
            {
                Success = false,
                Error = $"SBOM file not found: {request.SbomPath}",
                DurationMs = sw.ElapsedMilliseconds,
                EngineVersion = engineVersion
            };
        }

        var sbomContent = await File.ReadAllTextAsync(request.SbomPath, ct).ConfigureAwait(false);

        // 2. Load VEX documents if present. Files are enumerated lazily and
        //    sorted ordinally so the evidence pack is assembled in a
        //    deterministic order regardless of filesystem enumeration order.
        var vexDocuments = new List<string>();
        if (!string.IsNullOrEmpty(request.VexPath) && Directory.Exists(request.VexPath))
        {
            foreach (var vexFile in Directory.EnumerateFiles(request.VexPath, "*.json", SearchOption.AllDirectories)
                .OrderBy(f => f, StringComparer.Ordinal))
            {
                ct.ThrowIfCancellationRequested();
                var vexContent = await File.ReadAllTextAsync(vexFile, ct).ConfigureAwait(false);
                vexDocuments.Add(vexContent);
            }

            _logger.LogDebug("Loaded {VexCount} VEX documents", vexDocuments.Count);
        }

        // 3. Load reachability graph if present (expected as a sibling of the SBOM file).
        string? reachabilityJson = null;
        var reachPath = Path.Combine(Path.GetDirectoryName(request.SbomPath) ?? string.Empty, "reachability.json");
        if (File.Exists(reachPath))
        {
            reachabilityJson = await File.ReadAllTextAsync(reachPath, ct).ConfigureAwait(false);
            _logger.LogDebug("Loaded reachability graph");
        }

        // 4. Build evidence pack from the frozen inputs.
        var evidencePack = new EvidencePack(
            SbomCanonJson: sbomContent,
            VexCanonJson: vexDocuments,
            ReachabilityGraphJson: reachabilityJson,
            FeedSnapshotDigest: request.FeedSnapshotDigest);

        // 5. Build policy lock from bundle (falls back to a synthesized lock
        //    when the bundle ships no policy file).
        var policyLock = await LoadPolicyLockAsync(request.PolicyPath, request.PolicyDigest, ct)
            .ConfigureAwait(false);

        // 6. Compute verdict through the regular build pipeline.
        var result = await BuildAsync(evidencePack, policyLock, ct).ConfigureAwait(false);
        sw.Stop();

        _logger.LogInformation(
            "Bundle replay completed: cgs={CgsHash}, duration={DurationMs}ms",
            result.CgsHash,
            sw.ElapsedMilliseconds);

        return new VerdictReplayResult
        {
            Success = true,
            VerdictHash = result.CgsHash,
            DurationMs = sw.ElapsedMilliseconds,
            EngineVersion = engineVersion
        };
    }
    catch (OperationCanceledException)
    {
        // Cancellation is not a replay failure; let the caller observe it.
        throw;
    }
    catch (Exception ex)
    {
        // Stop timing before any further work so the reported duration is final.
        sw.Stop();
        _logger.LogError(ex, "Bundle replay failed");
        return new VerdictReplayResult
        {
            Success = false,
            Error = ex.Message,
            DurationMs = sw.ElapsedMilliseconds,
            EngineVersion = engineVersion
        };
    }
}
/// <summary>
/// Loads the policy lock shipped in the bundle, or synthesizes a default one
/// keyed by the requested policy digest when no policy file is present (or it
/// deserializes to null).
/// </summary>
private static async ValueTask<PolicyLock> LoadPolicyLockAsync(
    string? policyPath,
    string policyDigest,
    CancellationToken ct)
{
    var hasBundledPolicy = !string.IsNullOrEmpty(policyPath) && File.Exists(policyPath);
    if (hasBundledPolicy)
    {
        var json = await File.ReadAllTextAsync(policyPath!, ct).ConfigureAwait(false);
        if (JsonSerializer.Deserialize<PolicyLock>(json, CanonicalJsonOptions) is { } bundled)
        {
            return bundled;
        }
    }

    // Default policy lock when not present in bundle.
    // NOTE(review): GeneratedAt uses the current time — confirm this value does
    // not feed into the deterministic verdict hash.
    var ruleHashes = new Dictionary<string, string>
    {
        ["default"] = policyDigest
    };
    return new PolicyLock(
        SchemaVersion: "1.0.0",
        PolicyVersion: policyDigest,
        RuleHashes: ruleHashes,
        EngineVersion: "1.0.0",
        GeneratedAt: DateTimeOffset.UtcNow);
}
/// <summary>
/// Compute CGS hash using deterministic Merkle tree.
/// </summary>