Files
git.stella-ops.org/src/Cli/StellaOps.Cli/Commands/BundleVerifyCommand.cs
2026-04-06 00:51:15 +03:00

2002 lines
68 KiB
C#

// -----------------------------------------------------------------------------
// BundleVerifyCommand.cs
// Sprint: SPRINT_20260118_018_AirGap_router_integration
// Task: TASK-018-003 - Bundle Verification CLI
// Description: Offline bundle verification command with full cryptographic verification
// -----------------------------------------------------------------------------
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using StellaOps.Attestor.Core.Predicates;
using StellaOps.Attestor.Core.Signing;
using StellaOps.Attestor.Core.Verification;
using StellaOps.Attestor.Envelope;
using StellaOps.Attestor.Serialization;
using StellaOps.Cryptography;
using System.CommandLine;
using System.Globalization;
using System.IO.Compression;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Cli.Commands;
/// <summary>
/// Command builder for offline bundle verification.
/// Verifies checksums, DSSE signatures, and Rekor proofs.
/// </summary>
public static class BundleVerifyCommand
{
// Shared serializer profile for manifest/DSSE parsing and JSON report output:
// web defaults (camelCase, case-insensitive read), indented output, null
// properties omitted. Note: Web defaults already imply camelCase; the explicit
// PropertyNamingPolicy restates it for clarity.
private static readonly JsonSerializerOptions JsonOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
PropertyNamingPolicy = JsonNamingPolicy.CamelCase
};
/// <summary>
/// Builds the 'bundle verify' enhanced command.
/// Wires all options and dispatches to <see cref="HandleVerifyBundleAsync"/>.
/// </summary>
/// <param name="services">Root service provider used for logging.</param>
/// <param name="verboseOption">Shared global --verbose option.</param>
/// <param name="cancellationToken">Host-level token that cancels verification on CLI shutdown.</param>
public static Command BuildVerifyBundleEnhancedCommand(
    IServiceProvider services,
    Option<bool> verboseOption,
    CancellationToken cancellationToken)
{
    var bundleOption = new Option<string>("--bundle", "-b")
    {
        Description = "Path to bundle (tar.gz or directory)",
        Required = true
    };
    var trustRootOption = new Option<string?>("--trust-root")
    {
        Description = "Path to trusted root certificate (PEM)"
    };
    var rekorCheckpointOption = new Option<string?>("--rekor-checkpoint")
    {
        Description = "Path to Rekor checkpoint for offline proof verification"
    };
    var offlineOption = new Option<bool>("--offline")
    {
        Description = "Run in offline mode (no network access)"
    };
    var outputOption = new Option<string>("--output", "-o")
    {
        Description = "Output format: table (default), json"
    };
    outputOption.SetDefaultValue("table");
    var strictOption = new Option<bool>("--strict")
    {
        Description = "Fail on any warning (missing optional artifacts)"
    };
    var signerOption = new Option<string?>("--signer")
    {
        Description = "Path to signing key (PEM) for DSSE verification report"
    };
    var signerCertOption = new Option<string?>("--signer-cert")
    {
        Description = "Path to signer certificate PEM (optional; embedded in report metadata)"
    };
    // Sprint 040-06: Replay blob fetch options
    var replayOption = new Option<bool>("--replay")
    {
        Description = "Verify binary content by fetching/reading large blobs referenced in attestations"
    };
    var blobSourceOption = new Option<string?>("--blob-source")
    {
        Description = "Override blob source (registry URL or local directory path)"
    };
    var command = new Command("verify", "Verify offline evidence bundle with full cryptographic verification")
    {
        bundleOption,
        trustRootOption,
        rekorCheckpointOption,
        offlineOption,
        outputOption,
        strictOption,
        signerOption,
        signerCertOption,
        replayOption,
        blobSourceOption,
        verboseOption
    };
    command.SetAction(async (parseResult, ct) =>
    {
        var bundle = parseResult.GetValue(bundleOption)!;
        var trustRoot = parseResult.GetValue(trustRootOption);
        var rekorCheckpoint = parseResult.GetValue(rekorCheckpointOption);
        var offline = parseResult.GetValue(offlineOption);
        var output = parseResult.GetValue(outputOption) ?? "table";
        var strict = parseResult.GetValue(strictOption);
        var signer = parseResult.GetValue(signerOption);
        var signerCert = parseResult.GetValue(signerCertOption);
        var replay = parseResult.GetValue(replayOption);
        var blobSource = parseResult.GetValue(blobSourceOption);
        var verbose = parseResult.GetValue(verboseOption);
        // Fix: honor BOTH the host shutdown token and the per-invocation token
        // System.CommandLine supplies (e.g. Ctrl+C). Previously only the captured
        // outer token was passed and the action's 'ct' was ignored.
        using var linkedCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken, ct);
        return await HandleVerifyBundleAsync(
            services,
            bundle,
            trustRoot,
            rekorCheckpoint,
            offline,
            output,
            strict,
            signer,
            signerCert,
            replay,
            blobSource,
            verbose,
            linkedCts.Token);
    });
    return command;
}
/// <summary>
/// Runs the full verification pipeline: extract bundle, parse manifest, verify
/// checksums, DSSE signatures, Rekor proofs, payload-type expectations, and
/// (optionally) replay large blobs. Returns a process exit code via the
/// finalize/output helpers.
/// </summary>
private static async Task<int> HandleVerifyBundleAsync(
    IServiceProvider services,
    string bundlePath,
    string? trustRoot,
    string? rekorCheckpoint,
    bool offline,
    string outputFormat,
    bool strict,
    string? signerKeyPath,
    string? signerCertPath,
    bool replay,
    string? blobSource,
    bool verbose,
    CancellationToken ct)
{
    var loggerFactory = services.GetService<ILoggerFactory>();
    var logger = loggerFactory?.CreateLogger(typeof(BundleVerifyCommand));
    var result = new VerificationResult
    {
        BundlePath = bundlePath,
        StartedAt = DateTimeOffset.UtcNow,
        Offline = offline
    };
    string? bundleDir = null;
    BundleManifestDto? manifest = null;
    try
    {
        if (outputFormat != "json")
        {
            Console.WriteLine("Verifying evidence bundle...");
            Console.WriteLine($" Bundle: {bundlePath}");
            Console.WriteLine($" Mode: {(offline ? "Offline" : "Online")}");
            Console.WriteLine();
        }
        // Step 1: Extract/read bundle
        // NOTE(review): when this extracts to a temp directory, cleanup is
        // presumably handled by FinalizeResultAsync — confirm; nothing here
        // deletes bundleDir.
        bundleDir = await ExtractBundleAsync(bundlePath, ct);
        // Step 2: Parse manifest
        var manifestPath = Path.Combine(bundleDir, "manifest.json");
        if (!File.Exists(manifestPath))
        {
            result.Checks.Add(new VerificationCheck("manifest", false, "manifest.json not found"));
            return await FinalizeResultAsync(
                result,
                manifest,
                bundleDir,
                trustRoot,
                rekorCheckpoint,
                offline,
                outputFormat,
                strict,
                signerKeyPath,
                signerCertPath,
                ct);
        }
        var manifestJson = await File.ReadAllTextAsync(manifestPath, ct);
        manifest = JsonSerializer.Deserialize<BundleManifestDto>(manifestJson, JsonOptions);
        result.Checks.Add(new VerificationCheck("manifest", true, "manifest.json parsed successfully"));
        result.SchemaVersion = manifest?.SchemaVersion;
        result.Image = manifest?.Bundle?.Image;
        if (outputFormat != "json")
        {
            Console.WriteLine("Step 1: Manifest ✓");
        }
        // Step 3: Verify artifact checksums
        var checksumsPassed = await VerifyChecksumsAsync(bundleDir, manifest, result, verbose, ct);
        if (outputFormat != "json")
        {
            // Fix: the pass/fail glyphs were lost (both ternary branches were empty strings).
            Console.WriteLine($"Step 2: Checksums {(checksumsPassed ? "✓" : "✗")}");
        }
        // Step 4: Verify DSSE signatures
        var dssePassed = await VerifyDsseSignaturesAsync(bundleDir, trustRoot, result, verbose, ct);
        if (outputFormat != "json")
        {
            Console.WriteLine($"Step 3: DSSE Signatures {(dssePassed ? "✓" : "✗ (no trust root provided)")}");
        }
        // Step 5: Verify Rekor proofs
        var rekorPassed = await VerifyRekorProofsAsync(bundleDir, rekorCheckpoint, offline, result, verbose, ct);
        if (outputFormat != "json")
        {
            Console.WriteLine($"Step 4: Rekor Proofs {(rekorPassed ? "✓" : "✗ (no checkpoint provided)")}");
        }
        // Step 6: Verify payload types match expectations
        var payloadsPassed = VerifyPayloadTypes(manifest, result, verbose);
        if (outputFormat != "json")
        {
            Console.WriteLine($"Step 5: Payload Types {(payloadsPassed ? "✓" : "✗")}");
        }
        // Step 7 (040-06): Replay blob verification
        if (replay)
        {
            var replayPassed = await VerifyBlobReplayAsync(
                bundleDir, manifest, blobSource, offline, result, verbose, ct);
            if (outputFormat != "json")
            {
                Console.WriteLine($"Step 6: Blob Replay {(replayPassed ? "✓" : "✗")}");
            }
        }
        return await FinalizeResultAsync(
            result,
            manifest,
            bundleDir,
            trustRoot,
            rekorCheckpoint,
            offline,
            outputFormat,
            strict,
            signerKeyPath,
            signerCertPath,
            ct);
    }
    catch (Exception ex)
    {
        // Any unexpected failure (including cancellation) is converted into a
        // FAILED report rather than an unhandled crash.
        logger?.LogError(ex, "Bundle verification failed");
        result.Checks.Add(new VerificationCheck("exception", false, ex.Message) { Severity = "error" });
        result.OverallStatus = "FAILED";
        result.CompletedAt = DateTimeOffset.UtcNow;
        return OutputResult(result, outputFormat, strict);
    }
}
/// <summary>
/// Resolves a bundle path to a readable directory. Directories are returned
/// as-is; tar.gz bundles in the project's simple "FILE:path:size\ncontent"
/// format are extracted to a fresh temp directory, which is returned.
/// </summary>
/// <exception cref="FileNotFoundException">The bundle path does not exist.</exception>
/// <exception cref="InvalidDataException">
/// An entry path escapes the extraction directory, or the stream ends before a
/// declared entry is fully read.
/// </exception>
private static async Task<string> ExtractBundleAsync(string bundlePath, CancellationToken ct)
{
    if (Directory.Exists(bundlePath))
    {
        // Already extracted; caller must not treat this as a temp directory.
        return bundlePath;
    }
    if (!File.Exists(bundlePath))
    {
        throw new FileNotFoundException($"Bundle not found: {bundlePath}");
    }
    // Extract tar.gz to temp directory
    var tempDir = Path.Combine(Path.GetTempPath(), $"stella-verify-{Guid.NewGuid():N}");
    Directory.CreateDirectory(tempDir);
    var tempRoot = Path.GetFullPath(tempDir) + Path.DirectorySeparatorChar;
    await using var fs = File.OpenRead(bundlePath);
    await using var gz = new GZipStream(fs, CompressionMode.Decompress);
    using var reader = new StreamReader(gz);
    // Simple extraction (matches our simple tar format)
    string? line;
    while ((line = await reader.ReadLineAsync(ct)) != null)
    {
        if (!line.StartsWith("FILE:", StringComparison.Ordinal))
        {
            continue;
        }
        var parts = line[5..].Split(':');
        if (parts.Length < 2)
        {
            continue;
        }
        var filePath = parts[0];
        // Fix: sizes are machine-written; parse culture-invariantly.
        var size = int.Parse(parts[1], CultureInfo.InvariantCulture);
        var fullPath = Path.GetFullPath(Path.Combine(tempDir, filePath));
        // Fix: guard against path traversal ("zip slip") from untrusted bundles.
        if (!fullPath.StartsWith(tempRoot, StringComparison.Ordinal))
        {
            throw new InvalidDataException($"Bundle entry escapes extraction directory: {filePath}");
        }
        var dir = Path.GetDirectoryName(fullPath);
        if (dir != null)
        {
            Directory.CreateDirectory(dir);
        }
        // Fix: ReadBlockAsync may return fewer chars than requested; loop until
        // the declared size is read (and honor the cancellation token).
        var buffer = new char[size];
        var read = 0;
        while (read < size)
        {
            var n = await reader.ReadBlockAsync(buffer.AsMemory(read, size - read), ct);
            if (n == 0)
            {
                throw new InvalidDataException(
                    $"Bundle truncated: expected {size} chars for {filePath}, got {read}");
            }
            read += n;
        }
        await File.WriteAllTextAsync(fullPath, new string(buffer), ct);
    }
    return tempDir;
}
/// <summary>
/// Verifies the SHA-256 digest of each artifact listed in the manifest against
/// the file on disk. Artifacts with no declared digest are recorded with their
/// computed digest and always pass; missing files fail the step.
/// </summary>
/// <returns><c>true</c> when every listed artifact exists and matches its declared digest (if any).</returns>
private static async Task<bool> VerifyChecksumsAsync(
string bundleDir,
BundleManifestDto? manifest,
VerificationResult result,
bool verbose,
CancellationToken ct)
{
if (manifest?.Bundle?.Artifacts == null)
{
result.Checks.Add(new VerificationCheck("checksums", false, "No artifacts in manifest"));
return false;
}
var allPassed = true;
foreach (var artifact in manifest.Bundle.Artifacts)
{
var filePath = Path.Combine(bundleDir, artifact.Path);
if (!File.Exists(filePath))
{
// Missing file is recorded at warning severity but still fails the step.
result.Checks.Add(new VerificationCheck($"checksum:{artifact.Path}", false, "File not found")
{
Severity = "warning"
});
allPassed = false;
continue;
}
// Compute hash
await using var fs = File.OpenRead(filePath);
var hash = await SHA256.HashDataAsync(fs, ct);
var hashStr = $"sha256:{Convert.ToHexStringLower(hash)}";
// If digest specified in manifest, verify it
if (!string.IsNullOrEmpty(artifact.Digest))
{
var matches = hashStr.Equals(artifact.Digest, StringComparison.OrdinalIgnoreCase);
result.Checks.Add(new VerificationCheck($"checksum:{artifact.Path}", matches,
matches ? "Checksum verified" : $"Checksum mismatch: expected {artifact.Digest}, got {hashStr}"));
if (!matches) allPassed = false;
}
else
{
// No expected digest recorded: report the computed value informationally (always passes).
result.Checks.Add(new VerificationCheck($"checksum:{artifact.Path}", true,
$"Computed: {hashStr}"));
}
}
return allPassed;
}
/// <summary>
/// Locates DSSE envelopes in the bundle (two well-known root files plus any
/// *.dsse.json under function-maps/ and verification/) and checks their
/// signatures. With a trust root, signatures are cryptographically verified;
/// without one, signature presence is recorded as a warning-level pass only.
/// </summary>
/// <returns>
/// <c>true</c> when at least one envelope was processed and none failed.
/// Note: the <c>verified</c> counter counts processed envelopes, including
/// presence-only checks made without a trust root.
/// </returns>
private static async Task<bool> VerifyDsseSignaturesAsync(
string bundleDir,
string? trustRoot,
VerificationResult result,
bool verbose,
CancellationToken ct)
{
// Well-known DSSE files in the bundle root
var rootDsseFiles = new[] { "sbom.statement.dsse.json", "vex.statement.dsse.json" };
// Discover additional DSSE files in subdirectories (function-maps, verification)
var additionalDsseFiles = new List<string>();
var searchDirs = new[] { "function-maps", "verification" };
foreach (var subDir in searchDirs)
{
var dirPath = Path.Combine(bundleDir, subDir);
if (Directory.Exists(dirPath))
{
foreach (var file in Directory.GetFiles(dirPath, "*.dsse.json"))
{
// Normalize separators so check names are stable across platforms.
var relativePath = Path.GetRelativePath(bundleDir, file).Replace('\\', '/');
additionalDsseFiles.Add(relativePath);
}
}
}
var allDsseFiles = rootDsseFiles.Concat(additionalDsseFiles).ToList();
var verified = 0;
var allPassed = true;
foreach (var dsseFile in allDsseFiles)
{
var filePath = Path.Combine(bundleDir, dsseFile);
if (!File.Exists(filePath))
{
// Root DSSE files are optional; absence is informational, not a failure.
result.Checks.Add(new VerificationCheck($"dsse:{dsseFile}", true, "Not present (optional)")
{
Severity = "info"
});
continue;
}
var content = await File.ReadAllTextAsync(filePath, ct);
var envelope = JsonSerializer.Deserialize<DsseEnvelopeDto>(content, JsonOptions);
if (envelope?.Signatures == null || envelope.Signatures.Count == 0)
{
result.Checks.Add(new VerificationCheck($"dsse:{dsseFile}", false, "No signatures found"));
allPassed = false;
continue;
}
if (!string.IsNullOrEmpty(trustRoot))
{
if (!File.Exists(trustRoot))
{
result.Checks.Add(new VerificationCheck($"dsse:{dsseFile}", false,
$"Trust root file not found: {trustRoot}"));
allPassed = false;
continue;
}
if (string.IsNullOrWhiteSpace(envelope.Payload) || string.IsNullOrWhiteSpace(envelope.PayloadType))
{
result.Checks.Add(new VerificationCheck($"dsse:{dsseFile}", false,
"DSSE payload or payloadType missing"));
allPassed = false;
continue;
}
// Any single valid signature is sufficient; the last error is kept for reporting.
var signatureVerified = false;
string? lastError = null;
foreach (var signature in envelope.Signatures)
{
if (string.IsNullOrWhiteSpace(signature.Sig))
{
lastError = "Signature value missing";
continue;
}
if (TryVerifyDsseSignature(trustRoot, envelope.PayloadType, envelope.Payload, signature.Sig, out var error))
{
signatureVerified = true;
break;
}
lastError = error;
}
result.Checks.Add(new VerificationCheck($"dsse:{dsseFile}", signatureVerified,
signatureVerified
? $"Cryptographic signature verified ({envelope.Signatures.Count} signature(s))"
: $"Signature verification failed: {lastError ?? "invalid_signature"}"));
if (!signatureVerified)
{
allPassed = false;
}
}
else
{
// No trust root: record signature presence only, flagged as a warning.
result.Checks.Add(new VerificationCheck($"dsse:{dsseFile}", true,
$"Signature present ({envelope.Signatures.Count} signature(s)) - not cryptographically verified (no trust root)")
{
Severity = "warning"
});
}
// Counts every envelope that was processed (verified or presence-checked).
verified++;
}
return verified > 0 && allPassed;
}
/// <summary>
/// Attempts to verify one DSSE signature against the key material in
/// <paramref name="trustRootPath"/>, trying in order: RSA public key PEM,
/// X.509 certificate carrying an RSA key, then ECDSA public key PEM.
/// The message verified is the DSSE PAE of (payloadType, payload).
/// </summary>
/// <param name="trustRootPath">Path to a PEM public key or certificate file.</param>
/// <param name="payloadType">DSSE payloadType used when building the PAE.</param>
/// <param name="payloadBase64">Base64-encoded DSSE payload.</param>
/// <param name="signatureBase64">Base64-encoded signature bytes.</param>
/// <param name="error">The last decode/verification error when all paths fail.</param>
/// <returns><c>true</c> when any verification path succeeds.</returns>
private static bool TryVerifyDsseSignature(
string trustRootPath,
string payloadType,
string payloadBase64,
string signatureBase64,
out string? error)
{
error = null;
try
{
var payloadBytes = Convert.FromBase64String(payloadBase64);
var signatureBytes = Convert.FromBase64String(signatureBase64);
var pae = BuildDssePae(payloadType, payloadBytes);
var publicKeyPem = File.ReadAllText(trustRootPath);
try
{
// NOTE(review): assumes RSA PKCS#1 v1.5 padding with SHA-256; envelopes
// signed with RSA-PSS would fail here — confirm signer configuration.
using var rsa = RSA.Create();
rsa.ImportFromPem(publicKeyPem);
if (rsa.VerifyData(pae, signatureBytes, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1))
{
return true;
}
}
catch
{
// Try certificate/ECDSA path below.
}
try
{
// NOTE(review): only the RSA key is extracted from a certificate; a cert
// carrying an ECDSA key falls through to the bare-ECDSA path, which
// imports the file as a key PEM and would fail for a certificate — confirm.
using var cert = X509CertificateLoader.LoadCertificateFromFile(trustRootPath);
using var certKey = cert.GetRSAPublicKey();
if (certKey is not null &&
certKey.VerifyData(pae, signatureBytes, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1))
{
return true;
}
}
catch
{
// Try ECDSA path.
}
try
{
// NOTE(review): ECDsa.VerifyData expects IEEE P1363 (r||s) signatures by
// default; DER-encoded ECDSA signatures (common in Sigstore tooling) would
// need DSASignatureFormat.Rfc3279DerSequence — confirm signature format.
using var ecdsa = ECDsa.Create();
ecdsa.ImportFromPem(publicKeyPem);
return ecdsa.VerifyData(pae, signatureBytes, HashAlgorithmName.SHA256);
}
catch (Exception ex)
{
error = ex.Message;
return false;
}
}
catch (Exception ex)
{
// Base64 decode or file read failure.
error = ex.Message;
return false;
}
}
/// <summary>
/// Builds the DSSE Pre-Authentication Encoding (PAE) for a payload:
/// "DSSEv1 SP len(type) SP type SP len(payload) SP payload", where lengths are
/// UTF-8 byte counts rendered as invariant decimal digits.
/// </summary>
private static byte[] BuildDssePae(string payloadType, byte[] payload)
{
    var typeBytes = Encoding.UTF8.GetBytes(payloadType ?? string.Empty);
    // The entire prefix up to (and including) the final space is plain ASCII/UTF-8
    // text, so it can be rendered as one string and encoded in a single step.
    var headerText = string.Create(
        CultureInfo.InvariantCulture,
        $"DSSEv1 {typeBytes.Length} {payloadType ?? string.Empty} {payload.Length} ");
    var headerBytes = Encoding.UTF8.GetBytes(headerText);
    var pae = new byte[headerBytes.Length + payload.Length];
    headerBytes.CopyTo(pae, 0);
    payload.CopyTo(pae, headerBytes.Length);
    return pae;
}
/// <summary>
/// Verifies the optional rekor.proof.json in the bundle. Without a checkpoint
/// file, presence of a parseable proof passes with a warning. With a checkpoint,
/// a full offline Merkle inclusion verification is performed: the proof's root
/// hash must match the checkpoint, the log index must be in range, and the
/// inclusion path must hash up to the checkpoint root.
/// </summary>
/// <returns><c>true</c> when the proof is absent, or parses/verifies successfully.</returns>
private static async Task<bool> VerifyRekorProofsAsync(
string bundleDir,
string? checkpointPath,
bool offline,
VerificationResult result,
bool verbose,
CancellationToken ct)
{
var proofPath = Path.Combine(bundleDir, "rekor.proof.json");
if (!File.Exists(proofPath))
{
// Proof is optional; absence is informational and does not fail the step.
result.Checks.Add(new VerificationCheck("rekor:proof", true, "Not present (optional)")
{
Severity = "info"
});
return true;
}
var proofJson = await File.ReadAllTextAsync(proofPath, ct);
JsonDocument proofDocument;
try
{
proofDocument = JsonDocument.Parse(proofJson);
}
catch (JsonException ex)
{
result.Checks.Add(new VerificationCheck("rekor:proof", false, $"proof-parse-failed: {ex.Message}"));
return false;
}
using (proofDocument)
{
if (!TryReadLogIndex(proofDocument.RootElement, out var logIndex))
{
result.Checks.Add(new VerificationCheck("rekor:proof", false, "proof-log-index-missing"));
return false;
}
result.Checks.Add(new VerificationCheck("rekor:proof", true, $"Proof parsed (log index: {logIndex})"));
if (!string.IsNullOrWhiteSpace(checkpointPath))
{
if (!File.Exists(checkpointPath))
{
result.Checks.Add(new VerificationCheck(
"rekor:inclusion",
false,
$"checkpoint-not-found: {checkpointPath}"));
return false;
}
var checkpointJson = await File.ReadAllTextAsync(checkpointPath, ct);
if (!TryParseCheckpoint(checkpointJson, out var checkpoint, out var checkpointError))
{
result.Checks.Add(new VerificationCheck(
"rekor:inclusion",
false,
$"checkpoint-invalid: {checkpointError ?? "unknown"}"));
return false;
}
// The leaf's log index must fall inside the checkpoint's tree.
if (logIndex < 0 || logIndex >= checkpoint.TreeSize)
{
result.Checks.Add(new VerificationCheck(
"rekor:inclusion",
false,
$"proof-log-index-out-of-range: logIndex={logIndex}, checkpointTreeSize={checkpoint.TreeSize}"));
return false;
}
if (!TryResolveProofRootHash(proofDocument.RootElement, out var proofRootHash, out var rootError))
{
result.Checks.Add(new VerificationCheck(
"rekor:inclusion",
false,
$"proof-root-hash-invalid: {rootError ?? "missing"}"));
return false;
}
// Constant-time compare: the proof's claimed root must equal the trusted checkpoint root.
if (!CryptographicOperations.FixedTimeEquals(proofRootHash, checkpoint.RootHash))
{
result.Checks.Add(new VerificationCheck(
"rekor:inclusion",
false,
"proof-root-hash-mismatch-with-checkpoint"));
return false;
}
if (!TryResolveProofHashes(proofDocument.RootElement, out var proofHashes, out var hashError))
{
result.Checks.Add(new VerificationCheck(
"rekor:inclusion",
false,
$"proof-hashes-invalid: {hashError ?? "missing"}"));
return false;
}
// Falls back to the checkpoint tree size when the proof does not carry its own.
if (!TryResolveProofTreeSize(proofDocument.RootElement, checkpoint.TreeSize, out var proofTreeSize))
{
result.Checks.Add(new VerificationCheck(
"rekor:inclusion",
false,
"proof-tree-size-invalid"));
return false;
}
if (!TryResolveLeafHash(proofDocument.RootElement, out var leafHash, out var leafError))
{
result.Checks.Add(new VerificationCheck(
"rekor:inclusion",
false,
$"proof-leaf-hash-missing: {leafError ?? "cannot-verify-merkle"}"));
return false;
}
var inclusionValid = MerkleProofVerifier.VerifyInclusion(
leafHash,
logIndex,
proofTreeSize,
proofHashes,
checkpoint.RootHash);
if (!inclusionValid)
{
result.Checks.Add(new VerificationCheck(
"rekor:inclusion",
false,
"proof-merkle-verification-failed"));
return false;
}
result.Checks.Add(new VerificationCheck("rekor:inclusion", true, $"Inclusion verified at log index {logIndex}"));
return true;
}
// No checkpoint supplied: both branches below pass with a warning and differ
// only in message wording (online vs. offline phrasing).
if (!offline)
{
result.Checks.Add(new VerificationCheck("rekor:inclusion", true,
$"Log index {logIndex} present - checkpoint not provided for offline verification")
{
Severity = "warning"
});
return true;
}
result.Checks.Add(new VerificationCheck("rekor:inclusion", true,
$"Log index {logIndex} present - no checkpoint for offline verification")
{
Severity = "warning"
});
return true;
}
}
/// <summary>
/// Parses a Rekor checkpoint JSON document into tree size and root hash.
/// The checkpoint may be flat at the root or wrapped in a "checkpoint" object;
/// tree size may appear as "treeSize" or "size", the root as "rootHash" or "hash".
/// </summary>
private static bool TryParseCheckpoint(
    string checkpointJson,
    out ParsedCheckpoint checkpoint,
    out string? error)
{
    checkpoint = default;
    error = null;
    JsonDocument document;
    try
    {
        document = JsonDocument.Parse(checkpointJson);
    }
    catch (JsonException ex)
    {
        error = ex.Message;
        return false;
    }
    using (document)
    {
        var root = document.RootElement;
        // Unwrap an optional "checkpoint" envelope object.
        var element = root.TryGetProperty("checkpoint", out var nested) &&
                      nested.ValueKind == JsonValueKind.Object
            ? nested
            : root;
        if (!TryGetInt64Property(element, "treeSize", out var treeSize) &&
            !TryGetInt64Property(element, "size", out treeSize))
        {
            error = "treeSize/size missing";
            return false;
        }
        if (!TryGetStringProperty(element, "rootHash", out var rootHashText) &&
            !TryGetStringProperty(element, "hash", out rootHashText))
        {
            error = "rootHash/hash missing";
            return false;
        }
        if (!TryDecodeHashValue(rootHashText, out var rootHashBytes))
        {
            error = "root hash must be lowercase hex, sha256:hex, or base64";
            return false;
        }
        checkpoint = new ParsedCheckpoint(treeSize, rootHashBytes);
        return true;
    }
}
/// <summary>
/// Reads the Rekor log index from a proof document. Accepts the value at the
/// top level or nested under "inclusion" or "inclusionProof".
/// </summary>
private static bool TryReadLogIndex(JsonElement root, out long logIndex)
{
    if (TryGetInt64Property(root, "logIndex", out logIndex))
    {
        return true;
    }
    foreach (var containerName in new[] { "inclusion", "inclusionProof" })
    {
        if (TryGetObjectProperty(root, containerName, out var container) &&
            TryGetInt64Property(container, "logIndex", out logIndex))
        {
            return true;
        }
    }
    logIndex = -1;
    return false;
}
/// <summary>
/// Resolves the tree size from a proof document (top level, "inclusion", or
/// "inclusionProof"), falling back to the checkpoint's tree size when the proof
/// does not carry one. Only strictly positive sizes are accepted.
/// </summary>
private static bool TryResolveProofTreeSize(JsonElement root, long fallbackTreeSize, out long treeSize)
{
    if (TryGetInt64Property(root, "treeSize", out treeSize))
    {
        return treeSize > 0;
    }
    foreach (var containerName in new[] { "inclusion", "inclusionProof" })
    {
        if (TryGetObjectProperty(root, containerName, out var container) &&
            TryGetInt64Property(container, "treeSize", out treeSize))
        {
            return treeSize > 0;
        }
    }
    treeSize = fallbackTreeSize;
    return treeSize > 0;
}
/// <summary>
/// Resolves and decodes the root hash from a proof document. Checked locations,
/// in order: top-level "rootHash", "inclusion.rootHash", "inclusionProof.rootHash",
/// then "checkpoint.rootHash"/"checkpoint.hash".
/// </summary>
private static bool TryResolveProofRootHash(JsonElement root, out byte[] rootHash, out string? error)
{
    rootHash = Array.Empty<byte>();
    error = null;
    var rootHashText = LocateRootHashText(root);
    if (string.IsNullOrWhiteSpace(rootHashText))
    {
        error = "missing rootHash";
        return false;
    }
    if (!TryDecodeHashValue(rootHashText, out rootHash))
    {
        error = "invalid rootHash format";
        return false;
    }
    return true;

    // Returns the first root-hash string found, or null.
    static string? LocateRootHashText(JsonElement root)
    {
        if (TryGetStringProperty(root, "rootHash", out var direct))
        {
            return direct;
        }
        if (TryGetObjectProperty(root, "inclusion", out var inclusion) &&
            TryGetStringProperty(inclusion, "rootHash", out var fromInclusion))
        {
            return fromInclusion;
        }
        if (TryGetObjectProperty(root, "inclusionProof", out var inclusionProof) &&
            TryGetStringProperty(inclusionProof, "rootHash", out var fromProof))
        {
            return fromProof;
        }
        if (TryGetObjectProperty(root, "checkpoint", out var checkpoint))
        {
            if (TryGetStringProperty(checkpoint, "rootHash", out var checkpointRoot))
            {
                return checkpointRoot;
            }
            if (TryGetStringProperty(checkpoint, "hash", out var checkpointHash))
            {
                return checkpointHash;
            }
        }
        return null;
    }
}
/// <summary>
/// Resolves the Merkle inclusion path from a proof document and decodes every
/// entry. Accepted locations, in order: top-level "hashes", then
/// "inclusion".hashes/path, then "inclusionProof".hashes/path.
/// </summary>
private static bool TryResolveProofHashes(JsonElement root, out List<byte[]> hashes, out string? error)
{
    hashes = new List<byte[]>();
    error = null;
    if (!LocateHashArray(root, out var hashArray))
    {
        error = "hashes/path array missing";
        return false;
    }
    foreach (var entry in hashArray.EnumerateArray())
    {
        if (entry.ValueKind != JsonValueKind.String)
        {
            error = "hash entry is not a string";
            return false;
        }
        var hashText = entry.GetString();
        if (string.IsNullOrWhiteSpace(hashText))
        {
            error = "hash entry is empty";
            return false;
        }
        if (!TryDecodeHashValue(hashText, out var decoded))
        {
            error = $"invalid hash entry: {hashText}";
            return false;
        }
        hashes.Add(decoded);
    }
    return true;

    // Finds the first candidate array of path hashes, or returns false.
    static bool LocateHashArray(JsonElement root, out JsonElement array)
    {
        if (TryGetArrayProperty(root, "hashes", out array))
        {
            return true;
        }
        if (TryGetObjectProperty(root, "inclusion", out var inclusion) &&
            (TryGetArrayProperty(inclusion, "hashes", out array) ||
             TryGetArrayProperty(inclusion, "path", out array)))
        {
            return true;
        }
        if (TryGetObjectProperty(root, "inclusionProof", out var inclusionProof) &&
            (TryGetArrayProperty(inclusionProof, "hashes", out array) ||
             TryGetArrayProperty(inclusionProof, "path", out array)))
        {
            return true;
        }
        return false;
    }
}
/// <summary>
/// Resolves and decodes the Merkle leaf hash from a proof document. Accepted
/// locations, in order: top-level "leafHash", "inclusion.leafHash",
/// "inclusionProof.leafHash".
/// </summary>
private static bool TryResolveLeafHash(JsonElement root, out byte[] leafHash, out string? error)
{
    leafHash = Array.Empty<byte>();
    error = null;
    if (TryGetStringProperty(root, "leafHash", out var directText) &&
        TryDecodeHashValue(directText, out leafHash))
    {
        return true;
    }
    foreach (var containerName in new[] { "inclusion", "inclusionProof" })
    {
        if (TryGetObjectProperty(root, containerName, out var container) &&
            TryGetStringProperty(container, "leafHash", out var nestedText) &&
            TryDecodeHashValue(nestedText, out leafHash))
        {
            return true;
        }
    }
    leafHash = Array.Empty<byte>();
    error = "leafHash missing";
    return false;
}
/// <summary>
/// Decodes a 32-byte (SHA-256) hash from a string. Accepts 64 hex characters
/// (with optional "sha256:" and/or "0x" prefixes) or standard base64 of exactly
/// 32 bytes. Returns false for anything else.
/// </summary>
private static bool TryDecodeHashValue(string value, out byte[] hashBytes)
{
    hashBytes = Array.Empty<byte>();
    if (string.IsNullOrWhiteSpace(value))
    {
        return false;
    }
    var candidate = value.Trim();
    if (candidate.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
    {
        candidate = candidate["sha256:".Length..];
    }
    if (candidate.StartsWith("0x", StringComparison.OrdinalIgnoreCase))
    {
        candidate = candidate[2..];
    }
    // Exactly 64 hex characters: decode as raw SHA-256 hex.
    if (candidate.Length == 64 && candidate.All(char.IsAsciiHexDigit))
    {
        try
        {
            hashBytes = Convert.FromHexString(candidate);
            return hashBytes.Length == 32;
        }
        catch
        {
            return false;
        }
    }
    // Otherwise accept standard base64 encoding of exactly 32 bytes.
    try
    {
        var decoded = Convert.FromBase64String(candidate);
        if (decoded.Length == 32)
        {
            hashBytes = decoded;
            return true;
        }
    }
    catch
    {
        // Not base64.
    }
    return false;
}
/// <summary>
/// Returns true for ASCII hexadecimal digits (0-9, a-f, A-F).
/// </summary>
private static bool IsHexChar(char value) => char.IsAsciiHexDigit(value);
/// <summary>
/// Reads an Int64 property from a JSON object. Accepts JSON numbers that fit
/// in an Int64 and strings containing an invariant-culture integer.
/// </summary>
private static bool TryGetInt64Property(JsonElement element, string propertyName, out long value)
{
    value = 0;
    if (element.ValueKind != JsonValueKind.Object ||
        !element.TryGetProperty(propertyName, out var property))
    {
        return false;
    }
    return property.ValueKind switch
    {
        JsonValueKind.Number => property.TryGetInt64(out value),
        JsonValueKind.String => long.TryParse(
            property.GetString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out value),
        _ => false
    };
}
/// <summary>
/// Reads a non-empty, non-whitespace string property from a JSON object.
/// Whitespace-only strings are treated as absent.
/// </summary>
private static bool TryGetStringProperty(JsonElement element, string propertyName, out string value)
{
    value = string.Empty;
    if (element.ValueKind == JsonValueKind.Object &&
        element.TryGetProperty(propertyName, out var property) &&
        property.ValueKind == JsonValueKind.String &&
        property.GetString() is { } text &&
        !string.IsNullOrWhiteSpace(text))
    {
        value = text;
        return true;
    }
    return false;
}
/// <summary>
/// Reads a property from a JSON object only when its value is a JSON array.
/// </summary>
private static bool TryGetArrayProperty(JsonElement element, string propertyName, out JsonElement value)
{
    if (element.ValueKind == JsonValueKind.Object &&
        element.TryGetProperty(propertyName, out var candidate) &&
        candidate is { ValueKind: JsonValueKind.Array })
    {
        value = candidate;
        return true;
    }
    value = default;
    return false;
}
/// <summary>
/// Reads a property from a JSON object only when its value is itself a JSON object.
/// </summary>
private static bool TryGetObjectProperty(JsonElement element, string propertyName, out JsonElement value)
{
    if (element.ValueKind == JsonValueKind.Object &&
        element.TryGetProperty(propertyName, out var candidate) &&
        candidate is { ValueKind: JsonValueKind.Object })
    {
        value = candidate;
        return true;
    }
    value = default;
    return false;
}
/// <summary>
/// Checks that every payload type the manifest's verify section expects is
/// represented by at least one artifact media type. Matching is a
/// case-insensitive substring test on the expected type with any ";parameters"
/// stripped. No expectations means the check trivially passes.
/// </summary>
private static bool VerifyPayloadTypes(
    BundleManifestDto? manifest,
    VerificationResult result,
    bool verbose)
{
    var expectedTypes = manifest?.Verify?.Expectations?.PayloadTypes ?? [];
    if (expectedTypes.Count == 0)
    {
        result.Checks.Add(new VerificationCheck("payloads", true, "No payload type expectations defined"));
        return true;
    }
    // Collect the distinct media types actually present in the bundle.
    var presentTypes = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
    var artifacts = manifest?.Bundle?.Artifacts;
    if (artifacts is not null)
    {
        foreach (var artifact in artifacts)
        {
            var mediaType = artifact.MediaType;
            if (!string.IsNullOrWhiteSpace(mediaType))
            {
                presentTypes.Add(mediaType);
            }
        }
    }
    // An expectation is satisfied when any present type contains its base type.
    var missing = new List<string>();
    foreach (var expectedType in expectedTypes)
    {
        var baseType = expectedType.Split(';')[0];
        var satisfied = presentTypes.Any(
            present => present.Contains(baseType, StringComparison.OrdinalIgnoreCase));
        if (!satisfied)
        {
            missing.Add(expectedType);
        }
    }
    if (missing.Count > 0)
    {
        result.Checks.Add(new VerificationCheck("payloads", false,
            $"Missing expected payload types: {string.Join(", ", missing)}"));
        return false;
    }
    result.Checks.Add(new VerificationCheck("payloads", true,
        $"All {expectedTypes.Count} expected payload types present"));
    return true;
}
/// <summary>
/// Sprint 040-06: Verify large blobs referenced in attestations.
/// For full bundles, reads blobs from the blobs/ directory.
/// For light bundles, fetches blobs from registry or --blob-source.
/// </summary>
/// <returns>
/// <c>true</c> when every referenced blob was located and its digest matched;
/// also <c>true</c> when no blob references exist at all.
/// </returns>
private static async Task<bool> VerifyBlobReplayAsync(
string bundleDir,
BundleManifestDto? manifest,
string? blobSource,
bool offline,
VerificationResult result,
bool verbose,
CancellationToken ct)
{
// Manifests without an export mode are treated as "light" (blobs not embedded).
var exportMode = manifest?.ExportMode ?? "light";
var isFullBundle = string.Equals(exportMode, "full", StringComparison.OrdinalIgnoreCase);
// Collect all largeBlob references from DSSE attestation payloads
var blobRefs = await ExtractLargeBlobRefsAsync(bundleDir, verbose, ct);
if (blobRefs.Count == 0)
{
result.Checks.Add(new VerificationCheck("blob-replay", true,
"No large blob references found in attestations"));
return true;
}
if (verbose)
{
Console.WriteLine($" Found {blobRefs.Count} large blob reference(s) to verify");
}
var allPassed = true;
var verified = 0;
foreach (var blobRef in blobRefs)
{
byte[]? blobContent = null;
if (isFullBundle)
{
// Full bundle: blobs are embedded in blobs/ directory
// Primary layout: blobs/<algo>-<hash> (colon replaced for filesystem safety).
var blobPath = Path.Combine(bundleDir, "blobs", blobRef.Digest.Replace(":", "-"));
if (!File.Exists(blobPath))
{
// Try alternate naming: sha256/<hash>
var parts = blobRef.Digest.Split(':');
if (parts.Length == 2)
{
blobPath = Path.Combine(bundleDir, "blobs", parts[0], parts[1]);
}
}
if (File.Exists(blobPath))
{
blobContent = await File.ReadAllBytesAsync(blobPath, ct);
}
else
{
result.Checks.Add(new VerificationCheck("blob-replay", false,
$"Missing embedded blob: {blobRef.Digest}") { Severity = "error" });
allPassed = false;
continue;
}
}
else
{
// Light bundle: must fetch from registry or blob-source
if (offline)
{
// Offline + light bundle cannot be satisfied: there is no local copy.
result.Checks.Add(new VerificationCheck("blob-replay", false,
$"Cannot fetch blob {blobRef.Digest} in offline mode (light bundle)")
{ Severity = "error" });
allPassed = false;
continue;
}
blobContent = await FetchBlobAsync(blobRef.Digest, blobSource, verbose, ct);
if (blobContent is null)
{
result.Checks.Add(new VerificationCheck("blob-replay", false,
$"Failed to fetch blob: {blobRef.Digest}") { Severity = "error" });
allPassed = false;
continue;
}
}
// Verify digest
var actualDigest = ComputeBlobDigest(blobContent, blobRef.Digest);
if (!string.Equals(actualDigest, blobRef.Digest, StringComparison.OrdinalIgnoreCase))
{
result.Checks.Add(new VerificationCheck("blob-replay", false,
$"Digest mismatch for blob: expected {blobRef.Digest}, got {actualDigest}")
{ Severity = "error" });
allPassed = false;
}
else
{
verified++;
if (verbose)
{
Console.WriteLine($" Blob verified: {blobRef.Digest} ({blobContent.Length} bytes)");
}
}
}
// A single summary check is added only on full success; failures were already
// recorded individually above.
if (allPassed)
{
result.Checks.Add(new VerificationCheck("blob-replay", true,
$"All {verified} large blob(s) verified successfully"));
}
return allPassed;
}
/// <summary>
/// Extracts largeBlobs[] references from DSSE attestation payloads in the bundle.
/// Scans attestations/ (or the bundle root as a fallback) for *.dsse.json and
/// *.intoto.json files, base64-decodes each DSSE payload, and collects
/// predicate.largeBlobs entries. Unparseable files are skipped (logged only in
/// verbose mode) rather than failing the scan.
/// </summary>
private static async Task<List<LargeBlobRef>> ExtractLargeBlobRefsAsync(
string bundleDir, bool verbose, CancellationToken ct)
{
var refs = new List<LargeBlobRef>();
var attestationsDir = Path.Combine(bundleDir, "attestations");
if (!Directory.Exists(attestationsDir))
{
// Also check for DSSE envelopes directly in the bundle root
attestationsDir = bundleDir;
}
var dsseFiles = Directory.Exists(attestationsDir)
? Directory.GetFiles(attestationsDir, "*.dsse.json", SearchOption.AllDirectories)
.Concat(Directory.GetFiles(attestationsDir, "*.intoto.json", SearchOption.AllDirectories))
.ToArray()
: [];
foreach (var dsseFile in dsseFiles)
{
try
{
var json = await File.ReadAllTextAsync(dsseFile, ct);
using var doc = JsonDocument.Parse(json);
var root = doc.RootElement;
// Extract payload from DSSE envelope
if (!root.TryGetProperty("payload", out var payloadProp))
continue;
var payloadB64 = payloadProp.GetString();
if (string.IsNullOrEmpty(payloadB64))
continue;
var payloadBytes = Convert.FromBase64String(payloadB64);
using var payloadDoc = JsonDocument.Parse(payloadBytes);
var payload = payloadDoc.RootElement;
// Look for largeBlobs in the predicate
if (!payload.TryGetProperty("predicate", out var predicate))
continue;
if (!predicate.TryGetProperty("largeBlobs", out var largeBlobs))
continue;
if (largeBlobs.ValueKind != JsonValueKind.Array)
continue;
foreach (var blob in largeBlobs.EnumerateArray())
{
// Each entry carries digest/kind/sizeBytes; only digest is required.
var digest = blob.TryGetProperty("digest", out var d) ? d.GetString() : null;
var kind = blob.TryGetProperty("kind", out var k) ? k.GetString() : null;
var sizeBytes = blob.TryGetProperty("sizeBytes", out var s) ? s.GetInt64() : 0L;
if (!string.IsNullOrEmpty(digest))
{
refs.Add(new LargeBlobRef(digest, kind, sizeBytes));
if (verbose)
{
Console.WriteLine($" Found blob ref: {digest} ({kind ?? "unknown"}, {sizeBytes} bytes)");
}
}
}
}
catch (Exception ex)
{
// Best-effort scan: a malformed envelope must not abort blob replay.
if (verbose)
{
Console.WriteLine($" Warning: Failed to parse {Path.GetFileName(dsseFile)}: {ex.Message}");
}
}
}
return refs;
}
/// <summary>
/// Fetches a blob by digest from a local blob-source directory or a registry URL.
/// Local lookup tries the flat "algo-hash" filename first, then an
/// "algo/hash" subdirectory layout. Returns null when the blob cannot be
/// located or fetched (callers report the failure as a check).
/// </summary>
/// <param name="digest">Blob digest in "algo:hex" form.</param>
/// <param name="blobSource">Local directory path or registry base URL; null/empty disables fetching.</param>
/// <param name="verbose">When true, prints fetch progress and errors.</param>
/// <param name="ct">Cancellation token for I/O.</param>
private static async Task<byte[]?> FetchBlobAsync(
    string digest, string? blobSource, bool verbose, CancellationToken ct)
{
    if (!string.IsNullOrEmpty(blobSource) && Directory.Exists(blobSource))
    {
        // Local directory: look for blob by digest (flat "algo-hash" filename).
        var localPath = Path.Combine(blobSource, digest.Replace(":", "-"));
        if (File.Exists(localPath))
            return await File.ReadAllBytesAsync(localPath, ct);
        // Try sha256/<hash> structure
        var parts = digest.Split(':');
        if (parts.Length == 2)
        {
            localPath = Path.Combine(blobSource, parts[0], parts[1]);
            if (File.Exists(localPath))
                return await File.ReadAllBytesAsync(localPath, ct);
        }
        if (verbose)
        {
            Console.WriteLine($"  Blob not found in local source: {digest}");
        }
        return null;
    }
    if (!string.IsNullOrEmpty(blobSource))
    {
        // Registry URL: fetch via OCI blob API
        // TODO: Implement OCI registry blob fetch when IOciRegistryClient is available
        // NOTE(review): "/v2/_blobs/" is not the standard OCI distribution path
        // ("/v2/<name>/blobs/<digest>") — presumably a placeholder until the
        // registry client lands; confirm against the target registry.
        var url = $"{blobSource.TrimEnd('/')}/v2/_blobs/{digest}";
        if (verbose)
        {
            // Fix: log the URL actually requested (previously logged
            // "{blobSource}/blobs/{digest}", which did not match the request).
            Console.WriteLine($"  Fetching blob from registry: {url}");
        }
        try
        {
            using var http = StellaOps.Cli.Services.CliHttpClients.CreateClient(timeout: TimeSpan.FromSeconds(60));
            var response = await http.GetAsync(url, ct);
            if (response.IsSuccessStatusCode)
            {
                return await response.Content.ReadAsByteArrayAsync(ct);
            }
            if (verbose)
            {
                Console.WriteLine($"  Registry returned: {response.StatusCode}");
            }
        }
        catch (Exception ex)
        {
            // Best-effort: network failures surface as a null result, not a crash.
            if (verbose)
            {
                Console.WriteLine($"  Fetch error: {ex.Message}");
            }
        }
        return null;
    }
    // No blob source specified - cannot fetch
    return null;
}
/// <summary>
/// Computes the digest of blob content using the algorithm named in the
/// expected digest's "algo:hex" prefix. Unsupported algorithms fall back to
/// SHA-256, and the returned string is labeled with the algorithm actually
/// used, so the output is always self-consistent.
/// </summary>
/// <param name="content">Raw blob bytes to hash.</param>
/// <param name="expectedDigest">Expected digest whose prefix selects the algorithm.</param>
/// <returns>Digest in "algo:lowercase-hex" form.</returns>
private static string ComputeBlobDigest(byte[] content, string expectedDigest)
{
    var algorithm = expectedDigest.Split(':')[0].ToLowerInvariant();
    byte[] hash;
    switch (algorithm)
    {
        case "sha256":
            hash = SHA256.HashData(content);
            break;
        case "sha384":
            hash = SHA384.HashData(content);
            break;
        case "sha512":
            hash = SHA512.HashData(content);
            break;
        default:
            // Fix: previously an unknown algorithm produced e.g. "md5:<sha256-hex>",
            // pairing the wrong label with a SHA-256 hash in mismatch messages.
            algorithm = "sha256";
            hash = SHA256.HashData(content);
            break;
    }
    return $"{algorithm}:{Convert.ToHexStringLower(hash)}";
}
/// <summary>
/// Reference to a large blob in a DSSE attestation predicate.
/// </summary>
/// <param name="Digest">Content digest in "algo:hex" form.</param>
/// <param name="Kind">Optional blob kind/category from the predicate; null when absent.</param>
/// <param name="SizeBytes">Declared size in bytes; 0 when the predicate omits it.</param>
private sealed record LargeBlobRef(string Digest, string? Kind, long SizeBytes);
/// <summary>
/// Finalizes a verification run: stamps the completion time, optionally signs
/// and writes the verification report (recorded as a "report:signature" check),
/// computes the overall status, and emits the result.
/// </summary>
/// <returns>Process exit code from <see cref="OutputResult"/>.</returns>
private static async Task<int> FinalizeResultAsync(
    VerificationResult result,
    BundleManifestDto? manifest,
    string bundleDir,
    string? trustRoot,
    string? rekorCheckpoint,
    bool offline,
    string outputFormat,
    bool strict,
    string? signerKeyPath,
    string? signerCertPath,
    CancellationToken ct)
{
    // Stamp completion exactly once; the signed report reuses this timestamp.
    result.CompletedAt ??= DateTimeOffset.UtcNow;

    if (!string.IsNullOrWhiteSpace(signerKeyPath))
    {
        var signing = await TryWriteSignedReportAsync(
            result,
            manifest,
            bundleDir,
            trustRoot,
            rekorCheckpoint,
            offline,
            signerKeyPath,
            signerCertPath,
            ct);

        if (!signing.Success)
        {
            // Signing was requested, so a failure here is a hard error.
            result.Checks.Add(new VerificationCheck(
                "report:signature",
                false,
                signing.Error ?? "Signed report generation failed")
            {
                Severity = "error"
            });
        }
        else
        {
            result.SignedReportPath = signing.ReportPath;
            result.SignerKeyId = signing.KeyId;
            result.SignerAlgorithm = signing.Algorithm;
            result.SignedAt = signing.SignedAt;
            result.Checks.Add(new VerificationCheck(
                "report:signature",
                true,
                $"Signed report written to {signing.ReportPath}"));
        }
    }

    // Overall status must be computed after the signature check is appended.
    result.OverallStatus = ComputeOverallStatus(result.Checks);
    return OutputResult(result, outputFormat, strict);
}
/// <summary>
/// Builds, signs, and writes the DSSE verification report to
/// &lt;bundleDir&gt;/out/verification.report.json. Never throws: any failure
/// (bad key, missing cert, I/O) is returned as an unsuccessful outcome so the
/// caller can record it as a check.
/// </summary>
private static async Task<SignedReportOutcome> TryWriteSignedReportAsync(
    VerificationResult result,
    BundleManifestDto? manifest,
    string bundleDir,
    string? trustRoot,
    string? rekorCheckpoint,
    bool offline,
    string signerKeyPath,
    string? signerCertPath,
    CancellationToken ct)
{
    try
    {
        var key = LoadSigningKey(signerKeyPath);
        var certificatePem = await LoadSignerCertificateAsync(signerCertPath, signerKeyPath, ct);
        var predicate = BuildVerificationReport(result, manifest, trustRoot, rekorCheckpoint, offline);

        var signer = new DsseVerificationReportSigner(new EnvelopeSignatureService());
        var signedAt = result.CompletedAt ?? DateTimeOffset.UtcNow;
        var signed = await signer.SignAsync(
            new VerificationReportSigningRequest(predicate, key, certificatePem, signedAt),
            ct);

        var outDir = Path.Combine(bundleDir, "out");
        Directory.CreateDirectory(outDir);
        var reportPath = Path.Combine(outDir, "verification.report.json");
        await File.WriteAllTextAsync(reportPath, signed.EnvelopeJson, ct);

        return new SignedReportOutcome(
            true,
            reportPath,
            key.KeyId,
            key.AlgorithmId,
            signed.Report.Verifier?.SignedAt,
            null);
    }
    catch (Exception ex)
    {
        // Surface the failure reason; the caller converts it into a failed check.
        return new SignedReportOutcome(false, null, null, null, null, ex.Message);
    }
}
/// <summary>
/// Projects the CLI's verification result into the attestation
/// <c>VerificationReportPredicate</c> that gets signed: one numbered step per
/// check, aggregate counters derived from the same list, and optional trust
/// chain metadata when a trust root or Rekor checkpoint was supplied.
/// </summary>
private static VerificationReportPredicate BuildVerificationReport(
    VerificationResult result,
    BundleManifestDto? manifest,
    string? trustRoot,
    string? rekorCheckpoint,
    bool offline)
{
    // One report step per check, numbered from 1 in check order.
    var steps = result.Checks
        .Select((check, index) => new VerificationStep
        {
            Step = index + 1,
            Name = check.Name,
            Status = MapStepStatus(check),
            // Per-step timing is not tracked; only the overall duration is reported.
            DurationMs = 0,
            Details = check.Message,
            Issues = BuildIssues(check)
        })
        .ToArray();
    var summary = ComputeOverallStatus(result.Checks);
    var overallStatus = MapOverallStatus(summary);
    var overall = new OverallVerificationResult
    {
        Status = overallStatus,
        Summary = summary,
        // Falls back to 0 when CompletedAt was never set (should not happen after finalize).
        TotalDurationMs = (long?)((result.CompletedAt - result.StartedAt)?.TotalMilliseconds) ?? 0,
        PassedSteps = steps.Count(step => step.Status == VerificationStepStatus.Passed),
        FailedSteps = steps.Count(step => step.Status == VerificationStepStatus.Failed),
        WarningSteps = steps.Count(step => step.Status == VerificationStepStatus.Warning),
        SkippedSteps = steps.Count(step => step.Status == VerificationStepStatus.Skipped)
    };
    // Trust chain info is only attached when the user provided trust material.
    TrustChainInfo? trustChain = null;
    if (!string.IsNullOrWhiteSpace(trustRoot) || !string.IsNullOrWhiteSpace(rekorCheckpoint))
    {
        // Rekor is considered verified iff a passing "rekor:inclusion" check exists.
        var rekorVerified = result.Checks.Any(check =>
            string.Equals(check.Name, "rekor:inclusion", StringComparison.OrdinalIgnoreCase) && check.Passed);
        trustChain = new TrustChainInfo
        {
            RootOfTrust = trustRoot,
            RekorVerified = rekorVerified,
            // Log index / TSA data are not captured at this layer yet.
            RekorLogIndex = null,
            TsaVerified = false,
            Timestamp = null,
            SignerIdentity = result.SignerKeyId
        };
    }
    return new VerificationReportPredicate
    {
        // Stable id: canonical manifest hash > subject sha256 > hash of the bundle path.
        ReportId = ComputeReportId(result, manifest),
        GeneratedAt = result.CompletedAt ?? DateTimeOffset.UtcNow,
        Generator = new GeneratorInfo
        {
            Tool = "stella bundle verify",
            Version = GetCliVersion()
        },
        Subject = new VerificationSubject
        {
            BundleId = manifest?.CanonicalManifestHash,
            BundleDigest = manifest?.Subject?.Sha256,
            ArtifactDigest = manifest?.Bundle?.Digest,
            ArtifactName = manifest?.Bundle?.Image
        },
        VerificationSteps = steps,
        OverallResult = overall,
        TrustChain = trustChain,
        ReplayMode = offline ? "offline" : "online"
    };
}
/// <summary>
/// Maps a check to its report step status: failures map to Failed; passing
/// checks map to Warning when severity is "warning", otherwise Passed.
/// </summary>
private static VerificationStepStatus MapStepStatus(VerificationCheck check)
{
    if (!check.Passed)
    {
        return VerificationStepStatus.Failed;
    }
    // Fix: compare severity case-insensitively for consistency with
    // BuildIssues/MapIssueSeverity (previously an exact-case switch, which
    // also carried a redundant "info" arm identical to the default).
    return string.Equals(check.Severity, "warning", StringComparison.OrdinalIgnoreCase)
        ? VerificationStepStatus.Warning
        : VerificationStepStatus.Passed;
}
/// <summary>
/// Produces the issue list for a check: null for a clean pass, otherwise a
/// single issue describing the failure or warning.
/// </summary>
private static IReadOnlyList<VerificationIssue>? BuildIssues(VerificationCheck check)
{
    var isWarning = string.Equals(check.Severity, "warning", StringComparison.OrdinalIgnoreCase);
    if (check.Passed && !isWarning)
    {
        return null;
    }

    var issue = new VerificationIssue
    {
        Severity = MapIssueSeverity(check),
        Code = check.Name,
        Message = check.Message
    };
    return [issue];
}
/// <summary>
/// Maps a check to its issue severity: Error on failure, Warning for passing
/// checks marked "warning", otherwise Info.
/// </summary>
private static IssueSeverity MapIssueSeverity(VerificationCheck check) =>
    check switch
    {
        { Passed: false } => IssueSeverity.Error,
        _ when string.Equals(check.Severity, "warning", StringComparison.OrdinalIgnoreCase) => IssueSeverity.Warning,
        _ => IssueSeverity.Info
    };
/// <summary>
/// Maps the aggregate status string (from <see cref="ComputeOverallStatus"/>)
/// to a step status; anything unrecognized (including "UNKNOWN") maps to Skipped.
/// </summary>
private static VerificationStepStatus MapOverallStatus(string? status)
{
    if (status == "PASSED")
    {
        return VerificationStepStatus.Passed;
    }
    if (status == "FAILED")
    {
        return VerificationStepStatus.Failed;
    }
    if (status == "PASSED_WITH_WARNINGS")
    {
        return VerificationStepStatus.Warning;
    }
    return VerificationStepStatus.Skipped;
}
/// <summary>
/// Aggregates check outcomes into an overall status string:
/// "UNKNOWN" when no checks ran, "PASSED" when all passed, "FAILED" when any
/// failed check has error severity, otherwise "PASSED_WITH_WARNINGS".
/// </summary>
private static string ComputeOverallStatus(IReadOnlyList<VerificationCheck> checks)
{
    if (checks.Count == 0)
    {
        return "UNKNOWN";
    }

    var anyFailed = false;
    foreach (var check in checks)
    {
        if (check.Passed)
        {
            continue;
        }
        anyFailed = true;
        // Fix: severity compared case-insensitively, consistent with the other
        // severity comparisons in this file (previously exact-case "error").
        if (string.Equals(check.Severity, "error", StringComparison.OrdinalIgnoreCase))
        {
            return "FAILED";
        }
    }
    return anyFailed ? "PASSED_WITH_WARNINGS" : "PASSED";
}
/// <summary>
/// Chooses a stable report identifier: the canonical manifest hash when
/// present, else the subject sha256, else a sha256 of the bundle path.
/// </summary>
private static string ComputeReportId(VerificationResult result, BundleManifestDto? manifest)
{
    var canonicalHash = manifest?.CanonicalManifestHash;
    if (!string.IsNullOrWhiteSpace(canonicalHash))
    {
        return canonicalHash!;
    }

    var subjectSha = manifest?.Subject?.Sha256;
    if (!string.IsNullOrWhiteSpace(subjectSha))
    {
        return subjectSha!;
    }

    // Last resort: derive a deterministic id from the bundle path itself.
    return ComputeSha256Hex(result.BundlePath);
}
/// <summary>
/// Returns the SHA-256 of the UTF-8 bytes of <paramref name="value"/> in
/// "sha256:lowercase-hex" form; null input is treated as the empty string.
/// </summary>
private static string ComputeSha256Hex(string value)
{
    var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(value ?? string.Empty));
    // Consistency: use Convert.ToHexStringLower like ComputeBlobDigest,
    // instead of ToHexString(...).ToLowerInvariant().
    return $"sha256:{Convert.ToHexStringLower(bytes)}";
}
/// <summary>
/// Loads an ECDSA private key from a PEM file and wraps it as a DSSE signing key.
/// </summary>
/// <exception cref="InvalidOperationException">Blank path, invalid PEM, or unsupported key size.</exception>
/// <exception cref="FileNotFoundException">Key file does not exist.</exception>
private static EnvelopeKey LoadSigningKey(string path)
{
    if (string.IsNullOrWhiteSpace(path))
    {
        throw new InvalidOperationException("Signing key path is required for report signing.");
    }
    if (!File.Exists(path))
    {
        throw new FileNotFoundException($"Signing key file not found: {path}");
    }

    using var ecdsa = ECDsa.Create();
    try
    {
        ecdsa.ImportFromPem(File.ReadAllText(path));
    }
    catch (CryptographicException ex)
    {
        // Wrap so callers see a consistent, actionable error type.
        throw new InvalidOperationException("Failed to load ECDSA private key from PEM.", ex);
    }

    // Algorithm (ES256/384/512) is inferred from the imported key's size.
    var algorithm = ResolveEcdsaAlgorithm(ecdsa.KeySize);
    var privateParameters = ecdsa.ExportParameters(true);
    return EnvelopeKey.CreateEcdsaSigner(algorithm, privateParameters);
}
/// <summary>
/// Maps an ECDSA key size to its JOSE signature algorithm id
/// (256→ES256, 384→ES384, 521→ES512; note P-521 is 521 bits, not 512).
/// </summary>
/// <exception cref="InvalidOperationException">Any other key size.</exception>
private static string ResolveEcdsaAlgorithm(int keySize)
{
    if (keySize == 256)
    {
        return SignatureAlgorithms.Es256;
    }
    if (keySize == 384)
    {
        return SignatureAlgorithms.Es384;
    }
    if (keySize == 521)
    {
        return SignatureAlgorithms.Es512;
    }
    throw new InvalidOperationException($"Unsupported ECDSA key size: {keySize}.");
}
/// <summary>
/// Resolves the signer certificate PEM: an explicit cert file wins; otherwise
/// any CERTIFICATE blocks embedded in the key PEM are used. Returns null when
/// no certificate is available.
/// </summary>
/// <exception cref="FileNotFoundException">Explicit certificate path does not exist.</exception>
private static async Task<string?> LoadSignerCertificateAsync(
    string? signerCertPath,
    string signerKeyPath,
    CancellationToken ct)
{
    if (string.IsNullOrWhiteSpace(signerCertPath))
    {
        // No dedicated cert file: the key PEM may bundle certificate blocks.
        var keyPem = await File.ReadAllTextAsync(signerKeyPath, ct);
        return ExtractCertificatePem(keyPem);
    }

    if (!File.Exists(signerCertPath))
    {
        throw new FileNotFoundException($"Signer certificate file not found: {signerCertPath}");
    }
    var certPem = await File.ReadAllTextAsync(signerCertPath, ct);
    return NormalizePem(certPem);
}
/// <summary>
/// Collects every CERTIFICATE block from a PEM document into a single
/// newline-separated, normalized PEM string; null when none are present or a
/// block is unterminated.
/// </summary>
private static string? ExtractCertificatePem(string pem)
{
    const string beginMarker = "-----BEGIN CERTIFICATE-----";
    const string endMarker = "-----END CERTIFICATE-----";

    var collected = new StringBuilder();
    var cursor = 0;
    while (true)
    {
        var blockStart = pem.IndexOf(beginMarker, cursor, StringComparison.Ordinal);
        if (blockStart < 0)
        {
            break;
        }
        var markerStart = pem.IndexOf(endMarker, blockStart, StringComparison.Ordinal);
        if (markerStart < 0)
        {
            // Unterminated block: stop scanning rather than emit a partial cert.
            break;
        }

        var certBlock = pem.Substring(blockStart, markerStart - blockStart + endMarker.Length).Trim();
        if (collected.Length > 0)
        {
            collected.Append('\n');
        }
        collected.Append(certBlock);
        cursor = markerStart + endMarker.Length;
    }

    return collected.Length == 0 ? null : NormalizePem(collected.ToString());
}
/// <summary>
/// Normalizes PEM text: CRLF line endings become LF and surrounding
/// whitespace is trimmed; null/blank input yields null.
/// </summary>
private static string? NormalizePem(string? pem) =>
    string.IsNullOrWhiteSpace(pem)
        ? null
        : pem.Replace("\r\n", "\n").Trim();
/// <summary>
/// Returns the CLI assembly's version string, or "unknown" when the assembly
/// carries no version.
/// </summary>
private static string GetCliVersion()
{
    var version = typeof(BundleVerifyCommand).Assembly.GetName().Version;
    return version is null ? "unknown" : version.ToString();
}
/// <summary>
/// Writes the verification result to stdout — JSON when requested, otherwise a
/// human-readable summary — and returns the process exit code: 1 when FAILED,
/// 1 when PASSED_WITH_WARNINGS under --strict, otherwise 0.
/// </summary>
private static int OutputResult(VerificationResult result, string format, bool strict)
{
    if (format == "json")
    {
        Console.WriteLine(JsonSerializer.Serialize(result, JsonOptions));
    }
    else
    {
        Console.WriteLine();
        Console.WriteLine("═══════════════════════════════════════════════════════════");
        Console.WriteLine($"Verification Result: {result.OverallStatus}");
        Console.WriteLine("═══════════════════════════════════════════════════════════");

        if (result.Checks.Count > 0)
        {
            Console.WriteLine();
            Console.WriteLine("Checks:");
            foreach (var check in result.Checks)
            {
                // ✓ pass, ⚠ warning-severity failure, ✗ hard failure.
                string icon;
                if (check.Passed)
                {
                    icon = "✓";
                }
                else if (check.Severity == "warning")
                {
                    icon = "⚠";
                }
                else
                {
                    icon = "✗";
                }
                Console.WriteLine($"  {icon} {check.Name}: {check.Message}");
            }
        }

        Console.WriteLine();
        Console.WriteLine($"Duration: {(result.CompletedAt - result.StartedAt)?.TotalMilliseconds:F0}ms");

        if (!string.IsNullOrWhiteSpace(result.SignedReportPath))
        {
            Console.WriteLine($"Signed report: {result.SignedReportPath}");
            if (!string.IsNullOrWhiteSpace(result.SignerKeyId))
            {
                var algo = string.IsNullOrWhiteSpace(result.SignerAlgorithm)
                    ? string.Empty
                    : $" ({result.SignerAlgorithm})";
                Console.WriteLine($"Signer key: {result.SignerKeyId}{algo}");
            }
        }
    }

    // Exit code
    var failure = result.OverallStatus == "FAILED"
        || (strict && result.OverallStatus == "PASSED_WITH_WARNINGS");
    return failure ? 1 : 0;
}
#region DTOs
/// <summary>
/// JSON-serializable outcome of a bundle verification run; also the payload
/// emitted directly for the JSON output format.
/// </summary>
private sealed class VerificationResult
{
    [JsonPropertyName("bundlePath")]
    public string BundlePath { get; set; } = "";
    [JsonPropertyName("startedAt")]
    public DateTimeOffset StartedAt { get; set; }
    [JsonPropertyName("completedAt")]
    public DateTimeOffset? CompletedAt { get; set; }
    [JsonPropertyName("offline")]
    public bool Offline { get; set; }
    // "PASSED" | "FAILED" | "PASSED_WITH_WARNINGS" | "UNKNOWN" (see ComputeOverallStatus).
    [JsonPropertyName("overallStatus")]
    public string OverallStatus { get; set; } = "UNKNOWN";
    [JsonPropertyName("schemaVersion")]
    public string? SchemaVersion { get; set; }
    [JsonPropertyName("image")]
    public string? Image { get; set; }
    // Populated only when a signed report was requested and written successfully.
    [JsonPropertyName("signedReportPath")]
    public string? SignedReportPath { get; set; }
    [JsonPropertyName("signerKeyId")]
    public string? SignerKeyId { get; set; }
    [JsonPropertyName("signerAlgorithm")]
    public string? SignerAlgorithm { get; set; }
    [JsonPropertyName("signedAt")]
    public DateTimeOffset? SignedAt { get; set; }
    [JsonPropertyName("checks")]
    public List<VerificationCheck> Checks { get; set; } = [];
}
/// <summary>
/// Single named verification check with pass/fail state and a severity of
/// "info", "warning", or "error".
/// </summary>
private sealed class VerificationCheck
{
    // Parameterless constructor retained for JSON deserialization.
    public VerificationCheck() { }
    // Convenience constructor: severity defaults to "info" on pass, "error" on failure;
    // callers override via the object initializer (e.g. { Severity = "warning" }).
    public VerificationCheck(string name, bool passed, string message)
    {
        Name = name;
        Passed = passed;
        Message = message;
        Severity = passed ? "info" : "error";
    }
    [JsonPropertyName("name")]
    public string Name { get; set; } = "";
    [JsonPropertyName("passed")]
    public bool Passed { get; set; }
    [JsonPropertyName("message")]
    public string Message { get; set; } = "";
    [JsonPropertyName("severity")]
    public string Severity { get; set; } = "info";
}
/// <summary>
/// Result of an attempt to sign and write the verification report.
/// On success, <paramref name="ReportPath"/>/<paramref name="KeyId"/>/
/// <paramref name="Algorithm"/> are populated; on failure only
/// <paramref name="Error"/> is set.
/// </summary>
private sealed record SignedReportOutcome(
    bool Success,
    string? ReportPath,
    string? KeyId,
    string? Algorithm,
    DateTimeOffset? SignedAt,
    string? Error);
/// <summary>
/// Deserialized shape of the bundle manifest (only fields this command reads).
/// </summary>
private sealed class BundleManifestDto
{
    [JsonPropertyName("canonicalManifestHash")]
    public string? CanonicalManifestHash { get; set; }
    [JsonPropertyName("schemaVersion")]
    public string? SchemaVersion { get; set; }
    [JsonPropertyName("subject")]
    public BundleSubjectDto? Subject { get; set; }
    [JsonPropertyName("bundle")]
    public BundleInfoDto? Bundle { get; set; }
    [JsonPropertyName("verify")]
    public VerifySectionDto? Verify { get; set; }
    /// <summary>Sprint 040-06: Export mode (light or full) for blob replay verification.</summary>
    [JsonPropertyName("exportMode")]
    public string? ExportMode { get; set; }
}
/// <summary>Subject digests from the manifest (hex, without an "algo:" prefix — TODO confirm against producer).</summary>
private sealed class BundleSubjectDto
{
    [JsonPropertyName("sha256")]
    public string? Sha256 { get; set; }
    [JsonPropertyName("sha512")]
    public string? Sha512 { get; set; }
}
/// <summary>Bundle image identity and its artifact listing from the manifest.</summary>
private sealed class BundleInfoDto
{
    [JsonPropertyName("image")]
    public string? Image { get; set; }
    [JsonPropertyName("digest")]
    public string? Digest { get; set; }
    [JsonPropertyName("artifacts")]
    public List<ArtifactDto>? Artifacts { get; set; }
}
/// <summary>Single artifact entry in the bundle manifest (path + optional digest/media type).</summary>
private sealed class ArtifactDto
{
    [JsonPropertyName("path")]
    public string Path { get; set; } = "";
    [JsonPropertyName("digest")]
    public string? Digest { get; set; }
    [JsonPropertyName("mediaType")]
    public string? MediaType { get; set; }
}
/// <summary>Manifest "verify" section wrapper holding verification expectations.</summary>
private sealed class VerifySectionDto
{
    [JsonPropertyName("expectations")]
    public ExpectationsDto? Expectations { get; set; }
}
/// <summary>Expected DSSE payload types declared by the manifest's verify section.</summary>
private sealed class ExpectationsDto
{
    [JsonPropertyName("payloadTypes")]
    public List<string> PayloadTypes { get; set; } = [];
}
/// <summary>DSSE envelope shape: base64-encoded payload, its type URI, and signatures.</summary>
private sealed class DsseEnvelopeDto
{
    [JsonPropertyName("signatures")]
    public List<SignatureDto>? Signatures { get; set; }
    // Base64-encoded statement bytes per the DSSE spec.
    [JsonPropertyName("payload")]
    public string? Payload { get; set; }
    [JsonPropertyName("payloadType")]
    public string? PayloadType { get; set; }
}
/// <summary>One DSSE signature: optional key id plus the base64 signature value.</summary>
private sealed class SignatureDto
{
    // DSSE uses lowercase "keyid" on the wire.
    [JsonPropertyName("keyid")]
    public string? KeyId { get; set; }
    [JsonPropertyName("sig")]
    public string? Sig { get; set; }
}
/// <summary>Minimal Rekor proof shape; only the log index is modeled here.</summary>
private sealed class RekorProofDto
{
    [JsonPropertyName("logIndex")]
    public long LogIndex { get; set; }
}
/// <summary>Transparency-log checkpoint as serialized: tree size and encoded root hash.</summary>
private sealed class CheckpointDto
{
    [JsonPropertyName("treeSize")]
    public long TreeSize { get; set; }
    [JsonPropertyName("rootHash")]
    public string? RootHash { get; set; }
}
private readonly record struct ParsedCheckpoint(long TreeSize, byte[] RootHash);
#endregion
}