stabilization work - projects rework for maintainability and UI livening

This commit is contained in:
master
2026-02-03 23:40:04 +02:00
parent 074ce117ba
commit 557feefdc3
3305 changed files with 186813 additions and 107843 deletions

View File

@@ -0,0 +1,36 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Provcache;
/// <summary>
/// Manifest describing all chunks for a proof root (metadata only).
/// Used for lazy fetching where blobs are retrieved on demand.
/// </summary>
/// <summary>
/// Metadata-only manifest enumerating every chunk that belongs to a proof root.
/// Enables lazy fetching: consumers read the manifest first and retrieve blobs on demand.
/// </summary>
public sealed record ChunkManifest
{
    /// <summary>Merkle root computed over all chunk hashes.</summary>
    public required string ProofRoot { get; init; }

    /// <summary>Number of chunks described by this manifest.</summary>
    public required int TotalChunks { get; init; }

    /// <summary>Combined size of all chunk blobs, in bytes.</summary>
    public required long TotalSize { get; init; }

    /// <summary>Chunk metadata entries, ordered by chunk index.</summary>
    public required IReadOnlyList<ChunkMetadata> Chunks { get; init; }

    /// <summary>Timestamp at which this manifest was produced.</summary>
    public required DateTimeOffset GeneratedAt { get; init; }
}

View File

@@ -0,0 +1,34 @@
using System;
namespace StellaOps.Provcache;
/// <summary>
/// Metadata for a single chunk (no blob).
/// </summary>
/// <summary>
/// Describes a single evidence chunk without carrying its blob.
/// </summary>
public sealed record ChunkMetadata
{
    /// <summary>Unique identifier of the chunk.</summary>
    public required Guid ChunkId { get; init; }

    /// <summary>Zero-based position of the chunk within its proof root.</summary>
    public required int Index { get; init; }

    /// <summary>SHA-256 hash used to verify the chunk blob.</summary>
    public required string Hash { get; init; }

    /// <summary>Blob size in bytes.</summary>
    public required int Size { get; init; }

    /// <summary>MIME content type of the chunk data.</summary>
    public required string ContentType { get; init; }
}

View File

@@ -0,0 +1,24 @@
using System.Collections.Generic;
namespace StellaOps.Provcache;
/// <summary>
/// Result of chunking evidence.
/// </summary>
/// <summary>
/// Outcome of splitting evidence into chunks.
/// </summary>
public sealed record ChunkingResult
{
    /// <summary>Merkle root computed over all chunk hashes.</summary>
    public required string ProofRoot { get; init; }

    /// <summary>The chunks produced from the original evidence, in index order.</summary>
    public required IReadOnlyList<EvidenceChunk> Chunks { get; init; }

    /// <summary>Size of the original (un-chunked) evidence, in bytes.</summary>
    public required long TotalSize { get; init; }
}

View File

@@ -0,0 +1,65 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class EvidenceChunker
{
    /// <inheritdoc />
    public Task<ChunkingResult> ChunkAsync(
        ReadOnlyMemory<byte> evidence,
        string contentType,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(contentType);

        var size = _options.ChunkSize;
        var timestamp = _timeProvider.GetUtcNow();
        var data = evidence.Span;
        var total = data.Length;

        var pending = new List<EvidenceChunk>();
        var hashes = new List<string>();

        var index = 0;
        var position = 0;
        while (position < total)
        {
            cancellationToken.ThrowIfCancellationRequested();

            // Last chunk may be shorter than the configured chunk size.
            var length = Math.Min(size, total - position);
            var payload = data.Slice(position, length).ToArray();
            var hash = ComputeHash(payload);

            pending.Add(new EvidenceChunk
            {
                ChunkId = _guidProvider.NewGuid(),
                ProofRoot = string.Empty, // Patched below once the Merkle root is known.
                ChunkIndex = index,
                ChunkHash = hash,
                Blob = payload,
                BlobSize = length,
                ContentType = contentType,
                CreatedAt = timestamp
            });
            hashes.Add(hash);

            index++;
            position += length;
        }

        var root = ComputeMerkleRoot(hashes);
        // Stamp the computed root onto every chunk.
        var stamped = pending.Select(c => c with { ProofRoot = root }).ToList();

        return Task.FromResult(new ChunkingResult
        {
            ProofRoot = root,
            Chunks = stamped,
            TotalSize = total
        });
    }
}

View File

@@ -0,0 +1,88 @@
using StellaOps.Determinism;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
namespace StellaOps.Provcache;
public sealed partial class EvidenceChunker
{
    /// <inheritdoc />
    public bool VerifyChunk(EvidenceChunk chunk)
    {
        ArgumentNullException.ThrowIfNull(chunk);
        return string.Equals(ComputeHash(chunk.Blob), chunk.ChunkHash, StringComparison.OrdinalIgnoreCase);
    }

    /// <inheritdoc />
    public string ComputeMerkleRoot(IEnumerable<string> chunkHashes)
    {
        ArgumentNullException.ThrowIfNull(chunkHashes);

        var leaves = chunkHashes.ToList();
        if (leaves.Count == 0)
        {
            // Root of an empty tree: hash of zero bytes.
            return ComputeHash([]);
        }
        if (leaves.Count == 1)
        {
            // A single leaf is returned verbatim (prefix and casing preserved).
            return leaves[0];
        }

        // Build the tree bottom-up. An odd trailing node is paired with a copy of
        // itself (right = left below).
        // NOTE(review): DecisionDigestBuilder.WithEvidenceChunks promotes odd nodes
        // unchanged instead of duplicating them, so the two implementations yield
        // different roots for odd leaf counts — confirm which behavior is intended.
        var level = leaves.Select(HexToBytes).ToList();
        while (level.Count > 1)
        {
            var parents = new List<byte[]>();
            for (var i = 0; i < level.Count; i += 2)
            {
                var left = level[i];
                var right = i + 1 < level.Count ? level[i + 1] : level[i];
                var combined = new byte[left.Length + right.Length];
                left.CopyTo(combined, 0);
                right.CopyTo(combined, left.Length);
                parents.Add(SHA256.HashData(combined));
            }
            level = parents;
        }

        return $"sha256:{Convert.ToHexStringLower(level[0])}";
    }

    // Hashes the bytes with SHA-256 and returns the "sha256:<hex>" form.
    private static string ComputeHash(ReadOnlySpan<byte> data)
        => $"sha256:{Convert.ToHexStringLower(SHA256.HashData(data))}";

    // Strips a leading "sha256:" marker, if present, then decodes the hex digits.
    private static byte[] HexToBytes(string hash)
    {
        var hex = hash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
            ? hash[7..]
            : hash;
        return Convert.FromHexString(hex);
    }
}

View File

@@ -0,0 +1,62 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class EvidenceChunker
{
    /// <inheritdoc />
    public Task<byte[]> ReassembleAsync(
        IEnumerable<EvidenceChunk> chunks,
        string expectedProofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(chunks);
        ArgumentException.ThrowIfNullOrWhiteSpace(expectedProofRoot);

        var ordered = chunks.OrderBy(c => c.ChunkIndex).ToList();
        if (ordered.Count == 0)
        {
            throw new ArgumentException("No chunks provided.", nameof(chunks));
        }

        // Fail fast when the overall Merkle root does not match.
        var actualRoot = ComputeMerkleRoot(ordered.Select(c => c.ChunkHash).ToList());
        if (!string.Equals(actualRoot, expectedProofRoot, StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException(
                $"Merkle root mismatch. Expected: {expectedProofRoot}, Computed: {actualRoot}");
        }

        // Then verify every chunk hash individually.
        foreach (var chunk in ordered)
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (!VerifyChunk(chunk))
            {
                throw new InvalidOperationException(
                    $"Chunk {chunk.ChunkIndex} verification failed. Expected hash: {chunk.ChunkHash}");
            }
        }

        // Concatenate blobs in index order.
        var assembled = new byte[ordered.Sum(c => c.BlobSize)];
        var cursor = 0;
        foreach (var chunk in ordered)
        {
            chunk.Blob.CopyTo(assembled, cursor);
            cursor += chunk.BlobSize;
        }

        return Task.FromResult(assembled);
    }
}

View File

@@ -0,0 +1,48 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class EvidenceChunker
{
    /// <inheritdoc />
    /// <remarks>
    /// Each yielded chunk carries an empty <c>ProofRoot</c>; the caller must compute
    /// the Merkle root after enumerating all chunks.
    /// </remarks>
    public async IAsyncEnumerable<EvidenceChunk> ChunkStreamAsync(
        Stream evidenceStream,
        string contentType,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(evidenceStream);
        ArgumentNullException.ThrowIfNull(contentType);

        var chunkSize = _options.ChunkSize;
        var buffer = new byte[chunkSize];
        var chunkIndex = 0;
        var now = _timeProvider.GetUtcNow();

        int bytesRead;
        // ReadAtLeastAsync fills the buffer completely except at end-of-stream. A plain
        // ReadAsync may legally return fewer bytes than requested mid-stream, which would
        // make chunk boundaries — and therefore chunk hashes and the Merkle root —
        // depend on how the underlying stream splits its reads (non-deterministic).
        while ((bytesRead = await evidenceStream.ReadAtLeastAsync(
            buffer, chunkSize, throwOnEndOfStream: false, cancellationToken)) > 0)
        {
            // The range copies the filled portion into a fresh array, so each chunk
            // owns its blob and the shared buffer can be reused on the next iteration.
            var blob = buffer[..bytesRead];
            yield return new EvidenceChunk
            {
                ChunkId = _guidProvider.NewGuid(),
                ProofRoot = string.Empty, // Caller must compute after all chunks.
                ChunkIndex = chunkIndex,
                ChunkHash = ComputeHash(blob),
                Blob = blob,
                BlobSize = bytesRead,
                ContentType = contentType,
                CreatedAt = now
            };
            chunkIndex++;
        }
    }
}

View File

@@ -1,91 +1,12 @@
using StellaOps.Determinism;
using System.Security.Cryptography;
using System;
namespace StellaOps.Provcache;
/// <summary>
/// Interface for splitting large evidence into fixed-size chunks
/// and reassembling them with Merkle verification.
/// </summary>
public interface IEvidenceChunker
{
/// <summary>
/// Splits evidence into chunks.
/// </summary>
/// <param name="evidence">The evidence bytes to split.</param>
/// <param name="contentType">MIME type of the evidence.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The chunking result with chunks and proof root.</returns>
Task<ChunkingResult> ChunkAsync(
ReadOnlyMemory<byte> evidence,
string contentType,
CancellationToken cancellationToken = default);
/// <summary>
/// Splits evidence from a stream.
/// </summary>
/// <param name="evidenceStream">Stream containing evidence.</param>
/// <param name="contentType">MIME type of the evidence.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Async enumerable of chunks as they are created.</returns>
IAsyncEnumerable<EvidenceChunk> ChunkStreamAsync(
Stream evidenceStream,
string contentType,
CancellationToken cancellationToken = default);
/// <summary>
/// Reassembles chunks into the original evidence.
/// </summary>
/// <param name="chunks">The chunks to reassemble (must be in order).</param>
/// <param name="expectedProofRoot">Expected Merkle root for verification.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The reassembled evidence bytes.</returns>
Task<byte[]> ReassembleAsync(
IEnumerable<EvidenceChunk> chunks,
string expectedProofRoot,
CancellationToken cancellationToken = default);
/// <summary>
/// Verifies a single chunk against its hash.
/// </summary>
/// <param name="chunk">The chunk to verify.</param>
/// <returns>True if the chunk is valid.</returns>
bool VerifyChunk(EvidenceChunk chunk);
/// <summary>
/// Computes the Merkle root from chunk hashes.
/// </summary>
/// <param name="chunkHashes">Ordered list of chunk hashes.</param>
/// <returns>The Merkle root.</returns>
string ComputeMerkleRoot(IEnumerable<string> chunkHashes);
}
/// <summary>
/// Result of chunking evidence.
/// </summary>
public sealed record ChunkingResult
{
/// <summary>
/// The computed Merkle root of all chunks.
/// </summary>
public required string ProofRoot { get; init; }
/// <summary>
/// The generated chunks.
/// </summary>
public required IReadOnlyList<EvidenceChunk> Chunks { get; init; }
/// <summary>
/// Total size of the original evidence.
/// </summary>
public required long TotalSize { get; init; }
}
/// <summary>
/// Default implementation of <see cref="IEvidenceChunker"/>.
/// </summary>
public sealed class EvidenceChunker : IEvidenceChunker
public sealed partial class EvidenceChunker : IEvidenceChunker
{
private readonly ProvcacheOptions _options;
private readonly TimeProvider _timeProvider;
@@ -100,226 +21,4 @@ public sealed class EvidenceChunker : IEvidenceChunker
_timeProvider = timeProvider ?? TimeProvider.System;
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
}
/// <inheritdoc />
public Task<ChunkingResult> ChunkAsync(
ReadOnlyMemory<byte> evidence,
string contentType,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(contentType);
var chunks = new List<EvidenceChunk>();
var chunkHashes = new List<string>();
var chunkSize = _options.ChunkSize;
var now = _timeProvider.GetUtcNow();
var span = evidence.Span;
var totalSize = span.Length;
var chunkIndex = 0;
for (var offset = 0; offset < totalSize; offset += chunkSize)
{
cancellationToken.ThrowIfCancellationRequested();
var remainingBytes = totalSize - offset;
var currentChunkSize = Math.Min(chunkSize, remainingBytes);
var chunkData = span.Slice(offset, currentChunkSize).ToArray();
var chunkHash = ComputeHash(chunkData);
chunks.Add(new EvidenceChunk
{
ChunkId = _guidProvider.NewGuid(),
ProofRoot = string.Empty, // Will be set after computing Merkle root
ChunkIndex = chunkIndex,
ChunkHash = chunkHash,
Blob = chunkData,
BlobSize = currentChunkSize,
ContentType = contentType,
CreatedAt = now
});
chunkHashes.Add(chunkHash);
chunkIndex++;
}
var proofRoot = ComputeMerkleRoot(chunkHashes);
// Update proof root in all chunks
var finalChunks = chunks.Select(c => c with { ProofRoot = proofRoot }).ToList();
return Task.FromResult(new ChunkingResult
{
ProofRoot = proofRoot,
Chunks = finalChunks,
TotalSize = totalSize
});
}
/// <inheritdoc />
public async IAsyncEnumerable<EvidenceChunk> ChunkStreamAsync(
Stream evidenceStream,
string contentType,
[System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(evidenceStream);
ArgumentNullException.ThrowIfNull(contentType);
var chunkSize = _options.ChunkSize;
var buffer = new byte[chunkSize];
var chunkIndex = 0;
var now = _timeProvider.GetUtcNow();
int bytesRead;
while ((bytesRead = await evidenceStream.ReadAsync(buffer, cancellationToken)) > 0)
{
var chunkData = bytesRead == chunkSize ? buffer : buffer[..bytesRead];
var chunkHash = ComputeHash(chunkData);
yield return new EvidenceChunk
{
ChunkId = _guidProvider.NewGuid(),
ProofRoot = string.Empty, // Caller must compute after all chunks
ChunkIndex = chunkIndex,
ChunkHash = chunkHash,
Blob = chunkData.ToArray(),
BlobSize = bytesRead,
ContentType = contentType,
CreatedAt = now
};
chunkIndex++;
buffer = new byte[chunkSize]; // New buffer for next chunk
}
}
/// <inheritdoc />
public Task<byte[]> ReassembleAsync(
IEnumerable<EvidenceChunk> chunks,
string expectedProofRoot,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(chunks);
ArgumentException.ThrowIfNullOrWhiteSpace(expectedProofRoot);
var orderedChunks = chunks.OrderBy(c => c.ChunkIndex).ToList();
if (orderedChunks.Count == 0)
{
throw new ArgumentException("No chunks provided.", nameof(chunks));
}
// Verify Merkle root
var chunkHashes = orderedChunks.Select(c => c.ChunkHash).ToList();
var computedRoot = ComputeMerkleRoot(chunkHashes);
if (!string.Equals(computedRoot, expectedProofRoot, StringComparison.OrdinalIgnoreCase))
{
throw new InvalidOperationException(
$"Merkle root mismatch. Expected: {expectedProofRoot}, Computed: {computedRoot}");
}
// Verify each chunk
foreach (var chunk in orderedChunks)
{
cancellationToken.ThrowIfCancellationRequested();
if (!VerifyChunk(chunk))
{
throw new InvalidOperationException(
$"Chunk {chunk.ChunkIndex} verification failed. Expected hash: {chunk.ChunkHash}");
}
}
// Reassemble
var totalSize = orderedChunks.Sum(c => c.BlobSize);
var result = new byte[totalSize];
var offset = 0;
foreach (var chunk in orderedChunks)
{
chunk.Blob.CopyTo(result, offset);
offset += chunk.BlobSize;
}
return Task.FromResult(result);
}
/// <inheritdoc />
public bool VerifyChunk(EvidenceChunk chunk)
{
ArgumentNullException.ThrowIfNull(chunk);
var computedHash = ComputeHash(chunk.Blob);
return string.Equals(computedHash, chunk.ChunkHash, StringComparison.OrdinalIgnoreCase);
}
/// <inheritdoc />
public string ComputeMerkleRoot(IEnumerable<string> chunkHashes)
{
ArgumentNullException.ThrowIfNull(chunkHashes);
var hashes = chunkHashes.ToList();
if (hashes.Count == 0)
{
// Empty Merkle tree
return ComputeHash([]);
}
if (hashes.Count == 1)
{
return hashes[0];
}
// Build Merkle tree bottom-up
var currentLevel = hashes.Select(h => HexToBytes(h)).ToList();
while (currentLevel.Count > 1)
{
var nextLevel = new List<byte[]>();
for (var i = 0; i < currentLevel.Count; i += 2)
{
byte[] combined;
if (i + 1 < currentLevel.Count)
{
// Pair exists - concatenate and hash
combined = new byte[currentLevel[i].Length + currentLevel[i + 1].Length];
currentLevel[i].CopyTo(combined, 0);
currentLevel[i + 1].CopyTo(combined, currentLevel[i].Length);
}
else
{
// Odd node - duplicate itself
combined = new byte[currentLevel[i].Length * 2];
currentLevel[i].CopyTo(combined, 0);
currentLevel[i].CopyTo(combined, currentLevel[i].Length);
}
nextLevel.Add(SHA256.HashData(combined));
}
currentLevel = nextLevel;
}
return $"sha256:{Convert.ToHexStringLower(currentLevel[0])}";
}
private static string ComputeHash(ReadOnlySpan<byte> data)
{
var hash = SHA256.HashData(data);
return $"sha256:{Convert.ToHexStringLower(hash)}";
}
private static byte[] HexToBytes(string hash)
{
// Strip sha256: prefix if present
var hex = hash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
? hash[7..]
: hash;
return Convert.FromHexString(hex);
}
}

View File

@@ -0,0 +1,64 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
/// <summary>
/// Interface for splitting large evidence into fixed-size chunks
/// and reassembling them with Merkle verification.
/// </summary>
/// <summary>
/// Splits large evidence payloads into fixed-size chunks and reassembles them,
/// using a Merkle tree over chunk hashes for verification.
/// </summary>
public interface IEvidenceChunker
{
    /// <summary>
    /// Splits in-memory evidence into chunks.
    /// </summary>
    /// <param name="evidence">Evidence bytes to split.</param>
    /// <param name="contentType">MIME type of the evidence.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    /// <returns>The produced chunks together with their Merkle proof root.</returns>
    Task<ChunkingResult> ChunkAsync(
        ReadOnlyMemory<byte> evidence,
        string contentType,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Splits evidence read from a stream, yielding chunks as they are produced.
    /// </summary>
    /// <param name="evidenceStream">Stream supplying the evidence bytes.</param>
    /// <param name="contentType">MIME type of the evidence.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    /// <returns>Async sequence of chunks in index order.</returns>
    IAsyncEnumerable<EvidenceChunk> ChunkStreamAsync(
        Stream evidenceStream,
        string contentType,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Reassembles chunks into the original evidence, verifying the Merkle root
    /// and every individual chunk hash.
    /// </summary>
    /// <param name="chunks">Chunks to reassemble (must be in order).</param>
    /// <param name="expectedProofRoot">Expected Merkle root used for verification.</param>
    /// <param name="cancellationToken">Token to cancel the operation.</param>
    /// <returns>The reconstructed evidence bytes.</returns>
    Task<byte[]> ReassembleAsync(
        IEnumerable<EvidenceChunk> chunks,
        string expectedProofRoot,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Checks a single chunk's blob against its recorded hash.
    /// </summary>
    /// <param name="chunk">The chunk to verify.</param>
    /// <returns><c>true</c> when the blob hashes to the recorded value.</returns>
    bool VerifyChunk(EvidenceChunk chunk);

    /// <summary>
    /// Computes the Merkle root over an ordered sequence of chunk hashes.
    /// </summary>
    /// <param name="chunkHashes">Chunk hashes in chunk-index order.</param>
    /// <returns>The Merkle root string.</returns>
    string ComputeMerkleRoot(IEnumerable<string> chunkHashes);
}

View File

@@ -0,0 +1,73 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Provcache;
public sealed partial class DecisionDigestBuilder
{
    /// <summary>
    /// Builds the final <see cref="DecisionDigest"/> from the accumulated components.
    /// </summary>
    /// <returns>The constructed digest.</returns>
    /// <exception cref="InvalidOperationException">Thrown when any required component is missing.</exception>
    public DecisionDigest Build()
    {
        ValidateRequiredComponents();

        return new DecisionDigest
        {
            DigestVersion = _options.DigestVersion,
            VeriKey = _veriKey!,
            VerdictHash = _verdictHash!,
            ProofRoot = _proofRoot!,
            ReplaySeed = _replaySeed!,
            CreatedAt = _createdAt!.Value,
            ExpiresAt = _expiresAt!.Value,
            TrustScore = _trustScore!.Value,
            TrustScoreBreakdown = _trustScoreBreakdown
        };
    }

    /// <summary>
    /// Clears all accumulated state so the builder can be reused.
    /// </summary>
    public DecisionDigestBuilder Reset()
    {
        _veriKey = null;
        _verdictHash = null;
        _proofRoot = null;
        _replaySeed = null;
        _createdAt = null;
        _expiresAt = null;
        _trustScore = null;
        _trustScoreBreakdown = null;
        return this;
    }

    // Throws when any required With* component is still unset, naming every missing one.
    private void ValidateRequiredComponents()
    {
        var missing = new List<string>();

        if (string.IsNullOrWhiteSpace(_veriKey)) { missing.Add("VeriKey"); }
        if (string.IsNullOrWhiteSpace(_verdictHash)) { missing.Add("VerdictHash"); }
        if (string.IsNullOrWhiteSpace(_proofRoot)) { missing.Add("ProofRoot"); }
        if (_replaySeed is null) { missing.Add("ReplaySeed"); }
        if (_createdAt is null) { missing.Add("CreatedAt"); }
        if (_expiresAt is null) { missing.Add("ExpiresAt"); }
        if (_trustScore is null) { missing.Add("TrustScore"); }

        if (missing.Count > 0)
        {
            throw new InvalidOperationException(
                $"Cannot build DecisionDigest: missing required components: {string.Join(", ", missing)}. " +
                "Use the With* methods to set all required components before calling Build().");
        }
    }
}

View File

@@ -0,0 +1,21 @@
using System;
using System.Security.Cryptography;
namespace StellaOps.Provcache;
public sealed partial class DecisionDigestBuilder
{
    // Hashes the given bytes with SHA-256 and returns the "sha256:<hex>" form.
    private static string ComputeHash(ReadOnlySpan<byte> data)
    {
        var digest = SHA256.HashData(data);
        return $"sha256:{Convert.ToHexStringLower(digest)}";
    }

    // Removes a leading "sha256:" marker, if any, leaving bare hex.
    private static string StripPrefix(string hash)
        => hash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) ? hash[7..] : hash;
}

View File

@@ -0,0 +1,64 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Provcache;
public sealed partial class DecisionDigestBuilder
{
    /// <summary>
    /// Sets the proof root (Merkle root of evidence) directly.
    /// </summary>
    public DecisionDigestBuilder WithProofRoot(string proofRoot)
    {
        _proofRoot = proofRoot ?? throw new ArgumentNullException(nameof(proofRoot));
        return this;
    }
    /// <summary>
    /// Computes proof root from a list of evidence chunk hashes.
    /// Builds a simple binary Merkle tree for verification.
    /// </summary>
    /// <param name="evidenceChunkHashes">Ordered list of evidence chunk hashes.</param>
    public DecisionDigestBuilder WithEvidenceChunks(IReadOnlyList<string> evidenceChunkHashes)
    {
        ArgumentNullException.ThrowIfNull(evidenceChunkHashes);
        if (evidenceChunkHashes.Count == 0)
        {
            // Sentinel root for the no-evidence case: hash of the literal "empty-proof".
            _proofRoot = ComputeHash(Encoding.UTF8.GetBytes("empty-proof"));
            return this;
        }
        // Leaves are the decoded hash bytes ("sha256:" prefixes stripped).
        var currentLevel = evidenceChunkHashes
            .Select(h => Convert.FromHexString(StripPrefix(h)))
            .ToList();
        // NOTE(review): an odd trailing node is promoted to the next level unchanged
        // here, whereas EvidenceChunker.ComputeMerkleRoot pairs an odd node with a copy
        // of itself — the two implementations produce different roots for odd counts.
        // Confirm which scheme is canonical before relying on cross-component roots.
        while (currentLevel.Count > 1)
        {
            var nextLevel = new List<byte[]>();
            for (int i = 0; i < currentLevel.Count; i += 2)
            {
                if (i + 1 < currentLevel.Count)
                {
                    // Pair exists: hash the concatenation of left and right.
                    var combined = new byte[currentLevel[i].Length + currentLevel[i + 1].Length];
                    currentLevel[i].CopyTo(combined, 0);
                    currentLevel[i + 1].CopyTo(combined, currentLevel[i].Length);
                    nextLevel.Add(SHA256.HashData(combined));
                }
                else
                {
                    // Odd node: promoted as-is (see NOTE above).
                    nextLevel.Add(currentLevel[i]);
                }
            }
            currentLevel = nextLevel;
        }
        // Single-chunk inputs reach here without re-hashing: the root is the lone leaf,
        // re-encoded as lowercase hex with the "sha256:" prefix.
        _proofRoot = $"sha256:{Convert.ToHexStringLower(currentLevel[0])}";
        return this;
    }
}

View File

@@ -0,0 +1,34 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace StellaOps.Provcache;
public sealed partial class DecisionDigestBuilder
{
    /// <summary>
    /// Sets the replay seed directly.
    /// </summary>
    public DecisionDigestBuilder WithReplaySeed(ReplaySeed replaySeed)
    {
        _replaySeed = replaySeed ?? throw new ArgumentNullException(nameof(replaySeed));
        return this;
    }

    /// <summary>
    /// Constructs the replay seed from feed and rule identifiers.
    /// Null identifier sequences are tolerated and treated as empty.
    /// </summary>
    public DecisionDigestBuilder WithReplaySeed(
        IEnumerable<string> feedIds,
        IEnumerable<string> ruleIds,
        DateTimeOffset? frozenEpoch = null)
    {
        var seed = new ReplaySeed
        {
            FeedIds = feedIds?.ToList() ?? [],
            RuleIds = ruleIds?.ToList() ?? [],
            FrozenEpoch = frozenEpoch
        };
        _replaySeed = seed;
        return this;
    }
}

View File

@@ -0,0 +1,26 @@
using System;
namespace StellaOps.Provcache;
public sealed partial class DecisionDigestBuilder
{
    /// <summary>
    /// Sets explicit creation and expiry timestamps.
    /// </summary>
    public DecisionDigestBuilder WithTimestamps(DateTimeOffset createdAt, DateTimeOffset expiresAt)
    {
        // NOTE(review): no check that expiresAt follows createdAt — confirm callers guarantee it.
        (_createdAt, _expiresAt) = (createdAt, expiresAt);
        return this;
    }

    /// <summary>
    /// Sets the creation time to "now" (via the injected <see cref="TimeProvider"/>)
    /// and the expiry to now plus the default TTL from options.
    /// </summary>
    public DecisionDigestBuilder WithDefaultTimestamps()
    {
        var created = _timeProvider.GetUtcNow();
        _createdAt = created;
        _expiresAt = created.Add(_options.DefaultTtl);
        return this;
    }
}

View File

@@ -0,0 +1,59 @@
using System;
namespace StellaOps.Provcache;
public sealed partial class DecisionDigestBuilder
{
    /// <summary>
    /// Sets the trust score directly.
    /// </summary>
    /// <param name="trustScore">Trust score in the inclusive range 0-100.</param>
    /// <exception cref="ArgumentOutOfRangeException">Thrown when the score is outside 0-100.</exception>
    public DecisionDigestBuilder WithTrustScore(int trustScore)
    {
        if (trustScore is < 0 or > 100)
        {
            throw new ArgumentOutOfRangeException(nameof(trustScore), "Trust score must be between 0 and 100.");
        }
        _trustScore = trustScore;
        return this;
    }

    /// <summary>
    /// Stores the breakdown and derives the total trust score from it.
    /// </summary>
    /// <param name="breakdown">The trust score breakdown with component scores.</param>
    public DecisionDigestBuilder WithTrustScoreBreakdown(TrustScoreBreakdown breakdown)
    {
        ArgumentNullException.ThrowIfNull(breakdown);
        _trustScoreBreakdown = breakdown;
        _trustScore = breakdown.ComputeTotal();
        return this;
    }

    /// <summary>
    /// Builds a default breakdown from component scores, derives the weighted total,
    /// and clamps it to 0-100 for safety.
    /// </summary>
    /// <param name="reachabilityScore">Reachability analysis coverage (0-100).</param>
    /// <param name="sbomCompletenessScore">SBOM completeness (0-100).</param>
    /// <param name="vexCoverageScore">VEX statement coverage (0-100).</param>
    /// <param name="policyFreshnessScore">Policy freshness (0-100).</param>
    /// <param name="signerTrustScore">Signer trust level (0-100).</param>
    public DecisionDigestBuilder WithTrustScore(
        int reachabilityScore,
        int sbomCompletenessScore,
        int vexCoverageScore,
        int policyFreshnessScore,
        int signerTrustScore)
    {
        _trustScoreBreakdown = TrustScoreBreakdown.CreateDefault(
            reachabilityScore,
            sbomCompletenessScore,
            vexCoverageScore,
            policyFreshnessScore,
            signerTrustScore);
        _trustScore = Math.Clamp(_trustScoreBreakdown.ComputeTotal(), 0, 100);
        return this;
    }
}

View File

@@ -0,0 +1,50 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace StellaOps.Provcache;
public sealed partial class DecisionDigestBuilder
{
    /// <summary>
    /// Sets the verdict hash directly.
    /// </summary>
    public DecisionDigestBuilder WithVerdictHash(string verdictHash)
    {
        _verdictHash = verdictHash ?? throw new ArgumentNullException(nameof(verdictHash));
        return this;
    }

    /// <summary>
    /// Derives the verdict hash from dispositions. Entries are ordinally sorted by key
    /// and serialized as "key=value|key=value|..." so the hash is deterministic.
    /// </summary>
    /// <param name="dispositions">Dictionary of finding ID to disposition.</param>
    public DecisionDigestBuilder WithDispositions(IReadOnlyDictionary<string, string> dispositions)
    {
        ArgumentNullException.ThrowIfNull(dispositions);

        if (dispositions.Count == 0)
        {
            // Sentinel hash for the no-dispositions case.
            _verdictHash = ComputeHash(Encoding.UTF8.GetBytes("empty-verdict"));
            return this;
        }

        var canonical = string.Join('|', dispositions
            .OrderBy(kvp => kvp.Key, StringComparer.Ordinal)
            .Select(kvp => $"{kvp.Key}={kvp.Value}"));
        _verdictHash = ComputeHash(Encoding.UTF8.GetBytes(canonical));
        return this;
    }
}

View File

@@ -0,0 +1,25 @@
using System;
namespace StellaOps.Provcache;
public sealed partial class DecisionDigestBuilder
{
    /// <summary>
    /// Sets the VeriKey for this digest.
    /// </summary>
    public DecisionDigestBuilder WithVeriKey(string veriKey)
    {
        ArgumentNullException.ThrowIfNull(veriKey);
        _veriKey = veriKey;
        return this;
    }

    /// <summary>
    /// Sets the VeriKey by building it from the supplied builder.
    /// </summary>
    public DecisionDigestBuilder WithVeriKey(VeriKeyBuilder builder)
    {
        ArgumentNullException.ThrowIfNull(builder);
        _veriKey = builder.Build();
        return this;
    }
}

View File

@@ -1,6 +1,4 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System;
namespace StellaOps.Provcache;
@@ -8,7 +6,7 @@ namespace StellaOps.Provcache;
/// Builder for constructing <see cref="DecisionDigest"/> from evaluation results.
/// Ensures deterministic digest computation for cache consistency.
/// </summary>
public sealed class DecisionDigestBuilder
public sealed partial class DecisionDigestBuilder
{
private string? _veriKey;
private string? _verdictHash;
@@ -38,306 +36,4 @@ public sealed class DecisionDigestBuilder
_options = options ?? throw new ArgumentNullException(nameof(options));
_timeProvider = timeProvider ?? TimeProvider.System;
}
/// <summary>
/// Sets the VeriKey for this digest.
/// </summary>
public DecisionDigestBuilder WithVeriKey(string veriKey)
{
_veriKey = veriKey ?? throw new ArgumentNullException(nameof(veriKey));
return this;
}
/// <summary>
/// Sets the VeriKey from a builder.
/// </summary>
public DecisionDigestBuilder WithVeriKey(VeriKeyBuilder builder)
{
ArgumentNullException.ThrowIfNull(builder);
_veriKey = builder.Build();
return this;
}
/// <summary>
/// Sets the verdict hash directly.
/// </summary>
public DecisionDigestBuilder WithVerdictHash(string verdictHash)
{
_verdictHash = verdictHash ?? throw new ArgumentNullException(nameof(verdictHash));
return this;
}
/// <summary>
/// Computes verdict hash from sorted dispositions.
/// Dispositions are sorted by key for deterministic hashing.
/// </summary>
/// <param name="dispositions">Dictionary of finding ID to disposition.</param>
public DecisionDigestBuilder WithDispositions(IReadOnlyDictionary<string, string> dispositions)
{
ArgumentNullException.ThrowIfNull(dispositions);
// Sort by key for deterministic hash
var sorted = dispositions
.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)
.ToList();
if (sorted.Count == 0)
{
_verdictHash = ComputeHash(Encoding.UTF8.GetBytes("empty-verdict"));
return this;
}
// Build deterministic string: key1=value1|key2=value2|...
var sb = new StringBuilder();
foreach (var (key, value) in sorted)
{
if (sb.Length > 0) sb.Append('|');
sb.Append(key);
sb.Append('=');
sb.Append(value);
}
_verdictHash = ComputeHash(Encoding.UTF8.GetBytes(sb.ToString()));
return this;
}
/// <summary>
/// Sets the proof root (Merkle root of evidence) directly.
/// </summary>
public DecisionDigestBuilder WithProofRoot(string proofRoot)
{
_proofRoot = proofRoot ?? throw new ArgumentNullException(nameof(proofRoot));
return this;
}
/// <summary>
/// Computes proof root from a list of evidence chunk hashes.
/// Builds a simple binary Merkle tree for verification.
/// </summary>
/// <param name="evidenceChunkHashes">Ordered list of evidence chunk hashes.</param>
/// <returns>This builder, for chaining.</returns>
/// <exception cref="ArgumentNullException">Thrown when the hash list is null.</exception>
public DecisionDigestBuilder WithEvidenceChunks(IReadOnlyList<string> evidenceChunkHashes)
{
    ArgumentNullException.ThrowIfNull(evidenceChunkHashes);

    // No evidence: hash a fixed sentinel so the proof root is always present.
    if (evidenceChunkHashes.Count == 0)
    {
        _proofRoot = ComputeHash(Encoding.UTF8.GetBytes("empty-proof"));
        return this;
    }

    // Leaf level: decode each hash ("sha256:" prefix optional) into raw bytes.
    var level = new List<byte[]>(evidenceChunkHashes.Count);
    foreach (var hash in evidenceChunkHashes)
    {
        level.Add(Convert.FromHexString(StripPrefix(hash)));
    }

    // Collapse pairwise until a single root remains. An odd trailing node is
    // promoted unchanged to the next level (not duplicated).
    while (level.Count > 1)
    {
        var parents = new List<byte[]>((level.Count + 1) / 2);
        for (var i = 0; i + 1 < level.Count; i += 2)
        {
            byte[] combined = new byte[level[i].Length + level[i + 1].Length];
            level[i].CopyTo(combined, 0);
            level[i + 1].CopyTo(combined, level[i].Length);
            parents.Add(SHA256.HashData(combined));
        }
        if (level.Count % 2 == 1)
        {
            parents.Add(level[^1]);
        }
        level = parents;
    }

    _proofRoot = $"sha256:{Convert.ToHexStringLower(level[0])}";
    return this;
}
/// <summary>
/// Sets the replay seed directly.
/// </summary>
/// <param name="replaySeed">The replay seed to store.</param>
/// <returns>This builder, for chaining.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="replaySeed"/> is null.</exception>
public DecisionDigestBuilder WithReplaySeed(ReplaySeed replaySeed)
{
    ArgumentNullException.ThrowIfNull(replaySeed);
    _replaySeed = replaySeed;
    return this;
}
/// <summary>
/// Builds replay seed from feed and rule identifiers.
/// </summary>
/// <param name="feedIds">Feed identifiers; null is treated as empty.</param>
/// <param name="ruleIds">Rule identifiers; null is treated as empty.</param>
/// <param name="frozenEpoch">Optional frozen epoch timestamp for the seed.</param>
/// <returns>This builder, for chaining.</returns>
public DecisionDigestBuilder WithReplaySeed(
    IEnumerable<string>? feedIds,
    IEnumerable<string>? ruleIds,
    DateTimeOffset? frozenEpoch = null)
{
    // The parameters are declared nullable to match the null-tolerant handling
    // below; they were previously non-nullable yet still null-coalesced, which
    // contradicted the nullable-reference-type annotations.
    _replaySeed = new ReplaySeed
    {
        FeedIds = feedIds?.ToList() ?? [],
        RuleIds = ruleIds?.ToList() ?? [],
        FrozenEpoch = frozenEpoch
    };
    return this;
}
/// <summary>
/// Sets explicit timestamps for created and expires.
/// </summary>
/// <param name="createdAt">When the digest was created.</param>
/// <param name="expiresAt">When the digest expires.</param>
/// <returns>This builder, for chaining.</returns>
public DecisionDigestBuilder WithTimestamps(DateTimeOffset createdAt, DateTimeOffset expiresAt)
{
    // NOTE(review): no check that expiresAt follows createdAt — confirm callers
    // never need that invariant enforced here.
    (_createdAt, _expiresAt) = (createdAt, expiresAt);
    return this;
}
/// <summary>
/// Sets timestamps using the default TTL from options.
/// </summary>
/// <returns>This builder, for chaining.</returns>
public DecisionDigestBuilder WithDefaultTimestamps()
{
    // Capture "now" once so both timestamps derive from the same instant.
    var now = _timeProvider.GetUtcNow();
    _createdAt = now;
    _expiresAt = now + _options.DefaultTtl;
    return this;
}
/// <summary>
/// Sets the trust score directly.
/// </summary>
/// <param name="trustScore">Trust score (0-100).</param>
/// <returns>This builder, for chaining.</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown when the score is outside 0-100.</exception>
public DecisionDigestBuilder WithTrustScore(int trustScore)
{
    if (trustScore is < 0 or > 100)
    {
        throw new ArgumentOutOfRangeException(nameof(trustScore), "Trust score must be between 0 and 100.");
    }

    _trustScore = trustScore;
    return this;
}
/// <summary>
/// Sets the trust score from a breakdown, computing the total automatically.
/// </summary>
/// <param name="breakdown">The trust score breakdown with component scores.</param>
/// <returns>This builder, for chaining.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="breakdown"/> is null.</exception>
public DecisionDigestBuilder WithTrustScoreBreakdown(TrustScoreBreakdown breakdown)
{
    ArgumentNullException.ThrowIfNull(breakdown);
    _trustScoreBreakdown = breakdown;
    // Clamp to the valid 0-100 range for consistency with the component-score
    // overload of WithTrustScore, which clamps its computed total; previously
    // this path could store an out-of-range value that WithTrustScore(int)
    // would have rejected.
    _trustScore = Math.Clamp(breakdown.ComputeTotal(), 0, 100);
    return this;
}
/// <summary>
/// Computes trust score from component scores using weighted formula,
/// and stores the breakdown for API responses.
/// </summary>
/// <param name="reachabilityScore">Reachability analysis coverage (0-100).</param>
/// <param name="sbomCompletenessScore">SBOM completeness (0-100).</param>
/// <param name="vexCoverageScore">VEX statement coverage (0-100).</param>
/// <param name="policyFreshnessScore">Policy freshness (0-100).</param>
/// <param name="signerTrustScore">Signer trust level (0-100).</param>
/// <returns>This builder, for chaining.</returns>
public DecisionDigestBuilder WithTrustScore(
    int reachabilityScore,
    int sbomCompletenessScore,
    int vexCoverageScore,
    int policyFreshnessScore,
    int signerTrustScore)
{
    // Breakdown with standard weights; total is clamped to the valid range.
    _trustScoreBreakdown = TrustScoreBreakdown.CreateDefault(
        reachabilityScore,
        sbomCompletenessScore,
        vexCoverageScore,
        policyFreshnessScore,
        signerTrustScore);
    _trustScore = Math.Clamp(_trustScoreBreakdown.ComputeTotal(), 0, 100);
    return this;
}
/// <summary>
/// Builds the final DecisionDigest.
/// </summary>
/// <returns>The constructed DecisionDigest.</returns>
/// <exception cref="InvalidOperationException">If required components are missing.</exception>
public DecisionDigest Build()
{
// Throws if any required component is unset, so the null-forgiving (!) and
// .Value accesses below cannot fail at runtime.
ValidateRequiredComponents();
return new DecisionDigest
{
DigestVersion = _options.DigestVersion,
VeriKey = _veriKey!,
VerdictHash = _verdictHash!,
ProofRoot = _proofRoot!,
ReplaySeed = _replaySeed!,
CreatedAt = _createdAt!.Value,
ExpiresAt = _expiresAt!.Value,
TrustScore = _trustScore!.Value,
// Breakdown is optional and may legitimately be null (not validated above).
TrustScoreBreakdown = _trustScoreBreakdown
};
}
/// <summary>
/// Resets the builder to its initial state.
/// </summary>
/// <returns>This builder, for chaining.</returns>
public DecisionDigestBuilder Reset()
{
    // Clear every accumulated component so the builder can be reused.
    _veriKey = _verdictHash = _proofRoot = null;
    _replaySeed = null;
    _createdAt = _expiresAt = null;
    _trustScore = null;
    _trustScoreBreakdown = null;
    return this;
}
/// <summary>
/// Throws if any component required by <see cref="Build"/> has not been set.
/// </summary>
/// <exception cref="InvalidOperationException">Listing every missing component.</exception>
private void ValidateRequiredComponents()
{
    var missing = new List<string>();

    // Collect all absent components so the exception names every one at once.
    void Require(bool present, string name)
    {
        if (!present)
        {
            missing.Add(name);
        }
    }

    Require(!string.IsNullOrWhiteSpace(_veriKey), "VeriKey");
    Require(!string.IsNullOrWhiteSpace(_verdictHash), "VerdictHash");
    Require(!string.IsNullOrWhiteSpace(_proofRoot), "ProofRoot");
    Require(_replaySeed is not null, "ReplaySeed");
    Require(_createdAt.HasValue, "CreatedAt");
    Require(_expiresAt.HasValue, "ExpiresAt");
    Require(_trustScore.HasValue, "TrustScore");

    if (missing.Count == 0)
    {
        return;
    }

    throw new InvalidOperationException(
        $"Cannot build DecisionDigest: missing required components: {string.Join(", ", missing)}. " +
        "Use the With* methods to set all required components before calling Build().");
}
/// <summary>
/// Hashes <paramref name="data"/> with SHA-256 and formats it as "sha256:&lt;lowercase-hex&gt;".
/// </summary>
private static string ComputeHash(ReadOnlySpan<byte> data)
{
    var digest = SHA256.HashData(data);
    return $"sha256:{Convert.ToHexStringLower(digest)}";
}
/// <summary>
/// Removes a leading "sha256:" prefix (any casing) from a hash string, if present.
/// </summary>
private static string StripPrefix(string hash)
{
    const string Prefix = "sha256:";
    return hash.StartsWith(Prefix, StringComparison.OrdinalIgnoreCase)
        ? hash[Prefix.Length..]
        : hash;
}
}

View File

@@ -77,34 +77,3 @@ public sealed class ProvRevocationEntity
[Column("metadata", TypeName = "jsonb")]
public string? Metadata { get; set; }
}
/// <summary>
/// Types of revocation events.
/// </summary>
public static class RevocationTypes
{
/// <summary>
/// Signer certificate revoked.
/// </summary>
public const string Signer = "signer";
/// <summary>
/// Feed epoch advanced (older epochs revoked).
/// </summary>
public const string FeedEpoch = "feed_epoch";
/// <summary>
/// Policy bundle updated/revoked.
/// </summary>
public const string Policy = "policy";
/// <summary>
/// Explicit revocation of specific entry.
/// </summary>
public const string Explicit = "explicit";
/// <summary>
/// TTL expiration (for audit completeness).
/// </summary>
public const string Expiration = "expiration";
}

View File

@@ -0,0 +1,64 @@
using System;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace StellaOps.Provcache.Entities;
/// <summary>
/// EF Core entity for provcache.prov_evidence_chunks table.
/// Rows for one proof are grouped by <see cref="ProofRoot"/> and ordered by <see cref="ChunkIndex"/>.
/// </summary>
[Table("prov_evidence_chunks", Schema = "provcache")]
public sealed class ProvcacheEvidenceChunkEntity
{
/// <summary>
/// Unique chunk identifier.
/// </summary>
[Key]
[Column("chunk_id")]
public Guid ChunkId { get; set; }
/// <summary>
/// Proof root this chunk belongs to.
/// </summary>
[Column("proof_root")]
[MaxLength(128)]
public required string ProofRoot { get; set; }
/// <summary>
/// Index of this chunk in the Merkle tree.
/// </summary>
[Column("chunk_index")]
public int ChunkIndex { get; set; }
/// <summary>
/// Hash of the chunk content.
/// </summary>
[Column("chunk_hash")]
[MaxLength(128)]
public required string ChunkHash { get; set; }
/// <summary>
/// Chunk content.
/// </summary>
[Column("blob")]
public required byte[] Blob { get; set; }
/// <summary>
/// Size of the blob in bytes.
/// NOTE(review): presumably kept equal to Blob.Length — confirm writers maintain that invariant.
/// </summary>
[Column("blob_size")]
public int BlobSize { get; set; }
/// <summary>
/// MIME type of the content.
/// </summary>
[Column("content_type")]
[MaxLength(128)]
public required string ContentType { get; set; }
/// <summary>
/// UTC timestamp when chunk was created.
/// </summary>
[Column("created_at")]
public DateTimeOffset CreatedAt { get; set; }
}

View File

@@ -0,0 +1,31 @@
using System;
using System.ComponentModel.DataAnnotations.Schema;
namespace StellaOps.Provcache.Entities;
// Lifecycle/timestamp columns for ProvcacheItemEntity (partial declaration;
// the remaining columns live in the main declaration file).
public sealed partial class ProvcacheItemEntity
{
/// <summary>
/// UTC timestamp when entry was created.
/// </summary>
[Column("created_at")]
public DateTimeOffset CreatedAt { get; set; }
/// <summary>
/// UTC timestamp when entry expires.
/// </summary>
[Column("expires_at")]
public DateTimeOffset ExpiresAt { get; set; }
/// <summary>
/// UTC timestamp when entry was last updated.
/// </summary>
[Column("updated_at")]
public DateTimeOffset UpdatedAt { get; set; }
/// <summary>
/// UTC timestamp when entry was last accessed.
/// Nullable — presumably null until the first access; confirm against readers.
/// </summary>
[Column("last_accessed_at")]
public DateTimeOffset? LastAccessedAt { get; set; }
}

View File

@@ -7,7 +7,7 @@ namespace StellaOps.Provcache.Entities;
/// EF Core entity for provcache.provcache_items table.
/// </summary>
[Table("provcache_items", Schema = "provcache")]
public sealed class ProvcacheItemEntity
public sealed partial class ProvcacheItemEntity
{
/// <summary>
/// Composite cache key (VeriKey).
@@ -78,140 +78,4 @@ public sealed class ProvcacheItemEntity
[Column("hit_count")]
public long HitCount { get; set; }
/// <summary>
/// UTC timestamp when entry was created.
/// </summary>
[Column("created_at")]
public DateTimeOffset CreatedAt { get; set; }
/// <summary>
/// UTC timestamp when entry expires.
/// </summary>
[Column("expires_at")]
public DateTimeOffset ExpiresAt { get; set; }
/// <summary>
/// UTC timestamp when entry was last updated.
/// </summary>
[Column("updated_at")]
public DateTimeOffset UpdatedAt { get; set; }
/// <summary>
/// UTC timestamp when entry was last accessed.
/// </summary>
[Column("last_accessed_at")]
public DateTimeOffset? LastAccessedAt { get; set; }
}
/// <summary>
/// EF Core entity for provcache.prov_evidence_chunks table.
/// </summary>
[Table("prov_evidence_chunks", Schema = "provcache")]
public sealed class ProvcacheEvidenceChunkEntity
{
/// <summary>
/// Unique chunk identifier.
/// </summary>
[Key]
[Column("chunk_id")]
public Guid ChunkId { get; set; }
/// <summary>
/// Proof root this chunk belongs to.
/// </summary>
[Column("proof_root")]
[MaxLength(128)]
public required string ProofRoot { get; set; }
/// <summary>
/// Index of this chunk in the Merkle tree.
/// </summary>
[Column("chunk_index")]
public int ChunkIndex { get; set; }
/// <summary>
/// Hash of the chunk content.
/// </summary>
[Column("chunk_hash")]
[MaxLength(128)]
public required string ChunkHash { get; set; }
/// <summary>
/// Chunk content.
/// </summary>
[Column("blob")]
public required byte[] Blob { get; set; }
/// <summary>
/// Size of the blob in bytes.
/// </summary>
[Column("blob_size")]
public int BlobSize { get; set; }
/// <summary>
/// MIME type of the content.
/// </summary>
[Column("content_type")]
[MaxLength(128)]
public required string ContentType { get; set; }
/// <summary>
/// UTC timestamp when chunk was created.
/// </summary>
[Column("created_at")]
public DateTimeOffset CreatedAt { get; set; }
}
/// <summary>
/// EF Core entity for provcache.prov_revocations table.
/// </summary>
[Table("prov_revocations", Schema = "provcache")]
public sealed class ProvcacheRevocationEntity
{
/// <summary>
/// Unique revocation identifier.
/// </summary>
[Key]
[Column("revocation_id")]
public Guid RevocationId { get; set; }
/// <summary>
/// Type of revocation (policy, signer, feed, pattern).
/// </summary>
[Column("revocation_type")]
[MaxLength(64)]
public required string RevocationType { get; set; }
/// <summary>
/// Target hash that was revoked.
/// </summary>
[Column("target_hash")]
[MaxLength(256)]
public required string TargetHash { get; set; }
/// <summary>
/// Reason for revocation.
/// </summary>
[Column("reason")]
[MaxLength(512)]
public string? Reason { get; set; }
/// <summary>
/// Actor who initiated the revocation.
/// </summary>
[Column("actor")]
[MaxLength(256)]
public string? Actor { get; set; }
/// <summary>
/// Number of entries affected by the revocation.
/// </summary>
[Column("entries_affected")]
public long EntriesAffected { get; set; }
/// <summary>
/// UTC timestamp when revocation occurred.
/// </summary>
[Column("created_at")]
public DateTimeOffset CreatedAt { get; set; }
}

View File

@@ -0,0 +1,59 @@
using System;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
namespace StellaOps.Provcache.Entities;
/// <summary>
/// EF Core entity for provcache.prov_revocations table.
/// </summary>
[Table("prov_revocations", Schema = "provcache")]
public sealed class ProvcacheRevocationEntity
{
/// <summary>
/// Unique revocation identifier.
/// </summary>
[Key]
[Column("revocation_id")]
public Guid RevocationId { get; set; }
/// <summary>
/// Type of revocation; see <see cref="RevocationTypes"/> for the known values
/// (signer, feed_epoch, policy, explicit, expiration).
/// </summary>
[Column("revocation_type")]
[MaxLength(64)]
public required string RevocationType { get; set; }
/// <summary>
/// Target hash that was revoked.
/// </summary>
[Column("target_hash")]
[MaxLength(256)]
public required string TargetHash { get; set; }
/// <summary>
/// Reason for revocation.
/// </summary>
[Column("reason")]
[MaxLength(512)]
public string? Reason { get; set; }
/// <summary>
/// Actor who initiated the revocation.
/// </summary>
[Column("actor")]
[MaxLength(256)]
public string? Actor { get; set; }
/// <summary>
/// Number of entries affected by the revocation.
/// </summary>
[Column("entries_affected")]
public long EntriesAffected { get; set; }
/// <summary>
/// UTC timestamp when revocation occurred.
/// </summary>
[Column("created_at")]
public DateTimeOffset CreatedAt { get; set; }
}

View File

@@ -0,0 +1,32 @@
namespace StellaOps.Provcache.Entities;
/// <summary>
/// Types of revocation events.
/// Intended as the value set for <see cref="ProvcacheRevocationEntity.RevocationType"/>.
/// </summary>
public static class RevocationTypes
{
/// <summary>
/// Signer certificate revoked.
/// </summary>
public const string Signer = "signer";
/// <summary>
/// Feed epoch advanced (older epochs revoked).
/// </summary>
public const string FeedEpoch = "feed_epoch";
/// <summary>
/// Policy bundle updated/revoked.
/// </summary>
public const string Policy = "policy";
/// <summary>
/// Explicit revocation of specific entry.
/// </summary>
public const string Explicit = "explicit";
/// <summary>
/// TTL expiration (for audit completeness).
/// </summary>
public const string Expiration = "expiration";
}

View File

@@ -0,0 +1,57 @@
using System;
using StellaOps.Determinism;
namespace StellaOps.Provcache.Events;
public sealed partial record FeedEpochAdvancedEvent
{
/// <summary>
/// Creates a new FeedEpochAdvancedEvent.
/// </summary>
/// <param name="feedId">The feed identifier.</param>
/// <param name="previousEpoch">The previous epoch identifier.</param>
/// <param name="newEpoch">The new epoch identifier.</param>
/// <param name="effectiveAt">When the new epoch became effective.</param>
/// <param name="advisoriesAdded">Number of advisories added (for metrics).</param>
/// <param name="advisoriesModified">Number of advisories modified (for metrics).</param>
/// <param name="advisoriesWithdrawn">Number of advisories withdrawn (for metrics).</param>
/// <param name="tenantId">Tenant ID if multi-tenant.</param>
/// <param name="correlationId">Correlation ID for tracing.</param>
/// <param name="eventId">Optional event ID (defaults to new GUID).</param>
/// <param name="timestamp">Optional timestamp (defaults to current UTC time).</param>
/// <param name="guidProvider">Optional GUID provider for deterministic IDs.</param>
/// <param name="timeProvider">Optional time provider for deterministic timestamps.</param>
/// <returns>A populated <see cref="FeedEpochAdvancedEvent"/>.</returns>
public static FeedEpochAdvancedEvent Create(
string feedId,
string previousEpoch,
string newEpoch,
DateTimeOffset effectiveAt,
int? advisoriesAdded = null,
int? advisoriesModified = null,
int? advisoriesWithdrawn = null,
string? tenantId = null,
string? correlationId = null,
Guid? eventId = null,
DateTimeOffset? timestamp = null,
IGuidProvider? guidProvider = null,
TimeProvider? timeProvider = null)
{
// Injectable providers keep ID/timestamp generation deterministic in tests;
// defaults use the system sources.
var guidSource = guidProvider ?? SystemGuidProvider.Instance;
var timeSource = timeProvider ?? TimeProvider.System;
// NOTE(review): feedId/previousEpoch/newEpoch are not validated here —
// confirm callers guarantee non-null, non-empty identifiers.
return new FeedEpochAdvancedEvent
{
EventId = eventId ?? guidSource.NewGuid(),
Timestamp = timestamp ?? timeSource.GetUtcNow(),
FeedId = feedId,
PreviousEpoch = previousEpoch,
NewEpoch = newEpoch,
EffectiveAt = effectiveAt,
AdvisoriesAdded = advisoriesAdded,
AdvisoriesModified = advisoriesModified,
AdvisoriesWithdrawn = advisoriesWithdrawn,
TenantId = tenantId,
CorrelationId = correlationId
};
}
}

View File

@@ -1,5 +1,3 @@
using StellaOps.Determinism;
namespace StellaOps.Provcache.Events;
/// <summary>
@@ -10,7 +8,7 @@ namespace StellaOps.Provcache.Events;
/// <remarks>
/// Stream name: <c>stellaops:events:feed-epoch-advanced</c>
/// </remarks>
public sealed record FeedEpochAdvancedEvent
public sealed partial record FeedEpochAdvancedEvent
{
/// <summary>
/// Stream name for feed epoch events.
@@ -79,53 +77,4 @@ public sealed record FeedEpochAdvancedEvent
/// </summary>
public string? CorrelationId { get; init; }
/// <summary>
/// Creates a new FeedEpochAdvancedEvent.
/// </summary>
/// <param name="feedId">The feed identifier.</param>
/// <param name="previousEpoch">The previous epoch identifier.</param>
/// <param name="newEpoch">The new epoch identifier.</param>
/// <param name="effectiveAt">When the new epoch became effective.</param>
/// <param name="advisoriesAdded">Number of advisories added (for metrics).</param>
/// <param name="advisoriesModified">Number of advisories modified (for metrics).</param>
/// <param name="advisoriesWithdrawn">Number of advisories withdrawn (for metrics).</param>
/// <param name="tenantId">Tenant ID if multi-tenant.</param>
/// <param name="correlationId">Correlation ID for tracing.</param>
/// <param name="eventId">Optional event ID (defaults to new GUID).</param>
/// <param name="timestamp">Optional timestamp (defaults to current UTC time).</param>
/// <param name="guidProvider">Optional GUID provider for deterministic IDs.</param>
/// <param name="timeProvider">Optional time provider for deterministic timestamps.</param>
public static FeedEpochAdvancedEvent Create(
string feedId,
string previousEpoch,
string newEpoch,
DateTimeOffset effectiveAt,
int? advisoriesAdded = null,
int? advisoriesModified = null,
int? advisoriesWithdrawn = null,
string? tenantId = null,
string? correlationId = null,
Guid? eventId = null,
DateTimeOffset? timestamp = null,
IGuidProvider? guidProvider = null,
TimeProvider? timeProvider = null)
{
var guidSource = guidProvider ?? SystemGuidProvider.Instance;
var timeSource = timeProvider ?? TimeProvider.System;
return new FeedEpochAdvancedEvent
{
EventId = eventId ?? guidSource.NewGuid(),
Timestamp = timestamp ?? timeSource.GetUtcNow(),
FeedId = feedId,
PreviousEpoch = previousEpoch,
NewEpoch = newEpoch,
EffectiveAt = effectiveAt,
AdvisoriesAdded = advisoriesAdded,
AdvisoriesModified = advisoriesModified,
AdvisoriesWithdrawn = advisoriesWithdrawn,
TenantId = tenantId,
CorrelationId = correlationId
};
}
}

View File

@@ -0,0 +1,51 @@
using System;
using StellaOps.Determinism;
namespace StellaOps.Provcache.Events;
public sealed partial record SignerRevokedEvent
{
/// <summary>
/// Creates a new SignerRevokedEvent.
/// </summary>
/// <param name="anchorId">The trust anchor ID that owns the revoked key.</param>
/// <param name="keyId">The revoked key identifier.</param>
/// <param name="signerHash">Hash of the revoked signer's certificate/public key.</param>
/// <param name="effectiveAt">When the revocation became effective.</param>
/// <param name="reason">Reason for the revocation (for audit purposes).</param>
/// <param name="actor">Actor who initiated the revocation.</param>
/// <param name="correlationId">Correlation ID for tracing.</param>
/// <param name="eventId">Optional event ID (defaults to new GUID).</param>
/// <param name="timestamp">Optional timestamp (defaults to current UTC time).</param>
/// <param name="guidProvider">Optional GUID provider for deterministic IDs.</param>
/// <param name="timeProvider">Optional time provider for deterministic timestamps.</param>
/// <returns>A populated <see cref="SignerRevokedEvent"/>.</returns>
public static SignerRevokedEvent Create(
Guid anchorId,
string keyId,
string signerHash,
DateTimeOffset effectiveAt,
string? reason = null,
string? actor = null,
string? correlationId = null,
Guid? eventId = null,
DateTimeOffset? timestamp = null,
IGuidProvider? guidProvider = null,
TimeProvider? timeProvider = null)
{
// Injectable providers keep ID/timestamp generation deterministic in tests;
// defaults use the system sources.
var guidSource = guidProvider ?? SystemGuidProvider.Instance;
var timeSource = timeProvider ?? TimeProvider.System;
// NOTE(review): keyId/signerHash are not validated here — confirm callers
// guarantee non-null, non-empty values.
return new SignerRevokedEvent
{
EventId = eventId ?? guidSource.NewGuid(),
Timestamp = timestamp ?? timeSource.GetUtcNow(),
AnchorId = anchorId,
KeyId = keyId,
SignerHash = signerHash,
EffectiveAt = effectiveAt,
Reason = reason,
Actor = actor,
CorrelationId = correlationId
};
}
}

View File

@@ -1,5 +1,3 @@
using StellaOps.Determinism;
namespace StellaOps.Provcache.Events;
/// <summary>
@@ -10,7 +8,7 @@ namespace StellaOps.Provcache.Events;
/// <remarks>
/// Stream name: <c>stellaops:events:signer-revoked</c>
/// </remarks>
public sealed record SignerRevokedEvent
public sealed partial record SignerRevokedEvent
{
/// <summary>
/// Stream name for signer revocation events.
@@ -70,47 +68,4 @@ public sealed record SignerRevokedEvent
/// </summary>
public string? CorrelationId { get; init; }
/// <summary>
/// Creates a new SignerRevokedEvent.
/// </summary>
/// <param name="anchorId">The trust anchor ID that owns the revoked key.</param>
/// <param name="keyId">The revoked key identifier.</param>
/// <param name="signerHash">Hash of the revoked signer's certificate/public key.</param>
/// <param name="effectiveAt">When the revocation became effective.</param>
/// <param name="reason">Reason for the revocation.</param>
/// <param name="actor">Actor who initiated the revocation.</param>
/// <param name="correlationId">Correlation ID for tracing.</param>
/// <param name="eventId">Optional event ID (defaults to new GUID).</param>
/// <param name="timestamp">Optional timestamp (defaults to current UTC time).</param>
/// <param name="guidProvider">Optional GUID provider for deterministic IDs.</param>
/// <param name="timeProvider">Optional time provider for deterministic timestamps.</param>
public static SignerRevokedEvent Create(
Guid anchorId,
string keyId,
string signerHash,
DateTimeOffset effectiveAt,
string? reason = null,
string? actor = null,
string? correlationId = null,
Guid? eventId = null,
DateTimeOffset? timestamp = null,
IGuidProvider? guidProvider = null,
TimeProvider? timeProvider = null)
{
var guidSource = guidProvider ?? SystemGuidProvider.Instance;
var timeSource = timeProvider ?? TimeProvider.System;
return new SignerRevokedEvent
{
EventId = eventId ?? guidSource.NewGuid(),
Timestamp = timestamp ?? timeSource.GetUtcNow(),
AnchorId = anchorId,
KeyId = keyId,
SignerHash = signerHash,
EffectiveAt = effectiveAt,
Reason = reason,
Actor = actor,
CorrelationId = correlationId
};
}
}

View File

@@ -0,0 +1,49 @@
using System;
namespace StellaOps.Provcache;
/// <summary>
/// Represents an evidence chunk.
/// NOTE(review): record value-equality compares <see cref="Blob"/> by array
/// reference, so two chunks with identical bytes are not Equal; confirm this
/// is acceptable wherever chunks are compared.
/// </summary>
public sealed record EvidenceChunk
{
/// <summary>
/// Unique chunk identifier.
/// </summary>
public required Guid ChunkId { get; init; }
/// <summary>
/// The proof root this chunk belongs to.
/// </summary>
public required string ProofRoot { get; init; }
/// <summary>
/// Zero-based index within the proof.
/// </summary>
public required int ChunkIndex { get; init; }
/// <summary>
/// SHA256 hash of the chunk for verification.
/// </summary>
public required string ChunkHash { get; init; }
/// <summary>
/// The binary content. Init-only, but the array itself is mutable once exposed.
/// </summary>
public required byte[] Blob { get; init; }
/// <summary>
/// Size of the blob in bytes.
/// </summary>
public required int BlobSize { get; init; }
/// <summary>
/// MIME type of the content.
/// </summary>
public required string ContentType { get; init; }
/// <summary>
/// When the chunk was created.
/// </summary>
public required DateTimeOffset CreatedAt { get; init; }
}

View File

@@ -0,0 +1,39 @@
using System.Text.Json.Serialization;
namespace StellaOps.Provcache;
/// <summary>
/// Chunk included in the bundle with base64-encoded blob.
/// </summary>
public sealed record BundleChunk
{
/// <summary>
/// Zero-based chunk index.
/// </summary>
[JsonPropertyName("index")]
public required int Index { get; init; }
/// <summary>
/// SHA256 hash for verification.
/// </summary>
[JsonPropertyName("hash")]
public required string Hash { get; init; }
/// <summary>
/// Size in bytes of the decoded blob (not of the base64 string).
/// </summary>
[JsonPropertyName("size")]
public required int Size { get; init; }
/// <summary>
/// MIME type.
/// </summary>
[JsonPropertyName("contentType")]
public required string ContentType { get; init; }
/// <summary>
/// Base64-encoded chunk data.
/// </summary>
[JsonPropertyName("data")]
public required string Data { get; init; }
}

View File

@@ -0,0 +1,41 @@
using System;
using System.Collections.Generic;
using System.Text.Json.Serialization;
namespace StellaOps.Provcache;
/// <summary>
/// DSSE signature envelope for bundle integrity.
/// </summary>
public sealed record BundleSignature
{
/// <summary>
/// Signature algorithm (e.g., "ES256", "RS256", "Ed25519").
/// </summary>
[JsonPropertyName("algorithm")]
public required string Algorithm { get; init; }
/// <summary>
/// Key identifier used for signing.
/// </summary>
[JsonPropertyName("keyId")]
public required string KeyId { get; init; }
/// <summary>
/// Base64-encoded signature bytes. Serialized under the JSON name "signature".
/// </summary>
[JsonPropertyName("signature")]
public required string SignatureBytes { get; init; }
/// <summary>
/// UTC timestamp when bundle was signed.
/// </summary>
[JsonPropertyName("signedAt")]
public required DateTimeOffset SignedAt { get; init; }
/// <summary>
/// Optional certificate chain for verification. Null when no chain is attached.
/// </summary>
[JsonPropertyName("certificateChain")]
public IReadOnlyList<string>? CertificateChain { get; init; }
}

View File

@@ -0,0 +1,34 @@
using System.Collections.Generic;
namespace StellaOps.Provcache;
/// <summary>
/// Verification results from importing a bundle.
/// </summary>
public sealed record ImportVerification
{
/// <summary>
/// Whether the Merkle root matches the proof root.
/// </summary>
public required bool MerkleRootValid { get; init; }
/// <summary>
/// Whether the signature was verified (if present).
/// Null when the bundle carried no signature to check.
/// </summary>
public required bool? SignatureValid { get; init; }
/// <summary>
/// Whether all included chunks passed hash verification.
/// </summary>
public required bool ChunksValid { get; init; }
/// <summary>
/// Whether the digest integrity check passed.
/// </summary>
public required bool DigestValid { get; init; }
/// <summary>
/// List of failed chunk indices (if any). Empty when all chunks verified.
/// </summary>
public IReadOnlyList<int> FailedChunkIndices { get; init; } = [];
}

View File

@@ -1,31 +1,8 @@
using System;
using System.Text.Json.Serialization;
namespace StellaOps.Provcache;
/// <summary>
/// Density levels for minimal proof export.
/// </summary>
public enum ProofDensity
{
/// <summary>
/// Digest + proof root + chunk manifest only (~2KB).
/// For quick verification and high-trust networks.
/// </summary>
Lite,
/// <summary>
/// Lite + first N chunks (~200KB typical).
/// For normal air-gap scenarios and auditor preview.
/// </summary>
Standard,
/// <summary>
/// Full evidence with all chunks (variable size).
/// For complete audit and compliance evidence.
/// </summary>
Strict
}
/// <summary>
/// Minimal proof bundle for air-gap export/import.
/// Contains the decision digest, proof root, and optionally evidence chunks.
@@ -85,179 +62,3 @@ public sealed record MinimalProofBundle
[JsonPropertyName("signature")]
public BundleSignature? Signature { get; init; }
}
/// <summary>
/// Chunk included in the bundle with base64-encoded blob.
/// </summary>
public sealed record BundleChunk
{
/// <summary>
/// Zero-based chunk index.
/// </summary>
[JsonPropertyName("index")]
public required int Index { get; init; }
/// <summary>
/// SHA256 hash for verification.
/// </summary>
[JsonPropertyName("hash")]
public required string Hash { get; init; }
/// <summary>
/// Size in bytes.
/// </summary>
[JsonPropertyName("size")]
public required int Size { get; init; }
/// <summary>
/// MIME type.
/// </summary>
[JsonPropertyName("contentType")]
public required string ContentType { get; init; }
/// <summary>
/// Base64-encoded chunk data.
/// </summary>
[JsonPropertyName("data")]
public required string Data { get; init; }
}
/// <summary>
/// DSSE signature envelope for bundle integrity.
/// </summary>
public sealed record BundleSignature
{
/// <summary>
/// Signature algorithm (e.g., "ES256", "RS256", "Ed25519").
/// </summary>
[JsonPropertyName("algorithm")]
public required string Algorithm { get; init; }
/// <summary>
/// Key identifier used for signing.
/// </summary>
[JsonPropertyName("keyId")]
public required string KeyId { get; init; }
/// <summary>
/// Base64-encoded signature bytes.
/// </summary>
[JsonPropertyName("signature")]
public required string SignatureBytes { get; init; }
/// <summary>
/// UTC timestamp when bundle was signed.
/// </summary>
[JsonPropertyName("signedAt")]
public required DateTimeOffset SignedAt { get; init; }
/// <summary>
/// Optional certificate chain for verification.
/// </summary>
[JsonPropertyName("certificateChain")]
public IReadOnlyList<string>? CertificateChain { get; init; }
}
/// <summary>
/// Options for exporting a minimal proof bundle.
/// </summary>
public sealed record MinimalProofExportOptions
{
/// <summary>
/// Density level determining how much evidence to include.
/// </summary>
public ProofDensity Density { get; init; } = ProofDensity.Standard;
/// <summary>
/// Number of leading chunks to include for Standard density.
/// Default is 3 (~192KB with 64KB chunks).
/// </summary>
public int StandardDensityChunkCount { get; init; } = 3;
/// <summary>
/// Whether to sign the bundle.
/// </summary>
public bool Sign { get; init; }
/// <summary>
/// Key ID to use for signing (if Sign is true).
/// </summary>
public string? SigningKeyId { get; init; }
/// <summary>
/// Optional system identifier for audit trail.
/// </summary>
public string? ExportedBy { get; init; }
}
/// <summary>
/// Result of importing a minimal proof bundle.
/// </summary>
public sealed record MinimalProofImportResult
{
/// <summary>
/// Whether the import was successful.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// The imported decision digest.
/// </summary>
public required DecisionDigest Digest { get; init; }
/// <summary>
/// The chunk manifest.
/// </summary>
public required ChunkManifest Manifest { get; init; }
/// <summary>
/// Number of chunks imported.
/// </summary>
public required int ChunksImported { get; init; }
/// <summary>
/// Number of chunks remaining to fetch (for lazy fetch scenarios).
/// </summary>
public required int ChunksPending { get; init; }
/// <summary>
/// Verification results.
/// </summary>
public required ImportVerification Verification { get; init; }
/// <summary>
/// Any warnings during import.
/// </summary>
public IReadOnlyList<string> Warnings { get; init; } = [];
}
/// <summary>
/// Verification results from importing a bundle.
/// </summary>
public sealed record ImportVerification
{
/// <summary>
/// Whether the Merkle root matches the proof root.
/// </summary>
public required bool MerkleRootValid { get; init; }
/// <summary>
/// Whether the signature was verified (if present).
/// </summary>
public required bool? SignatureValid { get; init; }
/// <summary>
/// Whether all included chunks passed hash verification.
/// </summary>
public required bool ChunksValid { get; init; }
/// <summary>
/// Whether the digest integrity check passed.
/// </summary>
public required bool DigestValid { get; init; }
/// <summary>
/// List of failed chunk indices (if any).
/// </summary>
public IReadOnlyList<int> FailedChunkIndices { get; init; } = [];
}

View File

@@ -0,0 +1,33 @@
namespace StellaOps.Provcache;
/// <summary>
/// Options for exporting a minimal proof bundle.
/// </summary>
public sealed record MinimalProofExportOptions
{
/// <summary>
/// Density level determining how much evidence to include.
/// </summary>
public ProofDensity Density { get; init; } = ProofDensity.Standard;
/// <summary>
/// Number of leading chunks to include for Standard density.
/// Default is 3 (~192KB with 64KB chunks). Ignored for other densities.
/// </summary>
public int StandardDensityChunkCount { get; init; } = 3;
/// <summary>
/// Whether to sign the bundle.
/// </summary>
public bool Sign { get; init; }
/// <summary>
/// Key ID to use for signing (if Sign is true); unused otherwise.
/// </summary>
public string? SigningKeyId { get; init; }
/// <summary>
/// Optional system identifier for audit trail.
/// </summary>
public string? ExportedBy { get; init; }
}

View File

@@ -0,0 +1,46 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class MinimalProofExporter
{
    /// <summary>
    /// Loads the leading evidence chunks required by the requested density and
    /// converts them into bundle form with base64-encoded payloads.
    /// </summary>
    private async Task<IReadOnlyList<BundleChunk>> GetChunksForDensityAsync(
        string proofRoot,
        ChunkManifest manifest,
        ProofDensity density,
        int standardChunkCount,
        CancellationToken cancellationToken)
    {
        int takeCount;
        switch (density)
        {
            case ProofDensity.Standard:
                takeCount = Math.Min(standardChunkCount, manifest.TotalChunks);
                break;
            case ProofDensity.Strict:
                takeCount = manifest.TotalChunks;
                break;
            default:
                // Lite (and any unrecognized density) ships metadata only.
                takeCount = 0;
                break;
        }

        if (takeCount == 0)
        {
            return [];
        }

        var stored = await _chunkRepository.GetChunkRangeAsync(
            proofRoot,
            startIndex: 0,
            count: takeCount,
            cancellationToken);

        var converted = new List<BundleChunk>();
        foreach (var chunk in stored)
        {
            converted.Add(new BundleChunk
            {
                Index = chunk.ChunkIndex,
                Hash = chunk.ChunkHash,
                Size = chunk.BlobSize,
                ContentType = chunk.ContentType,
                Data = Convert.ToBase64String(chunk.Blob)
            });
        }

        return converted;
    }
}

View File

@@ -0,0 +1,59 @@
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class MinimalProofExporter
{
    /// <inheritdoc />
    public async Task<long> EstimateExportSizeAsync(
        string veriKey,
        ProofDensity density,
        int standardChunkCount = 3,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(veriKey);

        var lookup = await _provcacheService.GetAsync(veriKey, bypassCache: false, cancellationToken);
        if (lookup.Entry is null)
        {
            return 0;
        }

        var manifest = await _chunkRepository.GetManifestAsync(
            lookup.Entry.Decision.ProofRoot,
            cancellationToken);
        if (manifest is null)
        {
            return 0;
        }

        // Digest + manifest overhead is roughly 2KB regardless of density.
        const long BaseSize = 2048;

        if (density == ProofDensity.Standard)
        {
            return BaseSize + CalculateChunkDataSize(manifest, standardChunkCount);
        }

        if (density == ProofDensity.Strict)
        {
            return BaseSize + CalculateChunkDataSize(manifest, manifest.TotalChunks);
        }

        // Lite (and any unrecognized density) carries no chunk payloads.
        return BaseSize;
    }

    /// <summary>
    /// Sums the raw sizes of the first <paramref name="chunkCount"/> chunks and
    /// inflates the total for base64 encoding (~33% plus framing overhead).
    /// </summary>
    private static long CalculateChunkDataSize(ChunkManifest manifest, int chunkCount)
    {
        if (chunkCount <= 0 || manifest.Chunks.Count == 0)
        {
            return 0;
        }

        var limit = Math.Min(chunkCount, manifest.TotalChunks);
        long rawTotal = 0;
        for (var i = 0; i < limit && i < manifest.Chunks.Count; i++)
        {
            rawTotal += manifest.Chunks[i].Size;
        }

        return (long)(rawTotal * 1.37);
    }
}

View File

@@ -0,0 +1,34 @@
using StellaOps.Canonical.Json;
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class MinimalProofExporter
{
    /// <inheritdoc />
    public async Task<byte[]> ExportAsJsonAsync(
        string veriKey,
        MinimalProofExportOptions options,
        CancellationToken cancellationToken = default)
    {
        // Canonical JSON keeps exports byte-stable for signing and diffing.
        var bundle = await ExportAsync(veriKey, options, cancellationToken);
        return CanonJson.Canonicalize(bundle, s_jsonOptions);
    }

    /// <inheritdoc />
    public async Task ExportToStreamAsync(
        string veriKey,
        MinimalProofExportOptions options,
        Stream outputStream,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(outputStream);

        // Reuse the byte-array path so both entry points serialize identically.
        var serialized = await ExportAsJsonAsync(veriKey, options, cancellationToken);
        await outputStream.WriteAsync(serialized, cancellationToken);
    }
}

View File

@@ -0,0 +1,81 @@
using Microsoft.Extensions.Logging;
using System;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class MinimalProofExporter
{
    /// <inheritdoc />
    public async Task<MinimalProofBundle> ExportAsync(
        string veriKey,
        MinimalProofExportOptions options,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(veriKey);
        ArgumentNullException.ThrowIfNull(options);

        _logger.LogDebug(
            "Exporting minimal proof bundle for {VeriKey} with density {Density}",
            veriKey,
            options.Density);

        // Resolve the cached decision; export is only possible for known keys.
        var lookup = await _provcacheService.GetAsync(veriKey, bypassCache: false, cancellationToken);
        var entry = lookup.Entry
            ?? throw new InvalidOperationException($"Cache entry not found for VeriKey: {veriKey}");

        var proofRoot = entry.Decision.ProofRoot;
        var exportedAt = _timeProvider.GetUtcNow();

        // The manifest enumerates every chunk; without it no bundle can be assembled.
        var manifest = await _chunkRepository.GetManifestAsync(proofRoot, cancellationToken)
            ?? throw new InvalidOperationException($"Chunk manifest not found for proof root: {proofRoot}");

        // Density decides how many chunk payloads travel inside the bundle.
        var includedChunks = await GetChunksForDensityAsync(
            proofRoot,
            manifest,
            options.Density,
            options.StandardDensityChunkCount,
            cancellationToken);

        var bundle = new MinimalProofBundle
        {
            BundleVersion = "v1",
            Density = options.Density,
            Digest = entry.Decision,
            Manifest = manifest,
            Chunks = includedChunks,
            ExportedAt = exportedAt,
            ExportedBy = options.ExportedBy
        };

        if (options.Sign)
        {
            if (_signer is null)
            {
                throw new InvalidOperationException("Signing requested but no signer is configured.");
            }

            bundle = await SignBundleAsync(bundle, options.SigningKeyId, cancellationToken);
        }

        _logger.LogInformation(
            "Exported minimal proof bundle for {VeriKey}: density={Density}, chunks={ChunkCount}/{TotalChunks}, signed={Signed}",
            veriKey,
            options.Density,
            includedChunks.Count,
            manifest.TotalChunks,
            options.Sign);

        return bundle;
    }
}

View File

@@ -0,0 +1,38 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace StellaOps.Provcache;
public sealed partial class MinimalProofExporter
{
    /// <summary>
    /// Converts the bundle's verified chunks into <see cref="EvidenceChunk"/> rows
    /// ready for storage, skipping any chunk that failed hash verification.
    /// </summary>
    /// <param name="bundle">The imported bundle whose chunks should be materialized.</param>
    /// <param name="verification">Verification outcome; its failed indices are excluded.</param>
    /// <param name="now">Timestamp recorded as each chunk's creation time.</param>
    /// <returns>The chunks that passed verification, in bundle order.</returns>
    private List<EvidenceChunk> BuildEvidenceChunks(
        MinimalProofBundle bundle,
        ImportVerification verification,
        DateTimeOffset now)
    {
        // HashSet gives O(1) membership checks instead of scanning the
        // failure list once per chunk (O(n^2) on large bundles).
        var failedIndices = new HashSet<int>(verification.FailedChunkIndices);

        var chunksToStore = new List<EvidenceChunk>(bundle.Chunks.Count);
        foreach (var bundleChunk in bundle.Chunks)
        {
            if (failedIndices.Contains(bundleChunk.Index))
            {
                continue; // Skip chunks that failed hash verification.
            }

            chunksToStore.Add(new EvidenceChunk
            {
                ChunkId = _guidProvider.NewGuid(),
                ProofRoot = bundle.Digest.ProofRoot,
                ChunkIndex = bundleChunk.Index,
                ChunkHash = bundleChunk.Hash,
                Blob = Convert.FromBase64String(bundleChunk.Data),
                BlobSize = bundleChunk.Size,
                ContentType = bundleChunk.ContentType,
                CreatedAt = now
            });
        }

        return chunksToStore;
    }
}

View File

@@ -0,0 +1,39 @@
using System;
using System.IO;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class MinimalProofExporter
{
    /// <inheritdoc />
    public async Task<MinimalProofImportResult> ImportFromJsonAsync(
        byte[] jsonBytes,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(jsonBytes);

        var parsed = JsonSerializer.Deserialize<MinimalProofBundle>(jsonBytes, s_jsonOptions);
        if (parsed is null)
        {
            throw new InvalidOperationException("Failed to deserialize bundle.");
        }

        return await ImportAsync(parsed, cancellationToken);
    }

    /// <inheritdoc />
    public async Task<MinimalProofImportResult> ImportFromStreamAsync(
        Stream inputStream,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(inputStream);

        var parsed = await JsonSerializer.DeserializeAsync<MinimalProofBundle>(
            inputStream,
            s_jsonOptions,
            cancellationToken);
        if (parsed is null)
        {
            throw new InvalidOperationException("Failed to deserialize bundle.");
        }

        return await ImportAsync(parsed, cancellationToken);
    }
}

View File

@@ -0,0 +1,95 @@
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class MinimalProofExporter
{
    /// <inheritdoc />
    public async Task<MinimalProofImportResult> ImportAsync(
        MinimalProofBundle bundle,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(bundle);

        _logger.LogDebug(
            "Importing minimal proof bundle: density={Density}, chunks={ChunkCount}",
            bundle.Density,
            bundle.Chunks.Count);

        var verification = await VerifyAsync(bundle, cancellationToken);

        // Both hard-failure paths produce the same result shape; build it once.
        MinimalProofImportResult Rejected(string reason) => new()
        {
            Success = false,
            Digest = bundle.Digest,
            Manifest = bundle.Manifest,
            ChunksImported = 0,
            ChunksPending = bundle.Manifest.TotalChunks,
            Verification = verification,
            Warnings = [reason]
        };

        // A digest or Merkle-root failure rejects the whole bundle.
        if (!verification.DigestValid)
        {
            return Rejected("Digest verification failed.");
        }

        if (!verification.MerkleRootValid)
        {
            return Rejected("Merkle root verification failed.");
        }

        // Chunk-level and signature failures are soft: import proceeds with warnings.
        var warnings = new List<string>();
        if (!verification.ChunksValid)
        {
            warnings.Add(
                $"Some chunks failed verification: indices {string.Join(", ", verification.FailedChunkIndices)}");
        }

        if (verification.SignatureValid == false)
        {
            warnings.Add("Signature verification failed.");
        }

        // Persist only the chunks that passed hash verification.
        var chunksToStore = BuildEvidenceChunks(bundle, verification, _timeProvider.GetUtcNow());
        if (chunksToStore.Count > 0)
        {
            await _chunkRepository.StoreChunksAsync(bundle.Digest.ProofRoot, chunksToStore, cancellationToken);
        }

        var chunksImported = chunksToStore.Count;
        var chunksPending = bundle.Manifest.TotalChunks - chunksImported;

        _logger.LogInformation(
            "Imported minimal proof bundle: chunksImported={ChunksImported}, chunksPending={ChunksPending}",
            chunksImported,
            chunksPending);

        return new MinimalProofImportResult
        {
            // Both invariants already hold on this path; kept explicit for clarity.
            Success = verification.DigestValid && verification.MerkleRootValid,
            Digest = bundle.Digest,
            Manifest = bundle.Manifest,
            ChunksImported = chunksImported,
            ChunksPending = chunksPending,
            Verification = verification,
            Warnings = warnings
        };
    }
}

View File

@@ -0,0 +1,87 @@
using Microsoft.Extensions.Logging;
using StellaOps.Canonical.Json;
using StellaOps.Cryptography;
using StellaOps.Provenance.Attestation;
using System;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class MinimalProofExporter
{
    /// <summary>
    /// Returns a copy of <paramref name="bundle"/> carrying a detached signature
    /// computed over the canonical JSON of the bundle with <c>Signature = null</c>.
    /// </summary>
    /// <param name="bundle">Bundle to sign.</param>
    /// <param name="signingKeyId">NOTE(review): currently unused — key selection is
    /// delegated entirely to the configured signer; confirm whether this should be
    /// forwarded in the sign request.</param>
    /// <param name="cancellationToken">Token to cancel the signing operation.</param>
    /// <exception cref="InvalidOperationException">Thrown when no signer is configured.</exception>
    private async Task<MinimalProofBundle> SignBundleAsync(
        MinimalProofBundle bundle,
        string? signingKeyId,
        CancellationToken cancellationToken)
    {
        if (_signer is null)
        {
            throw new InvalidOperationException("Signer is not configured.");
        }
        // Serialize bundle without signature for signing.
        var bundleWithoutSig = bundle with { Signature = null };
        var payload = CanonJson.Canonicalize(bundleWithoutSig, s_jsonOptions);
        var signRequest = new SignRequest(
            Payload: payload,
            ContentType: "application/vnd.stellaops.proof-bundle+json");
        var signResult = await _signer.SignAsync(signRequest, cancellationToken);
        // Algorithm label comes from the HMAC provider when one is configured;
        // otherwise falls back to the literal "HMAC-SHA256".
        var algorithm = _cryptoHmac?.GetAlgorithmForPurpose(HmacPurpose.Signing) ?? "HMAC-SHA256";
        return bundle with
        {
            Signature = new BundleSignature
            {
                Algorithm = algorithm,
                KeyId = signResult.KeyId,
                SignatureBytes = Convert.ToBase64String(signResult.Signature),
                SignedAt = signResult.SignedAt
            }
        };
    }

    /// <summary>
    /// Verifies a bundle's detached signature by recomputing the HMAC over the
    /// canonical JSON of the bundle with <c>Signature = null</c>.
    /// Returns <c>false</c> (never throws) when the signature is absent, no
    /// verifier or key is configured, or the key id / algorithm does not match.
    /// </summary>
    private bool VerifySignature(MinimalProofBundle bundle)
    {
        if (bundle.Signature is null)
        {
            return false;
        }
        if (_cryptoHmac is null || _keyProvider is null)
        {
            _logger.LogWarning("Signature verification skipped: no HMAC verifier or key configured");
            return false;
        }
        // Only the locally configured key can be checked; foreign key ids fail fast.
        if (!string.Equals(bundle.Signature.KeyId, _keyProvider.KeyId, StringComparison.Ordinal))
        {
            _logger.LogWarning(
                "Signature key mismatch: expected {Expected}, got {Actual}",
                _keyProvider.KeyId,
                bundle.Signature.KeyId);
            return false;
        }
        var expectedAlgorithm = _cryptoHmac.GetAlgorithmForPurpose(HmacPurpose.Signing);
        if (!string.Equals(bundle.Signature.Algorithm, expectedAlgorithm, StringComparison.OrdinalIgnoreCase))
        {
            _logger.LogWarning(
                "Signature algorithm mismatch: expected {Expected}, got {Actual}",
                expectedAlgorithm,
                bundle.Signature.Algorithm);
            return false;
        }
        // Must mirror SignBundleAsync exactly: canonicalize with the signature removed.
        var bundleWithoutSig = bundle with { Signature = null };
        var payload = CanonJson.Canonicalize(bundleWithoutSig, s_jsonOptions);
        return _cryptoHmac.VerifyHmacBase64ForPurpose(
            _keyProvider.KeyMaterial,
            payload,
            bundle.Signature.SignatureBytes,
            HmacPurpose.Signing);
    }
}

View File

@@ -0,0 +1,86 @@
using StellaOps.Determinism;
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class MinimalProofExporter
{
    /// <inheritdoc />
    public Task<ImportVerification> VerifyAsync(
        MinimalProofBundle bundle,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(bundle);

        // Structural sanity of the decision digest.
        var digestValid = VerifyDigest(bundle.Digest);

        // The manifest must describe the same proof the digest refers to.
        var merkleRootValid = string.Equals(
            bundle.Manifest.ProofRoot,
            bundle.Digest.ProofRoot,
            StringComparison.OrdinalIgnoreCase);

        // Hash-check every chunk payload included in the bundle.
        var failedChunks = new List<int>();
        foreach (var chunk in bundle.Chunks)
        {
            if (VerifyChunk(chunk))
            {
                continue;
            }

            failedChunks.Add(chunk.Index);
        }

        // Signature check only applies when a signature travels with the bundle.
        bool? signatureValid = bundle.Signature is null ? null : VerifySignature(bundle);

        return Task.FromResult(new ImportVerification
        {
            DigestValid = digestValid,
            MerkleRootValid = merkleRootValid,
            ChunksValid = failedChunks.Count == 0,
            SignatureValid = signatureValid,
            FailedChunkIndices = failedChunks
        });
    }

    /// <summary>
    /// Cheap structural checks on the digest: required fields present,
    /// trust score within [0, 100], and a non-inverted validity window.
    /// </summary>
    private static bool VerifyDigest(DecisionDigest digest) =>
        !string.IsNullOrWhiteSpace(digest.VeriKey)
        && !string.IsNullOrWhiteSpace(digest.VerdictHash)
        && !string.IsNullOrWhiteSpace(digest.ProofRoot)
        && digest.TrustScore >= 0
        && digest.TrustScore <= 100
        && digest.CreatedAt <= digest.ExpiresAt;

    /// <summary>
    /// Decodes the chunk payload and checks both its declared size and its
    /// "sha256:"-prefixed hash. Any decode failure counts as invalid.
    /// </summary>
    private static bool VerifyChunk(BundleChunk chunk)
    {
        try
        {
            var payload = Convert.FromBase64String(chunk.Data);
            if (payload.Length != chunk.Size)
            {
                return false;
            }

            var digestHex = Convert.ToHexStringLower(SHA256.HashData(payload));
            return string.Equals($"sha256:{digestHex}", chunk.Hash, StringComparison.OrdinalIgnoreCase);
        }
        catch
        {
            // Malformed base64 (or any other decode failure) means the chunk is invalid.
            return false;
        }
    }
}

View File

@@ -1,11 +1,11 @@
using Microsoft.Extensions.Logging;
using StellaOps.Canonical.Json;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Cryptography;
using StellaOps.Determinism;
using StellaOps.Provenance.Attestation;
using System.Security.Cryptography;
using System;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Provcache;
@@ -13,7 +13,7 @@ namespace StellaOps.Provcache;
/// Implementation of <see cref="IMinimalProofExporter"/> supporting
/// multiple density levels for air-gap scenarios.
/// </summary>
public sealed class MinimalProofExporter : IMinimalProofExporter
public sealed partial class MinimalProofExporter : IMinimalProofExporter
{
private readonly IProvcacheService _provcacheService;
private readonly IEvidenceChunkRepository _chunkRepository;
@@ -28,7 +28,7 @@ public sealed class MinimalProofExporter : IMinimalProofExporter
{
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
WriteIndented = false,
DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
};
public MinimalProofExporter(
@@ -48,458 +48,6 @@ public sealed class MinimalProofExporter : IMinimalProofExporter
_keyProvider = keyProvider;
_timeProvider = timeProvider ?? TimeProvider.System;
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
_logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<MinimalProofExporter>.Instance;
}
/// <inheritdoc />
public async Task<MinimalProofBundle> ExportAsync(
string veriKey,
MinimalProofExportOptions options,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(veriKey);
ArgumentNullException.ThrowIfNull(options);
_logger.LogDebug("Exporting minimal proof bundle for {VeriKey} with density {Density}",
veriKey, options.Density);
// Get the cache entry
var cacheResult = await _provcacheService.GetAsync(veriKey, bypassCache: false, cancellationToken);
if (cacheResult.Entry is null)
{
throw new InvalidOperationException($"Cache entry not found for VeriKey: {veriKey}");
}
var entry = cacheResult.Entry;
var proofRoot = entry.Decision.ProofRoot;
var now = _timeProvider.GetUtcNow();
// Get the chunk manifest
var manifest = await _chunkRepository.GetManifestAsync(proofRoot, cancellationToken)
?? throw new InvalidOperationException($"Chunk manifest not found for proof root: {proofRoot}");
// Build chunks based on density
var bundleChunks = await GetChunksForDensityAsync(
proofRoot,
manifest,
options.Density,
options.StandardDensityChunkCount,
cancellationToken);
// Build the bundle
var bundle = new MinimalProofBundle
{
BundleVersion = "v1",
Density = options.Density,
Digest = entry.Decision,
Manifest = manifest,
Chunks = bundleChunks,
ExportedAt = now,
ExportedBy = options.ExportedBy
};
// Sign if requested
if (options.Sign)
{
if (_signer is null)
{
throw new InvalidOperationException("Signing requested but no signer is configured.");
}
bundle = await SignBundleAsync(bundle, options.SigningKeyId, cancellationToken);
}
_logger.LogInformation(
"Exported minimal proof bundle for {VeriKey}: density={Density}, chunks={ChunkCount}/{TotalChunks}, signed={Signed}",
veriKey, options.Density, bundleChunks.Count, manifest.TotalChunks, options.Sign);
return bundle;
}
/// <inheritdoc />
public async Task<byte[]> ExportAsJsonAsync(
string veriKey,
MinimalProofExportOptions options,
CancellationToken cancellationToken = default)
{
var bundle = await ExportAsync(veriKey, options, cancellationToken);
return CanonJson.Canonicalize(bundle, s_jsonOptions);
}
/// <inheritdoc />
public async Task ExportToStreamAsync(
string veriKey,
MinimalProofExportOptions options,
Stream outputStream,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(outputStream);
var bundle = await ExportAsync(veriKey, options, cancellationToken);
var payload = CanonJson.Canonicalize(bundle, s_jsonOptions);
await outputStream.WriteAsync(payload, cancellationToken);
}
/// <inheritdoc />
public async Task<MinimalProofImportResult> ImportAsync(
MinimalProofBundle bundle,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(bundle);
_logger.LogDebug("Importing minimal proof bundle: density={Density}, chunks={ChunkCount}",
bundle.Density, bundle.Chunks.Count);
var warnings = new List<string>();
// Verify the bundle
var verification = await VerifyAsync(bundle, cancellationToken);
if (!verification.DigestValid)
{
return new MinimalProofImportResult
{
Success = false,
Digest = bundle.Digest,
Manifest = bundle.Manifest,
ChunksImported = 0,
ChunksPending = bundle.Manifest.TotalChunks,
Verification = verification,
Warnings = ["Digest verification failed."]
};
}
if (!verification.MerkleRootValid)
{
return new MinimalProofImportResult
{
Success = false,
Digest = bundle.Digest,
Manifest = bundle.Manifest,
ChunksImported = 0,
ChunksPending = bundle.Manifest.TotalChunks,
Verification = verification,
Warnings = ["Merkle root verification failed."]
};
}
if (!verification.ChunksValid)
{
warnings.Add($"Some chunks failed verification: indices {string.Join(", ", verification.FailedChunkIndices)}");
}
if (verification.SignatureValid == false)
{
warnings.Add("Signature verification failed.");
}
// Store chunks
var chunksToStore = new List<EvidenceChunk>();
var now = _timeProvider.GetUtcNow();
foreach (var bundleChunk in bundle.Chunks)
{
if (verification.FailedChunkIndices.Contains(bundleChunk.Index))
{
continue; // Skip failed chunks
}
chunksToStore.Add(new EvidenceChunk
{
ChunkId = _guidProvider.NewGuid(),
ProofRoot = bundle.Digest.ProofRoot,
ChunkIndex = bundleChunk.Index,
ChunkHash = bundleChunk.Hash,
Blob = Convert.FromBase64String(bundleChunk.Data),
BlobSize = bundleChunk.Size,
ContentType = bundleChunk.ContentType,
CreatedAt = now
});
}
if (chunksToStore.Count > 0)
{
await _chunkRepository.StoreChunksAsync(bundle.Digest.ProofRoot, chunksToStore, cancellationToken);
}
var chunksImported = chunksToStore.Count;
var chunksPending = bundle.Manifest.TotalChunks - chunksImported;
_logger.LogInformation(
"Imported minimal proof bundle: chunksImported={ChunksImported}, chunksPending={ChunksPending}",
chunksImported, chunksPending);
return new MinimalProofImportResult
{
Success = verification.DigestValid && verification.MerkleRootValid,
Digest = bundle.Digest,
Manifest = bundle.Manifest,
ChunksImported = chunksImported,
ChunksPending = chunksPending,
Verification = verification,
Warnings = warnings
};
}
/// <inheritdoc />
public async Task<MinimalProofImportResult> ImportFromJsonAsync(
byte[] jsonBytes,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(jsonBytes);
var bundle = JsonSerializer.Deserialize<MinimalProofBundle>(jsonBytes, s_jsonOptions)
?? throw new InvalidOperationException("Failed to deserialize bundle.");
return await ImportAsync(bundle, cancellationToken);
}
/// <inheritdoc />
public async Task<MinimalProofImportResult> ImportFromStreamAsync(
Stream inputStream,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(inputStream);
var bundle = await JsonSerializer.DeserializeAsync<MinimalProofBundle>(inputStream, s_jsonOptions, cancellationToken)
?? throw new InvalidOperationException("Failed to deserialize bundle.");
return await ImportAsync(bundle, cancellationToken);
}
/// <inheritdoc />
public Task<ImportVerification> VerifyAsync(
MinimalProofBundle bundle,
CancellationToken cancellationToken = default)
{
ArgumentNullException.ThrowIfNull(bundle);
// Verify digest integrity
var digestValid = VerifyDigest(bundle.Digest);
// Verify Merkle root matches digest
var merkleRootValid = string.Equals(
bundle.Manifest.ProofRoot,
bundle.Digest.ProofRoot,
StringComparison.OrdinalIgnoreCase);
// Verify included chunks
var failedChunks = new List<int>();
foreach (var chunk in bundle.Chunks)
{
if (!VerifyChunk(chunk))
{
failedChunks.Add(chunk.Index);
}
}
var chunksValid = failedChunks.Count == 0;
// Verify signature if present
bool? signatureValid = null;
if (bundle.Signature is not null)
{
signatureValid = VerifySignature(bundle);
}
return Task.FromResult(new ImportVerification
{
DigestValid = digestValid,
MerkleRootValid = merkleRootValid,
ChunksValid = chunksValid,
SignatureValid = signatureValid,
FailedChunkIndices = failedChunks
});
}
/// <inheritdoc />
public async Task<long> EstimateExportSizeAsync(
string veriKey,
ProofDensity density,
int standardChunkCount = 3,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(veriKey);
var cacheResult = await _provcacheService.GetAsync(veriKey, bypassCache: false, cancellationToken);
if (cacheResult.Entry is null)
{
return 0;
}
var proofRoot = cacheResult.Entry.Decision.ProofRoot;
var manifest = await _chunkRepository.GetManifestAsync(proofRoot, cancellationToken);
if (manifest is null)
{
return 0;
}
// Base size: digest + manifest (roughly 2KB)
const long baseSize = 2048;
return density switch
{
ProofDensity.Lite => baseSize,
ProofDensity.Standard => baseSize + CalculateChunkDataSize(manifest, standardChunkCount),
ProofDensity.Strict => baseSize + CalculateChunkDataSize(manifest, manifest.TotalChunks),
_ => baseSize
};
}
private async Task<IReadOnlyList<BundleChunk>> GetChunksForDensityAsync(
string proofRoot,
ChunkManifest manifest,
ProofDensity density,
int standardChunkCount,
CancellationToken cancellationToken)
{
var chunkCount = density switch
{
ProofDensity.Lite => 0,
ProofDensity.Standard => Math.Min(standardChunkCount, manifest.TotalChunks),
ProofDensity.Strict => manifest.TotalChunks,
_ => 0
};
if (chunkCount == 0)
{
return [];
}
var chunks = await _chunkRepository.GetChunkRangeAsync(
proofRoot,
startIndex: 0,
count: chunkCount,
cancellationToken);
return chunks.Select(c => new BundleChunk
{
Index = c.ChunkIndex,
Hash = c.ChunkHash,
Size = c.BlobSize,
ContentType = c.ContentType,
Data = Convert.ToBase64String(c.Blob)
}).ToList();
}
private async Task<MinimalProofBundle> SignBundleAsync(
MinimalProofBundle bundle,
string? signingKeyId,
CancellationToken cancellationToken)
{
if (_signer is null)
{
throw new InvalidOperationException("Signer is not configured.");
}
// Serialize bundle without signature for signing
var bundleWithoutSig = bundle with { Signature = null };
var payload = CanonJson.Canonicalize(bundleWithoutSig, s_jsonOptions);
var signRequest = new SignRequest(
Payload: payload,
ContentType: "application/vnd.stellaops.proof-bundle+json");
var signResult = await _signer.SignAsync(signRequest, cancellationToken);
var algorithm = _cryptoHmac?.GetAlgorithmForPurpose(HmacPurpose.Signing) ?? "HMAC-SHA256";
return bundle with
{
Signature = new BundleSignature
{
Algorithm = algorithm,
KeyId = signResult.KeyId,
SignatureBytes = Convert.ToBase64String(signResult.Signature),
SignedAt = signResult.SignedAt
}
};
}
private static bool VerifyDigest(DecisionDigest digest)
{
// Basic integrity checks
if (string.IsNullOrWhiteSpace(digest.VeriKey)) return false;
if (string.IsNullOrWhiteSpace(digest.VerdictHash)) return false;
if (string.IsNullOrWhiteSpace(digest.ProofRoot)) return false;
if (digest.TrustScore < 0 || digest.TrustScore > 100) return false;
if (digest.CreatedAt > digest.ExpiresAt) return false;
return true;
}
private static bool VerifyChunk(BundleChunk chunk)
{
try
{
var data = Convert.FromBase64String(chunk.Data);
if (data.Length != chunk.Size) return false;
var hash = SHA256.HashData(data);
var computedHash = $"sha256:{Convert.ToHexStringLower(hash)}";
return string.Equals(computedHash, chunk.Hash, StringComparison.OrdinalIgnoreCase);
}
catch
{
return false;
}
}
private bool VerifySignature(MinimalProofBundle bundle)
{
if (bundle.Signature is null)
{
return false;
}
if (_cryptoHmac is null || _keyProvider is null)
{
_logger.LogWarning("Signature verification skipped: no HMAC verifier or key configured");
return false;
}
if (!string.Equals(bundle.Signature.KeyId, _keyProvider.KeyId, StringComparison.Ordinal))
{
_logger.LogWarning(
"Signature key mismatch: expected {Expected}, got {Actual}",
_keyProvider.KeyId,
bundle.Signature.KeyId);
return false;
}
var expectedAlgorithm = _cryptoHmac.GetAlgorithmForPurpose(HmacPurpose.Signing);
if (!string.Equals(bundle.Signature.Algorithm, expectedAlgorithm, StringComparison.OrdinalIgnoreCase))
{
_logger.LogWarning(
"Signature algorithm mismatch: expected {Expected}, got {Actual}",
expectedAlgorithm,
bundle.Signature.Algorithm);
return false;
}
var bundleWithoutSig = bundle with { Signature = null };
var payload = CanonJson.Canonicalize(bundleWithoutSig, s_jsonOptions);
return _cryptoHmac.VerifyHmacBase64ForPurpose(
_keyProvider.KeyMaterial,
payload,
bundle.Signature.SignatureBytes,
HmacPurpose.Signing);
}
private static long CalculateChunkDataSize(ChunkManifest manifest, int chunkCount)
{
if (chunkCount <= 0 || manifest.Chunks.Count == 0)
{
return 0;
}
var actualCount = Math.Min(chunkCount, manifest.TotalChunks);
var rawSize = manifest.Chunks
.Take(actualCount)
.Sum(c => (long)c.Size);
// Base64 encoding overhead: ~33% increase
return (long)(rawSize * 1.37);
_logger = logger ?? NullLogger<MinimalProofExporter>.Instance;
}
}

View File

@@ -0,0 +1,44 @@
using System.Collections.Generic;
namespace StellaOps.Provcache;
/// <summary>
/// Outcome of importing a minimal proof bundle.
/// </summary>
public sealed record MinimalProofImportResult
{
    /// <summary>
    /// True when the bundle passed the hard verification gates and was accepted.
    /// </summary>
    public required bool Success { get; init; }

    /// <summary>
    /// The decision digest carried by the imported bundle.
    /// </summary>
    public required DecisionDigest Digest { get; init; }

    /// <summary>
    /// The manifest describing every chunk of the proof.
    /// </summary>
    public required ChunkManifest Manifest { get; init; }

    /// <summary>
    /// How many chunks were actually stored during this import.
    /// </summary>
    public required int ChunksImported { get; init; }

    /// <summary>
    /// How many chunks still need to be fetched (lazy-fetch scenarios).
    /// </summary>
    public required int ChunksPending { get; init; }

    /// <summary>
    /// Detailed verification outcome for the bundle.
    /// </summary>
    public required ImportVerification Verification { get; init; }

    /// <summary>
    /// Non-fatal issues observed during the import; empty when clean.
    /// </summary>
    public IReadOnlyList<string> Warnings { get; init; } = [];
}

View File

@@ -0,0 +1,25 @@
namespace StellaOps.Provcache;
/// <summary>
/// How much evidence a minimal proof export carries.
/// </summary>
public enum ProofDensity
{
    /// <summary>
    /// Metadata only: digest, proof root, and chunk manifest (~2KB).
    /// Suited to quick verification on high-trust networks.
    /// </summary>
    Lite,

    /// <summary>
    /// <see cref="Lite"/> plus the first N chunks (typically ~200KB).
    /// Suited to routine air-gap transfer and auditor preview.
    /// </summary>
    Standard,

    /// <summary>
    /// Every evidence chunk (size varies with the proof).
    /// Suited to full audits and compliance evidence.
    /// </summary>
    Strict
}

View File

@@ -92,112 +92,3 @@ public interface IEvidenceChunkRepository
string proofRoot,
CancellationToken cancellationToken = default);
}
/// <summary>
/// A single stored piece of proof evidence, including its binary payload.
/// </summary>
public sealed record EvidenceChunk
{
    /// <summary>
    /// Unique identifier of this chunk row.
    /// </summary>
    public required Guid ChunkId { get; init; }

    /// <summary>
    /// Merkle proof root this chunk belongs to.
    /// </summary>
    public required string ProofRoot { get; init; }

    /// <summary>
    /// Position of the chunk within the proof, starting at zero.
    /// </summary>
    public required int ChunkIndex { get; init; }

    /// <summary>
    /// SHA256 hash used to verify the payload.
    /// </summary>
    public required string ChunkHash { get; init; }

    /// <summary>
    /// Raw binary payload of the chunk.
    /// </summary>
    public required byte[] Blob { get; init; }

    /// <summary>
    /// Payload length in bytes.
    /// </summary>
    public required int BlobSize { get; init; }

    /// <summary>
    /// MIME type describing the payload.
    /// </summary>
    public required string ContentType { get; init; }

    /// <summary>
    /// Creation timestamp of the chunk row.
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }
}
/// <summary>
/// Metadata-only description of every chunk under a proof root.
/// Enables lazy fetching: blobs are retrieved on demand, not up front.
/// </summary>
public sealed record ChunkManifest
{
    /// <summary>
    /// Merkle root computed over all chunks of the proof.
    /// </summary>
    public required string ProofRoot { get; init; }

    /// <summary>
    /// Count of chunks that make up the proof.
    /// </summary>
    public required int TotalChunks { get; init; }

    /// <summary>
    /// Combined size of every chunk, in bytes.
    /// </summary>
    public required long TotalSize { get; init; }

    /// <summary>
    /// Per-chunk metadata, in chunk order.
    /// </summary>
    public required IReadOnlyList<ChunkMetadata> Chunks { get; init; }

    /// <summary>
    /// Timestamp at which this manifest was produced.
    /// </summary>
    public required DateTimeOffset GeneratedAt { get; init; }
}
/// <summary>
/// Descriptive information for one chunk, without its payload.
/// </summary>
public sealed record ChunkMetadata
{
    /// <summary>
    /// Identifier of the chunk row.
    /// </summary>
    public required Guid ChunkId { get; init; }

    /// <summary>
    /// Position within the proof, starting at zero.
    /// </summary>
    public required int Index { get; init; }

    /// <summary>
    /// SHA256 hash used to verify the payload.
    /// </summary>
    public required string Hash { get; init; }

    /// <summary>
    /// Payload size in bytes.
    /// </summary>
    public required int Size { get; init; }

    /// <summary>
    /// MIME type describing the payload.
    /// </summary>
    public required string ContentType { get; init; }
}

View File

@@ -94,44 +94,3 @@ public interface IProvcacheRepository
/// <returns>Cache statistics.</returns>
Task<ProvcacheStatistics> GetStatisticsAsync(CancellationToken cancellationToken = default);
}
/// <summary>
/// Snapshot of cache health figures used for monitoring and diagnostics.
/// </summary>
public sealed record ProvcacheStatistics
{
    /// <summary>
    /// Count of entries currently held in the cache.
    /// </summary>
    public long TotalEntries { get; init; }

    /// <summary>
    /// Cumulative number of cache hits served.
    /// </summary>
    public long TotalHits { get; init; }

    /// <summary>
    /// Count of entries whose expiry falls within the next hour.
    /// </summary>
    public long ExpiringWithinHour { get; init; }

    /// <summary>
    /// Count of distinct policy hashes represented in the cache.
    /// </summary>
    public int UniquePolicies { get; init; }

    /// <summary>
    /// Count of distinct signer set hashes represented in the cache.
    /// </summary>
    public int UniqueSignerSets { get; init; }

    /// <summary>
    /// Timestamp of the oldest entry, or null when the cache is empty.
    /// </summary>
    public DateTimeOffset? OldestEntry { get; init; }

    /// <summary>
    /// Timestamp of the newest entry, or null when the cache is empty.
    /// </summary>
    public DateTimeOffset? NewestEntry { get; init; }
}

View File

@@ -76,288 +76,3 @@ public interface IProvcacheService
/// <returns>Number of entries pruned.</returns>
Task<long> PruneExpiredAsync(CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of a cache service lookup. Construct via the static factory methods
/// (<see cref="Hit"/>, <see cref="Miss"/>, <see cref="Bypassed"/>, <see cref="Expired"/>).
/// </summary>
public sealed record ProvcacheServiceResult
{
    /// <summary>
    /// The cache result status.
    /// </summary>
    public required ProvcacheResultStatus Status { get; init; }
    /// <summary>
    /// The cache entry if found.
    /// </summary>
    public ProvcacheEntry? Entry { get; init; }
    /// <summary>
    /// Whether the result came from cache (true) or needs computation (false).
    /// </summary>
    public bool WasCached => Status == ProvcacheResultStatus.CacheHit;
    /// <summary>
    /// Source of the cache hit for diagnostics.
    /// </summary>
    public string? Source { get; init; }
    /// <summary>
    /// Time taken for the lookup in milliseconds.
    /// </summary>
    public double ElapsedMs { get; init; }
    /// <summary>
    /// Creates a cache hit result.
    /// </summary>
    public static ProvcacheServiceResult Hit(ProvcacheEntry entry, string source, double elapsedMs)
    {
        return new ProvcacheServiceResult
        {
            Status = ProvcacheResultStatus.CacheHit,
            Entry = entry,
            Source = source,
            ElapsedMs = elapsedMs
        };
    }
    /// <summary>
    /// Creates a cache miss result.
    /// </summary>
    public static ProvcacheServiceResult Miss(double elapsedMs)
    {
        return new ProvcacheServiceResult
        {
            Status = ProvcacheResultStatus.CacheMiss,
            Entry = null,
            Source = null,
            ElapsedMs = elapsedMs
        };
    }
    /// <summary>
    /// Creates a bypassed result (cache was skipped).
    /// </summary>
    public static ProvcacheServiceResult Bypassed()
    {
        return new ProvcacheServiceResult
        {
            Status = ProvcacheResultStatus.Bypassed,
            Entry = null,
            Source = null,
            ElapsedMs = 0
        };
    }
    /// <summary>
    /// Creates an expired result.
    /// </summary>
    public static ProvcacheServiceResult Expired(ProvcacheEntry entry, double elapsedMs)
    {
        return new ProvcacheServiceResult
        {
            Status = ProvcacheResultStatus.Expired,
            Entry = entry,
            Source = "expired",
            ElapsedMs = elapsedMs
        };
    }
}
/// <summary>
/// Cache result status reported by <see cref="ProvcacheServiceResult"/>.
/// </summary>
public enum ProvcacheResultStatus
{
    /// <summary>
    /// Entry was found in cache and is valid.
    /// </summary>
    CacheHit,
    /// <summary>
    /// Entry was not found in cache.
    /// </summary>
    CacheMiss,
    /// <summary>
    /// Cache was bypassed (force re-computation).
    /// </summary>
    Bypassed,
    /// <summary>
    /// Entry was found but has expired.
    /// </summary>
    Expired
}
/// <summary>
/// Request for cache invalidation by criteria. Prefer the static factory methods,
/// which supply a sensible default <see cref="Reason"/> per invalidation type.
/// </summary>
public sealed record InvalidationRequest
{
    /// <summary>
    /// The invalidation type.
    /// </summary>
    public required InvalidationType Type { get; init; }
    /// <summary>
    /// The value to match for invalidation.
    /// </summary>
    public required string Value { get; init; }
    /// <summary>
    /// Reason for invalidation (for audit log).
    /// </summary>
    public string? Reason { get; init; }
    /// <summary>
    /// Actor who initiated the invalidation.
    /// </summary>
    public string? Actor { get; init; }
    /// <summary>
    /// Creates an invalidation request by policy hash.
    /// </summary>
    public static InvalidationRequest ByPolicyHash(string policyHash, string? reason = null)
    {
        return new InvalidationRequest
        {
            Type = InvalidationType.PolicyHash,
            Value = policyHash,
            Reason = reason ?? "policy-update"
        };
    }
    /// <summary>
    /// Creates an invalidation request by signer set hash.
    /// </summary>
    public static InvalidationRequest BySignerSetHash(string signerSetHash, string? reason = null)
    {
        return new InvalidationRequest
        {
            Type = InvalidationType.SignerSetHash,
            Value = signerSetHash,
            Reason = reason ?? "signer-revocation"
        };
    }
    /// <summary>
    /// Creates an invalidation request by feed epoch.
    /// </summary>
    public static InvalidationRequest ByFeedEpochOlderThan(string feedEpoch, string? reason = null)
    {
        return new InvalidationRequest
        {
            Type = InvalidationType.FeedEpochOlderThan,
            Value = feedEpoch,
            Reason = reason ?? "feed-update"
        };
    }
    /// <summary>
    /// Creates an invalidation request by key pattern.
    /// </summary>
    public static InvalidationRequest ByPattern(string pattern, string? reason = null)
    {
        return new InvalidationRequest
        {
            Type = InvalidationType.Pattern,
            Value = pattern,
            Reason = reason ?? "pattern-invalidation"
        };
    }
}
/// <summary>
/// Type of invalidation criteria carried by an <see cref="InvalidationRequest"/>.
/// </summary>
public enum InvalidationType
{
    /// <summary>
    /// Invalidate by policy hash.
    /// </summary>
    PolicyHash,
    /// <summary>
    /// Invalidate by signer set hash.
    /// </summary>
    SignerSetHash,
    /// <summary>
    /// Invalidate entries with feed epoch older than specified.
    /// </summary>
    FeedEpochOlderThan,
    /// <summary>
    /// Invalidate by key pattern.
    /// </summary>
    Pattern,
    /// <summary>
    /// Invalidate expired entries.
    /// </summary>
    Expired
}
/// <summary>
/// Result of an invalidation operation.
/// </summary>
public sealed record InvalidationResult
{
    /// <summary>
    /// Number of entries invalidated.
    /// </summary>
    public required long EntriesAffected { get; init; }
    /// <summary>
    /// The invalidation request that was executed.
    /// </summary>
    public required InvalidationRequest Request { get; init; }
    /// <summary>
    /// Timestamp of the invalidation.
    /// </summary>
    public required DateTimeOffset Timestamp { get; init; }
    /// <summary>
    /// Whether the invalidation was logged for audit.
    /// </summary>
    public bool WasLogged { get; init; }
}
/// <summary>
/// Cache metrics for monitoring and observability. All counters are cumulative since startup.
/// </summary>
public sealed record ProvcacheMetrics
{
    /// <summary>
    /// Total cache requests since startup.
    /// </summary>
    public long TotalRequests { get; init; }
    /// <summary>
    /// Total cache hits since startup.
    /// </summary>
    public long TotalHits { get; init; }
    /// <summary>
    /// Total cache misses since startup.
    /// </summary>
    public long TotalMisses { get; init; }
    /// <summary>
    /// Cache hit rate (0.0 - 1.0). Returns 0.0 when no requests have been recorded,
    /// avoiding a division by zero.
    /// </summary>
    public double HitRate => TotalRequests == 0 ? 0.0 : (double)TotalHits / TotalRequests;
    /// <summary>
    /// Average lookup latency in milliseconds.
    /// </summary>
    public double AvgLatencyMs { get; init; }
    /// <summary>
    /// P99 lookup latency in milliseconds.
    /// </summary>
    public double P99LatencyMs { get; init; }
    /// <summary>
    /// Current number of entries in cache.
    /// </summary>
    public long CurrentEntryCount { get; init; }
    /// <summary>
    /// Total invalidations since startup.
    /// </summary>
    public long TotalInvalidations { get; init; }
    /// <summary>
    /// Valkey cache health status.
    /// </summary>
    public bool ValkeyCacheHealthy { get; init; }
    /// <summary>
    /// Postgres repository health status.
    /// </summary>
    public bool PostgresRepositoryHealthy { get; init; }
    /// <summary>
    /// Timestamp when metrics were collected.
    /// </summary>
    public DateTimeOffset CollectedAt { get; init; }
}

View File

@@ -71,80 +71,3 @@ public interface IProvcacheStore
Func<CancellationToken, ValueTask<ProvcacheEntry>> factory,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Result of a single cache lookup. Construct via <see cref="Hit"/> or <see cref="Miss"/>.
/// </summary>
public sealed record ProvcacheLookupResult
{
    /// <summary>
    /// Whether the entry was found in cache.
    /// </summary>
    public required bool IsHit { get; init; }
    /// <summary>
    /// The cache entry if found.
    /// </summary>
    public ProvcacheEntry? Entry { get; init; }
    /// <summary>
    /// Source of the cache hit (e.g., "valkey", "postgres").
    /// Null for cache misses.
    /// </summary>
    public string? Source { get; init; }
    /// <summary>
    /// Time taken for the lookup in milliseconds.
    /// </summary>
    public double ElapsedMs { get; init; }
    /// <summary>
    /// Creates a cache hit result.
    /// </summary>
    public static ProvcacheLookupResult Hit(ProvcacheEntry entry, string source, double elapsedMs)
    {
        return new ProvcacheLookupResult
        {
            IsHit = true,
            Entry = entry,
            Source = source,
            ElapsedMs = elapsedMs
        };
    }
    /// <summary>
    /// Creates a cache miss result.
    /// </summary>
    public static ProvcacheLookupResult Miss(double elapsedMs)
    {
        return new ProvcacheLookupResult
        {
            IsHit = false,
            Entry = null,
            Source = null,
            ElapsedMs = elapsedMs
        };
    }
}
/// <summary>
/// Result of a batch cache lookup.
/// </summary>
public sealed record ProvcacheBatchLookupResult
{
    /// <summary>
    /// Entries found in cache, keyed by VeriKey.
    /// </summary>
    public required IReadOnlyDictionary<string, ProvcacheEntry> Hits { get; init; }
    /// <summary>
    /// VeriKeys that were not found in cache.
    /// </summary>
    public required IReadOnlyList<string> Misses { get; init; }
    /// <summary>
    /// Time taken for the lookup in milliseconds.
    /// </summary>
    public double ElapsedMs { get; init; }
    /// <summary>
    /// Cache hit rate for this batch (0.0 - 1.0).
    /// Returns 0.0 for an empty batch to avoid dividing by zero.
    /// </summary>
    public double HitRate
    {
        get
        {
            var total = Hits.Count + Misses.Count;
            return total == 0 ? 0.0 : (double)Hits.Count / total;
        }
    }
}

View File

@@ -0,0 +1,25 @@
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
/// <summary>
/// Interface for write-behind queue operations: entries are accepted quickly and
/// persisted asynchronously by a background loop.
/// </summary>
public interface IWriteBehindQueue
{
    /// <summary>
    /// Enqueues an entry for asynchronous persistence.
    /// </summary>
    /// <param name="entry">The cache entry to persist.</param>
    /// <param name="cancellationToken">Token to cancel the enqueue operation.</param>
    ValueTask EnqueueAsync(ProvcacheEntry entry, CancellationToken cancellationToken = default);
    /// <summary>
    /// Gets current queue metrics.
    /// </summary>
    /// <returns>A snapshot of queue metrics.</returns>
    WriteBehindMetrics GetMetrics();
    /// <summary>
    /// Runs the background write-behind processing loop.
    /// </summary>
    /// <param name="stoppingToken">Token that stops the loop when cancelled.</param>
    Task RunAsync(CancellationToken stoppingToken);
}

View File

@@ -0,0 +1,27 @@
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache.Invalidation;
/// <summary>
/// Metrics and disposal half of <see cref="FeedEpochInvalidator"/>.
/// </summary>
public sealed partial class FeedEpochInvalidator
{
    /// <inheritdoc />
    public InvalidatorMetrics GetMetrics()
    {
        // Interlocked.Read gives a torn-read-safe snapshot of the 64-bit counters.
        return new InvalidatorMetrics
        {
            EventsProcessed = Interlocked.Read(ref _eventsProcessed),
            EntriesInvalidated = Interlocked.Read(ref _entriesInvalidated),
            Errors = Interlocked.Read(ref _errors),
            LastEventAt = _lastEventAt,
            CollectedAt = _timeProvider.GetUtcNow()
        };
    }
    /// <inheritdoc />
    public async ValueTask DisposeAsync()
    {
        // Stop the processing loop before releasing the cancellation source it uses.
        await StopAsync();
        _cts?.Dispose();
    }
}

View File

@@ -0,0 +1,82 @@
using Microsoft.Extensions.Logging;
using StellaOps.Messaging;
using StellaOps.Provcache.Events;
using System;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache.Invalidation;
/// <summary>
/// Event-processing half of <see cref="FeedEpochInvalidator"/>: subscribes to the
/// feed-epoch stream and invalidates cache entries with stale feed epochs.
/// </summary>
public sealed partial class FeedEpochInvalidator
{
    // Long-running subscription loop; runs until the token is cancelled.
    private async Task ProcessEventsAsync(CancellationToken cancellationToken)
    {
        try
        {
            // Start from latest events; historical epoch advancements are not replayed.
            await foreach (var streamEvent in _eventStream.SubscribeAsync(StreamPosition.End, cancellationToken))
            {
                try
                {
                    await HandleEventAsync(streamEvent.Event, cancellationToken);
                    Interlocked.Increment(ref _eventsProcessed);
                    _lastEventAt = _timeProvider.GetUtcNow();
                }
                catch (Exception ex)
                {
                    // Per-event failures are counted and logged but do not stop the loop.
                    Interlocked.Increment(ref _errors);
                    _logger.LogError(
                        ex,
                        "Error processing FeedEpochAdvancedEvent {EventId} for feed {FeedId}",
                        streamEvent.Event.EventId,
                        streamEvent.Event.FeedId);
                }
            }
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Normal shutdown.
        }
        catch (Exception ex)
        {
            // Unexpected failure of the subscription itself: log and propagate.
            _logger.LogError(ex, "Fatal error in FeedEpochInvalidator event processing loop");
            throw;
        }
    }
    // Applies one epoch-advancement event: invalidates entries older than the new epoch
    // and records counters/telemetry.
    private async Task HandleEventAsync(FeedEpochAdvancedEvent @event, CancellationToken cancellationToken)
    {
        _logger.LogInformation(
            "Processing feed epoch advancement: FeedId={FeedId}, PreviousEpoch={PreviousEpoch}, NewEpoch={NewEpoch}",
            @event.FeedId,
            @event.PreviousEpoch,
            @event.NewEpoch);
        // Invalidate entries with feed_epoch older than the new epoch.
        // The feed_epoch in cache entries is formatted as "feed:epoch" (e.g., "cve:2024-12-24T12:00:00Z").
        var request = InvalidationRequest.ByFeedEpochOlderThan(
            @event.NewEpoch,
            $"Feed {FormatFeedEpoch(@event.FeedId, @event.NewEpoch)} advanced");
        var result = await _provcacheService.InvalidateByAsync(request, cancellationToken);
        Interlocked.Add(ref _entriesInvalidated, result.EntriesAffected);
        _logger.LogInformation(
            "Feed epoch advancement invalidated {Count} cache entries for feed {FeedId} epoch {NewEpoch}",
            result.EntriesAffected,
            @event.FeedId,
            @event.NewEpoch);
        // Record telemetry.
        ProvcacheTelemetry.RecordInvalidation("feed_epoch", result.EntriesAffected);
    }
    /// <summary>
    /// Formats a feed epoch identifier as "feedId:epoch".
    /// </summary>
    private static string FormatFeedEpoch(string feedId, string epoch)
    {
        return $"{feedId}:{epoch}";
    }
}

View File

@@ -2,6 +2,9 @@ using Microsoft.Extensions.Logging;
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
using StellaOps.Provcache.Events;
using System;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache.Invalidation;
@@ -9,7 +12,7 @@ namespace StellaOps.Provcache.Invalidation;
/// Invalidator that handles feed epoch advancement events.
/// When a feed advances to a new epoch, cache entries with older feed_epoch are invalidated.
/// </summary>
public sealed class FeedEpochInvalidator : IProvcacheInvalidator
public sealed partial class FeedEpochInvalidator : IProvcacheInvalidator
{
private readonly IEventStream<FeedEpochAdvancedEvent> _eventStream;
private readonly IProvcacheService _provcacheService;
@@ -20,7 +23,7 @@ public sealed class FeedEpochInvalidator : IProvcacheInvalidator
private Task? _processingTask;
private bool _isRunning;
// Metrics
// Metrics.
private long _eventsProcessed;
private long _entriesInvalidated;
private long _errors;
@@ -57,7 +60,9 @@ public sealed class FeedEpochInvalidator : IProvcacheInvalidator
_processingTask = ProcessEventsAsync(_cts.Token);
_isRunning = true;
_logger.LogInformation("FeedEpochInvalidator started, subscribing to {StreamName}", FeedEpochAdvancedEvent.StreamName);
_logger.LogInformation(
"FeedEpochInvalidator started, subscribing to {StreamName}",
FeedEpochAdvancedEvent.StreamName);
return Task.CompletedTask;
}
@@ -84,7 +89,7 @@ public sealed class FeedEpochInvalidator : IProvcacheInvalidator
}
catch (OperationCanceledException)
{
// Expected during shutdown
// Expected during shutdown.
}
}
@@ -92,93 +97,4 @@ public sealed class FeedEpochInvalidator : IProvcacheInvalidator
_logger.LogInformation("FeedEpochInvalidator stopped");
}
/// <inheritdoc />
public InvalidatorMetrics GetMetrics()
{
return new InvalidatorMetrics
{
EventsProcessed = Interlocked.Read(ref _eventsProcessed),
EntriesInvalidated = Interlocked.Read(ref _entriesInvalidated),
Errors = Interlocked.Read(ref _errors),
LastEventAt = _lastEventAt,
CollectedAt = _timeProvider.GetUtcNow()
};
}
/// <inheritdoc />
public async ValueTask DisposeAsync()
{
await StopAsync();
_cts?.Dispose();
}
private async Task ProcessEventsAsync(CancellationToken cancellationToken)
{
try
{
// Start from latest events
await foreach (var streamEvent in _eventStream.SubscribeAsync(StreamPosition.End, cancellationToken))
{
try
{
await HandleEventAsync(streamEvent.Event, cancellationToken);
Interlocked.Increment(ref _eventsProcessed);
_lastEventAt = _timeProvider.GetUtcNow();
}
catch (Exception ex)
{
Interlocked.Increment(ref _errors);
_logger.LogError(ex,
"Error processing FeedEpochAdvancedEvent {EventId} for feed {FeedId}",
streamEvent.Event.EventId,
streamEvent.Event.FeedId);
}
}
}
catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
{
// Normal shutdown
}
catch (Exception ex)
{
_logger.LogError(ex, "Fatal error in FeedEpochInvalidator event processing loop");
throw;
}
}
private async Task HandleEventAsync(FeedEpochAdvancedEvent @event, CancellationToken cancellationToken)
{
_logger.LogInformation(
"Processing feed epoch advancement: FeedId={FeedId}, PreviousEpoch={PreviousEpoch}, NewEpoch={NewEpoch}",
@event.FeedId,
@event.PreviousEpoch,
@event.NewEpoch);
// Invalidate entries with feed_epoch older than the new epoch
// The feed_epoch in cache entries is formatted as "feed:epoch" (e.g., "cve:2024-12-24T12:00:00Z")
var request = InvalidationRequest.ByFeedEpochOlderThan(
@event.NewEpoch,
$"Feed {FormatFeedEpoch(@event.FeedId, @event.NewEpoch)} advanced");
var result = await _provcacheService.InvalidateByAsync(request, cancellationToken);
Interlocked.Add(ref _entriesInvalidated, result.EntriesAffected);
_logger.LogInformation(
"Feed epoch advancement invalidated {Count} cache entries for feed {FeedId} epoch {NewEpoch}",
result.EntriesAffected,
@event.FeedId,
@event.NewEpoch);
// Record telemetry
ProvcacheTelemetry.RecordInvalidation("feed_epoch", result.EntriesAffected);
}
/// <summary>
/// Formats a feed epoch identifier.
/// </summary>
private static string FormatFeedEpoch(string feedId, string epoch)
{
return $"{feedId}:{epoch}";
}
}

View File

@@ -0,0 +1,27 @@
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache.Invalidation;
/// <summary>
/// Metrics and disposal half of <see cref="SignerSetInvalidator"/>.
/// </summary>
public sealed partial class SignerSetInvalidator
{
    /// <inheritdoc />
    public InvalidatorMetrics GetMetrics()
    {
        // Interlocked.Read gives a torn-read-safe snapshot of the 64-bit counters.
        return new InvalidatorMetrics
        {
            EventsProcessed = Interlocked.Read(ref _eventsProcessed),
            EntriesInvalidated = Interlocked.Read(ref _entriesInvalidated),
            Errors = Interlocked.Read(ref _errors),
            LastEventAt = _lastEventAt,
            CollectedAt = _timeProvider.GetUtcNow()
        };
    }
    /// <inheritdoc />
    public async ValueTask DisposeAsync()
    {
        // Stop the processing loop before releasing the cancellation source it uses.
        await StopAsync();
        _cts?.Dispose();
    }
}

View File

@@ -0,0 +1,75 @@
using Microsoft.Extensions.Logging;
using StellaOps.Messaging;
using StellaOps.Provcache.Events;
using System;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache.Invalidation;
/// <summary>
/// Event-processing half of <see cref="SignerSetInvalidator"/>: subscribes to signer
/// revocation events and invalidates cache entries bound to the revoked signer set.
/// </summary>
public sealed partial class SignerSetInvalidator
{
    // Long-running subscription loop; runs until the token is cancelled.
    private async Task ProcessEventsAsync(CancellationToken cancellationToken)
    {
        try
        {
            // Start from latest events (we don't want to replay old revocations).
            await foreach (var streamEvent in _eventStream.SubscribeAsync(StreamPosition.End, cancellationToken))
            {
                try
                {
                    await HandleEventAsync(streamEvent.Event, cancellationToken);
                    Interlocked.Increment(ref _eventsProcessed);
                    _lastEventAt = _timeProvider.GetUtcNow();
                }
                catch (Exception ex)
                {
                    // Per-event failures are counted and logged but do not stop the loop.
                    Interlocked.Increment(ref _errors);
                    _logger.LogError(
                        ex,
                        "Error processing SignerRevokedEvent {EventId} for signer {SignerHash}",
                        streamEvent.Event.EventId,
                        streamEvent.Event.SignerHash);
                }
            }
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Normal shutdown.
        }
        catch (Exception ex)
        {
            // Unexpected failure of the subscription itself: log and propagate.
            _logger.LogError(ex, "Fatal error in SignerSetInvalidator event processing loop");
            throw;
        }
    }
    // Applies one revocation event: invalidates all entries matching the signer hash
    // and records counters/telemetry.
    private async Task HandleEventAsync(SignerRevokedEvent @event, CancellationToken cancellationToken)
    {
        _logger.LogInformation(
            "Processing signer revocation: AnchorId={AnchorId}, KeyId={KeyId}, SignerHash={SignerHash}, Reason={Reason}",
            @event.AnchorId,
            @event.KeyId,
            @event.SignerHash,
            @event.Reason);
        // Create invalidation request for entries with this signer hash.
        var request = InvalidationRequest.BySignerSetHash(
            @event.SignerHash,
            $"Signer revoked: {@event.Reason ?? "unspecified"}");
        // Attribute the invalidation to the event's actor when available.
        request = request with { Actor = @event.Actor ?? "SignerSetInvalidator" };
        var result = await _provcacheService.InvalidateByAsync(request, cancellationToken);
        Interlocked.Add(ref _entriesInvalidated, result.EntriesAffected);
        _logger.LogInformation(
            "Signer revocation invalidated {Count} cache entries for signer {SignerHash}",
            result.EntriesAffected,
            @event.SignerHash);
        // Record telemetry.
        ProvcacheTelemetry.RecordInvalidation("signer_revocation", result.EntriesAffected);
    }
}

View File

@@ -2,6 +2,9 @@ using Microsoft.Extensions.Logging;
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
using StellaOps.Provcache.Events;
using System;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache.Invalidation;
@@ -9,7 +12,7 @@ namespace StellaOps.Provcache.Invalidation;
/// Invalidator that handles signer revocation events.
/// When a signer is revoked, all cache entries with matching signer_set_hash are invalidated.
/// </summary>
public sealed class SignerSetInvalidator : IProvcacheInvalidator
public sealed partial class SignerSetInvalidator : IProvcacheInvalidator
{
private readonly IEventStream<SignerRevokedEvent> _eventStream;
private readonly IProvcacheService _provcacheService;
@@ -20,7 +23,7 @@ public sealed class SignerSetInvalidator : IProvcacheInvalidator
private Task? _processingTask;
private bool _isRunning;
// Metrics
// Metrics.
private long _eventsProcessed;
private long _entriesInvalidated;
private long _errors;
@@ -57,7 +60,9 @@ public sealed class SignerSetInvalidator : IProvcacheInvalidator
_processingTask = ProcessEventsAsync(_cts.Token);
_isRunning = true;
_logger.LogInformation("SignerSetInvalidator started, subscribing to {StreamName}", SignerRevokedEvent.StreamName);
_logger.LogInformation(
"SignerSetInvalidator started, subscribing to {StreamName}",
SignerRevokedEvent.StreamName);
return Task.CompletedTask;
}
@@ -84,7 +89,7 @@ public sealed class SignerSetInvalidator : IProvcacheInvalidator
}
catch (OperationCanceledException)
{
// Expected during shutdown
// Expected during shutdown.
}
}
@@ -92,86 +97,4 @@ public sealed class SignerSetInvalidator : IProvcacheInvalidator
_logger.LogInformation("SignerSetInvalidator stopped");
}
/// <inheritdoc />
public InvalidatorMetrics GetMetrics()
{
return new InvalidatorMetrics
{
EventsProcessed = Interlocked.Read(ref _eventsProcessed),
EntriesInvalidated = Interlocked.Read(ref _entriesInvalidated),
Errors = Interlocked.Read(ref _errors),
LastEventAt = _lastEventAt,
CollectedAt = _timeProvider.GetUtcNow()
};
}
/// <inheritdoc />
public async ValueTask DisposeAsync()
{
await StopAsync();
_cts?.Dispose();
}
private async Task ProcessEventsAsync(CancellationToken cancellationToken)
{
try
{
// Start from latest events (we don't want to replay old revocations)
await foreach (var streamEvent in _eventStream.SubscribeAsync(StreamPosition.End, cancellationToken))
{
try
{
await HandleEventAsync(streamEvent.Event, cancellationToken);
Interlocked.Increment(ref _eventsProcessed);
_lastEventAt = _timeProvider.GetUtcNow();
}
catch (Exception ex)
{
Interlocked.Increment(ref _errors);
_logger.LogError(ex,
"Error processing SignerRevokedEvent {EventId} for signer {SignerHash}",
streamEvent.Event.EventId,
streamEvent.Event.SignerHash);
}
}
}
catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
{
// Normal shutdown
}
catch (Exception ex)
{
_logger.LogError(ex, "Fatal error in SignerSetInvalidator event processing loop");
throw;
}
}
private async Task HandleEventAsync(SignerRevokedEvent @event, CancellationToken cancellationToken)
{
_logger.LogInformation(
"Processing signer revocation: AnchorId={AnchorId}, KeyId={KeyId}, SignerHash={SignerHash}, Reason={Reason}",
@event.AnchorId,
@event.KeyId,
@event.SignerHash,
@event.Reason);
// Create invalidation request for entries with this signer hash
var request = InvalidationRequest.BySignerSetHash(
@event.SignerHash,
$"Signer revoked: {@event.Reason ?? "unspecified"}");
request = request with { Actor = @event.Actor ?? "SignerSetInvalidator" };
var result = await _provcacheService.InvalidateByAsync(request, cancellationToken);
Interlocked.Add(ref _entriesInvalidated, result.EntriesAffected);
_logger.LogInformation(
"Signer revocation invalidated {Count} cache entries for signer {SignerHash}",
result.EntriesAffected,
@event.SignerHash);
// Record telemetry
ProvcacheTelemetry.RecordInvalidation("signer_revocation", result.EntriesAffected);
}
}

View File

@@ -0,0 +1,67 @@
namespace StellaOps.Provcache;
/// <summary>
/// Request for cache invalidation by criteria. The static factory methods supply a
/// sensible default <see cref="Reason"/> for each invalidation type.
/// </summary>
public sealed record InvalidationRequest
{
    /// <summary>
    /// The invalidation type.
    /// </summary>
    public required InvalidationType Type { get; init; }
    /// <summary>
    /// The value to match for invalidation.
    /// </summary>
    public required string Value { get; init; }
    /// <summary>
    /// Reason for invalidation (for audit log).
    /// </summary>
    public string? Reason { get; init; }
    /// <summary>
    /// Actor who initiated the invalidation.
    /// </summary>
    public string? Actor { get; init; }
    /// <summary>
    /// Creates an invalidation request by policy hash.
    /// </summary>
    public static InvalidationRequest ByPolicyHash(string policyHash, string? reason = null) => new()
    {
        Type = InvalidationType.PolicyHash,
        Value = policyHash,
        Reason = reason ?? "policy-update"
    };
    /// <summary>
    /// Creates an invalidation request by signer set hash.
    /// </summary>
    public static InvalidationRequest BySignerSetHash(string signerSetHash, string? reason = null) => new()
    {
        Type = InvalidationType.SignerSetHash,
        Value = signerSetHash,
        Reason = reason ?? "signer-revocation"
    };
    /// <summary>
    /// Creates an invalidation request by feed epoch.
    /// </summary>
    public static InvalidationRequest ByFeedEpochOlderThan(string feedEpoch, string? reason = null) => new()
    {
        Type = InvalidationType.FeedEpochOlderThan,
        Value = feedEpoch,
        Reason = reason ?? "feed-update"
    };
    /// <summary>
    /// Creates an invalidation request by key pattern.
    /// </summary>
    public static InvalidationRequest ByPattern(string pattern, string? reason = null) => new()
    {
        Type = InvalidationType.Pattern,
        Value = pattern,
        Reason = reason ?? "pattern-invalidation"
    };
}

View File

@@ -0,0 +1,29 @@
using System;
namespace StellaOps.Provcache;
/// <summary>
/// Result of an invalidation operation.
/// </summary>
public sealed record InvalidationResult
{
    /// <summary>
    /// Number of entries invalidated.
    /// </summary>
    public required long EntriesAffected { get; init; }
    /// <summary>
    /// The invalidation request that was executed.
    /// </summary>
    public required InvalidationRequest Request { get; init; }
    /// <summary>
    /// Timestamp of the invalidation.
    /// </summary>
    public required DateTimeOffset Timestamp { get; init; }
    /// <summary>
    /// Whether the invalidation was logged for audit.
    /// </summary>
    public bool WasLogged { get; init; }
}

View File

@@ -0,0 +1,32 @@
namespace StellaOps.Provcache;
/// <summary>
/// Type of invalidation criteria carried by an invalidation request.
/// </summary>
public enum InvalidationType
{
    /// <summary>
    /// Invalidate by policy hash.
    /// </summary>
    PolicyHash,
    /// <summary>
    /// Invalidate by signer set hash.
    /// </summary>
    SignerSetHash,
    /// <summary>
    /// Invalidate entries with feed epoch older than specified.
    /// </summary>
    FeedEpochOlderThan,
    /// <summary>
    /// Invalidate by key pattern.
    /// </summary>
    Pattern,
    /// <summary>
    /// Invalidate expired entries.
    /// </summary>
    Expired
}

View File

@@ -0,0 +1,23 @@
namespace StellaOps.Provcache;
/// <summary>
/// Simplified chunk representation for lazy fetch interface.
/// Contains only the index and data for transport.
/// </summary>
public sealed record FetchedChunk
{
    /// <summary>
    /// Zero-based chunk index.
    /// </summary>
    public required int Index { get; init; }
    /// <summary>
    /// The chunk data.
    /// NOTE(review): the array is exposed directly, so callers can mutate it —
    /// confirm whether defensive copying is needed for this transport type.
    /// </summary>
    public required byte[] Data { get; init; }
    /// <summary>
    /// SHA256 hash of the data for verification.
    /// </summary>
    public required string Hash { get; init; }
}

View File

@@ -0,0 +1,17 @@
using Microsoft.Extensions.Logging;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
/// <summary>
/// Availability half of <see cref="FileChunkFetcher"/>.
/// </summary>
public sealed partial class FileChunkFetcher
{
    /// <inheritdoc />
    public Task<bool> IsAvailableAsync(CancellationToken cancellationToken = default)
    {
        // The fetcher is usable exactly when its base directory exists.
        bool exists = Directory.Exists(_basePath);
        _logger.LogDebug("File fetcher availability check: {IsAvailable}", exists);
        return Task.FromResult(exists);
    }
}

View File

@@ -0,0 +1,82 @@
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
/// <summary>
/// Export half of <see cref="FileChunkFetcher"/>: writes a manifest plus chunk files
/// under the base path for offline (sneakernet) transfer.
/// </summary>
public sealed partial class FileChunkFetcher
{
    /// <summary>
    /// Exports chunks to files for sneakernet transfer.
    /// Writes the manifest first, then one JSON file per chunk.
    /// </summary>
    /// <param name="proofRoot">The proof root.</param>
    /// <param name="manifest">The chunk manifest.</param>
    /// <param name="chunks">The chunks to export.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    public async Task ExportToFilesAsync(
        string proofRoot,
        ChunkManifest manifest,
        IEnumerable<FetchedChunk> chunks,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentNullException.ThrowIfNull(manifest);
        ArgumentNullException.ThrowIfNull(chunks);
        // Sanitize the proof root so it is safe to use as a directory name.
        var safeProofRoot = SanitizeForPath(proofRoot);
        var proofDir = Path.Combine(_basePath, safeProofRoot);
        Directory.CreateDirectory(proofDir);
        _logger.LogInformation("Exporting to {Directory}", proofDir);
        // Write manifest.
        var manifestPath = GetManifestPath(proofRoot);
        await using (var manifestStream = File.Create(manifestPath))
        {
            await JsonSerializer.SerializeAsync(manifestStream, manifest, _jsonOptions, cancellationToken);
        }
        _logger.LogDebug("Wrote manifest to {Path}", manifestPath);
        // Write chunks. Cancellation is honored between chunks, not mid-write.
        var count = 0;
        foreach (var chunk in chunks)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var chunkPath = GetChunkPath(proofRoot, chunk.Index);
            await using var chunkStream = File.Create(chunkPath);
            await JsonSerializer.SerializeAsync(chunkStream, chunk, _jsonOptions, cancellationToken);
            count++;
        }
        _logger.LogInformation("Exported {Count} chunks to {Directory}", count, proofDir);
    }
    /// <summary>
    /// Exports EvidenceChunks to files (converts to FetchedChunk format lazily,
    /// then delegates to <see cref="ExportToFilesAsync"/>).
    /// </summary>
    /// <param name="proofRoot">The proof root.</param>
    /// <param name="manifest">The chunk manifest.</param>
    /// <param name="chunks">The evidence chunks to export.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    public Task ExportEvidenceChunksToFilesAsync(
        string proofRoot,
        ChunkManifest manifest,
        IEnumerable<EvidenceChunk> chunks,
        CancellationToken cancellationToken = default)
    {
        var fetchedChunks = chunks.Select(c => new FetchedChunk
        {
            Index = c.ChunkIndex,
            Data = c.Blob,
            Hash = c.ChunkHash
        });
        return ExportToFilesAsync(proofRoot, manifest, fetchedChunks, cancellationToken);
    }
}

View File

@@ -0,0 +1,66 @@
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
/// <summary>
/// Streaming-fetch half of <see cref="FileChunkFetcher"/>: yields chunks one at a time
/// as they are read from disk.
/// </summary>
public sealed partial class FileChunkFetcher
{
    /// <inheritdoc />
    /// <remarks>
    /// Missing chunk files are skipped silently (no placeholder is yielded), so the
    /// caller may receive fewer chunks than requested.
    /// </remarks>
    public async IAsyncEnumerable<FetchedChunk> FetchChunksAsync(
        string proofRoot,
        IEnumerable<int> chunkIndices,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentNullException.ThrowIfNull(chunkIndices);
        // Materialize once: the sequence is both counted (for logging) and iterated.
        var indices = chunkIndices.ToList();
        _logger.LogInformation(
            "Fetching {Count} chunks from file system for proof root {ProofRoot}",
            indices.Count,
            proofRoot);
        foreach (var index in indices)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var chunk = await FetchChunkAsync(proofRoot, index, cancellationToken);
            if (chunk is not null)
            {
                yield return chunk;
            }
        }
    }
    /// <inheritdoc />
    /// <remarks>
    /// Computes the complement of <paramref name="existingIndices"/> against the
    /// manifest's index range [0, TotalChunks) and fetches only those.
    /// </remarks>
    public async IAsyncEnumerable<FetchedChunk> FetchRemainingChunksAsync(
        string proofRoot,
        ChunkManifest manifest,
        IReadOnlySet<int> existingIndices,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentNullException.ThrowIfNull(manifest);
        ArgumentNullException.ThrowIfNull(existingIndices);
        var missingIndices = Enumerable.Range(0, manifest.TotalChunks)
            .Where(i => !existingIndices.Contains(i))
            .ToList();
        _logger.LogInformation(
            "Fetching {MissingCount} remaining chunks from files (have {ExistingCount}/{TotalCount})",
            missingIndices.Count,
            existingIndices.Count,
            manifest.TotalChunks);
        await foreach (var chunk in FetchChunksAsync(proofRoot, missingIndices, cancellationToken))
        {
            yield return chunk;
        }
    }
}

View File

@@ -0,0 +1,46 @@
using Microsoft.Extensions.Logging;
using System;
using System.IO;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
/// <summary>
/// Single-chunk read half of <see cref="FileChunkFetcher"/>.
/// </summary>
public sealed partial class FileChunkFetcher
{
    /// <inheritdoc />
    /// <remarks>
    /// Returns null when the chunk file does not exist; rethrows read/deserialization
    /// errors after logging them.
    /// </remarks>
    public async Task<FetchedChunk?> FetchChunkAsync(
        string proofRoot,
        int chunkIndex,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentOutOfRangeException.ThrowIfNegative(chunkIndex);
        var chunkPath = GetChunkPath(proofRoot, chunkIndex);
        _logger.LogDebug("Looking for chunk at {Path}", chunkPath);
        if (!File.Exists(chunkPath))
        {
            // Absence is an expected outcome for lazy fetch, not an error.
            _logger.LogDebug("Chunk file not found: {Path}", chunkPath);
            return null;
        }
        try
        {
            await using var stream = File.OpenRead(chunkPath);
            var chunk = await JsonSerializer.DeserializeAsync<FetchedChunk>(stream, _jsonOptions, cancellationToken);
            _logger.LogDebug(
                "Successfully loaded chunk {Index}, {Bytes} bytes",
                chunkIndex,
                chunk?.Data.Length ?? 0);
            return chunk;
        }
        catch (Exception ex)
        {
            // A corrupt or unreadable file is a real failure — log and propagate.
            _logger.LogWarning(ex, "Error reading chunk file {Path}", chunkPath);
            throw;
        }
    }
}

View File

@@ -0,0 +1,42 @@
using Microsoft.Extensions.Logging;
using System;
using System.IO;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class FileChunkFetcher
{
/// <inheritdoc />
public async Task<ChunkManifest?> FetchManifestAsync(
string proofRoot,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
var manifestPath = GetManifestPath(proofRoot);
_logger.LogDebug("Looking for manifest at {Path}", manifestPath);
if (!File.Exists(manifestPath))
{
_logger.LogDebug("Manifest file not found: {Path}", manifestPath);
return null;
}
try
{
await using var stream = File.OpenRead(manifestPath);
return await JsonSerializer.DeserializeAsync<ChunkManifest>(
stream,
_jsonOptions,
cancellationToken);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Error reading manifest file {Path}", manifestPath);
throw;
}
}
}

View File

@@ -0,0 +1,41 @@
using System;
using System.IO;
using System.Security.Cryptography;
using System.Text;
namespace StellaOps.Provcache;
public sealed partial class FileChunkFetcher
{
    /// <summary>
    /// Gets the file path for a chunk (zero-padded to four digits for stable sort order).
    /// </summary>
    private string GetChunkPath(string proofRoot, int chunkIndex)
        => Path.Combine(_basePath, SanitizeForPath(proofRoot), $"chunk_{chunkIndex:D4}.json");

    /// <summary>
    /// Gets the file path for a manifest.
    /// </summary>
    private string GetManifestPath(string proofRoot)
        => Path.Combine(_basePath, SanitizeForPath(proofRoot), "manifest.json");

    /// <summary>
    /// Sanitizes a proof root for use in file paths by hashing it, so directory
    /// names are consistent regardless of characters in the input.
    /// </summary>
    private static string SanitizeForPath(string input)
    {
        var digestBytes = SHA256.HashData(Encoding.UTF8.GetBytes(input));
        var digestHex = Convert.ToHexString(digestBytes).ToLowerInvariant();
        // First 16 hex chars (64 bits) keeps directory names short.
        return digestHex[..16];
    }
}

View File

@@ -1,7 +1,6 @@
using Microsoft.Extensions.Logging;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System;
using System.IO;
using System.Text.Json;
namespace StellaOps.Provcache;
@@ -10,7 +9,7 @@ namespace StellaOps.Provcache;
/// File-based lazy evidence chunk fetcher for sneakernet mode.
/// Fetches chunks from a local directory (e.g., USB drive, NFS mount).
/// </summary>
public sealed class FileChunkFetcher : ILazyEvidenceFetcher
public sealed partial class FileChunkFetcher : ILazyEvidenceFetcher
{
private readonly string _basePath;
private readonly ILogger<FileChunkFetcher> _logger;
@@ -27,7 +26,7 @@ public sealed class FileChunkFetcher : ILazyEvidenceFetcher
public FileChunkFetcher(string basePath, ILogger<FileChunkFetcher> logger)
{
ArgumentException.ThrowIfNullOrWhiteSpace(basePath);
_basePath = Path.GetFullPath(basePath);
_logger = logger ?? throw new ArgumentNullException(nameof(logger));
_jsonOptions = new JsonSerializerOptions
@@ -38,221 +37,4 @@ public sealed class FileChunkFetcher : ILazyEvidenceFetcher
_logger.LogDebug("FileChunkFetcher initialized with base path: {BasePath}", _basePath);
}
/// <inheritdoc />
public async Task<FetchedChunk?> FetchChunkAsync(
string proofRoot,
int chunkIndex,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
ArgumentOutOfRangeException.ThrowIfNegative(chunkIndex);
var chunkPath = GetChunkPath(proofRoot, chunkIndex);
_logger.LogDebug("Looking for chunk at {Path}", chunkPath);
if (!File.Exists(chunkPath))
{
_logger.LogDebug("Chunk file not found: {Path}", chunkPath);
return null;
}
try
{
await using var stream = File.OpenRead(chunkPath);
var chunk = await JsonSerializer.DeserializeAsync<FetchedChunk>(stream, _jsonOptions, cancellationToken);
_logger.LogDebug("Successfully loaded chunk {Index}, {Bytes} bytes", chunkIndex, chunk?.Data.Length ?? 0);
return chunk;
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Error reading chunk file {Path}", chunkPath);
throw;
}
}
/// <inheritdoc />
public async IAsyncEnumerable<FetchedChunk> FetchChunksAsync(
string proofRoot,
IEnumerable<int> chunkIndices,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
ArgumentNullException.ThrowIfNull(chunkIndices);
var indices = chunkIndices.ToList();
_logger.LogInformation("Fetching {Count} chunks from file system for proof root {ProofRoot}", indices.Count, proofRoot);
foreach (var index in indices)
{
cancellationToken.ThrowIfCancellationRequested();
var chunk = await FetchChunkAsync(proofRoot, index, cancellationToken);
if (chunk is not null)
{
yield return chunk;
}
}
}
/// <inheritdoc />
public async IAsyncEnumerable<FetchedChunk> FetchRemainingChunksAsync(
string proofRoot,
ChunkManifest manifest,
IReadOnlySet<int> existingIndices,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
ArgumentNullException.ThrowIfNull(manifest);
ArgumentNullException.ThrowIfNull(existingIndices);
var missingIndices = Enumerable.Range(0, manifest.TotalChunks)
.Where(i => !existingIndices.Contains(i))
.ToList();
_logger.LogInformation(
"Fetching {MissingCount} remaining chunks from files (have {ExistingCount}/{TotalCount})",
missingIndices.Count, existingIndices.Count, manifest.TotalChunks);
await foreach (var chunk in FetchChunksAsync(proofRoot, missingIndices, cancellationToken))
{
yield return chunk;
}
}
/// <inheritdoc />
public Task<bool> IsAvailableAsync(CancellationToken cancellationToken = default)
{
var isAvailable = Directory.Exists(_basePath);
_logger.LogDebug("File fetcher availability check: {IsAvailable}", isAvailable);
return Task.FromResult(isAvailable);
}
/// <inheritdoc />
public async Task<ChunkManifest?> FetchManifestAsync(
string proofRoot,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
var manifestPath = GetManifestPath(proofRoot);
_logger.LogDebug("Looking for manifest at {Path}", manifestPath);
if (!File.Exists(manifestPath))
{
_logger.LogDebug("Manifest file not found: {Path}", manifestPath);
return null;
}
try
{
await using var stream = File.OpenRead(manifestPath);
return await JsonSerializer.DeserializeAsync<ChunkManifest>(stream, _jsonOptions, cancellationToken);
}
catch (Exception ex)
{
_logger.LogWarning(ex, "Error reading manifest file {Path}", manifestPath);
throw;
}
}
/// <summary>
/// Gets the file path for a chunk.
/// </summary>
private string GetChunkPath(string proofRoot, int chunkIndex)
{
// Sanitize proof root for use in file paths
var safeProofRoot = SanitizeForPath(proofRoot);
return Path.Combine(_basePath, safeProofRoot, $"chunk_{chunkIndex:D4}.json");
}
/// <summary>
/// Gets the file path for a manifest.
/// </summary>
private string GetManifestPath(string proofRoot)
{
var safeProofRoot = SanitizeForPath(proofRoot);
return Path.Combine(_basePath, safeProofRoot, "manifest.json");
}
/// <summary>
/// Sanitizes a proof root for use in file paths.
/// </summary>
private static string SanitizeForPath(string input)
{
// Use hash prefix to ensure consistent directory naming
var hash = Convert.ToHexString(SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(input))).ToLowerInvariant();
// Return first 16 chars of hash for reasonable directory names
return hash[..16];
}
/// <summary>
/// Exports chunks to files for sneakernet transfer: writes the manifest plus one
/// JSON file per chunk under a directory derived from the proof root hash.
/// </summary>
/// <param name="proofRoot">The proof root.</param>
/// <param name="manifest">The chunk manifest.</param>
/// <param name="chunks">The chunks to export.</param>
/// <param name="cancellationToken">Cancellation token.</param>
public async Task ExportToFilesAsync(
string proofRoot,
ChunkManifest manifest,
IEnumerable<FetchedChunk> chunks,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
ArgumentNullException.ThrowIfNull(manifest);
ArgumentNullException.ThrowIfNull(chunks);
var safeProofRoot = SanitizeForPath(proofRoot);
var proofDir = Path.Combine(_basePath, safeProofRoot);
// Idempotent: CreateDirectory succeeds if the directory already exists.
Directory.CreateDirectory(proofDir);
_logger.LogInformation("Exporting to {Directory}", proofDir);
// Write the manifest before any chunk files.
var manifestPath = GetManifestPath(proofRoot);
await using (var manifestStream = File.Create(manifestPath))
{
await JsonSerializer.SerializeAsync(manifestStream, manifest, _jsonOptions, cancellationToken);
}
_logger.LogDebug("Wrote manifest to {Path}", manifestPath);
// Write each chunk as its own JSON file, honoring cancellation between files.
var count = 0;
foreach (var chunk in chunks)
{
cancellationToken.ThrowIfCancellationRequested();
var chunkPath = GetChunkPath(proofRoot, chunk.Index);
await using var chunkStream = File.Create(chunkPath);
await JsonSerializer.SerializeAsync(chunkStream, chunk, _jsonOptions, cancellationToken);
count++;
}
_logger.LogInformation("Exported {Count} chunks to {Directory}", count, proofDir);
}
/// <summary>
/// Exports EvidenceChunks to files (converts to FetchedChunk format).
/// </summary>
/// <param name="proofRoot">The proof root.</param>
/// <param name="manifest">The chunk manifest.</param>
/// <param name="chunks">The evidence chunks to export.</param>
/// <param name="cancellationToken">Cancellation token.</param>
public Task ExportEvidenceChunksToFilesAsync(
string proofRoot,
ChunkManifest manifest,
IEnumerable<EvidenceChunk> chunks,
CancellationToken cancellationToken = default)
{
// Lazily project each EvidenceChunk into the transport-shaped FetchedChunk.
// NOTE(review): a null `chunks` throws from Select here, before the other
// arguments are validated inside ExportToFilesAsync.
var fetchedChunks = chunks.Select(c => new FetchedChunk
{
Index = c.ChunkIndex,
Data = c.Blob,
Hash = c.ChunkHash
});
return ExportToFilesAsync(proofRoot, manifest, fetchedChunks, cancellationToken);
}
}

View File

@@ -0,0 +1,65 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace StellaOps.Provcache;
public sealed partial class HttpChunkFetcher
{
    // Trims configured schemes, drops blanks, and falls back to the defaults when none remain.
    private static IReadOnlySet<string> NormalizeSchemes(IList<string> schemes)
    {
        var cleaned = new List<string>();
        foreach (var scheme in schemes)
        {
            if (!string.IsNullOrWhiteSpace(scheme))
            {
                cleaned.Add(scheme.Trim());
            }
        }

        var effective = cleaned.Count > 0 ? cleaned : (IEnumerable<string>)DefaultSchemes;
        return new HashSet<string>(effective, StringComparer.OrdinalIgnoreCase);
    }

    // Trims configured hosts; a literal "*" entry enables all hosts. With no entries
    // and no wildcard, the base address host becomes the sole allowlisted entry.
    private static IReadOnlyList<string> NormalizeHosts(
        IList<string> hosts,
        string baseHost,
        out bool allowAllHosts)
    {
        var cleaned = new List<string>();
        foreach (var host in hosts)
        {
            if (!string.IsNullOrWhiteSpace(host))
            {
                cleaned.Add(host.Trim());
            }
        }

        allowAllHosts = false;
        foreach (var host in cleaned)
        {
            if (string.Equals(host, "*", StringComparison.Ordinal))
            {
                allowAllHosts = true;
                break;
            }
        }

        if (!allowAllHosts && cleaned.Count == 0)
        {
            cleaned.Add(baseHost);
        }

        return cleaned;
    }

    // A host is allowed when it matches an entry exactly (case-insensitive) or when an
    // entry is a "*.domain" wildcard and the host ends with ".domain".
    private static bool IsHostAllowed(string host, IReadOnlyList<string> allowedHosts)
    {
        foreach (var candidate in allowedHosts)
        {
            if (string.Equals(candidate, host, StringComparison.OrdinalIgnoreCase))
            {
                return true;
            }

            if (!candidate.StartsWith("*.", StringComparison.Ordinal))
            {
                continue;
            }

            // "*.example.com" -> ".example.com": any subdomain suffix match is accepted.
            var dotSuffix = candidate[1..];
            if (host.EndsWith(dotSuffix, StringComparison.OrdinalIgnoreCase))
            {
                return true;
            }
        }

        return false;
    }
}

View File

@@ -0,0 +1,24 @@
using Microsoft.Extensions.Logging;
using System;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class HttpChunkFetcher
{
    /// <inheritdoc />
    /// <remarks>
    /// Probes the remote health endpoint. Transport and protocol failures (including
    /// client timeouts) are reported as "not available" rather than thrown, but
    /// caller-requested cancellation still propagates.
    /// </remarks>
    public async Task<bool> IsAvailableAsync(CancellationToken cancellationToken = default)
    {
        try
        {
            var response = await _httpClient.GetAsync("api/v1/health", cancellationToken);
            return response.IsSuccessStatusCode;
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Fix: the blanket catch below used to swallow cooperative cancellation and
            // report "unavailable"; rethrow so the caller observes the cancellation it
            // requested. HttpClient timeouts (cancellation without a signaled token)
            // still fall through to the general handler and return false.
            throw;
        }
        catch (Exception ex)
        {
            _logger.LogDebug(ex, "Health check failed");
            return false;
        }
    }
}

View File

@@ -0,0 +1,41 @@
using System;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Headers;
namespace StellaOps.Provcache;
public sealed partial class HttpChunkFetcher
{
    /// <summary>
    /// Creates a named client from the factory and pins it to the given base address.
    /// </summary>
    private static HttpClient CreateClient(IHttpClientFactory httpClientFactory, Uri baseUrl)
    {
        ArgumentNullException.ThrowIfNull(httpClientFactory);
        ArgumentNullException.ThrowIfNull(baseUrl);
        var client = httpClientFactory.CreateClient(HttpClientName);
        client.BaseAddress = baseUrl;
        return client;
    }

    /// <summary>
    /// Validates the configured timeout, tightens the client timeout when the
    /// client's is looser, and ensures JSON responses are accepted.
    /// </summary>
    /// <exception cref="InvalidOperationException">
    /// Thrown when the configured timeout is non-positive or infinite.
    /// </exception>
    private void ApplyClientConfiguration()
    {
        var timeout = _options.Timeout;
        // Fix: Timeout.InfiniteTimeSpan is System.Threading.Timeout, but this file's
        // using directives do not import System.Threading — fully qualify so the file
        // compiles without relying on implicit usings.
        if (timeout <= TimeSpan.Zero || timeout == System.Threading.Timeout.InfiniteTimeSpan)
        {
            throw new InvalidOperationException("Lazy fetch HTTP timeout must be a positive, non-infinite duration.");
        }

        // Only tighten: never extend a timeout the client already enforces.
        if (_httpClient.Timeout == System.Threading.Timeout.InfiniteTimeSpan || _httpClient.Timeout > timeout)
        {
            _httpClient.Timeout = timeout;
        }

        // Add Accept: application/json once; skip when an equivalent header already exists.
        if (!_httpClient.DefaultRequestHeaders.Accept.Any(header =>
            string.Equals(header.MediaType, "application/json", StringComparison.OrdinalIgnoreCase)))
        {
            _httpClient.DefaultRequestHeaders.Accept.Add(
                new MediaTypeWithQualityHeaderValue("application/json"));
        }
    }
}

View File

@@ -0,0 +1,66 @@
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Runtime.CompilerServices;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class HttpChunkFetcher
{
    /// <inheritdoc />
    public async IAsyncEnumerable<FetchedChunk> FetchChunksAsync(
        string proofRoot,
        IEnumerable<int> chunkIndices,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentNullException.ThrowIfNull(chunkIndices);

        // Materialize once so the count can be logged and the source is enumerated a single time.
        var requestedIndices = chunkIndices.ToList();

        _logger.LogInformation(
            "Fetching {Count} chunks for proof root {ProofRoot}",
            requestedIndices.Count,
            proofRoot);

        foreach (var requestedIndex in requestedIndices)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var fetched = await FetchChunkAsync(proofRoot, requestedIndex, cancellationToken);
            if (fetched is null)
            {
                // Chunks missing at the remote are skipped; callers reconcile via the manifest.
                continue;
            }

            yield return fetched;
        }
    }

    /// <inheritdoc />
    public async IAsyncEnumerable<FetchedChunk> FetchRemainingChunksAsync(
        string proofRoot,
        ChunkManifest manifest,
        IReadOnlySet<int> existingIndices,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentNullException.ThrowIfNull(manifest);
        ArgumentNullException.ThrowIfNull(existingIndices);

        // Everything in [0, TotalChunks) that the caller does not already hold.
        var missing = new List<int>();
        for (var i = 0; i < manifest.TotalChunks; i++)
        {
            if (!existingIndices.Contains(i))
            {
                missing.Add(i);
            }
        }

        _logger.LogInformation(
            "Fetching {MissingCount} remaining chunks (have {ExistingCount}/{TotalCount})",
            missing.Count,
            existingIndices.Count,
            manifest.TotalChunks);

        await foreach (var fetched in FetchChunksAsync(proofRoot, missing, cancellationToken))
        {
            yield return fetched;
        }
    }
}

View File

@@ -0,0 +1,51 @@
using Microsoft.Extensions.Logging;
using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class HttpChunkFetcher
{
    /// <inheritdoc />
    public async Task<FetchedChunk?> FetchChunkAsync(
        string proofRoot,
        int chunkIndex,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentOutOfRangeException.ThrowIfNegative(chunkIndex);

        var requestUrl = $"api/v1/evidence/{Uri.EscapeDataString(proofRoot)}/chunks/{chunkIndex}";
        _logger.LogDebug("Fetching chunk {Index} from {Url}", chunkIndex, requestUrl);

        try
        {
            var response = await _httpClient.GetAsync(requestUrl, cancellationToken);

            // 404 is an expected outcome (chunk not published), not an error.
            if (response.StatusCode == System.Net.HttpStatusCode.NotFound)
            {
                _logger.LogDebug("Chunk {Index} not found at remote", chunkIndex);
                return null;
            }

            response.EnsureSuccessStatusCode();

            var fetched = await response.Content.ReadFromJsonAsync<FetchedChunk>(
                _jsonOptions,
                cancellationToken);
            _logger.LogDebug(
                "Successfully fetched chunk {Index}, {Bytes} bytes",
                chunkIndex,
                fetched?.Data.Length ?? 0);
            return fetched;
        }
        catch (HttpRequestException ex)
        {
            // Log for diagnostics but let the caller decide retry/abort policy.
            _logger.LogWarning(ex, "HTTP error fetching chunk {Index}", chunkIndex);
            throw;
        }
    }
}

View File

@@ -0,0 +1,41 @@
using Microsoft.Extensions.Logging;
using System;
using System.Net.Http;
using System.Net.Http.Json;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class HttpChunkFetcher
{
    /// <inheritdoc />
    public async Task<ChunkManifest?> FetchManifestAsync(
        string proofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        var requestUrl = $"api/v1/evidence/{Uri.EscapeDataString(proofRoot)}/manifest";
        _logger.LogDebug("Fetching manifest from {Url}", requestUrl);

        try
        {
            var response = await _httpClient.GetAsync(requestUrl, cancellationToken);

            // 404 is an expected outcome (unknown proof root), not an error.
            if (response.StatusCode == System.Net.HttpStatusCode.NotFound)
            {
                _logger.LogDebug("Manifest not found for proof root {ProofRoot}", proofRoot);
                return null;
            }

            response.EnsureSuccessStatusCode();
            var manifest = await response.Content.ReadFromJsonAsync<ChunkManifest>(_jsonOptions, cancellationToken);
            return manifest;
        }
        catch (HttpRequestException ex)
        {
            // Log for diagnostics but let the caller decide retry/abort policy.
            _logger.LogWarning(ex, "HTTP error fetching manifest for {ProofRoot}", proofRoot);
            throw;
        }
    }
}

View File

@@ -0,0 +1,34 @@
using System;
namespace StellaOps.Provcache;
public sealed partial class HttpChunkFetcher
{
    /// <summary>
    /// Validates the configured base address: it must be absolute, carry no embedded
    /// credentials, include a host, and use an allowlisted scheme and host.
    /// Checks run in a fixed order, so the first violated rule determines the message.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown when any check fails.</exception>
    private void ValidateBaseAddress(Uri baseAddress)
    {
        if (!baseAddress.IsAbsoluteUri)
        {
            throw new InvalidOperationException("Lazy fetch base URL must be absolute.");
        }
        // Reject user:pass@host style credentials outright.
        if (!string.IsNullOrWhiteSpace(baseAddress.UserInfo))
        {
            throw new InvalidOperationException("Lazy fetch base URL must not include user info.");
        }
        if (string.IsNullOrWhiteSpace(baseAddress.Host))
        {
            throw new InvalidOperationException("Lazy fetch base URL must include a host.");
        }
        if (!_allowedSchemes.Contains(baseAddress.Scheme))
        {
            throw new InvalidOperationException($"Lazy fetch base URL scheme '{baseAddress.Scheme}' is not allowed.");
        }
        // _allowAllHosts is set when the configured host list contains "*".
        if (!_allowAllHosts && !IsHostAllowed(baseAddress.Host, _allowedHosts))
        {
            throw new InvalidOperationException($"Lazy fetch base URL host '{baseAddress.Host}' is not allowlisted.");
        }
    }
}

View File

@@ -1,8 +1,7 @@
using Microsoft.Extensions.Logging;
using System.Net.Http.Headers;
using System.Net.Http.Json;
using System.Runtime.CompilerServices;
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Text.Json;
namespace StellaOps.Provcache;
@@ -11,7 +10,7 @@ namespace StellaOps.Provcache;
/// HTTP-based lazy evidence chunk fetcher for connected mode.
/// Fetches chunks from a remote Stella API endpoint.
/// </summary>
public sealed class HttpChunkFetcher : ILazyEvidenceFetcher, IDisposable
public sealed partial class HttpChunkFetcher : ILazyEvidenceFetcher, IDisposable
{
/// <summary>
/// Named client for use with IHttpClientFactory.
@@ -81,251 +80,6 @@ public sealed class HttpChunkFetcher : ILazyEvidenceFetcher, IDisposable
ApplyClientConfiguration();
}
private static HttpClient CreateClient(IHttpClientFactory httpClientFactory, Uri baseUrl)
{
ArgumentNullException.ThrowIfNull(httpClientFactory);
ArgumentNullException.ThrowIfNull(baseUrl);
var client = httpClientFactory.CreateClient(HttpClientName);
client.BaseAddress = baseUrl;
return client;
}
private void ApplyClientConfiguration()
{
var timeout = _options.Timeout;
if (timeout <= TimeSpan.Zero || timeout == Timeout.InfiniteTimeSpan)
{
throw new InvalidOperationException("Lazy fetch HTTP timeout must be a positive, non-infinite duration.");
}
if (_httpClient.Timeout == Timeout.InfiniteTimeSpan || _httpClient.Timeout > timeout)
{
_httpClient.Timeout = timeout;
}
if (!_httpClient.DefaultRequestHeaders.Accept.Any(header =>
string.Equals(header.MediaType, "application/json", StringComparison.OrdinalIgnoreCase)))
{
_httpClient.DefaultRequestHeaders.Accept.Add(
new MediaTypeWithQualityHeaderValue("application/json"));
}
}
private void ValidateBaseAddress(Uri baseAddress)
{
if (!baseAddress.IsAbsoluteUri)
{
throw new InvalidOperationException("Lazy fetch base URL must be absolute.");
}
if (!string.IsNullOrWhiteSpace(baseAddress.UserInfo))
{
throw new InvalidOperationException("Lazy fetch base URL must not include user info.");
}
if (string.IsNullOrWhiteSpace(baseAddress.Host))
{
throw new InvalidOperationException("Lazy fetch base URL must include a host.");
}
if (!_allowedSchemes.Contains(baseAddress.Scheme))
{
throw new InvalidOperationException($"Lazy fetch base URL scheme '{baseAddress.Scheme}' is not allowed.");
}
if (!_allowAllHosts && !IsHostAllowed(baseAddress.Host, _allowedHosts))
{
throw new InvalidOperationException($"Lazy fetch base URL host '{baseAddress.Host}' is not allowlisted.");
}
}
private static IReadOnlySet<string> NormalizeSchemes(IList<string> schemes)
{
var normalized = schemes
.Where(s => !string.IsNullOrWhiteSpace(s))
.Select(s => s.Trim())
.ToArray();
if (normalized.Length == 0)
{
normalized = DefaultSchemes;
}
return new HashSet<string>(normalized, StringComparer.OrdinalIgnoreCase);
}
private static IReadOnlyList<string> NormalizeHosts(
IList<string> hosts,
string baseHost,
out bool allowAllHosts)
{
var normalized = hosts
.Where(h => !string.IsNullOrWhiteSpace(h))
.Select(h => h.Trim())
.ToList();
allowAllHosts = normalized.Any(h => string.Equals(h, "*", StringComparison.Ordinal));
if (!allowAllHosts && normalized.Count == 0)
{
normalized.Add(baseHost);
}
return normalized;
}
private static bool IsHostAllowed(string host, IReadOnlyList<string> allowedHosts)
{
foreach (var allowed in allowedHosts)
{
if (string.Equals(allowed, host, StringComparison.OrdinalIgnoreCase))
{
return true;
}
if (allowed.StartsWith("*.", StringComparison.Ordinal))
{
var suffix = allowed[1..];
if (host.EndsWith(suffix, StringComparison.OrdinalIgnoreCase))
{
return true;
}
}
}
return false;
}
/// <inheritdoc />
public async Task<FetchedChunk?> FetchChunkAsync(
string proofRoot,
int chunkIndex,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
ArgumentOutOfRangeException.ThrowIfNegative(chunkIndex);
var url = $"api/v1/evidence/{Uri.EscapeDataString(proofRoot)}/chunks/{chunkIndex}";
_logger.LogDebug("Fetching chunk {Index} from {Url}", chunkIndex, url);
try
{
var response = await _httpClient.GetAsync(url, cancellationToken);
if (response.StatusCode == System.Net.HttpStatusCode.NotFound)
{
_logger.LogDebug("Chunk {Index} not found at remote", chunkIndex);
return null;
}
response.EnsureSuccessStatusCode();
var chunk = await response.Content.ReadFromJsonAsync<FetchedChunk>(_jsonOptions, cancellationToken);
_logger.LogDebug("Successfully fetched chunk {Index}, {Bytes} bytes", chunkIndex, chunk?.Data.Length ?? 0);
return chunk;
}
catch (HttpRequestException ex)
{
_logger.LogWarning(ex, "HTTP error fetching chunk {Index}", chunkIndex);
throw;
}
}
/// <inheritdoc />
public async IAsyncEnumerable<FetchedChunk> FetchChunksAsync(
string proofRoot,
IEnumerable<int> chunkIndices,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
ArgumentNullException.ThrowIfNull(chunkIndices);
var indices = chunkIndices.ToList();
_logger.LogInformation("Fetching {Count} chunks for proof root {ProofRoot}", indices.Count, proofRoot);
foreach (var index in indices)
{
cancellationToken.ThrowIfCancellationRequested();
var chunk = await FetchChunkAsync(proofRoot, index, cancellationToken);
if (chunk is not null)
{
yield return chunk;
}
}
}
/// <inheritdoc />
public async IAsyncEnumerable<FetchedChunk> FetchRemainingChunksAsync(
string proofRoot,
ChunkManifest manifest,
IReadOnlySet<int> existingIndices,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
ArgumentNullException.ThrowIfNull(manifest);
ArgumentNullException.ThrowIfNull(existingIndices);
var missingIndices = Enumerable.Range(0, manifest.TotalChunks)
.Where(i => !existingIndices.Contains(i))
.ToList();
_logger.LogInformation(
"Fetching {MissingCount} remaining chunks (have {ExistingCount}/{TotalCount})",
missingIndices.Count, existingIndices.Count, manifest.TotalChunks);
await foreach (var chunk in FetchChunksAsync(proofRoot, missingIndices, cancellationToken))
{
yield return chunk;
}
}
/// <inheritdoc />
public async Task<bool> IsAvailableAsync(CancellationToken cancellationToken = default)
{
try
{
var response = await _httpClient.GetAsync("api/v1/health", cancellationToken);
return response.IsSuccessStatusCode;
}
catch (Exception ex)
{
_logger.LogDebug(ex, "Health check failed");
return false;
}
}
/// <inheritdoc />
public async Task<ChunkManifest?> FetchManifestAsync(
string proofRoot,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
var url = $"api/v1/evidence/{Uri.EscapeDataString(proofRoot)}/manifest";
_logger.LogDebug("Fetching manifest from {Url}", url);
try
{
var response = await _httpClient.GetAsync(url, cancellationToken);
if (response.StatusCode == System.Net.HttpStatusCode.NotFound)
{
_logger.LogDebug("Manifest not found for proof root {ProofRoot}", proofRoot);
return null;
}
response.EnsureSuccessStatusCode();
return await response.Content.ReadFromJsonAsync<ChunkManifest>(_jsonOptions, cancellationToken);
}
catch (HttpRequestException ex)
{
_logger.LogWarning(ex, "HTTP error fetching manifest for {ProofRoot}", proofRoot);
throw;
}
}
/// <inheritdoc />
public void Dispose()
{

View File

@@ -66,66 +66,3 @@ public interface ILazyEvidenceFetcher
string proofRoot,
CancellationToken cancellationToken = default);
}
/// <summary>
/// Simplified chunk representation for lazy fetch interface.
/// Contains only the index and data for transport.
/// </summary>
/// <remarks>
/// NOTE(review): as a record holding a <c>byte[]</c>, the compiler-generated value
/// equality compares <see cref="Data"/> by reference, not by content.
/// </remarks>
public sealed record FetchedChunk
{
/// <summary>
/// Zero-based chunk index.
/// </summary>
public required int Index { get; init; }
/// <summary>
/// The chunk data.
/// </summary>
public required byte[] Data { get; init; }
/// <summary>
/// SHA256 hash of the data for verification.
/// </summary>
public required string Hash { get; init; }
}
/// <summary>
/// Result of a lazy fetch operation.
/// </summary>
public sealed record LazyFetchResult
{
/// <summary>
/// Whether the fetch was successful.
/// </summary>
public required bool Success { get; init; }
/// <summary>
/// Number of chunks fetched.
/// </summary>
public required int ChunksFetched { get; init; }
/// <summary>
/// Total bytes fetched.
/// </summary>
public required long BytesFetched { get; init; }
/// <summary>
/// Number of chunks that failed verification.
/// </summary>
public required int ChunksFailedVerification { get; init; }
/// <summary>
/// Indices of failed chunks. Defaults to empty.
/// </summary>
public IReadOnlyList<int> FailedIndices { get; init; } = [];
/// <summary>
/// Any errors encountered. Defaults to empty.
/// </summary>
public IReadOnlyList<string> Errors { get; init; } = [];
/// <summary>
/// Time taken for the fetch operation. Defaults to <see cref="TimeSpan.Zero"/>.
/// </summary>
public TimeSpan Duration { get; init; }
}

View File

@@ -0,0 +1,27 @@
namespace StellaOps.Provcache;
/// <summary>
/// Options for lazy fetch operations.
/// </summary>
public sealed class LazyFetchOptions
{
/// <summary>
/// Whether to verify chunks on fetch. Defaults to <c>true</c>.
/// </summary>
public bool VerifyOnFetch { get; init; } = true;
/// <summary>
/// Whether to fail the entire operation on verification error.
/// When <c>false</c> (the default), failing chunks are skipped.
/// </summary>
public bool FailOnVerificationError { get; init; } = false;
/// <summary>
/// Batch size for storing chunks. Defaults to 100.
/// </summary>
public int BatchSize { get; init; } = 100;
/// <summary>
/// Maximum number of chunks to fetch (0 = unlimited).
/// </summary>
public int MaxChunksToFetch { get; init; } = 0;
}

View File

@@ -0,0 +1,97 @@
using Microsoft.Extensions.Logging;
using System;
using System.Diagnostics;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class LazyFetchOrchestrator
{
    /// <summary>
    /// Fetches remaining chunks for a proof root and stores them locally.
    /// Flow: availability probe, manifest resolution, diff against locally stored
    /// chunk indices, then fetch-and-store of the missing chunks.
    /// </summary>
    /// <param name="proofRoot">The proof root.</param>
    /// <param name="fetcher">The fetcher to use.</param>
    /// <param name="options">Fetch options; defaults apply when <c>null</c>.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The fetch result; never throws for operational failures — they are reported via the result.</returns>
    public async Task<LazyFetchResult> FetchAndStoreAsync(
        string proofRoot,
        ILazyEvidenceFetcher fetcher,
        LazyFetchOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentNullException.ThrowIfNull(fetcher);
        options ??= new LazyFetchOptions();
        var stopwatch = Stopwatch.StartNew();
        // Mutable accumulator; snapshotted into the result by BuildResult.
        var state = new LazyFetchRunState();
        _logger.LogInformation(
            "Starting lazy fetch for {ProofRoot} using {FetcherType} fetcher",
            proofRoot,
            fetcher.FetcherType);
        try
        {
            // Bail out early when the transport (remote endpoint, mounted directory, ...) is unreachable.
            if (!await fetcher.IsAvailableAsync(cancellationToken))
            {
                _logger.LogWarning("Fetcher {FetcherType} is not available", fetcher.FetcherType);
                state.Errors.Add($"Fetcher {fetcher.FetcherType} is not available");
                return BuildResult(state, success: false, stopwatch.Elapsed);
            }
            // Manifest comes from the local repository when cached, otherwise from the fetcher.
            var manifest = await ResolveManifestAsync(proofRoot, fetcher, cancellationToken);
            if (manifest is null)
            {
                state.Errors.Add($"No manifest found for proof root {proofRoot}");
                return BuildResult(state, success: false, stopwatch.Elapsed);
            }
            var existingIndices = await GetExistingIndicesAsync(proofRoot, cancellationToken);
            var missingCount = manifest.TotalChunks - existingIndices.Count;
            _logger.LogInformation(
                "Have {Existing}/{Total} chunks, need to fetch {Missing}",
                existingIndices.Count,
                manifest.TotalChunks,
                missingCount);
            if (missingCount == 0)
            {
                _logger.LogInformation("All chunks already present, nothing to fetch");
                return BuildResult(state, success: true, stopwatch.Elapsed);
            }
            await FetchAndStoreChunksAsync(
                proofRoot,
                fetcher,
                manifest,
                existingIndices,
                options,
                state,
                cancellationToken);
            stopwatch.Stop();
            // Verification failures only fail the run when the options demand it.
            var success = state.ChunksFailedVerification == 0 || !options.FailOnVerificationError;
            _logger.LogInformation(
                "Lazy fetch complete: {Fetched} chunks, {Bytes} bytes, {Failed} verification failures in {Duration}",
                state.ChunksFetched,
                state.BytesFetched,
                state.ChunksFailedVerification,
                stopwatch.Elapsed);
            return BuildResult(state, success, stopwatch.Elapsed);
        }
        catch (Exception ex)
        {
            // NOTE(review): OperationCanceledException is also converted into a failed
            // result here rather than propagated — confirm that is intentional.
            _logger.LogError(ex, "Error during lazy fetch for {ProofRoot}", proofRoot);
            state.Errors.Add(ex.Message);
            return BuildResult(state, success: false, stopwatch.Elapsed);
        }
    }
}

View File

@@ -0,0 +1,18 @@
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class LazyFetchOrchestrator
{
    // Collects the indices of all chunks already stored locally for this proof root.
    private async Task<HashSet<int>> GetExistingIndicesAsync(
        string proofRoot,
        CancellationToken cancellationToken)
    {
        var storedChunks = await _repository.GetChunksAsync(proofRoot, cancellationToken);
        var indices = new HashSet<int>();
        foreach (var storedChunk in storedChunks)
        {
            indices.Add(storedChunk.ChunkIndex);
        }
        return indices;
    }
}

View File

@@ -0,0 +1,28 @@
using Microsoft.Extensions.Logging;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class LazyFetchOrchestrator
{
    // Prefers a locally stored manifest; falls back to fetching one from the remote source.
    private async Task<ChunkManifest?> ResolveManifestAsync(
        string proofRoot,
        ILazyEvidenceFetcher fetcher,
        CancellationToken cancellationToken)
    {
        var localManifest = await _repository.GetManifestAsync(proofRoot, cancellationToken);
        if (localManifest is not null)
        {
            return localManifest;
        }

        var remoteManifest = await fetcher.FetchManifestAsync(proofRoot, cancellationToken);
        if (remoteManifest is null)
        {
            _logger.LogWarning("No manifest found for {ProofRoot}", proofRoot);
        }

        return remoteManifest;
    }
}

View File

@@ -0,0 +1,23 @@
using System;
namespace StellaOps.Provcache;
public sealed partial class LazyFetchOrchestrator
{
    /// <summary>
    /// Projects the mutable run state into an immutable <see cref="LazyFetchResult"/>
    /// with the given success flag and elapsed duration.
    /// </summary>
    private static LazyFetchResult BuildResult(
        LazyFetchRunState state,
        bool success,
        TimeSpan duration) => new()
    {
        Success = success,
        ChunksFetched = state.ChunksFetched,
        BytesFetched = state.BytesFetched,
        ChunksFailedVerification = state.ChunksFailedVerification,
        FailedIndices = state.FailedIndices,
        Errors = state.Errors,
        Duration = duration
    };
}

View File

@@ -0,0 +1,15 @@
using System.Collections.Generic;
namespace StellaOps.Provcache;
public sealed partial class LazyFetchOrchestrator
{
    /// <summary>
    /// Mutable accumulator for a single lazy-fetch run; its counters and lists
    /// are updated as chunks are processed and later copied into an immutable
    /// <see cref="LazyFetchResult"/>.
    /// </summary>
    private sealed class LazyFetchRunState
    {
        // Error messages collected across the run (verification failures, caught exceptions).
        public List<string> Errors { get; } = [];
        // Indices of chunks that failed verification.
        public List<int> FailedIndices { get; } = [];
        // Number of chunks successfully fetched (after optional verification).
        public int ChunksFetched { get; set; }
        // Total payload bytes across all fetched chunks.
        public long BytesFetched { get; set; }
        // Number of chunks rejected by verification.
        public int ChunksFailedVerification { get; set; }
    }
}

View File

@@ -0,0 +1,94 @@
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Provcache;
public sealed partial class LazyFetchOrchestrator
{
    /// <summary>
    /// Streams the chunks missing from local storage (those not in
    /// <paramref name="existingIndices"/>) from the fetcher, optionally verifies
    /// each one, and persists them in batches. Progress and failures are
    /// accumulated on <paramref name="state"/>.
    /// </summary>
    /// <param name="proofRoot">Proof root whose chunks are being fetched.</param>
    /// <param name="fetcher">Source for the missing chunks.</param>
    /// <param name="manifest">Manifest used by chunk verification.</param>
    /// <param name="existingIndices">Chunk indices already present locally.</param>
    /// <param name="options">Controls verification, batch size, and the fetch limit.</param>
    /// <param name="state">Mutable run state updated as chunks are processed.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    private async Task FetchAndStoreChunksAsync(
        string proofRoot,
        ILazyEvidenceFetcher fetcher,
        ChunkManifest manifest,
        IReadOnlySet<int> existingIndices,
        LazyFetchOptions options,
        LazyFetchRunState state,
        CancellationToken cancellationToken)
    {
        var chunksToStore = new List<EvidenceChunk>();
        // Single timestamp for the whole run so every stored chunk shares CreatedAt.
        var now = _timeProvider.GetUtcNow();
        await foreach (var fetchedChunk in fetcher.FetchRemainingChunksAsync(
            proofRoot,
            manifest,
            existingIndices,
            cancellationToken))
        {
            if (options.VerifyOnFetch && !VerifyChunk(fetchedChunk, manifest))
            {
                state.ChunksFailedVerification++;
                state.FailedIndices.Add(fetchedChunk.Index);
                state.Errors.Add($"Chunk {fetchedChunk.Index} failed verification");
                if (options.FailOnVerificationError)
                {
                    // NOTE(review): on abort, chunks already buffered in chunksToStore
                    // are still persisted by the final flush below — confirm intended.
                    _logger.LogError("Chunk {Index} failed verification, aborting", fetchedChunk.Index);
                    break;
                }
                _logger.LogWarning("Chunk {Index} failed verification, skipping", fetchedChunk.Index);
                continue;
            }
            chunksToStore.Add(CreateEvidenceChunk(proofRoot, fetchedChunk, now));
            state.BytesFetched += fetchedChunk.Data.Length;
            state.ChunksFetched++;
            // Persist in batches to reduce repository round-trips.
            if (chunksToStore.Count >= options.BatchSize)
            {
                await StoreChunkBatchAsync(proofRoot, chunksToStore, cancellationToken);
                chunksToStore.Clear();
            }
            // MaxChunksToFetch == 0 means unlimited.
            if (options.MaxChunksToFetch > 0 && state.ChunksFetched >= options.MaxChunksToFetch)
            {
                _logger.LogInformation("Reached max chunks limit ({Max})", options.MaxChunksToFetch);
                break;
            }
        }
        // Flush the final partial batch, if any.
        if (chunksToStore.Count > 0)
        {
            await StoreChunkBatchAsync(proofRoot, chunksToStore, cancellationToken);
        }
    }

    /// <summary>
    /// Persists one batch of chunks for the proof root.
    /// </summary>
    private async Task StoreChunkBatchAsync(
        string proofRoot,
        List<EvidenceChunk> chunksToStore,
        CancellationToken cancellationToken)
    {
        await _repository.StoreChunksAsync(proofRoot, chunksToStore, cancellationToken);
        _logger.LogDebug("Stored batch of {Count} chunks", chunksToStore.Count);
    }

    /// <summary>
    /// Maps a fetched chunk to a storable <see cref="EvidenceChunk"/> with a
    /// freshly generated chunk id.
    /// </summary>
    private EvidenceChunk CreateEvidenceChunk(
        string proofRoot,
        FetchedChunk fetchedChunk,
        DateTimeOffset createdAt)
    {
        return new EvidenceChunk
        {
            ChunkId = _guidProvider.NewGuid(),
            ProofRoot = proofRoot,
            ChunkIndex = fetchedChunk.Index,
            ChunkHash = fetchedChunk.Hash,
            Blob = fetchedChunk.Data,
            BlobSize = fetchedChunk.Data.Length,
            // NOTE(review): content type is hard-coded even though the manifest
            // carries per-chunk ContentType metadata — confirm this is intended.
            ContentType = "application/octet-stream",
            CreatedAt = createdAt
        };
    }
}

View File

@@ -0,0 +1,51 @@
using Microsoft.Extensions.Logging;
using System;
using System.Security.Cryptography;
namespace StellaOps.Provcache;
public sealed partial class LazyFetchOrchestrator
{
    /// <summary>
    /// Verifies a fetched chunk: index must be within the manifest's bounds, the
    /// payload's SHA-256 must match the manifest-recorded hash (when metadata is
    /// present), and it must also match the chunk's own claimed hash. Returns
    /// false (after logging a warning) on the first failed check.
    /// </summary>
    private bool VerifyChunk(FetchedChunk chunk, ChunkManifest manifest)
    {
        // Check index bounds.
        if (chunk.Index < 0 || chunk.Index >= manifest.TotalChunks)
        {
            _logger.LogWarning(
                "Chunk index {Index} out of bounds (max {Max})",
                chunk.Index,
                manifest.TotalChunks - 1);
            return false;
        }

        // Hash the payload once; both checks below compare against this digest
        // (the original computed the identical SHA-256 twice).
        var actualHash = Convert.ToHexString(SHA256.HashData(chunk.Data)).ToLowerInvariant();

        // Verify hash against manifest metadata.
        if (manifest.Chunks is not null && chunk.Index < manifest.Chunks.Count)
        {
            var expectedHash = manifest.Chunks[chunk.Index].Hash;
            if (!string.Equals(actualHash, expectedHash, StringComparison.OrdinalIgnoreCase))
            {
                _logger.LogWarning(
                    "Chunk {Index} hash mismatch: expected {Expected}, got {Actual}",
                    chunk.Index, expectedHash, actualHash);
                return false;
            }
        }

        // Also verify the chunk's own hash claim.
        if (!string.Equals(actualHash, chunk.Hash, StringComparison.OrdinalIgnoreCase))
        {
            _logger.LogWarning(
                "Chunk {Index} self-hash mismatch: claimed {Claimed}, actual {Actual}",
                chunk.Index, chunk.Hash, actualHash);
            return false;
        }

        return true;
    }
}

View File

@@ -1,8 +1,6 @@
using Microsoft.Extensions.Logging;
using StellaOps.Determinism;
using System.Diagnostics;
using System.Security.Cryptography;
using System;
namespace StellaOps.Provcache;
@@ -10,7 +8,7 @@ namespace StellaOps.Provcache;
/// Orchestrates lazy evidence fetching with verification.
/// Coordinates between fetchers and the local evidence store.
/// </summary>
public sealed class LazyFetchOrchestrator
public sealed partial class LazyFetchOrchestrator
{
private readonly IEvidenceChunkRepository _repository;
private readonly ILogger<LazyFetchOrchestrator> _logger;
@@ -35,268 +33,4 @@ public sealed class LazyFetchOrchestrator
_timeProvider = timeProvider ?? TimeProvider.System;
_guidProvider = guidProvider ?? SystemGuidProvider.Instance;
}
/// <summary>
/// Fetches remaining chunks for a proof root and stores them locally.
/// </summary>
/// <param name="proofRoot">The proof root.</param>
/// <param name="fetcher">The fetcher to use.</param>
/// <param name="options">Fetch options.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>The fetch result.</returns>
public async Task<LazyFetchResult> FetchAndStoreAsync(
string proofRoot,
ILazyEvidenceFetcher fetcher,
LazyFetchOptions? options = null,
CancellationToken cancellationToken = default)
{
ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
ArgumentNullException.ThrowIfNull(fetcher);
options ??= new LazyFetchOptions();
var stopwatch = Stopwatch.StartNew();
var errors = new List<string>();
var failedIndices = new List<int>();
var chunksFetched = 0;
long bytesFetched = 0;
var chunksFailedVerification = 0;
_logger.LogInformation(
"Starting lazy fetch for {ProofRoot} using {FetcherType} fetcher",
proofRoot, fetcher.FetcherType);
try
{
// Check fetcher availability
if (!await fetcher.IsAvailableAsync(cancellationToken))
{
_logger.LogWarning("Fetcher {FetcherType} is not available", fetcher.FetcherType);
return new LazyFetchResult
{
Success = false,
ChunksFetched = 0,
BytesFetched = 0,
ChunksFailedVerification = 0,
Errors = [$"Fetcher {fetcher.FetcherType} is not available"],
Duration = stopwatch.Elapsed
};
}
// Get local manifest
var localManifest = await _repository.GetManifestAsync(proofRoot, cancellationToken);
if (localManifest is null)
{
// Try to fetch manifest from remote
localManifest = await fetcher.FetchManifestAsync(proofRoot, cancellationToken);
if (localManifest is null)
{
_logger.LogWarning("No manifest found for {ProofRoot}", proofRoot);
return new LazyFetchResult
{
Success = false,
ChunksFetched = 0,
BytesFetched = 0,
ChunksFailedVerification = 0,
Errors = [$"No manifest found for proof root {proofRoot}"],
Duration = stopwatch.Elapsed
};
}
}
// Get existing chunks
var existingChunks = (await _repository.GetChunksAsync(proofRoot, cancellationToken))
.Select(c => c.ChunkIndex)
.ToHashSet();
var totalChunks = localManifest.TotalChunks;
var missingCount = totalChunks - existingChunks.Count;
_logger.LogInformation(
"Have {Existing}/{Total} chunks, need to fetch {Missing}",
existingChunks.Count, totalChunks, missingCount);
if (missingCount == 0)
{
_logger.LogInformation("All chunks already present, nothing to fetch");
return new LazyFetchResult
{
Success = true,
ChunksFetched = 0,
BytesFetched = 0,
ChunksFailedVerification = 0,
Duration = stopwatch.Elapsed
};
}
// Fetch remaining chunks
var chunksToStore = new List<EvidenceChunk>();
var now = _timeProvider.GetUtcNow();
await foreach (var fetchedChunk in fetcher.FetchRemainingChunksAsync(
proofRoot, localManifest, existingChunks, cancellationToken))
{
// Verify chunk if enabled
if (options.VerifyOnFetch)
{
var isValid = VerifyChunk(fetchedChunk, localManifest);
if (!isValid)
{
chunksFailedVerification++;
failedIndices.Add(fetchedChunk.Index);
errors.Add($"Chunk {fetchedChunk.Index} failed verification");
if (options.FailOnVerificationError)
{
_logger.LogError("Chunk {Index} failed verification, aborting", fetchedChunk.Index);
break;
}
_logger.LogWarning("Chunk {Index} failed verification, skipping", fetchedChunk.Index);
continue;
}
}
// Convert FetchedChunk to EvidenceChunk for storage
var evidenceChunk = new EvidenceChunk
{
ChunkId = _guidProvider.NewGuid(),
ProofRoot = proofRoot,
ChunkIndex = fetchedChunk.Index,
ChunkHash = fetchedChunk.Hash,
Blob = fetchedChunk.Data,
BlobSize = fetchedChunk.Data.Length,
ContentType = "application/octet-stream",
CreatedAt = now
};
chunksToStore.Add(evidenceChunk);
bytesFetched += fetchedChunk.Data.Length;
chunksFetched++;
// Batch store to reduce database round-trips
if (chunksToStore.Count >= options.BatchSize)
{
await _repository.StoreChunksAsync(proofRoot, chunksToStore, cancellationToken);
_logger.LogDebug("Stored batch of {Count} chunks", chunksToStore.Count);
chunksToStore.Clear();
}
// Check max chunks limit
if (options.MaxChunksToFetch > 0 && chunksFetched >= options.MaxChunksToFetch)
{
_logger.LogInformation("Reached max chunks limit ({Max})", options.MaxChunksToFetch);
break;
}
}
// Store any remaining chunks
if (chunksToStore.Count > 0)
{
await _repository.StoreChunksAsync(proofRoot, chunksToStore, cancellationToken);
_logger.LogDebug("Stored final batch of {Count} chunks", chunksToStore.Count);
}
stopwatch.Stop();
var success = chunksFailedVerification == 0 || !options.FailOnVerificationError;
_logger.LogInformation(
"Lazy fetch complete: {Fetched} chunks, {Bytes} bytes, {Failed} verification failures in {Duration}",
chunksFetched, bytesFetched, chunksFailedVerification, stopwatch.Elapsed);
return new LazyFetchResult
{
Success = success,
ChunksFetched = chunksFetched,
BytesFetched = bytesFetched,
ChunksFailedVerification = chunksFailedVerification,
FailedIndices = failedIndices,
Errors = errors,
Duration = stopwatch.Elapsed
};
}
catch (Exception ex)
{
_logger.LogError(ex, "Error during lazy fetch for {ProofRoot}", proofRoot);
errors.Add(ex.Message);
return new LazyFetchResult
{
Success = false,
ChunksFetched = chunksFetched,
BytesFetched = bytesFetched,
ChunksFailedVerification = chunksFailedVerification,
FailedIndices = failedIndices,
Errors = errors,
Duration = stopwatch.Elapsed
};
}
}
/// <summary>
/// Verifies a chunk against the manifest.
/// </summary>
private bool VerifyChunk(FetchedChunk chunk, ChunkManifest manifest)
{
// Check index bounds
if (chunk.Index < 0 || chunk.Index >= manifest.TotalChunks)
{
_logger.LogWarning("Chunk index {Index} out of bounds (max {Max})", chunk.Index, manifest.TotalChunks - 1);
return false;
}
// Verify hash against manifest metadata
if (manifest.Chunks is not null && chunk.Index < manifest.Chunks.Count)
{
var expectedHash = manifest.Chunks[chunk.Index].Hash;
var actualHash = Convert.ToHexString(SHA256.HashData(chunk.Data)).ToLowerInvariant();
if (!string.Equals(actualHash, expectedHash, StringComparison.OrdinalIgnoreCase))
{
_logger.LogWarning(
"Chunk {Index} hash mismatch: expected {Expected}, got {Actual}",
chunk.Index, expectedHash, actualHash);
return false;
}
}
// Also verify the chunk's own hash claim
var claimedHash = Convert.ToHexString(SHA256.HashData(chunk.Data)).ToLowerInvariant();
if (!string.Equals(claimedHash, chunk.Hash, StringComparison.OrdinalIgnoreCase))
{
_logger.LogWarning(
"Chunk {Index} self-hash mismatch: claimed {Claimed}, actual {Actual}",
chunk.Index, chunk.Hash, claimedHash);
return false;
}
return true;
}
}
/// <summary>
/// Options for lazy fetch operations.
/// </summary>
public sealed class LazyFetchOptions
{
    /// <summary>
    /// Whether to verify chunks on fetch. When false, fetched chunks are stored
    /// without hash or bounds checks.
    /// </summary>
    public bool VerifyOnFetch { get; init; } = true;
    /// <summary>
    /// Whether to fail the entire operation on verification error. When false,
    /// chunks that fail verification are skipped and the fetch continues.
    /// </summary>
    public bool FailOnVerificationError { get; init; } = false;
    /// <summary>
    /// Batch size for storing chunks; fetched chunks are persisted in groups of
    /// this size to reduce repository round-trips.
    /// </summary>
    public int BatchSize { get; init; } = 100;
    /// <summary>
    /// Maximum number of chunks to fetch (0 = unlimited).
    /// </summary>
    public int MaxChunksToFetch { get; init; } = 0;
}

View File

@@ -0,0 +1,45 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Provcache;
/// <summary>
/// Result of a lazy fetch operation.
/// </summary>
public sealed record LazyFetchResult
{
    /// <summary>
    /// Whether the fetch was successful. False when the fetcher or manifest was
    /// unavailable, an exception occurred, or chunks failed verification while
    /// fail-on-verification-error was enabled.
    /// </summary>
    public required bool Success { get; init; }
    /// <summary>
    /// Number of chunks fetched.
    /// </summary>
    public required int ChunksFetched { get; init; }
    /// <summary>
    /// Total bytes fetched (sum of fetched chunk payload sizes).
    /// </summary>
    public required long BytesFetched { get; init; }
    /// <summary>
    /// Number of chunks that failed verification.
    /// </summary>
    public required int ChunksFailedVerification { get; init; }
    /// <summary>
    /// Indices of failed chunks. Empty when all fetched chunks verified.
    /// </summary>
    public IReadOnlyList<int> FailedIndices { get; init; } = [];
    /// <summary>
    /// Any errors encountered (verification failures, exception messages).
    /// </summary>
    public IReadOnlyList<string> Errors { get; init; } = [];
    /// <summary>
    /// Time taken for the fetch operation.
    /// </summary>
    public TimeSpan Duration { get; init; }
}

View File

@@ -59,114 +59,3 @@ public sealed record DecisionDigest
/// </summary>
public TrustScoreBreakdown? TrustScoreBreakdown { get; init; }
}
/// <summary>
/// Breakdown of trust score by component, showing contribution from each evidence type.
/// </summary>
public sealed record TrustScoreBreakdown
{
/// <summary>
/// Reachability evidence contribution (weight: 25%).
/// Based on call graph / static analysis evidence.
/// </summary>
public required TrustScoreComponent Reachability { get; init; }
/// <summary>
/// SBOM completeness contribution (weight: 20%).
/// Based on package coverage and license data.
/// </summary>
public required TrustScoreComponent SbomCompleteness { get; init; }
/// <summary>
/// VEX statement coverage contribution (weight: 20%).
/// Based on vendor statements and OpenVEX coverage.
/// </summary>
public required TrustScoreComponent VexCoverage { get; init; }
/// <summary>
/// Policy freshness contribution (weight: 15%).
/// Based on last policy update timestamp.
/// </summary>
public required TrustScoreComponent PolicyFreshness { get; init; }
/// <summary>
/// Signer trust contribution (weight: 20%).
/// Based on signer reputation and key age.
/// </summary>
public required TrustScoreComponent SignerTrust { get; init; }
/// <summary>
/// Computes weighted total score from all components.
/// </summary>
public int ComputeTotal()
{
return (int)Math.Round(
Reachability.Score * Reachability.Weight +
SbomCompleteness.Score * SbomCompleteness.Weight +
VexCoverage.Score * VexCoverage.Weight +
PolicyFreshness.Score * PolicyFreshness.Weight +
SignerTrust.Score * SignerTrust.Weight);
}
/// <summary>
/// Creates a default breakdown with standard weights.
/// </summary>
public static TrustScoreBreakdown CreateDefault(
int reachabilityScore = 0,
int sbomScore = 0,
int vexScore = 0,
int policyScore = 0,
int signerScore = 0) => new()
{
Reachability = new TrustScoreComponent { Score = reachabilityScore, Weight = 0.25m },
SbomCompleteness = new TrustScoreComponent { Score = sbomScore, Weight = 0.20m },
VexCoverage = new TrustScoreComponent { Score = vexScore, Weight = 0.20m },
PolicyFreshness = new TrustScoreComponent { Score = policyScore, Weight = 0.15m },
SignerTrust = new TrustScoreComponent { Score = signerScore, Weight = 0.20m }
};
}
/// <summary>
/// Individual component of trust score with its score and weight.
/// </summary>
public sealed record TrustScoreComponent
{
/// <summary>
/// Component score (0-100).
/// </summary>
public required int Score { get; init; }
/// <summary>
/// Weight of this component in the total score (0.0-1.0).
/// </summary>
public required decimal Weight { get; init; }
/// <summary>
/// Weighted contribution to total score.
/// </summary>
public decimal Contribution => Score * Weight;
}
/// <summary>
/// Identifiers needed to replay an evaluation with the same inputs.
/// </summary>
public sealed record ReplaySeed
{
/// <summary>
/// Advisory feed identifiers used in evaluation.
/// Example: ["cve-2024", "ghsa-2024", "oval-debian"]
/// </summary>
public required IReadOnlyList<string> FeedIds { get; init; } = [];
/// <summary>
/// Policy rule identifiers used in evaluation.
/// Example: ["default-policy-v2", "org-exceptions"]
/// </summary>
public required IReadOnlyList<string> RuleIds { get; init; } = [];
/// <summary>
/// Optional: Frozen epoch timestamp for deterministic replay.
/// If set, evaluation should use this timestamp instead of current time.
/// </summary>
public DateTimeOffset? FrozenEpoch { get; init; }
}

View File

@@ -41,205 +41,3 @@ public sealed record InputManifest
/// </summary>
public required TimeWindowInfo TimeWindow { get; init; }
}
/// <summary>
/// Information about the source artifact.
/// </summary>
public sealed record SourceArtifactInfo
{
/// <summary>
/// Content-addressed hash of the artifact (e.g., sha256:abc123...).
/// </summary>
public required string Digest { get; init; }
/// <summary>
/// Type of artifact (container-image, binary, archive, etc.).
/// </summary>
public string? ArtifactType { get; init; }
/// <summary>
/// OCI reference if applicable (e.g., ghcr.io/org/repo:tag).
/// </summary>
public string? OciReference { get; init; }
/// <summary>
/// Size of the artifact in bytes.
/// </summary>
public long? SizeBytes { get; init; }
}
/// <summary>
/// Information about the SBOM.
/// </summary>
public sealed record SbomInfo
{
/// <summary>
/// Canonical hash of the SBOM content.
/// </summary>
public required string Hash { get; init; }
/// <summary>
/// SBOM format (spdx-2.3, cyclonedx-1.6, etc.).
/// </summary>
public string? Format { get; init; }
/// <summary>
/// Number of packages in the SBOM.
/// </summary>
public int? PackageCount { get; init; }
/// <summary>
/// Number of packages with license information.
/// </summary>
public int? PackagesWithLicense { get; init; }
/// <summary>
/// Completeness percentage (0-100).
/// </summary>
public int? CompletenessScore { get; init; }
/// <summary>
/// When the SBOM was created or last updated.
/// </summary>
public DateTimeOffset? CreatedAt { get; init; }
}
/// <summary>
/// Information about VEX statements.
/// </summary>
public sealed record VexInfo
{
/// <summary>
/// Hash of the sorted VEX statement set.
/// </summary>
public required string HashSetHash { get; init; }
/// <summary>
/// Number of VEX statements contributing to this decision.
/// </summary>
public int StatementCount { get; init; }
/// <summary>
/// Sources of VEX statements (vendor names, OpenVEX IDs, etc.).
/// </summary>
public IReadOnlyList<string> Sources { get; init; } = [];
/// <summary>
/// Most recent VEX statement timestamp.
/// </summary>
public DateTimeOffset? LatestStatementAt { get; init; }
/// <summary>
/// Individual statement hashes (for verification).
/// </summary>
public IReadOnlyList<string> StatementHashes { get; init; } = [];
}
/// <summary>
/// Information about the policy.
/// </summary>
public sealed record PolicyInfo
{
/// <summary>
/// Canonical hash of the policy bundle.
/// </summary>
public required string Hash { get; init; }
/// <summary>
/// Policy pack identifier.
/// </summary>
public string? PackId { get; init; }
/// <summary>
/// Policy version number.
/// </summary>
public int? Version { get; init; }
/// <summary>
/// When the policy was last updated.
/// </summary>
public DateTimeOffset? LastUpdatedAt { get; init; }
/// <summary>
/// Human-readable policy name.
/// </summary>
public string? Name { get; init; }
}
/// <summary>
/// Information about signers and attestors.
/// </summary>
public sealed record SignerInfo
{
/// <summary>
/// Hash of the sorted signer set.
/// </summary>
public required string SetHash { get; init; }
/// <summary>
/// Number of signers in the set.
/// </summary>
public int SignerCount { get; init; }
/// <summary>
/// Signer certificate information.
/// </summary>
public IReadOnlyList<SignerCertificate> Certificates { get; init; } = [];
}
/// <summary>
/// Information about a signer certificate.
/// </summary>
public sealed record SignerCertificate
{
/// <summary>
/// Subject of the certificate (e.g., CN=...).
/// </summary>
public string? Subject { get; init; }
/// <summary>
/// Certificate issuer.
/// </summary>
public string? Issuer { get; init; }
/// <summary>
/// Certificate serial number or fingerprint.
/// </summary>
public string? Fingerprint { get; init; }
/// <summary>
/// When the certificate expires.
/// </summary>
public DateTimeOffset? ExpiresAt { get; init; }
/// <summary>
/// Trust level (fulcio, self-signed, enterprise-ca, etc.).
/// </summary>
public string? TrustLevel { get; init; }
}
/// <summary>
/// Information about the time window used in VeriKey.
/// </summary>
public sealed record TimeWindowInfo
{
/// <summary>
/// The time window bucket identifier.
/// </summary>
public required string Bucket { get; init; }
/// <summary>
/// Start of the time window (UTC).
/// </summary>
public DateTimeOffset? StartsAt { get; init; }
/// <summary>
/// End of the time window (UTC).
/// </summary>
public DateTimeOffset? EndsAt { get; init; }
/// <summary>
/// Duration of the time window.
/// </summary>
public TimeSpan? Duration { get; init; }
}

View File

@@ -0,0 +1,34 @@
using System;
namespace StellaOps.Provcache;
/// <summary>
/// Information about the policy.
/// </summary>
public sealed record PolicyInfo
{
    /// <summary>
    /// Canonical hash of the policy bundle. The only required field; the
    /// remaining properties are optional descriptive metadata.
    /// </summary>
    public required string Hash { get; init; }
    /// <summary>
    /// Policy pack identifier.
    /// </summary>
    public string? PackId { get; init; }
    /// <summary>
    /// Policy version number.
    /// </summary>
    public int? Version { get; init; }
    /// <summary>
    /// When the policy was last updated.
    /// </summary>
    public DateTimeOffset? LastUpdatedAt { get; init; }
    /// <summary>
    /// Human-readable policy name.
    /// </summary>
    public string? Name { get; init; }
}

View File

@@ -0,0 +1,28 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Provcache;
/// <summary>
/// Identifiers needed to replay an evaluation with the same inputs.
/// </summary>
public sealed record ReplaySeed
{
    /// <summary>
    /// Advisory feed identifiers used in evaluation.
    /// Example: ["cve-2024", "ghsa-2024", "oval-debian"]
    /// </summary>
    // The previous "= []" default was dead code: `required` forces every
    // creation site (including serializers honoring required members) to set it.
    public required IReadOnlyList<string> FeedIds { get; init; }

    /// <summary>
    /// Policy rule identifiers used in evaluation.
    /// Example: ["default-policy-v2", "org-exceptions"]
    /// </summary>
    public required IReadOnlyList<string> RuleIds { get; init; }

    /// <summary>
    /// Optional: Frozen epoch timestamp for deterministic replay.
    /// If set, evaluation should use this timestamp instead of current time.
    /// </summary>
    public DateTimeOffset? FrozenEpoch { get; init; }
}

View File

@@ -0,0 +1,39 @@
using System;
namespace StellaOps.Provcache;
/// <summary>
/// Information about the SBOM.
/// </summary>
public sealed record SbomInfo
{
    /// <summary>
    /// Canonical hash of the SBOM content. The only required field; the
    /// remaining properties are optional descriptive metadata.
    /// </summary>
    public required string Hash { get; init; }
    /// <summary>
    /// SBOM format (spdx-2.3, cyclonedx-1.6, etc.).
    /// </summary>
    public string? Format { get; init; }
    /// <summary>
    /// Number of packages in the SBOM.
    /// </summary>
    public int? PackageCount { get; init; }
    /// <summary>
    /// Number of packages with license information.
    /// </summary>
    public int? PackagesWithLicense { get; init; }
    /// <summary>
    /// Completeness percentage (0-100).
    /// </summary>
    public int? CompletenessScore { get; init; }
    /// <summary>
    /// When the SBOM was created or last updated.
    /// </summary>
    public DateTimeOffset? CreatedAt { get; init; }
}

View File

@@ -0,0 +1,34 @@
using System;
namespace StellaOps.Provcache;
/// <summary>
/// Information about a signer certificate. All fields are optional; populate
/// whatever the certificate source provides.
/// </summary>
public sealed record SignerCertificate
{
    /// <summary>
    /// Subject of the certificate (e.g., CN=...).
    /// </summary>
    public string? Subject { get; init; }
    /// <summary>
    /// Certificate issuer.
    /// </summary>
    public string? Issuer { get; init; }
    /// <summary>
    /// Certificate serial number or fingerprint.
    /// </summary>
    public string? Fingerprint { get; init; }
    /// <summary>
    /// When the certificate expires.
    /// </summary>
    public DateTimeOffset? ExpiresAt { get; init; }
    /// <summary>
    /// Trust level (fulcio, self-signed, enterprise-ca, etc.).
    /// </summary>
    public string? TrustLevel { get; init; }
}

View File

@@ -0,0 +1,24 @@
using System.Collections.Generic;
namespace StellaOps.Provcache;
/// <summary>
/// Information about signers and attestors.
/// </summary>
public sealed record SignerInfo
{
    /// <summary>
    /// Hash of the sorted signer set.
    /// </summary>
    public required string SetHash { get; init; }
    /// <summary>
    /// Number of signers in the set.
    /// NOTE(review): tracked separately from Certificates.Count — confirm the
    /// two are allowed to differ (e.g., when certificates are unavailable).
    /// </summary>
    public int SignerCount { get; init; }
    /// <summary>
    /// Signer certificate information. Defaults to an empty list.
    /// </summary>
    public IReadOnlyList<SignerCertificate> Certificates { get; init; } = [];
}

View File

@@ -0,0 +1,27 @@
namespace StellaOps.Provcache;
/// <summary>
/// Information about the source artifact.
/// </summary>
public sealed record SourceArtifactInfo
{
    /// <summary>
    /// Content-addressed hash of the artifact (e.g., sha256:abc123...).
    /// The only required field; the rest is optional descriptive metadata.
    /// </summary>
    public required string Digest { get; init; }
    /// <summary>
    /// Type of artifact (container-image, binary, archive, etc.).
    /// </summary>
    public string? ArtifactType { get; init; }
    /// <summary>
    /// OCI reference if applicable (e.g., ghcr.io/org/repo:tag).
    /// </summary>
    public string? OciReference { get; init; }
    /// <summary>
    /// Size of the artifact in bytes.
    /// </summary>
    public long? SizeBytes { get; init; }
}

View File

@@ -0,0 +1,29 @@
using System;
namespace StellaOps.Provcache;
/// <summary>
/// Information about the time window used in VeriKey.
/// </summary>
public sealed record TimeWindowInfo
{
    /// <summary>
    /// The time window bucket identifier. The only required field; the
    /// start/end/duration properties are optional detail.
    /// </summary>
    public required string Bucket { get; init; }
    /// <summary>
    /// Start of the time window (UTC).
    /// </summary>
    public DateTimeOffset? StartsAt { get; init; }
    /// <summary>
    /// End of the time window (UTC).
    /// </summary>
    public DateTimeOffset? EndsAt { get; init; }
    /// <summary>
    /// Duration of the time window.
    /// </summary>
    public TimeSpan? Duration { get; init; }
}

View File

@@ -0,0 +1,69 @@
using System;
namespace StellaOps.Provcache;
/// <summary>
/// Breakdown of trust score by component, showing contribution from each evidence type.
/// </summary>
public sealed record TrustScoreBreakdown
{
    /// <summary>
    /// Reachability evidence contribution (weight: 25%).
    /// Based on call graph / static analysis evidence.
    /// </summary>
    public required TrustScoreComponent Reachability { get; init; }

    /// <summary>
    /// SBOM completeness contribution (weight: 20%).
    /// Based on package coverage and license data.
    /// </summary>
    public required TrustScoreComponent SbomCompleteness { get; init; }

    /// <summary>
    /// VEX statement coverage contribution (weight: 20%).
    /// Based on vendor statements and OpenVEX coverage.
    /// </summary>
    public required TrustScoreComponent VexCoverage { get; init; }

    /// <summary>
    /// Policy freshness contribution (weight: 15%).
    /// Based on last policy update timestamp.
    /// </summary>
    public required TrustScoreComponent PolicyFreshness { get; init; }

    /// <summary>
    /// Signer trust contribution (weight: 20%).
    /// Based on signer reputation and key age.
    /// </summary>
    public required TrustScoreComponent SignerTrust { get; init; }

    /// <summary>
    /// Computes the weighted total by summing each component's
    /// <see cref="TrustScoreComponent.Contribution"/> (Score * Weight) and
    /// rounding to the nearest integer (decimal Math.Round default, i.e.
    /// midpoint-to-even).
    /// </summary>
    public int ComputeTotal()
    {
        // Sum the components' own Contribution instead of re-deriving
        // Score * Weight here, so the math stays in one place.
        return (int)Math.Round(
            Reachability.Contribution +
            SbomCompleteness.Contribution +
            VexCoverage.Contribution +
            PolicyFreshness.Contribution +
            SignerTrust.Contribution);
    }

    /// <summary>
    /// Creates a default breakdown with standard weights
    /// (25% reachability, 20% SBOM, 20% VEX, 15% policy, 20% signer).
    /// </summary>
    public static TrustScoreBreakdown CreateDefault(
        int reachabilityScore = 0,
        int sbomScore = 0,
        int vexScore = 0,
        int policyScore = 0,
        int signerScore = 0) => new()
    {
        Reachability = new TrustScoreComponent { Score = reachabilityScore, Weight = 0.25m },
        SbomCompleteness = new TrustScoreComponent { Score = sbomScore, Weight = 0.20m },
        VexCoverage = new TrustScoreComponent { Score = vexScore, Weight = 0.20m },
        PolicyFreshness = new TrustScoreComponent { Score = policyScore, Weight = 0.15m },
        SignerTrust = new TrustScoreComponent { Score = signerScore, Weight = 0.20m }
    };
}

View File

@@ -0,0 +1,22 @@
namespace StellaOps.Provcache;
/// <summary>
/// Individual component of trust score with its score and weight.
/// </summary>
public sealed record TrustScoreComponent
{
    /// <summary>
    /// Component score (0-100).
    /// </summary>
    public required int Score { get; init; }
    /// <summary>
    /// Weight of this component in the total score (0.0-1.0).
    /// </summary>
    public required decimal Weight { get; init; }
    /// <summary>
    /// Weighted contribution to total score: the unrounded decimal product
    /// Score * Weight.
    /// </summary>
    public decimal Contribution => Score * Weight;
}

View File

@@ -0,0 +1,35 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Provcache;
/// <summary>
/// Information about VEX statements.
/// </summary>
public sealed record VexInfo
{
    /// <summary>
    /// Hash of the sorted VEX statement set. The only required field.
    /// </summary>
    public required string HashSetHash { get; init; }
    /// <summary>
    /// Number of VEX statements contributing to this decision.
    /// </summary>
    public int StatementCount { get; init; }
    /// <summary>
    /// Sources of VEX statements (vendor names, OpenVEX IDs, etc.).
    /// Defaults to an empty list.
    /// </summary>
    public IReadOnlyList<string> Sources { get; init; } = [];
    /// <summary>
    /// Most recent VEX statement timestamp.
    /// </summary>
    public DateTimeOffset? LatestStatementAt { get; init; }
    /// <summary>
    /// Individual statement hashes (for verification). Defaults to an empty list.
    /// </summary>
    public IReadOnlyList<string> StatementHashes { get; init; } = [];
}

View File

@@ -0,0 +1,21 @@
// ----------------------------------------------------------------------------
// Copyright (c) 2025 StellaOps contributors. All rights reserved.
// SPDX-License-Identifier: BUSL-1.1
// ----------------------------------------------------------------------------
namespace StellaOps.Provcache.Oci;
/// <summary>
/// Interface for building OCI attestations for Provcache decisions.
/// </summary>
public interface IProvcacheOciAttestationBuilder
{
    /// <summary>
    /// Builds an OCI attestation from a DecisionDigest.
    /// </summary>
    /// <param name="request">Request carrying the decision data to attest.</param>
    /// <returns>The built attestation result.</returns>
    ProvcacheOciAttestationResult Build(ProvcacheOciAttestationRequest request);
    /// <summary>
    /// Creates an OCI attachment ready for pushing to a registry.
    /// </summary>
    /// <param name="request">Request carrying the decision data to attest.</param>
    /// <returns>The attachment to push.</returns>
    ProvcacheOciAttachment CreateAttachment(ProvcacheOciAttestationRequest request);
}

Some files were not shown because too many files have changed in this diff Show More