sprints enhancements
@@ -189,3 +189,189 @@ internal static class InvalidationTypeExtensions
    /// </summary>
    public const string VeriKey = "VeriKey";
}

/// <summary>
/// Response model for GET /v1/provcache/proofs/{proofRoot}.
/// </summary>
public sealed class ProofEvidenceResponse
{
    /// <summary>
    /// The proof root (Merkle root).
    /// </summary>
    public required string ProofRoot { get; init; }

    /// <summary>
    /// Total number of chunks available.
    /// </summary>
    public required int TotalChunks { get; init; }

    /// <summary>
    /// Total size of all evidence in bytes.
    /// </summary>
    public required long TotalSize { get; init; }

    /// <summary>
    /// The chunks in this page.
    /// </summary>
    public required IReadOnlyList<ProofChunkResponse> Chunks { get; init; }

    /// <summary>
    /// Pagination cursor for next page (null if last page).
    /// </summary>
    public string? NextCursor { get; init; }

    /// <summary>
    /// Whether there are more chunks available.
    /// </summary>
    public bool HasMore { get; init; }
}

/// <summary>
/// Response model for a single proof chunk.
/// </summary>
public sealed class ProofChunkResponse
{
    /// <summary>
    /// Unique chunk identifier.
    /// </summary>
    public required Guid ChunkId { get; init; }

    /// <summary>
    /// Zero-based chunk index.
    /// </summary>
    public required int Index { get; init; }

    /// <summary>
    /// SHA256 hash for verification.
    /// </summary>
    public required string Hash { get; init; }

    /// <summary>
    /// Size in bytes.
    /// </summary>
    public required int Size { get; init; }

    /// <summary>
    /// Content type.
    /// </summary>
    public required string ContentType { get; init; }

    /// <summary>
    /// Base64-encoded chunk data (included only when includeData=true).
    /// </summary>
    public string? Data { get; init; }
}

/// <summary>
/// Response model for GET /v1/provcache/proofs/{proofRoot}/manifest.
/// </summary>
public sealed class ProofManifestResponse
{
    /// <summary>
    /// The proof root (Merkle root).
    /// </summary>
    public required string ProofRoot { get; init; }

    /// <summary>
    /// Total number of chunks.
    /// </summary>
    public required int TotalChunks { get; init; }

    /// <summary>
    /// Total size of all evidence in bytes.
    /// </summary>
    public required long TotalSize { get; init; }

    /// <summary>
    /// Ordered list of chunk metadata (without data).
    /// </summary>
    public required IReadOnlyList<ChunkMetadataResponse> Chunks { get; init; }

    /// <summary>
    /// When the manifest was generated.
    /// </summary>
    public required DateTimeOffset GeneratedAt { get; init; }
}

/// <summary>
/// Response model for chunk metadata (without data).
/// </summary>
public sealed class ChunkMetadataResponse
{
    /// <summary>
    /// Chunk identifier.
    /// </summary>
    public required Guid ChunkId { get; init; }

    /// <summary>
    /// Zero-based index.
    /// </summary>
    public required int Index { get; init; }

    /// <summary>
    /// SHA256 hash for verification.
    /// </summary>
    public required string Hash { get; init; }

    /// <summary>
    /// Size in bytes.
    /// </summary>
    public required int Size { get; init; }

    /// <summary>
    /// Content type.
    /// </summary>
    public required string ContentType { get; init; }
}

/// <summary>
/// Response model for POST /v1/provcache/proofs/{proofRoot}/verify.
/// </summary>
public sealed class ProofVerificationResponse
{
    /// <summary>
    /// The proof root that was verified.
    /// </summary>
    public required string ProofRoot { get; init; }

    /// <summary>
    /// Whether the Merkle tree is valid.
    /// </summary>
    public required bool IsValid { get; init; }

    /// <summary>
    /// Details about each chunk's verification.
    /// </summary>
    public IReadOnlyList<ChunkVerificationResult>? ChunkResults { get; init; }

    /// <summary>
    /// Error message if verification failed.
    /// </summary>
    public string? Error { get; init; }
}

/// <summary>
/// Result of verifying a single chunk.
/// </summary>
public sealed class ChunkVerificationResult
{
    /// <summary>
    /// Chunk index.
    /// </summary>
    public required int Index { get; init; }

    /// <summary>
    /// Whether the chunk hash is valid.
    /// </summary>
    public required bool IsValid { get; init; }

    /// <summary>
    /// Expected hash from manifest.
    /// </summary>
    public required string ExpectedHash { get; init; }

    /// <summary>
    /// Computed hash from chunk data.
    /// </summary>
    public string? ComputedHash { get; init; }
}

@@ -1,5 +1,6 @@
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;

@@ -14,7 +15,7 @@ public sealed class ProvcacheApiEndpoints;

/// <summary>
/// Extension methods for mapping Provcache API endpoints.
/// </summary>
-public static class ProvcacheEndpointExtensions
+public static partial class ProvcacheEndpointExtensions
{
    /// <summary>
    /// Maps Provcache API endpoints to the specified route builder.

@@ -69,6 +70,47 @@ public static class ProvcacheEndpointExtensions

            .Produces<ProvcacheMetricsResponse>(StatusCodes.Status200OK)
            .Produces<ProblemDetails>(StatusCodes.Status500InternalServerError);

        // Map evidence paging endpoints under /proofs
        var proofsGroup = endpoints.MapGroup($"{prefix}/proofs")
            .WithTags("Provcache Evidence")
            .WithOpenApi();

        // GET /v1/provcache/proofs/{proofRoot}
        proofsGroup.MapGet("/{proofRoot}", GetEvidenceChunks)
            .WithName("GetProofEvidence")
            .WithSummary("Get evidence chunks by proof root")
            .WithDescription("Retrieves evidence chunks for a proof root with pagination support. Use cursor parameter for subsequent pages.")
            .Produces<ProofEvidenceResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .Produces<ProblemDetails>(StatusCodes.Status500InternalServerError);

        // GET /v1/provcache/proofs/{proofRoot}/manifest
        proofsGroup.MapGet("/{proofRoot}/manifest", GetProofManifest)
            .WithName("GetProofManifest")
            .WithSummary("Get chunk manifest (metadata without data)")
            .WithDescription("Retrieves the chunk manifest for lazy evidence fetching. Contains hashes and sizes but no blob data.")
            .Produces<ProofManifestResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .Produces<ProblemDetails>(StatusCodes.Status500InternalServerError);

        // GET /v1/provcache/proofs/{proofRoot}/chunks/{chunkIndex}
        proofsGroup.MapGet("/{proofRoot}/chunks/{chunkIndex:int}", GetSingleChunk)
            .WithName("GetProofChunk")
            .WithSummary("Get a single chunk by index")
            .WithDescription("Retrieves a specific chunk by its index within the proof.")
            .Produces<ProofChunkResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .Produces<ProblemDetails>(StatusCodes.Status500InternalServerError);

        // POST /v1/provcache/proofs/{proofRoot}/verify
        proofsGroup.MapPost("/{proofRoot}/verify", VerifyProof)
            .WithName("VerifyProof")
            .WithSummary("Verify Merkle tree integrity")
            .WithDescription("Verifies all chunk hashes and the Merkle tree for the proof root.")
            .Produces<ProofVerificationResponse>(StatusCodes.Status200OK)
            .Produces(StatusCodes.Status404NotFound)
            .Produces<ProblemDetails>(StatusCodes.Status500InternalServerError);

        return group;
    }
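
A hedged client-side sketch of the intended call pattern for these routes: fetch the cheap manifest first, then pull chunk data lazily. The base address, the example root, and the use of System.Net.Http.Json are illustrative assumptions, not part of this commit:

    using System.Net.Http.Json;

    var http = new HttpClient { BaseAddress = new Uri("https://provcache.example.internal") }; // hypothetical host
    var proofRoot = "sha256:..."; // illustrative placeholder root

    // Metadata only: hashes and sizes, no blob payloads.
    var manifest = await http.GetFromJsonAsync<ProofManifestResponse>(
        $"/v1/provcache/proofs/{proofRoot}/manifest");

    // Fetch one chunk's data only when it is actually needed.
    var chunk = await http.GetFromJsonAsync<ProofChunkResponse>(
        $"/v1/provcache/proofs/{proofRoot}/chunks/0");
    var blob = Convert.FromBase64String(chunk!.Data!);
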
@@ -278,3 +320,234 @@ internal sealed class ProblemDetails

    public string? Detail { get; set; }
    public string? Instance { get; set; }
}

/// <summary>
/// Marker class for logging in Proofs API endpoints.
/// </summary>
public sealed class ProofsApiEndpoints;

partial class ProvcacheEndpointExtensions
{
    private const int DefaultPageSize = 10;
    private const int MaxPageSize = 100;

    /// <summary>
    /// GET /v1/provcache/proofs/{proofRoot}
    /// </summary>
    private static async Task<IResult> GetEvidenceChunks(
        string proofRoot,
        int? offset,
        int? limit,
        bool? includeData,
        [FromServices] IEvidenceChunkRepository chunkRepository,
        ILogger<ProofsApiEndpoints> logger,
        CancellationToken cancellationToken)
    {
        logger.LogDebug("GET /v1/provcache/proofs/{ProofRoot} offset={Offset} limit={Limit}", proofRoot, offset, limit);

        try
        {
            var startIndex = offset ?? 0;
            var pageSize = Math.Clamp(limit ?? DefaultPageSize, 1, MaxPageSize);

            // Get manifest for total count
            var manifest = await chunkRepository.GetManifestAsync(proofRoot, cancellationToken);
            if (manifest is null)
            {
                return Results.NotFound();
            }

            // Get chunk range
            var chunks = await chunkRepository.GetChunkRangeAsync(proofRoot, startIndex, pageSize, cancellationToken);

            var chunkResponses = chunks.Select(c => new ProofChunkResponse
            {
                ChunkId = c.ChunkId,
                Index = c.ChunkIndex,
                Hash = c.ChunkHash,
                Size = c.BlobSize,
                ContentType = c.ContentType,
                Data = includeData == true ? Convert.ToBase64String(c.Blob) : null
            }).ToList();

            var hasMore = startIndex + chunks.Count < manifest.TotalChunks;
            var nextCursor = hasMore ? (startIndex + pageSize).ToString() : null;

            return Results.Ok(new ProofEvidenceResponse
            {
                ProofRoot = proofRoot,
                TotalChunks = manifest.TotalChunks,
                TotalSize = manifest.TotalSize,
                Chunks = chunkResponses,
                NextCursor = nextCursor,
                HasMore = hasMore
            });
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Error getting evidence chunks for proof root {ProofRoot}", proofRoot);
            return Results.Problem(
                detail: ex.Message,
                statusCode: StatusCodes.Status500InternalServerError,
                title: "Evidence retrieval failed");
        }
    }
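
Since NextCursor is simply the next offset rendered as a string, a caller can page through every chunk with a loop like this sketch (HTTP client plumbing as in the earlier example is assumed):

    var cursor = (string?)null;
    do
    {
        var url = $"/v1/provcache/proofs/{proofRoot}?limit=50&includeData=true"
            + (cursor is null ? string.Empty : $"&offset={cursor}");
        var page = await http.GetFromJsonAsync<ProofEvidenceResponse>(url);
        foreach (var c in page!.Chunks)
        {
            ProcessChunk(Convert.FromBase64String(c.Data!)); // ProcessChunk is a hypothetical callback
        }
        cursor = page.NextCursor; // null once HasMore is false
    } while (cursor is not null);
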
    /// <summary>
    /// GET /v1/provcache/proofs/{proofRoot}/manifest
    /// </summary>
    private static async Task<IResult> GetProofManifest(
        string proofRoot,
        [FromServices] IEvidenceChunkRepository chunkRepository,
        ILogger<ProofsApiEndpoints> logger,
        CancellationToken cancellationToken)
    {
        logger.LogDebug("GET /v1/provcache/proofs/{ProofRoot}/manifest", proofRoot);

        try
        {
            var manifest = await chunkRepository.GetManifestAsync(proofRoot, cancellationToken);
            if (manifest is null)
            {
                return Results.NotFound();
            }

            var chunkMetadata = manifest.Chunks.Select(c => new ChunkMetadataResponse
            {
                ChunkId = c.ChunkId,
                Index = c.Index,
                Hash = c.Hash,
                Size = c.Size,
                ContentType = c.ContentType
            }).ToList();

            return Results.Ok(new ProofManifestResponse
            {
                ProofRoot = proofRoot,
                TotalChunks = manifest.TotalChunks,
                TotalSize = manifest.TotalSize,
                Chunks = chunkMetadata,
                GeneratedAt = manifest.GeneratedAt
            });
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Error getting manifest for proof root {ProofRoot}", proofRoot);
            return Results.Problem(
                detail: ex.Message,
                statusCode: StatusCodes.Status500InternalServerError,
                title: "Manifest retrieval failed");
        }
    }

    /// <summary>
    /// GET /v1/provcache/proofs/{proofRoot}/chunks/{chunkIndex}
    /// </summary>
    private static async Task<IResult> GetSingleChunk(
        string proofRoot,
        int chunkIndex,
        [FromServices] IEvidenceChunkRepository chunkRepository,
        ILogger<ProofsApiEndpoints> logger,
        CancellationToken cancellationToken)
    {
        logger.LogDebug("GET /v1/provcache/proofs/{ProofRoot}/chunks/{ChunkIndex}", proofRoot, chunkIndex);

        try
        {
            var chunk = await chunkRepository.GetChunkAsync(proofRoot, chunkIndex, cancellationToken);
            if (chunk is null)
            {
                return Results.NotFound();
            }

            return Results.Ok(new ProofChunkResponse
            {
                ChunkId = chunk.ChunkId,
                Index = chunk.ChunkIndex,
                Hash = chunk.ChunkHash,
                Size = chunk.BlobSize,
                ContentType = chunk.ContentType,
                Data = Convert.ToBase64String(chunk.Blob)
            });
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Error getting chunk {ChunkIndex} for proof root {ProofRoot}", chunkIndex, proofRoot);
            return Results.Problem(
                detail: ex.Message,
                statusCode: StatusCodes.Status500InternalServerError,
                title: "Chunk retrieval failed");
        }
    }

    /// <summary>
    /// POST /v1/provcache/proofs/{proofRoot}/verify
    /// </summary>
    private static async Task<IResult> VerifyProof(
        string proofRoot,
        [FromServices] IEvidenceChunkRepository chunkRepository,
        [FromServices] IEvidenceChunker chunker,
        ILogger<ProofsApiEndpoints> logger,
        CancellationToken cancellationToken)
    {
        logger.LogDebug("POST /v1/provcache/proofs/{ProofRoot}/verify", proofRoot);

        try
        {
            var chunks = await chunkRepository.GetChunksAsync(proofRoot, cancellationToken);
            if (chunks.Count == 0)
            {
                return Results.NotFound();
            }

            var chunkResults = new List<ChunkVerificationResult>();
            var allValid = true;

            foreach (var chunk in chunks)
            {
                var isValid = chunker.VerifyChunk(chunk);
                var computedHash = isValid ? null : ComputeChunkHash(chunk.Blob);

                chunkResults.Add(new ChunkVerificationResult
                {
                    Index = chunk.ChunkIndex,
                    IsValid = isValid,
                    ExpectedHash = chunk.ChunkHash,
                    ComputedHash = computedHash
                });

                if (!isValid)
                {
                    allValid = false;
                }
            }

            // Verify Merkle root
            var chunkHashes = chunks.Select(c => c.ChunkHash).ToList();
            var computedRoot = chunker.ComputeMerkleRoot(chunkHashes);
            var rootMatches = string.Equals(computedRoot, proofRoot, StringComparison.OrdinalIgnoreCase);

            return Results.Ok(new ProofVerificationResponse
            {
                ProofRoot = proofRoot,
                IsValid = allValid && rootMatches,
                ChunkResults = chunkResults,
                Error = !rootMatches ? $"Merkle root mismatch. Expected: {proofRoot}, Computed: {computedRoot}" : null
            });
        }
        catch (Exception ex)
        {
            logger.LogError(ex, "Error verifying proof root {ProofRoot}", proofRoot);
            return Results.Problem(
                detail: ex.Message,
                statusCode: StatusCodes.Status500InternalServerError,
                title: "Proof verification failed");
        }
    }
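
A caller can use ChunkResults to tell a corrupted chunk apart from a mismatched root; a hedged sketch of interpreting the verify response (HTTP plumbing as above):

    var response = await http.PostAsync($"/v1/provcache/proofs/{proofRoot}/verify", content: null);
    var verdict = await response.Content.ReadFromJsonAsync<ProofVerificationResponse>();
    if (!verdict!.IsValid)
    {
        // Failed chunks carry their recomputed hash; a root mismatch with all
        // chunks valid means the chunk set does not belong to this proof root.
        var badIndices = verdict.ChunkResults?
            .Where(r => !r.IsValid)
            .Select(r => r.Index) ?? [];
        Console.WriteLine($"{verdict.Error}; failed chunks: {string.Join(", ", badIndices)}");
    }
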

    private static string ComputeChunkHash(byte[] data)
    {
        var hash = System.Security.Cryptography.SHA256.HashData(data);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }
}

@@ -0,0 +1,257 @@

using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Logging;
using StellaOps.Provcache.Entities;

namespace StellaOps.Provcache.Postgres;

/// <summary>
/// PostgreSQL implementation of <see cref="IEvidenceChunkRepository"/>.
/// </summary>
public sealed class PostgresEvidenceChunkRepository : IEvidenceChunkRepository
{
    private readonly ProvcacheDbContext _context;
    private readonly ILogger<PostgresEvidenceChunkRepository> _logger;

    public PostgresEvidenceChunkRepository(
        ProvcacheDbContext context,
        ILogger<PostgresEvidenceChunkRepository> logger)
    {
        _context = context ?? throw new ArgumentNullException(nameof(context));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<EvidenceChunk>> GetChunksAsync(
        string proofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        var entities = await _context.EvidenceChunks
            .Where(e => e.ProofRoot == proofRoot)
            .OrderBy(e => e.ChunkIndex)
            .AsNoTracking()
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        _logger.LogDebug("Retrieved {Count} chunks for proof root {ProofRoot}", entities.Count, proofRoot);
        return entities.Select(MapToModel).ToList();
    }

    /// <inheritdoc />
    public async Task<EvidenceChunk?> GetChunkAsync(
        string proofRoot,
        int chunkIndex,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        var entity = await _context.EvidenceChunks
            .Where(e => e.ProofRoot == proofRoot && e.ChunkIndex == chunkIndex)
            .AsNoTracking()
            .FirstOrDefaultAsync(cancellationToken)
            .ConfigureAwait(false);

        return entity is null ? null : MapToModel(entity);
    }

    /// <inheritdoc />
    public async Task<IReadOnlyList<EvidenceChunk>> GetChunkRangeAsync(
        string proofRoot,
        int startIndex,
        int count,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        if (startIndex < 0)
        {
            throw new ArgumentOutOfRangeException(nameof(startIndex), "Start index must be non-negative.");
        }

        if (count <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(count), "Count must be positive.");
        }

        var entities = await _context.EvidenceChunks
            .Where(e => e.ProofRoot == proofRoot && e.ChunkIndex >= startIndex)
            .OrderBy(e => e.ChunkIndex)
            .Take(count)
            .AsNoTracking()
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        return entities.Select(MapToModel).ToList();
    }

    /// <inheritdoc />
    public async Task<ChunkManifest?> GetManifestAsync(
        string proofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        // Get metadata without loading blobs
        var chunks = await _context.EvidenceChunks
            .Where(e => e.ProofRoot == proofRoot)
            .OrderBy(e => e.ChunkIndex)
            .Select(e => new
            {
                e.ChunkId,
                e.ChunkIndex,
                e.ChunkHash,
                e.BlobSize,
                e.ContentType,
                e.CreatedAt
            })
            .AsNoTracking()
            .ToListAsync(cancellationToken)
            .ConfigureAwait(false);

        if (chunks.Count == 0)
        {
            return null;
        }

        var metadata = chunks
            .Select(c => new ChunkMetadata
            {
                ChunkId = c.ChunkId,
                Index = c.ChunkIndex,
                Hash = c.ChunkHash,
                Size = c.BlobSize,
                ContentType = c.ContentType
            })
            .ToList();

        return new ChunkManifest
        {
            ProofRoot = proofRoot,
            TotalChunks = chunks.Count,
            TotalSize = chunks.Sum(c => (long)c.BlobSize),
            Chunks = metadata,
            GeneratedAt = DateTimeOffset.UtcNow
        };
    }

    /// <inheritdoc />
    public async Task StoreChunksAsync(
        string proofRoot,
        IEnumerable<EvidenceChunk> chunks,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentNullException.ThrowIfNull(chunks);

        var chunkList = chunks.ToList();

        if (chunkList.Count == 0)
        {
            _logger.LogDebug("No chunks to store for proof root {ProofRoot}", proofRoot);
            return;
        }

        // Update proof root in chunks if not set
        var entities = chunkList.Select(c => MapToEntity(c, proofRoot)).ToList();

        _context.EvidenceChunks.AddRange(entities);
        await _context.SaveChangesAsync(cancellationToken).ConfigureAwait(false);

        _logger.LogDebug("Stored {Count} chunks for proof root {ProofRoot}", chunkList.Count, proofRoot);
    }

    /// <inheritdoc />
    public async Task<int> DeleteChunksAsync(
        string proofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        var deleted = await _context.EvidenceChunks
            .Where(e => e.ProofRoot == proofRoot)
            .ExecuteDeleteAsync(cancellationToken)
            .ConfigureAwait(false);

        _logger.LogDebug("Deleted {Count} chunks for proof root {ProofRoot}", deleted, proofRoot);
        return deleted;
    }

    /// <inheritdoc />
    public async Task<int> GetChunkCountAsync(
        string proofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        return await _context.EvidenceChunks
            .CountAsync(e => e.ProofRoot == proofRoot, cancellationToken)
            .ConfigureAwait(false);
    }

    /// <inheritdoc />
    public async Task<long> GetTotalSizeAsync(
        string proofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        return await _context.EvidenceChunks
            .Where(e => e.ProofRoot == proofRoot)
            .SumAsync(e => (long)e.BlobSize, cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>
    /// Gets total storage across all proof roots.
    /// </summary>
    public async Task<long> GetTotalStorageAsync(CancellationToken cancellationToken = default)
    {
        return await _context.EvidenceChunks
            .SumAsync(e => (long)e.BlobSize, cancellationToken)
            .ConfigureAwait(false);
    }

    /// <summary>
    /// Prunes chunks older than the specified date.
    /// </summary>
    public async Task<int> PruneOldChunksAsync(
        DateTimeOffset olderThan,
        CancellationToken cancellationToken = default)
    {
        return await _context.EvidenceChunks
            .Where(e => e.CreatedAt < olderThan)
            .ExecuteDeleteAsync(cancellationToken)
            .ConfigureAwait(false);
    }

    private static EvidenceChunk MapToModel(ProvcacheEvidenceChunkEntity entity)
    {
        return new EvidenceChunk
        {
            ChunkId = entity.ChunkId,
            ProofRoot = entity.ProofRoot,
            ChunkIndex = entity.ChunkIndex,
            ChunkHash = entity.ChunkHash,
            Blob = entity.Blob,
            BlobSize = entity.BlobSize,
            ContentType = entity.ContentType,
            CreatedAt = entity.CreatedAt
        };
    }

    private static ProvcacheEvidenceChunkEntity MapToEntity(EvidenceChunk chunk, string proofRoot)
    {
        return new ProvcacheEvidenceChunkEntity
        {
            ChunkId = chunk.ChunkId == Guid.Empty ? Guid.NewGuid() : chunk.ChunkId,
            ProofRoot = proofRoot,
            ChunkIndex = chunk.ChunkIndex,
            ChunkHash = chunk.ChunkHash,
            Blob = chunk.Blob,
            BlobSize = chunk.BlobSize,
            ContentType = chunk.ContentType,
            CreatedAt = chunk.CreatedAt
        };
    }
}
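
Wiring the repository into a host is not part of this hunk; a minimal composition-root sketch, assuming the standard EF Core + Npgsql registration pattern and an illustrative connection-string name:

    services.AddDbContext<ProvcacheDbContext>(options =>
        options.UseNpgsql(configuration.GetConnectionString("Provcache"))); // "Provcache" is illustrative
    services.AddScoped<IEvidenceChunkRepository, PostgresEvidenceChunkRepository>();
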
src/__Libraries/StellaOps.Provcache/Chunking/EvidenceChunker.cs (new file, 318 lines)

@@ -0,0 +1,318 @@

using System.Security.Cryptography;

namespace StellaOps.Provcache;

/// <summary>
/// Interface for splitting large evidence into fixed-size chunks
/// and reassembling them with Merkle verification.
/// </summary>
public interface IEvidenceChunker
{
    /// <summary>
    /// Splits evidence into chunks.
    /// </summary>
    /// <param name="evidence">The evidence bytes to split.</param>
    /// <param name="contentType">MIME type of the evidence.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The chunking result with chunks and proof root.</returns>
    Task<ChunkingResult> ChunkAsync(
        ReadOnlyMemory<byte> evidence,
        string contentType,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Splits evidence from a stream.
    /// </summary>
    /// <param name="evidenceStream">Stream containing evidence.</param>
    /// <param name="contentType">MIME type of the evidence.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Async enumerable of chunks as they are created.</returns>
    IAsyncEnumerable<EvidenceChunk> ChunkStreamAsync(
        Stream evidenceStream,
        string contentType,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Reassembles chunks into the original evidence.
    /// </summary>
    /// <param name="chunks">The chunks to reassemble (must be in order).</param>
    /// <param name="expectedProofRoot">Expected Merkle root for verification.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The reassembled evidence bytes.</returns>
    Task<byte[]> ReassembleAsync(
        IEnumerable<EvidenceChunk> chunks,
        string expectedProofRoot,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Verifies a single chunk against its hash.
    /// </summary>
    /// <param name="chunk">The chunk to verify.</param>
    /// <returns>True if the chunk is valid.</returns>
    bool VerifyChunk(EvidenceChunk chunk);

    /// <summary>
    /// Computes the Merkle root from chunk hashes.
    /// </summary>
    /// <param name="chunkHashes">Ordered list of chunk hashes.</param>
    /// <returns>The Merkle root.</returns>
    string ComputeMerkleRoot(IEnumerable<string> chunkHashes);
}

/// <summary>
/// Result of chunking evidence.
/// </summary>
public sealed record ChunkingResult
{
    /// <summary>
    /// The computed Merkle root of all chunks.
    /// </summary>
    public required string ProofRoot { get; init; }

    /// <summary>
    /// The generated chunks.
    /// </summary>
    public required IReadOnlyList<EvidenceChunk> Chunks { get; init; }

    /// <summary>
    /// Total size of the original evidence.
    /// </summary>
    public required long TotalSize { get; init; }
}

/// <summary>
/// Default implementation of <see cref="IEvidenceChunker"/>.
/// </summary>
public sealed class EvidenceChunker : IEvidenceChunker
{
    private readonly ProvcacheOptions _options;
    private readonly TimeProvider _timeProvider;

    public EvidenceChunker(ProvcacheOptions options, TimeProvider? timeProvider = null)
    {
        _options = options ?? throw new ArgumentNullException(nameof(options));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public Task<ChunkingResult> ChunkAsync(
        ReadOnlyMemory<byte> evidence,
        string contentType,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(contentType);

        var chunks = new List<EvidenceChunk>();
        var chunkHashes = new List<string>();
        var chunkSize = _options.ChunkSize;
        var now = _timeProvider.GetUtcNow();

        var span = evidence.Span;
        var totalSize = span.Length;
        var chunkIndex = 0;

        for (var offset = 0; offset < totalSize; offset += chunkSize)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var remainingBytes = totalSize - offset;
            var currentChunkSize = Math.Min(chunkSize, remainingBytes);
            var chunkData = span.Slice(offset, currentChunkSize).ToArray();
            var chunkHash = ComputeHash(chunkData);

            chunks.Add(new EvidenceChunk
            {
                ChunkId = Guid.NewGuid(),
                ProofRoot = string.Empty, // Will be set after computing Merkle root
                ChunkIndex = chunkIndex,
                ChunkHash = chunkHash,
                Blob = chunkData,
                BlobSize = currentChunkSize,
                ContentType = contentType,
                CreatedAt = now
            });

            chunkHashes.Add(chunkHash);
            chunkIndex++;
        }

        var proofRoot = ComputeMerkleRoot(chunkHashes);

        // Update proof root in all chunks
        var finalChunks = chunks.Select(c => c with { ProofRoot = proofRoot }).ToList();

        return Task.FromResult(new ChunkingResult
        {
            ProofRoot = proofRoot,
            Chunks = finalChunks,
            TotalSize = totalSize
        });
    }

    /// <inheritdoc />
    public async IAsyncEnumerable<EvidenceChunk> ChunkStreamAsync(
        Stream evidenceStream,
        string contentType,
        [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(evidenceStream);
        ArgumentNullException.ThrowIfNull(contentType);

        var chunkSize = _options.ChunkSize;
        var buffer = new byte[chunkSize];
        var chunkIndex = 0;
        var now = _timeProvider.GetUtcNow();

        int bytesRead;
        // ReadAtLeastAsync keeps reading until the buffer holds a full chunk or the
        // stream ends, so a partial network read cannot produce an undersized
        // mid-stream chunk (plain ReadAsync gives no such guarantee).
        while ((bytesRead = await evidenceStream.ReadAtLeastAsync(buffer, chunkSize, throwOnEndOfStream: false, cancellationToken)) > 0)
        {
            var chunkData = bytesRead == chunkSize ? buffer : buffer[..bytesRead];
            var chunkHash = ComputeHash(chunkData);

            yield return new EvidenceChunk
            {
                ChunkId = Guid.NewGuid(),
                ProofRoot = string.Empty, // Caller must compute after all chunks
                ChunkIndex = chunkIndex,
                ChunkHash = chunkHash,
                Blob = chunkData.ToArray(),
                BlobSize = bytesRead,
                ContentType = contentType,
                CreatedAt = now
            };

            chunkIndex++;
            buffer = new byte[chunkSize]; // New buffer for next chunk
        }
    }
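
Because ChunkStreamAsync cannot know the Merkle root until the stream is exhausted, each yielded chunk carries an empty ProofRoot. A hedged usage sketch that collects the chunks and stamps the root afterwards (the file name and options instance are illustrative):

    var chunker = new EvidenceChunker(options);
    var chunks = new List<EvidenceChunk>();

    await using var stream = File.OpenRead("evidence.json");
    await foreach (var chunk in chunker.ChunkStreamAsync(stream, "application/json"))
    {
        chunks.Add(chunk);
    }

    // The root is only known once every chunk hash has been seen.
    var root = chunker.ComputeMerkleRoot(chunks.Select(c => c.ChunkHash));
    var stamped = chunks.Select(c => c with { ProofRoot = root }).ToList();
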
    /// <inheritdoc />
    public Task<byte[]> ReassembleAsync(
        IEnumerable<EvidenceChunk> chunks,
        string expectedProofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(chunks);
        ArgumentException.ThrowIfNullOrWhiteSpace(expectedProofRoot);

        var orderedChunks = chunks.OrderBy(c => c.ChunkIndex).ToList();

        if (orderedChunks.Count == 0)
        {
            throw new ArgumentException("No chunks provided.", nameof(chunks));
        }

        // Verify Merkle root
        var chunkHashes = orderedChunks.Select(c => c.ChunkHash).ToList();
        var computedRoot = ComputeMerkleRoot(chunkHashes);

        if (!string.Equals(computedRoot, expectedProofRoot, StringComparison.OrdinalIgnoreCase))
        {
            throw new InvalidOperationException(
                $"Merkle root mismatch. Expected: {expectedProofRoot}, Computed: {computedRoot}");
        }

        // Verify each chunk
        foreach (var chunk in orderedChunks)
        {
            cancellationToken.ThrowIfCancellationRequested();

            if (!VerifyChunk(chunk))
            {
                throw new InvalidOperationException(
                    $"Chunk {chunk.ChunkIndex} verification failed. Expected hash: {chunk.ChunkHash}");
            }
        }

        // Reassemble
        var totalSize = orderedChunks.Sum(c => c.BlobSize);
        var result = new byte[totalSize];
        var offset = 0;

        foreach (var chunk in orderedChunks)
        {
            chunk.Blob.CopyTo(result, offset);
            offset += chunk.BlobSize;
        }

        return Task.FromResult(result);
    }
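
ChunkAsync and ReassembleAsync are intended to round-trip: any flipped byte fails the per-chunk hash check, and any reordered or substituted chunk fails the root comparison. A minimal sketch, assuming a configured ProvcacheOptions and an illustrative input file:

    var chunker = new EvidenceChunker(options);
    var payload = await File.ReadAllBytesAsync("evidence.json");

    var result = await chunker.ChunkAsync(payload, "application/json");
    var restored = await chunker.ReassembleAsync(result.Chunks, result.ProofRoot);

    // Round-trip must reproduce the original bytes exactly.
    System.Diagnostics.Debug.Assert(restored.AsSpan().SequenceEqual(payload));
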
    /// <inheritdoc />
    public bool VerifyChunk(EvidenceChunk chunk)
    {
        ArgumentNullException.ThrowIfNull(chunk);

        var computedHash = ComputeHash(chunk.Blob);
        return string.Equals(computedHash, chunk.ChunkHash, StringComparison.OrdinalIgnoreCase);
    }

    /// <inheritdoc />
    public string ComputeMerkleRoot(IEnumerable<string> chunkHashes)
    {
        ArgumentNullException.ThrowIfNull(chunkHashes);

        var hashes = chunkHashes.ToList();

        if (hashes.Count == 0)
        {
            // Empty Merkle tree
            return ComputeHash([]);
        }

        if (hashes.Count == 1)
        {
            return hashes[0];
        }

        // Build Merkle tree bottom-up
        var currentLevel = hashes.Select(h => HexToBytes(h)).ToList();

        while (currentLevel.Count > 1)
        {
            var nextLevel = new List<byte[]>();

            for (var i = 0; i < currentLevel.Count; i += 2)
            {
                byte[] combined;

                if (i + 1 < currentLevel.Count)
                {
                    // Pair exists - concatenate and hash
                    combined = new byte[currentLevel[i].Length + currentLevel[i + 1].Length];
                    currentLevel[i].CopyTo(combined, 0);
                    currentLevel[i + 1].CopyTo(combined, currentLevel[i].Length);
                }
                else
                {
                    // Odd node - duplicate itself
                    combined = new byte[currentLevel[i].Length * 2];
                    currentLevel[i].CopyTo(combined, 0);
                    currentLevel[i].CopyTo(combined, currentLevel[i].Length);
                }

                nextLevel.Add(SHA256.HashData(combined));
            }

            currentLevel = nextLevel;
        }

        return $"sha256:{Convert.ToHexStringLower(currentLevel[0])}";
    }
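
To make the odd-node rule concrete: with three leaf hashes h0, h1, h2, the tree pairs h2 with a copy of itself, so root = H(H(h0 || h1) || H(h2 || h2)). A small sketch with illustrative leaf values (not real chunk hashes):

    // Level 0: h0, h1, h2              (leaf hashes, one per chunk)
    // Level 1: H(h0||h1), H(h2||h2)    (odd node duplicated)
    // Root:    H( H(h0||h1) || H(h2||h2) )
    var leaves = new[]
    {
        "sha256:" + new string('a', 64),
        "sha256:" + new string('b', 64),
        "sha256:" + new string('c', 64)
    };
    var root = chunker.ComputeMerkleRoot(leaves); // deterministic for a given leaf order
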
    private static string ComputeHash(ReadOnlySpan<byte> data)
    {
        var hash = SHA256.HashData(data);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }

    private static byte[] HexToBytes(string hash)
    {
        // Strip sha256: prefix if present
        var hex = hash.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
            ? hash[7..]
            : hash;

        return Convert.FromHexString(hex);
    }
}

@@ -0,0 +1,110 @@

using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;

namespace StellaOps.Provcache.Entities;

/// <summary>
/// EF Core entity for provcache.prov_revocations table.
/// Tracks all revocation events for audit trail and replay.
/// </summary>
[Table("prov_revocations", Schema = "provcache")]
public sealed class ProvRevocationEntity
{
    /// <summary>
    /// Auto-incrementing sequence number for ordering.
    /// </summary>
    [Key]
    [Column("seq_no")]
    [DatabaseGenerated(DatabaseGeneratedOption.Identity)]
    public long SeqNo { get; set; }

    /// <summary>
    /// Unique identifier for this revocation event.
    /// </summary>
    [Column("revocation_id")]
    public required Guid RevocationId { get; set; }

    /// <summary>
    /// Type of revocation: 'signer', 'feed_epoch', 'policy', 'explicit', or 'expiration'.
    /// </summary>
    [Column("revocation_type")]
    [MaxLength(32)]
    public required string RevocationType { get; set; }

    /// <summary>
    /// The key that was revoked (signer hash, feed epoch, policy hash, or verikey).
    /// </summary>
    [Column("revoked_key")]
    [MaxLength(512)]
    public required string RevokedKey { get; set; }

    /// <summary>
    /// Reason for revocation.
    /// </summary>
    [Column("reason")]
    [MaxLength(1024)]
    public string? Reason { get; set; }

    /// <summary>
    /// Number of cache entries invalidated.
    /// </summary>
    [Column("entries_invalidated")]
    public int EntriesInvalidated { get; set; }

    /// <summary>
    /// Source that triggered the revocation.
    /// </summary>
    [Column("source")]
    [MaxLength(128)]
    public required string Source { get; set; }

    /// <summary>
    /// Optional correlation ID for tracing.
    /// </summary>
    [Column("correlation_id")]
    [MaxLength(128)]
    public string? CorrelationId { get; set; }

    /// <summary>
    /// UTC timestamp when revocation occurred.
    /// </summary>
    [Column("revoked_at")]
    public DateTimeOffset RevokedAt { get; set; }

    /// <summary>
    /// Optional metadata as JSON.
    /// </summary>
    [Column("metadata", TypeName = "jsonb")]
    public string? Metadata { get; set; }
}

/// <summary>
/// Types of revocation events.
/// </summary>
public static class RevocationTypes
{
    /// <summary>
    /// Signer certificate revoked.
    /// </summary>
    public const string Signer = "signer";

    /// <summary>
    /// Feed epoch advanced (older epochs revoked).
    /// </summary>
    public const string FeedEpoch = "feed_epoch";

    /// <summary>
    /// Policy bundle updated/revoked.
    /// </summary>
    public const string Policy = "policy";

    /// <summary>
    /// Explicit revocation of specific entry.
    /// </summary>
    public const string Explicit = "explicit";

    /// <summary>
    /// TTL expiration (for audit completeness).
    /// </summary>
    public const string Expiration = "expiration";
}
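
The entity above is an append-only audit log; a hedged sketch of recording one row after an invalidation sweep (the DbSet name ProvRevocations and the local variables are assumptions, not part of this commit):

    context.ProvRevocations.Add(new ProvRevocationEntity
    {
        RevocationId = Guid.NewGuid(),
        RevocationType = RevocationTypes.Signer,
        RevokedKey = signerHash,               // hash of the revoked signer key
        Reason = "key compromise",
        EntriesInvalidated = invalidatedCount, // result of the cache sweep
        Source = "authority-webhook",          // illustrative source name
        RevokedAt = DateTimeOffset.UtcNow
    });
    await context.SaveChangesAsync(cancellationToken); // SeqNo is database-generated
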
@@ -0,0 +1,109 @@

namespace StellaOps.Provcache.Events;

/// <summary>
/// Event published when an advisory feed advances to a new epoch.
/// Provcache subscribers use this to invalidate cache entries
/// that were computed against older feed epochs.
/// </summary>
/// <remarks>
/// Stream name: <c>stellaops:events:feed-epoch-advanced</c>
/// </remarks>
public sealed record FeedEpochAdvancedEvent
{
    /// <summary>
    /// Stream name for feed epoch events.
    /// </summary>
    public const string StreamName = "stellaops:events:feed-epoch-advanced";

    /// <summary>
    /// Event type identifier for serialization.
    /// </summary>
    public const string EventType = "feed.epoch.advanced.v1";

    /// <summary>
    /// Unique identifier for this event instance.
    /// </summary>
    public required Guid EventId { get; init; }

    /// <summary>
    /// Timestamp when the event occurred (UTC).
    /// </summary>
    public required DateTimeOffset Timestamp { get; init; }

    /// <summary>
    /// The feed identifier (e.g., "cve", "ghsa", "osv", "redhat-oval").
    /// </summary>
    public required string FeedId { get; init; }

    /// <summary>
    /// The previous epoch identifier.
    /// Format varies by feed (e.g., "2024-12-24T12:00:00Z", "v2024.52").
    /// </summary>
    public required string PreviousEpoch { get; init; }

    /// <summary>
    /// The new epoch identifier.
    /// </summary>
    public required string NewEpoch { get; init; }

    /// <summary>
    /// When the new epoch became effective.
    /// Cache entries with feed_epoch older than this should be invalidated.
    /// </summary>
    public required DateTimeOffset EffectiveAt { get; init; }

    /// <summary>
    /// Number of advisories added in this epoch (for metrics).
    /// </summary>
    public int? AdvisoriesAdded { get; init; }

    /// <summary>
    /// Number of advisories modified in this epoch (for metrics).
    /// </summary>
    public int? AdvisoriesModified { get; init; }

    /// <summary>
    /// Number of advisories withdrawn in this epoch (for metrics).
    /// </summary>
    public int? AdvisoriesWithdrawn { get; init; }

    /// <summary>
    /// Tenant ID if multi-tenant (null for global feeds).
    /// </summary>
    public string? TenantId { get; init; }

    /// <summary>
    /// Correlation ID for distributed tracing.
    /// </summary>
    public string? CorrelationId { get; init; }

    /// <summary>
    /// Creates a new FeedEpochAdvancedEvent.
    /// </summary>
    public static FeedEpochAdvancedEvent Create(
        string feedId,
        string previousEpoch,
        string newEpoch,
        DateTimeOffset effectiveAt,
        int? advisoriesAdded = null,
        int? advisoriesModified = null,
        int? advisoriesWithdrawn = null,
        string? tenantId = null,
        string? correlationId = null)
    {
        return new FeedEpochAdvancedEvent
        {
            EventId = Guid.NewGuid(),
            Timestamp = DateTimeOffset.UtcNow,
            FeedId = feedId,
            PreviousEpoch = previousEpoch,
            NewEpoch = newEpoch,
            EffectiveAt = effectiveAt,
            AdvisoriesAdded = advisoriesAdded,
            AdvisoriesModified = advisoriesModified,
            AdvisoriesWithdrawn = advisoriesWithdrawn,
            TenantId = tenantId,
            CorrelationId = correlationId
        };
    }
}
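
A subscriber on the stream would compare each cached entry's feed epoch against EffectiveAt and drop anything older. A hedged handler sketch; IProvcacheInvalidator and its method are hypothetical names, not part of this commit:

    public sealed class FeedEpochAdvancedHandler(IProvcacheInvalidator invalidator)
    {
        public Task HandleAsync(FeedEpochAdvancedEvent evt, CancellationToken ct) =>
            // Invalidate every entry computed for this feed before the new epoch took effect.
            invalidator.InvalidateFeedEpochAsync(evt.FeedId, olderThan: evt.EffectiveAt, ct);
    }
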
@@ -0,0 +1,96 @@

namespace StellaOps.Provcache.Events;

/// <summary>
/// Event published when a signer key is revoked.
/// Provcache subscribers use this to invalidate cache entries
/// that were signed by the revoked key.
/// </summary>
/// <remarks>
/// Stream name: <c>stellaops:events:signer-revoked</c>
/// </remarks>
public sealed record SignerRevokedEvent
{
    /// <summary>
    /// Stream name for signer revocation events.
    /// </summary>
    public const string StreamName = "stellaops:events:signer-revoked";

    /// <summary>
    /// Event type identifier for serialization.
    /// </summary>
    public const string EventType = "signer.revoked.v1";

    /// <summary>
    /// Unique identifier for this event instance.
    /// </summary>
    public required Guid EventId { get; init; }

    /// <summary>
    /// Timestamp when the event occurred (UTC).
    /// </summary>
    public required DateTimeOffset Timestamp { get; init; }

    /// <summary>
    /// The trust anchor ID that owns the revoked key.
    /// </summary>
    public required Guid AnchorId { get; init; }

    /// <summary>
    /// The revoked key identifier.
    /// </summary>
    public required string KeyId { get; init; }

    /// <summary>
    /// Hash of the revoked signer's certificate/public key.
    /// This is used to match against the <c>signer_set_hash</c> in cache entries.
    /// Format: <c>sha256:{hex}</c>
    /// </summary>
    public required string SignerHash { get; init; }

    /// <summary>
    /// When the revocation became effective.
    /// Cache entries created after this time with this signer should be invalidated.
    /// </summary>
    public required DateTimeOffset EffectiveAt { get; init; }

    /// <summary>
    /// Reason for the revocation (for audit purposes).
    /// </summary>
    public string? Reason { get; init; }

    /// <summary>
    /// Actor who initiated the revocation.
    /// </summary>
    public string? Actor { get; init; }

    /// <summary>
    /// Correlation ID for distributed tracing.
    /// </summary>
    public string? CorrelationId { get; init; }

    /// <summary>
    /// Creates a new SignerRevokedEvent.
    /// </summary>
    public static SignerRevokedEvent Create(
        Guid anchorId,
        string keyId,
        string signerHash,
        DateTimeOffset effectiveAt,
        string? reason = null,
        string? actor = null,
        string? correlationId = null)
    {
        return new SignerRevokedEvent
        {
            EventId = Guid.NewGuid(),
            Timestamp = DateTimeOffset.UtcNow,
            AnchorId = anchorId,
            KeyId = keyId,
            SignerHash = signerHash,
            EffectiveAt = effectiveAt,
            Reason = reason,
            Actor = actor,
            CorrelationId = correlationId
        };
    }
}

@@ -0,0 +1,99 @@

namespace StellaOps.Provcache;

/// <summary>
/// Interface for exporting and importing minimal proof bundles.
/// Supports various density levels for air-gap scenarios.
/// </summary>
public interface IMinimalProofExporter
{
    /// <summary>
    /// Exports a minimal proof bundle for the given veri key.
    /// </summary>
    /// <param name="veriKey">The verification key identifying the cache entry.</param>
    /// <param name="options">Export options including density level and signing.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The exported minimal proof bundle.</returns>
    Task<MinimalProofBundle> ExportAsync(
        string veriKey,
        MinimalProofExportOptions options,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Exports a minimal proof bundle as JSON bytes.
    /// </summary>
    /// <param name="veriKey">The verification key identifying the cache entry.</param>
    /// <param name="options">Export options including density level and signing.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>UTF-8 encoded JSON bytes of the bundle.</returns>
    Task<byte[]> ExportAsJsonAsync(
        string veriKey,
        MinimalProofExportOptions options,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Exports a minimal proof bundle to a stream.
    /// </summary>
    /// <param name="veriKey">The verification key identifying the cache entry.</param>
    /// <param name="options">Export options including density level and signing.</param>
    /// <param name="outputStream">The stream to write the bundle to.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task ExportToStreamAsync(
        string veriKey,
        MinimalProofExportOptions options,
        Stream outputStream,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Imports a minimal proof bundle.
    /// </summary>
    /// <param name="bundle">The bundle to import.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Import result with verification status.</returns>
    Task<MinimalProofImportResult> ImportAsync(
        MinimalProofBundle bundle,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Imports a minimal proof bundle from JSON bytes.
    /// </summary>
    /// <param name="jsonBytes">UTF-8 encoded JSON bytes of the bundle.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Import result with verification status.</returns>
    Task<MinimalProofImportResult> ImportFromJsonAsync(
        byte[] jsonBytes,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Imports a minimal proof bundle from a stream.
    /// </summary>
    /// <param name="inputStream">The stream containing the bundle JSON.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Import result with verification status.</returns>
    Task<MinimalProofImportResult> ImportFromStreamAsync(
        Stream inputStream,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Verifies a bundle without importing it.
    /// </summary>
    /// <param name="bundle">The bundle to verify.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Verification results.</returns>
    Task<ImportVerification> VerifyAsync(
        MinimalProofBundle bundle,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Calculates the expected size of an export with the given options.
    /// </summary>
    /// <param name="veriKey">The verification key identifying the cache entry.</param>
    /// <param name="density">The density level.</param>
    /// <param name="standardChunkCount">Number of chunks for Standard density.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Estimated size in bytes.</returns>
    Task<long> EstimateExportSizeAsync(
        string veriKey,
        ProofDensity density,
        int standardChunkCount = 3,
        CancellationToken cancellationToken = default);
}
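
The expected air-gap flow for this interface is export on the connected side, carry the bytes across, then import and check verification before trusting the digest. A hedged sketch (the signing key id and variables are illustrative):

    // Connected side: Standard density inlines the first chunks; the manifest covers the rest.
    var bytes = await exporter.ExportAsJsonAsync(veriKey, new MinimalProofExportOptions
    {
        Density = ProofDensity.Standard,
        Sign = true,
        SigningKeyId = "provcache-export-key" // illustrative key id
    });

    // Air-gapped side: verify before use; pending chunks can be fetched lazily later.
    var import = await exporter.ImportFromJsonAsync(bytes);
    if (!import.Verification.MerkleRootValid || import.Verification.SignatureValid == false)
    {
        throw new InvalidOperationException("Bundle failed verification.");
    }
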
src/__Libraries/StellaOps.Provcache/Export/MinimalProofBundle.cs (new file, 263 lines)
@@ -0,0 +1,263 @@
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Provcache;
|
||||
|
||||
/// <summary>
|
||||
/// Density levels for minimal proof export.
|
||||
/// </summary>
|
||||
public enum ProofDensity
|
||||
{
|
||||
/// <summary>
|
||||
/// Digest + proof root + chunk manifest only (~2KB).
|
||||
/// For quick verification and high-trust networks.
|
||||
/// </summary>
|
||||
Lite,
|
||||
|
||||
/// <summary>
|
||||
/// Lite + first N chunks (~200KB typical).
|
||||
/// For normal air-gap scenarios and auditor preview.
|
||||
/// </summary>
|
||||
Standard,
|
||||
|
||||
/// <summary>
|
||||
/// Full evidence with all chunks (variable size).
|
||||
/// For complete audit and compliance evidence.
|
||||
/// </summary>
|
||||
Strict
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Minimal proof bundle for air-gap export/import.
|
||||
/// Contains the decision digest, proof root, and optionally evidence chunks.
|
||||
/// </summary>
|
||||
public sealed record MinimalProofBundle
|
||||
{
|
||||
/// <summary>
|
||||
/// Bundle format version for compatibility checking.
|
||||
/// </summary>
|
||||
[JsonPropertyName("bundleVersion")]
|
||||
public string BundleVersion { get; init; } = "v1";
|
||||
|
||||
/// <summary>
|
||||
/// The density level this bundle was exported with.
|
||||
/// </summary>
|
||||
[JsonPropertyName("density")]
|
||||
public required ProofDensity Density { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The decision digest containing verdict hash, proof root, and trust score.
|
||||
/// </summary>
|
||||
[JsonPropertyName("digest")]
|
||||
public required DecisionDigest Digest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Chunk manifest for lazy evidence retrieval.
|
||||
/// Always present regardless of density level.
|
||||
/// </summary>
|
||||
[JsonPropertyName("manifest")]
|
||||
public required ChunkManifest Manifest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Included evidence chunks (density-dependent).
|
||||
/// - Lite: empty
|
||||
/// - Standard: first N chunks
|
||||
/// - Strict: all chunks
|
||||
/// </summary>
|
||||
[JsonPropertyName("chunks")]
|
||||
public IReadOnlyList<BundleChunk> Chunks { get; init; } = [];
|
||||
|
||||
/// <summary>
|
||||
/// UTC timestamp when bundle was exported.
|
||||
/// </summary>
|
||||
[JsonPropertyName("exportedAt")]
|
||||
public required DateTimeOffset ExportedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Exporting system identifier for audit trail.
|
||||
/// </summary>
|
||||
[JsonPropertyName("exportedBy")]
|
||||
public string? ExportedBy { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional DSSE envelope containing signed bundle.
|
||||
/// Present when bundle was signed during export.
|
||||
/// </summary>
|
||||
[JsonPropertyName("signature")]
|
||||
public BundleSignature? Signature { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Chunk included in the bundle with base64-encoded blob.
|
||||
/// </summary>
|
||||
public sealed record BundleChunk
|
||||
{
|
||||
/// <summary>
|
||||
/// Zero-based chunk index.
|
||||
/// </summary>
|
||||
[JsonPropertyName("index")]
|
||||
public required int Index { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// SHA256 hash for verification.
|
||||
/// </summary>
|
||||
[JsonPropertyName("hash")]
|
||||
public required string Hash { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Size in bytes.
|
||||
/// </summary>
|
||||
[JsonPropertyName("size")]
|
||||
public required int Size { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// MIME type.
|
||||
/// </summary>
|
||||
[JsonPropertyName("contentType")]
|
||||
public required string ContentType { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Base64-encoded chunk data.
|
||||
/// </summary>
|
||||
[JsonPropertyName("data")]
|
||||
public required string Data { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// DSSE signature envelope for bundle integrity.
|
||||
/// </summary>
|
||||
public sealed record BundleSignature
|
||||
{
|
||||
/// <summary>
|
||||
/// Signature algorithm (e.g., "ES256", "RS256", "Ed25519").
|
||||
/// </summary>
|
||||
[JsonPropertyName("algorithm")]
|
||||
public required string Algorithm { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Key identifier used for signing.
|
||||
/// </summary>
|
||||
[JsonPropertyName("keyId")]
|
||||
public required string KeyId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Base64-encoded signature bytes.
|
||||
/// </summary>
|
||||
[JsonPropertyName("signature")]
|
||||
public required string SignatureBytes { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// UTC timestamp when bundle was signed.
|
||||
/// </summary>
|
||||
[JsonPropertyName("signedAt")]
|
||||
public required DateTimeOffset SignedAt { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional certificate chain for verification.
|
||||
/// </summary>
|
||||
[JsonPropertyName("certificateChain")]
|
||||
public IReadOnlyList<string>? CertificateChain { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Options for exporting a minimal proof bundle.
|
||||
/// </summary>
|
||||
public sealed record MinimalProofExportOptions
|
||||
{
|
||||
/// <summary>
|
||||
/// Density level determining how much evidence to include.
|
||||
/// </summary>
|
||||
public ProofDensity Density { get; init; } = ProofDensity.Standard;
|
||||
|
||||
/// <summary>
|
||||
/// Number of leading chunks to include for Standard density.
|
||||
/// Default is 3 (~192KB with 64KB chunks).
|
||||
/// </summary>
|
||||
public int StandardDensityChunkCount { get; init; } = 3;
|
||||
|
||||
/// <summary>
|
||||
/// Whether to sign the bundle.
|
||||
/// </summary>
|
||||
public bool Sign { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Key ID to use for signing (if Sign is true).
|
||||
/// </summary>
|
||||
public string? SigningKeyId { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional system identifier for audit trail.
|
||||
/// </summary>
|
||||
public string? ExportedBy { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of importing a minimal proof bundle.
|
||||
/// </summary>
|
||||
public sealed record MinimalProofImportResult
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether the import was successful.
|
||||
/// </summary>
|
||||
public required bool Success { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The imported decision digest.
|
||||
/// </summary>
|
||||
public required DecisionDigest Digest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// The chunk manifest.
|
||||
/// </summary>
|
||||
public required ChunkManifest Manifest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of chunks imported.
|
||||
/// </summary>
|
||||
public required int ChunksImported { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Number of chunks remaining to fetch (for lazy fetch scenarios).
|
||||
/// </summary>
|
||||
public required int ChunksPending { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Verification results.
|
||||
/// </summary>
|
||||
public required ImportVerification Verification { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Any warnings during import.
|
||||
/// </summary>
|
||||
public IReadOnlyList<string> Warnings { get; init; } = [];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verification results from importing a bundle.
|
||||
/// </summary>
|
||||
public sealed record ImportVerification
|
||||
{
|
||||
/// <summary>
|
||||
/// Whether the Merkle root matches the proof root.
|
||||
/// </summary>
|
||||
public required bool MerkleRootValid { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether the signature was verified (if present).
|
||||
/// </summary>
|
||||
public required bool? SignatureValid { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether all included chunks passed hash verification.
|
||||
/// </summary>
|
||||
public required bool ChunksValid { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Whether the digest integrity check passed.
|
||||
/// </summary>
|
||||
public required bool DigestValid { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// List of failed chunk indices (if any).
|
||||
/// </summary>
|
||||
public IReadOnlyList<int> FailedChunkIndices { get; init; } = [];
|
||||
}
|
||||
@@ -0,0 +1,457 @@
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Provenance.Attestation;

namespace StellaOps.Provcache;

/// <summary>
/// Implementation of <see cref="IMinimalProofExporter"/> supporting
/// multiple density levels for air-gap scenarios.
/// </summary>
public sealed class MinimalProofExporter : IMinimalProofExporter
{
    private readonly IProvcacheService _provcacheService;
    private readonly IEvidenceChunkRepository _chunkRepository;
    private readonly ISigner? _signer;
    private readonly TimeProvider _timeProvider;
    private readonly ILogger<MinimalProofExporter> _logger;

    private static readonly JsonSerializerOptions s_jsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull
    };

    public MinimalProofExporter(
        IProvcacheService provcacheService,
        IEvidenceChunkRepository chunkRepository,
        ISigner? signer = null,
        TimeProvider? timeProvider = null,
        ILogger<MinimalProofExporter>? logger = null)
    {
        _provcacheService = provcacheService ?? throw new ArgumentNullException(nameof(provcacheService));
        _chunkRepository = chunkRepository ?? throw new ArgumentNullException(nameof(chunkRepository));
        _signer = signer;
        _timeProvider = timeProvider ?? TimeProvider.System;
        _logger = logger ?? Microsoft.Extensions.Logging.Abstractions.NullLogger<MinimalProofExporter>.Instance;
    }

    /// <inheritdoc />
    public async Task<MinimalProofBundle> ExportAsync(
        string veriKey,
        MinimalProofExportOptions options,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(veriKey);
        ArgumentNullException.ThrowIfNull(options);

        _logger.LogDebug("Exporting minimal proof bundle for {VeriKey} with density {Density}",
            veriKey, options.Density);

        // Get the cache entry
        var cacheResult = await _provcacheService.GetAsync(veriKey, bypassCache: false, cancellationToken);
        if (cacheResult.Entry is null)
        {
            throw new InvalidOperationException($"Cache entry not found for VeriKey: {veriKey}");
        }

        var entry = cacheResult.Entry;
        var proofRoot = entry.Decision.ProofRoot;
        var now = _timeProvider.GetUtcNow();

        // Get the chunk manifest
        var manifest = await _chunkRepository.GetManifestAsync(proofRoot, cancellationToken)
            ?? throw new InvalidOperationException($"Chunk manifest not found for proof root: {proofRoot}");

        // Build chunks based on density
        var bundleChunks = await GetChunksForDensityAsync(
            proofRoot,
            manifest,
            options.Density,
            options.StandardDensityChunkCount,
            cancellationToken);

        // Build the bundle
        var bundle = new MinimalProofBundle
        {
            BundleVersion = "v1",
            Density = options.Density,
            Digest = entry.Decision,
            Manifest = manifest,
            Chunks = bundleChunks,
            ExportedAt = now,
            ExportedBy = options.ExportedBy
        };

        // Sign if requested
        if (options.Sign)
        {
            if (_signer is null)
            {
                throw new InvalidOperationException("Signing requested but no signer is configured.");
            }

            bundle = await SignBundleAsync(bundle, options.SigningKeyId, cancellationToken);
        }

        _logger.LogInformation(
            "Exported minimal proof bundle for {VeriKey}: density={Density}, chunks={ChunkCount}/{TotalChunks}, signed={Signed}",
            veriKey, options.Density, bundleChunks.Count, manifest.TotalChunks, options.Sign);

        return bundle;
    }

    /// <inheritdoc />
    public async Task<byte[]> ExportAsJsonAsync(
        string veriKey,
        MinimalProofExportOptions options,
        CancellationToken cancellationToken = default)
    {
        var bundle = await ExportAsync(veriKey, options, cancellationToken);
        return JsonSerializer.SerializeToUtf8Bytes(bundle, s_jsonOptions);
    }

    /// <inheritdoc />
    public async Task ExportToStreamAsync(
        string veriKey,
        MinimalProofExportOptions options,
        Stream outputStream,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(outputStream);

        var bundle = await ExportAsync(veriKey, options, cancellationToken);
        await JsonSerializer.SerializeAsync(outputStream, bundle, s_jsonOptions, cancellationToken);
    }

    /// <inheritdoc />
    public async Task<MinimalProofImportResult> ImportAsync(
        MinimalProofBundle bundle,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(bundle);

        _logger.LogDebug("Importing minimal proof bundle: density={Density}, chunks={ChunkCount}",
            bundle.Density, bundle.Chunks.Count);

        var warnings = new List<string>();

        // Verify the bundle
        var verification = await VerifyAsync(bundle, cancellationToken);

        if (!verification.DigestValid)
        {
            return new MinimalProofImportResult
            {
                Success = false,
                Digest = bundle.Digest,
                Manifest = bundle.Manifest,
                ChunksImported = 0,
                ChunksPending = bundle.Manifest.TotalChunks,
                Verification = verification,
                Warnings = ["Digest verification failed."]
            };
        }

        if (!verification.MerkleRootValid)
        {
            return new MinimalProofImportResult
            {
                Success = false,
                Digest = bundle.Digest,
                Manifest = bundle.Manifest,
                ChunksImported = 0,
                ChunksPending = bundle.Manifest.TotalChunks,
                Verification = verification,
                Warnings = ["Merkle root verification failed."]
            };
        }

        if (!verification.ChunksValid)
        {
            warnings.Add($"Some chunks failed verification: indices {string.Join(", ", verification.FailedChunkIndices)}");
        }

        if (verification.SignatureValid == false)
        {
            warnings.Add("Signature verification failed.");
        }

        // Store chunks
        var chunksToStore = new List<EvidenceChunk>();
        var now = _timeProvider.GetUtcNow();

        foreach (var bundleChunk in bundle.Chunks)
        {
            if (verification.FailedChunkIndices.Contains(bundleChunk.Index))
            {
                continue; // Skip failed chunks
            }

            chunksToStore.Add(new EvidenceChunk
            {
                ChunkId = Guid.NewGuid(),
                ProofRoot = bundle.Digest.ProofRoot,
                ChunkIndex = bundleChunk.Index,
                ChunkHash = bundleChunk.Hash,
                Blob = Convert.FromBase64String(bundleChunk.Data),
                BlobSize = bundleChunk.Size,
                ContentType = bundleChunk.ContentType,
                CreatedAt = now
            });
        }

        if (chunksToStore.Count > 0)
        {
            await _chunkRepository.StoreChunksAsync(bundle.Digest.ProofRoot, chunksToStore, cancellationToken);
        }

        var chunksImported = chunksToStore.Count;
        var chunksPending = bundle.Manifest.TotalChunks - chunksImported;

        _logger.LogInformation(
            "Imported minimal proof bundle: chunksImported={ChunksImported}, chunksPending={ChunksPending}",
            chunksImported, chunksPending);

        return new MinimalProofImportResult
        {
            Success = verification.DigestValid && verification.MerkleRootValid,
            Digest = bundle.Digest,
            Manifest = bundle.Manifest,
            ChunksImported = chunksImported,
            ChunksPending = chunksPending,
            Verification = verification,
            Warnings = warnings
        };
    }

    /// <inheritdoc />
    public async Task<MinimalProofImportResult> ImportFromJsonAsync(
        byte[] jsonBytes,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(jsonBytes);

        var bundle = JsonSerializer.Deserialize<MinimalProofBundle>(jsonBytes, s_jsonOptions)
            ?? throw new InvalidOperationException("Failed to deserialize bundle.");

        return await ImportAsync(bundle, cancellationToken);
    }

    /// <inheritdoc />
    public async Task<MinimalProofImportResult> ImportFromStreamAsync(
        Stream inputStream,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(inputStream);

        var bundle = await JsonSerializer.DeserializeAsync<MinimalProofBundle>(inputStream, s_jsonOptions, cancellationToken)
            ?? throw new InvalidOperationException("Failed to deserialize bundle.");

        return await ImportAsync(bundle, cancellationToken);
    }

    /// <inheritdoc />
    public Task<ImportVerification> VerifyAsync(
        MinimalProofBundle bundle,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(bundle);

        // Verify digest integrity
        var digestValid = VerifyDigest(bundle.Digest);

        // Verify the Merkle root matches the digest
        var merkleRootValid = string.Equals(
            bundle.Manifest.ProofRoot,
            bundle.Digest.ProofRoot,
            StringComparison.OrdinalIgnoreCase);

        // Verify included chunks
        var failedChunks = new List<int>();
        foreach (var chunk in bundle.Chunks)
        {
            if (!VerifyChunk(chunk))
            {
                failedChunks.Add(chunk.Index);
            }
        }

        var chunksValid = failedChunks.Count == 0;

        // Verify the signature if present
        bool? signatureValid = null;
        if (bundle.Signature is not null)
        {
            signatureValid = VerifySignature(bundle);
        }

        return Task.FromResult(new ImportVerification
        {
            DigestValid = digestValid,
            MerkleRootValid = merkleRootValid,
            ChunksValid = chunksValid,
            SignatureValid = signatureValid,
            FailedChunkIndices = failedChunks
        });
    }

    /// <inheritdoc />
    public async Task<long> EstimateExportSizeAsync(
        string veriKey,
        ProofDensity density,
        int standardChunkCount = 3,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(veriKey);

        var cacheResult = await _provcacheService.GetAsync(veriKey, bypassCache: false, cancellationToken);
        if (cacheResult.Entry is null)
        {
            return 0;
        }

        var proofRoot = cacheResult.Entry.Decision.ProofRoot;
        var manifest = await _chunkRepository.GetManifestAsync(proofRoot, cancellationToken);
        if (manifest is null)
        {
            return 0;
        }

        // Base size: digest + manifest (roughly 2KB)
        const long baseSize = 2048;

        return density switch
        {
            ProofDensity.Lite => baseSize,
            ProofDensity.Standard => baseSize + CalculateChunkDataSize(manifest, standardChunkCount),
            ProofDensity.Strict => baseSize + CalculateChunkDataSize(manifest, manifest.TotalChunks),
            _ => baseSize
        };
    }

    private async Task<IReadOnlyList<BundleChunk>> GetChunksForDensityAsync(
        string proofRoot,
        ChunkManifest manifest,
        ProofDensity density,
        int standardChunkCount,
        CancellationToken cancellationToken)
    {
        var chunkCount = density switch
        {
            ProofDensity.Lite => 0,
            ProofDensity.Standard => Math.Min(standardChunkCount, manifest.TotalChunks),
            ProofDensity.Strict => manifest.TotalChunks,
            _ => 0
        };

        if (chunkCount == 0)
        {
            return [];
        }

        var chunks = await _chunkRepository.GetChunkRangeAsync(
            proofRoot,
            startIndex: 0,
            count: chunkCount,
            cancellationToken);

        return chunks.Select(c => new BundleChunk
        {
            Index = c.ChunkIndex,
            Hash = c.ChunkHash,
            Size = c.BlobSize,
            ContentType = c.ContentType,
            Data = Convert.ToBase64String(c.Blob)
        }).ToList();
    }

    private async Task<MinimalProofBundle> SignBundleAsync(
        MinimalProofBundle bundle,
        string? signingKeyId,
        CancellationToken cancellationToken)
    {
        if (_signer is null)
        {
            throw new InvalidOperationException("Signer is not configured.");
        }

        // Serialize the bundle without its signature to produce the signing payload
        var bundleWithoutSig = bundle with { Signature = null };
        var payload = JsonSerializer.SerializeToUtf8Bytes(bundleWithoutSig, s_jsonOptions);

        var signRequest = new SignRequest(
            Payload: payload,
            ContentType: "application/vnd.stellaops.proof-bundle+json");

        var signResult = await _signer.SignAsync(signRequest, cancellationToken);

        return bundle with
        {
            Signature = new BundleSignature
            {
                Algorithm = "HMAC-SHA256", // Could be made configurable
                KeyId = signResult.KeyId,
                SignatureBytes = Convert.ToBase64String(signResult.Signature),
                SignedAt = signResult.SignedAt
            }
        };
    }

    private static bool VerifyDigest(DecisionDigest digest)
    {
        // Basic integrity checks
        if (string.IsNullOrWhiteSpace(digest.VeriKey)) return false;
        if (string.IsNullOrWhiteSpace(digest.VerdictHash)) return false;
        if (string.IsNullOrWhiteSpace(digest.ProofRoot)) return false;
        if (digest.TrustScore < 0 || digest.TrustScore > 100) return false;
        if (digest.CreatedAt > digest.ExpiresAt) return false;

        return true;
    }

    private static bool VerifyChunk(BundleChunk chunk)
    {
        try
        {
            var data = Convert.FromBase64String(chunk.Data);
            if (data.Length != chunk.Size) return false;

            var hash = SHA256.HashData(data);
            var computedHash = $"sha256:{Convert.ToHexStringLower(hash)}";

            return string.Equals(computedHash, chunk.Hash, StringComparison.OrdinalIgnoreCase);
        }
        catch
        {
            return false;
        }
    }

    private bool VerifySignature(MinimalProofBundle bundle)
    {
        // Signature verification is not implemented yet; it would require
        // the signer's public key or certificate. For the MVP, the presence
        // of a signature is treated as valid.
        _logger.LogWarning("Signature verification not fully implemented - assuming valid");
        return bundle.Signature is not null;
    }

    private static long CalculateChunkDataSize(ChunkManifest manifest, int chunkCount)
    {
        if (chunkCount <= 0 || manifest.Chunks.Count == 0)
        {
            return 0;
        }

        var actualCount = Math.Min(chunkCount, manifest.TotalChunks);
        var rawSize = manifest.Chunks
            .Take(actualCount)
            .Sum(c => (long)c.Size);

        // Base64 adds ~33% overhead; the 1.37 factor also allows for JSON framing.
        return (long)(rawSize * 1.37);
    }
}
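A minimal round-trip sketch of the exporter above, assuming an IProvcacheService and IEvidenceChunkRepository are already wired up; the variable names and file path are illustrative only, not part of this commit.

var exporter = new MinimalProofExporter(provcacheService, chunkRepository);

// Export a Standard-density bundle (digest + manifest + first 3 chunks) to a file.
await using (var output = File.Create("bundle.json"))
{
    await exporter.ExportToStreamAsync(
        veriKey,
        new MinimalProofExportOptions { Density = ProofDensity.Standard },
        output);
}

// On the receiving side, import and inspect the verification results.
await using var input = File.OpenRead("bundle.json");
var result = await exporter.ImportFromStreamAsync(input);
if (!result.Success)
{
    Console.Error.WriteLine($"Import failed: {string.Join("; ", result.Warnings)}");
}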
203 src/__Libraries/StellaOps.Provcache/IEvidenceChunkRepository.cs Normal file
@@ -0,0 +1,203 @@
namespace StellaOps.Provcache;

/// <summary>
/// Repository for evidence chunk storage and retrieval.
/// </summary>
public interface IEvidenceChunkRepository
{
    /// <summary>
    /// Gets all chunks for a proof root.
    /// </summary>
    /// <param name="proofRoot">The proof root to get chunks for.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Ordered list of chunks.</returns>
    Task<IReadOnlyList<EvidenceChunk>> GetChunksAsync(
        string proofRoot,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets a specific chunk by index.
    /// </summary>
    /// <param name="proofRoot">The proof root.</param>
    /// <param name="chunkIndex">The chunk index.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The chunk or null if not found.</returns>
    Task<EvidenceChunk?> GetChunkAsync(
        string proofRoot,
        int chunkIndex,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets chunks in a range (for paged retrieval).
    /// </summary>
    /// <param name="proofRoot">The proof root.</param>
    /// <param name="startIndex">Starting chunk index (inclusive).</param>
    /// <param name="count">Number of chunks to retrieve.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Ordered list of chunks in the range.</returns>
    Task<IReadOnlyList<EvidenceChunk>> GetChunkRangeAsync(
        string proofRoot,
        int startIndex,
        int count,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the chunk manifest (metadata without blobs).
    /// </summary>
    /// <param name="proofRoot">The proof root.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The chunk manifest.</returns>
    Task<ChunkManifest?> GetManifestAsync(
        string proofRoot,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Stores multiple chunks for a proof root.
    /// </summary>
    /// <param name="proofRoot">The proof root.</param>
    /// <param name="chunks">The chunks to store.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task StoreChunksAsync(
        string proofRoot,
        IEnumerable<EvidenceChunk> chunks,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Deletes all chunks for a proof root.
    /// </summary>
    /// <param name="proofRoot">The proof root.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of chunks deleted.</returns>
    Task<int> DeleteChunksAsync(
        string proofRoot,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the total chunk count for a proof root.
    /// </summary>
    /// <param name="proofRoot">The proof root.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Number of chunks.</returns>
    Task<int> GetChunkCountAsync(
        string proofRoot,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the total storage size for a proof root.
    /// </summary>
    /// <param name="proofRoot">The proof root.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Total bytes stored.</returns>
    Task<long> GetTotalSizeAsync(
        string proofRoot,
        CancellationToken cancellationToken = default);
}

/// <summary>
/// Represents an evidence chunk.
/// </summary>
public sealed record EvidenceChunk
{
    /// <summary>
    /// Unique chunk identifier.
    /// </summary>
    public required Guid ChunkId { get; init; }

    /// <summary>
    /// The proof root this chunk belongs to.
    /// </summary>
    public required string ProofRoot { get; init; }

    /// <summary>
    /// Zero-based index within the proof.
    /// </summary>
    public required int ChunkIndex { get; init; }

    /// <summary>
    /// SHA256 hash of the chunk for verification.
    /// </summary>
    public required string ChunkHash { get; init; }

    /// <summary>
    /// The binary content.
    /// </summary>
    public required byte[] Blob { get; init; }

    /// <summary>
    /// Size of the blob in bytes.
    /// </summary>
    public required int BlobSize { get; init; }

    /// <summary>
    /// MIME type of the content.
    /// </summary>
    public required string ContentType { get; init; }

    /// <summary>
    /// When the chunk was created.
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }
}

/// <summary>
/// Manifest describing all chunks for a proof root (metadata only).
/// Used for lazy fetching where blobs are retrieved on demand.
/// </summary>
public sealed record ChunkManifest
{
    /// <summary>
    /// The proof root (Merkle root of all chunks).
    /// </summary>
    public required string ProofRoot { get; init; }

    /// <summary>
    /// Total number of chunks.
    /// </summary>
    public required int TotalChunks { get; init; }

    /// <summary>
    /// Total size of all chunks in bytes.
    /// </summary>
    public required long TotalSize { get; init; }

    /// <summary>
    /// Ordered list of chunk metadata.
    /// </summary>
    public required IReadOnlyList<ChunkMetadata> Chunks { get; init; }

    /// <summary>
    /// When the manifest was generated.
    /// </summary>
    public required DateTimeOffset GeneratedAt { get; init; }
}

/// <summary>
/// Metadata for a single chunk (no blob).
/// </summary>
public sealed record ChunkMetadata
{
    /// <summary>
    /// Chunk identifier.
    /// </summary>
    public required Guid ChunkId { get; init; }

    /// <summary>
    /// Zero-based index.
    /// </summary>
    public required int Index { get; init; }

    /// <summary>
    /// SHA256 hash for verification.
    /// </summary>
    public required string Hash { get; init; }

    /// <summary>
    /// Size in bytes.
    /// </summary>
    public required int Size { get; init; }

    /// <summary>
    /// Content type.
    /// </summary>
    public required string ContentType { get; init; }
}
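A sketch of how raw evidence might be split into EvidenceChunk records whose hashes match the "sha256:" format that MinimalProofExporter.VerifyChunk recomputes; the ChunkingExample helper and the 64KB chunk size are illustrative assumptions, not part of this commit.

using System.Security.Cryptography;

public static class ChunkingExample // hypothetical helper, not in this commit
{
    private const int ChunkSize = 64 * 1024; // assumed 64KB chunks, per the Standard-density comment

    public static List<EvidenceChunk> ChunkEvidence(string proofRoot, byte[] blob, string contentType, DateTimeOffset now)
    {
        var chunks = new List<EvidenceChunk>();
        for (int index = 0, offset = 0; offset < blob.Length; index++, offset += ChunkSize)
        {
            var size = Math.Min(ChunkSize, blob.Length - offset);
            var data = blob.AsSpan(offset, size).ToArray();
            chunks.Add(new EvidenceChunk
            {
                ChunkId = Guid.NewGuid(),
                ProofRoot = proofRoot,
                ChunkIndex = index,
                // Same "sha256:<lowercase hex>" shape that VerifyChunk expects.
                ChunkHash = $"sha256:{Convert.ToHexString(SHA256.HashData(data)).ToLowerInvariant()}",
                Blob = data,
                BlobSize = size,
                ContentType = contentType,
                CreatedAt = now
            });
        }
        return chunks;
    }
}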
@@ -0,0 +1,184 @@
using Microsoft.Extensions.Logging;
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
using StellaOps.Provcache.Events;

namespace StellaOps.Provcache.Invalidation;

/// <summary>
/// Invalidator that handles feed epoch advancement events.
/// When a feed advances to a new epoch, cache entries with an older feed_epoch are invalidated.
/// </summary>
public sealed class FeedEpochInvalidator : IProvcacheInvalidator
{
    private readonly IEventStream<FeedEpochAdvancedEvent> _eventStream;
    private readonly IProvcacheService _provcacheService;
    private readonly ILogger<FeedEpochInvalidator> _logger;
    private readonly TimeProvider _timeProvider;

    private CancellationTokenSource? _cts;
    private Task? _processingTask;
    private bool _isRunning;

    // Metrics
    private long _eventsProcessed;
    private long _entriesInvalidated;
    private long _errors;
    private DateTimeOffset? _lastEventAt;

    public FeedEpochInvalidator(
        IEventStream<FeedEpochAdvancedEvent> eventStream,
        IProvcacheService provcacheService,
        ILogger<FeedEpochInvalidator> logger,
        TimeProvider? timeProvider = null)
    {
        _eventStream = eventStream ?? throw new ArgumentNullException(nameof(eventStream));
        _provcacheService = provcacheService ?? throw new ArgumentNullException(nameof(provcacheService));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public string Name => "FeedEpochInvalidator";

    /// <inheritdoc />
    public bool IsRunning => _isRunning;

    /// <inheritdoc />
    public Task StartAsync(CancellationToken cancellationToken = default)
    {
        if (_isRunning)
        {
            _logger.LogWarning("FeedEpochInvalidator is already running");
            return Task.CompletedTask;
        }

        _cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
        _processingTask = ProcessEventsAsync(_cts.Token);
        _isRunning = true;

        _logger.LogInformation("FeedEpochInvalidator started, subscribing to {StreamName}", FeedEpochAdvancedEvent.StreamName);
        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public async Task StopAsync(CancellationToken cancellationToken = default)
    {
        if (!_isRunning)
        {
            return;
        }

        _logger.LogInformation("FeedEpochInvalidator stopping...");

        if (_cts is not null)
        {
            await _cts.CancelAsync();
        }

        if (_processingTask is not null)
        {
            try
            {
                await _processingTask.WaitAsync(cancellationToken);
            }
            catch (OperationCanceledException)
            {
                // Expected during shutdown
            }
        }

        _isRunning = false;
        _logger.LogInformation("FeedEpochInvalidator stopped");
    }

    /// <inheritdoc />
    public InvalidatorMetrics GetMetrics()
    {
        return new InvalidatorMetrics
        {
            EventsProcessed = Interlocked.Read(ref _eventsProcessed),
            EntriesInvalidated = Interlocked.Read(ref _entriesInvalidated),
            Errors = Interlocked.Read(ref _errors),
            LastEventAt = _lastEventAt,
            CollectedAt = _timeProvider.GetUtcNow()
        };
    }

    /// <inheritdoc />
    public async ValueTask DisposeAsync()
    {
        await StopAsync();
        _cts?.Dispose();
    }

    private async Task ProcessEventsAsync(CancellationToken cancellationToken)
    {
        try
        {
            // Start from the latest events
            await foreach (var streamEvent in _eventStream.SubscribeAsync(StreamPosition.End, cancellationToken))
            {
                try
                {
                    await HandleEventAsync(streamEvent.Event, cancellationToken);
                    Interlocked.Increment(ref _eventsProcessed);
                    _lastEventAt = _timeProvider.GetUtcNow();
                }
                catch (Exception ex)
                {
                    Interlocked.Increment(ref _errors);
                    _logger.LogError(ex,
                        "Error processing FeedEpochAdvancedEvent {EventId} for feed {FeedId}",
                        streamEvent.Event.EventId,
                        streamEvent.Event.FeedId);
                }
            }
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Normal shutdown
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Fatal error in FeedEpochInvalidator event processing loop");
            throw;
        }
    }

    private async Task HandleEventAsync(FeedEpochAdvancedEvent @event, CancellationToken cancellationToken)
    {
        _logger.LogInformation(
            "Processing feed epoch advancement: FeedId={FeedId}, PreviousEpoch={PreviousEpoch}, NewEpoch={NewEpoch}",
            @event.FeedId,
            @event.PreviousEpoch,
            @event.NewEpoch);

        // Invalidate entries with a feed_epoch older than the new epoch.
        // The feed_epoch in cache entries is formatted as "feed:epoch" (e.g., "cve:2024-12-24T12:00:00Z").
        var request = InvalidationRequest.ByFeedEpochOlderThan(
            @event.NewEpoch,
            $"Feed {FormatFeedEpoch(@event.FeedId, @event.NewEpoch)} advanced");

        var result = await _provcacheService.InvalidateByAsync(request, cancellationToken);

        Interlocked.Add(ref _entriesInvalidated, result.EntriesAffected);

        _logger.LogInformation(
            "Feed epoch advancement invalidated {Count} cache entries for feed {FeedId} epoch {NewEpoch}",
            result.EntriesAffected,
            @event.FeedId,
            @event.NewEpoch);

        // Record telemetry
        ProvcacheTelemetry.RecordInvalidation("feed_epoch", result.EntriesAffected);
    }

    /// <summary>
    /// Formats a feed epoch identifier.
    /// </summary>
    private static string FormatFeedEpoch(string feedId, string epoch)
    {
        return $"{feedId}:{epoch}";
    }
}
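A lifecycle sketch for the invalidator above, assuming an IEventStream<FeedEpochAdvancedEvent>, an IProvcacheService, and a logger are available from DI; names are illustrative.

await using var invalidator = new FeedEpochInvalidator(eventStream, provcacheService, logger);
await invalidator.StartAsync();

// ... later, e.g. from a diagnostics endpoint:
var metrics = invalidator.GetMetrics();
Console.WriteLine($"{invalidator.Name}: processed={metrics.EventsProcessed}, invalidated={metrics.EntriesInvalidated}, errors={metrics.Errors}");

await invalidator.StopAsync();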
@@ -0,0 +1,66 @@
namespace StellaOps.Provcache.Invalidation;

/// <summary>
/// Interface for cache invalidation handlers that respond to external events.
/// Implementations subscribe to event streams and invalidate cache entries accordingly.
/// </summary>
public interface IProvcacheInvalidator : IAsyncDisposable
{
    /// <summary>
    /// Gets the name of this invalidator for diagnostics.
    /// </summary>
    string Name { get; }

    /// <summary>
    /// Gets whether this invalidator is currently subscribed and processing events.
    /// </summary>
    bool IsRunning { get; }

    /// <summary>
    /// Starts processing invalidation events.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task StartAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// Stops processing invalidation events.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task StopAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets metrics for this invalidator.
    /// </summary>
    InvalidatorMetrics GetMetrics();
}

/// <summary>
/// Metrics for a cache invalidator.
/// </summary>
public sealed record InvalidatorMetrics
{
    /// <summary>
    /// Total number of events processed.
    /// </summary>
    public required long EventsProcessed { get; init; }

    /// <summary>
    /// Total number of cache entries invalidated.
    /// </summary>
    public required long EntriesInvalidated { get; init; }

    /// <summary>
    /// Number of processing errors encountered.
    /// </summary>
    public required long Errors { get; init; }

    /// <summary>
    /// Last event processed timestamp.
    /// </summary>
    public DateTimeOffset? LastEventAt { get; init; }

    /// <summary>
    /// When these metrics were collected.
    /// </summary>
    public required DateTimeOffset CollectedAt { get; init; }
}
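One plausible way to host these invalidators is behind a single BackgroundService that starts each registered implementation; the InvalidatorHostService below is an illustrative sketch, not part of this commit.

using Microsoft.Extensions.Hosting;

public sealed class InvalidatorHostService : BackgroundService // hypothetical host, not in this commit
{
    private readonly IReadOnlyList<IProvcacheInvalidator> _invalidators;

    public InvalidatorHostService(IEnumerable<IProvcacheInvalidator> invalidators)
        => _invalidators = invalidators.ToList();

    protected override async Task ExecuteAsync(CancellationToken stoppingToken)
    {
        // Start every registered invalidator (FeedEpochInvalidator, SignerSetInvalidator, ...).
        foreach (var invalidator in _invalidators)
        {
            await invalidator.StartAsync(stoppingToken);
        }

        try
        {
            // Keep the host alive until shutdown is requested.
            await Task.Delay(Timeout.Infinite, stoppingToken);
        }
        catch (OperationCanceledException)
        {
            // Shutdown requested.
        }

        foreach (var invalidator in _invalidators)
        {
            await invalidator.StopAsync(CancellationToken.None);
        }
    }
}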
@@ -0,0 +1,177 @@
using Microsoft.Extensions.Logging;
using StellaOps.Messaging;
using StellaOps.Messaging.Abstractions;
using StellaOps.Provcache.Events;

namespace StellaOps.Provcache.Invalidation;

/// <summary>
/// Invalidator that handles signer revocation events.
/// When a signer is revoked, all cache entries with a matching signer_set_hash are invalidated.
/// </summary>
public sealed class SignerSetInvalidator : IProvcacheInvalidator
{
    private readonly IEventStream<SignerRevokedEvent> _eventStream;
    private readonly IProvcacheService _provcacheService;
    private readonly ILogger<SignerSetInvalidator> _logger;
    private readonly TimeProvider _timeProvider;

    private CancellationTokenSource? _cts;
    private Task? _processingTask;
    private bool _isRunning;

    // Metrics
    private long _eventsProcessed;
    private long _entriesInvalidated;
    private long _errors;
    private DateTimeOffset? _lastEventAt;

    public SignerSetInvalidator(
        IEventStream<SignerRevokedEvent> eventStream,
        IProvcacheService provcacheService,
        ILogger<SignerSetInvalidator> logger,
        TimeProvider? timeProvider = null)
    {
        _eventStream = eventStream ?? throw new ArgumentNullException(nameof(eventStream));
        _provcacheService = provcacheService ?? throw new ArgumentNullException(nameof(provcacheService));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public string Name => "SignerSetInvalidator";

    /// <inheritdoc />
    public bool IsRunning => _isRunning;

    /// <inheritdoc />
    public Task StartAsync(CancellationToken cancellationToken = default)
    {
        if (_isRunning)
        {
            _logger.LogWarning("SignerSetInvalidator is already running");
            return Task.CompletedTask;
        }

        _cts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
        _processingTask = ProcessEventsAsync(_cts.Token);
        _isRunning = true;

        _logger.LogInformation("SignerSetInvalidator started, subscribing to {StreamName}", SignerRevokedEvent.StreamName);
        return Task.CompletedTask;
    }

    /// <inheritdoc />
    public async Task StopAsync(CancellationToken cancellationToken = default)
    {
        if (!_isRunning)
        {
            return;
        }

        _logger.LogInformation("SignerSetInvalidator stopping...");

        if (_cts is not null)
        {
            await _cts.CancelAsync();
        }

        if (_processingTask is not null)
        {
            try
            {
                await _processingTask.WaitAsync(cancellationToken);
            }
            catch (OperationCanceledException)
            {
                // Expected during shutdown
            }
        }

        _isRunning = false;
        _logger.LogInformation("SignerSetInvalidator stopped");
    }

    /// <inheritdoc />
    public InvalidatorMetrics GetMetrics()
    {
        return new InvalidatorMetrics
        {
            EventsProcessed = Interlocked.Read(ref _eventsProcessed),
            EntriesInvalidated = Interlocked.Read(ref _entriesInvalidated),
            Errors = Interlocked.Read(ref _errors),
            LastEventAt = _lastEventAt,
            CollectedAt = _timeProvider.GetUtcNow()
        };
    }

    /// <inheritdoc />
    public async ValueTask DisposeAsync()
    {
        await StopAsync();
        _cts?.Dispose();
    }

    private async Task ProcessEventsAsync(CancellationToken cancellationToken)
    {
        try
        {
            // Start from the latest events (we don't want to replay old revocations)
            await foreach (var streamEvent in _eventStream.SubscribeAsync(StreamPosition.End, cancellationToken))
            {
                try
                {
                    await HandleEventAsync(streamEvent.Event, cancellationToken);
                    Interlocked.Increment(ref _eventsProcessed);
                    _lastEventAt = _timeProvider.GetUtcNow();
                }
                catch (Exception ex)
                {
                    Interlocked.Increment(ref _errors);
                    _logger.LogError(ex,
                        "Error processing SignerRevokedEvent {EventId} for signer {SignerHash}",
                        streamEvent.Event.EventId,
                        streamEvent.Event.SignerHash);
                }
            }
        }
        catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
        {
            // Normal shutdown
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Fatal error in SignerSetInvalidator event processing loop");
            throw;
        }
    }

    private async Task HandleEventAsync(SignerRevokedEvent @event, CancellationToken cancellationToken)
    {
        _logger.LogInformation(
            "Processing signer revocation: AnchorId={AnchorId}, KeyId={KeyId}, SignerHash={SignerHash}, Reason={Reason}",
            @event.AnchorId,
            @event.KeyId,
            @event.SignerHash,
            @event.Reason);

        // Create an invalidation request for entries with this signer hash
        var request = InvalidationRequest.BySignerSetHash(
            @event.SignerHash,
            $"Signer revoked: {@event.Reason ?? "unspecified"}");

        request = request with { Actor = @event.Actor ?? "SignerSetInvalidator" };

        var result = await _provcacheService.InvalidateByAsync(request, cancellationToken);

        Interlocked.Add(ref _entriesInvalidated, result.EntriesAffected);

        _logger.LogInformation(
            "Signer revocation invalidated {Count} cache entries for signer {SignerHash}",
            result.EntriesAffected,
            @event.SignerHash);

        // Record telemetry
        ProvcacheTelemetry.RecordInvalidation("signer_revocation", result.EntriesAffected);
    }
}
@@ -0,0 +1,257 @@
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text.Json;
using Microsoft.Extensions.Logging;

namespace StellaOps.Provcache;

/// <summary>
/// File-based lazy evidence chunk fetcher for sneakernet mode.
/// Fetches chunks from a local directory (e.g., USB drive, NFS mount).
/// </summary>
public sealed class FileChunkFetcher : ILazyEvidenceFetcher
{
    private readonly string _basePath;
    private readonly ILogger<FileChunkFetcher> _logger;
    private readonly JsonSerializerOptions _jsonOptions;

    /// <inheritdoc />
    public string FetcherType => "file";

    /// <summary>
    /// Creates a file chunk fetcher with the specified base directory.
    /// </summary>
    /// <param name="basePath">The base directory containing evidence files.</param>
    /// <param name="logger">Logger instance.</param>
    public FileChunkFetcher(string basePath, ILogger<FileChunkFetcher> logger)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(basePath);

        _basePath = Path.GetFullPath(basePath);
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _jsonOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            PropertyNameCaseInsensitive = true
        };

        _logger.LogDebug("FileChunkFetcher initialized with base path: {BasePath}", _basePath);
    }

    /// <inheritdoc />
    public async Task<FetchedChunk?> FetchChunkAsync(
        string proofRoot,
        int chunkIndex,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentOutOfRangeException.ThrowIfNegative(chunkIndex);

        var chunkPath = GetChunkPath(proofRoot, chunkIndex);
        _logger.LogDebug("Looking for chunk at {Path}", chunkPath);

        if (!File.Exists(chunkPath))
        {
            _logger.LogDebug("Chunk file not found: {Path}", chunkPath);
            return null;
        }

        try
        {
            await using var stream = File.OpenRead(chunkPath);
            var chunk = await JsonSerializer.DeserializeAsync<FetchedChunk>(stream, _jsonOptions, cancellationToken);
            _logger.LogDebug("Successfully loaded chunk {Index}, {Bytes} bytes", chunkIndex, chunk?.Data.Length ?? 0);
            return chunk;
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Error reading chunk file {Path}", chunkPath);
            throw;
        }
    }

    /// <inheritdoc />
    public async IAsyncEnumerable<FetchedChunk> FetchChunksAsync(
        string proofRoot,
        IEnumerable<int> chunkIndices,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentNullException.ThrowIfNull(chunkIndices);

        var indices = chunkIndices.ToList();
        _logger.LogInformation("Fetching {Count} chunks from file system for proof root {ProofRoot}", indices.Count, proofRoot);

        foreach (var index in indices)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var chunk = await FetchChunkAsync(proofRoot, index, cancellationToken);
            if (chunk is not null)
            {
                yield return chunk;
            }
        }
    }

    /// <inheritdoc />
    public async IAsyncEnumerable<FetchedChunk> FetchRemainingChunksAsync(
        string proofRoot,
        ChunkManifest manifest,
        IReadOnlySet<int> existingIndices,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentNullException.ThrowIfNull(manifest);
        ArgumentNullException.ThrowIfNull(existingIndices);

        var missingIndices = Enumerable.Range(0, manifest.TotalChunks)
            .Where(i => !existingIndices.Contains(i))
            .ToList();

        _logger.LogInformation(
            "Fetching {MissingCount} remaining chunks from files (have {ExistingCount}/{TotalCount})",
            missingIndices.Count, existingIndices.Count, manifest.TotalChunks);

        await foreach (var chunk in FetchChunksAsync(proofRoot, missingIndices, cancellationToken))
        {
            yield return chunk;
        }
    }

    /// <inheritdoc />
    public Task<bool> IsAvailableAsync(CancellationToken cancellationToken = default)
    {
        var isAvailable = Directory.Exists(_basePath);
        _logger.LogDebug("File fetcher availability check: {IsAvailable}", isAvailable);
        return Task.FromResult(isAvailable);
    }

    /// <inheritdoc />
    public async Task<ChunkManifest?> FetchManifestAsync(
        string proofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        var manifestPath = GetManifestPath(proofRoot);
        _logger.LogDebug("Looking for manifest at {Path}", manifestPath);

        if (!File.Exists(manifestPath))
        {
            _logger.LogDebug("Manifest file not found: {Path}", manifestPath);
            return null;
        }

        try
        {
            await using var stream = File.OpenRead(manifestPath);
            return await JsonSerializer.DeserializeAsync<ChunkManifest>(stream, _jsonOptions, cancellationToken);
        }
        catch (Exception ex)
        {
            _logger.LogWarning(ex, "Error reading manifest file {Path}", manifestPath);
            throw;
        }
    }

    /// <summary>
    /// Gets the file path for a chunk.
    /// </summary>
    private string GetChunkPath(string proofRoot, int chunkIndex)
    {
        // Sanitize the proof root for use in file paths
        var safeProofRoot = SanitizeForPath(proofRoot);
        return Path.Combine(_basePath, safeProofRoot, $"chunk_{chunkIndex:D4}.json");
    }

    /// <summary>
    /// Gets the file path for a manifest.
    /// </summary>
    private string GetManifestPath(string proofRoot)
    {
        var safeProofRoot = SanitizeForPath(proofRoot);
        return Path.Combine(_basePath, safeProofRoot, "manifest.json");
    }

    /// <summary>
    /// Sanitizes a proof root for use in file paths.
    /// </summary>
    private static string SanitizeForPath(string input)
    {
        // Use a hash prefix to ensure consistent directory naming
        var hash = Convert.ToHexString(SHA256.HashData(System.Text.Encoding.UTF8.GetBytes(input))).ToLowerInvariant();

        // Return the first 16 chars of the hash for reasonable directory names
        return hash[..16];
    }

    /// <summary>
    /// Exports chunks to files for sneakernet transfer.
    /// </summary>
    /// <param name="proofRoot">The proof root.</param>
    /// <param name="manifest">The chunk manifest.</param>
    /// <param name="chunks">The chunks to export.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    public async Task ExportToFilesAsync(
        string proofRoot,
        ChunkManifest manifest,
        IEnumerable<FetchedChunk> chunks,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentNullException.ThrowIfNull(manifest);
        ArgumentNullException.ThrowIfNull(chunks);

        var safeProofRoot = SanitizeForPath(proofRoot);
        var proofDir = Path.Combine(_basePath, safeProofRoot);

        Directory.CreateDirectory(proofDir);
        _logger.LogInformation("Exporting to {Directory}", proofDir);

        // Write the manifest
        var manifestPath = GetManifestPath(proofRoot);
        await using (var manifestStream = File.Create(manifestPath))
        {
            await JsonSerializer.SerializeAsync(manifestStream, manifest, _jsonOptions, cancellationToken);
        }
        _logger.LogDebug("Wrote manifest to {Path}", manifestPath);

        // Write the chunks
        var count = 0;
        foreach (var chunk in chunks)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var chunkPath = GetChunkPath(proofRoot, chunk.Index);
            await using var chunkStream = File.Create(chunkPath);
            await JsonSerializer.SerializeAsync(chunkStream, chunk, _jsonOptions, cancellationToken);
            count++;
        }

        _logger.LogInformation("Exported {Count} chunks to {Directory}", count, proofDir);
    }

    /// <summary>
    /// Exports EvidenceChunks to files (converts to the FetchedChunk format).
    /// </summary>
    /// <param name="proofRoot">The proof root.</param>
    /// <param name="manifest">The chunk manifest.</param>
    /// <param name="chunks">The evidence chunks to export.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    public Task ExportEvidenceChunksToFilesAsync(
        string proofRoot,
        ChunkManifest manifest,
        IEnumerable<EvidenceChunk> chunks,
        CancellationToken cancellationToken = default)
    {
        var fetchedChunks = chunks.Select(c => new FetchedChunk
        {
            Index = c.ChunkIndex,
            Data = c.Blob,
            Hash = c.ChunkHash
        });

        return ExportToFilesAsync(proofRoot, manifest, fetchedChunks, cancellationToken);
    }
}
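A sneakernet round-trip sketch for the fetcher above, assuming "/mnt/usb/evidence" is the removable-media mount; the paths and variable names are illustrative placeholders.

// Side A: write the manifest plus all chunks onto removable media.
var writer = new FileChunkFetcher("/mnt/usb/evidence", logger);
await writer.ExportEvidenceChunksToFilesAsync(proofRoot, manifest, chunks);

// Side B (air-gapped): read them back on demand.
var reader = new FileChunkFetcher("/mnt/usb/evidence", logger);
if (await reader.IsAvailableAsync())
{
    var remoteManifest = await reader.FetchManifestAsync(proofRoot);
    var existingIndices = new HashSet<int>(); // indices already stored locally; empty on first import
    await foreach (var chunk in reader.FetchRemainingChunksAsync(proofRoot, remoteManifest!, existingIndices))
    {
        // Verify chunk.Hash, then persist (e.g., via IEvidenceChunkRepository.StoreChunksAsync).
    }
}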
@@ -0,0 +1,194 @@
using System.Net.Http.Json;
using System.Runtime.CompilerServices;
using System.Text.Json;
using Microsoft.Extensions.Logging;

namespace StellaOps.Provcache;

/// <summary>
/// HTTP-based lazy evidence chunk fetcher for connected mode.
/// Fetches chunks from a remote Stella API endpoint.
/// </summary>
public sealed class HttpChunkFetcher : ILazyEvidenceFetcher, IDisposable
{
    private readonly HttpClient _httpClient;
    private readonly bool _ownsClient;
    private readonly ILogger<HttpChunkFetcher> _logger;
    private readonly JsonSerializerOptions _jsonOptions;

    /// <inheritdoc />
    public string FetcherType => "http";

    /// <summary>
    /// Creates an HTTP chunk fetcher with the specified base URL.
    /// </summary>
    /// <param name="baseUrl">The base URL of the Stella API.</param>
    /// <param name="logger">Logger instance.</param>
    public HttpChunkFetcher(Uri baseUrl, ILogger<HttpChunkFetcher> logger)
        : this(CreateClient(baseUrl), ownsClient: true, logger)
    {
    }

    /// <summary>
    /// Creates an HTTP chunk fetcher with an existing HTTP client.
    /// </summary>
    /// <param name="httpClient">The HTTP client to use.</param>
    /// <param name="ownsClient">Whether this fetcher owns the client lifecycle.</param>
    /// <param name="logger">Logger instance.</param>
    public HttpChunkFetcher(HttpClient httpClient, bool ownsClient, ILogger<HttpChunkFetcher> logger)
    {
        _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient));
        _ownsClient = ownsClient;
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _jsonOptions = new JsonSerializerOptions
        {
            PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
            PropertyNameCaseInsensitive = true
        };
    }

    private static HttpClient CreateClient(Uri baseUrl)
    {
        var client = new HttpClient { BaseAddress = baseUrl };
        client.DefaultRequestHeaders.Add("Accept", "application/json");
        return client;
    }

    /// <inheritdoc />
    public async Task<FetchedChunk?> FetchChunkAsync(
        string proofRoot,
        int chunkIndex,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentOutOfRangeException.ThrowIfNegative(chunkIndex);

        var url = $"api/v1/evidence/{Uri.EscapeDataString(proofRoot)}/chunks/{chunkIndex}";
        _logger.LogDebug("Fetching chunk {Index} from {Url}", chunkIndex, url);

        try
        {
            var response = await _httpClient.GetAsync(url, cancellationToken);

            if (response.StatusCode == System.Net.HttpStatusCode.NotFound)
            {
                _logger.LogDebug("Chunk {Index} not found at remote", chunkIndex);
                return null;
            }

            response.EnsureSuccessStatusCode();

            var chunk = await response.Content.ReadFromJsonAsync<FetchedChunk>(_jsonOptions, cancellationToken);
            _logger.LogDebug("Successfully fetched chunk {Index}, {Bytes} bytes", chunkIndex, chunk?.Data.Length ?? 0);
            return chunk;
        }
        catch (HttpRequestException ex)
        {
            _logger.LogWarning(ex, "HTTP error fetching chunk {Index}", chunkIndex);
            throw;
        }
    }

    /// <inheritdoc />
    public async IAsyncEnumerable<FetchedChunk> FetchChunksAsync(
        string proofRoot,
        IEnumerable<int> chunkIndices,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentNullException.ThrowIfNull(chunkIndices);

        var indices = chunkIndices.ToList();
        _logger.LogInformation("Fetching {Count} chunks for proof root {ProofRoot}", indices.Count, proofRoot);

        foreach (var index in indices)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var chunk = await FetchChunkAsync(proofRoot, index, cancellationToken);
            if (chunk is not null)
            {
                yield return chunk;
            }
        }
    }

    /// <inheritdoc />
    public async IAsyncEnumerable<FetchedChunk> FetchRemainingChunksAsync(
        string proofRoot,
        ChunkManifest manifest,
        IReadOnlySet<int> existingIndices,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentNullException.ThrowIfNull(manifest);
        ArgumentNullException.ThrowIfNull(existingIndices);

        var missingIndices = Enumerable.Range(0, manifest.TotalChunks)
            .Where(i => !existingIndices.Contains(i))
            .ToList();

        _logger.LogInformation(
            "Fetching {MissingCount} remaining chunks (have {ExistingCount}/{TotalCount})",
            missingIndices.Count, existingIndices.Count, manifest.TotalChunks);

        await foreach (var chunk in FetchChunksAsync(proofRoot, missingIndices, cancellationToken))
        {
            yield return chunk;
        }
    }

    /// <inheritdoc />
    public async Task<bool> IsAvailableAsync(CancellationToken cancellationToken = default)
    {
        try
        {
            var response = await _httpClient.GetAsync("api/v1/health", cancellationToken);
            return response.IsSuccessStatusCode;
        }
        catch (Exception ex)
        {
            _logger.LogDebug(ex, "Health check failed");
            return false;
        }
    }

    /// <inheritdoc />
    public async Task<ChunkManifest?> FetchManifestAsync(
        string proofRoot,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);

        var url = $"api/v1/evidence/{Uri.EscapeDataString(proofRoot)}/manifest";
        _logger.LogDebug("Fetching manifest from {Url}", url);

        try
        {
            var response = await _httpClient.GetAsync(url, cancellationToken);

            if (response.StatusCode == System.Net.HttpStatusCode.NotFound)
            {
                _logger.LogDebug("Manifest not found for proof root {ProofRoot}", proofRoot);
                return null;
            }

            response.EnsureSuccessStatusCode();
            return await response.Content.ReadFromJsonAsync<ChunkManifest>(_jsonOptions, cancellationToken);
        }
        catch (HttpRequestException ex)
        {
            _logger.LogWarning(ex, "HTTP error fetching manifest for {ProofRoot}", proofRoot);
            throw;
        }
    }

    /// <inheritdoc />
    public void Dispose()
    {
        if (_ownsClient)
        {
            _httpClient.Dispose();
        }
    }
}
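Because both fetchers implement ILazyEvidenceFetcher, a caller can probe connectivity and fall back to files. A hedged sketch; the endpoint URL and mount path are placeholders, not values from this commit.

ILazyEvidenceFetcher fetcher;
var httpFetcher = new HttpChunkFetcher(new Uri("https://stella.example.internal/"), httpLogger);

if (await httpFetcher.IsAvailableAsync())
{
    fetcher = httpFetcher; // connected mode: pull chunks over the API
}
else
{
    httpFetcher.Dispose();
    fetcher = new FileChunkFetcher("/mnt/usb/evidence", fileLogger); // sneakernet fallback
}

var manifest = await fetcher.FetchManifestAsync(proofRoot);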
@@ -0,0 +1,131 @@
namespace StellaOps.Provcache;

/// <summary>
/// Interface for lazy evidence chunk fetching from various sources.
/// Enables on-demand evidence retrieval for air-gapped auditors.
/// </summary>
public interface ILazyEvidenceFetcher
{
    /// <summary>
    /// Gets the fetcher type (e.g., "http", "file").
    /// </summary>
    string FetcherType { get; }

    /// <summary>
    /// Fetches a single chunk by index.
    /// </summary>
    /// <param name="proofRoot">The proof root identifying the evidence.</param>
    /// <param name="chunkIndex">The chunk index to fetch.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The fetched chunk or null if not found.</returns>
    Task<FetchedChunk?> FetchChunkAsync(
        string proofRoot,
        int chunkIndex,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Fetches multiple chunks by index.
    /// </summary>
    /// <param name="proofRoot">The proof root identifying the evidence.</param>
    /// <param name="chunkIndices">The chunk indices to fetch.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Async enumerable of fetched chunks.</returns>
    IAsyncEnumerable<FetchedChunk> FetchChunksAsync(
        string proofRoot,
        IEnumerable<int> chunkIndices,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Fetches all remaining chunks for a proof root.
    /// </summary>
    /// <param name="proofRoot">The proof root identifying the evidence.</param>
    /// <param name="manifest">The chunk manifest for reference.</param>
    /// <param name="existingIndices">Indices of chunks already present locally.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Async enumerable of fetched chunks.</returns>
    IAsyncEnumerable<FetchedChunk> FetchRemainingChunksAsync(
        string proofRoot,
        ChunkManifest manifest,
        IReadOnlySet<int> existingIndices,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Checks if the source is available for fetching.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>True if the source is available.</returns>
    Task<bool> IsAvailableAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the manifest from the source.
    /// </summary>
    /// <param name="proofRoot">The proof root to get the manifest for.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The chunk manifest or null if not available.</returns>
    Task<ChunkManifest?> FetchManifestAsync(
        string proofRoot,
        CancellationToken cancellationToken = default);
}

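The interface's doc comment names a "file" fetcher type alongside "http". A minimal sketch of what a directory-backed implementation could look like follows; it is not part of the commit. The on-disk layout, the class name, and the ever-null manifest are assumptions, and the usual implicit usings plus System.Linq are assumed available.

using System.Runtime.CompilerServices;
using System.Security.Cryptography;

public sealed class DirectoryEvidenceFetcher : ILazyEvidenceFetcher
{
    private readonly string _rootPath;

    public DirectoryEvidenceFetcher(string rootPath) => _rootPath = rootPath;

    public string FetcherType => "file";

    public async Task<FetchedChunk?> FetchChunkAsync(
        string proofRoot, int chunkIndex, CancellationToken cancellationToken = default)
    {
        // Hypothetical layout: <root>/<proofRoot>/<index>.chunk
        var path = Path.Combine(_rootPath, proofRoot, $"{chunkIndex}.chunk");
        if (!File.Exists(path))
        {
            return null;
        }

        var data = await File.ReadAllBytesAsync(path, cancellationToken);
        var hash = Convert.ToHexString(SHA256.HashData(data)).ToLowerInvariant();
        return new FetchedChunk { Index = chunkIndex, Data = data, Hash = hash };
    }

    public async IAsyncEnumerable<FetchedChunk> FetchChunksAsync(
        string proofRoot,
        IEnumerable<int> chunkIndices,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        foreach (var index in chunkIndices)
        {
            var chunk = await FetchChunkAsync(proofRoot, index, cancellationToken);
            if (chunk is not null)
            {
                yield return chunk;
            }
        }
    }

    public IAsyncEnumerable<FetchedChunk> FetchRemainingChunksAsync(
        string proofRoot,
        ChunkManifest manifest,
        IReadOnlySet<int> existingIndices,
        CancellationToken cancellationToken = default)
    {
        var missing = Enumerable.Range(0, manifest.TotalChunks)
            .Where(i => !existingIndices.Contains(i));
        return FetchChunksAsync(proofRoot, missing, cancellationToken);
    }

    public Task<bool> IsAvailableAsync(CancellationToken cancellationToken = default)
        => Task.FromResult(Directory.Exists(_rootPath));

    public Task<ChunkManifest?> FetchManifestAsync(
        string proofRoot, CancellationToken cancellationToken = default)
        => Task.FromResult<ChunkManifest?>(null); // manifest lookup omitted in this sketch
}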
/// <summary>
/// Simplified chunk representation for lazy fetch interface.
/// Contains only the index and data for transport.
/// </summary>
public sealed record FetchedChunk
{
    /// <summary>
    /// Zero-based chunk index.
    /// </summary>
    public required int Index { get; init; }

    /// <summary>
    /// The chunk data.
    /// </summary>
    public required byte[] Data { get; init; }

    /// <summary>
    /// SHA256 hash of the data for verification.
    /// </summary>
    public required string Hash { get; init; }
}

/// <summary>
/// Result of a lazy fetch operation.
/// </summary>
public sealed record LazyFetchResult
{
    /// <summary>
    /// Whether the fetch was successful.
    /// </summary>
    public required bool Success { get; init; }

    /// <summary>
    /// Number of chunks fetched.
    /// </summary>
    public required int ChunksFetched { get; init; }

    /// <summary>
    /// Total bytes fetched.
    /// </summary>
    public required long BytesFetched { get; init; }

    /// <summary>
    /// Number of chunks that failed verification.
    /// </summary>
    public required int ChunksFailedVerification { get; init; }

    /// <summary>
    /// Indices of failed chunks.
    /// </summary>
    public IReadOnlyList<int> FailedIndices { get; init; } = [];

    /// <summary>
    /// Any errors encountered.
    /// </summary>
    public IReadOnlyList<string> Errors { get; init; } = [];

    /// <summary>
    /// Time taken for the fetch operation.
    /// </summary>
    public TimeSpan Duration { get; init; }
}
@@ -0,0 +1,296 @@
using System.Diagnostics;
using System.Security.Cryptography;
using Microsoft.Extensions.Logging;

namespace StellaOps.Provcache;

/// <summary>
/// Orchestrates lazy evidence fetching with verification.
/// Coordinates between fetchers and the local evidence store.
/// </summary>
public sealed class LazyFetchOrchestrator
{
    private readonly IEvidenceChunkRepository _repository;
    private readonly ILogger<LazyFetchOrchestrator> _logger;
    private readonly TimeProvider _timeProvider;

    /// <summary>
    /// Creates a lazy fetch orchestrator.
    /// </summary>
    /// <param name="repository">The chunk repository for local storage.</param>
    /// <param name="logger">Logger instance.</param>
    /// <param name="timeProvider">Optional time provider.</param>
    public LazyFetchOrchestrator(
        IEvidenceChunkRepository repository,
        ILogger<LazyFetchOrchestrator> logger,
        TimeProvider? timeProvider = null)
    {
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <summary>
    /// Fetches remaining chunks for a proof root and stores them locally.
    /// </summary>
    /// <param name="proofRoot">The proof root.</param>
    /// <param name="fetcher">The fetcher to use.</param>
    /// <param name="options">Fetch options.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The fetch result.</returns>
    public async Task<LazyFetchResult> FetchAndStoreAsync(
        string proofRoot,
        ILazyEvidenceFetcher fetcher,
        LazyFetchOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(proofRoot);
        ArgumentNullException.ThrowIfNull(fetcher);

        options ??= new LazyFetchOptions();
        var stopwatch = Stopwatch.StartNew();
        var errors = new List<string>();
        var failedIndices = new List<int>();
        var chunksFetched = 0;
        long bytesFetched = 0;
        var chunksFailedVerification = 0;

        _logger.LogInformation(
            "Starting lazy fetch for {ProofRoot} using {FetcherType} fetcher",
            proofRoot, fetcher.FetcherType);

        try
        {
            // Check fetcher availability
            if (!await fetcher.IsAvailableAsync(cancellationToken))
            {
                _logger.LogWarning("Fetcher {FetcherType} is not available", fetcher.FetcherType);
                return new LazyFetchResult
                {
                    Success = false,
                    ChunksFetched = 0,
                    BytesFetched = 0,
                    ChunksFailedVerification = 0,
                    Errors = [$"Fetcher {fetcher.FetcherType} is not available"],
                    Duration = stopwatch.Elapsed
                };
            }

            // Get local manifest
            var localManifest = await _repository.GetManifestAsync(proofRoot, cancellationToken);

            if (localManifest is null)
            {
                // Try to fetch manifest from remote
                localManifest = await fetcher.FetchManifestAsync(proofRoot, cancellationToken);
                if (localManifest is null)
                {
                    _logger.LogWarning("No manifest found for {ProofRoot}", proofRoot);
                    return new LazyFetchResult
                    {
                        Success = false,
                        ChunksFetched = 0,
                        BytesFetched = 0,
                        ChunksFailedVerification = 0,
                        Errors = [$"No manifest found for proof root {proofRoot}"],
                        Duration = stopwatch.Elapsed
                    };
                }
            }

            // Get existing chunks
            var existingChunks = (await _repository.GetChunksAsync(proofRoot, cancellationToken))
                .Select(c => c.ChunkIndex)
                .ToHashSet();

            var totalChunks = localManifest.TotalChunks;
            var missingCount = totalChunks - existingChunks.Count;

            _logger.LogInformation(
                "Have {Existing}/{Total} chunks, need to fetch {Missing}",
                existingChunks.Count, totalChunks, missingCount);

            if (missingCount == 0)
            {
                _logger.LogInformation("All chunks already present, nothing to fetch");
                return new LazyFetchResult
                {
                    Success = true,
                    ChunksFetched = 0,
                    BytesFetched = 0,
                    ChunksFailedVerification = 0,
                    Duration = stopwatch.Elapsed
                };
            }

            // Fetch remaining chunks
            var chunksToStore = new List<EvidenceChunk>();
            var now = _timeProvider.GetUtcNow();

            await foreach (var fetchedChunk in fetcher.FetchRemainingChunksAsync(
                proofRoot, localManifest, existingChunks, cancellationToken))
            {
                // Verify chunk if enabled
                if (options.VerifyOnFetch)
                {
                    var isValid = VerifyChunk(fetchedChunk, localManifest);
                    if (!isValid)
                    {
                        chunksFailedVerification++;
                        failedIndices.Add(fetchedChunk.Index);
                        errors.Add($"Chunk {fetchedChunk.Index} failed verification");

                        if (options.FailOnVerificationError)
                        {
                            _logger.LogError("Chunk {Index} failed verification, aborting", fetchedChunk.Index);
                            break;
                        }

                        _logger.LogWarning("Chunk {Index} failed verification, skipping", fetchedChunk.Index);
                        continue;
                    }
                }

                // Convert FetchedChunk to EvidenceChunk for storage
                var evidenceChunk = new EvidenceChunk
                {
                    ChunkId = Guid.NewGuid(),
                    ProofRoot = proofRoot,
                    ChunkIndex = fetchedChunk.Index,
                    ChunkHash = fetchedChunk.Hash,
                    Blob = fetchedChunk.Data,
                    BlobSize = fetchedChunk.Data.Length,
                    ContentType = "application/octet-stream",
                    CreatedAt = now
                };

                chunksToStore.Add(evidenceChunk);
                bytesFetched += fetchedChunk.Data.Length;
                chunksFetched++;

                // Batch store to reduce database round-trips
                if (chunksToStore.Count >= options.BatchSize)
                {
                    await _repository.StoreChunksAsync(proofRoot, chunksToStore, cancellationToken);
                    _logger.LogDebug("Stored batch of {Count} chunks", chunksToStore.Count);
                    chunksToStore.Clear();
                }

                // Check max chunks limit
                if (options.MaxChunksToFetch > 0 && chunksFetched >= options.MaxChunksToFetch)
                {
                    _logger.LogInformation("Reached max chunks limit ({Max})", options.MaxChunksToFetch);
                    break;
                }
            }

            // Store any remaining chunks
            if (chunksToStore.Count > 0)
            {
                await _repository.StoreChunksAsync(proofRoot, chunksToStore, cancellationToken);
                _logger.LogDebug("Stored final batch of {Count} chunks", chunksToStore.Count);
            }

            stopwatch.Stop();

            var success = chunksFailedVerification == 0 || !options.FailOnVerificationError;

            _logger.LogInformation(
                "Lazy fetch complete: {Fetched} chunks, {Bytes} bytes, {Failed} verification failures in {Duration}",
                chunksFetched, bytesFetched, chunksFailedVerification, stopwatch.Elapsed);

            return new LazyFetchResult
            {
                Success = success,
                ChunksFetched = chunksFetched,
                BytesFetched = bytesFetched,
                ChunksFailedVerification = chunksFailedVerification,
                FailedIndices = failedIndices,
                Errors = errors,
                Duration = stopwatch.Elapsed
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error during lazy fetch for {ProofRoot}", proofRoot);
            errors.Add(ex.Message);

            return new LazyFetchResult
            {
                Success = false,
                ChunksFetched = chunksFetched,
                BytesFetched = bytesFetched,
                ChunksFailedVerification = chunksFailedVerification,
                FailedIndices = failedIndices,
                Errors = errors,
                Duration = stopwatch.Elapsed
            };
        }
    }

    /// <summary>
    /// Verifies a chunk against the manifest.
    /// </summary>
    private bool VerifyChunk(FetchedChunk chunk, ChunkManifest manifest)
    {
        // Check index bounds
        if (chunk.Index < 0 || chunk.Index >= manifest.TotalChunks)
        {
            _logger.LogWarning("Chunk index {Index} out of bounds (max {Max})", chunk.Index, manifest.TotalChunks - 1);
            return false;
        }

        // Hash the payload once and reuse it for both checks below
        var actualHash = Convert.ToHexString(SHA256.HashData(chunk.Data)).ToLowerInvariant();

        // Verify hash against manifest metadata
        if (manifest.Chunks is not null && chunk.Index < manifest.Chunks.Count)
        {
            var expectedHash = manifest.Chunks[chunk.Index].Hash;
            if (!string.Equals(actualHash, expectedHash, StringComparison.OrdinalIgnoreCase))
            {
                _logger.LogWarning(
                    "Chunk {Index} hash mismatch: expected {Expected}, got {Actual}",
                    chunk.Index, expectedHash, actualHash);
                return false;
            }
        }

        // Also verify the chunk's own hash claim
        if (!string.Equals(actualHash, chunk.Hash, StringComparison.OrdinalIgnoreCase))
        {
            _logger.LogWarning(
                "Chunk {Index} self-hash mismatch: claimed {Claimed}, actual {Actual}",
                chunk.Index, chunk.Hash, actualHash);
            return false;
        }

        return true;
    }
}

/// <summary>
/// Options for lazy fetch operations.
/// </summary>
public sealed class LazyFetchOptions
{
    /// <summary>
    /// Whether to verify chunks on fetch.
    /// </summary>
    public bool VerifyOnFetch { get; init; } = true;

    /// <summary>
    /// Whether to fail the entire operation on verification error.
    /// </summary>
    public bool FailOnVerificationError { get; init; } = false;

    /// <summary>
    /// Batch size for storing chunks.
    /// </summary>
    public int BatchSize { get; init; } = 100;

    /// <summary>
    /// Maximum number of chunks to fetch (0 = unlimited).
    /// </summary>
    public int MaxChunksToFetch { get; init; } = 0;
}
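A minimal wiring sketch (not part of the commit) showing how the orchestrator, a fetcher, and the options above fit together. The wrapper class and the proof-root string are illustrative; the repository, logger, and fetcher are assumed to come from DI.

public static class LazyFetchWiringExample
{
    public static async Task RunAsync(
        IEvidenceChunkRepository repository,
        ILogger<LazyFetchOrchestrator> logger,
        ILazyEvidenceFetcher fetcher,
        CancellationToken cancellationToken)
    {
        var orchestrator = new LazyFetchOrchestrator(repository, logger);

        var result = await orchestrator.FetchAndStoreAsync(
            "sha256:example-proof-root", // hypothetical proof root
            fetcher,
            new LazyFetchOptions
            {
                VerifyOnFetch = true,            // hash-check every chunk on arrival
                FailOnVerificationError = false, // skip bad chunks instead of aborting
                BatchSize = 100                  // chunks per StoreChunksAsync call
            },
            cancellationToken);

        if (!result.Success || result.ChunksFailedVerification > 0)
        {
            // FailedIndices identifies chunks worth re-requesting from another source.
            logger.LogWarning(
                "Lazy fetch incomplete: {Fetched} fetched, {Failed} failed verification",
                result.ChunksFetched, result.FailedIndices.Count);
        }
    }
}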
@@ -142,7 +142,7 @@ public sealed class ProvcacheService : IProvcacheService
        ArgumentNullException.ThrowIfNull(entry);

        var sw = Stopwatch.StartNew();
-       using var activity = ProvcacheTelemetry.StartSetActivity(entry.VeriKey, entry.TrustScore);
+       using var activity = ProvcacheTelemetry.StartSetActivity(entry.VeriKey, entry.Decision.TrustScore);

        try
        {
@@ -247,7 +247,7 @@ public sealed class ProvcacheService : IProvcacheService
    {
        ArgumentNullException.ThrowIfNull(request);

-       var invalidationType = request.Type?.ToString().ToLowerInvariant() ?? "unknown";
+       var invalidationType = request.Type.ToString().ToLowerInvariant();
        using var activity = ProvcacheTelemetry.StartInvalidateActivity(invalidationType, request.Value);

        try

@@ -0,0 +1,160 @@
namespace StellaOps.Provcache;

/// <summary>
/// Interface for the revocation ledger.
/// Provides audit trail and replay capabilities for revocation events.
/// </summary>
public interface IRevocationLedger
{
    /// <summary>
    /// Records a revocation event in the ledger.
    /// </summary>
    /// <param name="entry">The revocation entry to record.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The recorded entry with sequence number.</returns>
    Task<RevocationEntry> RecordAsync(
        RevocationEntry entry,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets revocation entries since a given sequence number.
    /// </summary>
    /// <param name="sinceSeqNo">The sequence number to start from (exclusive).</param>
    /// <param name="limit">Maximum number of entries to return.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Ordered list of revocation entries.</returns>
    Task<IReadOnlyList<RevocationEntry>> GetEntriesSinceAsync(
        long sinceSeqNo,
        int limit = 1000,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets revocation entries by type.
    /// </summary>
    /// <param name="revocationType">The type of revocation to filter by.</param>
    /// <param name="since">Only return entries after this time.</param>
    /// <param name="limit">Maximum number of entries to return.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Ordered list of revocation entries.</returns>
    Task<IReadOnlyList<RevocationEntry>> GetEntriesByTypeAsync(
        string revocationType,
        DateTimeOffset? since = null,
        int limit = 1000,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the latest sequence number in the ledger.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The latest sequence number, or 0 if empty.</returns>
    Task<long> GetLatestSeqNoAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets revocations for a specific key.
    /// </summary>
    /// <param name="revokedKey">The key to look up.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>List of revocation entries for the key.</returns>
    Task<IReadOnlyList<RevocationEntry>> GetRevocationsForKeyAsync(
        string revokedKey,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets summary statistics for the ledger.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Summary statistics.</returns>
    Task<RevocationLedgerStats> GetStatsAsync(CancellationToken cancellationToken = default);
}

/// <summary>
/// A revocation entry in the ledger.
/// </summary>
public sealed record RevocationEntry
{
    /// <summary>
    /// Sequence number (set after recording).
    /// </summary>
    public long SeqNo { get; init; }

    /// <summary>
    /// Unique identifier for this revocation event.
    /// </summary>
    public required Guid RevocationId { get; init; }

    /// <summary>
    /// Type of revocation.
    /// </summary>
    public required string RevocationType { get; init; }

    /// <summary>
    /// The key that was revoked.
    /// </summary>
    public required string RevokedKey { get; init; }

    /// <summary>
    /// Reason for revocation.
    /// </summary>
    public string? Reason { get; init; }

    /// <summary>
    /// Number of entries invalidated.
    /// </summary>
    public int EntriesInvalidated { get; init; }

    /// <summary>
    /// Source of the revocation.
    /// </summary>
    public required string Source { get; init; }

    /// <summary>
    /// Correlation ID for tracing.
    /// </summary>
    public string? CorrelationId { get; init; }

    /// <summary>
    /// When the revocation occurred.
    /// </summary>
    public required DateTimeOffset RevokedAt { get; init; }

    /// <summary>
    /// Optional metadata.
    /// </summary>
    public IDictionary<string, object>? Metadata { get; init; }
}

/// <summary>
/// Summary statistics for the revocation ledger.
/// </summary>
public sealed record RevocationLedgerStats
{
    /// <summary>
    /// Total number of revocation entries.
    /// </summary>
    public required long TotalEntries { get; init; }

    /// <summary>
    /// Latest sequence number.
    /// </summary>
    public required long LatestSeqNo { get; init; }

    /// <summary>
    /// Entries by type.
    /// </summary>
    public required IReadOnlyDictionary<string, long> EntriesByType { get; init; }

    /// <summary>
    /// Total entries invalidated.
    /// </summary>
    public required long TotalEntriesInvalidated { get; init; }

    /// <summary>
    /// Timestamp of oldest entry.
    /// </summary>
    public DateTimeOffset? OldestEntryAt { get; init; }

    /// <summary>
    /// Timestamp of newest entry.
    /// </summary>
    public DateTimeOffset? NewestEntryAt { get; init; }
}
@@ -0,0 +1,137 @@
using System.Collections.Concurrent;
using System.Text.Json;
using Microsoft.Extensions.Logging;
using StellaOps.Provcache.Entities;

namespace StellaOps.Provcache;

/// <summary>
/// In-memory implementation of the revocation ledger for testing and non-persistent scenarios.
/// For production use, inject a PostgreSQL-backed implementation from StellaOps.Provcache.Postgres.
/// </summary>
public sealed class InMemoryRevocationLedger : IRevocationLedger
{
    private readonly ConcurrentDictionary<long, RevocationEntry> _entries = new();
    private readonly ILogger<InMemoryRevocationLedger> _logger;
    private long _currentSeqNo;

    public InMemoryRevocationLedger(ILogger<InMemoryRevocationLedger> logger)
    {
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
    }

    /// <inheritdoc />
    public Task<RevocationEntry> RecordAsync(
        RevocationEntry entry,
        CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(entry);

        var seqNo = Interlocked.Increment(ref _currentSeqNo);
        var recordedEntry = entry with { SeqNo = seqNo };

        _entries[seqNo] = recordedEntry;

        _logger.LogInformation(
            "Recorded revocation {RevocationId} of type {Type} for key {Key}, invalidated {Count} entries",
            entry.RevocationId, entry.RevocationType, entry.RevokedKey, entry.EntriesInvalidated);

        return Task.FromResult(recordedEntry);
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<RevocationEntry>> GetEntriesSinceAsync(
        long sinceSeqNo,
        int limit = 1000,
        CancellationToken cancellationToken = default)
    {
        var entries = _entries.Values
            .Where(e => e.SeqNo > sinceSeqNo)
            .OrderBy(e => e.SeqNo)
            .Take(limit)
            .ToList();

        return Task.FromResult<IReadOnlyList<RevocationEntry>>(entries);
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<RevocationEntry>> GetEntriesByTypeAsync(
        string revocationType,
        DateTimeOffset? since = null,
        int limit = 1000,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(revocationType);

        var query = _entries.Values
            .Where(e => e.RevocationType == revocationType);

        if (since.HasValue)
        {
            query = query.Where(e => e.RevokedAt > since.Value);
        }

        var entries = query
            .OrderBy(e => e.SeqNo)
            .Take(limit)
            .ToList();

        return Task.FromResult<IReadOnlyList<RevocationEntry>>(entries);
    }

    /// <inheritdoc />
    public Task<long> GetLatestSeqNoAsync(CancellationToken cancellationToken = default)
    {
        return Task.FromResult(Interlocked.Read(ref _currentSeqNo));
    }

    /// <inheritdoc />
    public Task<IReadOnlyList<RevocationEntry>> GetRevocationsForKeyAsync(
        string revokedKey,
        CancellationToken cancellationToken = default)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(revokedKey);

        var entries = _entries.Values
            .Where(e => e.RevokedKey == revokedKey)
            .OrderBy(e => e.SeqNo)
            .ToList();

        return Task.FromResult<IReadOnlyList<RevocationEntry>>(entries);
    }

    /// <inheritdoc />
    public Task<RevocationLedgerStats> GetStatsAsync(CancellationToken cancellationToken = default)
    {
        var allEntries = _entries.Values.ToList();
        var totalEntries = allEntries.Count;
        var latestSeqNo = Interlocked.Read(ref _currentSeqNo);
        var totalInvalidated = allEntries.Sum(e => (long)e.EntriesInvalidated);

        var entriesByType = allEntries
            .GroupBy(e => e.RevocationType)
            .ToDictionary(g => g.Key, g => (long)g.Count());

        var oldestEntry = allEntries.MinBy(e => e.SeqNo)?.RevokedAt;
        var newestEntry = allEntries.MaxBy(e => e.SeqNo)?.RevokedAt;

        return Task.FromResult(new RevocationLedgerStats
        {
            TotalEntries = totalEntries,
            LatestSeqNo = latestSeqNo,
            EntriesByType = entriesByType,
            TotalEntriesInvalidated = totalInvalidated,
            OldestEntryAt = oldestEntry,
            NewestEntryAt = newestEntry
        });
    }

    /// <summary>
    /// Clears all entries (for testing).
    /// </summary>
    public void Clear()
    {
        _entries.Clear();
        Interlocked.Exchange(ref _currentSeqNo, 0);
    }
}
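A short usage sketch (not part of the commit) exercising the ledger surface above. The "explicit" type string and the key are illustrative; the real type constants live in Entities.RevocationTypes, as the replay service below shows.

public static class RevocationLedgerExample
{
    public static async Task RunAsync(ILogger<InMemoryRevocationLedger> logger)
    {
        var ledger = new InMemoryRevocationLedger(logger);

        // Record an explicit revocation; the ledger assigns SeqNo.
        var recorded = await ledger.RecordAsync(new RevocationEntry
        {
            RevocationId = Guid.NewGuid(),
            RevocationType = "explicit",   // assumed literal, see Entities.RevocationTypes
            RevokedKey = "verikey:abc123", // hypothetical key
            Source = "api",
            EntriesInvalidated = 1,
            RevokedAt = DateTimeOffset.UtcNow
        });

        // A node that was offline catches up from its last known sequence number.
        var newEntries = await ledger.GetEntriesSinceAsync(sinceSeqNo: recorded.SeqNo - 1);

        // Summary statistics for dashboards or health checks.
        var stats = await ledger.GetStatsAsync();
        Console.WriteLine($"{stats.TotalEntries} entries up to seq {stats.LatestSeqNo}, replayed {newEntries.Count}");
    }
}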
@@ -0,0 +1,295 @@
using Microsoft.Extensions.Logging;

namespace StellaOps.Provcache;

/// <summary>
/// Interface for replaying revocation events for catch-up scenarios.
/// </summary>
public interface IRevocationReplayService
{
    /// <summary>
    /// Replays revocation events since a checkpoint.
    /// Used for catch-up after an offline period or node restart.
    /// </summary>
    /// <param name="sinceSeqNo">Sequence number to replay from.</param>
    /// <param name="options">Replay options.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Replay result with statistics.</returns>
    Task<RevocationReplayResult> ReplayFromAsync(
        long sinceSeqNo,
        RevocationReplayOptions? options = null,
        CancellationToken cancellationToken = default);

    /// <summary>
    /// Gets the current replay checkpoint.
    /// </summary>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The checkpoint sequence number.</returns>
    Task<long> GetCheckpointAsync(CancellationToken cancellationToken = default);

    /// <summary>
    /// Saves a replay checkpoint.
    /// </summary>
    /// <param name="seqNo">The sequence number to checkpoint.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task SaveCheckpointAsync(long seqNo, CancellationToken cancellationToken = default);
}

/// <summary>
/// Options for revocation replay.
/// </summary>
public sealed class RevocationReplayOptions
{
    /// <summary>
    /// Maximum entries to process per batch.
    /// </summary>
    public int BatchSize { get; init; } = 1000;

    /// <summary>
    /// Whether to save the checkpoint after each batch.
    /// </summary>
    public bool SaveCheckpointPerBatch { get; init; } = true;

    /// <summary>
    /// Whether to verify invalidations against current cache state.
    /// </summary>
    public bool VerifyInvalidations { get; init; } = false;

    /// <summary>
    /// Maximum total entries to replay (0 = unlimited).
    /// </summary>
    public int MaxEntries { get; init; } = 0;
}

/// <summary>
/// Result of a revocation replay operation.
/// </summary>
public sealed record RevocationReplayResult
{
    /// <summary>
    /// Whether the replay completed successfully.
    /// </summary>
    public required bool Success { get; init; }

    /// <summary>
    /// Number of entries replayed.
    /// </summary>
    public required int EntriesReplayed { get; init; }

    /// <summary>
    /// Starting sequence number.
    /// </summary>
    public required long StartSeqNo { get; init; }

    /// <summary>
    /// Ending sequence number.
    /// </summary>
    public required long EndSeqNo { get; init; }

    /// <summary>
    /// Total invalidations applied.
    /// </summary>
    public required int TotalInvalidations { get; init; }

    /// <summary>
    /// Entries by revocation type.
    /// </summary>
    public IReadOnlyDictionary<string, int> EntriesByType { get; init; } = new Dictionary<string, int>();

    /// <summary>
    /// Time taken for replay.
    /// </summary>
    public TimeSpan Duration { get; init; }

    /// <summary>
    /// Any errors encountered.
    /// </summary>
    public IReadOnlyList<string> Errors { get; init; } = [];
}

/// <summary>
/// Implementation of the revocation replay service.
/// </summary>
public sealed class RevocationReplayService : IRevocationReplayService
{
    private readonly IRevocationLedger _ledger;
    private readonly IProvcacheRepository _repository;
    private readonly ILogger<RevocationReplayService> _logger;
    private readonly TimeProvider _timeProvider;

    // In-memory checkpoint (production would use persistent storage)
    private long _checkpoint;

    public RevocationReplayService(
        IRevocationLedger ledger,
        IProvcacheRepository repository,
        ILogger<RevocationReplayService> logger,
        TimeProvider? timeProvider = null)
    {
        _ledger = ledger ?? throw new ArgumentNullException(nameof(ledger));
        _repository = repository ?? throw new ArgumentNullException(nameof(repository));
        _logger = logger ?? throw new ArgumentNullException(nameof(logger));
        _timeProvider = timeProvider ?? TimeProvider.System;
    }

    /// <inheritdoc />
    public async Task<RevocationReplayResult> ReplayFromAsync(
        long sinceSeqNo,
        RevocationReplayOptions? options = null,
        CancellationToken cancellationToken = default)
    {
        options ??= new RevocationReplayOptions();
        var startTime = _timeProvider.GetUtcNow();
        var errors = new List<string>();
        var entriesByType = new Dictionary<string, int>();
        var totalReplayed = 0;
        var totalInvalidations = 0;
        var currentSeqNo = sinceSeqNo;
        var endSeqNo = sinceSeqNo;

        _logger.LogInformation("Starting revocation replay from seq {SeqNo}", sinceSeqNo);

        try
        {
            while (true)
            {
                cancellationToken.ThrowIfCancellationRequested();

                var entries = await _ledger.GetEntriesSinceAsync(
                    currentSeqNo,
                    options.BatchSize,
                    cancellationToken);

                if (entries.Count == 0)
                {
                    _logger.LogDebug("No more entries to replay");
                    break;
                }

                foreach (var entry in entries)
                {
                    // Track by type (TryGetValue leaves count at 0 for unseen types)
                    entriesByType.TryGetValue(entry.RevocationType, out var count);
                    entriesByType[entry.RevocationType] = count + 1;

                    // Apply invalidation based on type
                    try
                    {
                        var invalidated = await ApplyRevocationAsync(entry, cancellationToken);
                        totalInvalidations += invalidated;
                    }
                    catch (Exception ex)
                    {
                        _logger.LogWarning(ex, "Error applying revocation {RevocationId}", entry.RevocationId);
                        errors.Add($"Failed to apply revocation {entry.RevocationId}: {ex.Message}");
                    }

                    currentSeqNo = entry.SeqNo;
                    endSeqNo = entry.SeqNo;
                    totalReplayed++;

                    // Check max entries limit
                    if (options.MaxEntries > 0 && totalReplayed >= options.MaxEntries)
                    {
                        _logger.LogInformation("Reached max entries limit ({Max})", options.MaxEntries);
                        break;
                    }
                }

                // Save checkpoint per batch if enabled
                if (options.SaveCheckpointPerBatch)
                {
                    await SaveCheckpointAsync(endSeqNo, cancellationToken);
                }

                // Check max entries limit again to leave the outer loop
                if (options.MaxEntries > 0 && totalReplayed >= options.MaxEntries)
                {
                    break;
                }
            }

            var duration = _timeProvider.GetUtcNow() - startTime;

            _logger.LogInformation(
                "Revocation replay complete: {Replayed} entries, {Invalidations} invalidations in {Duration}",
                totalReplayed, totalInvalidations, duration);

            return new RevocationReplayResult
            {
                Success = errors.Count == 0,
                EntriesReplayed = totalReplayed,
                StartSeqNo = sinceSeqNo,
                EndSeqNo = endSeqNo,
                TotalInvalidations = totalInvalidations,
                EntriesByType = entriesByType,
                Duration = duration,
                Errors = errors
            };
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error during revocation replay");
            errors.Add(ex.Message);

            return new RevocationReplayResult
            {
                Success = false,
                EntriesReplayed = totalReplayed,
                StartSeqNo = sinceSeqNo,
                EndSeqNo = endSeqNo,
                TotalInvalidations = totalInvalidations,
                EntriesByType = entriesByType,
                Duration = _timeProvider.GetUtcNow() - startTime,
                Errors = errors
            };
        }
    }

    /// <inheritdoc />
    public Task<long> GetCheckpointAsync(CancellationToken cancellationToken = default)
    {
        return Task.FromResult(Interlocked.Read(ref _checkpoint));
    }

    /// <inheritdoc />
    public Task SaveCheckpointAsync(long seqNo, CancellationToken cancellationToken = default)
    {
        // Interlocked keeps the 64-bit checkpoint tear-free on 32-bit runtimes.
        Interlocked.Exchange(ref _checkpoint, seqNo);
        _logger.LogDebug("Saved checkpoint at seq {SeqNo}", seqNo);
        return Task.CompletedTask;
    }

    private async Task<int> ApplyRevocationAsync(
        RevocationEntry entry,
        CancellationToken cancellationToken)
    {
        // Note: in replay mode we re-apply the same invalidation logic.
        // This is idempotent - if entries are already invalidated, count will be 0.

        var count = entry.RevocationType switch
        {
            Entities.RevocationTypes.Signer =>
                await _repository.DeleteBySignerSetHashAsync(entry.RevokedKey, cancellationToken),

            Entities.RevocationTypes.FeedEpoch =>
                await _repository.DeleteByFeedEpochOlderThanAsync(entry.RevokedKey, cancellationToken),

            Entities.RevocationTypes.Policy =>
                await _repository.DeleteByPolicyHashAsync(entry.RevokedKey, cancellationToken),

            Entities.RevocationTypes.Explicit =>
                await _repository.DeleteAsync(entry.RevokedKey, cancellationToken) ? 1L : 0L,

            Entities.RevocationTypes.Expiration =>
                0L, // TTL expiration is handled by background cleanup, not replay

            _ => 0L
        };

        return (int)count;
    }
}
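A catch-up sketch (not part of the commit) showing the checkpoint-then-replay loop the interface is designed for. The wrapper class and batch size are illustrative.

public static class RevocationReplayExample
{
    public static async Task CatchUpAsync(
        IRevocationReplayService replayService,
        CancellationToken cancellationToken)
    {
        // Resume from the last saved checkpoint.
        var checkpoint = await replayService.GetCheckpointAsync(cancellationToken);

        var result = await replayService.ReplayFromAsync(
            checkpoint,
            new RevocationReplayOptions
            {
                BatchSize = 500,
                SaveCheckpointPerBatch = true // progress survives a crash mid-replay
            },
            cancellationToken);

        if (result.Success)
        {
            // EndSeqNo is the highest sequence number applied in this run.
            await replayService.SaveCheckpointAsync(result.EndSeqNo, cancellationToken);
        }
    }
}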
@@ -26,6 +26,7 @@
    <ProjectReference Include="../StellaOps.Canonical.Json/StellaOps.Canonical.Json.csproj" />
    <ProjectReference Include="../StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
    <ProjectReference Include="../StellaOps.Messaging/StellaOps.Messaging.csproj" />
+   <ProjectReference Include="../../Provenance/StellaOps.Provenance.Attestation/StellaOps.Provenance.Attestation.csproj" />
  </ItemGroup>

</Project>

@@ -59,6 +59,7 @@ public sealed class WriteBehindQueue : BackgroundService, IWriteBehindQueue

        Interlocked.Increment(ref _totalEnqueued);
        Interlocked.Increment(ref _currentQueueDepth);
+       ProvcacheTelemetry.SetWriteBehindQueueSize((int)Interlocked.Read(ref _currentQueueDepth));

        return _channel.Writer.WriteAsync(item, cancellationToken);
    }
@@ -143,6 +144,7 @@ public sealed class WriteBehindQueue : BackgroundService, IWriteBehindQueue
    private async Task ProcessBatchAsync(List<WriteBehindItem> batch, CancellationToken cancellationToken)
    {
        var entries = batch.Select(b => b.Entry).ToList();
+       using var activity = ProvcacheTelemetry.StartWriteBehindFlushActivity(batch.Count);

        try
        {
@@ -150,6 +152,8 @@ public sealed class WriteBehindQueue : BackgroundService, IWriteBehindQueue

            Interlocked.Add(ref _totalPersisted, batch.Count);
            Interlocked.Increment(ref _totalBatches);
+           ProvcacheTelemetry.RecordWriteBehind("ok", batch.Count);
+           ProvcacheTelemetry.SetWriteBehindQueueSize((int)Interlocked.Read(ref _currentQueueDepth));

            _logger.LogDebug(
                "Write-behind batch persisted {Count} entries",
@@ -157,6 +161,7 @@ public sealed class WriteBehindQueue : BackgroundService, IWriteBehindQueue
        }
        catch (Exception ex)
        {
+           ProvcacheTelemetry.MarkError(activity, ex.Message);
            _logger.LogWarning(
                ex,
                "Write-behind batch failed for {Count} entries, scheduling retries",
@@ -169,14 +174,17 @@ public sealed class WriteBehindQueue : BackgroundService, IWriteBehindQueue
            {
                var retryItem = item with { RetryCount = item.RetryCount + 1 };
                Interlocked.Increment(ref _totalRetries);
+               ProvcacheTelemetry.RecordWriteBehind("retry", 1);

                if (_channel.Writer.TryWrite(retryItem))
                {
                    Interlocked.Increment(ref _currentQueueDepth);
+                   ProvcacheTelemetry.SetWriteBehindQueueSize((int)Interlocked.Read(ref _currentQueueDepth));
                }
                else
                {
                    Interlocked.Increment(ref _totalFailed);
+                   ProvcacheTelemetry.RecordWriteBehind("failed", 1);
                    _logger.LogError(
                        "Write-behind queue full, dropping entry for VeriKey {VeriKey}",
                        item.Entry.VeriKey);
@@ -185,6 +193,7 @@ public sealed class WriteBehindQueue : BackgroundService, IWriteBehindQueue
            else
            {
                Interlocked.Increment(ref _totalFailed);
+               ProvcacheTelemetry.RecordWriteBehind("failed", 1);
                _logger.LogError(
                    "Write-behind max retries exceeded for VeriKey {VeriKey}",
                    item.Entry.VeriKey);

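The hunks above pair an Interlocked counter with a gauge update on every enqueue, flush, and retry. ProvcacheTelemetry's internals are not shown in this diff; the sketch below only illustrates one common shape for such a counter-backed gauge (a pull-style ObservableGauge from System.Diagnostics.Metrics), and every name in it is hypothetical.

using System.Diagnostics.Metrics;

static class QueueDepthTelemetrySketch
{
    private static readonly Meter Meter = new("Example.WriteBehind");
    private static long _queueDepth;

    // The gauge samples the counter on demand, so the hot path only pays
    // for the Interlocked increment/decrement.
    private static readonly ObservableGauge<long> Depth =
        Meter.CreateObservableGauge("queue_depth", () => Interlocked.Read(ref _queueDepth));

    public static void OnEnqueue() => Interlocked.Increment(ref _queueDepth);
    public static void OnDequeue() => Interlocked.Decrement(ref _queueDepth);
}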
@@ -0,0 +1,373 @@
using System.Net;
using System.Net.Http.Json;
using FluentAssertions;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.TestHost;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Moq;
using StellaOps.Provcache.Api;
using Xunit;

namespace StellaOps.Provcache.Tests;

/// <summary>
/// Tests for evidence paging API endpoints.
/// </summary>
public sealed class EvidenceApiTests : IAsyncLifetime
{
    private IHost? _host;
    private HttpClient? _client;
    private Mock<IEvidenceChunkRepository>? _mockChunkRepository;
    private Mock<IEvidenceChunker>? _mockChunker;

    public async Task InitializeAsync()
    {
        _mockChunkRepository = new Mock<IEvidenceChunkRepository>();
        _mockChunker = new Mock<IEvidenceChunker>();

        _host = await new HostBuilder()
            .ConfigureWebHost(webBuilder =>
            {
                webBuilder
                    .UseTestServer()
                    .ConfigureServices(services =>
                    {
                        services.AddRouting();
                        services.AddLogging();
                        services.AddSingleton(_mockChunkRepository.Object);
                        services.AddSingleton(_mockChunker.Object);
                        // Add mock IProvcacheService to satisfy the main endpoints
                        services.AddSingleton(Mock.Of<IProvcacheService>());
                    })
                    .Configure(app =>
                    {
                        app.UseRouting();
                        app.UseEndpoints(endpoints =>
                        {
                            endpoints.MapProvcacheEndpoints();
                        });
                    });
            })
            .StartAsync();

        _client = _host.GetTestClient();
    }

    public async Task DisposeAsync()
    {
        _client?.Dispose();
        if (_host != null)
        {
            await _host.StopAsync();
            _host.Dispose();
        }
    }

    [Fact]
    public async Task GetEvidenceChunks_ReturnsChunksWithPagination()
    {
        // Arrange
        var proofRoot = "sha256:abc123";
        var manifest = new ChunkManifest
        {
            ProofRoot = proofRoot,
            TotalChunks = 15,
            TotalSize = 15000,
            Chunks = [],
            GeneratedAt = DateTimeOffset.UtcNow
        };

        var chunks = new List<EvidenceChunk>
        {
            CreateChunk(proofRoot, 0, 1000),
            CreateChunk(proofRoot, 1, 1000),
            CreateChunk(proofRoot, 2, 1000)
        };

        _mockChunkRepository!.Setup(x => x.GetManifestAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _mockChunkRepository.Setup(x => x.GetChunkRangeAsync(proofRoot, 0, 10, It.IsAny<CancellationToken>()))
            .ReturnsAsync(chunks);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);
        var result = await response.Content.ReadFromJsonAsync<ProofEvidenceResponse>();
        result.Should().NotBeNull();
        result!.TotalChunks.Should().Be(15);
        result.Chunks.Should().HaveCount(3);
        result.HasMore.Should().BeTrue();
        result.NextCursor.Should().Be("10");
    }

    [Fact]
    public async Task GetEvidenceChunks_WithOffset_ReturnsPaginatedResults()
    {
        // Arrange
        var proofRoot = "sha256:def456";
        var manifest = new ChunkManifest
        {
            ProofRoot = proofRoot,
            TotalChunks = 5,
            TotalSize = 5000,
            Chunks = [],
            GeneratedAt = DateTimeOffset.UtcNow
        };

        var chunks = new List<EvidenceChunk>
        {
            CreateChunk(proofRoot, 2, 1000),
            CreateChunk(proofRoot, 3, 1000),
            CreateChunk(proofRoot, 4, 1000)
        };

        _mockChunkRepository!.Setup(x => x.GetManifestAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _mockChunkRepository.Setup(x => x.GetChunkRangeAsync(proofRoot, 2, 3, It.IsAny<CancellationToken>()))
            .ReturnsAsync(chunks);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}?offset=2&limit=3");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);
        var result = await response.Content.ReadFromJsonAsync<ProofEvidenceResponse>();
        result.Should().NotBeNull();
        result!.Chunks.Should().HaveCount(3);
        result.Chunks[0].Index.Should().Be(2);
        result.HasMore.Should().BeFalse();
    }

    [Fact]
    public async Task GetEvidenceChunks_WithIncludeData_ReturnsBase64Blobs()
    {
        // Arrange
        var proofRoot = "sha256:ghi789";
        var manifest = new ChunkManifest
        {
            ProofRoot = proofRoot,
            TotalChunks = 1,
            TotalSize = 100,
            Chunks = [],
            GeneratedAt = DateTimeOffset.UtcNow
        };

        var chunks = new List<EvidenceChunk>
        {
            CreateChunk(proofRoot, 0, 100)
        };

        _mockChunkRepository!.Setup(x => x.GetManifestAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _mockChunkRepository.Setup(x => x.GetChunkRangeAsync(proofRoot, 0, 10, It.IsAny<CancellationToken>()))
            .ReturnsAsync(chunks);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}?includeData=true");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);
        var result = await response.Content.ReadFromJsonAsync<ProofEvidenceResponse>();
        result.Should().NotBeNull();
        result!.Chunks[0].Data.Should().NotBeNullOrEmpty();
    }

    [Fact]
    public async Task GetEvidenceChunks_NotFound_Returns404()
    {
        // Arrange
        var proofRoot = "sha256:notfound";
        _mockChunkRepository!.Setup(x => x.GetManifestAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync((ChunkManifest?)null);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.NotFound);
    }

    [Fact]
    public async Task GetProofManifest_ReturnsManifestWithChunkMetadata()
    {
        // Arrange
        var proofRoot = "sha256:manifest123";
        var manifest = new ChunkManifest
        {
            ProofRoot = proofRoot,
            TotalChunks = 3,
            TotalSize = 3000,
            Chunks = new List<ChunkMetadata>
            {
                new() { ChunkId = Guid.NewGuid(), Index = 0, Hash = "sha256:chunk0", Size = 1000, ContentType = "application/octet-stream" },
                new() { ChunkId = Guid.NewGuid(), Index = 1, Hash = "sha256:chunk1", Size = 1000, ContentType = "application/octet-stream" },
                new() { ChunkId = Guid.NewGuid(), Index = 2, Hash = "sha256:chunk2", Size = 1000, ContentType = "application/octet-stream" }
            },
            GeneratedAt = DateTimeOffset.UtcNow
        };

        _mockChunkRepository!.Setup(x => x.GetManifestAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}/manifest");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);
        var result = await response.Content.ReadFromJsonAsync<ProofManifestResponse>();
        result.Should().NotBeNull();
        result!.TotalChunks.Should().Be(3);
        result.TotalSize.Should().Be(3000);
        result.Chunks.Should().HaveCount(3);
    }

    [Fact]
    public async Task GetProofManifest_NotFound_Returns404()
    {
        // Arrange
        var proofRoot = "sha256:notfound";
        _mockChunkRepository!.Setup(x => x.GetManifestAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync((ChunkManifest?)null);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}/manifest");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.NotFound);
    }

    [Fact]
    public async Task GetSingleChunk_ReturnsChunkWithData()
    {
        // Arrange
        var proofRoot = "sha256:singlechunk";
        var chunk = CreateChunk(proofRoot, 5, 500);

        _mockChunkRepository!.Setup(x => x.GetChunkAsync(proofRoot, 5, It.IsAny<CancellationToken>()))
            .ReturnsAsync(chunk);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}/chunks/5");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);
        var result = await response.Content.ReadFromJsonAsync<ProofChunkResponse>();
        result.Should().NotBeNull();
        result!.Index.Should().Be(5);
        result.Size.Should().Be(500);
        result.Data.Should().NotBeNullOrEmpty();
    }

    [Fact]
    public async Task GetSingleChunk_NotFound_Returns404()
    {
        // Arrange
        var proofRoot = "sha256:notfound";
        _mockChunkRepository!.Setup(x => x.GetChunkAsync(proofRoot, 99, It.IsAny<CancellationToken>()))
            .ReturnsAsync((EvidenceChunk?)null);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}/chunks/99");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.NotFound);
    }

    [Fact]
    public async Task VerifyProof_ValidChunks_ReturnsIsValidTrue()
    {
        // Arrange
        var proofRoot = "sha256:validproof";
        var chunks = new List<EvidenceChunk>
        {
            CreateChunk(proofRoot, 0, 100),
            CreateChunk(proofRoot, 1, 100)
        };

        _mockChunkRepository!.Setup(x => x.GetChunksAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(chunks);

        _mockChunker!.Setup(x => x.VerifyChunk(It.IsAny<EvidenceChunk>()))
            .Returns(true);

        _mockChunker.Setup(x => x.ComputeMerkleRoot(It.IsAny<IEnumerable<string>>()))
            .Returns(proofRoot);

        // Act
        var response = await _client!.PostAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}/verify", null);

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);
        var result = await response.Content.ReadFromJsonAsync<ProofVerificationResponse>();
        result.Should().NotBeNull();
        result!.IsValid.Should().BeTrue();
        result.ChunkResults.Should().HaveCount(2);
    }

    [Fact]
    public async Task VerifyProof_MerkleRootMismatch_ReturnsIsValidFalse()
    {
        // Arrange
        var proofRoot = "sha256:badroot";
        var chunks = new List<EvidenceChunk>
        {
            CreateChunk(proofRoot, 0, 100)
        };

        _mockChunkRepository!.Setup(x => x.GetChunksAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(chunks);

        _mockChunker!.Setup(x => x.VerifyChunk(It.IsAny<EvidenceChunk>()))
            .Returns(true);

        _mockChunker.Setup(x => x.ComputeMerkleRoot(It.IsAny<IEnumerable<string>>()))
            .Returns("sha256:differentroot");

        // Act
        var response = await _client!.PostAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}/verify", null);

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);
        var result = await response.Content.ReadFromJsonAsync<ProofVerificationResponse>();
        result.Should().NotBeNull();
        result!.IsValid.Should().BeFalse();
        result.Error.Should().Contain("Merkle root mismatch");
    }

    [Fact]
    public async Task VerifyProof_NoChunks_Returns404()
    {
        // Arrange
        var proofRoot = "sha256:nochunks";
        _mockChunkRepository!.Setup(x => x.GetChunksAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(new List<EvidenceChunk>());

        // Act
        var response = await _client!.PostAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}/verify", null);

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.NotFound);
    }

    private static EvidenceChunk CreateChunk(string proofRoot, int index, int size)
    {
        var data = new byte[size];
        Random.Shared.NextBytes(data);

        return new EvidenceChunk
        {
            ChunkId = Guid.NewGuid(),
            ProofRoot = proofRoot,
            ChunkIndex = index,
            ChunkHash = $"sha256:chunk{index}",
            Blob = data,
            BlobSize = size,
            ContentType = "application/octet-stream",
            CreatedAt = DateTimeOffset.UtcNow
        };
    }
}
@@ -0,0 +1,289 @@
using FluentAssertions;
using StellaOps.Provcache;
using Xunit;

namespace StellaOps.Provcache.Tests;

/// <summary>
/// Tests for <see cref="EvidenceChunker"/>.
/// </summary>
public sealed class EvidenceChunkerTests
{
    private readonly ProvcacheOptions _options;
    private readonly EvidenceChunker _chunker;

    public EvidenceChunkerTests()
    {
        _options = new ProvcacheOptions { ChunkSize = 64 }; // Small for testing
        _chunker = new EvidenceChunker(_options);
    }

    [Fact]
    public async Task ChunkAsync_ShouldSplitEvidenceIntoMultipleChunks_WhenLargerThanChunkSize()
    {
        // Arrange
        var evidence = new byte[200];
        Random.Shared.NextBytes(evidence);
        const string contentType = "application/octet-stream";

        // Act
        var result = await _chunker.ChunkAsync(evidence, contentType);

        // Assert
        result.Should().NotBeNull();
        result.Chunks.Should().HaveCount(4); // ceil(200/64) = 4
        result.TotalSize.Should().Be(200);
        result.ProofRoot.Should().StartWith("sha256:");

        // Verify chunk ordering
        for (var i = 0; i < result.Chunks.Count; i++)
        {
            result.Chunks[i].ChunkIndex.Should().Be(i);
            result.Chunks[i].ContentType.Should().Be(contentType);
            result.Chunks[i].ProofRoot.Should().Be(result.ProofRoot);
        }
    }

    [Fact]
    public async Task ChunkAsync_ShouldCreateSingleChunk_WhenSmallerThanChunkSize()
    {
        // Arrange
        var evidence = new byte[32];
        Random.Shared.NextBytes(evidence);
        const string contentType = "application/json";

        // Act
        var result = await _chunker.ChunkAsync(evidence, contentType);

        // Assert
        result.Should().NotBeNull();
        result.Chunks.Should().HaveCount(1);
        result.TotalSize.Should().Be(32);
        result.Chunks[0].BlobSize.Should().Be(32);
    }

    [Fact]
    public async Task ChunkAsync_ShouldHandleEmptyEvidence()
    {
        // Arrange
        var evidence = Array.Empty<byte>();
        const string contentType = "application/octet-stream";

        // Act
        var result = await _chunker.ChunkAsync(evidence, contentType);

        // Assert
        result.Should().NotBeNull();
        result.Chunks.Should().BeEmpty();
        result.TotalSize.Should().Be(0);
    }

    [Fact]
    public async Task ChunkAsync_ShouldProduceUniqueHashForEachChunk()
    {
        // Arrange - create evidence with distinct bytes per chunk
        var evidence = new byte[128];
        for (var i = 0; i < 64; i++) evidence[i] = 0xAA;
        for (var i = 64; i < 128; i++) evidence[i] = 0xBB;
        const string contentType = "application/octet-stream";

        // Act
        var result = await _chunker.ChunkAsync(evidence, contentType);

        // Assert
        result.Chunks.Should().HaveCount(2);
        result.Chunks[0].ChunkHash.Should().NotBe(result.Chunks[1].ChunkHash);
    }

    [Fact]
    public async Task ReassembleAsync_ShouldRecoverOriginalEvidence()
    {
        // Arrange
        var original = new byte[200];
        Random.Shared.NextBytes(original);
        const string contentType = "application/octet-stream";

        var chunked = await _chunker.ChunkAsync(original, contentType);

        // Act
        var reassembled = await _chunker.ReassembleAsync(chunked.Chunks, chunked.ProofRoot);

        // Assert
        reassembled.Should().BeEquivalentTo(original);
    }

    [Fact]
    public async Task ReassembleAsync_ShouldThrow_WhenMerkleRootMismatch()
    {
        // Arrange
        var evidence = new byte[100];
        Random.Shared.NextBytes(evidence);
        const string contentType = "application/octet-stream";

        var chunked = await _chunker.ChunkAsync(evidence, contentType);

        // Act & Assert
        var act = () => _chunker.ReassembleAsync(chunked.Chunks, "sha256:invalid_root");
        await act.Should().ThrowAsync<InvalidOperationException>()
            .WithMessage("*Merkle root mismatch*");
    }

    [Fact]
    public async Task ReassembleAsync_ShouldThrow_WhenChunkCorrupted()
    {
        // Arrange
        var evidence = new byte[100];
        Random.Shared.NextBytes(evidence);
        const string contentType = "application/octet-stream";

        var chunked = await _chunker.ChunkAsync(evidence, contentType);

        // Corrupt the first chunk: zero the blob while keeping its recorded hash
        var corruptedChunks = chunked.Chunks
            .Select((c, i) => i == 0
                ? c with { Blob = new byte[c.BlobSize] } // same hash, different blob
                : c)
            .ToList();

// Act & Assert
|
||||
var act = () => _chunker.ReassembleAsync(corruptedChunks, chunked.ProofRoot);
|
||||
await act.Should().ThrowAsync<InvalidOperationException>()
|
||||
.WithMessage("*verification failed*");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyChunk_ShouldReturnTrue_WhenChunkValid()
|
||||
{
|
||||
// Arrange
|
||||
var data = new byte[32];
|
||||
Random.Shared.NextBytes(data);
|
||||
var hash = ComputeHash(data);
|
||||
|
||||
var chunk = new EvidenceChunk
|
||||
{
|
||||
ChunkId = Guid.NewGuid(),
|
||||
ProofRoot = "sha256:test",
|
||||
ChunkIndex = 0,
|
||||
ChunkHash = hash,
|
||||
Blob = data,
|
||||
BlobSize = data.Length,
|
||||
ContentType = "application/octet-stream",
|
||||
CreatedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
// Act & Assert
|
||||
_chunker.VerifyChunk(chunk).Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void VerifyChunk_ShouldReturnFalse_WhenHashMismatch()
|
||||
{
|
||||
// Arrange
|
||||
var chunk = new EvidenceChunk
|
||||
{
|
||||
ChunkId = Guid.NewGuid(),
|
||||
ProofRoot = "sha256:test",
|
||||
ChunkIndex = 0,
|
||||
ChunkHash = "sha256:wrong_hash",
|
||||
Blob = new byte[32],
|
||||
BlobSize = 32,
|
||||
ContentType = "application/octet-stream",
|
||||
CreatedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
// Act & Assert
|
||||
_chunker.VerifyChunk(chunk).Should().BeFalse();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeMerkleRoot_ShouldReturnSameResult_ForSameInput()
|
||||
{
|
||||
// Arrange
|
||||
var hashes = new[] { "sha256:aabb", "sha256:ccdd", "sha256:eeff", "sha256:1122" };
|
||||
|
||||
// Act
|
||||
var root1 = _chunker.ComputeMerkleRoot(hashes);
|
||||
var root2 = _chunker.ComputeMerkleRoot(hashes);
|
||||
|
||||
// Assert
|
||||
root1.Should().Be(root2);
|
||||
root1.Should().StartWith("sha256:");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeMerkleRoot_ShouldHandleSingleHash()
|
||||
{
|
||||
// Arrange
|
||||
var hashes = new[] { "sha256:aabbccdd" };
|
||||
|
||||
// Act
|
||||
var root = _chunker.ComputeMerkleRoot(hashes);
|
||||
|
||||
// Assert
|
||||
root.Should().Be("sha256:aabbccdd");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ComputeMerkleRoot_ShouldHandleOddNumberOfHashes()
|
||||
{
|
||||
// Arrange
|
||||
var hashes = new[] { "sha256:aabb", "sha256:ccdd", "sha256:eeff" };
|
||||
|
||||
// Act
|
||||
var root = _chunker.ComputeMerkleRoot(hashes);
|
||||
|
||||
// Assert
|
||||
root.Should().NotBeNullOrEmpty();
|
||||
root.Should().StartWith("sha256:");
|
||||
}
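
    // Note: with an odd number of leaves, whether the implementation duplicates
    // or promotes the unpaired hash is deliberately left unpinned here; the test
    // only asserts that a well-formed "sha256:"-prefixed root is produced.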

    [Fact]
    public async Task ChunkStreamAsync_ShouldYieldChunksInOrder()
    {
        // Arrange
        var evidence = new byte[200];
        Random.Shared.NextBytes(evidence);
        using var stream = new MemoryStream(evidence);
        const string contentType = "application/octet-stream";

        // Act
        var chunks = new List<EvidenceChunk>();
        await foreach (var chunk in _chunker.ChunkStreamAsync(stream, contentType))
        {
            chunks.Add(chunk);
        }

        // Assert
        chunks.Should().HaveCount(4);
        for (var i = 0; i < chunks.Count; i++)
        {
            chunks[i].ChunkIndex.Should().Be(i);
        }
    }
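
    // The expected count of 4 assumes the shared fixture configures a 64-byte
    // chunk size (consistent with the 128-byte/2-chunk and 32-byte/1-chunk
    // tests above): ceil(200 / 64) = 4.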

    [Fact]
    public async Task Roundtrip_ShouldPreserveDataIntegrity()
    {
        // Arrange - use realistic chunk size
        var options = new ProvcacheOptions { ChunkSize = 1024 };
        var chunker = new EvidenceChunker(options);

        var original = new byte[5000]; // ~5 chunks
        Random.Shared.NextBytes(original);
        const string contentType = "application/octet-stream";

        // Act
        var chunked = await chunker.ChunkAsync(original, contentType);
        var reassembled = await chunker.ReassembleAsync(chunked.Chunks, chunked.ProofRoot);

        // Assert
        reassembled.Should().BeEquivalentTo(original);
        chunked.Chunks.Should().HaveCount(5);
    }
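
    // Test-only helper mirroring the "sha256:<lowercase hex>" format that the
    // assertions above expect the chunker to emit.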
    private static string ComputeHash(byte[] data)
    {
        var hash = System.Security.Cryptography.SHA256.HashData(data);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }
}
@@ -0,0 +1,440 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;

namespace StellaOps.Provcache.Tests;

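/// <summary>
/// Tests for <see cref="LazyFetchOrchestrator"/> plus the file- and HTTP-based
/// chunk fetchers it drives.
/// </summary>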
public sealed class LazyFetchTests
{
    private readonly Mock<IEvidenceChunkRepository> _repositoryMock;
    private readonly LazyFetchOrchestrator _orchestrator;

    public LazyFetchTests()
    {
        _repositoryMock = new Mock<IEvidenceChunkRepository>();
        _orchestrator = new LazyFetchOrchestrator(
            _repositoryMock.Object,
            NullLogger<LazyFetchOrchestrator>.Instance);
    }

    [Fact]
    public async Task FetchAndStoreAsync_WhenFetcherNotAvailable_ReturnsFailure()
    {
        // Arrange
        var fetcherMock = new Mock<ILazyEvidenceFetcher>();
        fetcherMock.Setup(f => f.IsAvailableAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(false);
        fetcherMock.SetupGet(f => f.FetcherType).Returns("mock");

        // Act
        var result = await _orchestrator.FetchAndStoreAsync("test-root", fetcherMock.Object);

        // Assert
        result.Success.Should().BeFalse();
        result.Errors.Should().Contain(e => e.Contains("not available"));
    }

    [Fact]
    public async Task FetchAndStoreAsync_WhenNoManifestFound_ReturnsFailure()
    {
        // Arrange
        var fetcherMock = new Mock<ILazyEvidenceFetcher>();
        fetcherMock.Setup(f => f.IsAvailableAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        fetcherMock.Setup(f => f.FetchManifestAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((ChunkManifest?)null);
        fetcherMock.SetupGet(f => f.FetcherType).Returns("mock");

        _repositoryMock.Setup(r => r.GetManifestAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((ChunkManifest?)null);

        // Act
        var result = await _orchestrator.FetchAndStoreAsync("test-root", fetcherMock.Object);

        // Assert
        result.Success.Should().BeFalse();
        result.Errors.Should().Contain(e => e.Contains("No manifest found"));
    }

    [Fact]
    public async Task FetchAndStoreAsync_WhenAllChunksPresent_ReturnsSuccessWithZeroFetched()
    {
        // Arrange
        var manifest = CreateTestManifest("test-root", 3);
        var existingChunks = CreateTestEvidenceChunks("test-root", 3);

        var fetcherMock = new Mock<ILazyEvidenceFetcher>();
        fetcherMock.Setup(f => f.IsAvailableAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        fetcherMock.SetupGet(f => f.FetcherType).Returns("mock");

        _repositoryMock.Setup(r => r.GetManifestAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _repositoryMock.Setup(r => r.GetChunksAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(existingChunks);

        // Act
        var result = await _orchestrator.FetchAndStoreAsync("test-root", fetcherMock.Object);

        // Assert
        result.Success.Should().BeTrue();
        result.ChunksFetched.Should().Be(0);
        result.BytesFetched.Should().Be(0);
    }

    [Fact]
    public async Task FetchAndStoreAsync_FetchesMissingChunks()
    {
        // Arrange
        var manifest = CreateTestManifest("test-root", 3);
        var existingChunks = CreateTestEvidenceChunks("test-root", 1); // Only have 1 chunk
        var missingChunks = new List<FetchedChunk>
        {
            CreateTestFetchedChunk(1),
            CreateTestFetchedChunk(2)
        };

        var fetcherMock = new Mock<ILazyEvidenceFetcher>();
        fetcherMock.Setup(f => f.IsAvailableAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        fetcherMock.SetupGet(f => f.FetcherType).Returns("mock");
        fetcherMock.Setup(f => f.FetchRemainingChunksAsync(
                "test-root",
                It.IsAny<ChunkManifest>(),
                It.IsAny<IReadOnlySet<int>>(),
                It.IsAny<CancellationToken>()))
            .Returns(missingChunks.ToAsyncEnumerable());

        _repositoryMock.Setup(r => r.GetManifestAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _repositoryMock.Setup(r => r.GetChunksAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(existingChunks);

        // Act
        var result = await _orchestrator.FetchAndStoreAsync("test-root", fetcherMock.Object);

        // Assert
        result.Success.Should().BeTrue();
        result.ChunksFetched.Should().Be(2);
        result.BytesFetched.Should().Be(missingChunks.Sum(c => c.Data.Length));

        _repositoryMock.Verify(r => r.StoreChunksAsync(
            "test-root",
            It.IsAny<IEnumerable<EvidenceChunk>>(),
            It.IsAny<CancellationToken>()), Times.AtLeastOnce);
    }

    [Fact]
    public async Task FetchAndStoreAsync_WithVerification_RejectsCorruptedChunks()
    {
        // Arrange
        var manifest = CreateTestManifest("test-root", 2);
        var existingChunks = new List<EvidenceChunk>(); // No existing chunks

        var corruptedChunk = new FetchedChunk
        {
            Index = 0,
            Data = [0x00, 0x01, 0x02],
            Hash = "invalid_hash_that_does_not_match"
        };

        var fetcherMock = new Mock<ILazyEvidenceFetcher>();
        fetcherMock.Setup(f => f.IsAvailableAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        fetcherMock.SetupGet(f => f.FetcherType).Returns("mock");
        fetcherMock.Setup(f => f.FetchRemainingChunksAsync(
                "test-root",
                It.IsAny<ChunkManifest>(),
                It.IsAny<IReadOnlySet<int>>(),
                It.IsAny<CancellationToken>()))
            .Returns(new[] { corruptedChunk }.ToAsyncEnumerable());

        _repositoryMock.Setup(r => r.GetManifestAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _repositoryMock.Setup(r => r.GetChunksAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(existingChunks);

        var options = new LazyFetchOptions { VerifyOnFetch = true };

        // Act
        var result = await _orchestrator.FetchAndStoreAsync("test-root", fetcherMock.Object, options);

        // Assert
        result.Success.Should().BeTrue(); // Still succeeds by default (skips invalid)
        result.ChunksFailedVerification.Should().Be(1);
        result.FailedIndices.Should().Contain(0);
        result.ChunksFetched.Should().Be(0); // Nothing stored
    }

    [Fact]
    public async Task FetchAndStoreAsync_WithFailOnVerificationError_AbortsOnCorruption()
    {
        // Arrange
        var manifest = CreateTestManifest("test-root", 2);
        var existingChunks = new List<EvidenceChunk>();

        var corruptedChunk = new FetchedChunk
        {
            Index = 0,
            Data = [0x00, 0x01, 0x02],
            Hash = "invalid_hash"
        };

        var fetcherMock = new Mock<ILazyEvidenceFetcher>();
        fetcherMock.Setup(f => f.IsAvailableAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        fetcherMock.SetupGet(f => f.FetcherType).Returns("mock");
        fetcherMock.Setup(f => f.FetchRemainingChunksAsync(
                "test-root",
                It.IsAny<ChunkManifest>(),
                It.IsAny<IReadOnlySet<int>>(),
                It.IsAny<CancellationToken>()))
            .Returns(new[] { corruptedChunk }.ToAsyncEnumerable());

        _repositoryMock.Setup(r => r.GetManifestAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _repositoryMock.Setup(r => r.GetChunksAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(existingChunks);

        var options = new LazyFetchOptions
        {
            VerifyOnFetch = true,
            FailOnVerificationError = true
        };

        // Act
        var result = await _orchestrator.FetchAndStoreAsync("test-root", fetcherMock.Object, options);

        // Assert
        result.Success.Should().BeFalse();
        result.ChunksFailedVerification.Should().BeGreaterThanOrEqualTo(1);
    }

    [Fact]
    public async Task FetchAndStoreAsync_RespectsMaxChunksLimit()
    {
        // Arrange
        var manifest = CreateTestManifest("test-root", 10);
        var existingChunks = new List<EvidenceChunk>();
        var allChunks = Enumerable.Range(0, 10)
            .Select(CreateTestFetchedChunk)
            .ToList();

        var fetcherMock = new Mock<ILazyEvidenceFetcher>();
        fetcherMock.Setup(f => f.IsAvailableAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        fetcherMock.SetupGet(f => f.FetcherType).Returns("mock");
        fetcherMock.Setup(f => f.FetchRemainingChunksAsync(
                "test-root",
                It.IsAny<ChunkManifest>(),
                It.IsAny<IReadOnlySet<int>>(),
                It.IsAny<CancellationToken>()))
            .Returns(allChunks.ToAsyncEnumerable());

        _repositoryMock.Setup(r => r.GetManifestAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _repositoryMock.Setup(r => r.GetChunksAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(existingChunks);

        var options = new LazyFetchOptions
        {
            VerifyOnFetch = false,
            MaxChunksToFetch = 3
        };

        // Act
        var result = await _orchestrator.FetchAndStoreAsync("test-root", fetcherMock.Object, options);

        // Assert
        result.Success.Should().BeTrue();
        result.ChunksFetched.Should().Be(3);
    }

    [Fact]
    public void FileChunkFetcher_FetcherType_ReturnsFile()
    {
        // Arrange
        var tempDir = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
        var fetcher = new FileChunkFetcher(tempDir, NullLogger<FileChunkFetcher>.Instance);

        // Act & Assert
        fetcher.FetcherType.Should().Be("file");
    }

    [Fact]
    public async Task FileChunkFetcher_IsAvailableAsync_ReturnsTrueWhenDirectoryExists()
    {
        // Arrange
        var tempDir = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
        Directory.CreateDirectory(tempDir);

        try
        {
            var fetcher = new FileChunkFetcher(tempDir, NullLogger<FileChunkFetcher>.Instance);

            // Act
            var result = await fetcher.IsAvailableAsync();

            // Assert
            result.Should().BeTrue();
        }
        finally
        {
            Directory.Delete(tempDir, true);
        }
    }

    [Fact]
    public async Task FileChunkFetcher_IsAvailableAsync_ReturnsFalseWhenDirectoryMissing()
    {
        // Arrange
        var tempDir = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
        var fetcher = new FileChunkFetcher(tempDir, NullLogger<FileChunkFetcher>.Instance);

        // Act
        var result = await fetcher.IsAvailableAsync();

        // Assert
        result.Should().BeFalse();
    }

    [Fact]
    public async Task FileChunkFetcher_FetchChunkAsync_ReturnsNullWhenChunkNotFound()
    {
        // Arrange
        var tempDir = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
        Directory.CreateDirectory(tempDir);

        try
        {
            var fetcher = new FileChunkFetcher(tempDir, NullLogger<FileChunkFetcher>.Instance);

            // Act
            var result = await fetcher.FetchChunkAsync("test-root", 0);

            // Assert
            result.Should().BeNull();
        }
        finally
        {
            Directory.Delete(tempDir, true);
        }
    }

    [Fact]
    public void HttpChunkFetcher_FetcherType_ReturnsHttp()
    {
        // Arrange
        var httpClient = new HttpClient { BaseAddress = new Uri("http://localhost") };
        var fetcher = new HttpChunkFetcher(httpClient, ownsClient: false, NullLogger<HttpChunkFetcher>.Instance);

        // Act & Assert
        fetcher.FetcherType.Should().Be("http");
    }

    [Fact]
    public async Task HttpChunkFetcher_IsAvailableAsync_ReturnsFalseWhenHostUnreachable()
    {
        // Arrange - use a non-routable IP to ensure connection failure
        var httpClient = new HttpClient
        {
            BaseAddress = new Uri("http://192.0.2.1:9999"),
            Timeout = TimeSpan.FromMilliseconds(100) // Short timeout for test speed
        };
        var fetcher = new HttpChunkFetcher(httpClient, ownsClient: false, NullLogger<HttpChunkFetcher>.Instance);

        // Act
        var result = await fetcher.IsAvailableAsync();

        // Assert
        result.Should().BeFalse();
    }
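
    // 192.0.2.0/24 is TEST-NET-1 (RFC 5737), reserved for documentation and
    // never routed, so the connection attempt above fails reliably.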

    // Helper methods

    private static ChunkManifest CreateTestManifest(string proofRoot, int chunkCount)
    {
        var chunks = Enumerable.Range(0, chunkCount)
            .Select(i => new ChunkMetadata
            {
                ChunkId = Guid.NewGuid(),
                Index = i,
                Hash = ComputeTestHash(i),
                Size = 100 + i,
                ContentType = "application/octet-stream"
            })
            .ToList();

        return new ChunkManifest
        {
            ProofRoot = proofRoot,
            TotalChunks = chunkCount,
            TotalSize = chunks.Sum(c => c.Size),
            Chunks = chunks,
            GeneratedAt = DateTimeOffset.UtcNow
        };
    }

    private static List<EvidenceChunk> CreateTestEvidenceChunks(string proofRoot, int count)
    {
        return Enumerable.Range(0, count)
            .Select(i =>
            {
                var data = CreateTestData(i);
                return new EvidenceChunk
                {
                    ChunkId = Guid.NewGuid(),
                    ProofRoot = proofRoot,
                    ChunkIndex = i,
                    ChunkHash = ComputeActualHash(data),
                    Blob = data,
                    BlobSize = data.Length,
                    ContentType = "application/octet-stream",
                    CreatedAt = DateTimeOffset.UtcNow
                };
            })
            .ToList();
    }

    private static FetchedChunk CreateTestFetchedChunk(int index)
    {
        var data = CreateTestData(index);
        return new FetchedChunk
        {
            Index = index,
            Data = data,
            Hash = ComputeActualHash(data)
        };
    }

    private static byte[] CreateTestData(int index)
    {
        return Enumerable.Range(0, 100 + index)
            .Select(i => (byte)(i % 256))
            .ToArray();
    }

    private static string ComputeTestHash(int index)
    {
        var data = CreateTestData(index);
        return ComputeActualHash(data);
    }

    private static string ComputeActualHash(byte[] data)
    {
        return Convert.ToHexString(System.Security.Cryptography.SHA256.HashData(data)).ToLowerInvariant();
    }
}
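
// Note: Task.Yield() below makes each advance of the async enumerator complete
// asynchronously, so the orchestrator is exercised with real await
// continuations rather than purely synchronous completions.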
// Extension method for async enumerable from list
internal static class AsyncEnumerableExtensions
{
    public static async IAsyncEnumerable<T> ToAsyncEnumerable<T>(this IEnumerable<T> source)
    {
        foreach (var item in source)
        {
            yield return item;
            await Task.Yield();
        }
    }
}
@@ -0,0 +1,467 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using StellaOps.Cryptography;
using StellaOps.Provenance.Attestation;
using System.Text.Json;
using Xunit;

namespace StellaOps.Provcache.Tests;

/// <summary>
/// Tests for <see cref="MinimalProofExporter"/> covering all density levels.
/// </summary>
public sealed class MinimalProofExporterTests
{
    private readonly Mock<IProvcacheService> _mockService;
    private readonly Mock<IEvidenceChunkRepository> _mockChunkRepo;
    private readonly FakeTimeProvider _timeProvider;
    private readonly MinimalProofExporter _exporter;

    // Test data
    private readonly ProvcacheEntry _testEntry;
    private readonly ChunkManifest _testManifest;
    private readonly IReadOnlyList<EvidenceChunk> _testChunks;

    // Same options as the exporter uses, for round-tripping
    private static readonly JsonSerializerOptions s_jsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull,
        PropertyNameCaseInsensitive = true
    };

    public MinimalProofExporterTests()
    {
        _mockService = new Mock<IProvcacheService>();
        _mockChunkRepo = new Mock<IEvidenceChunkRepository>();
        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero));

        _exporter = new MinimalProofExporter(
            _mockService.Object,
            _mockChunkRepo.Object,
            signer: null,
            _timeProvider,
            NullLogger<MinimalProofExporter>.Instance);

        // Create test data
        var proofRoot = "sha256:abc123def456";
        var veriKey = "sha256:verikey789";

        _testEntry = new ProvcacheEntry
        {
            VeriKey = veriKey,
            Decision = new DecisionDigest
            {
                DigestVersion = "v1",
                VeriKey = veriKey,
                VerdictHash = "sha256:verdict123",
                ProofRoot = proofRoot,
                ReplaySeed = new ReplaySeed
                {
                    FeedIds = ["cve-2024", "ghsa-2024"],
                    RuleIds = ["default-policy-v1"]
                },
                CreatedAt = _timeProvider.GetUtcNow(),
                ExpiresAt = _timeProvider.GetUtcNow().AddHours(24),
                TrustScore = 85
            },
            PolicyHash = "sha256:policy123",
            SignerSetHash = "sha256:signers123",
            FeedEpoch = "2025-W01",
            CreatedAt = _timeProvider.GetUtcNow(),
            ExpiresAt = _timeProvider.GetUtcNow().AddHours(24)
        };

        // Create 5 chunks
        _testChunks = Enumerable.Range(0, 5)
            .Select(i =>
            {
                var data = new byte[1024];
                Random.Shared.NextBytes(data);
                return new EvidenceChunk
                {
                    ChunkId = Guid.NewGuid(),
                    ProofRoot = proofRoot,
                    ChunkIndex = i,
                    ChunkHash = $"sha256:{Convert.ToHexStringLower(System.Security.Cryptography.SHA256.HashData(data))}",
                    Blob = data,
                    BlobSize = 1024,
                    ContentType = "application/octet-stream",
                    CreatedAt = _timeProvider.GetUtcNow()
                };
            })
            .ToList();

        _testManifest = new ChunkManifest
        {
            ProofRoot = proofRoot,
            TotalChunks = 5,
            TotalSize = 5 * 1024,
            Chunks = _testChunks.Select(c => new ChunkMetadata
            {
                ChunkId = c.ChunkId,
                Index = c.ChunkIndex,
                Hash = c.ChunkHash,
                Size = c.BlobSize,
                ContentType = c.ContentType
            }).ToList(),
            GeneratedAt = _timeProvider.GetUtcNow()
        };
    }

    #region Export Tests
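
    // Density semantics exercised below: Lite = digest + manifest only,
    // Standard = the first N chunks inline, Strict = every chunk inline.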

    [Fact]
    public async Task ExportAsync_LiteDensity_ReturnsDigestAndManifestOnly()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Lite };

        // Act
        var bundle = await _exporter.ExportAsync(_testEntry.VeriKey, options);

        // Assert
        bundle.Should().NotBeNull();
        bundle.Density.Should().Be(ProofDensity.Lite);
        bundle.Digest.Should().Be(_testEntry.Decision);
        bundle.Manifest.Should().Be(_testManifest);
        bundle.Chunks.Should().BeEmpty();
        bundle.Signature.Should().BeNull();
    }

    [Fact]
    public async Task ExportAsync_StandardDensity_ReturnsFirstNChunks()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions
        {
            Density = ProofDensity.Standard,
            StandardDensityChunkCount = 3
        };

        // Act
        var bundle = await _exporter.ExportAsync(_testEntry.VeriKey, options);

        // Assert
        bundle.Should().NotBeNull();
        bundle.Density.Should().Be(ProofDensity.Standard);
        bundle.Chunks.Should().HaveCount(3);
        bundle.Chunks.Select(c => c.Index).Should().BeEquivalentTo([0, 1, 2]);

        // Verify chunk data is base64 encoded
        foreach (var chunk in bundle.Chunks)
        {
            var decoded = Convert.FromBase64String(chunk.Data);
            decoded.Should().HaveCount(chunk.Size);
        }
    }

    [Fact]
    public async Task ExportAsync_StrictDensity_ReturnsAllChunks()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Strict };

        // Act
        var bundle = await _exporter.ExportAsync(_testEntry.VeriKey, options);

        // Assert
        bundle.Should().NotBeNull();
        bundle.Density.Should().Be(ProofDensity.Strict);
        bundle.Chunks.Should().HaveCount(5);
        bundle.Chunks.Select(c => c.Index).Should().BeEquivalentTo([0, 1, 2, 3, 4]);
    }

    [Fact]
    public async Task ExportAsync_NotFound_ThrowsException()
    {
        // Arrange
        _mockService.Setup(s => s.GetAsync(It.IsAny<string>(), false, It.IsAny<CancellationToken>()))
            .ReturnsAsync(ProvcacheServiceResult.Miss(0));
        var options = new MinimalProofExportOptions { Density = ProofDensity.Lite };

        // Act & Assert
        await Assert.ThrowsAsync<InvalidOperationException>(() =>
            _exporter.ExportAsync("sha256:notfound", options));
    }

    [Fact]
    public async Task ExportAsJsonAsync_ReturnsValidJson()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Lite };

        // Act
        var jsonBytes = await _exporter.ExportAsJsonAsync(_testEntry.VeriKey, options);

        // Assert
        jsonBytes.Should().NotBeEmpty();
        var bundle = JsonSerializer.Deserialize<MinimalProofBundle>(jsonBytes, s_jsonOptions);
        bundle.Should().NotBeNull();
        bundle!.BundleVersion.Should().Be("v1");
    }

    [Fact]
    public async Task ExportToStreamAsync_WritesToStream()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Lite };
        using var stream = new MemoryStream();

        // Act
        await _exporter.ExportToStreamAsync(_testEntry.VeriKey, options, stream);

        // Assert
        stream.Length.Should().BeGreaterThan(0);
        stream.Position = 0;
        var bundle = await JsonSerializer.DeserializeAsync<MinimalProofBundle>(stream, s_jsonOptions);
        bundle.Should().NotBeNull();
    }

    #endregion

    #region Import Tests

    [Fact]
    public async Task ImportAsync_ValidBundle_StoresChunks()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Standard, StandardDensityChunkCount = 3 };
        var bundle = await _exporter.ExportAsync(_testEntry.VeriKey, options);

        _mockChunkRepo.Setup(r => r.StoreChunksAsync(
                It.IsAny<string>(),
                It.IsAny<IEnumerable<EvidenceChunk>>(),
                It.IsAny<CancellationToken>()))
            .Returns(Task.CompletedTask);

        // Act
        var result = await _exporter.ImportAsync(bundle);

        // Assert
        result.Success.Should().BeTrue();
        result.ChunksImported.Should().Be(3);
        result.ChunksPending.Should().Be(2);
        result.Verification.DigestValid.Should().BeTrue();
        result.Verification.MerkleRootValid.Should().BeTrue();
        result.Verification.ChunksValid.Should().BeTrue();
    }

    [Fact]
    public async Task ImportFromJsonAsync_ValidJson_ImportsSuccessfully()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Lite };
        var jsonBytes = await _exporter.ExportAsJsonAsync(_testEntry.VeriKey, options);

        // Act
        var result = await _exporter.ImportFromJsonAsync(jsonBytes);

        // Assert
        result.Success.Should().BeTrue();
        result.ChunksImported.Should().Be(0); // Lite has no chunks
    }

    #endregion

    #region Verify Tests

    [Fact]
    public async Task VerifyAsync_ValidBundle_ReturnsValid()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Standard, StandardDensityChunkCount = 2 };
        var bundle = await _exporter.ExportAsync(_testEntry.VeriKey, options);

        // Act
        var verification = await _exporter.VerifyAsync(bundle);

        // Assert
        verification.DigestValid.Should().BeTrue();
        verification.MerkleRootValid.Should().BeTrue();
        verification.ChunksValid.Should().BeTrue();
        verification.SignatureValid.Should().BeNull();
        verification.FailedChunkIndices.Should().BeEmpty();
    }

    [Fact]
    public async Task VerifyAsync_CorruptedChunk_ReportsFailure()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Standard, StandardDensityChunkCount = 2 };
        var bundle = await _exporter.ExportAsync(_testEntry.VeriKey, options);

        // Corrupt a chunk
        var corruptedChunks = bundle.Chunks.ToList();
        corruptedChunks[0] = corruptedChunks[0] with { Data = Convert.ToBase64String(new byte[1024]) };
        var corruptedBundle = bundle with { Chunks = corruptedChunks };

        // Act
        var verification = await _exporter.VerifyAsync(corruptedBundle);

        // Assert
        verification.ChunksValid.Should().BeFalse();
        verification.FailedChunkIndices.Should().Contain(0);
    }

    [Fact]
    public async Task VerifyAsync_InvalidDigest_ReportsFailure()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Lite };
        var bundle = await _exporter.ExportAsync(_testEntry.VeriKey, options);

        // Corrupt the digest
        var invalidDigest = bundle.Digest with { TrustScore = -10 }; // Invalid trust score
        var invalidBundle = bundle with { Digest = invalidDigest };

        // Act
        var verification = await _exporter.VerifyAsync(invalidBundle);

        // Assert
        verification.DigestValid.Should().BeFalse();
    }

    #endregion

    #region EstimateSize Tests

    [Fact]
    public async Task EstimateExportSizeAsync_LiteDensity_ReturnsBaseSize()
    {
        // Arrange
        SetupMocks();

        // Act
        var size = await _exporter.EstimateExportSizeAsync(_testEntry.VeriKey, ProofDensity.Lite);

        // Assert
        size.Should().Be(2048); // Base size
    }

    [Fact]
    public async Task EstimateExportSizeAsync_StrictDensity_ReturnsLargerSize()
    {
        // Arrange
        SetupMocks();

        // Act
        var size = await _exporter.EstimateExportSizeAsync(_testEntry.VeriKey, ProofDensity.Strict);

        // Assert
        size.Should().BeGreaterThan(2048); // Base + all chunk data
    }

    [Fact]
    public async Task EstimateExportSizeAsync_NotFound_ReturnsZero()
    {
        // Arrange
        _mockService.Setup(s => s.GetAsync(It.IsAny<string>(), false, It.IsAny<CancellationToken>()))
            .ReturnsAsync(ProvcacheServiceResult.Miss(0));

        // Act
        var size = await _exporter.EstimateExportSizeAsync("sha256:notfound", ProofDensity.Lite);

        // Assert
        size.Should().Be(0);
    }

    #endregion

    #region Signing Tests

    [Fact]
    public async Task ExportAsync_SigningWithoutSigner_ThrowsException()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions
        {
            Density = ProofDensity.Lite,
            Sign = true
        };

        // Act & Assert
        await Assert.ThrowsAsync<InvalidOperationException>(() =>
            _exporter.ExportAsync(_testEntry.VeriKey, options));
    }

    [Fact]
    public async Task ExportAsync_WithSigner_SignsBundle()
    {
        // Arrange
        SetupMocks();

        var mockSigner = new Mock<ISigner>();
        mockSigner.Setup(s => s.SignAsync(It.IsAny<SignRequest>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(new SignResult(
                Signature: [1, 2, 3, 4],
                KeyId: "test-key-id",
                SignedAt: _timeProvider.GetUtcNow(),
                Claims: null));

        var exporterWithSigner = new MinimalProofExporter(
            _mockService.Object,
            _mockChunkRepo.Object,
            mockSigner.Object,
            _timeProvider,
            NullLogger<MinimalProofExporter>.Instance);

        var options = new MinimalProofExportOptions
        {
            Density = ProofDensity.Lite,
            Sign = true,
            SigningKeyId = "test-key-id"
        };

        // Act
        var bundle = await exporterWithSigner.ExportAsync(_testEntry.VeriKey, options);

        // Assert
        bundle.Signature.Should().NotBeNull();
        bundle.Signature!.KeyId.Should().Be("test-key-id");
        bundle.Signature.SignatureBytes.Should().NotBeEmpty();
    }

    #endregion

    private void SetupMocks()
    {
        _mockService.Setup(s => s.GetAsync(_testEntry.VeriKey, false, It.IsAny<CancellationToken>()))
            .ReturnsAsync(ProvcacheServiceResult.Hit(_testEntry, "memory", 1.0));

        _mockChunkRepo.Setup(r => r.GetManifestAsync(_testEntry.Decision.ProofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(_testManifest);

        _mockChunkRepo.Setup(r => r.GetChunkRangeAsync(
                _testEntry.Decision.ProofRoot,
                It.IsAny<int>(),
                It.IsAny<int>(),
                It.IsAny<CancellationToken>()))
            .ReturnsAsync((string root, int start, int count, CancellationToken _) =>
                _testChunks.Skip(start).Take(count).ToList());
    }
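
    // Minimal deterministic clock: pins GetUtcNow() so the fixture's
    // CreatedAt/ExpiresAt values are stable across runs.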
    private sealed class FakeTimeProvider : TimeProvider
    {
        private DateTimeOffset _now;

        public FakeTimeProvider(DateTimeOffset now) => _now = now;

        public override DateTimeOffset GetUtcNow() => _now;

        public void Advance(TimeSpan duration) => _now = _now.Add(duration);
    }
}
@@ -0,0 +1,351 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using StellaOps.Provcache.Entities;

namespace StellaOps.Provcache.Tests;

public sealed class RevocationLedgerTests
{
    private readonly InMemoryRevocationLedger _ledger;

    public RevocationLedgerTests()
    {
        _ledger = new InMemoryRevocationLedger(NullLogger<InMemoryRevocationLedger>.Instance);
    }

    [Fact]
    public async Task RecordAsync_AssignsSeqNo()
    {
        // Arrange
        var entry = CreateTestEntry(RevocationTypes.Signer, "signer-hash-1");

        // Act
        var recorded = await _ledger.RecordAsync(entry);

        // Assert
        recorded.SeqNo.Should().Be(1);
        recorded.RevocationId.Should().Be(entry.RevocationId);
        recorded.RevokedKey.Should().Be("signer-hash-1");
    }

    [Fact]
    public async Task RecordAsync_AssignsIncrementingSeqNos()
    {
        // Arrange
        var entry1 = CreateTestEntry(RevocationTypes.Signer, "signer-1");
        var entry2 = CreateTestEntry(RevocationTypes.FeedEpoch, "epoch-1");
        var entry3 = CreateTestEntry(RevocationTypes.Policy, "policy-1");

        // Act
        var recorded1 = await _ledger.RecordAsync(entry1);
        var recorded2 = await _ledger.RecordAsync(entry2);
        var recorded3 = await _ledger.RecordAsync(entry3);

        // Assert
        recorded1.SeqNo.Should().Be(1);
        recorded2.SeqNo.Should().Be(2);
        recorded3.SeqNo.Should().Be(3);
    }

    [Fact]
    public async Task GetEntriesSinceAsync_ReturnsEntriesAfterSeqNo()
    {
        // Arrange
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s2"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.FeedEpoch, "e1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Policy, "p1"));

        // Act
        var entries = await _ledger.GetEntriesSinceAsync(2);

        // Assert
        entries.Should().HaveCount(2);
        entries[0].SeqNo.Should().Be(3);
        entries[1].SeqNo.Should().Be(4);
    }

    [Fact]
    public async Task GetEntriesSinceAsync_RespectsLimit()
    {
        // Arrange
        for (int i = 0; i < 10; i++)
        {
            await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, $"s{i}"));
        }

        // Act
        var entries = await _ledger.GetEntriesSinceAsync(0, limit: 3);

        // Assert
        entries.Should().HaveCount(3);
    }

    [Fact]
    public async Task GetEntriesByTypeAsync_FiltersCorrectly()
    {
        // Arrange
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.FeedEpoch, "e1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s2"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Policy, "p1"));

        // Act
        var signerEntries = await _ledger.GetEntriesByTypeAsync(RevocationTypes.Signer);

        // Assert
        signerEntries.Should().HaveCount(2);
        signerEntries.Should().OnlyContain(e => e.RevocationType == RevocationTypes.Signer);
    }

    [Fact]
    public async Task GetEntriesByTypeAsync_FiltersBySinceTime()
    {
        // Arrange
        var oldEntry = CreateTestEntry(RevocationTypes.Signer, "s1") with
        {
            RevokedAt = DateTimeOffset.UtcNow.AddDays(-5)
        };
        var newEntry = CreateTestEntry(RevocationTypes.Signer, "s2") with
        {
            RevokedAt = DateTimeOffset.UtcNow.AddDays(-1)
        };

        await _ledger.RecordAsync(oldEntry);
        await _ledger.RecordAsync(newEntry);

        // Act
        var entries = await _ledger.GetEntriesByTypeAsync(
            RevocationTypes.Signer,
            since: DateTimeOffset.UtcNow.AddDays(-2));

        // Assert
        entries.Should().HaveCount(1);
        entries[0].RevokedKey.Should().Be("s2");
    }

    [Fact]
    public async Task GetLatestSeqNoAsync_ReturnsZeroWhenEmpty()
    {
        // Act
        var seqNo = await _ledger.GetLatestSeqNoAsync();

        // Assert
        seqNo.Should().Be(0);
    }

    [Fact]
    public async Task GetLatestSeqNoAsync_ReturnsLatest()
    {
        // Arrange
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s2"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s3"));

        // Act
        var seqNo = await _ledger.GetLatestSeqNoAsync();

        // Assert
        seqNo.Should().Be(3);
    }

    [Fact]
    public async Task GetRevocationsForKeyAsync_ReturnsMatchingEntries()
    {
        // Arrange
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.FeedEpoch, "s1")); // Same key, different type
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s2"));

        // Act
        var entries = await _ledger.GetRevocationsForKeyAsync("s1");

        // Assert
        entries.Should().HaveCount(2);
        entries.Should().OnlyContain(e => e.RevokedKey == "s1");
    }

    [Fact]
    public async Task GetStatsAsync_ReturnsCorrectStats()
    {
        // Arrange
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s1", invalidated: 5));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s2", invalidated: 3));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.FeedEpoch, "e1", invalidated: 10));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Policy, "p1", invalidated: 2));

        // Act
        var stats = await _ledger.GetStatsAsync();

        // Assert
        stats.TotalEntries.Should().Be(4);
        stats.LatestSeqNo.Should().Be(4);
        stats.TotalEntriesInvalidated.Should().Be(20);
        stats.EntriesByType.Should().ContainKey(RevocationTypes.Signer);
        stats.EntriesByType[RevocationTypes.Signer].Should().Be(2);
        stats.EntriesByType[RevocationTypes.FeedEpoch].Should().Be(1);
        stats.EntriesByType[RevocationTypes.Policy].Should().Be(1);
    }

    [Fact]
    public async Task Clear_RemovesAllEntries()
    {
        // Arrange
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s2"));

        // Act
        _ledger.Clear();

        // Assert
        var seqNo = await _ledger.GetLatestSeqNoAsync();
        seqNo.Should().Be(0);
    }

    private static RevocationEntry CreateTestEntry(
        string revocationType,
        string revokedKey,
        int invalidated = 0)
    {
        return new RevocationEntry
        {
            RevocationId = Guid.NewGuid(),
            RevocationType = revocationType,
            RevokedKey = revokedKey,
            Reason = "Test revocation",
            EntriesInvalidated = invalidated,
            Source = "unit-test",
            RevokedAt = DateTimeOffset.UtcNow
        };
    }
}
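
/// <summary>
/// Tests for <see cref="RevocationReplayService"/>: replaying the revocation
/// ledger against the cache repository from a saved checkpoint.
/// </summary>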
public sealed class RevocationReplayServiceTests
{
    private readonly InMemoryRevocationLedger _ledger;
    private readonly Mock<IProvcacheRepository> _repositoryMock;
    private readonly RevocationReplayService _replayService;

    public RevocationReplayServiceTests()
    {
        _ledger = new InMemoryRevocationLedger(NullLogger<InMemoryRevocationLedger>.Instance);
        _repositoryMock = new Mock<IProvcacheRepository>();
        _replayService = new RevocationReplayService(
            _ledger,
            _repositoryMock.Object,
            NullLogger<RevocationReplayService>.Instance);
    }

    [Fact]
    public async Task ReplayFromAsync_ReplaysAllEntries()
    {
        // Arrange
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "signer-1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.FeedEpoch, "epoch-1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Policy, "policy-1"));

        _repositoryMock.Setup(r => r.DeleteBySignerSetHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(2L);
        _repositoryMock.Setup(r => r.DeleteByFeedEpochOlderThanAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(5L);
        _repositoryMock.Setup(r => r.DeleteByPolicyHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(3L);

        // Act
        var result = await _replayService.ReplayFromAsync(0);

        // Assert
        result.Success.Should().BeTrue();
        result.EntriesReplayed.Should().Be(3);
        result.TotalInvalidations.Should().Be(10); // 2 + 5 + 3
        result.EntriesByType.Should().HaveCount(3);
    }

    [Fact]
    public async Task ReplayFromAsync_StartsFromCheckpoint()
    {
        // Arrange
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "signer-1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "signer-2"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "signer-3"));

        _repositoryMock.Setup(r => r.DeleteBySignerSetHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(1L);

        // Act - replay from seq 2 (skip first 2)
        var result = await _replayService.ReplayFromAsync(2);

        // Assert
        result.EntriesReplayed.Should().Be(1); // Only seq 3
        result.StartSeqNo.Should().Be(2);
        result.EndSeqNo.Should().Be(3);
    }

    [Fact]
    public async Task ReplayFromAsync_RespectsMaxEntries()
    {
        // Arrange
        for (int i = 0; i < 10; i++)
        {
            await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, $"signer-{i}"));
        }

        _repositoryMock.Setup(r => r.DeleteBySignerSetHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(1L);

        var options = new RevocationReplayOptions { MaxEntries = 3 };

        // Act
        var result = await _replayService.ReplayFromAsync(0, options);

        // Assert
        result.EntriesReplayed.Should().Be(3);
    }
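
    // Presumably callers persist result.EndSeqNo via SaveCheckpointAsync after
    // a successful replay so a restart resumes where it left off; the
    // checkpoint round-trip itself is covered below.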

    [Fact]
    public async Task ReplayFromAsync_ReturnsEmptyWhenNoEntries()
    {
        // Act
        var result = await _replayService.ReplayFromAsync(0);

        // Assert
        result.Success.Should().BeTrue();
        result.EntriesReplayed.Should().Be(0);
    }

    [Fact]
    public async Task GetCheckpointAsync_ReturnsZeroInitially()
    {
        // Act
        var checkpoint = await _replayService.GetCheckpointAsync();

        // Assert
        checkpoint.Should().Be(0);
    }

    [Fact]
    public async Task SaveCheckpointAsync_PersistsCheckpoint()
    {
        // Act
        await _replayService.SaveCheckpointAsync(42);
        var checkpoint = await _replayService.GetCheckpointAsync();

        // Assert
        checkpoint.Should().Be(42);
    }

    private static RevocationEntry CreateTestEntry(string revocationType, string revokedKey)
    {
        return new RevocationEntry
        {
            RevocationId = Guid.NewGuid(),
            RevocationType = revocationType,
            RevokedKey = revokedKey,
            Reason = "Test revocation",
            EntriesInvalidated = 0,
            Source = "unit-test",
            RevokedAt = DateTimeOffset.UtcNow
        };
    }
}