sprints enhancements
@@ -0,0 +1,373 @@
using System.Net;
using System.Net.Http.Json;
using FluentAssertions;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.TestHost;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Moq;
using StellaOps.Provcache.Api;
using Xunit;

namespace StellaOps.Provcache.Tests;

/// <summary>
/// Tests for evidence paging API endpoints.
/// </summary>
public sealed class EvidenceApiTests : IAsyncLifetime
{
    private IHost? _host;
    private HttpClient? _client;
    private Mock<IEvidenceChunkRepository>? _mockChunkRepository;
    private Mock<IEvidenceChunker>? _mockChunker;

    public async Task InitializeAsync()
    {
        _mockChunkRepository = new Mock<IEvidenceChunkRepository>();
        _mockChunker = new Mock<IEvidenceChunker>();

        _host = await new HostBuilder()
            .ConfigureWebHost(webBuilder =>
            {
                webBuilder
                    .UseTestServer()
                    .ConfigureServices(services =>
                    {
                        services.AddRouting();
                        services.AddLogging();
                        services.AddSingleton(_mockChunkRepository.Object);
                        services.AddSingleton(_mockChunker.Object);
                        // Add mock IProvcacheService to satisfy the main endpoints
                        services.AddSingleton(Mock.Of<IProvcacheService>());
                    })
                    .Configure(app =>
                    {
                        app.UseRouting();
                        app.UseEndpoints(endpoints =>
                        {
                            endpoints.MapProvcacheEndpoints();
                        });
                    });
            })
            .StartAsync();

        _client = _host.GetTestClient();
    }

    public async Task DisposeAsync()
    {
        _client?.Dispose();
        if (_host != null)
        {
            await _host.StopAsync();
            _host.Dispose();
        }
    }

    [Fact]
    public async Task GetEvidenceChunks_ReturnsChunksWithPagination()
    {
        // Arrange
        var proofRoot = "sha256:abc123";
        var manifest = new ChunkManifest
        {
            ProofRoot = proofRoot,
            TotalChunks = 15,
            TotalSize = 15000,
            Chunks = [],
            GeneratedAt = DateTimeOffset.UtcNow
        };

        var chunks = new List<EvidenceChunk>
        {
            CreateChunk(proofRoot, 0, 1000),
            CreateChunk(proofRoot, 1, 1000),
            CreateChunk(proofRoot, 2, 1000)
        };

        _mockChunkRepository!.Setup(x => x.GetManifestAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _mockChunkRepository.Setup(x => x.GetChunkRangeAsync(proofRoot, 0, 10, It.IsAny<CancellationToken>()))
            .ReturnsAsync(chunks);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);
        var result = await response.Content.ReadFromJsonAsync<ProofEvidenceResponse>();
        result.Should().NotBeNull();
        result!.TotalChunks.Should().Be(15);
        result.Chunks.Should().HaveCount(3);
        result.HasMore.Should().BeTrue();
        result.NextCursor.Should().Be("10");
    }

    [Fact]
    public async Task GetEvidenceChunks_WithOffset_ReturnsPaginatedResults()
    {
        // Arrange
        var proofRoot = "sha256:def456";
        var manifest = new ChunkManifest
        {
            ProofRoot = proofRoot,
            TotalChunks = 5,
            TotalSize = 5000,
            Chunks = [],
            GeneratedAt = DateTimeOffset.UtcNow
        };

        var chunks = new List<EvidenceChunk>
        {
            CreateChunk(proofRoot, 2, 1000),
            CreateChunk(proofRoot, 3, 1000),
            CreateChunk(proofRoot, 4, 1000)
        };

        _mockChunkRepository!.Setup(x => x.GetManifestAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _mockChunkRepository.Setup(x => x.GetChunkRangeAsync(proofRoot, 2, 3, It.IsAny<CancellationToken>()))
            .ReturnsAsync(chunks);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}?offset=2&limit=3");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);
        var result = await response.Content.ReadFromJsonAsync<ProofEvidenceResponse>();
        result.Should().NotBeNull();
        result!.Chunks.Should().HaveCount(3);
        result.Chunks[0].Index.Should().Be(2);
        result.HasMore.Should().BeFalse();
    }

    [Fact]
    public async Task GetEvidenceChunks_WithIncludeData_ReturnsBase64Blobs()
    {
        // Arrange
        var proofRoot = "sha256:ghi789";
        var manifest = new ChunkManifest
        {
            ProofRoot = proofRoot,
            TotalChunks = 1,
            TotalSize = 100,
            Chunks = [],
            GeneratedAt = DateTimeOffset.UtcNow
        };

        var chunks = new List<EvidenceChunk>
        {
            CreateChunk(proofRoot, 0, 100)
        };

        _mockChunkRepository!.Setup(x => x.GetManifestAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _mockChunkRepository.Setup(x => x.GetChunkRangeAsync(proofRoot, 0, 10, It.IsAny<CancellationToken>()))
            .ReturnsAsync(chunks);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}?includeData=true");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);
        var result = await response.Content.ReadFromJsonAsync<ProofEvidenceResponse>();
        result.Should().NotBeNull();
        result!.Chunks[0].Data.Should().NotBeNullOrEmpty();
    }

    [Fact]
    public async Task GetEvidenceChunks_NotFound_Returns404()
    {
        // Arrange
        var proofRoot = "sha256:notfound";
        _mockChunkRepository!.Setup(x => x.GetManifestAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync((ChunkManifest?)null);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.NotFound);
    }

    [Fact]
    public async Task GetProofManifest_ReturnsManifestWithChunkMetadata()
    {
        // Arrange
        var proofRoot = "sha256:manifest123";
        var manifest = new ChunkManifest
        {
            ProofRoot = proofRoot,
            TotalChunks = 3,
            TotalSize = 3000,
            Chunks = new List<ChunkMetadata>
            {
                new() { ChunkId = Guid.NewGuid(), Index = 0, Hash = "sha256:chunk0", Size = 1000, ContentType = "application/octet-stream" },
                new() { ChunkId = Guid.NewGuid(), Index = 1, Hash = "sha256:chunk1", Size = 1000, ContentType = "application/octet-stream" },
                new() { ChunkId = Guid.NewGuid(), Index = 2, Hash = "sha256:chunk2", Size = 1000, ContentType = "application/octet-stream" }
            },
            GeneratedAt = DateTimeOffset.UtcNow
        };

        _mockChunkRepository!.Setup(x => x.GetManifestAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}/manifest");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);
        var result = await response.Content.ReadFromJsonAsync<ProofManifestResponse>();
        result.Should().NotBeNull();
        result!.TotalChunks.Should().Be(3);
        result.TotalSize.Should().Be(3000);
        result.Chunks.Should().HaveCount(3);
    }

    [Fact]
    public async Task GetProofManifest_NotFound_Returns404()
    {
        // Arrange
        var proofRoot = "sha256:notfound";
        _mockChunkRepository!.Setup(x => x.GetManifestAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync((ChunkManifest?)null);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}/manifest");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.NotFound);
    }

    [Fact]
    public async Task GetSingleChunk_ReturnsChunkWithData()
    {
        // Arrange
        var proofRoot = "sha256:singlechunk";
        var chunk = CreateChunk(proofRoot, 5, 500);

        _mockChunkRepository!.Setup(x => x.GetChunkAsync(proofRoot, 5, It.IsAny<CancellationToken>()))
            .ReturnsAsync(chunk);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}/chunks/5");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);
        var result = await response.Content.ReadFromJsonAsync<ProofChunkResponse>();
        result.Should().NotBeNull();
        result!.Index.Should().Be(5);
        result.Size.Should().Be(500);
        result.Data.Should().NotBeNullOrEmpty();
    }

    [Fact]
    public async Task GetSingleChunk_NotFound_Returns404()
    {
        // Arrange
        var proofRoot = "sha256:notfound";
        _mockChunkRepository!.Setup(x => x.GetChunkAsync(proofRoot, 99, It.IsAny<CancellationToken>()))
            .ReturnsAsync((EvidenceChunk?)null);

        // Act
        var response = await _client!.GetAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}/chunks/99");

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.NotFound);
    }

    [Fact]
    public async Task VerifyProof_ValidChunks_ReturnsIsValidTrue()
    {
        // Arrange
        var proofRoot = "sha256:validproof";
        var chunks = new List<EvidenceChunk>
        {
            CreateChunk(proofRoot, 0, 100),
            CreateChunk(proofRoot, 1, 100)
        };

        _mockChunkRepository!.Setup(x => x.GetChunksAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(chunks);

        _mockChunker!.Setup(x => x.VerifyChunk(It.IsAny<EvidenceChunk>()))
            .Returns(true);

        _mockChunker.Setup(x => x.ComputeMerkleRoot(It.IsAny<IEnumerable<string>>()))
            .Returns(proofRoot);

        // Act
        var response = await _client!.PostAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}/verify", null);

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);
        var result = await response.Content.ReadFromJsonAsync<ProofVerificationResponse>();
        result.Should().NotBeNull();
        result!.IsValid.Should().BeTrue();
        result.ChunkResults.Should().HaveCount(2);
    }

    [Fact]
    public async Task VerifyProof_MerkleRootMismatch_ReturnsIsValidFalse()
    {
        // Arrange
        var proofRoot = "sha256:badroot";
        var chunks = new List<EvidenceChunk>
        {
            CreateChunk(proofRoot, 0, 100)
        };

        _mockChunkRepository!.Setup(x => x.GetChunksAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(chunks);

        _mockChunker!.Setup(x => x.VerifyChunk(It.IsAny<EvidenceChunk>()))
            .Returns(true);

        _mockChunker.Setup(x => x.ComputeMerkleRoot(It.IsAny<IEnumerable<string>>()))
            .Returns("sha256:differentroot");

        // Act
        var response = await _client!.PostAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}/verify", null);

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.OK);
        var result = await response.Content.ReadFromJsonAsync<ProofVerificationResponse>();
        result.Should().NotBeNull();
        result!.IsValid.Should().BeFalse();
        result.Error.Should().Contain("Merkle root mismatch");
    }

    [Fact]
    public async Task VerifyProof_NoChunks_Returns404()
    {
        // Arrange
        var proofRoot = "sha256:nochunks";
        _mockChunkRepository!.Setup(x => x.GetChunksAsync(proofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(new List<EvidenceChunk>());

        // Act
        var response = await _client!.PostAsync($"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}/verify", null);

        // Assert
        response.StatusCode.Should().Be(HttpStatusCode.NotFound);
    }

    private static EvidenceChunk CreateChunk(string proofRoot, int index, int size)
    {
        var data = new byte[size];
        Random.Shared.NextBytes(data);

        return new EvidenceChunk
        {
            ChunkId = Guid.NewGuid(),
            ProofRoot = proofRoot,
            ChunkIndex = index,
            ChunkHash = $"sha256:chunk{index}",
            Blob = data,
            BlobSize = size,
            ContentType = "application/octet-stream",
            CreatedAt = DateTimeOffset.UtcNow
        };
    }
}
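The tests above pin down the paging contract for `/v1/provcache/proofs/{proofRoot}`: `offset`/`limit` query parameters, a `HasMore` flag, and a numeric `NextCursor`. As a rough illustration of how a caller might walk that contract, here is a minimal sketch; the `ProofEvidenceResponse`/`ProofChunkResponse` members mirror the assertions above, while the helper name `FetchAllChunksAsync` and the default page size are assumptions, not part of the committed API surface.

// Sketch only: pages through the evidence endpoint exercised by EvidenceApiTests.
internal static class EvidencePagingClientSketch
{
    public static async Task<List<ProofChunkResponse>> FetchAllChunksAsync(
        HttpClient client, string proofRoot, int limit = 10, CancellationToken ct = default)
    {
        var all = new List<ProofChunkResponse>();
        var offset = 0;

        while (true)
        {
            var url = $"/v1/provcache/proofs/{Uri.EscapeDataString(proofRoot)}?offset={offset}&limit={limit}";
            var page = await client.GetFromJsonAsync<ProofEvidenceResponse>(url, ct)
                       ?? throw new InvalidOperationException("Empty paging response.");

            all.AddRange(page.Chunks);

            if (!page.HasMore)
            {
                return all;
            }

            // NextCursor is asserted above to carry the next offset ("10" after a first page of 10).
            offset = int.Parse(page.NextCursor!);
        }
    }
}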
@@ -0,0 +1,289 @@
using FluentAssertions;
using StellaOps.Provcache;
using Xunit;

namespace StellaOps.Provcache.Tests;

/// <summary>
/// Tests for <see cref="EvidenceChunker"/>.
/// </summary>
public sealed class EvidenceChunkerTests
{
    private readonly ProvcacheOptions _options;
    private readonly EvidenceChunker _chunker;

    public EvidenceChunkerTests()
    {
        _options = new ProvcacheOptions { ChunkSize = 64 }; // Small for testing
        _chunker = new EvidenceChunker(_options);
    }

    [Fact]
    public async Task ChunkAsync_ShouldSplitEvidenceIntoMultipleChunks_WhenLargerThanChunkSize()
    {
        // Arrange
        var evidence = new byte[200];
        Random.Shared.NextBytes(evidence);
        const string contentType = "application/octet-stream";

        // Act
        var result = await _chunker.ChunkAsync(evidence, contentType);

        // Assert
        result.Should().NotBeNull();
        result.Chunks.Should().HaveCount(4); // ceil(200/64) = 4
        result.TotalSize.Should().Be(200);
        result.ProofRoot.Should().StartWith("sha256:");

        // Verify chunk ordering
        for (var i = 0; i < result.Chunks.Count; i++)
        {
            result.Chunks[i].ChunkIndex.Should().Be(i);
            result.Chunks[i].ContentType.Should().Be(contentType);
            result.Chunks[i].ProofRoot.Should().Be(result.ProofRoot);
        }
    }

    [Fact]
    public async Task ChunkAsync_ShouldCreateSingleChunk_WhenSmallerThanChunkSize()
    {
        // Arrange
        var evidence = new byte[32];
        Random.Shared.NextBytes(evidence);
        const string contentType = "application/json";

        // Act
        var result = await _chunker.ChunkAsync(evidence, contentType);

        // Assert
        result.Should().NotBeNull();
        result.Chunks.Should().HaveCount(1);
        result.TotalSize.Should().Be(32);
        result.Chunks[0].BlobSize.Should().Be(32);
    }

    [Fact]
    public async Task ChunkAsync_ShouldHandleEmptyEvidence()
    {
        // Arrange
        var evidence = Array.Empty<byte>();
        const string contentType = "application/octet-stream";

        // Act
        var result = await _chunker.ChunkAsync(evidence, contentType);

        // Assert
        result.Should().NotBeNull();
        result.Chunks.Should().BeEmpty();
        result.TotalSize.Should().Be(0);
    }

    [Fact]
    public async Task ChunkAsync_ShouldProduceUniqueHashForEachChunk()
    {
        // Arrange - create evidence with distinct bytes per chunk
        var evidence = new byte[128];
        for (var i = 0; i < 64; i++) evidence[i] = 0xAA;
        for (var i = 64; i < 128; i++) evidence[i] = 0xBB;
        const string contentType = "application/octet-stream";

        // Act
        var result = await _chunker.ChunkAsync(evidence, contentType);

        // Assert
        result.Chunks.Should().HaveCount(2);
        result.Chunks[0].ChunkHash.Should().NotBe(result.Chunks[1].ChunkHash);
    }

    [Fact]
    public async Task ReassembleAsync_ShouldRecoverOriginalEvidence()
    {
        // Arrange
        var original = new byte[200];
        Random.Shared.NextBytes(original);
        const string contentType = "application/octet-stream";

        var chunked = await _chunker.ChunkAsync(original, contentType);

        // Act
        var reassembled = await _chunker.ReassembleAsync(chunked.Chunks, chunked.ProofRoot);

        // Assert
        reassembled.Should().BeEquivalentTo(original);
    }

    [Fact]
    public async Task ReassembleAsync_ShouldThrow_WhenMerkleRootMismatch()
    {
        // Arrange
        var evidence = new byte[100];
        Random.Shared.NextBytes(evidence);
        const string contentType = "application/octet-stream";

        var chunked = await _chunker.ChunkAsync(evidence, contentType);

        // Act & Assert
        var act = () => _chunker.ReassembleAsync(chunked.Chunks, "sha256:invalid_root");
        await act.Should().ThrowAsync<InvalidOperationException>()
            .WithMessage("*Merkle root mismatch*");
    }

    [Fact]
    public async Task ReassembleAsync_ShouldThrow_WhenChunkCorrupted()
    {
        // Arrange
        var evidence = new byte[100];
        Random.Shared.NextBytes(evidence);
        const string contentType = "application/octet-stream";

        var chunked = await _chunker.ChunkAsync(evidence, contentType);

        // Corrupt first chunk
        var corruptedChunks = chunked.Chunks
            .Select((c, i) => i == 0
                ? c with { Blob = new byte[c.BlobSize], ChunkHash = c.ChunkHash } // same hash but different blob
                : c)
            .ToList();

        // Act & Assert
        var act = () => _chunker.ReassembleAsync(corruptedChunks, chunked.ProofRoot);
        await act.Should().ThrowAsync<InvalidOperationException>()
            .WithMessage("*verification failed*");
    }

    [Fact]
    public void VerifyChunk_ShouldReturnTrue_WhenChunkValid()
    {
        // Arrange
        var data = new byte[32];
        Random.Shared.NextBytes(data);
        var hash = ComputeHash(data);

        var chunk = new EvidenceChunk
        {
            ChunkId = Guid.NewGuid(),
            ProofRoot = "sha256:test",
            ChunkIndex = 0,
            ChunkHash = hash,
            Blob = data,
            BlobSize = data.Length,
            ContentType = "application/octet-stream",
            CreatedAt = DateTimeOffset.UtcNow
        };

        // Act & Assert
        _chunker.VerifyChunk(chunk).Should().BeTrue();
    }

    [Fact]
    public void VerifyChunk_ShouldReturnFalse_WhenHashMismatch()
    {
        // Arrange
        var chunk = new EvidenceChunk
        {
            ChunkId = Guid.NewGuid(),
            ProofRoot = "sha256:test",
            ChunkIndex = 0,
            ChunkHash = "sha256:wrong_hash",
            Blob = new byte[32],
            BlobSize = 32,
            ContentType = "application/octet-stream",
            CreatedAt = DateTimeOffset.UtcNow
        };

        // Act & Assert
        _chunker.VerifyChunk(chunk).Should().BeFalse();
    }

    [Fact]
    public void ComputeMerkleRoot_ShouldReturnSameResult_ForSameInput()
    {
        // Arrange
        var hashes = new[] { "sha256:aabb", "sha256:ccdd", "sha256:eeff", "sha256:1122" };

        // Act
        var root1 = _chunker.ComputeMerkleRoot(hashes);
        var root2 = _chunker.ComputeMerkleRoot(hashes);

        // Assert
        root1.Should().Be(root2);
        root1.Should().StartWith("sha256:");
    }

    [Fact]
    public void ComputeMerkleRoot_ShouldHandleSingleHash()
    {
        // Arrange
        var hashes = new[] { "sha256:aabbccdd" };

        // Act
        var root = _chunker.ComputeMerkleRoot(hashes);

        // Assert
        root.Should().Be("sha256:aabbccdd");
    }

    [Fact]
    public void ComputeMerkleRoot_ShouldHandleOddNumberOfHashes()
    {
        // Arrange
        var hashes = new[] { "sha256:aabb", "sha256:ccdd", "sha256:eeff" };

        // Act
        var root = _chunker.ComputeMerkleRoot(hashes);

        // Assert
        root.Should().NotBeNullOrEmpty();
        root.Should().StartWith("sha256:");
    }

    [Fact]
    public async Task ChunkStreamAsync_ShouldYieldChunksInOrder()
    {
        // Arrange
        var evidence = new byte[200];
        Random.Shared.NextBytes(evidence);
        using var stream = new MemoryStream(evidence);
        const string contentType = "application/octet-stream";

        // Act
        var chunks = new List<EvidenceChunk>();
        await foreach (var chunk in _chunker.ChunkStreamAsync(stream, contentType))
        {
            chunks.Add(chunk);
        }

        // Assert
        chunks.Should().HaveCount(4);
        for (var i = 0; i < chunks.Count; i++)
        {
            chunks[i].ChunkIndex.Should().Be(i);
        }
    }

    [Fact]
    public async Task Roundtrip_ShouldPreserveDataIntegrity()
    {
        // Arrange - use realistic chunk size
        var options = new ProvcacheOptions { ChunkSize = 1024 };
        var chunker = new EvidenceChunker(options);

        var original = new byte[5000]; // ~5 chunks
        Random.Shared.NextBytes(original);
        const string contentType = "application/octet-stream";

        // Act
        var chunked = await chunker.ChunkAsync(original, contentType);
        var reassembled = await chunker.ReassembleAsync(chunked.Chunks, chunked.ProofRoot);

        // Assert
        reassembled.Should().BeEquivalentTo(original);
        chunked.Chunks.Should().HaveCount(5);
    }

    private static string ComputeHash(byte[] data)
    {
        var hash = System.Security.Cryptography.SHA256.HashData(data);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }
}
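The Merkle-root tests above only constrain observable properties: the root is deterministic, carries a "sha256:" prefix, a single hash is returned verbatim, and odd-length inputs are accepted. The following is one construction that satisfies those properties; the pairwise concatenation and last-node duplication are assumptions for illustration, not EvidenceChunker's actual implementation.

// Sketch only: a Merkle-root construction consistent with the behavior asserted in EvidenceChunkerTests.
internal static class MerkleRootSketch
{
    public static string ComputeMerkleRoot(IEnumerable<string> chunkHashes)
    {
        var level = chunkHashes.ToList();
        if (level.Count == 0) throw new ArgumentException("At least one hash is required.", nameof(chunkHashes));

        while (level.Count > 1)
        {
            var next = new List<string>();
            for (var i = 0; i < level.Count; i += 2)
            {
                // Duplicate the last hash when the level has an odd number of nodes (assumed strategy).
                var left = level[i];
                var right = i + 1 < level.Count ? level[i + 1] : level[i];
                var digest = System.Security.Cryptography.SHA256.HashData(
                    System.Text.Encoding.UTF8.GetBytes(left + right));
                next.Add($"sha256:{Convert.ToHexString(digest).ToLowerInvariant()}");
            }
            level = next;
        }

        // A single input hash falls through untouched, matching ComputeMerkleRoot_ShouldHandleSingleHash.
        return level[0];
    }
}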
@@ -0,0 +1,440 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;

namespace StellaOps.Provcache.Tests;

public sealed class LazyFetchTests
{
    private readonly Mock<IEvidenceChunkRepository> _repositoryMock;
    private readonly LazyFetchOrchestrator _orchestrator;

    public LazyFetchTests()
    {
        _repositoryMock = new Mock<IEvidenceChunkRepository>();
        _orchestrator = new LazyFetchOrchestrator(
            _repositoryMock.Object,
            NullLogger<LazyFetchOrchestrator>.Instance);
    }

    [Fact]
    public async Task FetchAndStoreAsync_WhenFetcherNotAvailable_ReturnsFailure()
    {
        // Arrange
        var fetcherMock = new Mock<ILazyEvidenceFetcher>();
        fetcherMock.Setup(f => f.IsAvailableAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(false);
        fetcherMock.SetupGet(f => f.FetcherType).Returns("mock");

        // Act
        var result = await _orchestrator.FetchAndStoreAsync("test-root", fetcherMock.Object);

        // Assert
        result.Success.Should().BeFalse();
        result.Errors.Should().Contain(e => e.Contains("not available"));
    }

    [Fact]
    public async Task FetchAndStoreAsync_WhenNoManifestFound_ReturnsFailure()
    {
        // Arrange
        var fetcherMock = new Mock<ILazyEvidenceFetcher>();
        fetcherMock.Setup(f => f.IsAvailableAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        fetcherMock.Setup(f => f.FetchManifestAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((ChunkManifest?)null);
        fetcherMock.SetupGet(f => f.FetcherType).Returns("mock");

        _repositoryMock.Setup(r => r.GetManifestAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync((ChunkManifest?)null);

        // Act
        var result = await _orchestrator.FetchAndStoreAsync("test-root", fetcherMock.Object);

        // Assert
        result.Success.Should().BeFalse();
        result.Errors.Should().Contain(e => e.Contains("No manifest found"));
    }

    [Fact]
    public async Task FetchAndStoreAsync_WhenAllChunksPresent_ReturnsSuccessWithZeroFetched()
    {
        // Arrange
        var manifest = CreateTestManifest("test-root", 3);
        var existingChunks = CreateTestEvidenceChunks("test-root", 3);

        var fetcherMock = new Mock<ILazyEvidenceFetcher>();
        fetcherMock.Setup(f => f.IsAvailableAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        fetcherMock.SetupGet(f => f.FetcherType).Returns("mock");

        _repositoryMock.Setup(r => r.GetManifestAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _repositoryMock.Setup(r => r.GetChunksAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(existingChunks);

        // Act
        var result = await _orchestrator.FetchAndStoreAsync("test-root", fetcherMock.Object);

        // Assert
        result.Success.Should().BeTrue();
        result.ChunksFetched.Should().Be(0);
        result.BytesFetched.Should().Be(0);
    }

    [Fact]
    public async Task FetchAndStoreAsync_FetchesMissingChunks()
    {
        // Arrange
        var manifest = CreateTestManifest("test-root", 3);
        var existingChunks = CreateTestEvidenceChunks("test-root", 1); // Only have 1 chunk
        var missingChunks = new List<FetchedChunk>
        {
            CreateTestFetchedChunk(1),
            CreateTestFetchedChunk(2)
        };

        var fetcherMock = new Mock<ILazyEvidenceFetcher>();
        fetcherMock.Setup(f => f.IsAvailableAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        fetcherMock.SetupGet(f => f.FetcherType).Returns("mock");
        fetcherMock.Setup(f => f.FetchRemainingChunksAsync(
                "test-root",
                It.IsAny<ChunkManifest>(),
                It.IsAny<IReadOnlySet<int>>(),
                It.IsAny<CancellationToken>()))
            .Returns(missingChunks.ToAsyncEnumerable());

        _repositoryMock.Setup(r => r.GetManifestAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _repositoryMock.Setup(r => r.GetChunksAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(existingChunks);

        // Act
        var result = await _orchestrator.FetchAndStoreAsync("test-root", fetcherMock.Object);

        // Assert
        result.Success.Should().BeTrue();
        result.ChunksFetched.Should().Be(2);
        result.BytesFetched.Should().Be(missingChunks.Sum(c => c.Data.Length));

        _repositoryMock.Verify(r => r.StoreChunksAsync(
            "test-root",
            It.IsAny<IEnumerable<EvidenceChunk>>(),
            It.IsAny<CancellationToken>()), Times.AtLeastOnce);
    }

    [Fact]
    public async Task FetchAndStoreAsync_WithVerification_RejectsCorruptedChunks()
    {
        // Arrange
        var manifest = CreateTestManifest("test-root", 2);
        var existingChunks = new List<EvidenceChunk>(); // No existing chunks

        var corruptedChunk = new FetchedChunk
        {
            Index = 0,
            Data = [0x00, 0x01, 0x02],
            Hash = "invalid_hash_that_does_not_match"
        };

        var fetcherMock = new Mock<ILazyEvidenceFetcher>();
        fetcherMock.Setup(f => f.IsAvailableAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        fetcherMock.SetupGet(f => f.FetcherType).Returns("mock");
        fetcherMock.Setup(f => f.FetchRemainingChunksAsync(
                "test-root",
                It.IsAny<ChunkManifest>(),
                It.IsAny<IReadOnlySet<int>>(),
                It.IsAny<CancellationToken>()))
            .Returns(new[] { corruptedChunk }.ToAsyncEnumerable());

        _repositoryMock.Setup(r => r.GetManifestAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _repositoryMock.Setup(r => r.GetChunksAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(existingChunks);

        var options = new LazyFetchOptions { VerifyOnFetch = true };

        // Act
        var result = await _orchestrator.FetchAndStoreAsync("test-root", fetcherMock.Object, options);

        // Assert
        result.Success.Should().BeTrue(); // Still succeeds by default (skips invalid)
        result.ChunksFailedVerification.Should().Be(1);
        result.FailedIndices.Should().Contain(0);
        result.ChunksFetched.Should().Be(0); // Nothing stored
    }

    [Fact]
    public async Task FetchAndStoreAsync_WithFailOnVerificationError_AbortsOnCorruption()
    {
        // Arrange
        var manifest = CreateTestManifest("test-root", 2);
        var existingChunks = new List<EvidenceChunk>();

        var corruptedChunk = new FetchedChunk
        {
            Index = 0,
            Data = [0x00, 0x01, 0x02],
            Hash = "invalid_hash"
        };

        var fetcherMock = new Mock<ILazyEvidenceFetcher>();
        fetcherMock.Setup(f => f.IsAvailableAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        fetcherMock.SetupGet(f => f.FetcherType).Returns("mock");
        fetcherMock.Setup(f => f.FetchRemainingChunksAsync(
                "test-root",
                It.IsAny<ChunkManifest>(),
                It.IsAny<IReadOnlySet<int>>(),
                It.IsAny<CancellationToken>()))
            .Returns(new[] { corruptedChunk }.ToAsyncEnumerable());

        _repositoryMock.Setup(r => r.GetManifestAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _repositoryMock.Setup(r => r.GetChunksAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(existingChunks);

        var options = new LazyFetchOptions
        {
            VerifyOnFetch = true,
            FailOnVerificationError = true
        };

        // Act
        var result = await _orchestrator.FetchAndStoreAsync("test-root", fetcherMock.Object, options);

        // Assert
        result.Success.Should().BeFalse();
        result.ChunksFailedVerification.Should().BeGreaterThanOrEqualTo(1);
    }

    [Fact]
    public async Task FetchAndStoreAsync_RespectsMaxChunksLimit()
    {
        // Arrange
        var manifest = CreateTestManifest("test-root", 10);
        var existingChunks = new List<EvidenceChunk>();
        var allChunks = Enumerable.Range(0, 10)
            .Select(CreateTestFetchedChunk)
            .ToList();

        var fetcherMock = new Mock<ILazyEvidenceFetcher>();
        fetcherMock.Setup(f => f.IsAvailableAsync(It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);
        fetcherMock.SetupGet(f => f.FetcherType).Returns("mock");
        fetcherMock.Setup(f => f.FetchRemainingChunksAsync(
                "test-root",
                It.IsAny<ChunkManifest>(),
                It.IsAny<IReadOnlySet<int>>(),
                It.IsAny<CancellationToken>()))
            .Returns(allChunks.ToAsyncEnumerable());

        _repositoryMock.Setup(r => r.GetManifestAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(manifest);
        _repositoryMock.Setup(r => r.GetChunksAsync("test-root", It.IsAny<CancellationToken>()))
            .ReturnsAsync(existingChunks);

        var options = new LazyFetchOptions
        {
            VerifyOnFetch = false,
            MaxChunksToFetch = 3
        };

        // Act
        var result = await _orchestrator.FetchAndStoreAsync("test-root", fetcherMock.Object, options);

        // Assert
        result.Success.Should().BeTrue();
        result.ChunksFetched.Should().Be(3);
    }

    [Fact]
    public void FileChunkFetcher_FetcherType_ReturnsFile()
    {
        // Arrange
        var tempDir = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
        var fetcher = new FileChunkFetcher(tempDir, NullLogger<FileChunkFetcher>.Instance);

        // Act & Assert
        fetcher.FetcherType.Should().Be("file");
    }

    [Fact]
    public async Task FileChunkFetcher_IsAvailableAsync_ReturnsTrueWhenDirectoryExists()
    {
        // Arrange
        var tempDir = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
        Directory.CreateDirectory(tempDir);

        try
        {
            var fetcher = new FileChunkFetcher(tempDir, NullLogger<FileChunkFetcher>.Instance);

            // Act
            var result = await fetcher.IsAvailableAsync();

            // Assert
            result.Should().BeTrue();
        }
        finally
        {
            Directory.Delete(tempDir, true);
        }
    }

    [Fact]
    public async Task FileChunkFetcher_IsAvailableAsync_ReturnsFalseWhenDirectoryMissing()
    {
        // Arrange
        var tempDir = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
        var fetcher = new FileChunkFetcher(tempDir, NullLogger<FileChunkFetcher>.Instance);

        // Act
        var result = await fetcher.IsAvailableAsync();

        // Assert
        result.Should().BeFalse();
    }

    [Fact]
    public async Task FileChunkFetcher_FetchChunkAsync_ReturnsNullWhenChunkNotFound()
    {
        // Arrange
        var tempDir = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
        Directory.CreateDirectory(tempDir);

        try
        {
            var fetcher = new FileChunkFetcher(tempDir, NullLogger<FileChunkFetcher>.Instance);

            // Act
            var result = await fetcher.FetchChunkAsync("test-root", 0);

            // Assert
            result.Should().BeNull();
        }
        finally
        {
            Directory.Delete(tempDir, true);
        }
    }

    [Fact]
    public void HttpChunkFetcher_FetcherType_ReturnsHttp()
    {
        // Arrange
        var httpClient = new HttpClient { BaseAddress = new Uri("http://localhost") };
        var fetcher = new HttpChunkFetcher(httpClient, ownsClient: false, NullLogger<HttpChunkFetcher>.Instance);

        // Act & Assert
        fetcher.FetcherType.Should().Be("http");
    }

    [Fact]
    public async Task HttpChunkFetcher_IsAvailableAsync_ReturnsFalseWhenHostUnreachable()
    {
        // Arrange - use a non-routable IP to ensure connection failure
        var httpClient = new HttpClient
        {
            BaseAddress = new Uri("http://192.0.2.1:9999"),
            Timeout = TimeSpan.FromMilliseconds(100) // Short timeout for test speed
        };
        var fetcher = new HttpChunkFetcher(httpClient, ownsClient: false, NullLogger<HttpChunkFetcher>.Instance);

        // Act
        var result = await fetcher.IsAvailableAsync();

        // Assert
        result.Should().BeFalse();
    }

    // Helper methods

    private static ChunkManifest CreateTestManifest(string proofRoot, int chunkCount)
    {
        var chunks = Enumerable.Range(0, chunkCount)
            .Select(i => new ChunkMetadata
            {
                ChunkId = Guid.NewGuid(),
                Index = i,
                Hash = ComputeTestHash(i),
                Size = 100 + i,
                ContentType = "application/octet-stream"
            })
            .ToList();

        return new ChunkManifest
        {
            ProofRoot = proofRoot,
            TotalChunks = chunkCount,
            TotalSize = chunks.Sum(c => c.Size),
            Chunks = chunks,
            GeneratedAt = DateTimeOffset.UtcNow
        };
    }

    private static List<EvidenceChunk> CreateTestEvidenceChunks(string proofRoot, int count)
    {
        return Enumerable.Range(0, count)
            .Select(i =>
            {
                var data = CreateTestData(i);
                return new EvidenceChunk
                {
                    ChunkId = Guid.NewGuid(),
                    ProofRoot = proofRoot,
                    ChunkIndex = i,
                    ChunkHash = ComputeActualHash(data),
                    Blob = data,
                    BlobSize = data.Length,
                    ContentType = "application/octet-stream",
                    CreatedAt = DateTimeOffset.UtcNow
                };
            })
            .ToList();
    }

    private static FetchedChunk CreateTestFetchedChunk(int index)
    {
        var data = CreateTestData(index);
        return new FetchedChunk
        {
            Index = index,
            Data = data,
            Hash = ComputeActualHash(data)
        };
    }

    private static byte[] CreateTestData(int index)
    {
        return Enumerable.Range(0, 100 + index)
            .Select(i => (byte)(i % 256))
            .ToArray();
    }

    private static string ComputeTestHash(int index)
    {
        var data = CreateTestData(index);
        return ComputeActualHash(data);
    }

    private static string ComputeActualHash(byte[] data)
    {
        return Convert.ToHexString(System.Security.Cryptography.SHA256.HashData(data)).ToLowerInvariant();
    }
}

// Extension method for async enumerable from list
internal static class AsyncEnumerableExtensions
{
    public static async IAsyncEnumerable<T> ToAsyncEnumerable<T>(this IEnumerable<T> source)
    {
        foreach (var item in source)
        {
            yield return item;
            await Task.Yield();
        }
    }
}
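For orientation, the production wiring these tests imply looks roughly like the following. The constructor shapes, the FetchAndStoreAsync signature, and the LazyFetchOptions properties are taken from the usages above; the chunk directory, the NullLogger instances, and the helper name are illustrative assumptions only.

// Sketch only: wiring implied by LazyFetchTests, not the committed composition root.
internal static class LazyFetchUsageSketch
{
    public static async Task<int> HydrateAsync(IEvidenceChunkRepository repository, string proofRoot)
    {
        var orchestrator = new LazyFetchOrchestrator(
            repository,
            NullLogger<LazyFetchOrchestrator>.Instance);

        // A file-backed fetcher pointed at a local chunk drop directory (example path).
        var fetcher = new FileChunkFetcher(
            Path.Combine(Path.GetTempPath(), "provcache-chunks"),
            NullLogger<FileChunkFetcher>.Instance);

        var options = new LazyFetchOptions
        {
            VerifyOnFetch = true,            // re-hash each fetched chunk before storing
            FailOnVerificationError = false, // skip corrupt chunks instead of aborting
            MaxChunksToFetch = 100
        };

        var result = await orchestrator.FetchAndStoreAsync(proofRoot, fetcher, options);
        return result.Success ? result.ChunksFetched : 0;
    }
}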
@@ -0,0 +1,467 @@
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using StellaOps.Cryptography;
using StellaOps.Provenance.Attestation;
using System.Text.Json;
using Xunit;

namespace StellaOps.Provcache.Tests;

/// <summary>
/// Tests for <see cref="MinimalProofExporter"/> covering all density levels.
/// </summary>
public sealed class MinimalProofExporterTests
{
    private readonly Mock<IProvcacheService> _mockService;
    private readonly Mock<IEvidenceChunkRepository> _mockChunkRepo;
    private readonly FakeTimeProvider _timeProvider;
    private readonly MinimalProofExporter _exporter;

    // Test data
    private readonly ProvcacheEntry _testEntry;
    private readonly ChunkManifest _testManifest;
    private readonly IReadOnlyList<EvidenceChunk> _testChunks;

    // Same options as the exporter uses for round-trip
    private static readonly JsonSerializerOptions s_jsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false,
        DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull,
        PropertyNameCaseInsensitive = true
    };

    public MinimalProofExporterTests()
    {
        _mockService = new Mock<IProvcacheService>();
        _mockChunkRepo = new Mock<IEvidenceChunkRepository>();
        _timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero));

        _exporter = new MinimalProofExporter(
            _mockService.Object,
            _mockChunkRepo.Object,
            signer: null,
            _timeProvider,
            NullLogger<MinimalProofExporter>.Instance);

        // Create test data
        var proofRoot = "sha256:abc123def456";
        var veriKey = "sha256:verikey789";

        _testEntry = new ProvcacheEntry
        {
            VeriKey = veriKey,
            Decision = new DecisionDigest
            {
                DigestVersion = "v1",
                VeriKey = veriKey,
                VerdictHash = "sha256:verdict123",
                ProofRoot = proofRoot,
                ReplaySeed = new ReplaySeed
                {
                    FeedIds = ["cve-2024", "ghsa-2024"],
                    RuleIds = ["default-policy-v1"]
                },
                CreatedAt = _timeProvider.GetUtcNow(),
                ExpiresAt = _timeProvider.GetUtcNow().AddHours(24),
                TrustScore = 85
            },
            PolicyHash = "sha256:policy123",
            SignerSetHash = "sha256:signers123",
            FeedEpoch = "2025-W01",
            CreatedAt = _timeProvider.GetUtcNow(),
            ExpiresAt = _timeProvider.GetUtcNow().AddHours(24)
        };

        // Create 5 chunks
        _testChunks = Enumerable.Range(0, 5)
            .Select(i =>
            {
                var data = new byte[1024];
                Random.Shared.NextBytes(data);
                return new EvidenceChunk
                {
                    ChunkId = Guid.NewGuid(),
                    ProofRoot = proofRoot,
                    ChunkIndex = i,
                    ChunkHash = $"sha256:{Convert.ToHexStringLower(System.Security.Cryptography.SHA256.HashData(data))}",
                    Blob = data,
                    BlobSize = 1024,
                    ContentType = "application/octet-stream",
                    CreatedAt = _timeProvider.GetUtcNow()
                };
            })
            .ToList();

        _testManifest = new ChunkManifest
        {
            ProofRoot = proofRoot,
            TotalChunks = 5,
            TotalSize = 5 * 1024,
            Chunks = _testChunks.Select(c => new ChunkMetadata
            {
                ChunkId = c.ChunkId,
                Index = c.ChunkIndex,
                Hash = c.ChunkHash,
                Size = c.BlobSize,
                ContentType = c.ContentType
            }).ToList(),
            GeneratedAt = _timeProvider.GetUtcNow()
        };
    }

    #region Export Tests

    [Fact]
    public async Task ExportAsync_LiteDensity_ReturnsDigestAndManifestOnly()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Lite };

        // Act
        var bundle = await _exporter.ExportAsync(_testEntry.VeriKey, options);

        // Assert
        bundle.Should().NotBeNull();
        bundle.Density.Should().Be(ProofDensity.Lite);
        bundle.Digest.Should().Be(_testEntry.Decision);
        bundle.Manifest.Should().Be(_testManifest);
        bundle.Chunks.Should().BeEmpty();
        bundle.Signature.Should().BeNull();
    }

    [Fact]
    public async Task ExportAsync_StandardDensity_ReturnsFirstNChunks()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions
        {
            Density = ProofDensity.Standard,
            StandardDensityChunkCount = 3
        };

        // Act
        var bundle = await _exporter.ExportAsync(_testEntry.VeriKey, options);

        // Assert
        bundle.Should().NotBeNull();
        bundle.Density.Should().Be(ProofDensity.Standard);
        bundle.Chunks.Should().HaveCount(3);
        bundle.Chunks.Select(c => c.Index).Should().BeEquivalentTo([0, 1, 2]);

        // Verify chunk data is base64 encoded
        foreach (var chunk in bundle.Chunks)
        {
            var decoded = Convert.FromBase64String(chunk.Data);
            decoded.Should().HaveCount(chunk.Size);
        }
    }

    [Fact]
    public async Task ExportAsync_StrictDensity_ReturnsAllChunks()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Strict };

        // Act
        var bundle = await _exporter.ExportAsync(_testEntry.VeriKey, options);

        // Assert
        bundle.Should().NotBeNull();
        bundle.Density.Should().Be(ProofDensity.Strict);
        bundle.Chunks.Should().HaveCount(5);
        bundle.Chunks.Select(c => c.Index).Should().BeEquivalentTo([0, 1, 2, 3, 4]);
    }

    [Fact]
    public async Task ExportAsync_NotFound_ThrowsException()
    {
        // Arrange
        _mockService.Setup(s => s.GetAsync(It.IsAny<string>(), false, It.IsAny<CancellationToken>()))
            .ReturnsAsync(ProvcacheServiceResult.Miss(0));
        var options = new MinimalProofExportOptions { Density = ProofDensity.Lite };

        // Act & Assert
        await Assert.ThrowsAsync<InvalidOperationException>(() =>
            _exporter.ExportAsync("sha256:notfound", options));
    }

    [Fact]
    public async Task ExportAsJsonAsync_ReturnsValidJson()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Lite };

        // Act
        var jsonBytes = await _exporter.ExportAsJsonAsync(_testEntry.VeriKey, options);

        // Assert
        jsonBytes.Should().NotBeEmpty();
        var bundle = JsonSerializer.Deserialize<MinimalProofBundle>(jsonBytes, s_jsonOptions);
        bundle.Should().NotBeNull();
        bundle!.BundleVersion.Should().Be("v1");
    }

    [Fact]
    public async Task ExportToStreamAsync_WritesToStream()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Lite };
        using var stream = new MemoryStream();

        // Act
        await _exporter.ExportToStreamAsync(_testEntry.VeriKey, options, stream);

        // Assert
        stream.Length.Should().BeGreaterThan(0);
        stream.Position = 0;
        var bundle = await JsonSerializer.DeserializeAsync<MinimalProofBundle>(stream, s_jsonOptions);
        bundle.Should().NotBeNull();
    }

    #endregion

    #region Import Tests

    [Fact]
    public async Task ImportAsync_ValidBundle_StoresChunks()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Standard, StandardDensityChunkCount = 3 };
        var bundle = await _exporter.ExportAsync(_testEntry.VeriKey, options);

        _mockChunkRepo.Setup(r => r.StoreChunksAsync(
                It.IsAny<string>(),
                It.IsAny<IEnumerable<EvidenceChunk>>(),
                It.IsAny<CancellationToken>()))
            .Returns(Task.CompletedTask);

        // Act
        var result = await _exporter.ImportAsync(bundle);

        // Assert
        result.Success.Should().BeTrue();
        result.ChunksImported.Should().Be(3);
        result.ChunksPending.Should().Be(2);
        result.Verification.DigestValid.Should().BeTrue();
        result.Verification.MerkleRootValid.Should().BeTrue();
        result.Verification.ChunksValid.Should().BeTrue();
    }

    [Fact]
    public async Task ImportFromJsonAsync_ValidJson_ImportsSuccessfully()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Lite };
        var jsonBytes = await _exporter.ExportAsJsonAsync(_testEntry.VeriKey, options);

        // Act
        var result = await _exporter.ImportFromJsonAsync(jsonBytes);

        // Assert
        result.Success.Should().BeTrue();
        result.ChunksImported.Should().Be(0); // Lite has no chunks
    }

    #endregion

    #region Verify Tests

    [Fact]
    public async Task VerifyAsync_ValidBundle_ReturnsValid()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Standard, StandardDensityChunkCount = 2 };
        var bundle = await _exporter.ExportAsync(_testEntry.VeriKey, options);

        // Act
        var verification = await _exporter.VerifyAsync(bundle);

        // Assert
        verification.DigestValid.Should().BeTrue();
        verification.MerkleRootValid.Should().BeTrue();
        verification.ChunksValid.Should().BeTrue();
        verification.SignatureValid.Should().BeNull();
        verification.FailedChunkIndices.Should().BeEmpty();
    }

    [Fact]
    public async Task VerifyAsync_CorruptedChunk_ReportsFailure()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Standard, StandardDensityChunkCount = 2 };
        var bundle = await _exporter.ExportAsync(_testEntry.VeriKey, options);

        // Corrupt a chunk
        var corruptedChunks = bundle.Chunks.ToList();
        corruptedChunks[0] = corruptedChunks[0] with { Data = Convert.ToBase64String(new byte[1024]) };
        var corruptedBundle = bundle with { Chunks = corruptedChunks };

        // Act
        var verification = await _exporter.VerifyAsync(corruptedBundle);

        // Assert
        verification.ChunksValid.Should().BeFalse();
        verification.FailedChunkIndices.Should().Contain(0);
    }

    [Fact]
    public async Task VerifyAsync_InvalidDigest_ReportsFailure()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions { Density = ProofDensity.Lite };
        var bundle = await _exporter.ExportAsync(_testEntry.VeriKey, options);

        // Corrupt the digest
        var invalidDigest = bundle.Digest with { TrustScore = -10 }; // Invalid trust score
        var invalidBundle = bundle with { Digest = invalidDigest };

        // Act
        var verification = await _exporter.VerifyAsync(invalidBundle);

        // Assert
        verification.DigestValid.Should().BeFalse();
    }

    #endregion

    #region EstimateSize Tests

    [Fact]
    public async Task EstimateExportSizeAsync_LiteDensity_ReturnsBaseSize()
    {
        // Arrange
        SetupMocks();

        // Act
        var size = await _exporter.EstimateExportSizeAsync(_testEntry.VeriKey, ProofDensity.Lite);

        // Assert
        size.Should().Be(2048); // Base size
    }

    [Fact]
    public async Task EstimateExportSizeAsync_StrictDensity_ReturnsLargerSize()
    {
        // Arrange
        SetupMocks();

        // Act
        var size = await _exporter.EstimateExportSizeAsync(_testEntry.VeriKey, ProofDensity.Strict);

        // Assert
        size.Should().BeGreaterThan(2048); // Base + all chunk data
    }

    [Fact]
    public async Task EstimateExportSizeAsync_NotFound_ReturnsZero()
    {
        // Arrange
        _mockService.Setup(s => s.GetAsync(It.IsAny<string>(), false, It.IsAny<CancellationToken>()))
            .ReturnsAsync(ProvcacheServiceResult.Miss(0));

        // Act
        var size = await _exporter.EstimateExportSizeAsync("sha256:notfound", ProofDensity.Lite);

        // Assert
        size.Should().Be(0);
    }

    #endregion

    #region Signing Tests

    [Fact]
    public async Task ExportAsync_SigningWithoutSigner_ThrowsException()
    {
        // Arrange
        SetupMocks();
        var options = new MinimalProofExportOptions
        {
            Density = ProofDensity.Lite,
            Sign = true
        };

        // Act & Assert
        await Assert.ThrowsAsync<InvalidOperationException>(() =>
            _exporter.ExportAsync(_testEntry.VeriKey, options));
    }

    [Fact]
    public async Task ExportAsync_WithSigner_SignsBundle()
    {
        // Arrange
        SetupMocks();

        var mockSigner = new Mock<ISigner>();
        mockSigner.Setup(s => s.SignAsync(It.IsAny<SignRequest>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(new SignResult(
                Signature: [1, 2, 3, 4],
                KeyId: "test-key-id",
                SignedAt: _timeProvider.GetUtcNow(),
                Claims: null));

        var exporterWithSigner = new MinimalProofExporter(
            _mockService.Object,
            _mockChunkRepo.Object,
            mockSigner.Object,
            _timeProvider,
            NullLogger<MinimalProofExporter>.Instance);

        var options = new MinimalProofExportOptions
        {
            Density = ProofDensity.Lite,
            Sign = true,
            SigningKeyId = "test-key-id"
        };

        // Act
        var bundle = await exporterWithSigner.ExportAsync(_testEntry.VeriKey, options);

        // Assert
        bundle.Signature.Should().NotBeNull();
        bundle.Signature!.KeyId.Should().Be("test-key-id");
        bundle.Signature.SignatureBytes.Should().NotBeEmpty();
    }

    #endregion

    private void SetupMocks()
    {
        _mockService.Setup(s => s.GetAsync(_testEntry.VeriKey, false, It.IsAny<CancellationToken>()))
            .ReturnsAsync(ProvcacheServiceResult.Hit(_testEntry, "memory", 1.0));

        _mockChunkRepo.Setup(r => r.GetManifestAsync(_testEntry.Decision.ProofRoot, It.IsAny<CancellationToken>()))
            .ReturnsAsync(_testManifest);

        _mockChunkRepo.Setup(r => r.GetChunkRangeAsync(
                _testEntry.Decision.ProofRoot,
                It.IsAny<int>(),
                It.IsAny<int>(),
                It.IsAny<CancellationToken>()))
            .ReturnsAsync((string root, int start, int count, CancellationToken _) =>
                _testChunks.Skip(start).Take(count).ToList());
    }

    private sealed class FakeTimeProvider : TimeProvider
    {
        private DateTimeOffset _now;

        public FakeTimeProvider(DateTimeOffset now) => _now = now;

        public override DateTimeOffset GetUtcNow() => _now;

        public void Advance(TimeSpan duration) => _now = _now.Add(duration);
    }
}
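Taken together, these tests describe an export/verify/import round trip in which the density setting controls how much chunk data travels with the bundle. The short sketch below strings those calls together; the method names and option properties come from the tests above, while the helper name and the chosen veriKey parameter are illustrative.

// Sketch only: the round trip exercised by MinimalProofExporterTests.
internal static class MinimalProofUsageSketch
{
    public static async Task<bool> ShipProofAsync(MinimalProofExporter exporter, string veriKey)
    {
        // Standard density: digest + manifest + the first few chunks, base64-encoded in the bundle.
        var options = new MinimalProofExportOptions
        {
            Density = ProofDensity.Standard,
            StandardDensityChunkCount = 3
        };

        var jsonBytes = await exporter.ExportAsJsonAsync(veriKey, options);

        // On the receiving side the bundle is verified and its embedded chunks are stored;
        // chunks beyond the density cut-off remain pending and can be lazily fetched later.
        var import = await exporter.ImportFromJsonAsync(jsonBytes);
        return import.Success && import.Verification.ChunksValid;
    }
}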
@@ -0,0 +1,351 @@
|
||||
using FluentAssertions;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using Moq;
|
||||
using StellaOps.Provcache.Entities;
|
||||
|
||||
namespace StellaOps.Provcache.Tests;
|
||||
|
||||
public sealed class RevocationLedgerTests
|
||||
{
|
||||
private readonly InMemoryRevocationLedger _ledger;
|
||||
|
||||
public RevocationLedgerTests()
|
||||
{
|
||||
_ledger = new InMemoryRevocationLedger(NullLogger<InMemoryRevocationLedger>.Instance);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RecordAsync_AssignsSeqNo()
|
||||
{
|
||||
// Arrange
|
||||
var entry = CreateTestEntry(RevocationTypes.Signer, "signer-hash-1");
|
||||
|
||||
// Act
|
||||
var recorded = await _ledger.RecordAsync(entry);
|
||||
|
||||
// Assert
|
||||
recorded.SeqNo.Should().Be(1);
|
||||
recorded.RevocationId.Should().Be(entry.RevocationId);
|
||||
recorded.RevokedKey.Should().Be("signer-hash-1");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task RecordAsync_AssignsIncrementingSeqNos()
|
||||
{
|
||||
// Arrange
|
||||
var entry1 = CreateTestEntry(RevocationTypes.Signer, "signer-1");
|
||||
var entry2 = CreateTestEntry(RevocationTypes.FeedEpoch, "epoch-1");
|
||||
var entry3 = CreateTestEntry(RevocationTypes.Policy, "policy-1");
|
||||
|
||||
// Act
|
||||
var recorded1 = await _ledger.RecordAsync(entry1);
|
||||
var recorded2 = await _ledger.RecordAsync(entry2);
|
||||
var recorded3 = await _ledger.RecordAsync(entry3);
|
||||
|
||||
// Assert
|
||||
recorded1.SeqNo.Should().Be(1);
|
||||
recorded2.SeqNo.Should().Be(2);
|
||||
recorded3.SeqNo.Should().Be(3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetEntriesSinceAsync_ReturnsEntriesAfterSeqNo()
|
||||
{
|
||||
// Arrange
|
||||
await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s1"));
|
||||
await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s2"));
|
||||
await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.FeedEpoch, "e1"));
|
||||
await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Policy, "p1"));
|
||||
|
||||
// Act
|
||||
var entries = await _ledger.GetEntriesSinceAsync(2);
|
||||
|
||||
// Assert
|
||||
entries.Should().HaveCount(2);
|
||||
entries[0].SeqNo.Should().Be(3);
|
||||
entries[1].SeqNo.Should().Be(4);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetEntriesSinceAsync_RespectsLimit()
|
||||
{
|
||||
// Arrange
|
||||
for (int i = 0; i < 10; i++)
|
||||
{
|
||||
await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, $"s{i}"));
|
||||
}
|
||||
|
||||
// Act
|
||||
var entries = await _ledger.GetEntriesSinceAsync(0, limit: 3);
|
||||
|
||||
// Assert
|
||||
entries.Should().HaveCount(3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetEntriesByTypeAsync_FiltersCorrectly()
|
||||
{
|
||||
// Arrange
|
||||
await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s1"));
|
||||
await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.FeedEpoch, "e1"));
|
||||
await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s2"));
|
||||
await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Policy, "p1"));
|
||||
|
||||
// Act
|
||||
var signerEntries = await _ledger.GetEntriesByTypeAsync(RevocationTypes.Signer);
|
||||
|
||||
// Assert
|
||||
signerEntries.Should().HaveCount(2);
|
||||
signerEntries.Should().OnlyContain(e => e.RevocationType == RevocationTypes.Signer);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task GetEntriesByTypeAsync_FiltersBySinceTime()
|
||||
{
|
||||
// Arrange
|
||||
var oldEntry = CreateTestEntry(RevocationTypes.Signer, "s1") with
|
||||
{
|
||||
RevokedAt = DateTimeOffset.UtcNow.AddDays(-5)
|
||||
};
|
||||
var newEntry = CreateTestEntry(RevocationTypes.Signer, "s2") with
|
||||
{
|
||||
RevokedAt = DateTimeOffset.UtcNow.AddDays(-1)
|
||||
};
|
||||
|
||||
await _ledger.RecordAsync(oldEntry);
|
||||
await _ledger.RecordAsync(newEntry);
|
||||
|
||||
// Act
|
||||
var entries = await _ledger.GetEntriesByTypeAsync(
|
||||
RevocationTypes.Signer,
|
||||
since: DateTimeOffset.UtcNow.AddDays(-2));
|
||||
|
||||
// Assert
|
||||
entries.Should().HaveCount(1);
|
||||
entries[0].RevokedKey.Should().Be("s2");
|
||||
}
|
||||
|
||||
    [Fact]
    public async Task GetLatestSeqNoAsync_ReturnsZeroWhenEmpty()
    {
        // Act
        var seqNo = await _ledger.GetLatestSeqNoAsync();

        // Assert
        seqNo.Should().Be(0);
    }

    [Fact]
    public async Task GetLatestSeqNoAsync_ReturnsLatest()
    {
        // Arrange
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s2"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s3"));

        // Act
        var seqNo = await _ledger.GetLatestSeqNoAsync();

        // Assert
        seqNo.Should().Be(3);
    }

    [Fact]
    public async Task GetRevocationsForKeyAsync_ReturnsMatchingEntries()
    {
        // Arrange
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.FeedEpoch, "s1")); // Same key, different type
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s2"));

        // Act
        var entries = await _ledger.GetRevocationsForKeyAsync("s1");

        // Assert
        entries.Should().HaveCount(2);
        entries.Should().OnlyContain(e => e.RevokedKey == "s1");
    }

    [Fact]
    public async Task GetStatsAsync_ReturnsCorrectStats()
    {
        // Arrange
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s1", invalidated: 5));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s2", invalidated: 3));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.FeedEpoch, "e1", invalidated: 10));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Policy, "p1", invalidated: 2));

        // Act
        var stats = await _ledger.GetStatsAsync();

        // Assert
        stats.TotalEntries.Should().Be(4);
        stats.LatestSeqNo.Should().Be(4);
        stats.TotalEntriesInvalidated.Should().Be(20);
        stats.EntriesByType.Should().ContainKey(RevocationTypes.Signer);
        stats.EntriesByType[RevocationTypes.Signer].Should().Be(2);
        stats.EntriesByType[RevocationTypes.FeedEpoch].Should().Be(1);
        stats.EntriesByType[RevocationTypes.Policy].Should().Be(1);
    }

    [Fact]
    public async Task Clear_RemovesAllEntries()
    {
        // Arrange
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "s2"));

        // Act
        _ledger.Clear();

        // Assert
        var seqNo = await _ledger.GetLatestSeqNoAsync();
        seqNo.Should().Be(0);
    }

    private static RevocationEntry CreateTestEntry(
        string revocationType,
        string revokedKey,
        int invalidated = 0)
    {
        return new RevocationEntry
        {
            RevocationId = Guid.NewGuid(),
            RevocationType = revocationType,
            RevokedKey = revokedKey,
            Reason = "Test revocation",
            EntriesInvalidated = invalidated,
            Source = "unit-test",
            RevokedAt = DateTimeOffset.UtcNow
        };
    }
}

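/// <summary>
/// Tests for revocation replay against the in-memory ledger, covering replay from a
/// starting sequence number, entry limits, and checkpoint persistence.
/// </summary>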
public sealed class RevocationReplayServiceTests
{
    private readonly InMemoryRevocationLedger _ledger;
    private readonly Mock<IProvcacheRepository> _repositoryMock;
    private readonly RevocationReplayService _replayService;

    public RevocationReplayServiceTests()
    {
        _ledger = new InMemoryRevocationLedger(NullLogger<InMemoryRevocationLedger>.Instance);
        _repositoryMock = new Mock<IProvcacheRepository>();
        _replayService = new RevocationReplayService(
            _ledger,
            _repositoryMock.Object,
            NullLogger<RevocationReplayService>.Instance);
    }

    [Fact]
    public async Task ReplayFromAsync_ReplaysAllEntries()
    {
        // Arrange
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "signer-1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.FeedEpoch, "epoch-1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Policy, "policy-1"));

        _repositoryMock.Setup(r => r.DeleteBySignerSetHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(2L);
        _repositoryMock.Setup(r => r.DeleteByFeedEpochOlderThanAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(5L);
        _repositoryMock.Setup(r => r.DeleteByPolicyHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(3L);

        // Act
        var result = await _replayService.ReplayFromAsync(0);

        // Assert
        result.Success.Should().BeTrue();
        result.EntriesReplayed.Should().Be(3);
        result.TotalInvalidations.Should().Be(10); // 2 + 5 + 3
        result.EntriesByType.Should().HaveCount(3);
    }

    [Fact]
    public async Task ReplayFromAsync_StartsFromCheckpoint()
    {
        // Arrange
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "signer-1"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "signer-2"));
        await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, "signer-3"));

        _repositoryMock.Setup(r => r.DeleteBySignerSetHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(1L);

        // Act - replay from seq 2 (skip the first two entries)
        var result = await _replayService.ReplayFromAsync(2);

        // Assert
        result.EntriesReplayed.Should().Be(1); // Only seq 3
        result.StartSeqNo.Should().Be(2);
        result.EndSeqNo.Should().Be(3);
    }

    [Fact]
    public async Task ReplayFromAsync_RespectsMaxEntries()
    {
        // Arrange
        for (int i = 0; i < 10; i++)
        {
            await _ledger.RecordAsync(CreateTestEntry(RevocationTypes.Signer, $"signer-{i}"));
        }

        _repositoryMock.Setup(r => r.DeleteBySignerSetHashAsync(It.IsAny<string>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(1L);

        var options = new RevocationReplayOptions { MaxEntries = 3 };

        // Act
        var result = await _replayService.ReplayFromAsync(0, options);

        // Assert
        result.EntriesReplayed.Should().Be(3);
    }

    [Fact]
    public async Task ReplayFromAsync_ReturnsEmptyWhenNoEntries()
    {
        // Act
        var result = await _replayService.ReplayFromAsync(0);

        // Assert
        result.Success.Should().BeTrue();
        result.EntriesReplayed.Should().Be(0);
    }

    [Fact]
    public async Task GetCheckpointAsync_ReturnsZeroInitially()
    {
        // Act
        var checkpoint = await _replayService.GetCheckpointAsync();

        // Assert
        checkpoint.Should().Be(0);
    }

    [Fact]
    public async Task SaveCheckpointAsync_PersistsCheckpoint()
    {
        // Act
        await _replayService.SaveCheckpointAsync(42);
        var checkpoint = await _replayService.GetCheckpointAsync();

        // Assert
        checkpoint.Should().Be(42);
    }

    private static RevocationEntry CreateTestEntry(string revocationType, string revokedKey)
    {
        return new RevocationEntry
        {
            RevocationId = Guid.NewGuid(),
            RevocationType = revocationType,
            RevokedKey = revokedKey,
            Reason = "Test revocation",
            EntriesInvalidated = 0,
            Source = "unit-test",
            RevokedAt = DateTimeOffset.UtcNow
        };
    }
}