sprints enhancements
This commit is contained in:
@@ -0,0 +1,289 @@
|
||||
using FluentAssertions;
|
||||
using StellaOps.Provcache;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Provcache.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for <see cref="EvidenceChunker"/>.
|
||||
/// </summary>
|
||||
/// <summary>
/// Tests for <see cref="EvidenceChunker"/>: chunking, Merkle-root computation,
/// per-chunk verification, and reassembly round-trips.
/// </summary>
public sealed class EvidenceChunkerTests
{
    private readonly ProvcacheOptions _options;
    private readonly EvidenceChunker _chunker;

    public EvidenceChunkerTests()
    {
        _options = new ProvcacheOptions { ChunkSize = 64 }; // Small so tiny fixtures span multiple chunks
        _chunker = new EvidenceChunker(_options);
    }

    /// <summary>
    /// Creates a buffer of deterministic pseudo-random bytes. Seeded so test
    /// fixtures are reproducible run-to-run (unseeded Random.Shared would make
    /// failures non-repeatable).
    /// </summary>
    private static byte[] CreateEvidence(int size, int seed = 42)
    {
        var buffer = new byte[size];
        new Random(seed).NextBytes(buffer);
        return buffer;
    }

    [Fact]
    public async Task ChunkAsync_ShouldSplitEvidenceIntoMultipleChunks_WhenLargerThanChunkSize()
    {
        // Arrange
        var evidence = CreateEvidence(200);
        const string contentType = "application/octet-stream";

        // Act
        var result = await _chunker.ChunkAsync(evidence, contentType);

        // Assert
        result.Should().NotBeNull();
        result.Chunks.Should().HaveCount(4); // ceil(200/64) = 4
        result.TotalSize.Should().Be(200);
        result.ProofRoot.Should().StartWith("sha256:");

        // Every chunk carries its position, the payload content type, and the shared proof root.
        for (var i = 0; i < result.Chunks.Count; i++)
        {
            result.Chunks[i].ChunkIndex.Should().Be(i);
            result.Chunks[i].ContentType.Should().Be(contentType);
            result.Chunks[i].ProofRoot.Should().Be(result.ProofRoot);
        }
    }

    [Fact]
    public async Task ChunkAsync_ShouldCreateSingleChunk_WhenSmallerThanChunkSize()
    {
        // Arrange
        var evidence = CreateEvidence(32);
        const string contentType = "application/json";

        // Act
        var result = await _chunker.ChunkAsync(evidence, contentType);

        // Assert
        result.Should().NotBeNull();
        result.Chunks.Should().HaveCount(1);
        result.TotalSize.Should().Be(32);
        result.Chunks[0].BlobSize.Should().Be(32);
    }

    [Fact]
    public async Task ChunkAsync_ShouldHandleEmptyEvidence()
    {
        // Arrange
        var evidence = Array.Empty<byte>();
        const string contentType = "application/octet-stream";

        // Act
        var result = await _chunker.ChunkAsync(evidence, contentType);

        // Assert - empty input yields no chunks rather than throwing.
        result.Should().NotBeNull();
        result.Chunks.Should().BeEmpty();
        result.TotalSize.Should().Be(0);
    }

    [Fact]
    public async Task ChunkAsync_ShouldProduceUniqueHashForEachChunk()
    {
        // Arrange - two 64-byte chunks with distinct, deterministic content.
        var evidence = new byte[128];
        Array.Fill(evidence, (byte)0xAA, 0, 64);
        Array.Fill(evidence, (byte)0xBB, 64, 64);
        const string contentType = "application/octet-stream";

        // Act
        var result = await _chunker.ChunkAsync(evidence, contentType);

        // Assert
        result.Chunks.Should().HaveCount(2);
        result.Chunks[0].ChunkHash.Should().NotBe(result.Chunks[1].ChunkHash);
    }

    [Fact]
    public async Task ReassembleAsync_ShouldRecoverOriginalEvidence()
    {
        // Arrange
        var original = CreateEvidence(200);
        const string contentType = "application/octet-stream";

        var chunked = await _chunker.ChunkAsync(original, contentType);

        // Act
        var reassembled = await _chunker.ReassembleAsync(chunked.Chunks, chunked.ProofRoot);

        // Assert - Equal() is order-sensitive; BeEquivalentTo() would accept a
        // permutation of the bytes, which is not acceptable for a byte stream.
        reassembled.Should().Equal(original);
    }

    [Fact]
    public async Task ReassembleAsync_ShouldThrow_WhenMerkleRootMismatch()
    {
        // Arrange
        var evidence = CreateEvidence(100);
        const string contentType = "application/octet-stream";

        var chunked = await _chunker.ChunkAsync(evidence, contentType);

        // Act & Assert - an expected root that doesn't match the chunk hashes must be rejected.
        var act = () => _chunker.ReassembleAsync(chunked.Chunks, "sha256:invalid_root");
        await act.Should().ThrowAsync<InvalidOperationException>()
            .WithMessage("*Merkle root mismatch*");
    }

    [Fact]
    public async Task ReassembleAsync_ShouldThrow_WhenChunkCorrupted()
    {
        // Arrange
        var evidence = CreateEvidence(100);
        const string contentType = "application/octet-stream";

        var chunked = await _chunker.ChunkAsync(evidence, contentType);

        // Corrupt the first chunk: zero out the blob while keeping the original
        // ChunkHash, so the blob no longer matches its recorded hash.
        var corruptedChunks = chunked.Chunks
            .Select((c, i) => i == 0 ? c with { Blob = new byte[c.BlobSize] } : c)
            .ToList();

        // Act & Assert
        var act = () => _chunker.ReassembleAsync(corruptedChunks, chunked.ProofRoot);
        await act.Should().ThrowAsync<InvalidOperationException>()
            .WithMessage("*verification failed*");
    }

    [Fact]
    public void VerifyChunk_ShouldReturnTrue_WhenChunkValid()
    {
        // Arrange - a chunk whose ChunkHash really is the SHA-256 of its blob.
        var data = CreateEvidence(32);
        var hash = ComputeHash(data);

        var chunk = new EvidenceChunk
        {
            ChunkId = Guid.NewGuid(),
            ProofRoot = "sha256:test",
            ChunkIndex = 0,
            ChunkHash = hash,
            Blob = data,
            BlobSize = data.Length,
            ContentType = "application/octet-stream",
            CreatedAt = DateTimeOffset.UtcNow
        };

        // Act & Assert
        _chunker.VerifyChunk(chunk).Should().BeTrue();
    }

    [Fact]
    public void VerifyChunk_ShouldReturnFalse_WhenHashMismatch()
    {
        // Arrange - ChunkHash deliberately does not match the (all-zero) blob.
        var chunk = new EvidenceChunk
        {
            ChunkId = Guid.NewGuid(),
            ProofRoot = "sha256:test",
            ChunkIndex = 0,
            ChunkHash = "sha256:wrong_hash",
            Blob = new byte[32],
            BlobSize = 32,
            ContentType = "application/octet-stream",
            CreatedAt = DateTimeOffset.UtcNow
        };

        // Act & Assert
        _chunker.VerifyChunk(chunk).Should().BeFalse();
    }

    [Fact]
    public void ComputeMerkleRoot_ShouldReturnSameResult_ForSameInput()
    {
        // Arrange
        var hashes = new[] { "sha256:aabb", "sha256:ccdd", "sha256:eeff", "sha256:1122" };

        // Act
        var root1 = _chunker.ComputeMerkleRoot(hashes);
        var root2 = _chunker.ComputeMerkleRoot(hashes);

        // Assert - root computation must be deterministic.
        root1.Should().Be(root2);
        root1.Should().StartWith("sha256:");
    }

    [Fact]
    public void ComputeMerkleRoot_ShouldHandleSingleHash()
    {
        // Arrange
        var hashes = new[] { "sha256:aabbccdd" };

        // Act
        var root = _chunker.ComputeMerkleRoot(hashes);

        // Assert - a single leaf IS the root (no hashing of a lone node).
        root.Should().Be("sha256:aabbccdd");
    }

    [Fact]
    public void ComputeMerkleRoot_ShouldHandleOddNumberOfHashes()
    {
        // Arrange - odd leaf count exercises the promote/duplicate-last-node path.
        var hashes = new[] { "sha256:aabb", "sha256:ccdd", "sha256:eeff" };

        // Act
        var root = _chunker.ComputeMerkleRoot(hashes);

        // Assert
        root.Should().NotBeNullOrEmpty();
        root.Should().StartWith("sha256:");
    }

    [Fact]
    public async Task ChunkStreamAsync_ShouldYieldChunksInOrder()
    {
        // Arrange
        var evidence = CreateEvidence(200);
        using var stream = new MemoryStream(evidence);
        const string contentType = "application/octet-stream";

        // Act
        var chunks = new List<EvidenceChunk>();
        await foreach (var chunk in _chunker.ChunkStreamAsync(stream, contentType))
        {
            chunks.Add(chunk);
        }

        // Assert - ceil(200/64) = 4, yielded strictly in index order.
        chunks.Should().HaveCount(4);
        for (var i = 0; i < chunks.Count; i++)
        {
            chunks[i].ChunkIndex.Should().Be(i);
        }
    }

    [Fact]
    public async Task Roundtrip_ShouldPreserveDataIntegrity()
    {
        // Arrange - use a more realistic chunk size than the shared fixture's.
        var options = new ProvcacheOptions { ChunkSize = 1024 };
        var chunker = new EvidenceChunker(options);

        var original = CreateEvidence(5000); // ceil(5000/1024) = 5 chunks
        const string contentType = "application/octet-stream";

        // Act
        var chunked = await chunker.ChunkAsync(original, contentType);
        var reassembled = await chunker.ReassembleAsync(chunked.Chunks, chunked.ProofRoot);

        // Assert - order-sensitive byte-for-byte comparison (see ReassembleAsync test).
        reassembled.Should().Equal(original);
        chunked.Chunks.Should().HaveCount(5);
    }

    /// <summary>
    /// Computes the "sha256:&lt;lowercase-hex&gt;" digest string used by the chunker.
    /// </summary>
    private static string ComputeHash(byte[] data)
    {
        var hash = System.Security.Cryptography.SHA256.HashData(data);
        return $"sha256:{Convert.ToHexStringLower(hash)}";
    }
}
|
||||
Reference in New Issue
Block a user