using FluentAssertions;
using StellaOps.Provcache;
using StellaOps.TestKit;
using StellaOps.TestKit.Deterministic;
using Xunit;
namespace StellaOps.Provcache.Tests;
/// <summary>
/// Tests for <see cref="EvidenceChunker"/>.
/// </summary>
public sealed class EvidenceChunkerTests
{
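// Fixed timestamp keeps CreatedAt metadata on manually constructed chunks deterministic across runs.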
private static readonly DateTimeOffset FixedNow = new(2026, 1, 1, 0, 0, 0, TimeSpan.Zero);
private readonly ProvcacheOptions _options;
private readonly EvidenceChunker _chunker;
public EvidenceChunkerTests()
{
_options = new ProvcacheOptions { ChunkSize = 64 }; // Small for testing
_chunker = new EvidenceChunker(_options);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ChunkAsync_ShouldSplitEvidenceIntoMultipleChunks_WhenLargerThanChunkSize()
{
// Arrange
var evidence = CreateDeterministicBytes(200, 1);
const string contentType = "application/octet-stream";
// Act
var result = await _chunker.ChunkAsync(evidence, contentType);
// Assert
result.Should().NotBeNull();
result.Chunks.Should().HaveCount(4); // ceil(200/64) = 4
result.TotalSize.Should().Be(200);
result.ProofRoot.Should().StartWith("sha256:");
// Verify chunk ordering
for (var i = 0; i < result.Chunks.Count; i++)
{
result.Chunks[i].ChunkIndex.Should().Be(i);
result.Chunks[i].ContentType.Should().Be(contentType);
result.Chunks[i].ProofRoot.Should().Be(result.ProofRoot);
}
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ChunkAsync_ShouldCreateSingleChunk_WhenSmallerThanChunkSize()
{
// Arrange
var evidence = CreateDeterministicBytes(32, 2);
const string contentType = "application/json";
// Act
var result = await _chunker.ChunkAsync(evidence, contentType);
// Assert
result.Should().NotBeNull();
result.Chunks.Should().HaveCount(1);
result.TotalSize.Should().Be(32);
result.Chunks[0].BlobSize.Should().Be(32);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ChunkAsync_ShouldHandleEmptyEvidence()
{
// Arrange
var evidence = Array.Empty<byte>();
const string contentType = "application/octet-stream";
// Act
var result = await _chunker.ChunkAsync(evidence, contentType);
// Assert
result.Should().NotBeNull();
result.Chunks.Should().BeEmpty();
result.TotalSize.Should().Be(0);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ChunkAsync_ShouldProduceUniqueHashForEachChunk()
{
// Arrange - create evidence with distinct bytes per chunk
var evidence = new byte[128];
for (var i = 0; i < 64; i++) evidence[i] = 0xAA;
for (var i = 64; i < 128; i++) evidence[i] = 0xBB;
const string contentType = "application/octet-stream";
// Act
var result = await _chunker.ChunkAsync(evidence, contentType);
// Assert
result.Chunks.Should().HaveCount(2);
result.Chunks[0].ChunkHash.Should().NotBe(result.Chunks[1].ChunkHash);
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ReassembleAsync_ShouldRecoverOriginalEvidence()
{
// Arrange
var original = CreateDeterministicBytes(200, 3);
const string contentType = "application/octet-stream";
var chunked = await _chunker.ChunkAsync(original, contentType);
// Act
var reassembled = await _chunker.ReassembleAsync(chunked.Chunks, chunked.ProofRoot);
// Assert
reassembled.Should().Equal(original); // byte-for-byte, order-sensitive comparison
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ReassembleAsync_ShouldThrow_WhenMerkleRootMismatch()
{
// Arrange
var evidence = CreateDeterministicBytes(100, 4);
const string contentType = "application/octet-stream";
var chunked = await _chunker.ChunkAsync(evidence, contentType);
// Act & Assert
var act = () => _chunker.ReassembleAsync(chunked.Chunks, "sha256:invalid_root");
// The concrete exception type is not pinned here; the message assertion carries the mismatch check.
await act.Should().ThrowAsync<Exception>()
.WithMessage("*Merkle root mismatch*");
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ReassembleAsync_ShouldThrow_WhenChunkCorrupted()
{
// Arrange
var evidence = CreateDeterministicBytes(100, 5);
const string contentType = "application/octet-stream";
var chunked = await _chunker.ChunkAsync(evidence, contentType);
// Corrupt first chunk
var corruptedChunks = chunked.Chunks
.Select((c, i) => i == 0
? c with { Blob = new byte[c.BlobSize], ChunkHash = c.ChunkHash } // same hash but different blob
: c)
.ToList();
// Act & Assert
var act = () => _chunker.ReassembleAsync(corruptedChunks, chunked.ProofRoot);
await act.Should().ThrowAsync<Exception>()
.WithMessage("*verification failed*");
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public void VerifyChunk_ShouldReturnTrue_WhenChunkValid()
{
// Arrange
var data = CreateDeterministicBytes(32, 6);
var hash = ComputeHash(data);
var chunk = new EvidenceChunk
{
ChunkId = CreateGuid(6),
ProofRoot = "sha256:test",
ChunkIndex = 0,
ChunkHash = hash,
Blob = data,
BlobSize = data.Length,
ContentType = "application/octet-stream",
CreatedAt = FixedNow
};
// Act & Assert
_chunker.VerifyChunk(chunk).Should().BeTrue();
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public void VerifyChunk_ShouldReturnFalse_WhenHashMismatch()
{
// Arrange
var chunk = new EvidenceChunk
{
ChunkId = CreateGuid(7),
ProofRoot = "sha256:test",
ChunkIndex = 0,
ChunkHash = "sha256:wrong_hash",
Blob = new byte[32],
BlobSize = 32,
ContentType = "application/octet-stream",
CreatedAt = FixedNow
};
// Act & Assert
_chunker.VerifyChunk(chunk).Should().BeFalse();
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public void ComputeMerkleRoot_ShouldReturnSameResult_ForSameInput()
{
// Arrange
var hashes = new[] { "sha256:aabb", "sha256:ccdd", "sha256:eeff", "sha256:1122" };
// Act
var root1 = _chunker.ComputeMerkleRoot(hashes);
var root2 = _chunker.ComputeMerkleRoot(hashes);
// Assert
root1.Should().Be(root2);
root1.Should().StartWith("sha256:");
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public void ComputeMerkleRoot_ShouldHandleSingleHash()
{
// Arrange
var hashes = new[] { "sha256:aabbccdd" };
// Act
var root = _chunker.ComputeMerkleRoot(hashes);
// Assert
root.Should().Be("sha256:aabbccdd");
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public void ComputeMerkleRoot_ShouldHandleOddNumberOfHashes()
{
// Arrange
var hashes = new[] { "sha256:aabb", "sha256:ccdd", "sha256:eeff" };
// Act
var root = _chunker.ComputeMerkleRoot(hashes);
// Assert
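// How an odd leaf count is paired (e.g. duplicating or promoting the last hash) is an
// implementation detail, so this test only checks that a well-formed root is produced.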
root.Should().NotBeNullOrEmpty();
root.Should().StartWith("sha256:");
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task ChunkStreamAsync_ShouldYieldChunksInOrder()
{
// Arrange
var evidence = CreateDeterministicBytes(200, 8);
using var stream = new MemoryStream(evidence);
const string contentType = "application/octet-stream";
// Act
var chunks = new List<EvidenceChunk>();
await foreach (var chunk in _chunker.ChunkStreamAsync(stream, contentType))
{
chunks.Add(chunk);
}
// Assert
chunks.Should().HaveCount(4);
for (var i = 0; i < chunks.Count; i++)
{
chunks[i].ChunkIndex.Should().Be(i);
}
}
[Trait("Category", TestCategories.Unit)]
[Fact]
public async Task Roundtrip_ShouldPreserveDataIntegrity()
{
// Arrange - use realistic chunk size
var options = new ProvcacheOptions { ChunkSize = 1024 };
var chunker = new EvidenceChunker(options);
var original = CreateDeterministicBytes(5000, 9); // ~5 chunks
const string contentType = "application/octet-stream";
// Act
var chunked = await chunker.ChunkAsync(original, contentType);
var reassembled = await chunker.ReassembleAsync(chunked.Chunks, chunked.ProofRoot);
// Assert
reassembled.Should().Equal(original); // byte-for-byte, order-sensitive comparison
chunked.Chunks.Should().HaveCount(5);
}
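// Mirrors the chunk-hash format assumed throughout these tests: "sha256:" prefix + lowercase hex digest.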
private static string ComputeHash(byte[] data)
{
var hash = System.Security.Cryptography.SHA256.HashData(data);
return $"sha256:{Convert.ToHexStringLower(hash)}";
}
private static byte[] CreateDeterministicBytes(int length, int seed)
{
var random = new DeterministicRandom(seed);
var data = new byte[length];
random.NextBytes(data);
return data;
}
private static Guid CreateGuid(int seed)
{
return new DeterministicRandom(seed).NextGuid();
}
}