Add two new advisories and sprint work items for them

This commit is contained in:
master
2026-01-16 18:39:36 +02:00
parent 9daf619954
commit c3a6269d55
72 changed files with 15540 additions and 18 deletions

View File

@@ -0,0 +1,465 @@
// -----------------------------------------------------------------------------
// RekorVerificationServiceTests.cs
// Sprint: SPRINT_20260117_001_ATTESTOR_periodic_rekor_verification
// Task: PRV-007 - Unit tests for verification service
// Description: Unit tests for RekorVerificationService
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using System.Security.Cryptography;
using System.Text;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Attestor.Core.Verification;
using Xunit;
namespace StellaOps.Attestor.Core.Tests.Verification;
[Trait("Category", "Unit")]
public sealed class RekorVerificationServiceTests
{
    // Fixed clock value so time-skew checks are deterministic across runs.
    private static readonly DateTimeOffset FixedTimestamp = new(2026, 1, 16, 12, 0, 0, TimeSpan.Zero);

    private readonly FakeTimeProvider _timeProvider;

    public RekorVerificationServiceTests()
    {
        // NOTE: the previously-declared ILogger<RekorVerificationServiceTests> field was
        // never read anywhere in the class (dead code) and has been removed.
        _timeProvider = new FakeTimeProvider(FixedTimestamp);
    }

    [Fact]
    public void VerifySignature_ValidEd25519Signature_ReturnsTrue()
    {
        // Arrange
        var service = CreateService();
        using var ed25519 = new Ed25519Signature();
        var data = Encoding.UTF8.GetBytes("test message");
        var signature = ed25519.Sign(data);
        var publicKey = ed25519.ExportPublicKey();

        // Act
        var result = service.VerifySignature(data, signature, publicKey, "ed25519");

        // Assert
        Assert.True(result.IsValid);
        Assert.Empty(result.Errors);
    }

    [Fact]
    public void VerifySignature_InvalidSignature_ReturnsFalse()
    {
        // Arrange
        var service = CreateService();
        using var ed25519 = new Ed25519Signature();
        var data = Encoding.UTF8.GetBytes("test message");
        var signature = new byte[64]; // Invalid signature (all zeros)
        var publicKey = ed25519.ExportPublicKey();

        // Act
        var result = service.VerifySignature(data, signature, publicKey, "ed25519");

        // Assert
        Assert.False(result.IsValid);
        // Assert against the whole collection instead of Errors.First(): an empty list now
        // produces a clear assertion failure rather than an InvalidOperationException,
        // and the check no longer depends on error ordering.
        Assert.Contains(result.Errors, e => e.Contains("signature", StringComparison.OrdinalIgnoreCase));
    }

    [Fact]
    public void VerifySignature_TamperedData_ReturnsFalse()
    {
        // Arrange
        var service = CreateService();
        using var ed25519 = new Ed25519Signature();
        var originalData = Encoding.UTF8.GetBytes("original message");
        var tamperedData = Encoding.UTF8.GetBytes("tampered message");
        var signature = ed25519.Sign(originalData);
        var publicKey = ed25519.ExportPublicKey();

        // Act - verify the signature of the original data against tampered data
        var result = service.VerifySignature(tamperedData, signature, publicKey, "ed25519");

        // Assert
        Assert.False(result.IsValid);
    }

    [Fact]
    public void VerifyInclusionProof_ValidProof_ReturnsTrue()
    {
        // Arrange
        var service = CreateService();
        var leafHash = CreateDeterministicHash("leaf-data-0");
        var proof = CreateValidInclusionProof(leafHash, 100, 5);

        // Act
        var result = service.VerifyInclusionProof(proof);

        // Assert
        Assert.True(result.IsValid);
        Assert.Equal(proof.TreeSize, result.TreeSize);
    }

    [Fact]
    public void VerifyInclusionProof_EmptyHashes_ReturnsFalse()
    {
        // Arrange - a proof with no sibling hashes cannot link the leaf to the root
        var service = CreateService();
        var proof = new InclusionProofData(
            LeafHash: CreateDeterministicHash("leaf"),
            RootHash: CreateDeterministicHash("root"),
            TreeSize: 100,
            LogIndex: 5,
            Hashes: ImmutableArray<string>.Empty);

        // Act
        var result = service.VerifyInclusionProof(proof);

        // Assert
        Assert.False(result.IsValid);
        Assert.Contains(result.Errors, e => e.Contains("proof", StringComparison.OrdinalIgnoreCase));
    }

    [Fact]
    public void VerifyInclusionProof_InvalidRootHash_ReturnsFalse()
    {
        // Arrange - root hash that cannot be derived from leaf + siblings
        var service = CreateService();
        var leafHash = CreateDeterministicHash("leaf");
        var proof = new InclusionProofData(
            LeafHash: leafHash,
            RootHash: CreateDeterministicHash("wrong-root"),
            TreeSize: 100,
            LogIndex: 5,
            Hashes: ImmutableArray.Create(
                CreateDeterministicHash("sibling1"),
                CreateDeterministicHash("sibling2")));

        // Act
        var result = service.VerifyInclusionProof(proof);

        // Assert
        Assert.False(result.IsValid);
    }

    [Fact]
    public void DetectTimeSkew_WithinThreshold_ReturnsNoSkew()
    {
        // Arrange - 30s is within the default 300s tolerance (see CreateOptions)
        var service = CreateService();
        var integratedTime = FixedTimestamp.AddSeconds(-30);

        // Act
        var result = service.DetectTimeSkew(integratedTime, FixedTimestamp);

        // Assert
        Assert.False(result.HasSkew);
        Assert.Equal(TimeSpan.FromSeconds(30), result.Skew);
    }

    [Fact]
    public void DetectTimeSkew_ExceedsThreshold_ReturnsSkewDetected()
    {
        // Arrange - 120s skew against a 60s tolerance
        var options = CreateOptions();
        options.Value.MaxTimeSkewSeconds = 60;
        var service = CreateService(options);
        var integratedTime = FixedTimestamp.AddSeconds(-120);

        // Act
        var result = service.DetectTimeSkew(integratedTime, FixedTimestamp);

        // Assert
        Assert.True(result.HasSkew);
        Assert.Equal(TimeSpan.FromSeconds(120), result.Skew);
    }

    [Fact]
    public void DetectTimeSkew_FutureIntegratedTime_ReturnsSkewDetected()
    {
        // Arrange
        var options = CreateOptions();
        options.Value.MaxTimeSkewSeconds = 60;
        var service = CreateService(options);
        var integratedTime = FixedTimestamp.AddMinutes(5); // 5 minutes in future

        // Act
        var result = service.DetectTimeSkew(integratedTime, FixedTimestamp);

        // Assert
        Assert.True(result.HasSkew);
        Assert.True(result.IsFutureTimestamp);
    }

    [Fact]
    public void VerifyEntry_AllChecksPass_ReturnsSuccess()
    {
        // Arrange
        var service = CreateService();
        var entry = CreateValidRekorEntry();

        // Act
        var result = service.VerifyEntry(entry);

        // Assert - all individual checks and the aggregate must pass
        Assert.True(result.IsValid);
        Assert.True(result.SignatureValid);
        Assert.True(result.InclusionProofValid);
        Assert.False(result.TimeSkewDetected);
    }

    [Fact]
    public void VerifyEntry_InvalidSignature_ReturnsPartialFailure()
    {
        // Arrange
        var service = CreateService();
        var entry = CreateRekorEntryWithInvalidSignature();

        // Act
        var result = service.VerifyEntry(entry);

        // Assert
        Assert.False(result.IsValid);
        Assert.False(result.SignatureValid);
        // Robust against reason ordering and empty lists (see note in the invalid-signature test).
        Assert.Contains(result.FailureReasons, r => r.Contains("signature", StringComparison.OrdinalIgnoreCase));
    }

    [Fact]
    public void VerifyBatch_MultipleEntries_ReturnsAggregateResults()
    {
        // Arrange - two valid entries and one with a bad signature
        var service = CreateService();
        var entries = new[]
        {
            CreateValidRekorEntry(),
            CreateRekorEntryWithInvalidSignature(),
            CreateValidRekorEntry()
        };

        // Act
        var result = service.VerifyBatch(entries);

        // Assert
        Assert.Equal(3, result.TotalCount);
        Assert.Equal(2, result.ValidCount);
        Assert.Equal(1, result.InvalidCount);
        Assert.Equal(2, result.Results.Count(r => r.IsValid));
    }

    [Fact]
    public void VerifyRootConsistency_ConsistentRoots_ReturnsTrue()
    {
        // Arrange
        var service = CreateService();
        var storedRoot = CreateDeterministicHash("root-at-100");
        var remoteRoot = storedRoot; // Same root
        var storedSize = 100L;
        var remoteSize = 100L;

        // Act
        var result = service.VerifyRootConsistency(storedRoot, remoteRoot, storedSize, remoteSize);

        // Assert
        Assert.True(result.IsConsistent);
    }

    [Fact]
    public void VerifyRootConsistency_DifferentRootsSameSize_ReturnsFalse()
    {
        // Arrange - same tree size but diverging roots suggests the log was rewritten
        var service = CreateService();
        var storedRoot = CreateDeterministicHash("root-v1");
        var remoteRoot = CreateDeterministicHash("root-v2");
        var size = 100L;

        // Act
        var result = service.VerifyRootConsistency(storedRoot, remoteRoot, size, size);

        // Assert
        Assert.False(result.IsConsistent);
        Assert.True(result.PossibleTampering);
    }

    [Fact]
    public void VerifyRootConsistency_RemoteSmallerThanStored_ReturnsFalse()
    {
        // Arrange
        var service = CreateService();
        var storedRoot = CreateDeterministicHash("root");
        var remoteRoot = CreateDeterministicHash("root-smaller");
        var storedSize = 100L;
        var remoteSize = 50L; // Smaller - indicates rollback

        // Act
        var result = service.VerifyRootConsistency(storedRoot, remoteRoot, storedSize, remoteSize);

        // Assert
        Assert.False(result.IsConsistent);
        Assert.True(result.PossibleRollback);
    }

    // ----- Helper methods -----

    /// <summary>Creates the service under test with the fake clock and (by default) standard options.</summary>
    private IRekorVerificationService CreateService(IOptions<RekorVerificationOptions>? options = null)
    {
        return new RekorVerificationService(
            options ?? CreateOptions(),
            _timeProvider,
            NullLogger<RekorVerificationService>.Instance);
    }

    /// <summary>Default options: verification enabled, 5-minute skew tolerance, batch of 100.</summary>
    private static IOptions<RekorVerificationOptions> CreateOptions()
    {
        return Options.Create(new RekorVerificationOptions
        {
            Enabled = true,
            MaxTimeSkewSeconds = 300,
            BatchSize = 100
        });
    }

    /// <summary>SHA-256 of the UTF-8 input, base64-encoded; same input always yields the same hash.</summary>
    private static string CreateDeterministicHash(string input)
    {
        var bytes = Encoding.UTF8.GetBytes(input);
        var hash = SHA256.HashData(bytes);
        return Convert.ToBase64String(hash);
    }

    /// <summary>
    /// Builds a proof whose root is computed from the leaf and three deterministic siblings,
    /// so the service's recomputation matches (see <see cref="ComputeMerkleRoot"/>).
    /// </summary>
    private static InclusionProofData CreateValidInclusionProof(string leafHash, long treeSize, long logIndex)
    {
        var hashes = ImmutableArray.Create(
            CreateDeterministicHash($"sibling-{logIndex}-0"),
            CreateDeterministicHash($"sibling-{logIndex}-1"),
            CreateDeterministicHash($"sibling-{logIndex}-2"));

        // Compute expected root (simplified for test)
        var rootHash = ComputeMerkleRoot(leafHash, hashes, logIndex, treeSize);

        return new InclusionProofData(
            LeafHash: leafHash,
            RootHash: rootHash,
            TreeSize: treeSize,
            LogIndex: logIndex,
            Hashes: hashes);
    }

    /// <summary>
    /// Simplified Merkle root computation for test purposes: always hashes current||sibling
    /// left-to-right with the RFC 6962 interior-node prefix. logIndex/treeSize are accepted
    /// for signature parity but intentionally unused in this simplified variant.
    /// </summary>
    private static string ComputeMerkleRoot(string leafHash, ImmutableArray<string> hashes, long logIndex, long treeSize)
    {
        var current = Convert.FromBase64String(leafHash);
        foreach (var siblingHash in hashes)
        {
            var sibling = Convert.FromBase64String(siblingHash);
            var combined = new byte[current.Length + sibling.Length + 1];
            combined[0] = 0x01; // RFC 6962 interior node prefix
            current.CopyTo(combined, 1);
            sibling.CopyTo(combined, 1 + current.Length);
            current = SHA256.HashData(combined);
        }
        return Convert.ToBase64String(current);
    }

    /// <summary>Entry whose signature matches its body and whose inclusion proof is self-consistent.</summary>
    private RekorEntryForVerification CreateValidRekorEntry()
    {
        using var ed25519 = new Ed25519Signature();
        var body = Encoding.UTF8.GetBytes("""{"test":"data"}""");
        var signature = ed25519.Sign(body);

        return new RekorEntryForVerification(
            EntryUuid: Guid.NewGuid().ToString("N"),
            LogIndex: 12345,
            IntegratedTime: FixedTimestamp.AddMinutes(-5),
            Body: body,
            Signature: signature,
            PublicKey: ed25519.ExportPublicKey(),
            SignatureAlgorithm: "ed25519",
            InclusionProof: CreateValidInclusionProof(
                CreateDeterministicHash("leaf-12345"),
                100000,
                12345));
    }

    /// <summary>Entry identical in shape to a valid one, but with an all-zero signature.</summary>
    private RekorEntryForVerification CreateRekorEntryWithInvalidSignature()
    {
        using var ed25519 = new Ed25519Signature();
        var body = Encoding.UTF8.GetBytes("""{"test":"data"}""");
        var invalidSignature = new byte[64]; // All zeros

        return new RekorEntryForVerification(
            EntryUuid: Guid.NewGuid().ToString("N"),
            LogIndex: 12346,
            IntegratedTime: FixedTimestamp.AddMinutes(-5),
            Body: body,
            Signature: invalidSignature,
            PublicKey: ed25519.ExportPublicKey(),
            SignatureAlgorithm: "ed25519",
            InclusionProof: CreateValidInclusionProof(
                CreateDeterministicHash("leaf-12346"),
                100000,
                12346));
    }

    /// <summary>
    /// Simple Ed25519-shaped wrapper for test signing. NOT real Ed25519 and not
    /// cryptographically secure: keys and signatures are SHA-256 constructions that merely
    /// have the right sizes (32-byte public key, 64-byte signature).
    /// </summary>
    private sealed class Ed25519Signature : IDisposable
    {
        private readonly byte[] _privateKey;
        private readonly byte[] _publicKey;

        public Ed25519Signature()
        {
            // Random 32-byte "private key"; "public key" is just its SHA-256.
            using var rng = RandomNumberGenerator.Create();
            _privateKey = new byte[32];
            rng.GetBytes(_privateKey);
            _publicKey = SHA256.HashData(_privateKey);
        }

        /// <summary>Produces a deterministic 64-byte pseudo-signature of the data under this key.</summary>
        public byte[] Sign(byte[] data)
        {
            var combined = new byte[_privateKey.Length + data.Length];
            _privateKey.CopyTo(combined, 0);
            data.CopyTo(combined, _privateKey.Length);
            var hash = SHA256.HashData(combined);

            // Duplicate the 32-byte hash to fill the 64-byte Ed25519 signature size.
            var signature = new byte[64];
            hash.CopyTo(signature, 0);
            hash.CopyTo(signature, 32);
            return signature;
        }

        /// <summary>Returns a defensive copy so callers cannot mutate the stored key.</summary>
        public byte[] ExportPublicKey() => _publicKey.ToArray();

        public void Dispose()
        {
            // Best-effort scrub of the key material.
            Array.Clear(_privateKey, 0, _privateKey.Length);
        }
    }
}
// Supporting types for tests (would be in main project)
/// <summary>
/// Merkle inclusion proof for a single log entry: the leaf hash, the tree root it should
/// chain up to, the tree size and leaf position, and the sibling hashes along the path.
/// In these tests all hashes are base64-encoded SHA-256 strings.
/// </summary>
public record InclusionProofData(
    string LeafHash,
    string RootHash,
    long TreeSize,
    long LogIndex,
    ImmutableArray<string> Hashes);
/// <summary>
/// Everything the verification service needs to check one Rekor entry: the raw body,
/// its signature and public key (algorithm named by <paramref name="SignatureAlgorithm"/>,
/// e.g. "ed25519" in these tests), the log integration time, and the inclusion proof.
/// </summary>
public record RekorEntryForVerification(
    string EntryUuid,
    long LogIndex,
    DateTimeOffset IntegratedTime,
    byte[] Body,
    byte[] Signature,
    byte[] PublicKey,
    string SignatureAlgorithm,
    InclusionProofData InclusionProof);

View File

@@ -0,0 +1,415 @@
// -----------------------------------------------------------------------------
// RekorVerificationJobIntegrationTests.cs
// Sprint: SPRINT_20260117_001_ATTESTOR_periodic_rekor_verification
// Task: PRV-008 - Integration tests for verification job
// Description: Integration tests for RekorVerificationJob with mocked time and database
// -----------------------------------------------------------------------------
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Time.Testing;
using StellaOps.Attestor.Core.Verification;
using StellaOps.TestKit;
using Xunit;
namespace StellaOps.Attestor.Infrastructure.Tests.Verification;
[Trait("Category", TestCategories.Integration)]
public sealed class RekorVerificationJobIntegrationTests : IAsyncLifetime
{
    // Fixed clock (driven by FakeTimeProvider) so schedule/interval tests are deterministic.
    private static readonly DateTimeOffset FixedTimestamp = new(2026, 1, 16, 12, 0, 0, TimeSpan.Zero);

    private readonly FakeTimeProvider _timeProvider;
    private readonly InMemoryRekorEntryRepository _repository;
    private readonly InMemoryRekorVerificationStatusProvider _statusProvider;
    private readonly RekorVerificationMetrics _metrics;

    public RekorVerificationJobIntegrationTests()
    {
        _timeProvider = new FakeTimeProvider(FixedTimestamp);
        _repository = new InMemoryRekorEntryRepository();
        _statusProvider = new InMemoryRekorVerificationStatusProvider();
        _metrics = new RekorVerificationMetrics();
    }

    public Task InitializeAsync() => Task.CompletedTask;

    public Task DisposeAsync()
    {
        // Metrics own disposable instruments; release them per test-class instance.
        _metrics.Dispose();
        return Task.CompletedTask;
    }

    [Fact]
    public async Task ExecuteAsync_WithNoEntries_CompletesSuccessfully()
    {
        // Arrange
        var job = CreateJob();
        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5));

        // Act
        await job.ExecuteOnceAsync(cts.Token);

        // Assert - an empty log is still a successful run
        var status = await _statusProvider.GetStatusAsync(cts.Token);
        status.LastRunAt.Should().Be(FixedTimestamp);
        status.LastRunStatus.Should().Be(VerificationRunStatus.Success);
        status.TotalEntriesVerified.Should().Be(0);
    }

    [Fact]
    public async Task ExecuteAsync_WithValidEntries_VerifiesAll()
    {
        // Arrange
        var entries = CreateValidEntries(10);
        await _repository.InsertManyAsync(entries, CancellationToken.None);
        var job = CreateJob();
        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));

        // Act
        await job.ExecuteOnceAsync(cts.Token);

        // Assert
        var status = await _statusProvider.GetStatusAsync(cts.Token);
        status.TotalEntriesVerified.Should().Be(10);
        status.TotalEntriesFailed.Should().Be(0);
        status.FailureRate.Should().Be(0);
    }

    [Fact]
    public async Task ExecuteAsync_WithMixedEntries_TracksFailureRate()
    {
        // Arrange - 8 valid + 2 invalid => expected 20% failure rate
        var validEntries = CreateValidEntries(8);
        var invalidEntries = CreateInvalidEntries(2);
        await _repository.InsertManyAsync(validEntries.Concat(invalidEntries).ToList(), CancellationToken.None);
        var job = CreateJob();
        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));

        // Act
        await job.ExecuteOnceAsync(cts.Token);

        // Assert
        var status = await _statusProvider.GetStatusAsync(cts.Token);
        status.TotalEntriesVerified.Should().Be(8);
        status.TotalEntriesFailed.Should().Be(2);
        status.FailureRate.Should().BeApproximately(0.2, 0.01);
    }

    [Fact]
    public async Task ExecuteAsync_WithTimeSkewViolations_TracksViolations()
    {
        // Arrange - entries integrated 2h in the future vs a 1-minute tolerance
        var entries = CreateEntriesWithTimeSkew(5);
        await _repository.InsertManyAsync(entries, CancellationToken.None);
        var options = CreateOptions();
        options.Value.MaxTimeSkewSeconds = 60; // 1 minute tolerance
        var job = CreateJob(options);
        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));

        // Act
        await job.ExecuteOnceAsync(cts.Token);

        // Assert
        var status = await _statusProvider.GetStatusAsync(cts.Token);
        status.TimeSkewViolations.Should().Be(5);
    }

    [Fact]
    public async Task ExecuteAsync_RespectsScheduleInterval()
    {
        // Arrange
        var entries = CreateValidEntries(5);
        await _repository.InsertManyAsync(entries, CancellationToken.None);
        var options = CreateOptions();
        options.Value.IntervalMinutes = 60; // 1 hour
        var job = CreateJob(options);
        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30));

        // Act - first run
        await job.ExecuteOnceAsync(cts.Token);
        var statusAfterFirst = await _statusProvider.GetStatusAsync(cts.Token);

        // Advance time by 30 minutes (less than interval)
        _timeProvider.Advance(TimeSpan.FromMinutes(30));

        // Act - second run should skip
        await job.ExecuteOnceAsync(cts.Token);
        var statusAfterSecond = await _statusProvider.GetStatusAsync(cts.Token);

        // Assert - should not have run again
        statusAfterSecond.LastRunAt.Should().Be(statusAfterFirst.LastRunAt);

        // Advance time to exceed interval (30 + 35 = 65 min > 60 min)
        _timeProvider.Advance(TimeSpan.FromMinutes(35));

        // Act - third run should execute
        await job.ExecuteOnceAsync(cts.Token);
        var statusAfterThird = await _statusProvider.GetStatusAsync(cts.Token);

        // Assert - should have run
        statusAfterThird.LastRunAt.Should().BeAfter(statusAfterFirst.LastRunAt!.Value);
    }

    [Fact]
    public async Task ExecuteAsync_WithSamplingEnabled_VerifiesSubset()
    {
        // Arrange - the in-memory repository samples with a fixed seed, so the subset is stable
        var entries = CreateValidEntries(100);
        await _repository.InsertManyAsync(entries, CancellationToken.None);
        var options = CreateOptions();
        options.Value.SampleRate = 0.1; // 10% sampling
        options.Value.BatchSize = 100;
        var job = CreateJob(options);
        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));

        // Act
        await job.ExecuteOnceAsync(cts.Token);

        // Assert
        var status = await _statusProvider.GetStatusAsync(cts.Token);
        status.TotalEntriesVerified.Should().BeLessThanOrEqualTo(15); // ~10% with some variance
        status.TotalEntriesVerified.Should().BeGreaterThan(0);
    }

    [Fact]
    public async Task ExecuteAsync_WithBatchSize_ProcessesInBatches()
    {
        // Arrange - 25 entries / batch size 10 => 3 batches, all processed
        var entries = CreateValidEntries(25);
        await _repository.InsertManyAsync(entries, CancellationToken.None);
        var options = CreateOptions();
        options.Value.BatchSize = 10;
        var job = CreateJob(options);
        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));

        // Act
        await job.ExecuteOnceAsync(cts.Token);

        // Assert
        var status = await _statusProvider.GetStatusAsync(cts.Token);
        status.TotalEntriesVerified.Should().Be(25);
    }

    [Fact]
    public async Task ExecuteAsync_RootConsistencyCheck_DetectsTampering()
    {
        // Arrange
        var entries = CreateValidEntries(5);
        await _repository.InsertManyAsync(entries, CancellationToken.None);
        // Set a stored root that doesn't match what the job will observe
        await _repository.SetStoredRootAsync("inconsistent-root-hash", 1000, CancellationToken.None);
        var job = CreateJob();
        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));

        // Act
        await job.ExecuteOnceAsync(cts.Token);

        // Assert
        var status = await _statusProvider.GetStatusAsync(cts.Token);
        status.RootConsistent.Should().BeFalse();
        status.CriticalAlertCount.Should().BeGreaterThan(0);
    }

    [Fact]
    public async Task ExecuteAsync_UpdatesLastRunDuration()
    {
        // Arrange
        var entries = CreateValidEntries(10);
        await _repository.InsertManyAsync(entries, CancellationToken.None);
        var job = CreateJob();
        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(10));

        // Act
        await job.ExecuteOnceAsync(cts.Token);

        // Assert
        // NOTE(review): assumes the job measures duration from real elapsed time (stopwatch);
        // if it measured via the injected FakeTimeProvider the duration would be zero — confirm.
        var status = await _statusProvider.GetStatusAsync(cts.Token);
        status.LastRunDuration.Should().NotBeNull();
        status.LastRunDuration!.Value.Should().BeGreaterThan(TimeSpan.Zero);
    }

    [Fact]
    public async Task ExecuteAsync_WhenDisabled_SkipsExecution()
    {
        // Arrange
        var entries = CreateValidEntries(5);
        await _repository.InsertManyAsync(entries, CancellationToken.None);
        var options = CreateOptions();
        options.Value.Enabled = false;
        var job = CreateJob(options);
        using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5));

        // Act
        await job.ExecuteOnceAsync(cts.Token);

        // Assert - disabled job must leave status untouched
        var status = await _statusProvider.GetStatusAsync(cts.Token);
        status.LastRunAt.Should().BeNull();
        status.TotalEntriesVerified.Should().Be(0);
    }

    [Fact]
    public async Task ExecuteAsync_WithCancellation_StopsGracefully()
    {
        // Arrange
        var entries = CreateValidEntries(1000); // Large batch
        await _repository.InsertManyAsync(entries, CancellationToken.None);
        var options = CreateOptions();
        options.Value.BatchSize = 10; // Small batches to allow cancellation between batches
        var job = CreateJob(options);
        using var cts = new CancellationTokenSource();
        cts.CancelAfter(TimeSpan.FromMilliseconds(100)); // Cancel quickly (real time, not fake clock)

        // Act & Assert - cancellation must be absorbed, not surfaced as an exception
        await job.Invoking(j => j.ExecuteOnceAsync(cts.Token))
            .Should().NotThrowAsync();
    }

    // ----- Helper methods -----

    /// <summary>Wires the job under test to the in-memory repository, status provider, metrics and fake clock.</summary>
    private RekorVerificationJob CreateJob(IOptions<RekorVerificationOptions>? options = null)
    {
        return new RekorVerificationJob(
            options ?? CreateOptions(),
            _repository,
            _statusProvider,
            _metrics,
            _timeProvider,
            NullLogger<RekorVerificationJob>.Instance);
    }

    /// <summary>Default options: enabled, hourly interval, full sampling, 5-minute skew tolerance.</summary>
    private static IOptions<RekorVerificationOptions> CreateOptions()
    {
        return Options.Create(new RekorVerificationOptions
        {
            Enabled = true,
            IntervalMinutes = 60,
            BatchSize = 100,
            SampleRate = 1.0, // 100% by default
            MaxTimeSkewSeconds = 300,
            AlertOnRootInconsistency = true
        });
    }

    // The three factories below read no instance state, so they are static (CA1822).

    /// <summary>Entries flagged valid, never verified, integrated shortly before the fixed clock.</summary>
    private static List<RekorEntryRecord> CreateValidEntries(int count)
    {
        return Enumerable.Range(0, count)
            .Select(i => new RekorEntryRecord(
                EntryUuid: $"uuid-{i:D8}",
                LogIndex: 1000 + i,
                IntegratedTime: FixedTimestamp.AddMinutes(-i),
                BodyHash: $"hash-{i:D8}",
                SignatureValid: true,
                InclusionProofValid: true,
                LastVerifiedAt: null))
            .ToList();
    }

    /// <summary>Entries whose signature and inclusion-proof flags are both false.</summary>
    private static List<RekorEntryRecord> CreateInvalidEntries(int count)
    {
        return Enumerable.Range(0, count)
            .Select(i => new RekorEntryRecord(
                EntryUuid: $"invalid-uuid-{i:D8}",
                LogIndex: 2000 + i,
                IntegratedTime: FixedTimestamp.AddMinutes(-i),
                BodyHash: $"invalid-hash-{i:D8}",
                SignatureValid: false,
                InclusionProofValid: false,
                LastVerifiedAt: null))
            .ToList();
    }

    /// <summary>Otherwise-valid entries integrated 2 hours in the future relative to the fake clock.</summary>
    private static List<RekorEntryRecord> CreateEntriesWithTimeSkew(int count)
    {
        return Enumerable.Range(0, count)
            .Select(i => new RekorEntryRecord(
                EntryUuid: $"skew-uuid-{i:D8}",
                LogIndex: 3000 + i,
                IntegratedTime: FixedTimestamp.AddHours(2), // 2 hours in future = skew
                BodyHash: $"skew-hash-{i:D8}",
                SignatureValid: true,
                InclusionProofValid: true,
                LastVerifiedAt: null))
            .ToList();
    }
}
// Supporting types for tests
/// <summary>
/// Test-side snapshot of a stored Rekor entry: identity (uuid, log index, integration time),
/// precomputed verification flags the fake repository serves back, and the timestamp of the
/// last verification pass (null until the job verifies it).
/// </summary>
public record RekorEntryRecord(
    string EntryUuid,
    long LogIndex,
    DateTimeOffset IntegratedTime,
    string BodyHash,
    bool SignatureValid,
    bool InclusionProofValid,
    DateTimeOffset? LastVerifiedAt);
/// <summary>
/// In-memory stand-in for the Rekor entry store used by the integration tests.
/// Holds entries in a plain list plus a single stored root hash/tree size.
/// Not synchronized; tests drive it from one logical flow at a time.
/// </summary>
public sealed class InMemoryRekorEntryRepository : IRekorEntryRepository
{
    private readonly List<RekorEntryRecord> _records = new();
    private string? _rootHash;
    private long _treeSize;

    /// <summary>Appends the given entries to the store.</summary>
    public Task InsertManyAsync(IEnumerable<RekorEntryRecord> entries, CancellationToken ct)
    {
        _records.AddRange(entries);
        return Task.CompletedTask;
    }

    /// <summary>Returns up to <paramref name="limit"/> entries that have never been verified.</summary>
    public Task<IReadOnlyList<RekorEntryRecord>> GetUnverifiedEntriesAsync(int limit, CancellationToken ct)
    {
        IReadOnlyList<RekorEntryRecord> pending = _records
            .Where(record => record.LastVerifiedAt is null)
            .Take(limit)
            .ToList();
        return Task.FromResult(pending);
    }

    /// <summary>Returns a seeded pseudo-random subset (seed 42 keeps the sample stable across runs).</summary>
    public Task<IReadOnlyList<RekorEntryRecord>> GetSampledEntriesAsync(double sampleRate, int limit, CancellationToken ct)
    {
        var rng = new Random(42); // Deterministic for tests
        IReadOnlyList<RekorEntryRecord> sampled = _records
            .Where(_ => rng.NextDouble() < sampleRate)
            .Take(limit)
            .ToList();
        return Task.FromResult(sampled);
    }

    /// <summary>
    /// Stamps the first entry matching <paramref name="entryUuid"/> with the verification time.
    /// NOTE(review): the <paramref name="verified"/> flag is not persisted — the record only
    /// tracks the timestamp; confirm that is intentional for these fakes.
    /// </summary>
    public Task UpdateVerificationStatusAsync(string entryUuid, bool verified, DateTimeOffset verifiedAt, CancellationToken ct)
    {
        for (var i = 0; i < _records.Count; i++)
        {
            if (_records[i].EntryUuid == entryUuid)
            {
                _records[i] = _records[i] with { LastVerifiedAt = verifiedAt };
                break;
            }
        }
        return Task.CompletedTask;
    }

    /// <summary>Records the root hash and tree size the job should compare against.</summary>
    public Task SetStoredRootAsync(string rootHash, long treeSize, CancellationToken ct)
    {
        _rootHash = rootHash;
        _treeSize = treeSize;
        return Task.CompletedTask;
    }

    /// <summary>Returns the stored root (null hash and size 0 until one is set).</summary>
    public Task<(string? RootHash, long TreeSize)> GetStoredRootAsync(CancellationToken ct)
        => Task.FromResult((_rootHash, _treeSize));
}