using System.Collections.Concurrent;
using System.Diagnostics;
using System.Text.Json;

namespace LedgerReplayHarness;

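/// <summary>
/// Replays ledger event fixtures through an <see cref="ILedgerClient"/> and produces a JSON
/// report covering hash validation, Merkle root verification, throughput, and latency.
/// </summary>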
public sealed class HarnessRunner
{
    private readonly ILedgerClient _client;
    private readonly int _maxParallel;
    private readonly TimeProvider _timeProvider;
    private readonly bool _allowParallel;

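    /// <summary>
    /// Creates a runner. <paramref name="maxParallel"/> is clamped to at least 1, and parallel
    /// appends are used only when <paramref name="allowParallel"/> is true and more than one
    /// slot is available.
    /// </summary>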
    public HarnessRunner(ILedgerClient client, int maxParallel = 4, TimeProvider? timeProvider = null, bool allowParallel = false)
    {
        _client = client ?? throw new ArgumentNullException(nameof(client));
        _maxParallel = maxParallel <= 0 ? 1 : maxParallel;
        _timeProvider = timeProvider ?? TimeProvider.System;
        _allowParallel = allowParallel;
    }

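    /// <summary>
    /// Replays every entry from <paramref name="fixtures"/> against the ledger client for the
    /// given <paramref name="tenant"/>, writes a JSON report to <paramref name="reportPath"/>,
    /// and returns 0 when all hash and Merkle checks pass, otherwise 1.
    /// </summary>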
    public async Task<int> RunAsync(IEnumerable<string> fixtures, string tenant, string reportPath, CancellationToken cancellationToken)
    {
        if (fixtures is null)
        {
            throw new ArgumentNullException(nameof(fixtures));
        }

        var stats = new HarnessStats();

        tenant = string.IsNullOrWhiteSpace(tenant) ? "default" : tenant;
        reportPath = string.IsNullOrWhiteSpace(reportPath) ? "harness-report.json" : reportPath;

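        // Aggregates collected during the replay: event count, hash validity, observed time
        // window, Merkle leaves per chain, and per-append latency samples.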
        var eventCount = 0L;
        var hashesValid = true;
        DateTimeOffset? earliest = null;
        DateTimeOffset? latest = null;
        var leafEntries = new List<(Guid ChainId, long Sequence, string LeafHash)>();
        string? expectedMerkleRoot = null;
        var latencies = new ConcurrentBag<double>();
        var swTotal = Stopwatch.StartNew();

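        // A throttler is created only when parallel appends are both allowed and useful;
        // otherwise every append runs inline on the enumeration loop.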
        TaskThrottler? throttler = _allowParallel && _maxParallel > 1
            ? new TaskThrottler(_maxParallel)
            : null;

        // Materialize and sort the fixture paths in a single enumeration of the input sequence.
        var orderedFixtures = fixtures.OrderBy(f => f, StringComparer.Ordinal).ToArray();
        if (orderedFixtures.Length == 0)
        {
            throw new ArgumentException("At least one fixture is required.", nameof(fixtures));
        }

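        // Stream entries from each fixture in order and replay them against the ledger client.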
        foreach (var fixture in orderedFixtures)
        {
            var fixtureInfo = new FileInfo(fixture);
            await foreach (var entry in HarnessFixtureReader.ReadEntriesAsync(fixtureInfo, tenant, _timeProvider, cancellationToken))
            {
                var record = entry.Record;
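                // Mark the run as failed when a fixture declares an expected event hash that
                // does not match the hash carried on the record.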
                if (!string.IsNullOrEmpty(entry.ExpectedEventHash) &&
                    !string.Equals(entry.ExpectedEventHash, record.EventHash, StringComparison.Ordinal))
                {
                    hashesValid = false;
                }

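                // Track the earliest and latest RecordedAt timestamps seen across all entries.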
                earliest = earliest is null
                    ? entry.RecordedAt
                    : DateTimeOffset.Compare(entry.RecordedAt, earliest.Value) < 0 ? entry.RecordedAt : earliest;
                latest = latest is null
                    ? entry.RecordedAt
                    : DateTimeOffset.Compare(entry.RecordedAt, latest.Value) > 0 ? entry.RecordedAt : latest;

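                // Collect the Merkle leaf for later root verification; the first expected root
                // found in the fixtures is the one compared against.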
                stats.UpdateHashes(record.EventHash, record.MerkleLeafHash);
                leafEntries.Add((record.ChainId, record.SequenceNumber, record.MerkleLeafHash));
                expectedMerkleRoot ??= entry.ExpectedMerkleRoot;

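                // Sequential path: time the append inline when no throttler is in use.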
                if (throttler is null)
                {
                    var sw = Stopwatch.StartNew();
                    await _client.AppendAsync(record, cancellationToken).ConfigureAwait(false);
                    sw.Stop();
                    latencies.Add(sw.Elapsed.TotalMilliseconds);
                    Interlocked.Increment(ref eventCount);
                    continue;
                }

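                // Parallel path: queue the append on the throttler. Latency samples go into a
                // ConcurrentBag and the counter is incremented atomically because these
                // callbacks may run concurrently.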
                await throttler.RunAsync(async () =>
                {
                    var sw = Stopwatch.StartNew();
                    await _client.AppendAsync(record, cancellationToken).ConfigureAwait(false);
                    sw.Stop();
                    latencies.Add(sw.Elapsed.TotalMilliseconds);
                    Interlocked.Increment(ref eventCount);
                }, cancellationToken).ConfigureAwait(false);
            }
        }

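        // Wait for any in-flight throttled appends to complete before computing totals.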
        if (throttler is not null)
        {
            await throttler.DrainAsync(cancellationToken).ConfigureAwait(false);
        }
        swTotal.Stop();

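        // p95 latency via the nearest-rank method over the sorted samples.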
        var latencyArray = latencies.ToArray();
        Array.Sort(latencyArray);
        double p95 = latencyArray.Length == 0 ? 0 : latencyArray[(int)Math.Ceiling(latencyArray.Length * 0.95) - 1];

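        // Recompute the Merkle root from leaf hashes ordered by chain and sequence, then compare
        // it with the expected root from the fixtures when one was provided.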
        var orderedLeafHashes = leafEntries
            .OrderBy(entry => entry.ChainId)
            .ThenBy(entry => entry.Sequence)
            .Select(entry => entry.LeafHash)
            .ToList();
        string? computedRoot = orderedLeafHashes.Count == 0 ? null : MerkleCalculator.ComputeRoot(orderedLeafHashes);
        var merkleOk = expectedMerkleRoot is null || string.Equals(expectedMerkleRoot, computedRoot, StringComparison.OrdinalIgnoreCase);

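        // Assemble the report payload; projection lag, CPU, and memory figures are currently
        // hard-coded to zero because this runner does not measure them.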
        var report = new
        {
            tenant,
            fixtures = orderedFixtures,
            eventsWritten = eventCount,
            durationSeconds = Math.Max(swTotal.Elapsed.TotalSeconds, (latest - earliest)?.TotalSeconds ?? 0),
            throughputEps = swTotal.Elapsed.TotalSeconds > 0 ? eventCount / swTotal.Elapsed.TotalSeconds : 0,
            latencyP95Ms = p95,
            projectionLagMaxSeconds = 0,
            cpuPercentMax = 0,
            memoryMbMax = 0,
            status = hashesValid && merkleOk ? "pass" : "fail",
            timestamp = _timeProvider.GetUtcNow().ToString("O"),
            hashSummary = stats.ToReport(),
            merkleRoot = computedRoot,
            merkleExpected = expectedMerkleRoot
        };

        var json = JsonSerializer.Serialize(report, new JsonSerializerOptions { WriteIndented = true });
        await File.WriteAllTextAsync(reportPath, json, cancellationToken).ConfigureAwait(false);
        return hashesValid && merkleOk ? 0 : 1;
    }
}