save progress

This commit is contained in:
StellaOps Bot
2026-01-02 15:52:31 +02:00
parent 2dec7e6a04
commit f46bde5575
174 changed files with 20793 additions and 8307 deletions

View File

@@ -0,0 +1,9 @@
using StellaOps.Findings.Ledger.Domain;
namespace LedgerReplayHarness;
/// <summary>
/// One parsed fixture line: the reconstructed ledger event record plus the timestamp it was
/// recorded at and the fixture-supplied expected hashes (taken from the line's
/// <c>event_hash</c> / <c>merkle_root</c> fields), or <c>null</c> when the fixture omits them.
/// </summary>
/// <param name="Record">Ledger event rebuilt from the line's canonical envelope.</param>
/// <param name="RecordedAt">Recorded-at timestamp for the line (falls back to the reader's clock).</param>
/// <param name="ExpectedEventHash">Expected event hash from the fixture, if present.</param>
/// <param name="ExpectedMerkleRoot">Expected Merkle root from the fixture, if present.</param>
internal sealed record HarnessFixtureEntry(
LedgerEventRecord Record,
DateTimeOffset RecordedAt,
string? ExpectedEventHash,
string? ExpectedMerkleRoot);

View File

@@ -0,0 +1,14 @@
namespace LedgerReplayHarness;
/// <summary>
/// Raised when a replay fixture line cannot be parsed or validated. The exception message is
/// prefixed with <c>&lt;fileName&gt;:&lt;lineNumber&gt;</c> so failures point directly at the
/// offending input line.
/// </summary>
public sealed class HarnessFixtureException : Exception
{
    /// <summary>Creates a fixture exception for a specific file and line.</summary>
    /// <param name="fixturePath">Full path of the fixture being read; must not be null.</param>
    /// <param name="lineNumber">1-based line number of the offending line.</param>
    /// <param name="message">Description of the problem.</param>
    /// <param name="innerException">Optional underlying parse error.</param>
    /// <exception cref="ArgumentNullException"><paramref name="fixturePath"/> is null.</exception>
    public HarnessFixtureException(string fixturePath, int lineNumber, string message, Exception? innerException = null)
        : base(FormatMessage(fixturePath, lineNumber, message), innerException)
    {
        FixturePath = fixturePath;
        LineNumber = lineNumber;
    }

    /// <summary>Full path of the fixture file that failed.</summary>
    public string FixturePath { get; }

    /// <summary>1-based line number within the fixture.</summary>
    public int LineNumber { get; }

    // Validates eagerly so a null path fails loudly here instead of silently producing a
    // message with a missing file name (Path.GetFileName(null) returns null, which would
    // have rendered as ":<line> <message>").
    private static string FormatMessage(string fixturePath, int lineNumber, string message)
    {
        ArgumentNullException.ThrowIfNull(fixturePath);
        return $"{Path.GetFileName(fixturePath)}:{lineNumber} {message}";
    }
}

View File

@@ -0,0 +1,222 @@
using System.Globalization;
using System.Runtime.CompilerServices;
using System.Text.Json;
using System.Text.Json.Nodes;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Hashing;
namespace LedgerReplayHarness;
/// <summary>
/// Parses JSON-lines replay fixtures into <see cref="HarnessFixtureEntry"/> instances,
/// recomputing event and Merkle-leaf hashes from each line's canonical envelope.
/// </summary>
internal static class HarnessFixtureReader
{
    /// <summary>
    /// Streams entries from <paramref name="fixture"/> one JSON line at a time. Blank lines and
    /// lines without a <c>canonical_envelope</c> object or a <c>sequence_no</c> property are
    /// skipped; structurally invalid lines raise <see cref="HarnessFixtureException"/> carrying
    /// the 1-based line number.
    /// </summary>
    /// <param name="fixture">JSON-lines fixture file to read.</param>
    /// <param name="tenant">Tenant identifier stamped onto every emitted record.</param>
    /// <param name="timeProvider">Clock used when a line has no <c>recorded_at</c> value.</param>
    /// <param name="cancellationToken">Cancels enumeration, including a pending line read.</param>
    public static async IAsyncEnumerable<HarnessFixtureEntry> ReadEntriesAsync(
        FileInfo fixture,
        string tenant,
        TimeProvider timeProvider,
        [EnumeratorCancellation] CancellationToken cancellationToken)
    {
        await using var stream = fixture.OpenRead();
        using var reader = new StreamReader(stream);
        var recordedAtBase = timeProvider.GetUtcNow();
        string? line;
        var lineNumber = 0;
        // FIX: pass the token to ReadLineAsync (.NET 7+) so cancellation can interrupt a
        // pending read instead of only being observed between lines.
        while ((line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false)) is not null)
        {
            lineNumber++;
            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            JsonNode? parsed;
            try
            {
                parsed = JsonNode.Parse(line);
            }
            catch (JsonException ex)
            {
                throw new HarnessFixtureException(fixture.FullName, lineNumber, "invalid json", ex);
            }

            // FIX: the previous `JsonNode.Parse(line)?.AsObject()` threw an uncaught
            // InvalidOperationException for valid-but-non-object JSON (e.g. `42` or `[]`);
            // pattern-match instead so those lines surface as fixture errors.
            if (parsed is not JsonObject node)
            {
                throw new HarnessFixtureException(fixture.FullName, lineNumber, "expected json object");
            }

            if (!node.TryGetPropertyValue("canonical_envelope", out var envelopeNode) || envelopeNode is not JsonObject envelope)
            {
                continue;
            }

            if (!TryGetSequence(node, out var sequence, out var hasSequence))
            {
                if (hasSequence)
                {
                    // sequence_no present but unparseable is an error; absent is a silent skip.
                    throw new HarnessFixtureException(fixture.FullName, lineNumber, "sequence_no invalid");
                }

                continue;
            }

            DateTimeOffset recordedAt = recordedAtBase;
            if (node.TryGetPropertyValue("recorded_at", out var recordedAtNode) && recordedAtNode is not null)
            {
                if (!TryGetDateTime(recordedAtNode, out recordedAt))
                {
                    throw new HarnessFixtureException(fixture.FullName, lineNumber, "recorded_at invalid");
                }
            }

            // occurred_at defaults to recorded_at when the envelope omits it.
            DateTimeOffset occurredAt = recordedAt;
            if (envelope.TryGetPropertyValue("occurred_at", out var occurredAtNode) && occurredAtNode is not null)
            {
                if (!TryGetDateTime(occurredAtNode, out occurredAt))
                {
                    throw new HarnessFixtureException(fixture.FullName, lineNumber, "occurred_at invalid");
                }
            }

            var chainId = TryGetGuid(envelope, "chain_id", Guid.Empty);
            var eventId = TryGetGuid(envelope, "event_id", Guid.Empty);
            var eventType = TryGetString(envelope, "event_type") ?? string.Empty;
            var policyVersion = TryGetString(envelope, "policy_version") ?? string.Empty;
            var findingId = TryGetString(envelope, "finding_id") ?? string.Empty;
            var artifactId = TryGetString(envelope, "artifact_id") ?? string.Empty;
            var sourceRunId = TryGetGuidNullable(envelope, "source_run_id");
            var actorId = TryGetString(envelope, "actor_id") ?? "system";
            var actorType = TryGetString(envelope, "actor_type") ?? "system";
            var previousHash = TryGetString(envelope, "previous_hash") ?? string.Empty;

            // Hashes are always recomputed from the envelope; the fixture's own values (if
            // present) travel alongside so the caller can compare.
            var computed = LedgerHashing.ComputeHashes(envelope, sequence);
            var record = new LedgerEventRecord(
                tenant,
                chainId,
                sequence,
                eventId,
                eventType,
                policyVersion,
                findingId,
                artifactId,
                sourceRunId,
                actorId,
                actorType,
                occurredAt,
                recordedAt,
                envelope,
                computed.EventHash,
                previousHash,
                computed.MerkleLeafHash,
                computed.CanonicalJson);

            var expectedEventHash = TryGetString(node, "event_hash");
            var expectedMerkleRoot = TryGetString(node, "merkle_root");
            yield return new HarnessFixtureEntry(record, recordedAt, expectedEventHash, expectedMerkleRoot);
        }
    }

    // Distinguishes "sequence_no absent" (hasSequence=false, caller skips the line) from
    // "present but not a valid integer" (hasSequence=true, caller raises a fixture error).
    private static bool TryGetSequence(JsonObject node, out long sequence, out bool hasSequence)
    {
        sequence = 0;
        hasSequence = false;
        if (node.TryGetPropertyValue("sequence_no", out var seqNode) && seqNode is not null)
        {
            hasSequence = true;
            return TryGetLong(seqNode, out sequence);
        }

        return false;
    }

    // Returns fallback when the property is missing or not a parseable GUID string.
    private static Guid TryGetGuid(JsonObject node, string name, Guid fallback)
    {
        var value = TryGetString(node, name);
        return value is not null && Guid.TryParse(value, out var guid) ? guid : fallback;
    }

    // Returns null when the property is missing, blank, or not a parseable GUID string.
    private static Guid? TryGetGuidNullable(JsonObject node, string name)
    {
        var value = TryGetString(node, name);
        if (string.IsNullOrWhiteSpace(value))
        {
            return null;
        }

        return Guid.TryParse(value, out var guid) ? guid : null;
    }

    private static bool TryGetLong(JsonNode node, out long value)
    {
        value = 0;
        if (node is JsonValue jsonValue)
        {
            try
            {
                return jsonValue.TryGetValue(out value);
            }
            catch
            {
                // TryGetValue can still throw for incompatible underlying kinds; treat as "not a long".
                return false;
            }
        }

        return false;
    }

    private static string? TryGetString(JsonObject node, string name)
    {
        if (node.TryGetPropertyValue(name, out var nodeValue) && nodeValue is not null)
        {
            return TryGetString(nodeValue);
        }

        return null;
    }

    private static string? TryGetString(JsonNode node)
    {
        if (node is JsonValue jsonValue)
        {
            try
            {
                return jsonValue.GetValue<string>();
            }
            catch
            {
                // Non-string JSON value (number, bool, ...) — treat as absent.
                return null;
            }
        }

        return null;
    }

    // Accepts either a native DateTimeOffset value or an ISO-8601 ("O" round-trip) string.
    private static bool TryGetDateTime(JsonNode node, out DateTimeOffset value)
    {
        value = default;
        if (node is JsonValue jsonValue)
        {
            try
            {
                if (jsonValue.TryGetValue(out value))
                {
                    return true;
                }
            }
            catch
            {
                return false;
            }

            if (jsonValue.TryGetValue(out string? stringValue) &&
                !string.IsNullOrWhiteSpace(stringValue) &&
                DateTimeOffset.TryParse(stringValue, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out value))
            {
                return true;
            }
        }

        return false;
    }
}

View File

@@ -1,9 +1,6 @@
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Text.Json;
using System.Text.Json.Nodes;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Hashing;
namespace LedgerReplayHarness;
@@ -11,11 +8,15 @@ public sealed class HarnessRunner
{
private readonly ILedgerClient _client;
private readonly int _maxParallel;
private readonly TimeProvider _timeProvider;
private readonly bool _allowParallel;
public HarnessRunner(ILedgerClient client, int maxParallel = 4)
public HarnessRunner(ILedgerClient client, int maxParallel = 4, TimeProvider? timeProvider = null, bool allowParallel = false)
{
_client = client ?? throw new ArgumentNullException(nameof(client));
_maxParallel = maxParallel <= 0 ? 1 : maxParallel;
_timeProvider = timeProvider ?? TimeProvider.System;
_allowParallel = allowParallel;
}
public async Task<int> RunAsync(IEnumerable<string> fixtures, string tenant, string reportPath, CancellationToken cancellationToken)
@@ -34,89 +35,80 @@ public sealed class HarnessRunner
var hashesValid = true;
DateTimeOffset? earliest = null;
DateTimeOffset? latest = null;
var leafHashes = new List<string>();
var leafEntries = new List<(Guid ChainId, long Sequence, string LeafHash)>();
string? expectedMerkleRoot = null;
var latencies = new ConcurrentBag<double>();
var swTotal = Stopwatch.StartNew();
var throttler = new TaskThrottler(_maxParallel);
TaskThrottler? throttler = _allowParallel && _maxParallel > 1
? new TaskThrottler(_maxParallel)
: null;
foreach (var fixture in fixtures)
var orderedFixtures = fixtures.OrderBy(f => f, StringComparer.Ordinal).ToArray();
foreach (var fixture in orderedFixtures)
{
await foreach (var line in ReadLinesAsync(fixture, cancellationToken))
var fixtureInfo = new FileInfo(fixture);
await foreach (var entry in HarnessFixtureReader.ReadEntriesAsync(fixtureInfo, tenant, _timeProvider, cancellationToken))
{
if (string.IsNullOrWhiteSpace(line)) continue;
var node = JsonNode.Parse(line)?.AsObject();
if (node is null) continue;
eventCount++;
var recordedAt = node["recorded_at"]?.GetValue<DateTimeOffset>() ?? DateTimeOffset.UtcNow;
earliest = earliest is null ? recordedAt : DateTimeOffset.Compare(recordedAt, earliest.Value) < 0 ? recordedAt : earliest;
latest = latest is null
? recordedAt
: DateTimeOffset.Compare(recordedAt, latest.Value) > 0 ? recordedAt : latest;
if (node["canonical_envelope"] is JsonObject envelope && node["sequence_no"] is not null)
var record = entry.Record;
if (!string.IsNullOrEmpty(entry.ExpectedEventHash) &&
!string.Equals(entry.ExpectedEventHash, record.EventHash, StringComparison.Ordinal))
{
var seq = node["sequence_no"]!.GetValue<long>();
var computed = LedgerHashing.ComputeHashes(envelope, seq);
var expected = node["event_hash"]?.GetValue<string>();
if (!string.IsNullOrEmpty(expected) && !string.Equals(expected, computed.EventHash, StringComparison.Ordinal))
{
hashesValid = false;
}
stats.UpdateHashes(computed.EventHash, computed.MerkleLeafHash);
leafHashes.Add(computed.MerkleLeafHash);
expectedMerkleRoot ??= node["merkle_root"]?.GetValue<string>();
// enqueue for concurrent append
var record = new LedgerEventRecord(
tenant,
envelope["chain_id"]?.GetValue<Guid>() ?? Guid.Empty,
seq,
envelope["event_id"]?.GetValue<Guid>() ?? Guid.Empty,
envelope["event_type"]?.GetValue<string>() ?? string.Empty,
envelope["policy_version"]?.GetValue<string>() ?? string.Empty,
envelope["finding_id"]?.GetValue<string>() ?? string.Empty,
envelope["artifact_id"]?.GetValue<string>() ?? string.Empty,
envelope["source_run_id"]?.GetValue<Guid?>(),
envelope["actor_id"]?.GetValue<string>() ?? "system",
envelope["actor_type"]?.GetValue<string>() ?? "system",
envelope["occurred_at"]?.GetValue<DateTimeOffset>() ?? recordedAt,
recordedAt,
envelope,
computed.EventHash,
envelope["previous_hash"]?.GetValue<string>() ?? string.Empty,
computed.MerkleLeafHash,
computed.CanonicalJson);
// fire-and-track latency
await throttler.RunAsync(async () =>
{
var sw = Stopwatch.StartNew();
await _client.AppendAsync(record, cancellationToken).ConfigureAwait(false);
sw.Stop();
latencies.Add(sw.Elapsed.TotalMilliseconds);
}, cancellationToken).ConfigureAwait(false);
hashesValid = false;
}
earliest = earliest is null ? entry.RecordedAt : DateTimeOffset.Compare(entry.RecordedAt, earliest.Value) < 0 ? entry.RecordedAt : earliest;
latest = latest is null
? entry.RecordedAt
: DateTimeOffset.Compare(entry.RecordedAt, latest.Value) > 0 ? entry.RecordedAt : latest;
stats.UpdateHashes(record.EventHash, record.MerkleLeafHash);
leafEntries.Add((record.ChainId, record.SequenceNumber, record.MerkleLeafHash));
expectedMerkleRoot ??= entry.ExpectedMerkleRoot;
if (throttler is null)
{
var sw = Stopwatch.StartNew();
await _client.AppendAsync(record, cancellationToken).ConfigureAwait(false);
sw.Stop();
latencies.Add(sw.Elapsed.TotalMilliseconds);
Interlocked.Increment(ref eventCount);
continue;
}
await throttler.RunAsync(async () =>
{
var sw = Stopwatch.StartNew();
await _client.AppendAsync(record, cancellationToken).ConfigureAwait(false);
sw.Stop();
latencies.Add(sw.Elapsed.TotalMilliseconds);
Interlocked.Increment(ref eventCount);
}, cancellationToken).ConfigureAwait(false);
}
}
await throttler.DrainAsync(cancellationToken).ConfigureAwait(false);
if (throttler is not null)
{
await throttler.DrainAsync(cancellationToken).ConfigureAwait(false);
}
swTotal.Stop();
var latencyArray = latencies.ToArray();
Array.Sort(latencyArray);
double p95 = latencyArray.Length == 0 ? 0 : latencyArray[(int)Math.Ceiling(latencyArray.Length * 0.95) - 1];
string? computedRoot = leafHashes.Count == 0 ? null : MerkleCalculator.ComputeRoot(leafHashes);
var orderedLeafHashes = leafEntries
.OrderBy(entry => entry.ChainId)
.ThenBy(entry => entry.Sequence)
.Select(entry => entry.LeafHash)
.ToList();
string? computedRoot = orderedLeafHashes.Count == 0 ? null : MerkleCalculator.ComputeRoot(orderedLeafHashes);
var merkleOk = expectedMerkleRoot is null || string.Equals(expectedMerkleRoot, computedRoot, StringComparison.OrdinalIgnoreCase);
var report = new
{
tenant,
fixtures = fixtures.ToArray(),
fixtures = orderedFixtures,
eventsWritten = eventCount,
durationSeconds = Math.Max(swTotal.Elapsed.TotalSeconds, (latest - earliest)?.TotalSeconds ?? 0),
throughputEps = swTotal.Elapsed.TotalSeconds > 0 ? eventCount / swTotal.Elapsed.TotalSeconds : 0,
@@ -125,7 +117,7 @@ public sealed class HarnessRunner
cpuPercentMax = 0,
memoryMbMax = 0,
status = hashesValid && merkleOk ? "pass" : "fail",
timestamp = DateTimeOffset.UtcNow.ToString("O"),
timestamp = _timeProvider.GetUtcNow().ToString("O"),
hashSummary = stats.ToReport(),
merkleRoot = computedRoot,
merkleExpected = expectedMerkleRoot
@@ -136,13 +128,4 @@ public sealed class HarnessRunner
return hashesValid && merkleOk ? 0 : 1;
}
private static async IAsyncEnumerable<string> ReadLinesAsync(string path, [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken)
{
await using var stream = File.OpenRead(path);
using var reader = new StreamReader(stream);
while (!cancellationToken.IsCancellationRequested && await reader.ReadLineAsync() is { } line)
{
yield return line;
}
}
}

View File

@@ -11,4 +11,9 @@
<ItemGroup>
<PackageReference Include="System.CommandLine" />
</ItemGroup>
<ItemGroup>
<AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleToAttribute">
<_Parameter1>StellaOps.Findings.Tools.LedgerReplayHarness.Tests</_Parameter1>
</AssemblyAttribute>
</ItemGroup>
</Project>

View File

@@ -23,7 +23,12 @@ var reportOption = new Option<string>("--report")
var parallelOption = new Option<int>("--maxParallel")
{
Description = "Maximum parallelism when sending events",
DefaultValueFactory = _ => 4
DefaultValueFactory = _ => 1
};
var allowParallelOption = new Option<bool>("--allowParallel")
{
Description = "Allow non-deterministic parallel event ingestion"
};
var root = new RootCommand("Findings Ledger replay & determinism harness");
@@ -31,6 +36,7 @@ root.Add(fixtureOption);
root.Add(tenantOption);
root.Add(reportOption);
root.Add(parallelOption);
root.Add(allowParallelOption);
root.SetAction(async (parseResult, ct) =>
{
@@ -38,8 +44,9 @@ root.SetAction(async (parseResult, ct) =>
var tenant = parseResult.GetValue(tenantOption)!;
var report = parseResult.GetValue(reportOption)!;
var maxParallel = parseResult.GetValue(parallelOption);
var allowParallel = parseResult.GetValue(allowParallelOption);
var runner = new HarnessRunner(new InMemoryLedgerClient(), maxParallel);
var runner = new HarnessRunner(new InMemoryLedgerClient(), maxParallel, allowParallel: allowParallel);
var exitCode = await runner.RunAsync(fixtures, tenant, report, ct);
return exitCode;
});