save progress

StellaOps Bot
2026-01-02 15:52:31 +02:00
parent 2dec7e6a04
commit f46bde5575
174 changed files with 20793 additions and 8307 deletions

View File

@@ -0,0 +1,303 @@
using System.Globalization;
using System.Text.Json.Nodes;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Hashing;
namespace LedgerReplayHarness;
internal static class HarnessDraftParser
{
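// Parses one NDJSON fixture line into a LedgerEventDraft. Returns false with a
// terse error code (for example "chain_id invalid") at the first field that fails validation.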
public static bool TryParseDraft(
JsonObject node,
string defaultTenant,
DateTimeOffset recordedAtBase,
out LedgerEventDraft draft,
out string error)
{
draft = default!;
error = string.Empty;
if (!TryGetRequiredString(node, "tenant", out var tenantId))
{
tenantId = defaultTenant;
}
if (!TryGetRequiredGuid(node, "chain_id", out var chainId, out error))
{
return false;
}
if (!TryGetRequiredLong(node, out var sequence, out error))
{
return false;
}
if (!TryGetRequiredGuid(node, "event_id", out var eventId, out error))
{
return false;
}
if (!TryGetRequiredString(node, "event_type", out var eventType))
{
error = "event_type missing";
return false;
}
if (!TryGetRequiredString(node, "policy_version", out var policyVersion))
{
error = "policy_version missing";
return false;
}
if (!TryGetRequiredString(node, "finding_id", out var findingId))
{
error = "finding_id missing";
return false;
}
if (!TryGetRequiredString(node, "artifact_id", out var artifactId))
{
error = "artifact_id missing";
return false;
}
Guid? sourceRunId = null;
if (node.TryGetPropertyValue("source_run_id", out var sourceRunNode) &&
sourceRunNode is not null &&
TryGetString(sourceRunNode, out var sourceRunValue) &&
!string.IsNullOrWhiteSpace(sourceRunValue))
{
if (!Guid.TryParse(sourceRunValue, out var parsedSourceRun))
{
error = "source_run_id invalid";
return false;
}
sourceRunId = parsedSourceRun;
}
if (!TryGetRequiredString(node, "actor_id", out var actorId))
{
error = "actor_id missing";
return false;
}
if (!TryGetRequiredString(node, "actor_type", out var actorType))
{
error = "actor_type missing";
return false;
}
if (!TryGetRequiredDateTime(node, "occurred_at", out var occurredAt, out error))
{
return false;
}
DateTimeOffset recordedAt;
if (node.TryGetPropertyValue("recorded_at", out var recordedAtNode) && recordedAtNode is not null)
{
if (!TryGetDateTime(recordedAtNode, out recordedAt))
{
error = "recorded_at invalid";
return false;
}
}
else
{
recordedAt = recordedAtBase;
}
if (!node.TryGetPropertyValue("payload", out var payloadNode) || payloadNode is not JsonObject payload)
{
error = "payload missing";
return false;
}
JsonObject canonicalEnvelope;
try
{
canonicalEnvelope = LedgerCanonicalJsonSerializer.Canonicalize(payload);
}
catch (Exception ex)
{
error = $"payload canonicalize failed: {ex.GetType().Name}";
return false;
}
var prev = node.TryGetPropertyValue("previous_hash", out var prevNode) && prevNode is not null && TryGetString(prevNode, out var prevValue)
? prevValue
: null;
draft = new LedgerEventDraft(
tenantId,
chainId,
sequence,
eventId,
eventType,
policyVersion,
findingId,
artifactId,
sourceRunId,
actorId,
actorType,
occurredAt,
recordedAt,
payload,
canonicalEnvelope,
prev);
return true;
}
private static bool TryGetRequiredString(JsonObject node, string name, out string value)
{
value = string.Empty;
if (!node.TryGetPropertyValue(name, out var nodeValue) || nodeValue is null)
{
return false;
}
if (!TryGetString(nodeValue, out value))
{
return false;
}
return !string.IsNullOrWhiteSpace(value);
}
private static bool TryGetString(JsonNode node, out string value)
{
value = string.Empty;
if (node is JsonValue jsonValue)
{
try
{
value = jsonValue.GetValue<string>();
return true;
}
catch
{
return false;
}
}
return false;
}
private static bool TryGetRequiredGuid(JsonObject node, string name, out Guid value, out string error)
{
value = Guid.Empty;
error = string.Empty;
if (!TryGetRequiredString(node, name, out var stringValue))
{
error = $"{name} missing";
return false;
}
if (!Guid.TryParse(stringValue, out value))
{
error = $"{name} invalid";
return false;
}
return true;
}
private static bool TryGetRequiredLong(JsonObject node, out long value, out string error)
{
value = 0;
error = string.Empty;
if (node.TryGetPropertyValue("sequence_no", out var seqNode) && seqNode is not null)
{
if (TryGetLong(seqNode, out value))
{
return true;
}
error = "sequence_no invalid";
return false;
}
if (node.TryGetPropertyValue("sequence", out var altNode) && altNode is not null)
{
if (TryGetLong(altNode, out value))
{
return true;
}
error = "sequence invalid";
return false;
}
error = "sequence_no missing";
return false;
}
private static bool TryGetLong(JsonNode node, out long value)
{
value = 0;
if (node is JsonValue jsonValue)
{
try
{
return jsonValue.TryGetValue(out value);
}
catch
{
return false;
}
}
return false;
}
private static bool TryGetRequiredDateTime(JsonObject node, string name, out DateTimeOffset value, out string error)
{
value = default;
error = string.Empty;
if (!TryGetRequiredString(node, name, out var stringValue))
{
error = $"{name} missing";
return false;
}
if (!DateTimeOffset.TryParse(stringValue, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out value))
{
error = $"{name} invalid";
return false;
}
return true;
}
private static bool TryGetDateTime(JsonNode node, out DateTimeOffset value)
{
value = default;
if (node is JsonValue jsonValue)
{
try
{
if (jsonValue.TryGetValue(out value))
{
return true;
}
}
catch
{
return false;
}
if (jsonValue.TryGetValue(out string? stringValue) &&
!string.IsNullOrWhiteSpace(stringValue) &&
DateTimeOffset.TryParse(stringValue, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out value))
{
return true;
}
}
return false;
}
}
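
A minimal sketch of how the parser might be exercised from inside the harness assembly; the fixture values and tenant below are illustrative, not taken from real fixtures:

using System.Text.Json.Nodes;
// Illustrative NDJSON line; every field value here is made up.
var node = JsonNode.Parse(
    "{\"chain_id\":\"11111111-1111-1111-1111-111111111111\",\"sequence_no\":1," +
    "\"event_id\":\"22222222-2222-2222-2222-222222222222\",\"event_type\":\"test\"," +
    "\"policy_version\":\"v1\",\"finding_id\":\"f1\",\"artifact_id\":\"a1\"," +
    "\"actor_id\":\"u1\",\"actor_type\":\"user\"," +
    "\"occurred_at\":\"2025-01-01T00:00:00Z\",\"payload\":{}}")!.AsObject();
if (HarnessDraftParser.TryParseDraft(node, "tenant-a", DateTimeOffset.UtcNow, out var draft, out var error))
{
    Console.WriteLine($"parsed {draft.EventId}");
}
else
{
    Console.WriteLine($"rejected: {error}");
}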

View File

@@ -0,0 +1,14 @@
namespace LedgerReplayHarness;
public sealed class HarnessFixtureException : Exception
{
public HarnessFixtureException(string fixturePath, int lineNumber, string message, Exception? innerException = null)
: base($"{Path.GetFileName(fixturePath)}:{lineNumber} {message}", innerException)
{
FixturePath = fixturePath;
LineNumber = lineNumber;
}
public string FixturePath { get; }
public int LineNumber { get; }
}

View File

@@ -0,0 +1,55 @@
using System.Runtime.CompilerServices;
using System.Text.Json;
using System.Text.Json.Nodes;
using StellaOps.Findings.Ledger.Domain;
namespace LedgerReplayHarness;
internal static class HarnessFixtureReader
{
public static async IAsyncEnumerable<LedgerEventDraft> ReadDraftsAsync(
FileInfo file,
string tenant,
TimeProvider timeProvider,
[EnumeratorCancellation] CancellationToken cancellationToken)
{
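// Treats the fixture as NDJSON: one JSON object per line. Blank lines are skipped;
// a malformed line surfaces as HarnessFixtureException carrying the file name and line number.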
await using var stream = file.OpenRead();
using var reader = new StreamReader(stream);
var recordedAtBase = timeProvider.GetUtcNow();
string? line;
var lineNumber = 0;
while ((line = await reader.ReadLineAsync().ConfigureAwait(false)) is not null)
{
cancellationToken.ThrowIfCancellationRequested();
lineNumber++;
if (string.IsNullOrWhiteSpace(line))
{
continue;
}
JsonObject? node;
try
{
node = JsonNode.Parse(line)?.AsObject();
}
catch (JsonException ex)
{
throw new HarnessFixtureException(file.FullName, lineNumber, "invalid json", ex);
}
if (node is null)
{
throw new HarnessFixtureException(file.FullName, lineNumber, "expected json object");
}
if (!HarnessDraftParser.TryParseDraft(node, tenant, recordedAtBase, out var draft, out var error))
{
throw new HarnessFixtureException(file.FullName, lineNumber, error);
}
yield return draft;
}
}
}
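
Consumption is a plain await foreach; a sketch, assuming a hypothetical fixture path:

var file = new FileInfo("fixtures/events.ndjson"); // hypothetical path
await foreach (var draft in HarnessFixtureReader.ReadDraftsAsync(
    file, "tenant-a", TimeProvider.System, CancellationToken.None))
{
    Console.WriteLine(draft.EventId);
}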

View File

@@ -0,0 +1,33 @@
using System.Security.Cryptography;
using System.Text;
namespace LedgerReplayHarness;
internal static class HarnessMath
{
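// Percentile via linear interpolation between the closest ranks; NaN inputs are
// dropped and an empty input yields 0.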
public static double Percentile(IEnumerable<double> values, double percentile)
{
var data = values.Where(v => !double.IsNaN(v)).OrderBy(v => v).ToArray();
if (data.Length == 0)
{
return 0;
}
var rank = (percentile / 100.0) * (data.Length - 1);
var lowerIndex = (int)Math.Floor(rank);
var upperIndex = (int)Math.Ceiling(rank);
if (lowerIndex == upperIndex)
{
return data[lowerIndex];
}
var fraction = rank - lowerIndex;
return data[lowerIndex] + (data[upperIndex] - data[lowerIndex]) * fraction;
}
public static void AppendEventStreamEntry(IncrementalHash hasher, string eventHash, long sequence)
{
var payload = $"{eventHash}:{sequence}\n";
hasher.AppendData(Encoding.UTF8.GetBytes(payload));
}
}
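
A quick worked check of the interpolation, with inputs chosen purely for illustration:

// Percentile(new[] { 1d, 2d, 3d, 4d }, 95)
// rank = 0.95 * (4 - 1) = 2.85 → lower = 2, upper = 3, fraction = 0.85
// result = data[2] + (data[3] - data[2]) * 0.85 = 3 + 1 * 0.85 = 3.85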

View File

@@ -9,6 +9,11 @@
<ItemGroup>
<ProjectReference Include="..\..\StellaOps.Findings.Ledger.csproj" />
</ItemGroup>
<ItemGroup>
<AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleToAttribute">
<_Parameter1>StellaOps.Findings.Ledger.ReplayHarness.Tests</_Parameter1>
</AssemblyAttribute>
</ItemGroup>
<ItemGroup>
<PackageReference Include="System.CommandLine" />
</ItemGroup>

View File

@@ -1,7 +1,6 @@
using System.CommandLine;
using System.Diagnostics;
using System.Diagnostics.Metrics;
using System.Runtime.CompilerServices;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
@@ -21,6 +20,8 @@ using StellaOps.Findings.Ledger.Options;
using StellaOps.Findings.Ledger.Observability;
using StellaOps.Findings.Ledger.Services;
using LedgerReplayHarness;
// Command-line options
var fixturesOption = new Option<FileInfo[]>("--fixture")
{
@@ -44,7 +45,12 @@ var tenantOption = new Option<string>("--tenant")
var maxParallelOption = new Option<int>("--maxParallel")
{
Description = "Maximum concurrent append operations",
DefaultValueFactory = _ => 4
DefaultValueFactory = _ => 1
};
var allowParallelOption = new Option<bool>("--allowParallel")
{
Description = "Allow non-deterministic parallel fixture ingestion"
};
var reportOption = new Option<FileInfo?>("--report")
@@ -67,6 +73,7 @@ root.Add(fixturesOption);
root.Add(connectionOption);
root.Add(tenantOption);
root.Add(maxParallelOption);
root.Add(allowParallelOption);
root.Add(reportOption);
root.Add(metricsOption);
root.Add(expectedChecksumOption);
@@ -77,6 +84,7 @@ root.SetAction(async (parseResult, ct) =>
var connection = parseResult.GetValue(connectionOption)!;
var tenant = parseResult.GetValue(tenantOption)!;
var maxParallel = parseResult.GetValue(maxParallelOption);
var allowParallel = parseResult.GetValue(allowParallelOption);
var reportFile = parseResult.GetValue(reportOption);
var metricsFile = parseResult.GetValue(metricsOption);
var expectedChecksumsFile = parseResult.GetValue(expectedChecksumOption);
@@ -96,25 +104,23 @@ root.SetAction(async (parseResult, ct) =>
var (meterListener, metrics) = CreateMeterListener();
var sw = Stopwatch.StartNew();
long eventsWritten = 0;
var eventsWritten = new LongCounter();
var orderedFixtures = fixtures.OrderBy(f => f.FullName, StringComparer.Ordinal).ToArray();
await Parallel.ForEachAsync(fixtures, new ParallelOptions { MaxDegreeOfParallelism = maxParallel, CancellationToken = cts.Token }, async (file, token) =>
if (allowParallel && maxParallel > 1)
{
await foreach (var draft in ReadDraftsAsync(file, tenant, timeProvider, token))
await Parallel.ForEachAsync(orderedFixtures, new ParallelOptions { MaxDegreeOfParallelism = maxParallel, CancellationToken = cts.Token }, async (file, token) =>
{
var result = await writeService.AppendAsync(draft, token).ConfigureAwait(false);
if (result.Status is LedgerWriteStatus.ValidationFailed or LedgerWriteStatus.Conflict)
{
throw new InvalidOperationException($"Append failed for {draft.EventId}: {string.Join(",", result.Errors)} ({result.ConflictCode})");
}
Interlocked.Increment(ref eventsWritten);
if (eventsWritten % 50_000 == 0)
{
logger.LogInformation("Appended {Count} events...", eventsWritten);
}
await AppendFixtureAsync(file, tenant, timeProvider, writeService, logger, eventsWritten, token).ConfigureAwait(false);
}).ConfigureAwait(false);
}
else
{
foreach (var file in orderedFixtures)
{
await AppendFixtureAsync(file, tenant, timeProvider, writeService, logger, eventsWritten, cts.Token).ConfigureAwait(false);
}
}).ConfigureAwait(false);
}
// Wait for projector to catch up
await Task.Delay(TimeSpan.FromSeconds(2), cts.Token);
@@ -122,19 +128,19 @@ root.SetAction(async (parseResult, ct) =>
meterListener.RecordObservableInstruments();
var verification = await VerifyLedgerAsync(scope.ServiceProvider, tenant, eventsWritten, expectedChecksumsFile, cts.Token).ConfigureAwait(false);
var verification = await VerifyLedgerAsync(scope.ServiceProvider, tenant, eventsWritten.Value, expectedChecksumsFile, cts.Token).ConfigureAwait(false);
var writeDurations = metrics.HistDouble("ledger_write_duration_seconds").Concat(metrics.HistDouble("ledger_write_latency_seconds"));
var writeLatencyP95Ms = Percentile(writeDurations, 95) * 1000;
var rebuildP95Ms = Percentile(metrics.HistDouble("ledger_projection_rebuild_seconds"), 95) * 1000;
var writeLatencyP95Ms = HarnessMath.Percentile(writeDurations, 95) * 1000;
var rebuildP95Ms = HarnessMath.Percentile(metrics.HistDouble("ledger_projection_rebuild_seconds"), 95) * 1000;
var projectionLagSeconds = metrics.GaugeDouble("ledger_projection_lag_seconds").DefaultIfEmpty(0).Max();
var backlogEvents = metrics.GaugeLong("ledger_ingest_backlog_events").DefaultIfEmpty(0).Max();
var dbConnections = metrics.GaugeLong("ledger_db_connections_active").DefaultIfEmpty(0).Sum();
var report = new HarnessReport(
tenant,
fixtures.Select(f => f.FullName).ToArray(),
eventsWritten,
orderedFixtures.Select(f => f.FullName).ToArray(),
eventsWritten.Value,
sw.Elapsed.TotalSeconds,
Status: verification.Success ? "pass" : "fail",
WriteLatencyP95Ms: writeLatencyP95Ms,
@@ -252,82 +258,31 @@ static IHost BuildHost(string connectionString)
.Build();
}
static async IAsyncEnumerable<LedgerEventDraft> ReadDraftsAsync(FileInfo file, string tenant, TimeProvider timeProvider, [EnumeratorCancellation] CancellationToken cancellationToken)
static async Task AppendFixtureAsync(
FileInfo file,
string tenant,
TimeProvider timeProvider,
ILedgerEventWriteService writeService,
ILogger logger,
LongCounter eventsWritten,
CancellationToken cancellationToken)
{
await using var stream = file.OpenRead();
using var reader = new StreamReader(stream);
var recordedAtBase = timeProvider.GetUtcNow();
string? line;
while ((line = await reader.ReadLineAsync().ConfigureAwait(false)) is not null)
await foreach (var draft in HarnessFixtureReader.ReadDraftsAsync(file, tenant, timeProvider, cancellationToken))
{
if (string.IsNullOrWhiteSpace(line))
var result = await writeService.AppendAsync(draft, cancellationToken).ConfigureAwait(false);
if (result.Status is LedgerWriteStatus.ValidationFailed or LedgerWriteStatus.Conflict)
{
continue;
throw new InvalidOperationException($"Append failed for {draft.EventId}: {string.Join(",", result.Errors)} ({result.ConflictCode})");
}
var node = JsonNode.Parse(line)?.AsObject();
if (node is null)
var total = Interlocked.Increment(ref eventsWritten.Value);
if (total % 50_000 == 0)
{
continue;
logger.LogInformation("Appended {Count} events...", total);
}
yield return ToDraft(node, tenant, recordedAtBase);
cancellationToken.ThrowIfCancellationRequested();
}
}
static LedgerEventDraft ToDraft(JsonObject node, string defaultTenant, DateTimeOffset recordedAtBase)
{
string required(string name) => node[name]?.GetValue<string>() ?? throw new InvalidOperationException($"{name} missing");
var tenantId = node.TryGetPropertyValue("tenant", out var tenantNode)
? tenantNode!.GetValue<string>()
: defaultTenant;
var chainId = Guid.Parse(required("chain_id"));
var sequence = node["sequence_no"]?.GetValue<long>() ?? node["sequence"]?.GetValue<long>() ?? throw new InvalidOperationException("sequence_no missing");
var eventId = Guid.Parse(required("event_id"));
var eventType = required("event_type");
var policyVersion = required("policy_version");
var findingId = required("finding_id");
var artifactId = required("artifact_id");
var sourceRunId = node.TryGetPropertyValue("source_run_id", out var sourceRunNode) && sourceRunNode is not null && !string.IsNullOrWhiteSpace(sourceRunNode.GetValue<string>())
? (Guid?)Guid.Parse(sourceRunNode!.GetValue<string>())
: null;
var actorId = required("actor_id");
var actorType = required("actor_type");
var occurredAt = DateTimeOffset.Parse(required("occurred_at"));
var recordedAt = node.TryGetPropertyValue("recorded_at", out var recordedAtNode) && recordedAtNode is not null
? DateTimeOffset.Parse(recordedAtNode.GetValue<string>())
: recordedAtBase;
var payload = node.TryGetPropertyValue("payload", out var payloadNode) && payloadNode is JsonObject payloadObj
? payloadObj
: throw new InvalidOperationException("payload missing");
var canonicalEnvelope = LedgerCanonicalJsonSerializer.Canonicalize(payload);
var prev = node.TryGetPropertyValue("previous_hash", out var prevNode) ? prevNode?.GetValue<string>() : null;
return new LedgerEventDraft(
tenantId,
chainId,
sequence,
eventId,
eventType,
policyVersion,
findingId,
artifactId,
sourceRunId,
actorId,
actorType,
occurredAt,
recordedAt,
payload,
canonicalEnvelope,
prev);
}
static async Task<VerificationResult> VerifyLedgerAsync(IServiceProvider services, string tenant, long expectedEvents, FileInfo? expectedChecksumsFile, CancellationToken cancellationToken)
{
var errors = new List<string>();
@@ -377,7 +332,7 @@ static async Task<VerificationResult> VerifyLedgerAsync(IServiceProvider service
var eventHash = reader.GetString(4);
var previousHash = reader.GetString(5);
var merkleLeafHash = reader.GetString(6);
eventHasher.AppendData(Encoding.UTF8.GetBytes($"{eventHash}:{sequence}\n"));
HarnessMath.AppendEventStreamEntry(eventHasher, eventHash, sequence);
if (currentChain != chainId)
{
@@ -457,26 +412,6 @@ static async Task<VerificationResult> VerifyLedgerAsync(IServiceProvider service
return new VerificationResult(errors.Count == 0, errors, eventStreamChecksum, projectionChecksum);
}
static double Percentile(IEnumerable<double> values, double percentile)
{
var data = values.Where(v => !double.IsNaN(v)).OrderBy(v => v).ToArray();
if (data.Length == 0)
{
return 0;
}
var rank = (percentile / 100.0) * (data.Length - 1);
var lowerIndex = (int)Math.Floor(rank);
var upperIndex = (int)Math.Ceiling(rank);
if (lowerIndex == upperIndex)
{
return data[lowerIndex];
}
var fraction = rank - lowerIndex;
return data[lowerIndex] + (data[upperIndex] - data[lowerIndex]) * fraction;
}
// Local function: in a top-level program, statements must precede type declarations
static ExpectedChecksums LoadExpectedChecksums(FileInfo? file)
{
@@ -536,6 +471,11 @@ internal sealed class MetricsBag
};
}
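// Mutable box for the running total: parallel appends share one instance and
// bump it with Interlocked.Increment(ref eventsWritten.Value).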
internal sealed class LongCounter
{
public long Value;
}
// Lightweight no-op implementations of the projection/merkle services keep replay fast
internal sealed class NoOpPolicyEvaluationService : IPolicyEvaluationService
{

View File

@@ -0,0 +1,52 @@
using System.Text;
using FluentAssertions;
using Xunit;
namespace LedgerReplayHarness.Tests;
public class HarnessFixtureReaderTests
{
[Fact]
public async Task ReadDraftsAsync_ThrowsWithFixtureContextOnInvalidDate()
{
var line = "{" +
"\"chain_id\":\"11111111-1111-1111-1111-111111111111\"," +
"\"sequence_no\":1," +
"\"event_id\":\"22222222-2222-2222-2222-222222222222\"," +
"\"event_type\":\"test\"," +
"\"policy_version\":\"v1\"," +
"\"finding_id\":\"f1\"," +
"\"artifact_id\":\"a1\"," +
"\"actor_id\":\"u1\"," +
"\"actor_type\":\"user\"," +
"\"occurred_at\":\"not-a-date\"," +
"\"payload\":{}" +
"}";
var tempPath = Path.GetTempFileName();
await File.WriteAllTextAsync(tempPath, line, Encoding.UTF8, TestContext.Current.CancellationToken);
var fileInfo = new FileInfo(tempPath);
try
{
var action = async () =>
{
await foreach (var _ in HarnessFixtureReader.ReadDraftsAsync(fileInfo, "tenant-a", TimeProvider.System, TestContext.Current.CancellationToken))
{
}
};
var ex = await Assert.ThrowsAsync<HarnessFixtureException>(action);
ex.Message.Should().Contain("occurred_at invalid");
ex.Message.Should().Contain(Path.GetFileName(tempPath));
ex.LineNumber.Should().Be(1);
}
finally
{
if (File.Exists(tempPath))
{
File.Delete(tempPath);
}
}
}
}

View File

@@ -0,0 +1,30 @@
using System.Security.Cryptography;
using System.Text;
using FluentAssertions;
using Xunit;
namespace LedgerReplayHarness.Tests;
public class HarnessMathTests
{
[Fact]
public void Percentile_InterpolatesMedian()
{
var result = HarnessMath.Percentile(new[] { 1d, 2d, 3d, 4d }, 50);
result.Should().Be(2.5d);
}
[Fact]
public void AppendEventStreamEntry_ProducesStableChecksum()
{
var expectedPayload = "hash-a:1\nhash-b:2\n";
var expected = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(expectedPayload))).ToLowerInvariant();
using var hasher = IncrementalHash.CreateHash(HashAlgorithmName.SHA256);
HarnessMath.AppendEventStreamEntry(hasher, "hash-a", 1);
HarnessMath.AppendEventStreamEntry(hasher, "hash-b", 2);
var actual = Convert.ToHexString(hasher.GetHashAndReset()).ToLowerInvariant();
actual.Should().Be(expected);
}
}

View File

@@ -0,0 +1,15 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\StellaOps.Findings.Ledger\tools\LedgerReplayHarness\LedgerReplayHarness.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,39 @@
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
using FluentAssertions;
using Xunit;
namespace LedgerReplayHarness.Tests;
public class HarnessFixtureReaderTests
{
[Fact]
public async Task ReadEntriesAsync_ThrowsWithFixtureContextOnInvalidJson()
{
var tempPath = Path.GetTempFileName();
await File.WriteAllTextAsync(tempPath, "{}\n{", Encoding.UTF8, TestContext.Current.CancellationToken);
var fileInfo = new FileInfo(tempPath);
try
{
var action = async () =>
{
await foreach (var _ in HarnessFixtureReader.ReadEntriesAsync(fileInfo, "tenant-a", TimeProvider.System, TestContext.Current.CancellationToken))
{
}
};
var ex = await Assert.ThrowsAsync<HarnessFixtureException>(action);
ex.Message.Should().Contain(Path.GetFileName(tempPath));
ex.LineNumber.Should().Be(2);
}
finally
{
if (File.Exists(tempPath))
{
File.Delete(tempPath);
}
}
}
}

View File

@@ -0,0 +1,98 @@
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
using FluentAssertions;
using StellaOps.Findings.Ledger.Domain;
using Xunit;
namespace LedgerReplayHarness.Tests;
public class HarnessRunnerTests
{
[Fact]
public async Task RunAsync_CountsOnlyAppendedRecordsAndUsesFixedRecordedAt()
{
var fixedTime = new DateTimeOffset(2025, 1, 1, 0, 0, 0, TimeSpan.Zero);
var timeProvider = new FixedTimeProvider(fixedTime);
var tempFixture = Path.GetTempFileName();
var tempReport = Path.GetTempFileName();
var envelope = new JsonObject
{
["chain_id"] = "11111111-1111-1111-1111-111111111111",
["event_id"] = "22222222-2222-2222-2222-222222222222",
["event_type"] = "test",
["policy_version"] = "v1",
["finding_id"] = "f1",
["artifact_id"] = "a1",
["actor_id"] = "u1",
["actor_type"] = "user",
["occurred_at"] = fixedTime.ToString("O")
};
var line = new JsonObject
{
["sequence_no"] = 1,
["canonical_envelope"] = envelope
};
await File.WriteAllTextAsync(
tempFixture,
JsonSerializer.Serialize(line) + "\n{\"note\":\"skip\"}",
Encoding.UTF8,
TestContext.Current.CancellationToken);
var client = new CaptureLedgerClient();
var runner = new HarnessRunner(client, maxParallel: 1, timeProvider: timeProvider, allowParallel: false);
try
{
var exitCode = await runner.RunAsync(new[] { tempFixture }, "tenant-x", tempReport, TestContext.Current.CancellationToken);
exitCode.Should().Be(0);
client.Records.Should().HaveCount(1);
client.Records[0].RecordedAt.Should().Be(fixedTime);
var json = await File.ReadAllTextAsync(tempReport, TestContext.Current.CancellationToken);
using var doc = JsonDocument.Parse(json);
doc.RootElement.GetProperty("eventsWritten").GetInt64().Should().Be(1);
}
finally
{
if (File.Exists(tempFixture))
{
File.Delete(tempFixture);
}
if (File.Exists(tempReport))
{
File.Delete(tempReport);
}
}
}
private sealed class CaptureLedgerClient : ILedgerClient
{
public List<LedgerEventRecord> Records { get; } = new();
public Task AppendAsync(LedgerEventRecord record, CancellationToken cancellationToken)
{
Records.Add(record);
return Task.CompletedTask;
}
}
private sealed class FixedTimeProvider : TimeProvider
{
private readonly DateTimeOffset _utcNow;
public FixedTimeProvider(DateTimeOffset utcNow)
{
_utcNow = utcNow;
}
public override DateTimeOffset GetUtcNow() => _utcNow;
public override TimeZoneInfo LocalTimeZone => TimeZoneInfo.Utc;
}
}

View File

@@ -0,0 +1,15 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<ImplicitUsings>enable</ImplicitUsings>
<Nullable>enable</Nullable>
<LangVersion>preview</LangVersion>
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\tools\LedgerReplayHarness\LedgerReplayHarness.csproj" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="FluentAssertions" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,9 @@
using StellaOps.Findings.Ledger.Domain;
namespace LedgerReplayHarness;
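// One parsed fixture line: the replayable record plus any expected hashes the
// fixture supplies for post-run verification.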
internal sealed record HarnessFixtureEntry(
LedgerEventRecord Record,
DateTimeOffset RecordedAt,
string? ExpectedEventHash,
string? ExpectedMerkleRoot);

View File

@@ -0,0 +1,14 @@
namespace LedgerReplayHarness;
public sealed class HarnessFixtureException : Exception
{
public HarnessFixtureException(string fixturePath, int lineNumber, string message, Exception? innerException = null)
: base($"{Path.GetFileName(fixturePath)}:{lineNumber} {message}", innerException)
{
FixturePath = fixturePath;
LineNumber = lineNumber;
}
public string FixturePath { get; }
public int LineNumber { get; }
}

View File

@@ -0,0 +1,222 @@
using System.Globalization;
using System.Runtime.CompilerServices;
using System.Text.Json;
using System.Text.Json.Nodes;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Hashing;
namespace LedgerReplayHarness;
internal static class HarnessFixtureReader
{
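// Streams NDJSON entries. Lines without a canonical_envelope object are skipped;
// event/merkle hashes are recomputed per entry so callers can check them against
// the fixture's expected event_hash / merkle_root values.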
public static async IAsyncEnumerable<HarnessFixtureEntry> ReadEntriesAsync(
FileInfo fixture,
string tenant,
TimeProvider timeProvider,
[EnumeratorCancellation] CancellationToken cancellationToken)
{
await using var stream = fixture.OpenRead();
using var reader = new StreamReader(stream);
var recordedAtBase = timeProvider.GetUtcNow();
string? line;
var lineNumber = 0;
while ((line = await reader.ReadLineAsync().ConfigureAwait(false)) is not null)
{
cancellationToken.ThrowIfCancellationRequested();
lineNumber++;
if (string.IsNullOrWhiteSpace(line))
{
continue;
}
JsonObject? node;
try
{
node = JsonNode.Parse(line)?.AsObject();
}
catch (JsonException ex)
{
throw new HarnessFixtureException(fixture.FullName, lineNumber, "invalid json", ex);
}
if (node is null)
{
throw new HarnessFixtureException(fixture.FullName, lineNumber, "expected json object");
}
if (!node.TryGetPropertyValue("canonical_envelope", out var envelopeNode) || envelopeNode is not JsonObject envelope)
{
continue;
}
if (!TryGetSequence(node, out var sequence, out var hasSequence))
{
if (hasSequence)
{
throw new HarnessFixtureException(fixture.FullName, lineNumber, "sequence_no invalid");
}
continue;
}
DateTimeOffset recordedAt = recordedAtBase;
if (node.TryGetPropertyValue("recorded_at", out var recordedAtNode) && recordedAtNode is not null)
{
if (!TryGetDateTime(recordedAtNode, out recordedAt))
{
throw new HarnessFixtureException(fixture.FullName, lineNumber, "recorded_at invalid");
}
}
DateTimeOffset occurredAt = recordedAt;
if (envelope.TryGetPropertyValue("occurred_at", out var occurredAtNode) && occurredAtNode is not null)
{
if (!TryGetDateTime(occurredAtNode, out occurredAt))
{
throw new HarnessFixtureException(fixture.FullName, lineNumber, "occurred_at invalid");
}
}
var chainId = TryGetGuid(envelope, "chain_id", Guid.Empty);
var eventId = TryGetGuid(envelope, "event_id", Guid.Empty);
var eventType = TryGetString(envelope, "event_type") ?? string.Empty;
var policyVersion = TryGetString(envelope, "policy_version") ?? string.Empty;
var findingId = TryGetString(envelope, "finding_id") ?? string.Empty;
var artifactId = TryGetString(envelope, "artifact_id") ?? string.Empty;
var sourceRunId = TryGetGuidNullable(envelope, "source_run_id");
var actorId = TryGetString(envelope, "actor_id") ?? "system";
var actorType = TryGetString(envelope, "actor_type") ?? "system";
var previousHash = TryGetString(envelope, "previous_hash") ?? string.Empty;
var computed = LedgerHashing.ComputeHashes(envelope, sequence);
var record = new LedgerEventRecord(
tenant,
chainId,
sequence,
eventId,
eventType,
policyVersion,
findingId,
artifactId,
sourceRunId,
actorId,
actorType,
occurredAt,
recordedAt,
envelope,
computed.EventHash,
previousHash,
computed.MerkleLeafHash,
computed.CanonicalJson);
var expectedEventHash = TryGetString(node, "event_hash");
var expectedMerkleRoot = TryGetString(node, "merkle_root");
yield return new HarnessFixtureEntry(record, recordedAt, expectedEventHash, expectedMerkleRoot);
}
}
private static bool TryGetSequence(JsonObject node, out long sequence, out bool hasSequence)
{
sequence = 0;
hasSequence = false;
if (node.TryGetPropertyValue("sequence_no", out var seqNode) && seqNode is not null)
{
hasSequence = true;
return TryGetLong(seqNode, out sequence);
}
return false;
}
private static Guid TryGetGuid(JsonObject node, string name, Guid fallback)
{
var value = TryGetString(node, name);
return value is not null && Guid.TryParse(value, out var guid) ? guid : fallback;
}
private static Guid? TryGetGuidNullable(JsonObject node, string name)
{
var value = TryGetString(node, name);
if (string.IsNullOrWhiteSpace(value))
{
return null;
}
return Guid.TryParse(value, out var guid) ? guid : null;
}
private static bool TryGetLong(JsonNode node, out long value)
{
value = 0;
if (node is JsonValue jsonValue)
{
try
{
return jsonValue.TryGetValue(out value);
}
catch
{
return false;
}
}
return false;
}
private static string? TryGetString(JsonObject node, string name)
{
if (node.TryGetPropertyValue(name, out var nodeValue) && nodeValue is not null)
{
return TryGetString(nodeValue);
}
return null;
}
private static string? TryGetString(JsonNode node)
{
if (node is JsonValue jsonValue)
{
try
{
return jsonValue.GetValue<string>();
}
catch
{
return null;
}
}
return null;
}
private static bool TryGetDateTime(JsonNode node, out DateTimeOffset value)
{
value = default;
if (node is JsonValue jsonValue)
{
try
{
if (jsonValue.TryGetValue(out value))
{
return true;
}
}
catch
{
return false;
}
if (jsonValue.TryGetValue(out string? stringValue) &&
!string.IsNullOrWhiteSpace(stringValue) &&
DateTimeOffset.TryParse(stringValue, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out value))
{
return true;
}
}
return false;
}
}

View File

@@ -1,9 +1,6 @@
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Text.Json;
using System.Text.Json.Nodes;
using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Hashing;
namespace LedgerReplayHarness;
@@ -11,11 +8,15 @@ public sealed class HarnessRunner
{
private readonly ILedgerClient _client;
private readonly int _maxParallel;
private readonly TimeProvider _timeProvider;
private readonly bool _allowParallel;
public HarnessRunner(ILedgerClient client, int maxParallel = 4)
public HarnessRunner(ILedgerClient client, int maxParallel = 4, TimeProvider? timeProvider = null, bool allowParallel = false)
{
_client = client ?? throw new ArgumentNullException(nameof(client));
_maxParallel = maxParallel <= 0 ? 1 : maxParallel;
_timeProvider = timeProvider ?? TimeProvider.System;
_allowParallel = allowParallel;
}
public async Task<int> RunAsync(IEnumerable<string> fixtures, string tenant, string reportPath, CancellationToken cancellationToken)
@@ -34,89 +35,80 @@ public sealed class HarnessRunner
var hashesValid = true;
DateTimeOffset? earliest = null;
DateTimeOffset? latest = null;
var leafHashes = new List<string>();
var leafEntries = new List<(Guid ChainId, long Sequence, string LeafHash)>();
string? expectedMerkleRoot = null;
var latencies = new ConcurrentBag<double>();
var swTotal = Stopwatch.StartNew();
var throttler = new TaskThrottler(_maxParallel);
TaskThrottler? throttler = _allowParallel && _maxParallel > 1
? new TaskThrottler(_maxParallel)
: null;
foreach (var fixture in fixtures)
var orderedFixtures = fixtures.OrderBy(f => f, StringComparer.Ordinal).ToArray();
foreach (var fixture in orderedFixtures)
{
await foreach (var line in ReadLinesAsync(fixture, cancellationToken))
var fixtureInfo = new FileInfo(fixture);
await foreach (var entry in HarnessFixtureReader.ReadEntriesAsync(fixtureInfo, tenant, _timeProvider, cancellationToken))
{
if (string.IsNullOrWhiteSpace(line)) continue;
var node = JsonNode.Parse(line)?.AsObject();
if (node is null) continue;
eventCount++;
var recordedAt = node["recorded_at"]?.GetValue<DateTimeOffset>() ?? DateTimeOffset.UtcNow;
earliest = earliest is null ? recordedAt : DateTimeOffset.Compare(recordedAt, earliest.Value) < 0 ? recordedAt : earliest;
latest = latest is null
? recordedAt
: DateTimeOffset.Compare(recordedAt, latest.Value) > 0 ? recordedAt : latest;
if (node["canonical_envelope"] is JsonObject envelope && node["sequence_no"] is not null)
var record = entry.Record;
if (!string.IsNullOrEmpty(entry.ExpectedEventHash) &&
!string.Equals(entry.ExpectedEventHash, record.EventHash, StringComparison.Ordinal))
{
var seq = node["sequence_no"]!.GetValue<long>();
var computed = LedgerHashing.ComputeHashes(envelope, seq);
var expected = node["event_hash"]?.GetValue<string>();
if (!string.IsNullOrEmpty(expected) && !string.Equals(expected, computed.EventHash, StringComparison.Ordinal))
{
hashesValid = false;
}
stats.UpdateHashes(computed.EventHash, computed.MerkleLeafHash);
leafHashes.Add(computed.MerkleLeafHash);
expectedMerkleRoot ??= node["merkle_root"]?.GetValue<string>();
// enqueue for concurrent append
var record = new LedgerEventRecord(
tenant,
envelope["chain_id"]?.GetValue<Guid>() ?? Guid.Empty,
seq,
envelope["event_id"]?.GetValue<Guid>() ?? Guid.Empty,
envelope["event_type"]?.GetValue<string>() ?? string.Empty,
envelope["policy_version"]?.GetValue<string>() ?? string.Empty,
envelope["finding_id"]?.GetValue<string>() ?? string.Empty,
envelope["artifact_id"]?.GetValue<string>() ?? string.Empty,
envelope["source_run_id"]?.GetValue<Guid?>(),
envelope["actor_id"]?.GetValue<string>() ?? "system",
envelope["actor_type"]?.GetValue<string>() ?? "system",
envelope["occurred_at"]?.GetValue<DateTimeOffset>() ?? recordedAt,
recordedAt,
envelope,
computed.EventHash,
envelope["previous_hash"]?.GetValue<string>() ?? string.Empty,
computed.MerkleLeafHash,
computed.CanonicalJson);
// fire-and-track latency
await throttler.RunAsync(async () =>
{
var sw = Stopwatch.StartNew();
await _client.AppendAsync(record, cancellationToken).ConfigureAwait(false);
sw.Stop();
latencies.Add(sw.Elapsed.TotalMilliseconds);
}, cancellationToken).ConfigureAwait(false);
hashesValid = false;
}
earliest = earliest is null ? entry.RecordedAt : DateTimeOffset.Compare(entry.RecordedAt, earliest.Value) < 0 ? entry.RecordedAt : earliest;
latest = latest is null
? entry.RecordedAt
: DateTimeOffset.Compare(entry.RecordedAt, latest.Value) > 0 ? entry.RecordedAt : latest;
stats.UpdateHashes(record.EventHash, record.MerkleLeafHash);
leafEntries.Add((record.ChainId, record.SequenceNumber, record.MerkleLeafHash));
expectedMerkleRoot ??= entry.ExpectedMerkleRoot;
if (throttler is null)
{
var sw = Stopwatch.StartNew();
await _client.AppendAsync(record, cancellationToken).ConfigureAwait(false);
sw.Stop();
latencies.Add(sw.Elapsed.TotalMilliseconds);
Interlocked.Increment(ref eventCount);
continue;
}
await throttler.RunAsync(async () =>
{
var sw = Stopwatch.StartNew();
await _client.AppendAsync(record, cancellationToken).ConfigureAwait(false);
sw.Stop();
latencies.Add(sw.Elapsed.TotalMilliseconds);
Interlocked.Increment(ref eventCount);
}, cancellationToken).ConfigureAwait(false);
}
}
await throttler.DrainAsync(cancellationToken).ConfigureAwait(false);
if (throttler is not null)
{
await throttler.DrainAsync(cancellationToken).ConfigureAwait(false);
}
swTotal.Stop();
var latencyArray = latencies.ToArray();
Array.Sort(latencyArray);
double p95 = latencyArray.Length == 0 ? 0 : latencyArray[(int)Math.Ceiling(latencyArray.Length * 0.95) - 1];
string? computedRoot = leafHashes.Count == 0 ? null : MerkleCalculator.ComputeRoot(leafHashes);
var orderedLeafHashes = leafEntries
.OrderBy(entry => entry.ChainId)
.ThenBy(entry => entry.Sequence)
.Select(entry => entry.LeafHash)
.ToList();
string? computedRoot = orderedLeafHashes.Count == 0 ? null : MerkleCalculator.ComputeRoot(orderedLeafHashes);
var merkleOk = expectedMerkleRoot is null || string.Equals(expectedMerkleRoot, computedRoot, StringComparison.OrdinalIgnoreCase);
var report = new
{
tenant,
fixtures = fixtures.ToArray(),
fixtures = orderedFixtures,
eventsWritten = eventCount,
durationSeconds = Math.Max(swTotal.Elapsed.TotalSeconds, (latest - earliest)?.TotalSeconds ?? 0),
throughputEps = swTotal.Elapsed.TotalSeconds > 0 ? eventCount / swTotal.Elapsed.TotalSeconds : 0,
@@ -125,7 +117,7 @@ public sealed class HarnessRunner
cpuPercentMax = 0,
memoryMbMax = 0,
status = hashesValid && merkleOk ? "pass" : "fail",
timestamp = DateTimeOffset.UtcNow.ToString("O"),
timestamp = _timeProvider.GetUtcNow().ToString("O"),
hashSummary = stats.ToReport(),
merkleRoot = computedRoot,
merkleExpected = expectedMerkleRoot
@@ -136,13 +128,4 @@ public sealed class HarnessRunner
return hashesValid && merkleOk ? 0 : 1;
}
private static async IAsyncEnumerable<string> ReadLinesAsync(string path, [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken)
{
await using var stream = File.OpenRead(path);
using var reader = new StreamReader(stream);
while (!cancellationToken.IsCancellationRequested && await reader.ReadLineAsync() is { } line)
{
yield return line;
}
}
}

View File

@@ -11,4 +11,9 @@
<ItemGroup>
<PackageReference Include="System.CommandLine" />
</ItemGroup>
<ItemGroup>
<AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleToAttribute">
<_Parameter1>StellaOps.Findings.Tools.LedgerReplayHarness.Tests</_Parameter1>
</AssemblyAttribute>
</ItemGroup>
</Project>

View File

@@ -23,7 +23,12 @@ var reportOption = new Option<string>("--report")
var parallelOption = new Option<int>("--maxParallel")
{
Description = "Maximum parallelism when sending events",
DefaultValueFactory = _ => 4
DefaultValueFactory = _ => 1
};
var allowParallelOption = new Option<bool>("--allowParallel")
{
Description = "Allow non-deterministic parallel event ingestion"
};
var root = new RootCommand("Findings Ledger replay & determinism harness");
@@ -31,6 +36,7 @@ root.Add(fixtureOption);
root.Add(tenantOption);
root.Add(reportOption);
root.Add(parallelOption);
root.Add(allowParallelOption);
root.SetAction(async (parseResult, ct) =>
{
@@ -38,8 +44,9 @@ root.SetAction(async (parseResult, ct) =>
var tenant = parseResult.GetValue(tenantOption)!;
var report = parseResult.GetValue(reportOption)!;
var maxParallel = parseResult.GetValue(parallelOption);
var allowParallel = parseResult.GetValue(allowParallelOption);
var runner = new HarnessRunner(new InMemoryLedgerClient(), maxParallel);
var runner = new HarnessRunner(new InMemoryLedgerClient(), maxParallel, allowParallel: allowParallel);
var exitCode = await runner.RunAsync(fixtures, tenant, report, ct);
return exitCode;
});
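
For reference, invocations under the new defaults might look like the following; the project path is an assumption, while the flags match the options registered above:

# Deterministic single-threaded replay (default --maxParallel is now 1).
dotnet run --project tools/LedgerReplayHarness -- \
  --fixture fixtures/events.ndjson --tenant tenant-a --report report.json

# Explicit opt-in to non-deterministic parallel ingestion.
dotnet run --project tools/LedgerReplayHarness -- \
  --fixture fixtures/events.ndjson --tenant tenant-a --report report.json \
  --maxParallel 8 --allowParallel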