Add unit tests and implementations for MongoDB index models and OpenAPI metadata
- Implemented `MongoIndexModelTests` to verify index models for various stores. - Created `OpenApiMetadataFactory` with methods to generate OpenAPI metadata. - Added tests for `OpenApiMetadataFactory` to ensure expected defaults and URL overrides. - Introduced `ObserverSurfaceSecrets` and `WebhookSurfaceSecrets` for managing secrets. - Developed `RuntimeSurfaceFsClient` and `WebhookSurfaceFsClient` for manifest retrieval. - Added dependency injection tests for `SurfaceEnvironmentRegistration` in both Observer and Webhook contexts. - Implemented tests for secret resolution in `ObserverSurfaceSecretsTests` and `WebhookSurfaceSecretsTests`. - Created `EnsureLinkNotMergeCollectionsMigrationTests` to validate MongoDB migration logic. - Added project files for MongoDB tests and NuGet package mirroring.
This commit is contained in:
148
src/Findings/tools/LedgerReplayHarness/HarnessRunner.cs
Normal file
148
src/Findings/tools/LedgerReplayHarness/HarnessRunner.cs
Normal file
@@ -0,0 +1,148 @@
|
||||
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Text.Json;
using System.Text.Json.Nodes;

using StellaOps.Findings.Ledger.Domain;
using StellaOps.Findings.Ledger.Hashing;
|
||||
|
||||
namespace LedgerReplayHarness;
|
||||
|
||||
/// <summary>
/// Replays NDJSON ledger fixtures through an <see cref="ILedgerClient"/>, verifying
/// event hashes and the Merkle root, and writing a JSON summary report.
/// </summary>
public sealed class HarnessRunner
{
    private readonly ILedgerClient _client;
    private readonly int _maxParallel;

    /// <summary>Creates a harness bound to the given ledger client.</summary>
    /// <param name="client">Append target for replayed events; must not be null.</param>
    /// <param name="maxParallel">Maximum concurrent appends; values &lt;= 0 are clamped to 1.</param>
    public HarnessRunner(ILedgerClient client, int maxParallel = 4)
    {
        _client = client ?? throw new ArgumentNullException(nameof(client));
        _maxParallel = maxParallel <= 0 ? 1 : maxParallel;
    }

    /// <summary>
    /// Replays every event line from <paramref name="fixtures"/>, recomputes hashes,
    /// appends records concurrently, and writes a report to <paramref name="reportPath"/>.
    /// </summary>
    /// <returns>0 when event-hash and Merkle checks pass; 1 otherwise.</returns>
    /// <exception cref="ArgumentException">Thrown when no fixtures are supplied.</exception>
    public async Task<int> RunAsync(IEnumerable<string> fixtures, string tenant, string reportPath, CancellationToken cancellationToken)
    {
        // Materialize once: the original enumerated the caller's sequence three times
        // (Any, foreach, ToArray), which is unsafe for one-shot enumerables.
        var fixtureList = fixtures?.ToArray();
        if (fixtureList is null || fixtureList.Length == 0)
        {
            throw new ArgumentException("At least one fixture is required.", nameof(fixtures));
        }

        var stats = new HarnessStats();

        tenant = string.IsNullOrWhiteSpace(tenant) ? "default" : tenant;
        reportPath = string.IsNullOrWhiteSpace(reportPath) ? "harness-report.json" : reportPath;

        var eventCount = 0L;
        var hashesValid = true;
        DateTimeOffset? earliest = null;
        DateTimeOffset? latest = null;
        var leafHashes = new List<string>();
        string? expectedMerkleRoot = null;
        // ConcurrentBag: latencies are added from parallel append tasks. (The original
        // declared a second local List<double> with the same name — a compile error.)
        var latencies = new ConcurrentBag<double>();
        var swTotal = Stopwatch.StartNew();

        var throttler = new TaskThrottler(_maxParallel);

        foreach (var fixture in fixtureList)
        {
            await foreach (var line in ReadLinesAsync(fixture, cancellationToken))
            {
                if (string.IsNullOrWhiteSpace(line)) continue;
                var node = JsonNode.Parse(line)?.AsObject();
                if (node is null) continue;

                eventCount++;
                var recordedAt = node["recorded_at"]?.GetValue<DateTimeOffset>() ?? DateTimeOffset.UtcNow;
                if (earliest is null || recordedAt < earliest.Value) earliest = recordedAt;
                if (latest is null || recordedAt > latest.Value) latest = recordedAt;

                if (node["canonical_envelope"] is JsonObject envelope && node["sequence_no"] is not null)
                {
                    var seq = node["sequence_no"]!.GetValue<long>();
                    var computed = LedgerHashing.ComputeHashes(envelope, seq);
                    var expected = node["event_hash"]?.GetValue<string>();
                    if (!string.IsNullOrEmpty(expected) && !string.Equals(expected, computed.EventHash, StringComparison.Ordinal))
                    {
                        hashesValid = false;
                    }

                    stats.UpdateHashes(computed.EventHash, computed.MerkleLeafHash);
                    leafHashes.Add(computed.MerkleLeafHash);
                    // First fixture line that carries a merkle_root wins.
                    expectedMerkleRoot ??= node["merkle_root"]?.GetValue<string>();

                    var record = new LedgerEventRecord(
                        tenant,
                        envelope["chain_id"]?.GetValue<Guid>() ?? Guid.Empty,
                        seq,
                        envelope["event_id"]?.GetValue<Guid>() ?? Guid.Empty,
                        envelope["event_type"]?.GetValue<string>() ?? string.Empty,
                        envelope["policy_version"]?.GetValue<string>() ?? string.Empty,
                        envelope["finding_id"]?.GetValue<string>() ?? string.Empty,
                        envelope["artifact_id"]?.GetValue<string>() ?? string.Empty,
                        envelope["source_run_id"]?.GetValue<Guid?>(),
                        envelope["actor_id"]?.GetValue<string>() ?? "system",
                        envelope["actor_type"]?.GetValue<string>() ?? "system",
                        envelope["occurred_at"]?.GetValue<DateTimeOffset>() ?? recordedAt,
                        recordedAt,
                        envelope,
                        computed.EventHash,
                        envelope["previous_hash"]?.GetValue<string>() ?? string.Empty,
                        computed.MerkleLeafHash,
                        computed.CanonicalJson);

                    // Append under the throttler and record per-append latency.
                    await throttler.RunAsync(async () =>
                    {
                        var sw = Stopwatch.StartNew();
                        await _client.AppendAsync(record, cancellationToken).ConfigureAwait(false);
                        sw.Stop();
                        latencies.Add(sw.Elapsed.TotalMilliseconds);
                    }, cancellationToken).ConfigureAwait(false);
                }
            }
        }

        await throttler.DrainAsync(cancellationToken).ConfigureAwait(false);
        swTotal.Stop();

        var latencyArray = latencies.ToArray();
        Array.Sort(latencyArray);
        // Nearest-rank p95 over the sorted latencies; 0 when nothing was appended.
        double p95 = latencyArray.Length == 0 ? 0 : latencyArray[(int)Math.Ceiling(latencyArray.Length * 0.95) - 1];

        string? computedRoot = leafHashes.Count == 0 ? null : MerkleCalculator.ComputeRoot(leafHashes);
        var merkleOk = expectedMerkleRoot is null || string.Equals(expectedMerkleRoot, computedRoot, StringComparison.OrdinalIgnoreCase);

        var report = new
        {
            tenant,
            fixtures = fixtureList,
            eventsWritten = eventCount,
            durationSeconds = Math.Max(swTotal.Elapsed.TotalSeconds, (latest - earliest)?.TotalSeconds ?? 0),
            throughputEps = swTotal.Elapsed.TotalSeconds > 0 ? eventCount / swTotal.Elapsed.TotalSeconds : 0,
            latencyP95Ms = p95,
            projectionLagMaxSeconds = 0,
            cpuPercentMax = 0,
            memoryMbMax = 0,
            status = hashesValid && merkleOk ? "pass" : "fail",
            timestamp = DateTimeOffset.UtcNow.ToString("O"),
            hashSummary = stats.ToReport(),
            merkleRoot = computedRoot,
            merkleExpected = expectedMerkleRoot
        };

        var json = JsonSerializer.Serialize(report, new JsonSerializerOptions { WriteIndented = true });
        // Honor the caller's token (the original ignored it here).
        await File.WriteAllTextAsync(reportPath, json, cancellationToken).ConfigureAwait(false);
        return hashesValid && merkleOk ? 0 : 1;
    }

    /// <summary>Streams the lines of <paramref name="path"/>, stopping early on cancellation.</summary>
    private static async IAsyncEnumerable<string> ReadLinesAsync(string path, [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken)
    {
        await using var stream = File.OpenRead(path);
        using var reader = new StreamReader(stream);
        string? line;
        while (!reader.EndOfStream && !cancellationToken.IsCancellationRequested && (line = await reader.ReadLineAsync()) is not null)
        {
            yield return line;
        }
    }
}
|
||||
26
src/Findings/tools/LedgerReplayHarness/HarnessStats.cs
Normal file
26
src/Findings/tools/LedgerReplayHarness/HarnessStats.cs
Normal file
@@ -0,0 +1,26 @@
|
||||
namespace LedgerReplayHarness;
|
||||
|
||||
/// <summary>Accumulates distinct event and Merkle-leaf hashes seen during a replay run.</summary>
internal sealed class HarnessStats
{
    // Case-insensitive sets so differently-cased hex encodings of a digest count once.
    private readonly HashSet<string> _eventHashes = new(StringComparer.OrdinalIgnoreCase);
    private readonly HashSet<string> _leafHashes = new(StringComparer.OrdinalIgnoreCase);

    /// <summary>Records one event-hash / leaf-hash pair; blank values are ignored.</summary>
    public void UpdateHashes(string eventHash, string leafHash)
    {
        if (!string.IsNullOrWhiteSpace(eventHash)) _eventHashes.Add(eventHash);
        if (!string.IsNullOrWhiteSpace(leafHash)) _leafHashes.Add(leafHash);
    }

    /// <summary>Summary object (unique counts) for embedding in the JSON report.</summary>
    public object ToReport() => new
    {
        uniqueEventHashes = _eventHashes.Count,
        uniqueMerkleLeaves = _leafHashes.Count
    };
}
|
||||
8
src/Findings/tools/LedgerReplayHarness/ILedgerClient.cs
Normal file
8
src/Findings/tools/LedgerReplayHarness/ILedgerClient.cs
Normal file
@@ -0,0 +1,8 @@
|
||||
using StellaOps.Findings.Ledger.Domain;
|
||||
|
||||
namespace LedgerReplayHarness;
|
||||
|
||||
/// <summary>
/// Append-only sink for ledger events, letting the harness target either the
/// in-memory fake (<c>InMemoryLedgerClient</c>) or a real ledger backend.
/// </summary>
public interface ILedgerClient
{
    /// <summary>Appends a single ledger event record.</summary>
    /// <param name="record">The fully-hashed event to persist.</param>
    /// <param name="cancellationToken">Cancels the append.</param>
    Task AppendAsync(LedgerEventRecord record, CancellationToken cancellationToken);
}
|
||||
@@ -0,0 +1,15 @@
|
||||
using System.Collections.Concurrent;
|
||||
using StellaOps.Findings.Ledger.Domain;
|
||||
|
||||
namespace LedgerReplayHarness;
|
||||
|
||||
/// <summary>In-memory <see cref="ILedgerClient"/> used by the harness; no I/O.</summary>
public sealed class InMemoryLedgerClient : ILedgerClient
{
    // Keyed by (tenant, event id) so replaying the same fixture twice stays idempotent.
    private readonly ConcurrentDictionary<(string Tenant, Guid EventId), LedgerEventRecord> _store = new();

    /// <summary>Stores the record; a duplicate (tenant, event id) pair is silently ignored.</summary>
    public Task AppendAsync(LedgerEventRecord record, CancellationToken cancellationToken)
    {
        var key = (record.TenantId, record.EventId);
        _store.TryAdd(key, record);
        return Task.CompletedTask;
    }
}
|
||||
@@ -0,0 +1,14 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Console replay harness for the Findings Ledger; entry point is Program.cs. -->
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net10.0</TargetFramework>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
  </PropertyGroup>
  <ItemGroup>
    <!-- NOTE(review): the doubled backslashes ("..\\..\\") look like an escaping
         artifact of the diff rendering; MSBuild paths conventionally use a single
         "\" or "/" — verify this resolves before merging. -->
    <ProjectReference Include="..\\..\\StellaOps.Findings.Ledger\\StellaOps.Findings.Ledger.csproj" />
  </ItemGroup>
  <ItemGroup>
    <!-- Beta CLI parser; pinned because the 2.x API surface shifts between previews. -->
    <PackageReference Include="System.CommandLine" Version="2.0.0-beta4.22272.1" />
  </ItemGroup>
</Project>
|
||||
41
src/Findings/tools/LedgerReplayHarness/MerkleCalculator.cs
Normal file
41
src/Findings/tools/LedgerReplayHarness/MerkleCalculator.cs
Normal file
@@ -0,0 +1,41 @@
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
|
||||
namespace LedgerReplayHarness;
|
||||
|
||||
/// <summary>
/// Computes a Merkle root over hex-encoded leaf hashes. Each parent is the SHA-256 of
/// the two children's lowercase hex strings concatenated as UTF-8; an odd node at any
/// level is paired with itself. A single leaf is its own (normalized) root.
/// </summary>
internal static class MerkleCalculator
{
    /// <summary>Reduces <paramref name="leafHashes"/> level by level to a single root hash.</summary>
    /// <param name="leafHashes">Non-empty list of hex digest strings (any casing).</param>
    /// <returns>The lowercase hex Merkle root.</returns>
    /// <exception cref="ArgumentException">Thrown when the list is null or empty.</exception>
    public static string ComputeRoot(IReadOnlyList<string> leafHashes)
    {
        if (leafHashes is null || leafHashes.Count == 0)
        {
            throw new ArgumentException("At least one leaf hash is required.", nameof(leafHashes));
        }

        var level = leafHashes.Select(Normalize).ToList();
        while (level.Count > 1)
        {
            var next = new List<string>((level.Count + 1) / 2);
            for (int i = 0; i < level.Count; i += 2)
            {
                // Duplicate the last node when the level has an odd count.
                var right = i + 1 < level.Count ? level[i + 1] : level[i];
                next.Add(HashPair(level[i], right));
            }

            level = next;
        }

        return level[0];
    }

    // Trimmed lowercase hex so equal digests compare equal regardless of casing.
    private static string Normalize(string hex)
        => hex?.Trim().ToLowerInvariant() ?? string.Empty;

    private static string HashPair(string left, string right)
    {
        // SHA256.HashData (static, .NET 5+) avoids allocating and disposing a hasher
        // per node, which the original did with SHA256.Create() on every pair.
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(left + right));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}
|
||||
22
src/Findings/tools/LedgerReplayHarness/Program.cs
Normal file
22
src/Findings/tools/LedgerReplayHarness/Program.cs
Normal file
@@ -0,0 +1,22 @@
|
||||
using System.CommandLine;
|
||||
using LedgerReplayHarness;
|
||||
|
||||
// CLI wiring for the replay harness. --fixture is required and repeatable; the
// remaining options carry defaults matching HarnessRunner's fallbacks.
var fixtureOption = new Option<string[]>("--fixture", "NDJSON fixture path(s)") { IsRequired = true, AllowMultipleArgumentsPerToken = true };
var tenantOption = new Option<string>("--tenant", () => "default", "Tenant identifier");
var reportOption = new Option<string>("--report", () => "harness-report.json", "Path to write JSON report");
var parallelOption = new Option<int>("--maxParallel", () => 4, "Maximum parallelism when sending events");

var root = new RootCommand("Findings Ledger replay & determinism harness");
root.AddOption(fixtureOption);
root.AddOption(tenantOption);
root.AddOption(reportOption);
root.AddOption(parallelOption);

// NOTE: SetHandler binds lambda parameters positionally to the option symbols listed
// after the lambda — keep the two orders in sync when adding options.
root.SetHandler(async (fixtures, tenant, report, maxParallel) =>
{
    var runner = new HarnessRunner(new InMemoryLedgerClient(), maxParallel);
    var exitCode = await runner.RunAsync(fixtures, tenant, report, CancellationToken.None);
    // Environment.Exit terminates here, so the harness exit code wins over
    // whatever InvokeAsync would have returned below.
    Environment.Exit(exitCode);
}, fixtureOption, tenantOption, reportOption, parallelOption);

return await root.InvokeAsync(args);
|
||||
36
src/Findings/tools/LedgerReplayHarness/TaskThrottler.cs
Normal file
36
src/Findings/tools/LedgerReplayHarness/TaskThrottler.cs
Normal file
@@ -0,0 +1,36 @@
|
||||
namespace LedgerReplayHarness;
|
||||
|
||||
/// <summary>
/// Bounds the number of concurrently running tasks with a semaphore and tracks every
/// started task so <see cref="DrainAsync"/> can await them all.
/// </summary>
internal sealed class TaskThrottler
{
    private readonly SemaphoreSlim _semaphore;
    private readonly List<Task> _tasks = new();

    /// <summary>Creates a throttler; non-positive degrees of parallelism clamp to 1.</summary>
    public TaskThrottler(int maxDegreeOfParallelism)
    {
        _semaphore = new SemaphoreSlim(maxDegreeOfParallelism > 0 ? maxDegreeOfParallelism : 1);
    }

    /// <summary>
    /// Waits for a free slot (cancellable), then starts <paramref name="taskFactory"/> on the
    /// thread pool. The slot is released when the task completes, successfully or not.
    /// </summary>
    public async Task RunAsync(Func<Task> taskFactory, CancellationToken cancellationToken)
    {
        await _semaphore.WaitAsync(cancellationToken).ConfigureAwait(false);
        // Do NOT pass the token to Task.Run: if the token were already cancelled the
        // delegate would never execute, the finally would never run, and the semaphore
        // slot acquired above would leak. taskFactory observes cancellation itself.
        var task = Task.Run(async () =>
        {
            try
            {
                await taskFactory().ConfigureAwait(false);
            }
            finally
            {
                _semaphore.Release();
            }
        });
        lock (_tasks)
        {
            _tasks.Add(task);
        }
    }

    /// <summary>
    /// Awaits every task started so far. Cancellation abandons the wait (throwing
    /// <see cref="OperationCanceledException"/>) but does not cancel the tasks themselves.
    /// </summary>
    public async Task DrainAsync(CancellationToken cancellationToken)
    {
        Task[] pending;
        lock (_tasks)
        {
            pending = _tasks.ToArray();
        }

        await Task.WhenAll(pending).WaitAsync(cancellationToken).ConfigureAwait(false);
    }
}
|
||||
Reference in New Issue
Block a user