Add Policy DSL Validator, Schema Exporter, and Simulation Smoke tools
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
- Implemented PolicyDslValidator with command-line options for strict mode and JSON output.
- Created PolicySchemaExporter to generate JSON schemas for policy-related models.
- Developed PolicySimulationSmoke tool to validate policy simulations against expected outcomes.
- Added project files and necessary dependencies for each tool.
- Ensured proper error handling and usage instructions across tools.
src/StellaOps.Bench/LinkNotMerge.Vex/README.md | 26 | Normal file
@@ -0,0 +1,26 @@
# Link-Not-Merge VEX Bench

Measures synthetic VEX observation ingest and event emission throughput for the Link-Not-Merge program.

## Scenarios

`config.json` defines workloads with varying statement density and tenant fan-out. Metrics captured per scenario:

- Total latency (ingest + correlation) and p95/max percentiles
- Correlator-only latency and Mongo insert latency
- Observation throughput (observations/sec)
- Event emission throughput (events/sec)
- Peak managed heap allocations
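
A minimal `config.json` sketch; the scenario fields mirror the `VexScenarioConfig` properties exercised by the unit tests (`Id`, `Observations`, `AliasGroups`, `StatementsPerObservation`, `ProductsPerObservation`, `Tenants`, `BatchSize`, `Seed`), while the JSON property casing and the example values here are assumptions:

```json
{
  "iterations": 5,
  "thresholdMs": 2000,
  "scenarios": [
    {
      "id": "vex_ingest_baseline",
      "observations": 4000,
      "aliasGroups": 400,
      "statementsPerObservation": 6,
      "productsPerObservation": 3,
      "tenants": 4,
      "batchSize": 200,
      "seed": 12345
    }
  ]
}
```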

## Running locally

```bash
dotnet run \
  --project src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.csproj \
  -- \
  --csv out/linknotmerge-vex-bench.csv \
  --json out/linknotmerge-vex-bench.json \
  --prometheus out/linknotmerge-vex-bench.prom
```

The benchmark exits non-zero if latency thresholds are exceeded, observation or event throughput drops below configured floors, allocations exceed the ceiling, or regression ratios breach the baseline.
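
## Baseline and regression gating

Regression checks compare each scenario against `baseline.csv` (override the path with `--baseline`; the regression multiplier defaults to 1.15 and can be tuned with `--regression-limit`). The file uses the 15-column header written by `CsvWriter`; the sample row below is taken from the loader unit test:

```csv
scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb
vex_ingest_baseline,5,4000,24000,12000,620.5,700.1,820.9,320.5,300.0,9800.0,9100.0,4200.0,3900.0,150.0
```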
@@ -0,0 +1,37 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
using Xunit;

namespace StellaOps.Bench.LinkNotMerge.Vex.Tests;

public sealed class BaselineLoaderTests
{
    [Fact]
    public async Task LoadAsync_ReadsEntries()
    {
        var path = Path.GetTempFileName();
        try
        {
            await File.WriteAllTextAsync(
                path,
                "scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb\n" +
                "vex_ingest_baseline,5,4000,24000,12000,620.5,700.1,820.9,320.5,300.0,9800.0,9100.0,4200.0,3900.0,150.0\n");

            var baseline = await BaselineLoader.LoadAsync(path, CancellationToken.None);
            var entry = Assert.Single(baseline);

            Assert.Equal("vex_ingest_baseline", entry.Key);
            Assert.Equal(4000, entry.Value.Observations);
            Assert.Equal(24000, entry.Value.Statements);
            Assert.Equal(12000, entry.Value.Events);
            Assert.Equal(700.1, entry.Value.P95TotalMs);
            Assert.Equal(3900.0, entry.Value.MinEventThroughputPerSecond);
        }
        finally
        {
            File.Delete(path);
        }
    }
}
@@ -0,0 +1,83 @@
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
using StellaOps.Bench.LinkNotMerge.Vex.Reporting;
using Xunit;

namespace StellaOps.Bench.LinkNotMerge.Vex.Tests;

public sealed class BenchmarkScenarioReportTests
{
    [Fact]
    public void RegressionDetection_FlagsBreaches()
    {
        var result = new VexScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            ObservationCount: 1000,
            AliasGroups: 100,
            StatementCount: 6000,
            EventCount: 3200,
            TotalStatistics: new DurationStatistics(600, 700, 750),
            InsertStatistics: new DurationStatistics(320, 360, 380),
            CorrelationStatistics: new DurationStatistics(280, 320, 340),
            ObservationThroughputStatistics: new ThroughputStatistics(8000, 7000),
            EventThroughputStatistics: new ThroughputStatistics(3500, 3200),
            AllocationStatistics: new AllocationStatistics(180),
            ThresholdMs: null,
            MinObservationThroughputPerSecond: null,
            MinEventThroughputPerSecond: null,
            MaxAllocatedThresholdMb: null);

        var baseline = new BaselineEntry(
            ScenarioId: "scenario",
            Iterations: 3,
            Observations: 1000,
            Statements: 6000,
            Events: 3200,
            MeanTotalMs: 520,
            P95TotalMs: 560,
            MaxTotalMs: 580,
            MeanInsertMs: 250,
            MeanCorrelationMs: 260,
            MeanObservationThroughputPerSecond: 9000,
            MinObservationThroughputPerSecond: 8500,
            MeanEventThroughputPerSecond: 4200,
            MinEventThroughputPerSecond: 3800,
            MaxAllocatedMb: 140);

        var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1);

        Assert.True(report.DurationRegressionBreached);
        Assert.True(report.ObservationThroughputRegressionBreached);
        Assert.True(report.EventThroughputRegressionBreached);
        Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("event throughput"));
    }

    [Fact]
    public void RegressionDetection_NoBaseline_NoBreaches()
    {
        var result = new VexScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            ObservationCount: 1000,
            AliasGroups: 100,
            StatementCount: 6000,
            EventCount: 3200,
            TotalStatistics: new DurationStatistics(480, 520, 540),
            InsertStatistics: new DurationStatistics(260, 280, 300),
            CorrelationStatistics: new DurationStatistics(220, 240, 260),
            ObservationThroughputStatistics: new ThroughputStatistics(9000, 8800),
            EventThroughputStatistics: new ThroughputStatistics(4200, 4100),
            AllocationStatistics: new AllocationStatistics(150),
            ThresholdMs: null,
            MinObservationThroughputPerSecond: null,
            MinEventThroughputPerSecond: null,
            MaxAllocatedThresholdMb: null);

        var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null);

        Assert.False(report.RegressionBreached);
        Assert.Empty(report.BuildRegressionFailureMessages());
    }
}
@@ -0,0 +1,28 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <UseConcelierTestInfra>false</UseConcelierTestInfra>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="coverlet.collector" Version="6.0.4">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="EphemeralMongo" Version="3.0.0" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Bench.LinkNotMerge.Vex\StellaOps.Bench.LinkNotMerge.Vex.csproj" />
  </ItemGroup>
</Project>
@@ -0,0 +1,34 @@
using System.Linq;
using System.Threading;
using Xunit;

namespace StellaOps.Bench.LinkNotMerge.Vex.Tests;

public sealed class VexScenarioRunnerTests
{
    [Fact]
    public void Execute_ComputesEvents()
    {
        var config = new VexScenarioConfig
        {
            Id = "unit",
            Observations = 600,
            AliasGroups = 120,
            StatementsPerObservation = 5,
            ProductsPerObservation = 3,
            Tenants = 2,
            BatchSize = 120,
            Seed = 12345,
        };

        var runner = new VexScenarioRunner(config);
        var result = runner.Execute(2, CancellationToken.None);

        Assert.Equal(600, result.ObservationCount);
        Assert.True(result.StatementCount > 0);
        Assert.True(result.EventCount > 0);
        Assert.All(result.TotalDurationsMs, duration => Assert.True(duration > 0));
        Assert.All(result.EventThroughputsPerSecond, throughput => Assert.True(throughput > 0));
        Assert.Equal(result.AggregationResult.EventCount, result.EventCount);
    }
}
@@ -0,0 +1,18 @@
namespace StellaOps.Bench.LinkNotMerge.Vex.Baseline;

internal sealed record BaselineEntry(
    string ScenarioId,
    int Iterations,
    int Observations,
    int Statements,
    int Events,
    double MeanTotalMs,
    double P95TotalMs,
    double MaxTotalMs,
    double MeanInsertMs,
    double MeanCorrelationMs,
    double MeanObservationThroughputPerSecond,
    double MinObservationThroughputPerSecond,
    double MeanEventThroughputPerSecond,
    double MinEventThroughputPerSecond,
    double MaxAllocatedMb);
@@ -0,0 +1,87 @@
using System.Globalization;

namespace StellaOps.Bench.LinkNotMerge.Vex.Baseline;

internal static class BaselineLoader
{
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }

        var result = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);

        await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var reader = new StreamReader(stream);

        var lineNumber = 0;
        while (true)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var line = await reader.ReadLineAsync().ConfigureAwait(false);
            if (line is null)
            {
                break;
            }

            lineNumber++;
            if (lineNumber == 1 || string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            var parts = line.Split(',', StringSplitOptions.TrimEntries);
            if (parts.Length < 15)
            {
                throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 15 columns, found {parts.Length}).");
            }

            var entry = new BaselineEntry(
                ScenarioId: parts[0],
                Iterations: ParseInt(parts[1], resolved, lineNumber),
                Observations: ParseInt(parts[2], resolved, lineNumber),
                Statements: ParseInt(parts[3], resolved, lineNumber),
                Events: ParseInt(parts[4], resolved, lineNumber),
                MeanTotalMs: ParseDouble(parts[5], resolved, lineNumber),
                P95TotalMs: ParseDouble(parts[6], resolved, lineNumber),
                MaxTotalMs: ParseDouble(parts[7], resolved, lineNumber),
                MeanInsertMs: ParseDouble(parts[8], resolved, lineNumber),
                MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber),
                MeanObservationThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber),
                MinObservationThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber),
                MeanEventThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber),
                MinEventThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber),
                MaxAllocatedMb: ParseDouble(parts[14], resolved, lineNumber));

            result[entry.ScenarioId] = entry;
        }

        return result;
    }

    private static int ParseInt(string value, string file, int line)
    {
        if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
    }

    private static double ParseDouble(string value, string file, int line)
    {
        if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
    }
}
@@ -0,0 +1,376 @@
using System.Globalization;
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
using StellaOps.Bench.LinkNotMerge.Vex.Reporting;

namespace StellaOps.Bench.LinkNotMerge.Vex;

internal static class Program
{
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = ProgramOptions.Parse(args);
            var config = await VexBenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
            var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);

            var results = new List<VexScenarioResult>();
            var reports = new List<BenchmarkScenarioReport>();
            var failures = new List<string>();

            foreach (var scenario in config.Scenarios)
            {
                var iterations = scenario.ResolveIterations(config.Iterations);
                var runner = new VexScenarioRunner(scenario);
                var execution = runner.Execute(iterations, CancellationToken.None);

                var totalStats = DurationStatistics.From(execution.TotalDurationsMs);
                var insertStats = DurationStatistics.From(execution.InsertDurationsMs);
                var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs);
                var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
                var observationThroughputStats = ThroughputStatistics.From(execution.ObservationThroughputsPerSecond);
                var eventThroughputStats = ThroughputStatistics.From(execution.EventThroughputsPerSecond);

                var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
                var observationFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
                var eventFloor = scenario.MinEventThroughputPerSecond ?? options.MinEventThroughputPerSecond ?? config.MinEventThroughputPerSecond;
                var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;

                var result = new VexScenarioResult(
                    scenario.ScenarioId,
                    scenario.DisplayLabel,
                    iterations,
                    execution.ObservationCount,
                    execution.AliasGroups,
                    execution.StatementCount,
                    execution.EventCount,
                    totalStats,
                    insertStats,
                    correlationStats,
                    observationThroughputStats,
                    eventThroughputStats,
                    allocationStats,
                    thresholdMs,
                    observationFloor,
                    eventFloor,
                    allocationLimit);

                results.Add(result);

                if (thresholdMs is { } threshold && result.TotalStatistics.MaxMs > threshold)
                {
                    failures.Add($"{result.Id} exceeded total latency threshold: {result.TotalStatistics.MaxMs:F2} ms > {threshold:F2} ms");
                }

                if (observationFloor is { } obsFloor && result.ObservationThroughputStatistics.MinPerSecond < obsFloor)
                {
                    failures.Add($"{result.Id} fell below observation throughput floor: {result.ObservationThroughputStatistics.MinPerSecond:N0} obs/s < {obsFloor:N0} obs/s");
                }

                if (eventFloor is { } evtFloor && result.EventThroughputStatistics.MinPerSecond < evtFloor)
                {
                    failures.Add($"{result.Id} fell below event throughput floor: {result.EventThroughputStatistics.MinPerSecond:N0} events/s < {evtFloor:N0} events/s");
                }

                if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit)
                {
                    failures.Add($"{result.Id} exceeded allocation budget: {result.AllocationStatistics.MaxAllocatedMb:F2} MB > {limit:F2} MB");
                }

                baseline.TryGetValue(result.Id, out var baselineEntry);
                var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit);
                reports.Add(report);
                failures.AddRange(report.BuildRegressionFailureMessages());
            }

            TablePrinter.Print(results);

            if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
            {
                CsvWriter.Write(options.CsvOutPath!, results);
            }

            if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
            {
                var metadata = new BenchmarkJsonMetadata(
                    SchemaVersion: "linknotmerge-vex-bench/1.0",
                    CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(),
                    Commit: options.Commit,
                    Environment: options.Environment);

                await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false);
            }

            if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
            {
                PrometheusWriter.Write(options.PrometheusOutPath!, reports);
            }

            if (failures.Count > 0)
            {
                Console.Error.WriteLine();
                Console.Error.WriteLine("Benchmark failures detected:");
                foreach (var failure in failures.Distinct())
                {
                    Console.Error.WriteLine($" - {failure}");
                }

                return 1;
            }

            return 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"linknotmerge-vex-bench error: {ex.Message}");
            return 1;
        }
    }

    private sealed record ProgramOptions(
        string ConfigPath,
        int? Iterations,
        double? ThresholdMs,
        double? MinThroughputPerSecond,
        double? MinEventThroughputPerSecond,
        double? MaxAllocatedMb,
        string? CsvOutPath,
        string? JsonOutPath,
        string? PrometheusOutPath,
        string BaselinePath,
        DateTimeOffset? CapturedAtUtc,
        string? Commit,
        string? Environment,
        double? RegressionLimit)
    {
        public static ProgramOptions Parse(string[] args)
        {
            var configPath = DefaultConfigPath();
            var baselinePath = DefaultBaselinePath();

            int? iterations = null;
            double? thresholdMs = null;
            double? minThroughput = null;
            double? minEventThroughput = null;
            double? maxAllocated = null;
            string? csvOut = null;
            string? jsonOut = null;
            string? promOut = null;
            DateTimeOffset? capturedAt = null;
            string? commit = null;
            string? environment = null;
            double? regressionLimit = null;

            for (var index = 0; index < args.Length; index++)
            {
                var current = args[index];
                switch (current)
                {
                    case "--config":
                        EnsureNext(args, index);
                        configPath = Path.GetFullPath(args[++index]);
                        break;
                    case "--iterations":
                        EnsureNext(args, index);
                        iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--threshold-ms":
                        EnsureNext(args, index);
                        thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-throughput":
                        EnsureNext(args, index);
                        minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-event-throughput":
                        EnsureNext(args, index);
                        minEventThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--max-allocated-mb":
                        EnsureNext(args, index);
                        maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--csv":
                        EnsureNext(args, index);
                        csvOut = args[++index];
                        break;
                    case "--json":
                        EnsureNext(args, index);
                        jsonOut = args[++index];
                        break;
                    case "--prometheus":
                        EnsureNext(args, index);
                        promOut = args[++index];
                        break;
                    case "--baseline":
                        EnsureNext(args, index);
                        baselinePath = Path.GetFullPath(args[++index]);
                        break;
                    case "--captured-at":
                        EnsureNext(args, index);
                        capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
                        break;
                    case "--commit":
                        EnsureNext(args, index);
                        commit = args[++index];
                        break;
                    case "--environment":
                        EnsureNext(args, index);
                        environment = args[++index];
                        break;
                    case "--regression-limit":
                        EnsureNext(args, index);
                        regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--help":
                    case "-h":
                        PrintUsage();
                        System.Environment.Exit(0);
                        break;
                    default:
                        throw new ArgumentException($"Unknown argument '{current}'.");
                }
            }

            return new ProgramOptions(
                configPath,
                iterations,
                thresholdMs,
                minThroughput,
                minEventThroughput,
                maxAllocated,
                csvOut,
                jsonOut,
                promOut,
                baselinePath,
                capturedAt,
                commit,
                environment,
                regressionLimit);
        }

        private static string DefaultConfigPath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "config.json");
        }

        private static string DefaultBaselinePath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "baseline.csv");
        }

        private static void EnsureNext(string[] args, int index)
        {
            if (index + 1 >= args.Length)
            {
                throw new ArgumentException("Missing value for argument.");
            }
        }

        private static void PrintUsage()
        {
            Console.WriteLine("Usage: linknotmerge-vex-bench [options]");
            Console.WriteLine();
            Console.WriteLine("Options:");
            Console.WriteLine("  --config <path>                 Path to benchmark configuration JSON.");
            Console.WriteLine("  --iterations <count>            Override iteration count.");
            Console.WriteLine("  --threshold-ms <value>          Global latency threshold in milliseconds.");
            Console.WriteLine("  --min-throughput <value>        Observation throughput floor (observations/second).");
            Console.WriteLine("  --min-event-throughput <value>  Event emission throughput floor (events/second).");
            Console.WriteLine("  --max-allocated-mb <value>      Global allocation ceiling (MB).");
            Console.WriteLine("  --csv <path>                    Write CSV results to path.");
            Console.WriteLine("  --json <path>                   Write JSON results to path.");
            Console.WriteLine("  --prometheus <path>             Write Prometheus exposition metrics to path.");
            Console.WriteLine("  --baseline <path>               Baseline CSV path.");
            Console.WriteLine("  --captured-at <iso8601>         Timestamp to embed in JSON metadata.");
            Console.WriteLine("  --commit <sha>                  Commit identifier for metadata.");
            Console.WriteLine("  --environment <name>            Environment label for metadata.");
            Console.WriteLine("  --regression-limit <value>      Regression multiplier (default 1.15).");
        }
    }
}

internal static class TablePrinter
{
    public static void Print(IEnumerable<VexScenarioResult> results)
    {
        Console.WriteLine("Scenario | Observations | Statements | Events | Total(ms) | Correl(ms) | Insert(ms) | Obs k/s | Evnt k/s | Alloc(MB)");
        Console.WriteLine("---------------------------- | ------------- | ---------- | ------- | ---------- | ---------- | ----------- | ------- | -------- | --------");
        foreach (var row in results)
        {
            Console.WriteLine(string.Join(" | ", new[]
            {
                row.IdColumn,
                row.ObservationsColumn,
                row.StatementColumn,
                row.EventColumn,
                row.TotalMeanColumn,
                row.CorrelationMeanColumn,
                row.InsertMeanColumn,
                row.ObservationThroughputColumn,
                row.EventThroughputColumn,
                row.AllocatedColumn,
            }));
        }
    }
}

internal static class CsvWriter
{
    public static void Write(string path, IEnumerable<VexScenarioResult> results)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(results);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        using var writer = new StreamWriter(stream);
        writer.WriteLine("scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb");

        foreach (var result in results)
        {
            writer.Write(result.Id);
            writer.Write(',');
            writer.Write(result.Iterations.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.ObservationCount.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.StatementCount.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.EventCount.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalStatistics.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalStatistics.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.InsertStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.CorrelationStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.ObservationThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.ObservationThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.EventThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.EventThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.AllocationStatistics.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture));
            writer.WriteLine();
        }
    }
}
@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;

[assembly: InternalsVisibleTo("StellaOps.Bench.LinkNotMerge.Vex.Tests")]
@@ -0,0 +1,151 @@
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting;

internal static class BenchmarkJsonWriter
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    };

    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());

        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;
        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.ObservationCount,
            report.Result.StatementCount,
            report.Result.EventCount,
            report.Result.TotalStatistics.MeanMs,
            report.Result.TotalStatistics.P95Ms,
            report.Result.TotalStatistics.MaxMs,
            report.Result.InsertStatistics.MeanMs,
            report.Result.CorrelationStatistics.MeanMs,
            report.Result.ObservationThroughputStatistics.MeanPerSecond,
            report.Result.ObservationThroughputStatistics.MinPerSecond,
            report.Result.EventThroughputStatistics.MeanPerSecond,
            report.Result.EventThroughputStatistics.MinPerSecond,
            report.Result.AllocationStatistics.MaxAllocatedMb,
            report.Result.ThresholdMs,
            report.Result.MinObservationThroughputPerSecond,
            report.Result.MinEventThroughputPerSecond,
            report.Result.MaxAllocatedThresholdMb,
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.Observations,
                    baseline.Statements,
                    baseline.Events,
                    baseline.MeanTotalMs,
                    baseline.P95TotalMs,
                    baseline.MaxTotalMs,
                    baseline.MeanInsertMs,
                    baseline.MeanCorrelationMs,
                    baseline.MeanObservationThroughputPerSecond,
                    baseline.MinObservationThroughputPerSecond,
                    baseline.MeanEventThroughputPerSecond,
                    baseline.MinEventThroughputPerSecond,
                    baseline.MaxAllocatedMb),
            new BenchmarkJsonScenarioRegression(
                report.DurationRegressionRatio,
                report.ObservationThroughputRegressionRatio,
                report.EventThroughputRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int Observations,
        int Statements,
        int Events,
        double MeanTotalMs,
        double P95TotalMs,
        double MaxTotalMs,
        double MeanInsertMs,
        double MeanCorrelationMs,
        double MeanObservationThroughputPerSecond,
        double MinObservationThroughputPerSecond,
        double MeanEventThroughputPerSecond,
        double MinEventThroughputPerSecond,
        double MaxAllocatedMb,
        double? ThresholdMs,
        double? MinObservationThroughputThresholdPerSecond,
        double? MinEventThroughputThresholdPerSecond,
        double? MaxAllocatedThresholdMb,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int Observations,
        int Statements,
        int Events,
        double MeanTotalMs,
        double P95TotalMs,
        double MaxTotalMs,
        double MeanInsertMs,
        double MeanCorrelationMs,
        double MeanObservationThroughputPerSecond,
        double MinObservationThroughputPerSecond,
        double MeanEventThroughputPerSecond,
        double MinEventThroughputPerSecond,
        double MaxAllocatedMb);

    private sealed record BenchmarkJsonScenarioRegression(
        double? DurationRatio,
        double? ObservationThroughputRatio,
        double? EventThroughputRatio,
        double Limit,
        bool Breached);
}

internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);
@@ -0,0 +1,89 @@
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;

namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting;

internal sealed class BenchmarkScenarioReport
{
    private const double DefaultRegressionLimit = 1.15d;

    public BenchmarkScenarioReport(VexScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        Result = result ?? throw new ArgumentNullException(nameof(result));
        Baseline = baseline;
        RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
        DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs);
        ObservationThroughputRegressionRatio = CalculateInverseRatio(result.ObservationThroughputStatistics.MinPerSecond, baseline?.MinObservationThroughputPerSecond);
        EventThroughputRegressionRatio = CalculateInverseRatio(result.EventThroughputStatistics.MinPerSecond, baseline?.MinEventThroughputPerSecond);
    }

    public VexScenarioResult Result { get; }

    public BaselineEntry? Baseline { get; }

    public double RegressionLimit { get; }

    public double? DurationRegressionRatio { get; }

    public double? ObservationThroughputRegressionRatio { get; }

    public double? EventThroughputRegressionRatio { get; }

    public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool ObservationThroughputRegressionBreached => ObservationThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool EventThroughputRegressionBreached => EventThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool RegressionBreached => DurationRegressionBreached || ObservationThroughputRegressionBreached || EventThroughputRegressionBreached;

    public IEnumerable<string> BuildRegressionFailureMessages()
    {
        if (Baseline is null)
        {
            yield break;
        }

        if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
        {
            var delta = (durationRatio - 1d) * 100d;
            yield return $"{Result.Id} exceeded max duration budget: {Result.TotalStatistics.MaxMs:F2} ms vs baseline {Baseline.MaxTotalMs:F2} ms (+{delta:F1}%).";
        }

        if (ObservationThroughputRegressionBreached && ObservationThroughputRegressionRatio is { } obsRatio)
        {
            var delta = (obsRatio - 1d) * 100d;
            yield return $"{Result.Id} observation throughput regressed: min {Result.ObservationThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinObservationThroughputPerSecond:N0} obs/s (-{delta:F1}%).";
        }

        if (EventThroughputRegressionBreached && EventThroughputRegressionRatio is { } evtRatio)
        {
            var delta = (evtRatio - 1d) * 100d;
            yield return $"{Result.Id} event throughput regressed: min {Result.EventThroughputStatistics.MinPerSecond:N0} events/s vs baseline {Baseline.MinEventThroughputPerSecond:N0} events/s (-{delta:F1}%).";
        }
    }

    private static double? CalculateRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        return current / baseline.Value;
    }

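    // Throughput ratios are inverted (baseline / current) so that a throughput drop raises
    // the ratio above 1.0; e.g. a baseline floor of 3800 events/s against a current minimum
    // of 3200 events/s yields 3800 / 3200 ≈ 1.19, which breaches the default 1.15 limit.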
    private static double? CalculateInverseRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        if (current <= 0d)
        {
            return double.PositiveInfinity;
        }

        return baseline.Value / current;
    }
}
@@ -0,0 +1,94 @@
using System.Globalization;
using System.Text;

namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting;

internal static class PrometheusWriter
{
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var builder = new StringBuilder();
        builder.AppendLine("# HELP linknotmerge_vex_bench_total_ms Link-Not-Merge VEX benchmark total duration (milliseconds).");
        builder.AppendLine("# TYPE linknotmerge_vex_bench_total_ms gauge");
        builder.AppendLine("# HELP linknotmerge_vex_bench_throughput_per_sec Link-Not-Merge VEX benchmark observation throughput (observations per second).");
        builder.AppendLine("# TYPE linknotmerge_vex_bench_throughput_per_sec gauge");
        builder.AppendLine("# HELP linknotmerge_vex_bench_event_throughput_per_sec Link-Not-Merge VEX benchmark event throughput (events per second).");
        builder.AppendLine("# TYPE linknotmerge_vex_bench_event_throughput_per_sec gauge");
        builder.AppendLine("# HELP linknotmerge_vex_bench_allocated_mb Link-Not-Merge VEX benchmark max allocations (megabytes).");
        builder.AppendLine("# TYPE linknotmerge_vex_bench_allocated_mb gauge");

        foreach (var report in reports)
        {
            var scenario = Escape(report.Result.Id);
            AppendMetric(builder, "linknotmerge_vex_bench_mean_total_ms", scenario, report.Result.TotalStatistics.MeanMs);
            AppendMetric(builder, "linknotmerge_vex_bench_p95_total_ms", scenario, report.Result.TotalStatistics.P95Ms);
            AppendMetric(builder, "linknotmerge_vex_bench_max_total_ms", scenario, report.Result.TotalStatistics.MaxMs);
            AppendMetric(builder, "linknotmerge_vex_bench_threshold_ms", scenario, report.Result.ThresholdMs);

            AppendMetric(builder, "linknotmerge_vex_bench_mean_observation_throughput_per_sec", scenario, report.Result.ObservationThroughputStatistics.MeanPerSecond);
            AppendMetric(builder, "linknotmerge_vex_bench_min_observation_throughput_per_sec", scenario, report.Result.ObservationThroughputStatistics.MinPerSecond);
            AppendMetric(builder, "linknotmerge_vex_bench_observation_throughput_floor_per_sec", scenario, report.Result.MinObservationThroughputPerSecond);

            AppendMetric(builder, "linknotmerge_vex_bench_mean_event_throughput_per_sec", scenario, report.Result.EventThroughputStatistics.MeanPerSecond);
            AppendMetric(builder, "linknotmerge_vex_bench_min_event_throughput_per_sec", scenario, report.Result.EventThroughputStatistics.MinPerSecond);
            AppendMetric(builder, "linknotmerge_vex_bench_event_throughput_floor_per_sec", scenario, report.Result.MinEventThroughputPerSecond);

            AppendMetric(builder, "linknotmerge_vex_bench_max_allocated_mb", scenario, report.Result.AllocationStatistics.MaxAllocatedMb);
            AppendMetric(builder, "linknotmerge_vex_bench_max_allocated_threshold_mb", scenario, report.Result.MaxAllocatedThresholdMb);

            if (report.Baseline is { } baseline)
            {
                AppendMetric(builder, "linknotmerge_vex_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs);
                AppendMetric(builder, "linknotmerge_vex_bench_baseline_min_observation_throughput_per_sec", scenario, baseline.MinObservationThroughputPerSecond);
                AppendMetric(builder, "linknotmerge_vex_bench_baseline_min_event_throughput_per_sec", scenario, baseline.MinEventThroughputPerSecond);
            }

            if (report.DurationRegressionRatio is { } durationRatio)
            {
                AppendMetric(builder, "linknotmerge_vex_bench_duration_regression_ratio", scenario, durationRatio);
            }

            if (report.ObservationThroughputRegressionRatio is { } obsRatio)
            {
                AppendMetric(builder, "linknotmerge_vex_bench_observation_regression_ratio", scenario, obsRatio);
            }

            if (report.EventThroughputRegressionRatio is { } evtRatio)
            {
                AppendMetric(builder, "linknotmerge_vex_bench_event_regression_ratio", scenario, evtRatio);
            }

            AppendMetric(builder, "linknotmerge_vex_bench_regression_limit", scenario, report.RegressionLimit);
            AppendMetric(builder, "linknotmerge_vex_bench_regression_breached", scenario, report.RegressionBreached ? 1 : 0);
        }

        File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
    }

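    // Emits one Prometheus exposition line per metric, e.g.:
    //   linknotmerge_vex_bench_mean_total_ms{scenario="vex_ingest_baseline"} 620.5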
    private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
    {
        if (!value.HasValue)
        {
            return;
        }

        builder.Append(metric);
        builder.Append("{scenario=\"");
        builder.Append(scenario);
        builder.Append("\"} ");
        builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture));
    }

    private static string Escape(string value) =>
        value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}
@@ -0,0 +1,84 @@
namespace StellaOps.Bench.LinkNotMerge.Vex;

internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
    public static DurationStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new DurationStatistics(0, 0, 0);
        }

        var sorted = values.ToArray();
        Array.Sort(sorted);

        var total = 0d;
        foreach (var value in values)
        {
            total += value;
        }

        var mean = total / values.Count;
        var p95 = Percentile(sorted, 95);
        var max = sorted[^1];

        return new DurationStatistics(mean, p95, max);
    }

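    // Percentile via linear interpolation between ranks: rank = p/100 * (n - 1); with ten
    // samples the p95 rank is 8.55, so the result is sorted[8] + 0.55 * (sorted[9] - sorted[8]).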
    private static double Percentile(IReadOnlyList<double> sorted, double percentile)
    {
        if (sorted.Count == 0)
        {
            return 0;
        }

        var rank = (percentile / 100d) * (sorted.Count - 1);
        var lower = (int)Math.Floor(rank);
        var upper = (int)Math.Ceiling(rank);
        var weight = rank - lower;

        if (upper >= sorted.Count)
        {
            return sorted[lower];
        }

        return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
    }
}

internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
    public static ThroughputStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new ThroughputStatistics(0, 0);
        }

        var total = 0d;
        var min = double.MaxValue;

        foreach (var value in values)
        {
            total += value;
            min = Math.Min(min, value);
        }

        var mean = total / values.Count;
        return new ThroughputStatistics(mean, min);
    }
}

internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
    public static AllocationStatistics From(IReadOnlyList<double> values)
    {
        var max = 0d;
        foreach (var value in values)
        {
            max = Math.Max(max, value);
        }

        return new AllocationStatistics(max);
    }
}
@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="MongoDB.Driver" Version="3.5.0" />
    <PackageReference Include="EphemeralMongo" Version="3.0.0" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
  </ItemGroup>
</Project>
@@ -0,0 +1,166 @@
using MongoDB.Bson;

namespace StellaOps.Bench.LinkNotMerge.Vex;

internal sealed class VexLinksetAggregator
{
    public VexAggregationResult Correlate(IEnumerable<BsonDocument> documents)
    {
        ArgumentNullException.ThrowIfNull(documents);

        var groups = new Dictionary<string, VexAccumulator>(StringComparer.Ordinal);
        var statementsSeen = 0;

        foreach (var document in documents)
        {
            var tenant = document.GetValue("tenant", "unknown").AsString;
            var linksetValue = document.GetValue("linkset", new BsonDocument());
            var linkset = linksetValue.IsBsonDocument ? linksetValue.AsBsonDocument : new BsonDocument();
            var aliases = linkset.GetValue("aliases", new BsonArray()).AsBsonArray;

            var statementsValue = document.GetValue("statements", new BsonArray());
            var statements = statementsValue.IsBsonArray ? statementsValue.AsBsonArray : new BsonArray();

            foreach (var statementValue in statements)
            {
                if (!statementValue.IsBsonDocument)
                {
                    continue;
                }

                statementsSeen++;

                var statement = statementValue.AsBsonDocument;
                var status = statement.GetValue("status", "unknown").AsString;
                var justification = statement.GetValue("justification", BsonNull.Value);
                var lastUpdated = statement.GetValue("last_updated", BsonNull.Value);
                var productValue = statement.GetValue("product", new BsonDocument());
                var product = productValue.IsBsonDocument ? productValue.AsBsonDocument : new BsonDocument();
                var productKey = product.GetValue("purl", "unknown").AsString;

                foreach (var aliasValue in aliases)
                {
                    if (!aliasValue.IsString)
                    {
                        continue;
                    }

                    var alias = aliasValue.AsString;
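                    // Composite "tenant|alias|purl" grouping key, assembled in place via
                    // string.Create to avoid intermediate string allocations on this hot path.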
                    var key = string.Create(alias.Length + tenant.Length + productKey.Length + 2, (tenant, alias, productKey), static (span, data) =>
                    {
                        var (tenantValue, aliasValue, productValue) = data;
                        var offset = 0;
                        tenantValue.AsSpan().CopyTo(span);
                        offset += tenantValue.Length;
                        span[offset++] = '|';
                        aliasValue.AsSpan().CopyTo(span[offset..]);
                        offset += aliasValue.Length;
                        span[offset++] = '|';
                        productValue.AsSpan().CopyTo(span[offset..]);
                    });

                    if (!groups.TryGetValue(key, out var accumulator))
                    {
                        accumulator = new VexAccumulator(tenant, alias, productKey);
                        groups[key] = accumulator;
                    }

                    accumulator.AddStatement(status, justification, lastUpdated);
                }
            }
        }

        var eventDocuments = new List<BsonDocument>(groups.Count);
        foreach (var accumulator in groups.Values)
        {
            if (accumulator.ShouldEmitEvent)
            {
                eventDocuments.Add(accumulator.ToEvent());
            }
        }

        return new VexAggregationResult(
            LinksetCount: groups.Count,
            StatementCount: statementsSeen,
            EventCount: eventDocuments.Count,
            EventDocuments: eventDocuments);
    }

    private sealed class VexAccumulator
    {
        private readonly Dictionary<string, int> _statusCounts = new(StringComparer.Ordinal);
        private readonly HashSet<string> _justifications = new(StringComparer.Ordinal);
        private readonly string _tenant;
        private readonly string _alias;
        private readonly string _product;
        private DateTime? _latest;

        public VexAccumulator(string tenant, string alias, string product)
        {
            _tenant = tenant;
            _alias = alias;
            _product = product;
        }

        public void AddStatement(string status, BsonValue justification, BsonValue updatedAt)
        {
            if (!_statusCounts.TryAdd(status, 1))
            {
                _statusCounts[status]++;
            }

            if (justification.IsString)
            {
                _justifications.Add(justification.AsString);
            }

            if (updatedAt.IsValidDateTime)
            {
                var value = updatedAt.ToUniversalTime();
                if (!_latest.HasValue || value > _latest)
                {
                    _latest = value;
                }
            }
        }

        public bool ShouldEmitEvent
        {
            get
            {
                if (_statusCounts.TryGetValue("affected", out var affected) && affected > 0)
                {
                    return true;
                }

                if (_statusCounts.TryGetValue("under_investigation", out var investigating) && investigating > 0)
                {
                    return true;
                }

                return false;
            }
        }

        public BsonDocument ToEvent()
        {
            var payload = new BsonDocument
            {
                ["tenant"] = _tenant,
                ["alias"] = _alias,
                ["product"] = _product,
                ["statuses"] = new BsonDocument(_statusCounts.Select(kvp => new BsonElement(kvp.Key, kvp.Value))),
                ["justifications"] = new BsonArray(_justifications.Select(justification => justification)),
                ["last_updated"] = _latest.HasValue ? _latest.Value : (BsonValue)BsonNull.Value,
            };

            return payload;
        }
    }
}

internal sealed record VexAggregationResult(
    int LinksetCount,
    int StatementCount,
    int EventCount,
    IReadOnlyList<BsonDocument> EventDocuments);
@@ -0,0 +1,252 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using MongoDB.Bson;

namespace StellaOps.Bench.LinkNotMerge.Vex;

internal static class VexObservationGenerator
{
    private static readonly ImmutableArray<string> StatusPool = ImmutableArray.Create(
        "affected",
        "not_affected",
        "under_investigation");

    private static readonly ImmutableArray<string> JustificationPool = ImmutableArray.Create(
        "exploitation_mitigated",
        "component_not_present",
        "vulnerable_code_not_present",
        "vulnerable_code_not_in_execute_path");

    public static IReadOnlyList<VexObservationSeed> Generate(VexScenarioConfig config)
    {
        ArgumentNullException.ThrowIfNull(config);

        var observationCount = config.ResolveObservationCount();
        var aliasGroups = config.ResolveAliasGroups();
        var statementsPerObservation = config.ResolveStatementsPerObservation();
        var tenantCount = config.ResolveTenantCount();
        var productsPerObservation = config.ResolveProductsPerObservation();
        var seed = config.ResolveSeed();

        var seeds = new VexObservationSeed[observationCount];
        var random = new Random(seed);
        var baseTime = new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero);

        for (var index = 0; index < observationCount; index++)
        {
            var tenantIndex = index % tenantCount;
            var tenant = $"tenant-{tenantIndex:D2}";
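            // Observations are dealt round-robin across alias groups; "revision" counts
            // how many times the same alias group has been revisited.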
            var group = index % aliasGroups;
            var revision = index / aliasGroups;
            var vulnerabilityAlias = $"CVE-2025-{group:D4}";
            var upstreamId = $"VEX-{group:D4}-{revision:D3}";
            var observationId = $"{tenant}:vex:{group:D5}:{revision:D6}";

            var fetchedAt = baseTime.AddMinutes(revision);
            var receivedAt = fetchedAt.AddSeconds(2);
            var documentVersion = fetchedAt.AddSeconds(15).ToString("O");

            var products = CreateProducts(group, revision, productsPerObservation);
            var statements = CreateStatements(vulnerabilityAlias, products, statementsPerObservation, random, fetchedAt);
            var rawPayload = CreateRawPayload(upstreamId, vulnerabilityAlias, statements);
            var contentHash = ComputeContentHash(rawPayload, tenant, group, revision);

            // Cast to char so the GHSA suffix renders as letters rather than their numeric codes.
            var aliases = ImmutableArray.Create(vulnerabilityAlias, $"GHSA-{group:D4}-{(char)('a' + revision % 26)}{(char)('a' + revision % 26)}");
var references = ImmutableArray.Create(
|
||||
new VexReference("advisory", $"https://vendor.example/advisories/{vulnerabilityAlias.ToLowerInvariant()}"),
|
||||
new VexReference("fix", $"https://vendor.example/patch/{vulnerabilityAlias.ToLowerInvariant()}"));
|
||||
|
||||
seeds[index] = new VexObservationSeed(
|
||||
ObservationId: observationId,
|
||||
Tenant: tenant,
|
||||
Vendor: "excititor-bench",
|
||||
Stream: "simulated",
|
||||
Api: $"https://bench.stella/vex/{group:D4}/{revision:D3}",
|
||||
CollectorVersion: "1.0.0-bench",
|
||||
UpstreamId: upstreamId,
|
||||
DocumentVersion: documentVersion,
|
||||
FetchedAt: fetchedAt,
|
||||
ReceivedAt: receivedAt,
|
||||
ContentHash: contentHash,
|
||||
VulnerabilityAlias: vulnerabilityAlias,
|
||||
Aliases: aliases,
|
||||
Products: products,
|
||||
Statements: statements,
|
||||
References: references,
|
||||
ContentFormat: "CycloneDX-VEX",
|
||||
SpecVersion: "1.4",
|
||||
RawPayload: rawPayload);
|
||||
}
|
||||
|
||||
return seeds;
|
||||
}
|
||||
|
||||
private static ImmutableArray<VexProduct> CreateProducts(int group, int revision, int count)
|
||||
{
|
||||
var builder = ImmutableArray.CreateBuilder<VexProduct>(count);
|
||||
for (var index = 0; index < count; index++)
|
||||
{
|
||||
var purl = $"pkg:generic/stella/product-{group:D4}-{index}@{1 + revision % 5}.{index + 1}.{revision % 9}";
|
||||
builder.Add(new VexProduct(purl, $"component-{group % 30:D2}", $"namespace-{group % 10:D2}"));
|
||||
}
|
||||
|
||||
return builder.MoveToImmutable();
|
||||
}
|
||||
|
||||
private static ImmutableArray<BsonDocument> CreateStatements(
|
||||
string vulnerabilityAlias,
|
||||
ImmutableArray<VexProduct> products,
|
||||
int statementsPerObservation,
|
||||
Random random,
|
||||
DateTimeOffset baseTime)
|
||||
{
|
||||
var builder = ImmutableArray.CreateBuilder<BsonDocument>(statementsPerObservation);
        for (var index = 0; index < statementsPerObservation; index++)
        {
            var statusIndex = random.Next(StatusPool.Length);
            var status = StatusPool[statusIndex];
            var justification = JustificationPool[random.Next(JustificationPool.Length)];
            var product = products[index % products.Length];
            var statementId = $"stmt-{vulnerabilityAlias}-{index:D2}";

            var document = new BsonDocument
            {
                ["statement_id"] = statementId,
                ["vulnerability_alias"] = vulnerabilityAlias,
                ["product"] = new BsonDocument
                {
                    ["purl"] = product.Purl,
                    ["component"] = product.Component,
                    ["namespace"] = product.Namespace,
                },
                ["status"] = status,
                ["justification"] = justification,
                ["impact"] = status == "affected" ? "high" : "none",
                ["last_updated"] = baseTime.AddMinutes(index).UtcDateTime,
            };

            builder.Add(document);
        }

        return builder.MoveToImmutable();
    }

    private static BsonDocument CreateRawPayload(string upstreamId, string vulnerabilityAlias, ImmutableArray<BsonDocument> statements)
    {
        var doc = new BsonDocument
        {
            ["documentId"] = upstreamId,
            ["title"] = $"Simulated VEX report {upstreamId}",
            ["summary"] = $"Synthetic VEX payload for {vulnerabilityAlias}.",
            ["statements"] = new BsonArray(statements),
        };

        return doc;
    }
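
    // Deterministic content hash: a tenant/group/revision seed is appended to the payload
    // bytes so observations with identical raw payloads still hash uniquely per tenant.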
    private static string ComputeContentHash(BsonDocument rawPayload, string tenant, int group, int revision)
    {
        using var sha256 = SHA256.Create();
        var seed = $"{tenant}|{group}|{revision}";
        var rawBytes = rawPayload.ToBson();
        var seedBytes = System.Text.Encoding.UTF8.GetBytes(seed);
        var combined = new byte[rawBytes.Length + seedBytes.Length];
        Buffer.BlockCopy(rawBytes, 0, combined, 0, rawBytes.Length);
        Buffer.BlockCopy(seedBytes, 0, combined, rawBytes.Length, seedBytes.Length);
        var hash = sha256.ComputeHash(combined);
        return $"sha256:{Convert.ToHexString(hash)}";
    }
}

internal sealed record VexObservationSeed(
    string ObservationId,
    string Tenant,
    string Vendor,
    string Stream,
    string Api,
    string CollectorVersion,
    string UpstreamId,
    string DocumentVersion,
    DateTimeOffset FetchedAt,
    DateTimeOffset ReceivedAt,
    string ContentHash,
    string VulnerabilityAlias,
    ImmutableArray<string> Aliases,
    ImmutableArray<VexProduct> Products,
    ImmutableArray<BsonDocument> Statements,
    ImmutableArray<VexReference> References,
    string ContentFormat,
    string SpecVersion,
    BsonDocument RawPayload)
{
    public BsonDocument ToBsonDocument()
    {
        var aliases = new BsonArray(Aliases.Select(alias => alias));
        var statements = new BsonArray(Statements);
        var productsArray = new BsonArray(Products.Select(product => new BsonDocument
        {
            ["purl"] = product.Purl,
            ["component"] = product.Component,
            ["namespace"] = product.Namespace,
        }));
        var references = new BsonArray(References.Select(reference => new BsonDocument
        {
            ["type"] = reference.Type,
            ["url"] = reference.Url,
        }));

        var document = new BsonDocument
        {
            ["_id"] = ObservationId,
            ["tenant"] = Tenant,
            ["source"] = new BsonDocument
            {
                ["vendor"] = Vendor,
                ["stream"] = Stream,
                ["api"] = Api,
                ["collector_version"] = CollectorVersion,
            },
            ["upstream"] = new BsonDocument
            {
                ["upstream_id"] = UpstreamId,
                ["document_version"] = DocumentVersion,
                ["fetched_at"] = FetchedAt.UtcDateTime,
                ["received_at"] = ReceivedAt.UtcDateTime,
                ["content_hash"] = ContentHash,
                ["signature"] = new BsonDocument
                {
                    ["present"] = false,
                    ["format"] = BsonNull.Value,
                    ["key_id"] = BsonNull.Value,
                    ["signature"] = BsonNull.Value,
                },
            },
            ["content"] = new BsonDocument
            {
                ["format"] = ContentFormat,
                ["spec_version"] = SpecVersion,
                ["raw"] = RawPayload,
            },
            ["identifiers"] = new BsonDocument
            {
                ["aliases"] = aliases,
                ["primary"] = VulnerabilityAlias,
            },
            ["statements"] = statements,
            ["linkset"] = new BsonDocument
            {
                ["aliases"] = aliases,
                ["products"] = productsArray,
                ["references"] = references,
                ["reconciled_from"] = new BsonArray { "/statements" },
            },
            ["supersedes"] = BsonNull.Value,
        };

        return document;
    }
}

internal sealed record VexProduct(string Purl, string Component, string Namespace);

internal sealed record VexReference(string Type, string Url);
@@ -0,0 +1,183 @@
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Bench.LinkNotMerge.Vex;

internal sealed record VexBenchmarkConfig(
    double? ThresholdMs,
    double? MinThroughputPerSecond,
    double? MinEventThroughputPerSecond,
    double? MaxAllocatedMb,
    int? Iterations,
    IReadOnlyList<VexScenarioConfig> Scenarios)
{
    public static async Task<VexBenchmarkConfig> LoadAsync(string path)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
        }

        await using var stream = File.OpenRead(resolved);
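        // Tolerant JSON options: comments and trailing commas are accepted so the
        // scenario config can be annotated without breaking the loader.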
        var model = await JsonSerializer.DeserializeAsync<VexBenchmarkConfigModel>(
            stream,
            new JsonSerializerOptions(JsonSerializerDefaults.Web)
            {
                PropertyNameCaseInsensitive = true,
                ReadCommentHandling = JsonCommentHandling.Skip,
                AllowTrailingCommas = true,
            }).ConfigureAwait(false);

        if (model is null)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
        }

        if (model.Scenarios.Count == 0)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
        }

        foreach (var scenario in model.Scenarios)
        {
            scenario.Validate();
        }

        return new VexBenchmarkConfig(
            model.ThresholdMs,
            model.MinThroughputPerSecond,
            model.MinEventThroughputPerSecond,
            model.MaxAllocatedMb,
            model.Iterations,
            model.Scenarios);
    }

    private sealed class VexBenchmarkConfigModel
    {
        [JsonPropertyName("thresholdMs")]
        public double? ThresholdMs { get; init; }

        [JsonPropertyName("minThroughputPerSecond")]
        public double? MinThroughputPerSecond { get; init; }

        [JsonPropertyName("minEventThroughputPerSecond")]
        public double? MinEventThroughputPerSecond { get; init; }

        [JsonPropertyName("maxAllocatedMb")]
        public double? MaxAllocatedMb { get; init; }

        [JsonPropertyName("iterations")]
        public int? Iterations { get; init; }

        [JsonPropertyName("scenarios")]
        public List<VexScenarioConfig> Scenarios { get; init; } = new();
    }
}

internal sealed class VexScenarioConfig
{
    private const int DefaultObservationCount = 4_000;
    private const int DefaultAliasGroups = 400;
    private const int DefaultStatementsPerObservation = 6;
    private const int DefaultProductsPerObservation = 3;
    private const int DefaultTenants = 3;
    private const int DefaultBatchSize = 250;
    private const int DefaultSeed = 520_025;

    [JsonPropertyName("id")]
    public string? Id { get; init; }

    [JsonPropertyName("label")]
    public string? Label { get; init; }

    [JsonPropertyName("observations")]
    public int? Observations { get; init; }

    [JsonPropertyName("aliasGroups")]
    public int? AliasGroups { get; init; }

    [JsonPropertyName("statementsPerObservation")]
    public int? StatementsPerObservation { get; init; }

    [JsonPropertyName("productsPerObservation")]
    public int? ProductsPerObservation { get; init; }

    [JsonPropertyName("tenants")]
    public int? Tenants { get; init; }

    [JsonPropertyName("batchSize")]
    public int? BatchSize { get; init; }

    [JsonPropertyName("seed")]
    public int? Seed { get; init; }

    [JsonPropertyName("iterations")]
    public int? Iterations { get; init; }

    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }

    [JsonPropertyName("minThroughputPerSecond")]
    public double? MinThroughputPerSecond { get; init; }

    [JsonPropertyName("minEventThroughputPerSecond")]
    public double? MinEventThroughputPerSecond { get; init; }

    [JsonPropertyName("maxAllocatedMb")]
    public double? MaxAllocatedMb { get; init; }

    public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "vex" : Id!.Trim();

    public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim();

    public int ResolveObservationCount() => Observations is > 0 ? Observations.Value : DefaultObservationCount;

    public int ResolveAliasGroups() => AliasGroups is > 0 ? AliasGroups.Value : DefaultAliasGroups;

    public int ResolveStatementsPerObservation() => StatementsPerObservation is > 0 ? StatementsPerObservation.Value : DefaultStatementsPerObservation;

    public int ResolveProductsPerObservation() => ProductsPerObservation is > 0 ? ProductsPerObservation.Value : DefaultProductsPerObservation;

    public int ResolveTenantCount() => Tenants is > 0 ? Tenants.Value : DefaultTenants;

    public int ResolveBatchSize() => BatchSize is > 0 ? BatchSize.Value : DefaultBatchSize;

    public int ResolveSeed() => Seed is > 0 ? Seed.Value : DefaultSeed;

    public int ResolveIterations(int? defaultIterations)
    {
        var iterations = Iterations ?? defaultIterations ?? 3;
        if (iterations <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires iterations > 0.");
        }

        return iterations;
    }

    public void Validate()
    {
        if (ResolveObservationCount() <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires observations > 0.");
        }

        if (ResolveAliasGroups() <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires aliasGroups > 0.");
        }

        if (ResolveStatementsPerObservation() <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires statementsPerObservation > 0.");
        }

        if (ResolveProductsPerObservation() <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires productsPerObservation > 0.");
        }
    }
}
@@ -0,0 +1,14 @@
namespace StellaOps.Bench.LinkNotMerge.Vex;

internal sealed record VexScenarioExecutionResult(
    IReadOnlyList<double> TotalDurationsMs,
    IReadOnlyList<double> InsertDurationsMs,
    IReadOnlyList<double> CorrelationDurationsMs,
    IReadOnlyList<double> AllocatedMb,
    IReadOnlyList<double> ObservationThroughputsPerSecond,
    IReadOnlyList<double> EventThroughputsPerSecond,
    int ObservationCount,
    int AliasGroups,
    int StatementCount,
    int EventCount,
    VexAggregationResult AggregationResult);
@@ -0,0 +1,43 @@
using System.Globalization;

namespace StellaOps.Bench.LinkNotMerge.Vex;

internal sealed record VexScenarioResult(
    string Id,
    string Label,
    int Iterations,
    int ObservationCount,
    int AliasGroups,
    int StatementCount,
    int EventCount,
    DurationStatistics TotalStatistics,
    DurationStatistics InsertStatistics,
    DurationStatistics CorrelationStatistics,
    ThroughputStatistics ObservationThroughputStatistics,
    ThroughputStatistics EventThroughputStatistics,
    AllocationStatistics AllocationStatistics,
    double? ThresholdMs,
    double? MinObservationThroughputPerSecond,
    double? MinEventThroughputPerSecond,
    double? MaxAllocatedThresholdMb)
{
    public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];

    public string ObservationsColumn => ObservationCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);

    public string StatementColumn => StatementCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(10);

    public string EventColumn => EventCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(8);

    public string TotalMeanColumn => TotalStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string CorrelationMeanColumn => CorrelationStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string InsertMeanColumn => InsertStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string ObservationThroughputColumn => (ObservationThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);

    public string EventThroughputColumn => (EventThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);

    public string AllocatedColumn => AllocationStatistics.MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}
@@ -0,0 +1,138 @@
using System.Diagnostics;
using EphemeralMongo;
using MongoDB.Bson;
using MongoDB.Driver;

namespace StellaOps.Bench.LinkNotMerge.Vex;

internal sealed class VexScenarioRunner
{
    private readonly VexScenarioConfig _config;
    private readonly IReadOnlyList<VexObservationSeed> _seeds;

    public VexScenarioRunner(VexScenarioConfig config)
    {
        _config = config ?? throw new ArgumentNullException(nameof(config));
        _seeds = VexObservationGenerator.Generate(config);
    }

    public VexScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }

        var totalDurations = new double[iterations];
        var insertDurations = new double[iterations];
        var correlationDurations = new double[iterations];
        var allocated = new double[iterations];
        var observationThroughputs = new double[iterations];
        var eventThroughputs = new double[iterations];
        VexAggregationResult lastAggregation = new(0, 0, 0, Array.Empty<BsonDocument>());

        for (var iteration = 0; iteration < iterations; iteration++)
        {
            cancellationToken.ThrowIfCancellationRequested();
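
            // A fresh ephemeral MongoDB instance per iteration keeps runs independent:
            // no residual documents, indexes, or cache warmth leak between iterations.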
            using var runner = MongoRunner.Run(new MongoRunnerOptions
            {
                UseSingleNodeReplicaSet = false,
            });

            var client = new MongoClient(runner.ConnectionString);
            var database = client.GetDatabase("linknotmerge_vex_bench");
            var collection = database.GetCollection<BsonDocument>("vex_observations");

            CreateIndexes(collection, cancellationToken);

            var beforeAllocated = GC.GetTotalAllocatedBytes();

            var insertStopwatch = Stopwatch.StartNew();
            InsertObservations(collection, _seeds, _config.ResolveBatchSize(), cancellationToken);
            insertStopwatch.Stop();

            var correlationStopwatch = Stopwatch.StartNew();
            var documents = collection
                .Find(FilterDefinition<BsonDocument>.Empty)
                .Project(Builders<BsonDocument>.Projection
                    .Include("tenant")
                    .Include("statements")
                    .Include("linkset"))
                .ToList(cancellationToken);

            var aggregator = new VexLinksetAggregator();
            lastAggregation = aggregator.Correlate(documents);
            correlationStopwatch.Stop();

            var totalElapsed = insertStopwatch.Elapsed + correlationStopwatch.Elapsed;
            var afterAllocated = GC.GetTotalAllocatedBytes();

            totalDurations[iteration] = totalElapsed.TotalMilliseconds;
            insertDurations[iteration] = insertStopwatch.Elapsed.TotalMilliseconds;
            correlationDurations[iteration] = correlationStopwatch.Elapsed.TotalMilliseconds;
            allocated[iteration] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);
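
            // Guard the divisors: elapsed seconds (and the event count) are clamped so a
            // run that finishes faster than timer resolution cannot divide by zero.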
            var totalSeconds = Math.Max(totalElapsed.TotalSeconds, 0.0001d);
            observationThroughputs[iteration] = _seeds.Count / totalSeconds;

            var eventSeconds = Math.Max(correlationStopwatch.Elapsed.TotalSeconds, 0.0001d);
            var eventCount = Math.Max(lastAggregation.EventCount, 1);
            eventThroughputs[iteration] = eventCount / eventSeconds;
        }

        return new VexScenarioExecutionResult(
            totalDurations,
            insertDurations,
            correlationDurations,
            allocated,
            observationThroughputs,
            eventThroughputs,
            ObservationCount: _seeds.Count,
            AliasGroups: _config.ResolveAliasGroups(),
            StatementCount: lastAggregation.StatementCount,
            EventCount: lastAggregation.EventCount,
            AggregationResult: lastAggregation);
    }

    private static void InsertObservations(
        IMongoCollection<BsonDocument> collection,
        IReadOnlyList<VexObservationSeed> seeds,
        int batchSize,
        CancellationToken cancellationToken)
    {
        for (var offset = 0; offset < seeds.Count; offset += batchSize)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var remaining = Math.Min(batchSize, seeds.Count - offset);
            var batch = new List<BsonDocument>(remaining);
            for (var index = 0; index < remaining; index++)
            {
                batch.Add(seeds[offset + index].ToBsonDocument());
            }

            collection.InsertMany(batch, new InsertManyOptions
            {
                IsOrdered = false,
                BypassDocumentValidation = true,
            }, cancellationToken);
        }
    }

    private static void CreateIndexes(IMongoCollection<BsonDocument> collection, CancellationToken cancellationToken)
    {
        var indexKeys = Builders<BsonDocument>.IndexKeys
            .Ascending("tenant")
            .Ascending("linkset.aliases");

        try
        {
            collection.Indexes.CreateOne(new CreateIndexModel<BsonDocument>(indexKeys), cancellationToken: cancellationToken);
        }
        catch
        {
            // Index creation failures are non-fatal; the benchmark proceeds without the index.
        }
    }
}
4
src/StellaOps.Bench/LinkNotMerge.Vex/baseline.csv
Normal file
@@ -0,0 +1,4 @@
scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb
vex_ingest_baseline,5,4000,24000,21326,842.8191,1319.3038,1432.7675,346.7277,496.0915,5349.8940,2791.7998,48942.4901,24653.0556,138.6365
vex_ingest_medium,5,8000,64000,56720,1525.9929,1706.8900,1748.9056,533.3378,992.6552,5274.5883,4574.2892,57654.9190,48531.7353,326.8638
vex_ingest_high,5,12000,120000,106910,2988.5094,3422.1728,3438.9364,903.3927,2085.1167,4066.2300,3489.4510,52456.9493,42358.0556,583.9903
54
src/StellaOps.Bench/LinkNotMerge.Vex/config.json
Normal file
@@ -0,0 +1,54 @@
{
  "thresholdMs": 4200,
  "minThroughputPerSecond": 1800,
  "minEventThroughputPerSecond": 2000,
  "maxAllocatedMb": 800,
  "iterations": 5,
  "scenarios": [
    {
      "id": "vex_ingest_baseline",
      "label": "4k observations, 400 aliases",
      "observations": 4000,
      "aliasGroups": 400,
      "statementsPerObservation": 6,
      "productsPerObservation": 3,
      "tenants": 3,
      "batchSize": 200,
      "seed": 420020,
      "thresholdMs": 2300,
      "minThroughputPerSecond": 1800,
      "minEventThroughputPerSecond": 2000,
      "maxAllocatedMb": 220
    },
    {
      "id": "vex_ingest_medium",
      "label": "8k observations, 700 aliases",
      "observations": 8000,
      "aliasGroups": 700,
      "statementsPerObservation": 8,
      "productsPerObservation": 4,
      "tenants": 5,
      "batchSize": 300,
      "seed": 520020,
      "thresholdMs": 3200,
      "minThroughputPerSecond": 2200,
      "minEventThroughputPerSecond": 2500,
      "maxAllocatedMb": 400
    },
    {
      "id": "vex_ingest_high",
      "label": "12k observations, 1100 aliases",
      "observations": 12000,
      "aliasGroups": 1100,
      "statementsPerObservation": 10,
      "productsPerObservation": 5,
      "tenants": 7,
      "batchSize": 400,
      "seed": 620020,
      "thresholdMs": 4200,
      "minThroughputPerSecond": 2200,
      "minEventThroughputPerSecond": 2500,
      "maxAllocatedMb": 700
    }
  ]
}
26
src/StellaOps.Bench/LinkNotMerge/README.md
Normal file
@@ -0,0 +1,26 @@
# Link-Not-Merge Bench

Synthetic workload that measures advisory observation ingestion and linkset correlation throughput for the Link-Not-Merge program.

## Scenarios

`config.json` defines three scenarios that vary observation volume, alias density, and correlation fan-out. Each scenario captures:

- Total latency (ingest + correlation) and p95/max percentiles
- Insert latency against an ephemeral MongoDB instance
- Correlator-only latency, tracking fan-out costs
- Observation and Mongo insert throughput (ops/sec)
- Peak managed heap allocations

## Running locally

```bash
dotnet run \
  --project src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/StellaOps.Bench.LinkNotMerge.csproj \
  -- \
  --csv out/linknotmerge-bench.csv \
  --json out/linknotmerge-bench.json \
  --prometheus out/linknotmerge-bench.prom
```

The benchmark exits non-zero if latency exceeds configured thresholds, throughput falls below the floor, Mongo insert throughput regresses, allocations exceed the ceiling, or regression ratios breach the baseline.
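
The CSV written via `--csv` follows the same fifteen-column layout the baseline loader expects, so a passing run can be copied over `baseline.csv` to refresh it. A quick way to inspect the layout, assuming your build matches this revision:

```bash
head -n 1 out/linknotmerge-bench.csv
# scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb
```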
@@ -0,0 +1,38 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.LinkNotMerge.Baseline;
using Xunit;

namespace StellaOps.Bench.LinkNotMerge.Tests;

public sealed class BaselineLoaderTests
{
    [Fact]
    public async Task LoadAsync_ReadsEntries()
    {
        var path = Path.GetTempFileName();
        try
        {
            await File.WriteAllTextAsync(
                path,
                "scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb\n" +
                "lnm_ingest_baseline,5,5000,500,450,320.5,340.1,360.9,120.2,210.3,15000.0,13500.0,18000.0,16500.0,96.5\n");

            var baseline = await BaselineLoader.LoadAsync(path, CancellationToken.None);
            var entry = Assert.Single(baseline);

            Assert.Equal("lnm_ingest_baseline", entry.Key);
            Assert.Equal(5, entry.Value.Iterations);
            Assert.Equal(5000, entry.Value.Observations);
            Assert.Equal(500, entry.Value.Aliases);
            Assert.Equal(360.9, entry.Value.MaxTotalMs);
            Assert.Equal(16500.0, entry.Value.MinMongoThroughputPerSecond);
            Assert.Equal(96.5, entry.Value.MaxAllocatedMb);
        }
        finally
        {
            File.Delete(path);
        }
    }
}
@@ -0,0 +1,81 @@
using StellaOps.Bench.LinkNotMerge.Baseline;
using StellaOps.Bench.LinkNotMerge.Reporting;
using Xunit;

namespace StellaOps.Bench.LinkNotMerge.Tests;

public sealed class BenchmarkScenarioReportTests
{
    [Fact]
    public void RegressionDetection_FlagsBreaches()
    {
        var result = new ScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            ObservationCount: 1000,
            AliasGroups: 100,
            LinksetCount: 90,
            TotalStatistics: new DurationStatistics(200, 240, 260),
            InsertStatistics: new DurationStatistics(80, 90, 100),
            CorrelationStatistics: new DurationStatistics(120, 150, 170),
            TotalThroughputStatistics: new ThroughputStatistics(8000, 7000),
            InsertThroughputStatistics: new ThroughputStatistics(9000, 8000),
            AllocationStatistics: new AllocationStatistics(120),
            ThresholdMs: null,
            MinThroughputThresholdPerSecond: null,
            MinMongoThroughputThresholdPerSecond: null,
            MaxAllocatedThresholdMb: null);

        var baseline = new BaselineEntry(
            ScenarioId: "scenario",
            Iterations: 3,
            Observations: 1000,
            Aliases: 100,
            Linksets: 90,
            MeanTotalMs: 150,
            P95TotalMs: 170,
            MaxTotalMs: 180,
            MeanInsertMs: 60,
            MeanCorrelationMs: 90,
            MeanThroughputPerSecond: 9000,
            MinThroughputPerSecond: 8500,
            MeanMongoThroughputPerSecond: 10000,
            MinMongoThroughputPerSecond: 9500,
            MaxAllocatedMb: 100);

        var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1);

        Assert.True(report.DurationRegressionBreached);
        Assert.True(report.ThroughputRegressionBreached);
        Assert.True(report.MongoThroughputRegressionBreached);
        Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("max duration"));
    }

    [Fact]
    public void RegressionDetection_NoBaseline_NoBreaches()
    {
        var result = new ScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            ObservationCount: 1000,
            AliasGroups: 100,
            LinksetCount: 90,
            TotalStatistics: new DurationStatistics(200, 220, 230),
            InsertStatistics: new DurationStatistics(90, 100, 110),
            CorrelationStatistics: new DurationStatistics(110, 120, 130),
            TotalThroughputStatistics: new ThroughputStatistics(8000, 7900),
            InsertThroughputStatistics: new ThroughputStatistics(9000, 8900),
            AllocationStatistics: new AllocationStatistics(64),
            ThresholdMs: null,
            MinThroughputThresholdPerSecond: null,
            MinMongoThroughputThresholdPerSecond: null,
            MaxAllocatedThresholdMb: null);

        var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null);

        Assert.False(report.RegressionBreached);
        Assert.Empty(report.BuildRegressionFailureMessages());
    }
}
@@ -0,0 +1,38 @@
using System.Linq;
using System.Threading;
using StellaOps.Bench.LinkNotMerge.Baseline;
using Xunit;

namespace StellaOps.Bench.LinkNotMerge.Tests;

public sealed class LinkNotMergeScenarioRunnerTests
{
    [Fact]
    public void Execute_BuildsDeterministicAggregation()
    {
        var config = new LinkNotMergeScenarioConfig
        {
            Id = "unit",
            Observations = 120,
            AliasGroups = 24,
            PurlsPerObservation = 3,
            CpesPerObservation = 2,
            ReferencesPerObservation = 2,
            Tenants = 3,
            BatchSize = 40,
            Seed = 1337,
        };

        var runner = new LinkNotMergeScenarioRunner(config);
        var result = runner.Execute(iterations: 2, CancellationToken.None);

        Assert.Equal(120, result.ObservationCount);
        Assert.Equal(24, result.AliasGroups);
        Assert.True(result.TotalDurationsMs.All(value => value > 0));
        Assert.True(result.InsertThroughputsPerSecond.All(value => value > 0));
        Assert.True(result.TotalThroughputsPerSecond.All(value => value > 0));
        Assert.True(result.AllocatedMb.All(value => value >= 0));
        Assert.Equal(result.AggregationResult.LinksetCount, result.LinksetCount);
        Assert.Equal(result.AggregationResult.ObservationCount, result.ObservationCount);
    }
}
@@ -0,0 +1,28 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <UseConcelierTestInfra>false</UseConcelierTestInfra>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="coverlet.collector" Version="6.0.4">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="EphemeralMongo" Version="3.0.0" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Bench.LinkNotMerge\StellaOps.Bench.LinkNotMerge.csproj" />
  </ItemGroup>
</Project>
@@ -0,0 +1,18 @@
namespace StellaOps.Bench.LinkNotMerge.Baseline;

internal sealed record BaselineEntry(
    string ScenarioId,
    int Iterations,
    int Observations,
    int Aliases,
    int Linksets,
    double MeanTotalMs,
    double P95TotalMs,
    double MaxTotalMs,
    double MeanInsertMs,
    double MeanCorrelationMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MeanMongoThroughputPerSecond,
    double MinMongoThroughputPerSecond,
    double MaxAllocatedMb);
@@ -0,0 +1,87 @@
using System.Globalization;

namespace StellaOps.Bench.LinkNotMerge.Baseline;

internal static class BaselineLoader
{
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
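        // A missing baseline file is not an error: first runs simply skip regression checks.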
        if (!File.Exists(resolved))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }

        var result = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);

        await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var reader = new StreamReader(stream);

        var lineNumber = 0;
        while (true)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var line = await reader.ReadLineAsync().ConfigureAwait(false);
            if (line is null)
            {
                break;
            }

            lineNumber++;
            if (lineNumber == 1 || string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            var parts = line.Split(',', StringSplitOptions.TrimEntries);
            if (parts.Length < 15)
            {
                throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 15 columns, found {parts.Length}).");
            }

            var entry = new BaselineEntry(
                ScenarioId: parts[0],
                Iterations: ParseInt(parts[1], resolved, lineNumber),
                Observations: ParseInt(parts[2], resolved, lineNumber),
                Aliases: ParseInt(parts[3], resolved, lineNumber),
                Linksets: ParseInt(parts[4], resolved, lineNumber),
                MeanTotalMs: ParseDouble(parts[5], resolved, lineNumber),
                P95TotalMs: ParseDouble(parts[6], resolved, lineNumber),
                MaxTotalMs: ParseDouble(parts[7], resolved, lineNumber),
                MeanInsertMs: ParseDouble(parts[8], resolved, lineNumber),
                MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber),
                MeanThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber),
                MinThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber),
                MeanMongoThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber),
                MinMongoThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber),
                MaxAllocatedMb: ParseDouble(parts[14], resolved, lineNumber));

            result[entry.ScenarioId] = entry;
        }

        return result;
    }

    private static int ParseInt(string value, string file, int line)
    {
        if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var result))
        {
            return result;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
    }

    private static double ParseDouble(string value, string file, int line)
    {
        if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result))
        {
            return result;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
    }
}
@@ -0,0 +1,210 @@
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Bench.LinkNotMerge;

internal sealed record BenchmarkConfig(
    double? ThresholdMs,
    double? MinThroughputPerSecond,
    double? MinMongoThroughputPerSecond,
    double? MaxAllocatedMb,
    int? Iterations,
    IReadOnlyList<LinkNotMergeScenarioConfig> Scenarios)
{
    public static async Task<BenchmarkConfig> LoadAsync(string path)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
        }

        await using var stream = File.OpenRead(resolved);
        var model = await JsonSerializer.DeserializeAsync<BenchmarkConfigModel>(
            stream,
            new JsonSerializerOptions(JsonSerializerDefaults.Web)
            {
                PropertyNameCaseInsensitive = true,
                ReadCommentHandling = JsonCommentHandling.Skip,
                AllowTrailingCommas = true,
            }).ConfigureAwait(false);

        if (model is null)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
        }

        if (model.Scenarios.Count == 0)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
        }

        foreach (var scenario in model.Scenarios)
        {
            scenario.Validate();
        }

        return new BenchmarkConfig(
            model.ThresholdMs,
            model.MinThroughputPerSecond,
            model.MinMongoThroughputPerSecond,
            model.MaxAllocatedMb,
            model.Iterations,
            model.Scenarios);
    }

    private sealed class BenchmarkConfigModel
    {
        [JsonPropertyName("thresholdMs")]
        public double? ThresholdMs { get; init; }

        [JsonPropertyName("minThroughputPerSecond")]
        public double? MinThroughputPerSecond { get; init; }

        [JsonPropertyName("minMongoThroughputPerSecond")]
        public double? MinMongoThroughputPerSecond { get; init; }

        [JsonPropertyName("maxAllocatedMb")]
        public double? MaxAllocatedMb { get; init; }

        [JsonPropertyName("iterations")]
        public int? Iterations { get; init; }

        [JsonPropertyName("scenarios")]
        public List<LinkNotMergeScenarioConfig> Scenarios { get; init; } = new();
    }
}

internal sealed class LinkNotMergeScenarioConfig
{
    private const int DefaultObservationCount = 5_000;
    private const int DefaultAliasGroups = 500;
    private const int DefaultPurlsPerObservation = 4;
    private const int DefaultCpesPerObservation = 2;
    private const int DefaultReferencesPerObservation = 3;
    private const int DefaultTenants = 4;
    private const int DefaultBatchSize = 500;
    private const int DefaultSeed = 42_022;

    [JsonPropertyName("id")]
    public string? Id { get; init; }

    [JsonPropertyName("label")]
    public string? Label { get; init; }

    [JsonPropertyName("observations")]
    public int? Observations { get; init; }

    [JsonPropertyName("aliasGroups")]
    public int? AliasGroups { get; init; }

    [JsonPropertyName("purlsPerObservation")]
    public int? PurlsPerObservation { get; init; }

    [JsonPropertyName("cpesPerObservation")]
    public int? CpesPerObservation { get; init; }

    [JsonPropertyName("referencesPerObservation")]
    public int? ReferencesPerObservation { get; init; }

    [JsonPropertyName("tenants")]
    public int? Tenants { get; init; }

    [JsonPropertyName("batchSize")]
    public int? BatchSize { get; init; }

    [JsonPropertyName("seed")]
    public int? Seed { get; init; }

    [JsonPropertyName("iterations")]
    public int? Iterations { get; init; }

    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }

    [JsonPropertyName("minThroughputPerSecond")]
    public double? MinThroughputPerSecond { get; init; }

    [JsonPropertyName("minMongoThroughputPerSecond")]
    public double? MinMongoThroughputPerSecond { get; init; }

    [JsonPropertyName("maxAllocatedMb")]
    public double? MaxAllocatedMb { get; init; }

    public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "linknotmerge" : Id!.Trim();

    public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim();

    public int ResolveObservationCount() => Observations.HasValue && Observations.Value > 0
        ? Observations.Value
        : DefaultObservationCount;

    public int ResolveAliasGroups() => AliasGroups.HasValue && AliasGroups.Value > 0
        ? AliasGroups.Value
        : DefaultAliasGroups;

    public int ResolvePurlsPerObservation() => PurlsPerObservation.HasValue && PurlsPerObservation.Value > 0
        ? PurlsPerObservation.Value
        : DefaultPurlsPerObservation;

    public int ResolveCpesPerObservation() => CpesPerObservation.HasValue && CpesPerObservation.Value >= 0
        ? CpesPerObservation.Value
        : DefaultCpesPerObservation;

    public int ResolveReferencesPerObservation() => ReferencesPerObservation.HasValue && ReferencesPerObservation.Value >= 0
        ? ReferencesPerObservation.Value
        : DefaultReferencesPerObservation;

    public int ResolveTenantCount() => Tenants.HasValue && Tenants.Value > 0
        ? Tenants.Value
        : DefaultTenants;

    public int ResolveBatchSize() => BatchSize.HasValue && BatchSize.Value > 0
        ? BatchSize.Value
        : DefaultBatchSize;

    public int ResolveSeed() => Seed.HasValue && Seed.Value > 0
        ? Seed.Value
        : DefaultSeed;

    public int ResolveIterations(int? defaultIterations)
    {
        var iterations = Iterations ?? defaultIterations ?? 3;
        if (iterations <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires iterations > 0.");
        }

        return iterations;
    }

    public void Validate()
    {
        if (ResolveObservationCount() <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires observations > 0.");
        }

        if (ResolveAliasGroups() <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires aliasGroups > 0.");
        }

        if (ResolvePurlsPerObservation() <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires purlsPerObservation > 0.");
        }

        if (ResolveTenantCount() <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires tenants > 0.");
        }

        if (ResolveBatchSize() > ResolveObservationCount())
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' batchSize cannot exceed observations.");
        }
    }
}
@@ -0,0 +1,135 @@
using System.Diagnostics;
using EphemeralMongo;
using MongoDB.Bson;
using MongoDB.Driver;

namespace StellaOps.Bench.LinkNotMerge;

internal sealed class LinkNotMergeScenarioRunner
{
    private readonly LinkNotMergeScenarioConfig _config;
    private readonly IReadOnlyList<ObservationSeed> _seeds;

    public LinkNotMergeScenarioRunner(LinkNotMergeScenarioConfig config)
    {
        _config = config ?? throw new ArgumentNullException(nameof(config));
        _seeds = ObservationGenerator.Generate(config);
    }

    public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }

        var totalDurations = new double[iterations];
        var insertDurations = new double[iterations];
        var correlationDurations = new double[iterations];
        var allocated = new double[iterations];
        var totalThroughputs = new double[iterations];
        var insertThroughputs = new double[iterations];
        LinksetAggregationResult lastAggregation = new(0, 0, 0, 0, 0);

        for (var iteration = 0; iteration < iterations; iteration++)
        {
            cancellationToken.ThrowIfCancellationRequested();

            using var runner = MongoRunner.Run(new MongoRunnerOptions
            {
                UseSingleNodeReplicaSet = false,
            });

            var client = new MongoClient(runner.ConnectionString);
            var database = client.GetDatabase("linknotmerge_bench");
            var collection = database.GetCollection<BsonDocument>("advisory_observations");

            CreateIndexes(collection, cancellationToken);
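
            // Insert and correlation phases are timed with separate stopwatches so a
            // regression can be attributed to Mongo writes or to the in-memory correlator.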
            var beforeAllocated = GC.GetTotalAllocatedBytes();
            var insertStopwatch = Stopwatch.StartNew();
            InsertObservations(collection, _seeds, _config.ResolveBatchSize(), cancellationToken);
            insertStopwatch.Stop();

            var correlationStopwatch = Stopwatch.StartNew();
            var documents = collection
                .Find(FilterDefinition<BsonDocument>.Empty)
                .Project(Builders<BsonDocument>.Projection
                    .Include("tenant")
                    .Include("linkset"))
                .ToList(cancellationToken);

            var correlator = new LinksetAggregator();
            lastAggregation = correlator.Correlate(documents);
            correlationStopwatch.Stop();

            var totalElapsed = insertStopwatch.Elapsed + correlationStopwatch.Elapsed;
            var afterAllocated = GC.GetTotalAllocatedBytes();

            totalDurations[iteration] = totalElapsed.TotalMilliseconds;
            insertDurations[iteration] = insertStopwatch.Elapsed.TotalMilliseconds;
            correlationDurations[iteration] = correlationStopwatch.Elapsed.TotalMilliseconds;
            allocated[iteration] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);

            var totalSeconds = Math.Max(totalElapsed.TotalSeconds, 0.0001d);
            totalThroughputs[iteration] = _seeds.Count / totalSeconds;

            var insertSeconds = Math.Max(insertStopwatch.Elapsed.TotalSeconds, 0.0001d);
            insertThroughputs[iteration] = _seeds.Count / insertSeconds;
        }

        return new ScenarioExecutionResult(
            totalDurations,
            insertDurations,
            correlationDurations,
            allocated,
            totalThroughputs,
            insertThroughputs,
            ObservationCount: _seeds.Count,
            AliasGroups: _config.ResolveAliasGroups(),
            LinksetCount: lastAggregation.LinksetCount,
            TenantCount: _config.ResolveTenantCount(),
            AggregationResult: lastAggregation);
    }

    private static void InsertObservations(
        IMongoCollection<BsonDocument> collection,
        IReadOnlyList<ObservationSeed> seeds,
        int batchSize,
        CancellationToken cancellationToken)
    {
        for (var offset = 0; offset < seeds.Count; offset += batchSize)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var remaining = Math.Min(batchSize, seeds.Count - offset);
            var batch = new List<BsonDocument>(remaining);
            for (var index = 0; index < remaining; index++)
            {
                batch.Add(seeds[offset + index].ToBsonDocument());
            }

            collection.InsertMany(batch, new InsertManyOptions
            {
                IsOrdered = false,
                BypassDocumentValidation = true,
            }, cancellationToken);
        }
    }

    private static void CreateIndexes(IMongoCollection<BsonDocument> collection, CancellationToken cancellationToken)
    {
        var indexKeys = Builders<BsonDocument>.IndexKeys
            .Ascending("tenant")
            .Ascending("identifiers.aliases");

        try
        {
            collection.Indexes.CreateOne(new CreateIndexModel<BsonDocument>(indexKeys), cancellationToken: cancellationToken);
        }
        catch
        {
            // Index creation failures should not abort the benchmark; they may occur when running multiple iterations concurrently.
        }
    }
}
@@ -0,0 +1,140 @@
using MongoDB.Bson;

namespace StellaOps.Bench.LinkNotMerge;

internal sealed class LinksetAggregator
{
    public LinksetAggregationResult Correlate(IEnumerable<BsonDocument> documents)
    {
        ArgumentNullException.ThrowIfNull(documents);

        var groups = new Dictionary<string, LinksetAccumulator>(StringComparer.Ordinal);
        var totalObservations = 0;

        foreach (var document in documents)
        {
            totalObservations++;

            var tenant = document.GetValue("tenant", "unknown").AsString;
            var linkset = document.GetValue("linkset", new BsonDocument()).AsBsonDocument;
            var aliases = linkset.GetValue("aliases", new BsonArray()).AsBsonArray;
            var purls = linkset.GetValue("purls", new BsonArray()).AsBsonArray;
            var cpes = linkset.GetValue("cpes", new BsonArray()).AsBsonArray;
            var references = linkset.GetValue("references", new BsonArray()).AsBsonArray;

            foreach (var aliasValue in aliases)
            {
                if (!aliasValue.IsString)
                {
                    continue;
                }

                var alias = aliasValue.AsString;
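                // Group key is "tenant|alias", built with string.Create to avoid the
                // intermediate allocations of plain concatenation on this hot path.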
                var key = string.Create(alias.Length + tenant.Length + 1, (tenant, alias), static (span, data) =>
                {
                    var (tenantValue, aliasValue) = data;
                    tenantValue.AsSpan().CopyTo(span);
                    span[tenantValue.Length] = '|';
                    aliasValue.AsSpan().CopyTo(span[(tenantValue.Length + 1)..]);
                });

                if (!groups.TryGetValue(key, out var accumulator))
                {
                    accumulator = new LinksetAccumulator(tenant, alias);
                    groups[key] = accumulator;
                }

                accumulator.AddPurls(purls);
                accumulator.AddCpes(cpes);
                accumulator.AddReferences(references);
            }
        }

        var totalReferences = 0;
        var totalPurls = 0;
        var totalCpes = 0;

        foreach (var accumulator in groups.Values)
        {
            totalReferences += accumulator.ReferenceCount;
            totalPurls += accumulator.PurlCount;
            totalCpes += accumulator.CpeCount;
        }

        return new LinksetAggregationResult(
            LinksetCount: groups.Count,
            ObservationCount: totalObservations,
            TotalPurls: totalPurls,
            TotalCpes: totalCpes,
            TotalReferences: totalReferences);
    }

    private sealed class LinksetAccumulator
    {
        private readonly HashSet<string> _purls = new(StringComparer.Ordinal);
        private readonly HashSet<string> _cpes = new(StringComparer.Ordinal);
        private readonly HashSet<string> _references = new(StringComparer.Ordinal);

        public LinksetAccumulator(string tenant, string alias)
        {
            Tenant = tenant;
            Alias = alias;
        }

        public string Tenant { get; }

        public string Alias { get; }

        public int PurlCount => _purls.Count;

        public int CpeCount => _cpes.Count;

        public int ReferenceCount => _references.Count;

        public void AddPurls(BsonArray array)
        {
            foreach (var item in array)
            {
                if (item.IsString)
                {
                    _purls.Add(item.AsString);
                }
            }
        }

        public void AddCpes(BsonArray array)
        {
            foreach (var item in array)
            {
                if (item.IsString)
                {
                    _cpes.Add(item.AsString);
                }
            }
        }

        public void AddReferences(BsonArray array)
        {
            foreach (var item in array)
            {
                if (!item.IsBsonDocument)
                {
                    continue;
                }

                var document = item.AsBsonDocument;
                if (document.TryGetValue("url", out var urlValue) && urlValue.IsString)
                {
                    _references.Add(urlValue.AsString);
                }
            }
        }
    }
}

internal sealed record LinksetAggregationResult(
    int LinksetCount,
    int ObservationCount,
    int TotalPurls,
    int TotalCpes,
    int TotalReferences);
@@ -0,0 +1,270 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using MongoDB.Bson;

namespace StellaOps.Bench.LinkNotMerge;

internal static class ObservationGenerator
{
    public static IReadOnlyList<ObservationSeed> Generate(LinkNotMergeScenarioConfig config)
    {
        ArgumentNullException.ThrowIfNull(config);

        var observationCount = config.ResolveObservationCount();
        var aliasGroups = config.ResolveAliasGroups();
        var purlsPerObservation = config.ResolvePurlsPerObservation();
        var cpesPerObservation = config.ResolveCpesPerObservation();
        var referencesPerObservation = config.ResolveReferencesPerObservation();
        var tenantCount = config.ResolveTenantCount();
        var seed = config.ResolveSeed();

        var seeds = new ObservationSeed[observationCount];
        var random = new Random(seed);
        var baseTime = new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero);
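
        // Generation is deterministic: every field derives from the loop index, the
        // scenario seed, and this fixed epoch, so identical configs replay identical workloads.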
|
||||
|
||||
for (var index = 0; index < observationCount; index++)
|
||||
{
|
||||
var tenantIndex = index % tenantCount;
|
||||
var tenant = $"tenant-{tenantIndex:D2}";
|
||||
var group = index % aliasGroups;
|
||||
var revision = index / aliasGroups;
|
||||
var primaryAlias = $"CVE-2025-{group:D4}";
|
||||
var vendorAlias = $"VENDOR-{group:D4}";
|
||||
var thirdAlias = $"GHSA-{group:D4}-{(revision % 26 + 'a')}{(revision % 26 + 'a')}";
|
||||
var aliases = ImmutableArray.Create(primaryAlias, vendorAlias, thirdAlias);
|
||||
|
||||
var observationId = $"{tenant}:advisory:{group:D5}:{revision:D6}";
|
||||
var upstreamId = primaryAlias;
|
||||
var documentVersion = baseTime.AddMinutes(revision).ToString("O");
|
||||
var fetchedAt = baseTime.AddSeconds(index % 1_800);
|
||||
var receivedAt = fetchedAt.AddSeconds(1);
|
||||
|
||||
var purls = CreatePurls(group, revision, purlsPerObservation);
|
||||
var cpes = CreateCpes(group, revision, cpesPerObservation);
|
||||
var references = CreateReferences(primaryAlias, referencesPerObservation);
|
||||
|
||||
var rawPayload = CreateRawPayload(primaryAlias, vendorAlias, purls, cpes, references);
|
||||
var contentHash = ComputeContentHash(rawPayload, tenant, group, revision);
|
||||
|
||||
seeds[index] = new ObservationSeed(
|
||||
ObservationId: observationId,
|
||||
Tenant: tenant,
|
||||
Vendor: "concelier-bench",
|
||||
Stream: "simulated",
|
||||
Api: $"https://bench.stella/{group:D4}/{revision:D2}",
|
||||
CollectorVersion: "1.0.0-bench",
|
||||
UpstreamId: upstreamId,
|
||||
DocumentVersion: documentVersion,
|
||||
FetchedAt: fetchedAt,
|
||||
ReceivedAt: receivedAt,
|
||||
ContentHash: contentHash,
|
||||
Aliases: aliases,
|
||||
Purls: purls,
|
||||
Cpes: cpes,
|
||||
References: references,
|
||||
ContentFormat: "CSAF",
|
||||
SpecVersion: "2.0",
|
||||
RawPayload: rawPayload);
|
||||
}
|
||||
|
||||
return seeds;
|
||||
}
|
||||
|
||||
private static ImmutableArray<string> CreatePurls(int group, int revision, int count)
|
||||
{
|
||||
if (count <= 0)
|
||||
{
|
||||
return ImmutableArray<string>.Empty;
|
||||
}
|
||||
|
||||
var builder = ImmutableArray.CreateBuilder<string>(count);
|
||||
for (var index = 0; index < count; index++)
|
||||
{
|
||||
var version = $"{revision % 9 + 1}.{index + 1}.{group % 10}";
|
||||
builder.Add($"pkg:generic/stella/sample-{group:D4}-{index}@{version}");
|
||||
}
|
||||
|
||||
return builder.MoveToImmutable();
|
||||
}
|
||||
|
||||
    private static ImmutableArray<string> CreateCpes(int group, int revision, int count)
    {
        if (count <= 0)
        {
            return ImmutableArray<string>.Empty;
        }

        var builder = ImmutableArray.CreateBuilder<string>(count);
        for (var index = 0; index < count; index++)
        {
            var component = $"benchtool{group % 50:D2}";
            var version = $"{revision % 5}.{index}";
            builder.Add($"cpe:2.3:a:stellaops:{component}:{version}:*:*:*:*:*:*:*");
        }

        return builder.MoveToImmutable();
    }

    private static ImmutableArray<ObservationReference> CreateReferences(string primaryAlias, int count)
    {
        if (count <= 0)
        {
            return ImmutableArray<ObservationReference>.Empty;
        }

        var builder = ImmutableArray.CreateBuilder<ObservationReference>(count);
        for (var index = 0; index < count; index++)
        {
            builder.Add(new ObservationReference(
                Type: index % 2 == 0 ? "advisory" : "patch",
                Url: $"https://vendor.example/{primaryAlias.ToLowerInvariant()}/ref/{index:D2}"));
        }

        return builder.MoveToImmutable();
    }

    private static BsonDocument CreateRawPayload(
        string primaryAlias,
        string vendorAlias,
        IReadOnlyCollection<string> purls,
        IReadOnlyCollection<string> cpes,
        IReadOnlyCollection<ObservationReference> references)
    {
        var document = new BsonDocument
        {
            ["id"] = primaryAlias,
            ["vendorId"] = vendorAlias,
            ["title"] = $"Simulated advisory {primaryAlias}",
            ["summary"] = "Synthetic payload produced by Link-Not-Merge benchmark.",
            ["metrics"] = new BsonArray
            {
                new BsonDocument
                {
                    ["kind"] = "cvss:v3.1",
                    ["score"] = 7.5,
                    ["vector"] = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N",
                },
            },
        };

        if (purls.Count > 0)
        {
            document["purls"] = new BsonArray(purls);
        }

        if (cpes.Count > 0)
        {
            document["cpes"] = new BsonArray(cpes);
        }

        if (references.Count > 0)
        {
            document["references"] = new BsonArray(references.Select(reference => new BsonDocument
            {
                ["type"] = reference.Type,
                ["url"] = reference.Url,
            }));
        }

        return document;
    }

    private static string ComputeContentHash(BsonDocument rawPayload, string tenant, int group, int revision)
    {
        using var sha256 = SHA256.Create();
        var seed = $"{tenant}|{group}|{revision}";
        var rawBytes = rawPayload.ToBson();
        var seedBytes = System.Text.Encoding.UTF8.GetBytes(seed);
        var combined = new byte[rawBytes.Length + seedBytes.Length];
        Buffer.BlockCopy(rawBytes, 0, combined, 0, rawBytes.Length);
        Buffer.BlockCopy(seedBytes, 0, combined, rawBytes.Length, seedBytes.Length);
        var hash = sha256.ComputeHash(combined);
        return $"sha256:{Convert.ToHexString(hash)}";
    }
}

internal sealed record ObservationSeed(
    string ObservationId,
    string Tenant,
    string Vendor,
    string Stream,
    string Api,
    string CollectorVersion,
    string UpstreamId,
    string DocumentVersion,
    DateTimeOffset FetchedAt,
    DateTimeOffset ReceivedAt,
    string ContentHash,
    ImmutableArray<string> Aliases,
    ImmutableArray<string> Purls,
    ImmutableArray<string> Cpes,
    ImmutableArray<ObservationReference> References,
    string ContentFormat,
    string SpecVersion,
    BsonDocument RawPayload)
{
    public BsonDocument ToBsonDocument()
    {
        var aliases = new BsonArray(Aliases.Select(alias => alias));
        var purls = new BsonArray(Purls.Select(purl => purl));
        var cpes = new BsonArray(Cpes.Select(cpe => cpe));
        var references = new BsonArray(References.Select(reference => new BsonDocument
        {
            ["type"] = reference.Type,
            ["url"] = reference.Url,
        }));

        var document = new BsonDocument
        {
            ["_id"] = ObservationId,
            ["tenant"] = Tenant,
            ["source"] = new BsonDocument
            {
                ["vendor"] = Vendor,
                ["stream"] = Stream,
                ["api"] = Api,
                ["collector_version"] = CollectorVersion,
            },
            ["upstream"] = new BsonDocument
            {
                ["upstream_id"] = UpstreamId,
                ["document_version"] = DocumentVersion,
                ["fetched_at"] = FetchedAt.UtcDateTime,
                ["received_at"] = ReceivedAt.UtcDateTime,
                ["content_hash"] = ContentHash,
                ["signature"] = new BsonDocument
                {
                    ["present"] = false,
                    ["format"] = BsonNull.Value,
                    ["key_id"] = BsonNull.Value,
                    ["signature"] = BsonNull.Value,
                },
            },
            ["content"] = new BsonDocument
            {
                ["format"] = ContentFormat,
                ["spec_version"] = SpecVersion,
                ["raw"] = RawPayload,
            },
            ["identifiers"] = new BsonDocument
            {
                ["aliases"] = aliases,
                ["primary"] = UpstreamId,
                ["cve"] = Aliases.FirstOrDefault(alias => alias.StartsWith("CVE-", StringComparison.Ordinal)) ?? UpstreamId,
            },
            ["linkset"] = new BsonDocument
            {
                ["aliases"] = aliases,
                ["purls"] = purls,
                ["cpes"] = cpes,
                ["references"] = references,
                ["reconciled_from"] = new BsonArray { "/content/product_tree" },
            },
            ["supersedes"] = BsonNull.Value,
        };

        return document;
    }
}

internal sealed record ObservationReference(string Type, string Url);
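A note on the hashing above: the content hash is a pure function of the payload bytes plus the `tenant|group|revision` seed, so reruns of the generator produce identical observation documents. A minimal standalone sketch of the same combine-then-hash scheme (names and values here are illustrative, not part of this change):

```csharp
using System;
using System.Security.Cryptography;
using System.Text;

// Append the seed bytes to the payload bytes, then hash once.
static string HashWithSeed(byte[] payload, string seed)
{
    var seedBytes = Encoding.UTF8.GetBytes(seed);
    var combined = new byte[payload.Length + seedBytes.Length];
    Buffer.BlockCopy(payload, 0, combined, 0, payload.Length);
    Buffer.BlockCopy(seedBytes, 0, combined, payload.Length, seedBytes.Length);
    return $"sha256:{Convert.ToHexString(SHA256.HashData(combined))}";
}

// Same inputs always yield the same hash string.
var a = HashWithSeed(new byte[] { 1, 2, 3 }, "tenant-01|4|2");
var b = HashWithSeed(new byte[] { 1, 2, 3 }, "tenant-01|4|2");
Console.WriteLine(a == b); // True
```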
@@ -0,0 +1,375 @@
using System.Globalization;
using StellaOps.Bench.LinkNotMerge.Baseline;
using StellaOps.Bench.LinkNotMerge.Reporting;

namespace StellaOps.Bench.LinkNotMerge;

internal static class Program
{
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = ProgramOptions.Parse(args);
            var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
            var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);

            var results = new List<ScenarioResult>();
            var reports = new List<BenchmarkScenarioReport>();
            var failures = new List<string>();

            foreach (var scenario in config.Scenarios)
            {
                var iterations = scenario.ResolveIterations(options.Iterations ?? config.Iterations);
                var runner = new LinkNotMergeScenarioRunner(scenario);
                var execution = runner.Execute(iterations, CancellationToken.None);

                var totalStats = DurationStatistics.From(execution.TotalDurationsMs);
                var insertStats = DurationStatistics.From(execution.InsertDurationsMs);
                var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs);
                var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
                var throughputStats = ThroughputStatistics.From(execution.TotalThroughputsPerSecond);
                var mongoThroughputStats = ThroughputStatistics.From(execution.InsertThroughputsPerSecond);

                var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
                var throughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
                var mongoThroughputFloor = scenario.MinMongoThroughputPerSecond ?? options.MinMongoThroughputPerSecond ?? config.MinMongoThroughputPerSecond;
                var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;

                var result = new ScenarioResult(
                    scenario.ScenarioId,
                    scenario.DisplayLabel,
                    iterations,
                    execution.ObservationCount,
                    execution.AliasGroups,
                    execution.LinksetCount,
                    totalStats,
                    insertStats,
                    correlationStats,
                    throughputStats,
                    mongoThroughputStats,
                    allocationStats,
                    thresholdMs,
                    throughputFloor,
                    mongoThroughputFloor,
                    allocationLimit);

                results.Add(result);

                if (thresholdMs is { } threshold && result.TotalStatistics.MaxMs > threshold)
                {
                    failures.Add($"{result.Id} exceeded total latency threshold: {result.TotalStatistics.MaxMs:F2} ms > {threshold:F2} ms");
                }

                if (throughputFloor is { } floor && result.TotalThroughputStatistics.MinPerSecond < floor)
                {
                    failures.Add($"{result.Id} fell below throughput floor: {result.TotalThroughputStatistics.MinPerSecond:N0} obs/s < {floor:N0} obs/s");
                }

                if (mongoThroughputFloor is { } mongoFloor && result.InsertThroughputStatistics.MinPerSecond < mongoFloor)
                {
                    failures.Add($"{result.Id} fell below Mongo throughput floor: {result.InsertThroughputStatistics.MinPerSecond:N0} ops/s < {mongoFloor:N0} ops/s");
                }

                if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit)
                {
                    failures.Add($"{result.Id} exceeded allocation budget: {result.AllocationStatistics.MaxAllocatedMb:F2} MB > {limit:F2} MB");
                }

                baseline.TryGetValue(result.Id, out var baselineEntry);
                var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit);
                reports.Add(report);
                failures.AddRange(report.BuildRegressionFailureMessages());
            }

            TablePrinter.Print(results);

            if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
            {
                CsvWriter.Write(options.CsvOutPath!, results);
            }

            if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
            {
                var metadata = new BenchmarkJsonMetadata(
                    SchemaVersion: "linknotmerge-bench/1.0",
                    CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(),
                    Commit: options.Commit,
                    Environment: options.Environment);

                await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false);
            }

            if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
            {
                PrometheusWriter.Write(options.PrometheusOutPath!, reports);
            }

            if (failures.Count > 0)
            {
                Console.Error.WriteLine();
                Console.Error.WriteLine("Benchmark failures detected:");
                foreach (var failure in failures.Distinct())
                {
                    Console.Error.WriteLine($"  - {failure}");
                }

                return 1;
            }

            return 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"linknotmerge-bench error: {ex.Message}");
            return 1;
        }
    }

    private sealed record ProgramOptions(
        string ConfigPath,
        int? Iterations,
        double? ThresholdMs,
        double? MinThroughputPerSecond,
        double? MinMongoThroughputPerSecond,
        double? MaxAllocatedMb,
        string? CsvOutPath,
        string? JsonOutPath,
        string? PrometheusOutPath,
        string BaselinePath,
        DateTimeOffset? CapturedAtUtc,
        string? Commit,
        string? Environment,
        double? RegressionLimit)
    {
        public static ProgramOptions Parse(string[] args)
        {
            var configPath = DefaultConfigPath();
            var baselinePath = DefaultBaselinePath();

            int? iterations = null;
            double? thresholdMs = null;
            double? minThroughput = null;
            double? minMongoThroughput = null;
            double? maxAllocated = null;
            string? csvOut = null;
            string? jsonOut = null;
            string? promOut = null;
            DateTimeOffset? capturedAt = null;
            string? commit = null;
            string? environment = null;
            double? regressionLimit = null;

            for (var index = 0; index < args.Length; index++)
            {
                var current = args[index];
                switch (current)
                {
                    case "--config":
                        EnsureNext(args, index);
                        configPath = Path.GetFullPath(args[++index]);
                        break;
                    case "--iterations":
                        EnsureNext(args, index);
                        iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--threshold-ms":
                        EnsureNext(args, index);
                        thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-throughput":
                        EnsureNext(args, index);
                        minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-mongo-throughput":
                        EnsureNext(args, index);
                        minMongoThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--max-allocated-mb":
                        EnsureNext(args, index);
                        maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--csv":
                        EnsureNext(args, index);
                        csvOut = args[++index];
                        break;
                    case "--json":
                        EnsureNext(args, index);
                        jsonOut = args[++index];
                        break;
                    case "--prometheus":
                        EnsureNext(args, index);
                        promOut = args[++index];
                        break;
                    case "--baseline":
                        EnsureNext(args, index);
                        baselinePath = Path.GetFullPath(args[++index]);
                        break;
                    case "--captured-at":
                        EnsureNext(args, index);
                        capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
                        break;
                    case "--commit":
                        EnsureNext(args, index);
                        commit = args[++index];
                        break;
                    case "--environment":
                        EnsureNext(args, index);
                        environment = args[++index];
                        break;
                    case "--regression-limit":
                        EnsureNext(args, index);
                        regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--help":
                    case "-h":
                        PrintUsage();
                        System.Environment.Exit(0);
                        break;
                    default:
                        throw new ArgumentException($"Unknown argument '{current}'.");
                }
            }

            return new ProgramOptions(
                configPath,
                iterations,
                thresholdMs,
                minThroughput,
                minMongoThroughput,
                maxAllocated,
                csvOut,
                jsonOut,
                promOut,
                baselinePath,
                capturedAt,
                commit,
                environment,
                regressionLimit);
        }

        private static string DefaultConfigPath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "config.json");
        }

        private static string DefaultBaselinePath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "baseline.csv");
        }

        private static void EnsureNext(string[] args, int index)
        {
            if (index + 1 >= args.Length)
            {
                throw new ArgumentException("Missing value for argument.");
            }
        }

        private static void PrintUsage()
        {
            Console.WriteLine("Usage: linknotmerge-bench [options]");
            Console.WriteLine();
            Console.WriteLine("Options:");
            Console.WriteLine("  --config <path>                  Path to benchmark configuration JSON.");
            Console.WriteLine("  --iterations <count>             Override iteration count.");
            Console.WriteLine("  --threshold-ms <value>           Global latency threshold in milliseconds.");
            Console.WriteLine("  --min-throughput <value>         Global throughput floor (observations/second).");
            Console.WriteLine("  --min-mongo-throughput <value>   Mongo insert throughput floor (ops/second).");
            Console.WriteLine("  --max-allocated-mb <value>       Global allocation ceiling (MB).");
            Console.WriteLine("  --csv <path>                     Write CSV results to path.");
            Console.WriteLine("  --json <path>                    Write JSON results to path.");
            Console.WriteLine("  --prometheus <path>              Write Prometheus exposition metrics to path.");
            Console.WriteLine("  --baseline <path>                Baseline CSV path.");
            Console.WriteLine("  --captured-at <iso8601>          Timestamp to embed in JSON metadata.");
            Console.WriteLine("  --commit <sha>                   Commit identifier for metadata.");
            Console.WriteLine("  --environment <name>             Environment label for metadata.");
            Console.WriteLine("  --regression-limit <value>       Regression multiplier (default 1.15).");
        }
    }
}

internal static class TablePrinter
{
    public static void Print(IEnumerable<ScenarioResult> results)
    {
        Console.WriteLine("Scenario | Observations | Aliases | Linksets | Total(ms) | Correl(ms) | Insert(ms) | Min k/s | Mongo k/s | Alloc(MB)");
        Console.WriteLine("---------------------------- | ------------- | ------- | -------- | ---------- | ---------- | ----------- | -------- | --------- | --------");
        foreach (var row in results)
        {
            Console.WriteLine(string.Join(" | ", new[]
            {
                row.IdColumn,
                row.ObservationsColumn,
                row.AliasColumn,
                row.LinksetColumn,
                row.TotalMeanColumn,
                row.CorrelationMeanColumn,
                row.InsertMeanColumn,
                row.ThroughputColumn,
                row.MongoThroughputColumn,
                row.AllocatedColumn,
            }));
        }
    }
}

internal static class CsvWriter
{
    public static void Write(string path, IEnumerable<ScenarioResult> results)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(results);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        using var writer = new StreamWriter(stream);
        writer.WriteLine("scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb");

        foreach (var result in results)
        {
            writer.Write(result.Id);
            writer.Write(',');
            writer.Write(result.Iterations.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.ObservationCount.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.AliasGroups.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.LinksetCount.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalStatistics.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalStatistics.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.InsertStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.CorrelationStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.InsertThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.InsertThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.AllocationStatistics.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture));
            writer.WriteLine();
        }
    }
}
@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;

[assembly: InternalsVisibleTo("StellaOps.Bench.LinkNotMerge.Tests")]
@@ -0,0 +1,151 @@
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Bench.LinkNotMerge.Reporting;

internal static class BenchmarkJsonWriter
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    };

    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());

        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;
        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.ObservationCount,
            report.Result.AliasGroups,
            report.Result.LinksetCount,
            report.Result.TotalStatistics.MeanMs,
            report.Result.TotalStatistics.P95Ms,
            report.Result.TotalStatistics.MaxMs,
            report.Result.InsertStatistics.MeanMs,
            report.Result.CorrelationStatistics.MeanMs,
            report.Result.TotalThroughputStatistics.MeanPerSecond,
            report.Result.TotalThroughputStatistics.MinPerSecond,
            report.Result.InsertThroughputStatistics.MeanPerSecond,
            report.Result.InsertThroughputStatistics.MinPerSecond,
            report.Result.AllocationStatistics.MaxAllocatedMb,
            report.Result.ThresholdMs,
            report.Result.MinThroughputThresholdPerSecond,
            report.Result.MinMongoThroughputThresholdPerSecond,
            report.Result.MaxAllocatedThresholdMb,
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.Observations,
                    baseline.Aliases,
                    baseline.Linksets,
                    baseline.MeanTotalMs,
                    baseline.P95TotalMs,
                    baseline.MaxTotalMs,
                    baseline.MeanInsertMs,
                    baseline.MeanCorrelationMs,
                    baseline.MeanThroughputPerSecond,
                    baseline.MinThroughputPerSecond,
                    baseline.MeanMongoThroughputPerSecond,
                    baseline.MinMongoThroughputPerSecond,
                    baseline.MaxAllocatedMb),
            new BenchmarkJsonScenarioRegression(
                report.DurationRegressionRatio,
                report.ThroughputRegressionRatio,
                report.MongoThroughputRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int Observations,
        int Aliases,
        int Linksets,
        double MeanTotalMs,
        double P95TotalMs,
        double MaxTotalMs,
        double MeanInsertMs,
        double MeanCorrelationMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MeanMongoThroughputPerSecond,
        double MinMongoThroughputPerSecond,
        double MaxAllocatedMb,
        double? ThresholdMs,
        double? MinThroughputThresholdPerSecond,
        double? MinMongoThroughputThresholdPerSecond,
        double? MaxAllocatedThresholdMb,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int Observations,
        int Aliases,
        int Linksets,
        double MeanTotalMs,
        double P95TotalMs,
        double MaxTotalMs,
        double MeanInsertMs,
        double MeanCorrelationMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MeanMongoThroughputPerSecond,
        double MinMongoThroughputPerSecond,
        double MaxAllocatedMb);

    private sealed record BenchmarkJsonScenarioRegression(
        double? DurationRatio,
        double? ThroughputRatio,
        double? MongoThroughputRatio,
        double Limit,
        bool Breached);
}

internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);
@@ -0,0 +1,89 @@
using StellaOps.Bench.LinkNotMerge.Baseline;

namespace StellaOps.Bench.LinkNotMerge.Reporting;

internal sealed class BenchmarkScenarioReport
{
    private const double DefaultRegressionLimit = 1.15d;

    public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        Result = result ?? throw new ArgumentNullException(nameof(result));
        Baseline = baseline;
        RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
        DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs);
        ThroughputRegressionRatio = CalculateInverseRatio(result.TotalThroughputStatistics.MinPerSecond, baseline?.MinThroughputPerSecond);
        MongoThroughputRegressionRatio = CalculateInverseRatio(result.InsertThroughputStatistics.MinPerSecond, baseline?.MinMongoThroughputPerSecond);
    }

    public ScenarioResult Result { get; }

    public BaselineEntry? Baseline { get; }

    public double RegressionLimit { get; }

    public double? DurationRegressionRatio { get; }

    public double? ThroughputRegressionRatio { get; }

    public double? MongoThroughputRegressionRatio { get; }

    public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool ThroughputRegressionBreached => ThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool MongoThroughputRegressionBreached => MongoThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached || MongoThroughputRegressionBreached;

    public IEnumerable<string> BuildRegressionFailureMessages()
    {
        if (Baseline is null)
        {
            yield break;
        }

        if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
        {
            var delta = (durationRatio - 1d) * 100d;
            yield return $"{Result.Id} exceeded max duration budget: {Result.TotalStatistics.MaxMs:F2} ms vs baseline {Baseline.MaxTotalMs:F2} ms (+{delta:F1}%).";
        }

        if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio)
        {
            var delta = (throughputRatio - 1d) * 100d;
            yield return $"{Result.Id} throughput regressed: min {Result.TotalThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinThroughputPerSecond:N0} obs/s (-{delta:F1}%).";
        }

        if (MongoThroughputRegressionBreached && MongoThroughputRegressionRatio is { } mongoRatio)
        {
            var delta = (mongoRatio - 1d) * 100d;
            yield return $"{Result.Id} Mongo throughput regressed: min {Result.InsertThroughputStatistics.MinPerSecond:N0} ops/s vs baseline {Baseline.MinMongoThroughputPerSecond:N0} ops/s (-{delta:F1}%).";
        }
    }

    private static double? CalculateRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        return current / baseline.Value;
    }

    private static double? CalculateInverseRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        if (current <= 0d)
        {
            return double.PositiveInfinity;
        }

        return baseline.Value / current;
    }
}
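A quick worked example of the ratio conventions above (numbers invented): a baseline max duration of 200 ms against a current max of 230 ms gives 230/200 = 1.15, which meets the default limit and is flagged; throughput uses the inverse, so a baseline min of 50,000 obs/s against a current min of 40,000 obs/s gives 50,000/40,000 = 1.25, also a breach.

```csharp
// Duration regresses when current/baseline >= limit (higher is worse).
var durationRatio = 230d / 200d;          // 1.15 -> breached at the default 1.15 limit
// Throughput regresses when baseline/current >= limit (lower is worse).
var throughputRatio = 50_000d / 40_000d;  // 1.25 -> breached
```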
@@ -0,0 +1,101 @@
using System.Globalization;
using System.Text;

namespace StellaOps.Bench.LinkNotMerge.Reporting;

internal static class PrometheusWriter
{
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var builder = new StringBuilder();
        builder.AppendLine("# HELP linknotmerge_bench_total_ms Link-Not-Merge benchmark total duration metrics (milliseconds).");
        builder.AppendLine("# TYPE linknotmerge_bench_total_ms gauge");
        builder.AppendLine("# HELP linknotmerge_bench_correlation_ms Link-Not-Merge benchmark correlation duration metrics (milliseconds).");
        builder.AppendLine("# TYPE linknotmerge_bench_correlation_ms gauge");
        builder.AppendLine("# HELP linknotmerge_bench_insert_ms Link-Not-Merge benchmark Mongo insert duration metrics (milliseconds).");
        builder.AppendLine("# TYPE linknotmerge_bench_insert_ms gauge");
        builder.AppendLine("# HELP linknotmerge_bench_throughput_per_sec Link-Not-Merge benchmark throughput metrics (observations per second).");
        builder.AppendLine("# TYPE linknotmerge_bench_throughput_per_sec gauge");
        builder.AppendLine("# HELP linknotmerge_bench_mongo_throughput_per_sec Link-Not-Merge benchmark Mongo throughput metrics (operations per second).");
        builder.AppendLine("# TYPE linknotmerge_bench_mongo_throughput_per_sec gauge");
        builder.AppendLine("# HELP linknotmerge_bench_allocated_mb Link-Not-Merge benchmark allocation metrics (megabytes).");
        builder.AppendLine("# TYPE linknotmerge_bench_allocated_mb gauge");

        foreach (var report in reports)
        {
            var scenario = Escape(report.Result.Id);
            AppendMetric(builder, "linknotmerge_bench_mean_total_ms", scenario, report.Result.TotalStatistics.MeanMs);
            AppendMetric(builder, "linknotmerge_bench_p95_total_ms", scenario, report.Result.TotalStatistics.P95Ms);
            AppendMetric(builder, "linknotmerge_bench_max_total_ms", scenario, report.Result.TotalStatistics.MaxMs);
            AppendMetric(builder, "linknotmerge_bench_threshold_ms", scenario, report.Result.ThresholdMs);

            AppendMetric(builder, "linknotmerge_bench_mean_correlation_ms", scenario, report.Result.CorrelationStatistics.MeanMs);
            AppendMetric(builder, "linknotmerge_bench_mean_insert_ms", scenario, report.Result.InsertStatistics.MeanMs);

            AppendMetric(builder, "linknotmerge_bench_mean_throughput_per_sec", scenario, report.Result.TotalThroughputStatistics.MeanPerSecond);
            AppendMetric(builder, "linknotmerge_bench_min_throughput_per_sec", scenario, report.Result.TotalThroughputStatistics.MinPerSecond);
            AppendMetric(builder, "linknotmerge_bench_throughput_floor_per_sec", scenario, report.Result.MinThroughputThresholdPerSecond);

            AppendMetric(builder, "linknotmerge_bench_mean_mongo_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MeanPerSecond);
            AppendMetric(builder, "linknotmerge_bench_min_mongo_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MinPerSecond);
            AppendMetric(builder, "linknotmerge_bench_mongo_throughput_floor_per_sec", scenario, report.Result.MinMongoThroughputThresholdPerSecond);

            AppendMetric(builder, "linknotmerge_bench_max_allocated_mb", scenario, report.Result.AllocationStatistics.MaxAllocatedMb);
            AppendMetric(builder, "linknotmerge_bench_max_allocated_threshold_mb", scenario, report.Result.MaxAllocatedThresholdMb);

            if (report.Baseline is { } baseline)
            {
                AppendMetric(builder, "linknotmerge_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs);
                AppendMetric(builder, "linknotmerge_bench_baseline_min_throughput_per_sec", scenario, baseline.MinThroughputPerSecond);
                AppendMetric(builder, "linknotmerge_bench_baseline_min_mongo_throughput_per_sec", scenario, baseline.MinMongoThroughputPerSecond);
            }

            if (report.DurationRegressionRatio is { } durationRatio)
            {
                AppendMetric(builder, "linknotmerge_bench_duration_regression_ratio", scenario, durationRatio);
            }

            if (report.ThroughputRegressionRatio is { } throughputRatio)
            {
                AppendMetric(builder, "linknotmerge_bench_throughput_regression_ratio", scenario, throughputRatio);
            }

            if (report.MongoThroughputRegressionRatio is { } mongoRatio)
            {
                AppendMetric(builder, "linknotmerge_bench_mongo_throughput_regression_ratio", scenario, mongoRatio);
            }

            AppendMetric(builder, "linknotmerge_bench_regression_limit", scenario, report.RegressionLimit);
            AppendMetric(builder, "linknotmerge_bench_regression_breached", scenario, report.RegressionBreached ? 1 : 0);
        }

        File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
    }

    private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
    {
        if (!value.HasValue)
        {
            return;
        }

        builder.Append(metric);
        builder.Append("{scenario=\"");
        builder.Append(scenario);
        builder.Append("\"} ");
        builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture));
    }

    private static string Escape(string value) =>
        value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}
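For reference, the writer emits plain Prometheus text exposition with a single `scenario` label per sample; illustrative output lines (values invented, G17-formatted):

```text
linknotmerge_bench_mean_total_ms{scenario="lnm_ingest_baseline"} 555.19840000000002
linknotmerge_bench_min_throughput_per_sec{scenario="lnm_ingest_baseline"} 5769.5174999999999
linknotmerge_bench_regression_breached{scenario="lnm_ingest_baseline"} 0
```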
@@ -0,0 +1,14 @@
namespace StellaOps.Bench.LinkNotMerge;

internal sealed record ScenarioExecutionResult(
    IReadOnlyList<double> TotalDurationsMs,
    IReadOnlyList<double> InsertDurationsMs,
    IReadOnlyList<double> CorrelationDurationsMs,
    IReadOnlyList<double> AllocatedMb,
    IReadOnlyList<double> TotalThroughputsPerSecond,
    IReadOnlyList<double> InsertThroughputsPerSecond,
    int ObservationCount,
    int AliasGroups,
    int LinksetCount,
    int TenantCount,
    LinksetAggregationResult AggregationResult);
@@ -0,0 +1,42 @@
using System.Globalization;

namespace StellaOps.Bench.LinkNotMerge;

internal sealed record ScenarioResult(
    string Id,
    string Label,
    int Iterations,
    int ObservationCount,
    int AliasGroups,
    int LinksetCount,
    DurationStatistics TotalStatistics,
    DurationStatistics InsertStatistics,
    DurationStatistics CorrelationStatistics,
    ThroughputStatistics TotalThroughputStatistics,
    ThroughputStatistics InsertThroughputStatistics,
    AllocationStatistics AllocationStatistics,
    double? ThresholdMs,
    double? MinThroughputThresholdPerSecond,
    double? MinMongoThroughputThresholdPerSecond,
    double? MaxAllocatedThresholdMb)
{
    public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];

    public string ObservationsColumn => ObservationCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);

    public string AliasColumn => AliasGroups.ToString("N0", CultureInfo.InvariantCulture).PadLeft(8);

    public string LinksetColumn => LinksetCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(9);

    public string TotalMeanColumn => TotalStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string CorrelationMeanColumn => CorrelationStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string InsertMeanColumn => InsertStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string ThroughputColumn => (TotalThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);

    public string MongoThroughputColumn => (InsertThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);

    public string AllocatedColumn => AllocationStatistics.MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}
@@ -0,0 +1,84 @@
namespace StellaOps.Bench.LinkNotMerge;

internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
    public static DurationStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new DurationStatistics(0, 0, 0);
        }

        var sorted = values.ToArray();
        Array.Sort(sorted);

        var total = 0d;
        foreach (var value in values)
        {
            total += value;
        }

        var mean = total / values.Count;
        var p95 = Percentile(sorted, 95);
        var max = sorted[^1];

        return new DurationStatistics(mean, p95, max);
    }

    private static double Percentile(IReadOnlyList<double> sorted, double percentile)
    {
        if (sorted.Count == 0)
        {
            return 0;
        }

        var rank = (percentile / 100d) * (sorted.Count - 1);
        var lower = (int)Math.Floor(rank);
        var upper = (int)Math.Ceiling(rank);
        var weight = rank - lower;

        if (upper >= sorted.Count)
        {
            return sorted[lower];
        }

        return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
    }
}

internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
    public static ThroughputStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new ThroughputStatistics(0, 0);
        }

        var total = 0d;
        var min = double.MaxValue;

        foreach (var value in values)
        {
            total += value;
            min = Math.Min(min, value);
        }

        var mean = total / values.Count;
        return new ThroughputStatistics(mean, min);
    }
}

internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
    public static AllocationStatistics From(IReadOnlyList<double> values)
    {
        var max = 0d;
        foreach (var value in values)
        {
            max = Math.Max(max, value);
        }

        return new AllocationStatistics(max);
    }
}
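The `Percentile` helper uses linear interpolation between closest ranks. A quick worked example (inputs invented): for the sorted samples [10, 20, 30, 40, 50], p95 gives rank = 0.95 × 4 = 3.8, so the result is 40 + 0.8 × (50 − 40) = 48.

```csharp
// Linear interpolation between closest ranks, mirroring Percentile above.
double[] sorted = { 10, 20, 30, 40, 50 };
var rank = 0.95 * (sorted.Length - 1); // 3.8
var lower = (int)Math.Floor(rank);     // 3
var weight = rank - lower;             // 0.8
var p95 = sorted[lower] + weight * (sorted[lower + 1] - sorted[lower]); // 48
```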
@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="MongoDB.Driver" Version="3.5.0" />
    <PackageReference Include="EphemeralMongo" Version="3.0.0" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
  </ItemGroup>
</Project>
4
src/StellaOps.Bench/LinkNotMerge/baseline.csv
Normal file
@@ -0,0 +1,4 @@
scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb
lnm_ingest_baseline,5,5000,500,6000,555.1984,823.4957,866.6236,366.2635,188.9349,9877.7916,5769.5175,15338.0851,8405.1257,62.4477
lnm_ingest_fanout_medium,5,10000,800,14800,785.8909,841.6247,842.8815,453.5087,332.3822,12794.9550,11864.0639,22086.0320,20891.0579,145.8328
lnm_ingest_fanout_high,5,15000,1200,17400,1299.3458,1367.0934,1369.9430,741.6265,557.7193,11571.0991,10949.3607,20232.5180,19781.6762,238.3450
57
src/StellaOps.Bench/LinkNotMerge/config.json
Normal file
@@ -0,0 +1,57 @@
{
  "thresholdMs": 2000,
  "minThroughputPerSecond": 7000,
  "minMongoThroughputPerSecond": 12000,
  "maxAllocatedMb": 600,
  "iterations": 5,
  "scenarios": [
    {
      "id": "lnm_ingest_baseline",
      "label": "5k observations, 500 aliases",
      "observations": 5000,
      "aliasGroups": 500,
      "purlsPerObservation": 4,
      "cpesPerObservation": 2,
      "referencesPerObservation": 3,
      "tenants": 4,
      "batchSize": 250,
      "seed": 42022,
      "thresholdMs": 900,
      "minThroughputPerSecond": 5500,
      "minMongoThroughputPerSecond": 8000,
      "maxAllocatedMb": 160
    },
    {
      "id": "lnm_ingest_fanout_medium",
      "label": "10k observations, 800 aliases",
      "observations": 10000,
      "aliasGroups": 800,
      "purlsPerObservation": 6,
      "cpesPerObservation": 3,
      "referencesPerObservation": 4,
      "tenants": 6,
      "batchSize": 400,
      "seed": 52022,
      "thresholdMs": 1300,
      "minThroughputPerSecond": 8000,
      "minMongoThroughputPerSecond": 13000,
      "maxAllocatedMb": 220
    },
    {
      "id": "lnm_ingest_fanout_high",
      "label": "15k observations, 1200 aliases",
      "observations": 15000,
      "aliasGroups": 1200,
      "purlsPerObservation": 8,
      "cpesPerObservation": 4,
      "referencesPerObservation": 5,
      "tenants": 8,
      "batchSize": 500,
      "seed": 62022,
      "thresholdMs": 2200,
      "minThroughputPerSecond": 7000,
      "minMongoThroughputPerSecond": 13000,
      "maxAllocatedMb": 300
    }
  ]
}
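The per-scenario limits in this file take precedence over CLI flags, which in turn take precedence over the global values at the top; `Program.Main` resolves each limit with a null-coalescing chain:

```csharp
// Resolution order: scenario override, then CLI option, then global config value.
var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
```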
25
src/StellaOps.Bench/Notify/README.md
Normal file
@@ -0,0 +1,25 @@
# Notify Dispatch Bench

Synthetic workload measuring rule evaluation and channel dispatch fan-out under varying rule densities.

## Scenarios

`config.json` defines three density profiles (5%, 20%, 40%). Each scenario synthesizes deterministic tenants, rules, and delivery actions to measure:

- Latency (mean/p95/max milliseconds)
- Throughput (deliveries per second)
- Managed heap allocations (megabytes)
- Match fan-out statistics (matches and deliveries per event)

## Running locally

```bash
dotnet run \
  --project src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/StellaOps.Bench.Notify.csproj \
  -- \
  --csv out/notify-bench.csv \
  --json out/notify-bench.json \
  --prometheus out/notify-bench.prom
```

The benchmark exits non-zero if latency exceeds the configured thresholds, throughput drops below the floor, allocations exceed the ceiling, or regression limits are breached relative to `baseline.csv`.
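For CI comparisons against a pinned baseline, the invocation below is a sketch that assumes the Notify runner exposes the same `--baseline` and `--regression-limit` flags as the Link-Not-Merge bench in this change:

```bash
dotnet run \
  --project src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/StellaOps.Bench.Notify.csproj \
  -- \
  --baseline src/StellaOps.Bench/Notify/baseline.csv \
  --regression-limit 1.15 \
  --json out/notify-bench.json
```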
@@ -0,0 +1,38 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.Notify.Baseline;
using Xunit;

namespace StellaOps.Bench.Notify.Tests;

public sealed class BaselineLoaderTests
{
    [Fact]
    public async Task LoadAsync_ReadsBaselineEntries()
    {
        var path = Path.GetTempFileName();
        try
        {
            await File.WriteAllTextAsync(
                path,
                "scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb\n" +
                "notify_dispatch_density_05,5,5000,25000,120.5,150.1,199.9,42000.5,39000.2,85.7\n");

            var entries = await BaselineLoader.LoadAsync(path, CancellationToken.None);
            var entry = Assert.Single(entries);

            Assert.Equal("notify_dispatch_density_05", entry.Key);
            Assert.Equal(5, entry.Value.Iterations);
            Assert.Equal(5000, entry.Value.EventCount);
            Assert.Equal(25000, entry.Value.DeliveryCount);
            Assert.Equal(120.5, entry.Value.MeanMs);
            Assert.Equal(39000.2, entry.Value.MinThroughputPerSecond);
            Assert.Equal(85.7, entry.Value.MaxAllocatedMb);
        }
        finally
        {
            File.Delete(path);
        }
    }
}
@@ -0,0 +1,85 @@
using System.Linq;
using StellaOps.Bench.Notify.Baseline;
using StellaOps.Bench.Notify.Reporting;
using Xunit;

namespace StellaOps.Bench.Notify.Tests;

public sealed class BenchmarkScenarioReportTests
{
    [Fact]
    public void RegressionDetection_FlagsLatencies()
    {
        var result = new ScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            TotalEvents: 1000,
            TotalRules: 100,
            ActionsPerRule: 2,
            AverageMatchesPerEvent: 10,
            MinMatchesPerEvent: 8,
            MaxMatchesPerEvent: 12,
            AverageDeliveriesPerEvent: 20,
            TotalDeliveries: 20000,
            MeanMs: 200,
            P95Ms: 250,
            MaxMs: 300,
            MeanThroughputPerSecond: 50000,
            MinThroughputPerSecond: 40000,
            MaxAllocatedMb: 100,
            ThresholdMs: null,
            MinThroughputThresholdPerSecond: null,
            MaxAllocatedThresholdMb: null);

        var baseline = new BaselineEntry(
            ScenarioId: "scenario",
            Iterations: 3,
            EventCount: 1000,
            DeliveryCount: 20000,
            MeanMs: 150,
            P95Ms: 180,
            MaxMs: 200,
            MeanThroughputPerSecond: 60000,
            MinThroughputPerSecond: 50000,
            MaxAllocatedMb: 90);

        var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1);

        Assert.True(report.DurationRegressionBreached);
        Assert.True(report.ThroughputRegressionBreached);
        Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("max duration"));
    }

    [Fact]
    public void RegressionDetection_NoBaseline_NoBreaches()
    {
        var result = new ScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            TotalEvents: 1000,
            TotalRules: 100,
            ActionsPerRule: 2,
            AverageMatchesPerEvent: 10,
            MinMatchesPerEvent: 8,
            MaxMatchesPerEvent: 12,
            AverageDeliveriesPerEvent: 20,
            TotalDeliveries: 20000,
            MeanMs: 200,
            P95Ms: 250,
            MaxMs: 300,
            MeanThroughputPerSecond: 50000,
            MinThroughputPerSecond: 40000,
            MaxAllocatedMb: 100,
            ThresholdMs: null,
            MinThroughputThresholdPerSecond: null,
            MaxAllocatedThresholdMb: null);

        var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null);

        Assert.False(report.DurationRegressionBreached);
        Assert.False(report.ThroughputRegressionBreached);
        Assert.Empty(report.BuildRegressionFailureMessages());
    }
}
@@ -0,0 +1,33 @@
using System.Threading;
using Xunit;

namespace StellaOps.Bench.Notify.Tests;

public sealed class NotifyScenarioRunnerTests
{
    [Fact]
    public void Execute_ComputesDeterministicMetrics()
    {
        var config = new NotifyScenarioConfig
        {
            Id = "unit_test",
            EventCount = 500,
            RuleCount = 40,
            ActionsPerRule = 3,
            MatchRate = 0.25,
            TenantCount = 4,
            ChannelCount = 16
        };

        var runner = new NotifyScenarioRunner(config);
        var result = runner.Execute(2, CancellationToken.None);

        Assert.Equal(config.ResolveEventCount(), result.TotalEvents);
        Assert.Equal(config.ResolveRuleCount(), result.TotalRules);
        Assert.Equal(config.ResolveActionsPerRule(), result.ActionsPerRule);
        Assert.True(result.TotalMatches > 0);
        Assert.Equal(result.TotalMatches * result.ActionsPerRule, result.TotalDeliveries);
        Assert.Equal(2, result.Durations.Count);
        Assert.All(result.Durations, value => Assert.True(value > 0));
    }
}
@@ -0,0 +1,64 @@
using System.IO;
using StellaOps.Bench.Notify.Baseline;
using StellaOps.Bench.Notify.Reporting;
using Xunit;

namespace StellaOps.Bench.Notify.Tests;

public sealed class PrometheusWriterTests
{
    [Fact]
    public void Write_EmitsScenarioMetrics()
    {
        var result = new ScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            TotalEvents: 1000,
            TotalRules: 100,
            ActionsPerRule: 2,
            AverageMatchesPerEvent: 10,
            MinMatchesPerEvent: 8,
            MaxMatchesPerEvent: 12,
            AverageDeliveriesPerEvent: 20,
            TotalDeliveries: 20000,
            MeanMs: 200,
            P95Ms: 250,
            MaxMs: 300,
            MeanThroughputPerSecond: 50000,
            MinThroughputPerSecond: 40000,
            MaxAllocatedMb: 100,
            ThresholdMs: 900,
            MinThroughputThresholdPerSecond: 35000,
            MaxAllocatedThresholdMb: 150);

        var baseline = new BaselineEntry(
            ScenarioId: "scenario",
            Iterations: 3,
            EventCount: 1000,
            DeliveryCount: 20000,
            MeanMs: 180,
            P95Ms: 210,
            MaxMs: 240,
            MeanThroughputPerSecond: 52000,
            MinThroughputPerSecond: 41000,
            MaxAllocatedMb: 95);

        var report = new BenchmarkScenarioReport(result, baseline);

        var path = Path.GetTempFileName();
        try
        {
            PrometheusWriter.Write(path, new[] { report });
            var content = File.ReadAllText(path);

            Assert.Contains("notify_dispatch_bench_mean_ms", content);
            Assert.Contains("scenario\"} 200", content);
            Assert.Contains("notify_dispatch_bench_baseline_mean_ms", content);
        }
        finally
        {
            File.Delete(path);
        }
    }
}
@@ -0,0 +1,27 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <UseConcelierTestInfra>false</UseConcelierTestInfra>
  </PropertyGroup>

  <ItemGroup>
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
    <PackageReference Include="xunit" Version="2.9.2" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="coverlet.collector" Version="6.0.4">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Bench.Notify\StellaOps.Bench.Notify.csproj" />
  </ItemGroup>
</Project>
@@ -0,0 +1,13 @@
namespace StellaOps.Bench.Notify.Baseline;

internal sealed record BaselineEntry(
    string ScenarioId,
    int Iterations,
    int EventCount,
    int DeliveryCount,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MaxAllocatedMb);
@@ -0,0 +1,87 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Threading;
using System.Threading.Tasks;

namespace StellaOps.Bench.Notify.Baseline;

internal static class BaselineLoader
{
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }

        var results = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);

        await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var reader = new StreamReader(stream);

        var lineNumber = 0;
        while (true)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var line = await reader.ReadLineAsync().ConfigureAwait(false);
            if (line is null)
            {
                break;
            }

            lineNumber++;
            if (lineNumber == 1 || string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            var parts = line.Split(',', StringSplitOptions.TrimEntries);
            if (parts.Length < 10)
            {
                throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 10 columns, found {parts.Length}).");
            }

            var entry = new BaselineEntry(
                ScenarioId: parts[0],
                Iterations: ParseInt(parts[1], resolved, lineNumber),
                EventCount: ParseInt(parts[2], resolved, lineNumber),
                DeliveryCount: ParseInt(parts[3], resolved, lineNumber),
                MeanMs: ParseDouble(parts[4], resolved, lineNumber),
                P95Ms: ParseDouble(parts[5], resolved, lineNumber),
                MaxMs: ParseDouble(parts[6], resolved, lineNumber),
                MeanThroughputPerSecond: ParseDouble(parts[7], resolved, lineNumber),
                MinThroughputPerSecond: ParseDouble(parts[8], resolved, lineNumber),
                MaxAllocatedMb: ParseDouble(parts[9], resolved, lineNumber));

            results[entry.ScenarioId] = entry;
        }

        return results;
    }

    private static int ParseInt(string value, string file, int line)
    {
        if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
    }

    private static double ParseDouble(string value, string file, int line)
    {
        if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
    }
}
@@ -0,0 +1,220 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Bench.Notify;

internal sealed record BenchmarkConfig(
    double? ThresholdMs,
    double? MinThroughputPerSecond,
    double? MaxAllocatedMb,
    int? Iterations,
    IReadOnlyList<NotifyScenarioConfig> Scenarios)
{
    public static async Task<BenchmarkConfig> LoadAsync(string path)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
        }

        await using var stream = File.OpenRead(resolved);
        var model = await JsonSerializer.DeserializeAsync<BenchmarkConfigModel>(
            stream,
            new JsonSerializerOptions(JsonSerializerDefaults.Web)
            {
                PropertyNameCaseInsensitive = true,
                ReadCommentHandling = JsonCommentHandling.Skip,
                AllowTrailingCommas = true
            }).ConfigureAwait(false);

        if (model is null)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
        }

        if (model.Scenarios.Count == 0)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
        }

        foreach (var scenario in model.Scenarios)
        {
            scenario.Validate();
        }

        return new BenchmarkConfig(
            model.ThresholdMs,
            model.MinThroughputPerSecond,
            model.MaxAllocatedMb,
            model.Iterations,
            model.Scenarios);
    }

    private sealed class BenchmarkConfigModel
    {
        [JsonPropertyName("thresholdMs")]
        public double? ThresholdMs { get; init; }

        [JsonPropertyName("minThroughputPerSecond")]
        public double? MinThroughputPerSecond { get; init; }

        [JsonPropertyName("maxAllocatedMb")]
        public double? MaxAllocatedMb { get; init; }

        [JsonPropertyName("iterations")]
        public int? Iterations { get; init; }

        [JsonPropertyName("scenarios")]
        public List<NotifyScenarioConfig> Scenarios { get; init; } = new();
    }
}

internal sealed class NotifyScenarioConfig
{
    private const int DefaultEventCount = 10_000;
    private const int DefaultRuleCount = 200;
    private const int DefaultActionsPerRule = 3;
    private const double DefaultMatchRate = 0.25d;
    private const int DefaultTenantCount = 4;
    private const int DefaultChannelCount = 8;
    private const int BaseSeed = 2025_10_26;

    [JsonPropertyName("id")]
    public string? Id { get; init; }

    [JsonPropertyName("label")]
    public string? Label { get; init; }

    [JsonPropertyName("eventCount")]
    public int EventCount { get; init; } = DefaultEventCount;

    [JsonPropertyName("ruleCount")]
    public int RuleCount { get; init; } = DefaultRuleCount;

    [JsonPropertyName("actionsPerRule")]
    public int ActionsPerRule { get; init; } = DefaultActionsPerRule;

    [JsonPropertyName("matchRate")]
    public double? MatchRate { get; init; }

    [JsonPropertyName("tenantCount")]
    public int? TenantCount { get; init; }

    [JsonPropertyName("channelCount")]
    public int? ChannelCount { get; init; }

    [JsonPropertyName("seed")]
    public int? Seed { get; init; }

    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }

    [JsonPropertyName("minThroughputPerSecond")]
    public double? MinThroughputPerSecond { get; init; }

    [JsonPropertyName("maxAllocatedMb")]
    public double? MaxAllocatedMb { get; init; }

    [JsonPropertyName("iterations")]
    public int? Iterations { get; init; }

    public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "notify_dispatch" : Id!.Trim();

    public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim();

    public int ResolveEventCount()
    {
        if (EventCount <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires eventCount > 0.");
        }

        return EventCount;
    }

    public int ResolveRuleCount()
    {
        if (RuleCount <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires ruleCount > 0.");
        }

        return RuleCount;
    }

    public int ResolveActionsPerRule()
    {
        if (ActionsPerRule <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires actionsPerRule > 0.");
        }

        return ActionsPerRule;
    }

    public double ResolveMatchRate()
    {
        var rate = MatchRate ?? DefaultMatchRate;
        if (!double.IsFinite(rate) || rate <= 0d || rate > 1d)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires matchRate within (0, 1].");
        }

        return rate;
    }

    public int ResolveTenantCount()
    {
        var tenants = TenantCount ?? DefaultTenantCount;
        if (tenants <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires tenantCount > 0.");
        }

        return tenants;
    }

    public int ResolveChannelCount()
    {
        var channels = ChannelCount ?? DefaultChannelCount;
        if (channels <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires channelCount > 0.");
        }

        return channels;
    }

    public int ResolveSeed()
    {
        if (Seed is { } explicitSeed && explicitSeed > 0)
        {
            return explicitSeed;
        }

        var material = Encoding.UTF8.GetBytes($"stellaops-notify-bench::{ScenarioId}");
        var hash = SHA256.HashData(material);
        var derived = BitConverter.ToInt32(hash, 0) & int.MaxValue;
        if (derived == 0)
        {
            derived = BaseSeed;
        }

        return derived;
    }

    public void Validate()
    {
        ResolveEventCount();
        ResolveRuleCount();
        ResolveActionsPerRule();
        ResolveMatchRate();
        ResolveTenantCount();
        ResolveChannelCount();
    }
}
@@ -0,0 +1,26 @@
using System;

namespace StellaOps.Bench.Notify;

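// Folds every dispatched (rule, action, event) hash into one running value so the dispatch loop cannot be optimized away; 17 is the untouched sentinel checked by AssertConsumed.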
internal sealed class DispatchAccumulator
{
    private long _value = 17;

    public void Add(int ruleHash, int actionHash, int eventHash)
    {
        unchecked
        {
            _value = (_value * 31) ^ ruleHash;
            _value = (_value * 31) ^ actionHash;
            _value = (_value * 31) ^ eventHash;
        }
    }

    public void AssertConsumed()
    {
        if (_value == 17)
        {
            throw new InvalidOperationException("Dispatch accumulator did not receive any values.");
        }
    }
}
@@ -0,0 +1,386 @@
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using StellaOps.Notify.Models;

namespace StellaOps.Bench.Notify;

internal sealed class NotifyScenarioRunner
{
    private static readonly DateTimeOffset BaseTimestamp = new(2025, 10, 26, 0, 0, 0, TimeSpan.Zero);
    private const string EventKind = NotifyEventKinds.ScannerReportReady;

    private readonly NotifyScenarioConfig _config;
    private readonly EventDescriptor[] _events;
    private readonly RuleDescriptor[][] _rulesByTenant;
    private readonly int _totalEvents;
    private readonly int _ruleCount;
    private readonly int _actionsPerRule;
    private readonly int _totalMatches;
    private readonly int _totalDeliveries;
    private readonly double _averageMatchesPerEvent;
    private readonly double _averageDeliveriesPerEvent;
    private readonly int _minMatchesPerEvent;
    private readonly int _maxMatchesPerEvent;

    public NotifyScenarioRunner(NotifyScenarioConfig config)
    {
        _config = config ?? throw new ArgumentNullException(nameof(config));

        var eventCount = config.ResolveEventCount();
        var ruleCount = config.ResolveRuleCount();
        var actionsPerRule = config.ResolveActionsPerRule();
        var matchRate = config.ResolveMatchRate();
        var tenantCount = config.ResolveTenantCount();
        var channelCount = config.ResolveChannelCount();
        var seed = config.ResolveSeed();

        if (tenantCount > ruleCount)
        {
            tenantCount = Math.Max(1, ruleCount);
        }

        _totalEvents = eventCount;
        _ruleCount = ruleCount;
        _actionsPerRule = actionsPerRule;

        var tenants = BuildTenants(tenantCount);
        var channels = BuildChannels(channelCount);
        var random = new Random(seed);

        var targetMatchesPerEvent = Math.Max(1, (int)Math.Round(ruleCount * matchRate));
        targetMatchesPerEvent = Math.Min(targetMatchesPerEvent, ruleCount);

        var ruleDescriptors = new List<RuleDescriptor>(ruleCount);
        var groups = new List<RuleGroup>();

        var ruleIndex = 0;
        var groupIndex = 0;
        var channelCursor = 0;

        while (ruleIndex < ruleCount)
        {
            var groupSize = Math.Min(targetMatchesPerEvent, ruleCount - ruleIndex);
            var tenantIndex = groupIndex % tenantCount;
            var tenantId = tenants[tenantIndex];

            var namespaceValue = $"svc-{tenantIndex:D2}-{groupIndex:D3}";
            var repositoryValue = $"registry.local/{tenantId}/service-{groupIndex:D3}";
            var digestValue = GenerateDigest(random, groupIndex);

            var rules = new RuleDescriptor[groupSize];
            for (var local = 0; local < groupSize && ruleIndex < ruleCount; local++, ruleIndex++)
            {
                var ruleId = $"rule-{groupIndex:D3}-{local:D3}";
                var actions = new ActionDescriptor[actionsPerRule];

                for (var actionIndex = 0; actionIndex < actionsPerRule; actionIndex++)
                {
                    var channel = channels[channelCursor % channelCount];
                    channelCursor++;

                    var actionId = $"{ruleId}-act-{actionIndex:D2}";
                    actions[actionIndex] = new ActionDescriptor(
                        actionId,
                        channel,
                        StableHash($"{actionId}|{channel}"));
                }

                rules[local] = new RuleDescriptor(
                    ruleId,
                    StableHash(ruleId),
                    tenantIndex,
                    namespaceValue,
                    repositoryValue,
                    digestValue,
                    actions);

                ruleDescriptors.Add(rules[local]);
            }

            groups.Add(new RuleGroup(tenantIndex, namespaceValue, repositoryValue, digestValue, rules));
            groupIndex++;
        }

        _rulesByTenant = BuildRulesByTenant(tenantCount, ruleDescriptors);

        var events = new EventDescriptor[eventCount];
        long totalMatches = 0;
        var minMatches = int.MaxValue;
        var maxMatches = 0;

        for (var eventIndex = 0; eventIndex < eventCount; eventIndex++)
        {
            var group = groups[eventIndex % groups.Count];
            var matchingRules = group.Rules.Length;

            totalMatches += matchingRules;
            if (matchingRules < minMatches)
            {
                minMatches = matchingRules;
            }

            if (matchingRules > maxMatches)
            {
                maxMatches = matchingRules;
            }

            var eventId = GenerateEventId(random, group.TenantIndex, eventIndex);
            var timestamp = BaseTimestamp.AddMilliseconds(eventIndex * 10d);

            // Materialize NotifyEvent to reflect production payload shape.
            _ = NotifyEvent.Create(
                eventId,
                EventKind,
                tenants[group.TenantIndex],
                timestamp,
                payload: null,
                scope: NotifyEventScope.Create(
                    @namespace: group.Namespace,
                    repo: group.Repository,
                    digest: group.Digest));

            events[eventIndex] = new EventDescriptor(
                group.TenantIndex,
                EventKind,
                group.Namespace,
                group.Repository,
                group.Digest,
                ComputeEventHash(eventId));
        }

        _events = events;
        _totalMatches = checked((int)totalMatches);
        _totalDeliveries = checked(_totalMatches * actionsPerRule);
        _averageMatchesPerEvent = totalMatches / (double)eventCount;
        _averageDeliveriesPerEvent = _averageMatchesPerEvent * actionsPerRule;
        _minMatchesPerEvent = minMatches;
        _maxMatchesPerEvent = maxMatches;
    }

    public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }

        var durations = new double[iterations];
        var throughputs = new double[iterations];
        var allocations = new double[iterations];

        for (var index = 0; index < iterations; index++)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var beforeAllocated = GC.GetTotalAllocatedBytes();
            var stopwatch = Stopwatch.StartNew();

            var accumulator = new DispatchAccumulator();
            var observedMatches = 0;
            var observedDeliveries = 0;

            foreach (ref readonly var @event in _events.AsSpan())
            {
                var tenantRules = _rulesByTenant[@event.TenantIndex];
                foreach (var rule in tenantRules)
                {
                    if (!Matches(rule, @event))
                    {
                        continue;
                    }

                    observedMatches++;

                    var actions = rule.Actions;
                    for (var actionIndex = 0; actionIndex < actions.Length; actionIndex++)
                    {
                        observedDeliveries++;
                        accumulator.Add(rule.RuleHash, actions[actionIndex].Hash, @event.EventHash);
                    }
                }
            }

            stopwatch.Stop();

            if (observedMatches != _totalMatches)
            {
                throw new InvalidOperationException($"Scenario '{_config.ScenarioId}' expected {_totalMatches} matches but observed {observedMatches}.");
            }

            if (observedDeliveries != _totalDeliveries)
            {
                throw new InvalidOperationException($"Scenario '{_config.ScenarioId}' expected {_totalDeliveries} deliveries but observed {observedDeliveries}.");
            }

            accumulator.AssertConsumed();

            var elapsedMs = stopwatch.Elapsed.TotalMilliseconds;
            if (elapsedMs <= 0d)
            {
                elapsedMs = 0.0001d;
            }

            var afterAllocated = GC.GetTotalAllocatedBytes();

            durations[index] = elapsedMs;
            throughputs[index] = observedDeliveries / Math.Max(stopwatch.Elapsed.TotalSeconds, 0.0001d);
            allocations[index] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);
        }

        return new ScenarioExecutionResult(
            durations,
            throughputs,
            allocations,
            _totalEvents,
            _ruleCount,
            _actionsPerRule,
            _averageMatchesPerEvent,
            _minMatchesPerEvent,
            _maxMatchesPerEvent,
            _averageDeliveriesPerEvent,
            _totalMatches,
            _totalDeliveries);
    }

    private static bool Matches(in RuleDescriptor rule, in EventDescriptor @event)
    {
        if (!string.Equals(@event.Kind, EventKind, StringComparison.Ordinal))
        {
            return false;
        }

        if (!string.Equals(rule.Namespace, @event.Namespace, StringComparison.Ordinal))
        {
            return false;
        }

        if (!string.Equals(rule.Repository, @event.Repository, StringComparison.Ordinal))
        {
            return false;
        }

        if (!string.Equals(rule.Digest, @event.Digest, StringComparison.Ordinal))
        {
            return false;
        }

        return true;
    }

    private static int ComputeEventHash(Guid eventId)
    {
        var bytes = eventId.ToByteArray();
        var value = BitConverter.ToInt32(bytes, 0);
        return value & int.MaxValue;
    }

    private static string GenerateDigest(Random random, int groupIndex)
    {
        var buffer = new byte[16];
        random.NextBytes(buffer);

        var hex = Convert.ToHexString(buffer).ToLowerInvariant();
        return $"sha256:{hex}{groupIndex:D3}";
    }

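    // Embed the tenant and event indices into the trailing bytes of the seeded GUID so generated ids stay distinguishable per tenant and event.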
    private static Guid GenerateEventId(Random random, int tenantIndex, int eventIndex)
    {
        Span<byte> buffer = stackalloc byte[16];
        random.NextBytes(buffer);
        buffer[^1] = (byte)(tenantIndex & 0xFF);
        buffer[^2] = (byte)(eventIndex & 0xFF);
        return new Guid(buffer);
    }

    private static RuleDescriptor[][] BuildRulesByTenant(int tenantCount, List<RuleDescriptor> rules)
    {
        var result = new RuleDescriptor[tenantCount][];
        for (var tenantIndex = 0; tenantIndex < tenantCount; tenantIndex++)
        {
            result[tenantIndex] = rules
                .Where(rule => rule.TenantIndex == tenantIndex)
                .ToArray();
        }

        return result;
    }

    private static string[] BuildTenants(int tenantCount)
    {
        var tenants = new string[tenantCount];
        for (var index = 0; index < tenantCount; index++)
        {
            tenants[index] = $"tenant-{index:D2}";
        }

        return tenants;
    }

    private static string[] BuildChannels(int channelCount)
    {
        var channels = new string[channelCount];
        for (var index = 0; index < channelCount; index++)
        {
            var kind = (index % 4) switch
            {
                0 => "slack",
                1 => "teams",
                2 => "email",
                _ => "webhook"
            };

            channels[index] = $"{kind}:channel-{index:D2}";
        }

        return channels;
    }

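    // FNV-1a over the UTF-16 code units, masked to a non-negative int so hashes stay deterministic across runs and platforms.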
    private static int StableHash(string value)
    {
        unchecked
        {
            const int offset = unchecked((int)2166136261);
            const int prime = 16777619;

            var hash = offset;
            foreach (var ch in value.AsSpan())
            {
                hash ^= ch;
                hash *= prime;
            }

            return hash & int.MaxValue;
        }
    }

    private readonly record struct RuleDescriptor(
        string RuleId,
        int RuleHash,
        int TenantIndex,
        string Namespace,
        string Repository,
        string Digest,
        ActionDescriptor[] Actions);

    private readonly record struct ActionDescriptor(
        string ActionId,
        string Channel,
        int Hash);

    private readonly record struct RuleGroup(
        int TenantIndex,
        string Namespace,
        string Repository,
        string Digest,
        RuleDescriptor[] Rules);

    private readonly record struct EventDescriptor(
        int TenantIndex,
        string Kind,
        string Namespace,
        string Repository,
        string Digest,
        int EventHash);
}
364
src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Program.cs
Normal file
@@ -0,0 +1,364 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.Notify.Baseline;
using StellaOps.Bench.Notify.Reporting;

namespace StellaOps.Bench.Notify;

internal static class Program
{
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = ProgramOptions.Parse(args);
            var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
            var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);

            var results = new List<ScenarioResult>();
            var reports = new List<BenchmarkScenarioReport>();
            var failures = new List<string>();

            foreach (var scenario in config.Scenarios)
            {
                var iterations = options.Iterations
                    ?? scenario.Iterations
                    ?? config.Iterations
                    ?? 5;

                var runner = new NotifyScenarioRunner(scenario);
                var execution = runner.Execute(iterations, CancellationToken.None);

                var durationStats = DurationStatistics.From(execution.Durations);
                var throughputStats = ThroughputStatistics.From(execution.Throughputs);
                var allocationStats = AllocationStatistics.From(execution.AllocatedMb);

                var scenarioThreshold = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
                var scenarioThroughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
                var scenarioAllocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;

                var result = new ScenarioResult(
                    scenario.ScenarioId,
                    scenario.DisplayLabel,
                    iterations,
                    execution.TotalEvents,
                    execution.TotalRules,
                    execution.ActionsPerRule,
                    execution.AverageMatchesPerEvent,
                    execution.MinMatchesPerEvent,
                    execution.MaxMatchesPerEvent,
                    execution.AverageDeliveriesPerEvent,
                    execution.TotalDeliveries,
                    durationStats.MeanMs,
                    durationStats.P95Ms,
                    durationStats.MaxMs,
                    throughputStats.MeanPerSecond,
                    throughputStats.MinPerSecond,
                    allocationStats.MaxAllocatedMb,
                    scenarioThreshold,
                    scenarioThroughputFloor,
                    scenarioAllocationLimit);

                results.Add(result);

                if (scenarioThreshold is { } threshold && result.MaxMs > threshold)
                {
                    failures.Add($"{result.Id} exceeded latency threshold: {result.MaxMs:F2} ms > {threshold:F2} ms");
                }

                if (scenarioThroughputFloor is { } floor && result.MinThroughputPerSecond < floor)
                {
                    failures.Add($"{result.Id} fell below throughput floor: {result.MinThroughputPerSecond:N0} deliveries/s < {floor:N0} deliveries/s");
                }

                if (scenarioAllocationLimit is { } limit && result.MaxAllocatedMb > limit)
                {
                    failures.Add($"{result.Id} exceeded allocation budget: {result.MaxAllocatedMb:F2} MB > {limit:F2} MB");
                }

                baseline.TryGetValue(result.Id, out var baselineEntry);
                var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit);
                reports.Add(report);
                failures.AddRange(report.BuildRegressionFailureMessages());
            }

            TablePrinter.Print(results);

            if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
            {
                CsvWriter.Write(options.CsvOutPath!, results);
            }

            if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
            {
                var metadata = new BenchmarkJsonMetadata(
                    SchemaVersion: "notify-dispatch-bench/1.0",
                    CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(),
                    Commit: options.Commit,
                    Environment: options.Environment);

                await BenchmarkJsonWriter.WriteAsync(
                    options.JsonOutPath!,
                    metadata,
                    reports,
                    CancellationToken.None).ConfigureAwait(false);
            }

            if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
            {
                PrometheusWriter.Write(options.PrometheusOutPath!, reports);
            }

            if (failures.Count > 0)
            {
                Console.Error.WriteLine();
                Console.Error.WriteLine("Benchmark failures detected:");
                foreach (var failure in failures.Distinct())
                {
                    Console.Error.WriteLine($"  - {failure}");
                }

                return 1;
            }

            return 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"notify-bench error: {ex.Message}");
            return 1;
        }
    }

    private sealed record ProgramOptions(
        string ConfigPath,
        int? Iterations,
        double? ThresholdMs,
        double? MinThroughputPerSecond,
        double? MaxAllocatedMb,
        string? CsvOutPath,
        string? JsonOutPath,
        string? PrometheusOutPath,
        string BaselinePath,
        DateTimeOffset? CapturedAtUtc,
        string? Commit,
        string? Environment,
        double? RegressionLimit)
    {
        public static ProgramOptions Parse(string[] args)
        {
            var configPath = DefaultConfigPath();
            var baselinePath = DefaultBaselinePath();

            int? iterations = null;
            double? thresholdMs = null;
            double? minThroughput = null;
            double? maxAllocated = null;
            string? csvOut = null;
            string? jsonOut = null;
            string? promOut = null;
            DateTimeOffset? capturedAt = null;
            string? commit = null;
            string? environment = null;
            double? regressionLimit = null;

            for (var index = 0; index < args.Length; index++)
            {
                var current = args[index];
                switch (current)
                {
                    case "--config":
                        EnsureNext(args, index);
                        configPath = Path.GetFullPath(args[++index]);
                        break;
                    case "--iterations":
                        EnsureNext(args, index);
                        iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--threshold-ms":
                        EnsureNext(args, index);
                        thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-throughput":
                        EnsureNext(args, index);
                        minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--max-allocated-mb":
                        EnsureNext(args, index);
                        maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--csv":
                        EnsureNext(args, index);
                        csvOut = args[++index];
                        break;
                    case "--json":
                        EnsureNext(args, index);
                        jsonOut = args[++index];
                        break;
                    case "--prometheus":
                        EnsureNext(args, index);
                        promOut = args[++index];
                        break;
                    case "--baseline":
                        EnsureNext(args, index);
                        baselinePath = Path.GetFullPath(args[++index]);
                        break;
                    case "--captured-at":
                        EnsureNext(args, index);
                        capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
                        break;
                    case "--commit":
                        EnsureNext(args, index);
                        commit = args[++index];
                        break;
                    case "--environment":
                        EnsureNext(args, index);
                        environment = args[++index];
                        break;
                    case "--regression-limit":
                        EnsureNext(args, index);
                        regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--help":
                    case "-h":
                        PrintUsage();
                        System.Environment.Exit(0);
                        break;
                    default:
                        throw new ArgumentException($"Unknown argument '{current}'.");
                }
            }

            return new ProgramOptions(
                configPath,
                iterations,
                thresholdMs,
                minThroughput,
                maxAllocated,
                csvOut,
                jsonOut,
                promOut,
                baselinePath,
                capturedAt,
                commit,
                environment,
                regressionLimit);
        }

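        // Default paths walk up from bin/<configuration>/<tfm> (three levels) to the project directory, whose parent is the bench root holding config.json and baseline.csv.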
        private static string DefaultConfigPath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "config.json");
        }

        private static string DefaultBaselinePath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "baseline.csv");
        }

        private static void EnsureNext(string[] args, int index)
        {
            if (index + 1 >= args.Length)
            {
                throw new ArgumentException("Missing value for argument.");
            }
        }

        private static void PrintUsage()
        {
            Console.WriteLine("Usage: notify-bench [options]");
            Console.WriteLine();
            Console.WriteLine("Options:");
            Console.WriteLine("  --config <path>              Path to benchmark configuration JSON.");
            Console.WriteLine("  --iterations <count>         Override iteration count.");
            Console.WriteLine("  --threshold-ms <value>       Global latency threshold in milliseconds.");
            Console.WriteLine("  --min-throughput <value>     Global throughput floor (deliveries/second).");
            Console.WriteLine("  --max-allocated-mb <value>   Global allocation ceiling (MB).");
            Console.WriteLine("  --csv <path>                 Write CSV results to path.");
            Console.WriteLine("  --json <path>                Write JSON results to path.");
            Console.WriteLine("  --prometheus <path>          Write Prometheus exposition metrics to path.");
            Console.WriteLine("  --baseline <path>            Baseline CSV path.");
            Console.WriteLine("  --captured-at <iso8601>      Timestamp to embed in JSON metadata.");
            Console.WriteLine("  --commit <sha>               Commit identifier for metadata.");
            Console.WriteLine("  --environment <name>         Environment label for metadata.");
            Console.WriteLine("  --regression-limit <value>   Regression multiplier (default 1.15).");
        }
    }
}

internal static class TablePrinter
{
    public static void Print(IEnumerable<ScenarioResult> results)
    {
        Console.WriteLine("Scenario                     | Events      | Rules    | Match/Evt | Deliver/Evt | Mean(ms)   | P95(ms)    | Max(ms)    | Min k/s  | Alloc(MB)");
        Console.WriteLine("---------------------------- | ------------| -------- | --------- | ----------- | ---------- | ---------- | ---------- | -------- | --------");
        foreach (var row in results)
        {
            Console.WriteLine(string.Join(" | ", new[]
            {
                row.IdColumn,
                row.EventsColumn,
                row.RulesColumn,
                row.MatchesColumn,
                row.DeliveriesColumn,
                row.MeanColumn,
                row.P95Column,
                row.MaxColumn,
                row.MinThroughputColumn,
                row.AllocatedColumn
            }));
        }
    }
}

internal static class CsvWriter
{
    public static void Write(string path, IEnumerable<ScenarioResult> results)
    {
        var resolvedPath = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolvedPath);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None);
        using var writer = new StreamWriter(stream);
        writer.WriteLine("scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb");

        foreach (var row in results)
        {
            writer.Write(row.Id);
            writer.Write(',');
            writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.TotalEvents.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.TotalDeliveries.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MeanThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MinThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture));
            writer.WriteLine();
        }
    }
}
@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;

[assembly: InternalsVisibleTo("StellaOps.Bench.Notify.Tests")]
@@ -0,0 +1,147 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.Notify.Baseline;

namespace StellaOps.Bench.Notify.Reporting;

internal static class BenchmarkJsonWriter
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());

        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;

        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.TotalEvents,
            report.Result.TotalRules,
            report.Result.ActionsPerRule,
            report.Result.AverageMatchesPerEvent,
            report.Result.MinMatchesPerEvent,
            report.Result.MaxMatchesPerEvent,
            report.Result.AverageDeliveriesPerEvent,
            report.Result.TotalDeliveries,
            report.Result.MeanMs,
            report.Result.P95Ms,
            report.Result.MaxMs,
            report.Result.MeanThroughputPerSecond,
            report.Result.MinThroughputPerSecond,
            report.Result.MaxAllocatedMb,
            report.Result.ThresholdMs,
            report.Result.MinThroughputThresholdPerSecond,
            report.Result.MaxAllocatedThresholdMb,
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.EventCount,
                    baseline.DeliveryCount,
                    baseline.MeanMs,
                    baseline.P95Ms,
                    baseline.MaxMs,
                    baseline.MeanThroughputPerSecond,
                    baseline.MinThroughputPerSecond,
                    baseline.MaxAllocatedMb),
            new BenchmarkJsonScenarioRegression(
                report.DurationRegressionRatio,
                report.ThroughputRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int TotalEvents,
        int TotalRules,
        int ActionsPerRule,
        double AverageMatchesPerEvent,
        int MinMatchesPerEvent,
        int MaxMatchesPerEvent,
        double AverageDeliveriesPerEvent,
        int TotalDeliveries,
        double MeanMs,
        double P95Ms,
        double MaxMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MaxAllocatedMb,
        double? ThresholdMs,
        double? MinThroughputThresholdPerSecond,
        double? MaxAllocatedThresholdMb,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int EventCount,
        int DeliveryCount,
        double MeanMs,
        double P95Ms,
        double MaxMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MaxAllocatedMb);

    private sealed record BenchmarkJsonScenarioRegression(
        double? DurationRatio,
        double? ThroughputRatio,
        double Limit,
        bool Breached);
}

internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);
@@ -0,0 +1,84 @@
using System;
using System.Collections.Generic;
using StellaOps.Bench.Notify.Baseline;

namespace StellaOps.Bench.Notify.Reporting;

internal sealed class BenchmarkScenarioReport
{
    private const double DefaultRegressionLimit = 1.15d;

    public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        Result = result ?? throw new ArgumentNullException(nameof(result));
        Baseline = baseline;
        RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
        DurationRegressionRatio = CalculateDurationRatio(result.MaxMs, baseline?.MaxMs);
        ThroughputRegressionRatio = CalculateThroughputRatio(result.MinThroughputPerSecond, baseline?.MinThroughputPerSecond);
    }

    public ScenarioResult Result { get; }

    public BaselineEntry? Baseline { get; }

    public double RegressionLimit { get; }

    public double? DurationRegressionRatio { get; }

    public double? ThroughputRegressionRatio { get; }

    public bool DurationRegressionBreached =>
        DurationRegressionRatio is { } ratio &&
        ratio >= RegressionLimit;

    public bool ThroughputRegressionBreached =>
        ThroughputRegressionRatio is { } ratio &&
        ratio >= RegressionLimit;

    public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached;

    public IEnumerable<string> BuildRegressionFailureMessages()
    {
        if (Baseline is null)
        {
            yield break;
        }

        if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
        {
            var delta = (durationRatio - 1d) * 100d;
            yield return $"{Result.Id} exceeded max duration budget: {Result.MaxMs:F2} ms vs baseline {Baseline.MaxMs:F2} ms (+{delta:F1}%).";
        }

        if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio)
        {
            var delta = (throughputRatio - 1d) * 100d;
            yield return $"{Result.Id} throughput regressed: min {Result.MinThroughputPerSecond:N0} /s vs baseline {Baseline.MinThroughputPerSecond:N0} /s (-{delta:F1}%).";
        }
    }

    private static double? CalculateDurationRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        return current / baseline.Value;
    }

    private static double? CalculateThroughputRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        if (current <= 0d)
        {
            return double.PositiveInfinity;
        }

        return baseline.Value / current;
    }
}
@@ -0,0 +1,86 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;

namespace StellaOps.Bench.Notify.Reporting;

internal static class PrometheusWriter
{
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var builder = new StringBuilder();
        builder.AppendLine("# HELP notify_dispatch_bench_duration_ms Notify dispatch benchmark duration metrics (milliseconds).");
        builder.AppendLine("# TYPE notify_dispatch_bench_duration_ms gauge");
        builder.AppendLine("# HELP notify_dispatch_bench_throughput_per_sec Notify dispatch benchmark throughput metrics (deliveries per second).");
        builder.AppendLine("# TYPE notify_dispatch_bench_throughput_per_sec gauge");
        builder.AppendLine("# HELP notify_dispatch_bench_allocation_mb Notify dispatch benchmark allocation metrics (megabytes).");
        builder.AppendLine("# TYPE notify_dispatch_bench_allocation_mb gauge");

        foreach (var report in reports)
        {
            var scenarioLabel = Escape(report.Result.Id);
            AppendMetric(builder, "notify_dispatch_bench_mean_ms", scenarioLabel, report.Result.MeanMs);
            AppendMetric(builder, "notify_dispatch_bench_p95_ms", scenarioLabel, report.Result.P95Ms);
            AppendMetric(builder, "notify_dispatch_bench_max_ms", scenarioLabel, report.Result.MaxMs);
            AppendMetric(builder, "notify_dispatch_bench_threshold_ms", scenarioLabel, report.Result.ThresholdMs);

            AppendMetric(builder, "notify_dispatch_bench_mean_throughput_per_sec", scenarioLabel, report.Result.MeanThroughputPerSecond);
            AppendMetric(builder, "notify_dispatch_bench_min_throughput_per_sec", scenarioLabel, report.Result.MinThroughputPerSecond);
            AppendMetric(builder, "notify_dispatch_bench_min_throughput_threshold_per_sec", scenarioLabel, report.Result.MinThroughputThresholdPerSecond);

            AppendMetric(builder, "notify_dispatch_bench_max_allocated_mb", scenarioLabel, report.Result.MaxAllocatedMb);
            AppendMetric(builder, "notify_dispatch_bench_max_allocated_threshold_mb", scenarioLabel, report.Result.MaxAllocatedThresholdMb);

            if (report.Baseline is { } baseline)
            {
                AppendMetric(builder, "notify_dispatch_bench_baseline_max_ms", scenarioLabel, baseline.MaxMs);
                AppendMetric(builder, "notify_dispatch_bench_baseline_mean_ms", scenarioLabel, baseline.MeanMs);
                AppendMetric(builder, "notify_dispatch_bench_baseline_min_throughput_per_sec", scenarioLabel, baseline.MinThroughputPerSecond);
            }

            if (report.DurationRegressionRatio is { } durationRatio)
            {
                AppendMetric(builder, "notify_dispatch_bench_duration_regression_ratio", scenarioLabel, durationRatio);
            }

            if (report.ThroughputRegressionRatio is { } throughputRatio)
            {
                AppendMetric(builder, "notify_dispatch_bench_throughput_regression_ratio", scenarioLabel, throughputRatio);
            }

            AppendMetric(builder, "notify_dispatch_bench_regression_limit", scenarioLabel, report.RegressionLimit);
            AppendMetric(builder, "notify_dispatch_bench_regression_breached", scenarioLabel, report.RegressionBreached ? 1 : 0);
        }

        File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
    }

    private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
    {
        if (!value.HasValue)
        {
            return;
        }

        builder.Append(metric);
        builder.Append("{scenario=\"");
        builder.Append(scenario);
        builder.Append("\"} ");
        builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture));
    }

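    // Escape backslashes and double quotes so scenario ids remain valid Prometheus label values.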
    private static string Escape(string value) =>
        value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}
@@ -0,0 +1,17 @@
using System.Collections.Generic;

namespace StellaOps.Bench.Notify;

internal sealed record ScenarioExecutionResult(
    IReadOnlyList<double> Durations,
    IReadOnlyList<double> Throughputs,
    IReadOnlyList<double> AllocatedMb,
    int TotalEvents,
    int TotalRules,
    int ActionsPerRule,
    double AverageMatchesPerEvent,
    int MinMatchesPerEvent,
    int MaxMatchesPerEvent,
    double AverageDeliveriesPerEvent,
    int TotalMatches,
    int TotalDeliveries);
@@ -0,0 +1,46 @@
using System.Globalization;

namespace StellaOps.Bench.Notify;

internal sealed record ScenarioResult(
    string Id,
    string Label,
    int Iterations,
    int TotalEvents,
    int TotalRules,
    int ActionsPerRule,
    double AverageMatchesPerEvent,
    int MinMatchesPerEvent,
    int MaxMatchesPerEvent,
    double AverageDeliveriesPerEvent,
    int TotalDeliveries,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MaxAllocatedMb,
    double? ThresholdMs,
    double? MinThroughputThresholdPerSecond,
    double? MaxAllocatedThresholdMb)
{
    public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];

    public string EventsColumn => TotalEvents.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);

    public string RulesColumn => TotalRules.ToString("N0", CultureInfo.InvariantCulture).PadLeft(9);

    public string MatchesColumn => AverageMatchesPerEvent.ToString("F1", CultureInfo.InvariantCulture).PadLeft(8);

    public string DeliveriesColumn => AverageDeliveriesPerEvent.ToString("F1", CultureInfo.InvariantCulture).PadLeft(10);

    public string MeanColumn => MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string P95Column => P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string MaxColumn => MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string MinThroughputColumn => (MinThroughputPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);

    public string AllocatedColumn => MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}
@@ -0,0 +1,87 @@
using System;
using System.Collections.Generic;
using System.Linq;

namespace StellaOps.Bench.Notify;

internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
    public static DurationStatistics From(IReadOnlyList<double> durations)
    {
        if (durations.Count == 0)
        {
            return new DurationStatistics(0, 0, 0);
        }

        var sorted = durations.ToArray();
        Array.Sort(sorted);

        var total = 0d;
        foreach (var value in durations)
        {
            total += value;
        }

        var mean = total / durations.Count;
        var p95 = Percentile(sorted, 95);
        var max = sorted[^1];

        return new DurationStatistics(mean, p95, max);
    }

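    // Percentile via linear interpolation between the two closest ranks of the pre-sorted samples.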
    private static double Percentile(IReadOnlyList<double> sorted, double percentile)
    {
        if (sorted.Count == 0)
        {
            return 0;
        }

        var rank = (percentile / 100d) * (sorted.Count - 1);
        var lower = (int)Math.Floor(rank);
        var upper = (int)Math.Ceiling(rank);
        var weight = rank - lower;

        if (upper >= sorted.Count)
        {
            return sorted[lower];
        }

        return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
    }
}

internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
    public static ThroughputStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new ThroughputStatistics(0, 0);
        }

        var total = 0d;
        var min = double.MaxValue;

        foreach (var value in values)
        {
            total += value;
            min = Math.Min(min, value);
        }

        var mean = total / values.Count;
        return new ThroughputStatistics(mean, min);
    }
}

internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
    public static AllocationStatistics From(IReadOnlyList<double> values)
    {
        var max = 0d;
        foreach (var value in values)
        {
            max = Math.Max(max, value);
        }

        return new AllocationStatistics(max);
    }
}
@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" />
  </ItemGroup>
</Project>
4
src/StellaOps.Bench/Notify/baseline.csv
Normal file
@@ -0,0 +1,4 @@
scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb
notify_dispatch_density_05,5,5000,20000,3.4150,4.1722,4.3039,6053938.5172,4646948.1168,0.0000
notify_dispatch_density_20,5,7500,675000,24.2274,25.8517,26.0526,27923335.5855,25909122.3141,0.0000
notify_dispatch_density_40,5,10000,4000080,138.7387,147.7174,149.1124,28916602.9214,26825938.0172,0.0000
47
src/StellaOps.Bench/Notify/config.json
Normal file
@@ -0,0 +1,47 @@
{
  "thresholdMs": 1200,
  "minThroughputPerSecond": 10000,
  "maxAllocatedMb": 512,
  "iterations": 5,
  "scenarios": [
    {
      "id": "notify_dispatch_density_05",
      "label": "50 rules / 5% fanout",
      "eventCount": 5000,
      "ruleCount": 50,
      "actionsPerRule": 2,
      "matchRate": 0.05,
      "tenantCount": 4,
      "channelCount": 12,
      "thresholdMs": 400,
      "minThroughputPerSecond": 15000,
      "maxAllocatedMb": 128
    },
    {
      "id": "notify_dispatch_density_20",
      "label": "150 rules / 20% fanout",
      "eventCount": 7500,
      "ruleCount": 150,
      "actionsPerRule": 3,
      "matchRate": 0.2,
      "tenantCount": 6,
      "channelCount": 24,
      "thresholdMs": 650,
      "minThroughputPerSecond": 30000,
      "maxAllocatedMb": 192
    },
    {
      "id": "notify_dispatch_density_40",
      "label": "300 rules / 40% fanout",
      "eventCount": 10000,
      "ruleCount": 300,
      "actionsPerRule": 4,
      "matchRate": 0.4,
      "tenantCount": 8,
      "channelCount": 32,
      "thresholdMs": 900,
      "minThroughputPerSecond": 45000,
      "maxAllocatedMb": 256
    }
  ]
}
24
src/StellaOps.Bench/PolicyEngine/README.md
Normal file
@@ -0,0 +1,24 @@
# Policy Engine Bench

Synthetic workload that measures end-to-end policy evaluation throughput against large advisory snapshots.

## Scenarios

`config.json` defines the default scenario (`policy_eval_baseline`), which generates **100 000 components** with **1 000 000 findings** derived from deterministic seeds. The harness measures:

- Latency (mean/p95/max milliseconds)
- Throughput (findings per second)
- Managed heap allocations (megabytes)

## Running locally

```bash
dotnet run \
  --project src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/StellaOps.Bench.PolicyEngine.csproj \
  -- \
  --csv out/policy-bench.csv \
  --json out/policy-bench.json \
  --prometheus out/policy-bench.prom
```

The run exits non-zero if latency exceeds configured thresholds, throughput falls below the configured floor, or allocations surpass the budget. Baselines live in `baseline.csv` and can be regenerated whenever new steady-state metrics are established.
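
For orientation, a `baseline.csv` row follows the column order that `BaselineLoader` parses (scenario, iterations, findings, mean/p95/max latency, mean/min throughput, allocations); the header names and the numbers below are illustrative placeholders, not captured measurements:

```csv
scenario,iterations,findings,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb
policy_eval_baseline,5,1000000,850.0000,910.0000,950.0000,1200000.0000,1050000.0000,256.0000
```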
@@ -0,0 +1,12 @@
namespace StellaOps.Bench.PolicyEngine.Baseline;

internal sealed record BaselineEntry(
    string ScenarioId,
    int Iterations,
    int FindingCount,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MaxAllocatedMb);
@@ -0,0 +1,86 @@
using System.Globalization;

namespace StellaOps.Bench.PolicyEngine.Baseline;

internal static class BaselineLoader
{
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }

        var result = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);

        await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var reader = new StreamReader(stream);

        var lineNumber = 0;
        while (true)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var line = await reader.ReadLineAsync().ConfigureAwait(false);
            if (line is null)
            {
                break;
            }

            lineNumber++;
            if (lineNumber == 1)
            {
                continue; // header
            }

            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            var parts = line.Split(',', StringSplitOptions.TrimEntries);
            if (parts.Length < 9)
            {
                throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 9 columns, found {parts.Length}).");
            }

            var entry = new BaselineEntry(
                ScenarioId: parts[0],
                Iterations: ParseInt(parts[1], resolved, lineNumber),
                FindingCount: ParseInt(parts[2], resolved, lineNumber),
                MeanMs: ParseDouble(parts[3], resolved, lineNumber),
                P95Ms: ParseDouble(parts[4], resolved, lineNumber),
                MaxMs: ParseDouble(parts[5], resolved, lineNumber),
                MeanThroughputPerSecond: ParseDouble(parts[6], resolved, lineNumber),
                MinThroughputPerSecond: ParseDouble(parts[7], resolved, lineNumber),
                MaxAllocatedMb: ParseDouble(parts[8], resolved, lineNumber));

            result[entry.ScenarioId] = entry;
        }

        return result;
    }

    private static int ParseInt(string value, string file, int line)
    {
        if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var result))
        {
            return result;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
    }

    private static double ParseDouble(string value, string file, int line)
    {
        if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result))
        {
            return result;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
    }
}
@@ -0,0 +1,155 @@
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Bench.PolicyEngine;

internal sealed record BenchmarkConfig(
    double? ThresholdMs,
    double? MinThroughputPerSecond,
    double? MaxAllocatedMb,
    int? Iterations,
    IReadOnlyList<PolicyScenarioConfig> Scenarios)
{
    public static async Task<BenchmarkConfig> LoadAsync(string path)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
        }

        await using var stream = File.OpenRead(resolved);
        var model = await JsonSerializer.DeserializeAsync<BenchmarkConfigModel>(
            stream,
            new JsonSerializerOptions(JsonSerializerDefaults.Web)
            {
                PropertyNameCaseInsensitive = true,
                ReadCommentHandling = JsonCommentHandling.Skip,
                AllowTrailingCommas = true
            }).ConfigureAwait(false);

        if (model is null)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
        }

        if (model.Scenarios.Count == 0)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
        }

        foreach (var scenario in model.Scenarios)
        {
            scenario.Validate();
        }

        return new BenchmarkConfig(
            model.ThresholdMs,
            model.MinThroughputPerSecond,
            model.MaxAllocatedMb,
            model.Iterations,
            model.Scenarios);
    }

    private sealed class BenchmarkConfigModel
    {
        [JsonPropertyName("thresholdMs")]
        public double? ThresholdMs { get; init; }

        [JsonPropertyName("minThroughputPerSecond")]
        public double? MinThroughputPerSecond { get; init; }

        [JsonPropertyName("maxAllocatedMb")]
        public double? MaxAllocatedMb { get; init; }

        [JsonPropertyName("iterations")]
        public int? Iterations { get; init; }

        [JsonPropertyName("scenarios")]
        public List<PolicyScenarioConfig> Scenarios { get; init; } = new();
    }
}

internal sealed class PolicyScenarioConfig
{
    private const int DefaultComponentCount = 100_000;
    private const int DefaultAdvisoriesPerComponent = 10;

    [JsonPropertyName("id")]
    public string? Id { get; init; }

    [JsonPropertyName("label")]
    public string? Label { get; init; }

    [JsonPropertyName("policyPath")]
    public string PolicyPath { get; init; } = "docs/examples/policies/baseline.yaml";

    [JsonPropertyName("scoringConfig")]
    public string? ScoringConfigPath { get; init; }

    [JsonPropertyName("componentCount")]
    public int ComponentCount { get; init; } = DefaultComponentCount;

    [JsonPropertyName("advisoriesPerComponent")]
    public int AdvisoriesPerComponent { get; init; } = DefaultAdvisoriesPerComponent;

    [JsonPropertyName("totalFindings")]
    public int? TotalFindings { get; init; }

    [JsonPropertyName("seed")]
    public int? Seed { get; init; }

    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }

    [JsonPropertyName("minThroughputPerSecond")]
    public double? MinThroughputPerSecond { get; init; }

    [JsonPropertyName("maxAllocatedMb")]
    public double? MaxAllocatedMb { get; init; }

    public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "policy_eval" : Id.Trim();

    public int ResolveFindingCount()
    {
        if (TotalFindings is { } findings)
        {
            if (findings <= 0)
            {
                throw new InvalidOperationException($"Scenario '{ScenarioId}' requires totalFindings > 0.");
            }

            return findings;
        }

        if (ComponentCount <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires componentCount > 0.");
        }

        if (AdvisoriesPerComponent <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires advisoriesPerComponent > 0.");
        }

        checked
        {
            var total = ComponentCount * AdvisoriesPerComponent;
            return total;
        }
    }

    public int ResolveSeed() => Seed ?? 2025_10_26;

    public void Validate()
    {
        if (string.IsNullOrWhiteSpace(PolicyPath))
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires a policyPath.");
        }

        ResolveFindingCount();
    }
}
@@ -0,0 +1,15 @@
namespace StellaOps.Bench.PolicyEngine;

internal static class PathUtilities
{
    public static bool IsWithinRoot(string root, string candidate)
    {
        var relative = Path.GetRelativePath(root, candidate);
        if (string.IsNullOrEmpty(relative) || relative == ".")
        {
            return true;
        }

        return !relative.StartsWith("..", StringComparison.Ordinal) && !Path.IsPathRooted(relative);
    }
}
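A minimal sketch of the containment contract this helper enforces; the paths below are hypothetical, not taken from the repository:

```csharp
// Hypothetical inputs; illustrates the intended behavior of IsWithinRoot.
PathUtilities.IsWithinRoot("/repo", "/repo");                   // true  (relative path is ".")
PathUtilities.IsWithinRoot("/repo", "/repo/docs/policy.yaml");  // true  (stays under the root)
PathUtilities.IsWithinRoot("/repo", "/repo/../etc/passwd");     // false (relative path starts with "..")
```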
@@ -0,0 +1,249 @@
using System.Collections.Immutable;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using StellaOps.Policy;

namespace StellaOps.Bench.PolicyEngine;

internal sealed class PolicyScenarioRunner
{
    private readonly PolicyScenarioConfig _config;
    private readonly PolicyDocument _document;
    private readonly PolicyScoringConfig _scoringConfig;
    private readonly PolicyFinding[] _findings;

    public PolicyScenarioRunner(PolicyScenarioConfig config, string repoRoot)
    {
        _config = config ?? throw new ArgumentNullException(nameof(config));
        ArgumentException.ThrowIfNullOrWhiteSpace(repoRoot);

        var policyPath = ResolvePathWithinRoot(repoRoot, config.PolicyPath);
        var policyContent = File.ReadAllText(policyPath);
        var policyFormat = PolicySchema.DetectFormat(policyPath);
        var binding = PolicyBinder.Bind(policyContent, policyFormat);
        if (!binding.Success)
        {
            var issues = string.Join(", ", binding.Issues.Select(issue => issue.Code));
            throw new InvalidOperationException($"Policy '{config.PolicyPath}' failed validation: {issues}.");
        }

        _document = binding.Document;

        _scoringConfig = LoadScoringConfig(repoRoot, config.ScoringConfigPath);
        _findings = SyntheticFindingGenerator.Create(config, repoRoot);
    }

    public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }

        var durations = new double[iterations];
        var throughputs = new double[iterations];
        var allocations = new double[iterations];
        var hashingAccumulator = new EvaluationAccumulator();

        for (var index = 0; index < iterations; index++)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var beforeAllocated = GC.GetTotalAllocatedBytes();
            var stopwatch = Stopwatch.StartNew();

            hashingAccumulator.Reset();
            foreach (var finding in _findings)
            {
                var verdict = PolicyEvaluation.EvaluateFinding(_document, _scoringConfig, finding);
                hashingAccumulator.Add(verdict);
            }

            stopwatch.Stop();

            var afterAllocated = GC.GetTotalAllocatedBytes();
            var elapsedMs = stopwatch.Elapsed.TotalMilliseconds;
            if (elapsedMs <= 0)
            {
                elapsedMs = 0.0001;
            }

            durations[index] = elapsedMs;
            // Derive throughput from the clamped duration so a near-zero timer reading cannot divide by zero.
            throughputs[index] = _findings.Length / (elapsedMs / 1000d);
            allocations[index] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);

            hashingAccumulator.AssertConsumed();
        }

        return new ScenarioExecutionResult(
            durations,
            throughputs,
            allocations,
            _findings.Length);
    }

    private static PolicyScoringConfig LoadScoringConfig(string repoRoot, string? scoringPath)
    {
        if (string.IsNullOrWhiteSpace(scoringPath))
        {
            return PolicyScoringConfig.Default;
        }

        var resolved = ResolvePathWithinRoot(repoRoot, scoringPath);
        var format = PolicySchema.DetectFormat(resolved);
        var content = File.ReadAllText(resolved);
        var binding = PolicyScoringConfigBinder.Bind(content, format);
        if (!binding.Success || binding.Config is null)
        {
            var issues = binding.Issues.Length == 0
                ? "unknown"
                : string.Join(", ", binding.Issues.Select(issue => issue.Code));
            throw new InvalidOperationException($"Scoring configuration '{scoringPath}' failed validation: {issues}.");
        }

        return binding.Config;
    }

    private static string ResolvePathWithinRoot(string repoRoot, string relativePath)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(repoRoot);
        ArgumentException.ThrowIfNullOrWhiteSpace(relativePath);

        var combined = Path.GetFullPath(Path.Combine(repoRoot, relativePath));
        if (!PathUtilities.IsWithinRoot(repoRoot, combined))
        {
            throw new InvalidOperationException($"Path '{relativePath}' escapes repository root '{repoRoot}'.");
        }

        if (!File.Exists(combined))
        {
            throw new FileNotFoundException($"Path '{relativePath}' resolved to '{combined}' but does not exist.", combined);
        }

        return combined;
    }
}

internal sealed record ScenarioExecutionResult(
    IReadOnlyList<double> Durations,
    IReadOnlyList<double> Throughputs,
    IReadOnlyList<double> AllocatedMb,
    int FindingCount);

internal static class SyntheticFindingGenerator
{
    private static readonly ImmutableArray<string> Environments = ImmutableArray.Create("prod", "staging", "dev");
    private static readonly ImmutableArray<string> Sources = ImmutableArray.Create("concelier", "excitor", "sbom");
    private static readonly ImmutableArray<string> Vendors = ImmutableArray.Create("acme", "contoso", "globex", "initech", "umbrella");
    private static readonly ImmutableArray<string> Licenses = ImmutableArray.Create("MIT", "Apache-2.0", "GPL-3.0", "BSD-3-Clause", "Proprietary");
    private static readonly ImmutableArray<string> Repositories = ImmutableArray.Create("acme/service-api", "acme/web", "acme/worker", "acme/mobile", "acme/cli");
    private static readonly ImmutableArray<string> Images = ImmutableArray.Create("registry.local/worker:2025.10", "registry.local/api:2025.10", "registry.local/cli:2025.10");
    private static readonly ImmutableArray<string> TagPool = ImmutableArray.Create("kev", "runtime", "reachable", "public", "third-party", "critical-path");
    private static readonly ImmutableArray<ImmutableArray<string>> TagSets = BuildTagSets();
    private static readonly PolicySeverity[] SeverityPool =
    {
        PolicySeverity.Critical,
        PolicySeverity.High,
        PolicySeverity.Medium,
        PolicySeverity.Low,
        PolicySeverity.Informational
    };

    public static PolicyFinding[] Create(PolicyScenarioConfig config, string repoRoot)
    {
        var totalFindings = config.ResolveFindingCount();
        if (totalFindings <= 0)
        {
            return Array.Empty<PolicyFinding>();
        }

        var seed = config.ResolveSeed();
        var random = new Random(seed);
        var findings = new PolicyFinding[totalFindings];
        var tagsBuffer = new List<string>(3);

        var componentCount = Math.Max(1, config.ComponentCount);

        for (var index = 0; index < totalFindings; index++)
        {
            var componentIndex = index % componentCount;
            var findingId = $"F-{componentIndex:D5}-{index:D6}";
            var severity = SeverityPool[random.Next(SeverityPool.Length)];
            var environment = Environments[componentIndex % Environments.Length];
            var source = Sources[random.Next(Sources.Length)];
            var vendor = Vendors[random.Next(Vendors.Length)];
            var license = Licenses[random.Next(Licenses.Length)];
            var repository = Repositories[componentIndex % Repositories.Length];
            var image = Images[(componentIndex + index) % Images.Length];
            var packageName = $"pkg{componentIndex % 1000}";
            var purl = $"pkg:generic/{packageName}@{1 + (index % 20)}.{1 + (componentIndex % 10)}.{index % 5}";
            var cve = index % 7 == 0 ? $"CVE-2025-{1000 + index % 9000:D4}" : null;
            var layerDigest = $"sha256:{Convert.ToHexString(Guid.NewGuid().ToByteArray())[..32].ToLowerInvariant()}";

            var tags = TagSets[random.Next(TagSets.Length)];

            findings[index] = PolicyFinding.Create(
                findingId,
                severity,
                environment: environment,
                source: source,
                vendor: vendor,
                license: license,
                image: image,
                repository: repository,
                package: packageName,
                purl: purl,
                cve: cve,
                path: $"/app/{packageName}/{index % 50}.so",
                layerDigest: layerDigest,
                tags: tags);
        }

        return findings;
    }

    private static ImmutableArray<ImmutableArray<string>> BuildTagSets()
    {
        var builder = ImmutableArray.CreateBuilder<ImmutableArray<string>>();
        builder.Add(ImmutableArray<string>.Empty);
        builder.Add(ImmutableArray.Create("kev"));
        builder.Add(ImmutableArray.Create("runtime"));
        builder.Add(ImmutableArray.Create("reachable"));
        builder.Add(ImmutableArray.Create("third-party"));
        builder.Add(ImmutableArray.Create("kev", "runtime"));
        builder.Add(ImmutableArray.Create("kev", "third-party"));
        builder.Add(ImmutableArray.Create("runtime", "public"));
        builder.Add(ImmutableArray.Create("reachable", "critical-path"));
        return builder.ToImmutable();
    }
}

internal sealed class EvaluationAccumulator
{
    private double _scoreAccumulator;
    private int _quietCount;

    public void Reset()
    {
        _scoreAccumulator = 0;
        _quietCount = 0;
    }

    public void Add(PolicyVerdict verdict)
    {
        _scoreAccumulator += verdict.Score;
        if (verdict.Quiet)
        {
            _quietCount++;
        }
    }

    public void AssertConsumed()
    {
        if (_scoreAccumulator == 0 && _quietCount == 0)
        {
            throw new InvalidOperationException("Evaluation accumulator detected zero work; dataset may be empty.");
        }
    }
}
@@ -0,0 +1,373 @@
using System.Globalization;
using System.Linq;
using StellaOps.Bench.PolicyEngine.Baseline;
using StellaOps.Bench.PolicyEngine.Reporting;

namespace StellaOps.Bench.PolicyEngine;

internal static class Program
{
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = ProgramOptions.Parse(args);
            var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
            var iterations = options.Iterations ?? config.Iterations ?? 3;
            var repoRoot = ResolveRepoRoot(options.RepoRoot, options.ConfigPath);
            var thresholdMs = options.ThresholdMs ?? config.ThresholdMs;
            var throughputFloor = options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
            var allocationLimit = options.MaxAllocatedMb ?? config.MaxAllocatedMb;
            var regressionLimit = options.RegressionLimit;
            var capturedAt = (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime();

            var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);

            var results = new List<ScenarioResult>();
            var reports = new List<BenchmarkScenarioReport>();
            var failures = new List<string>();

            foreach (var scenario in config.Scenarios)
            {
                var runner = new PolicyScenarioRunner(scenario, repoRoot);
                var execution = runner.Execute(iterations, CancellationToken.None);

                var durationStats = DurationStatistics.From(execution.Durations);
                var throughputStats = ThroughputStatistics.From(execution.Throughputs);
                var allocationStats = AllocationStatistics.From(execution.AllocatedMb);

                var scenarioThreshold = scenario.ThresholdMs ?? thresholdMs;
                var scenarioThroughputFloor = scenario.MinThroughputPerSecond ?? throughputFloor;
                var scenarioAllocationLimit = scenario.MaxAllocatedMb ?? allocationLimit;

                var result = new ScenarioResult(
                    scenario.ScenarioId,
                    scenario.Label ?? scenario.ScenarioId,
                    iterations,
                    execution.FindingCount,
                    durationStats.MeanMs,
                    durationStats.P95Ms,
                    durationStats.MaxMs,
                    throughputStats.MeanPerSecond,
                    throughputStats.MinPerSecond,
                    allocationStats.MaxAllocatedMb,
                    scenarioThreshold,
                    scenarioThroughputFloor,
                    scenarioAllocationLimit);

                results.Add(result);

                if (scenarioThreshold is { } threshold && result.MaxMs > threshold)
                {
                    failures.Add($"{result.Id} exceeded latency threshold: {result.MaxMs:F2} ms > {threshold:F2} ms");
                }

                if (scenarioThroughputFloor is { } floor && result.MinThroughputPerSecond < floor)
                {
                    failures.Add($"{result.Id} fell below throughput floor: {result.MinThroughputPerSecond:N0} findings/s < {floor:N0} findings/s");
                }

                if (scenarioAllocationLimit is { } limit && result.MaxAllocatedMb > limit)
                {
                    failures.Add($"{result.Id} exceeded allocation budget: {result.MaxAllocatedMb:F2} MB > {limit:F2} MB");
                }

                baseline.TryGetValue(result.Id, out var baselineEntry);
                var report = new BenchmarkScenarioReport(result, baselineEntry, regressionLimit);
                reports.Add(report);
                failures.AddRange(report.BuildRegressionFailureMessages());
            }

            TablePrinter.Print(results);

            if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
            {
                CsvWriter.Write(options.CsvOutPath!, results);
            }

            if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
            {
                var metadata = new BenchmarkJsonMetadata(
                    SchemaVersion: "policy-bench/1.0",
                    CapturedAtUtc: capturedAt,
                    Commit: options.Commit,
                    Environment: options.Environment);

                await BenchmarkJsonWriter.WriteAsync(
                    options.JsonOutPath!,
                    metadata,
                    reports,
                    CancellationToken.None).ConfigureAwait(false);
            }

            if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
            {
                PrometheusWriter.Write(options.PrometheusOutPath!, reports);
            }

            if (failures.Count > 0)
            {
                Console.Error.WriteLine();
                Console.Error.WriteLine("Benchmark failures detected:");
                foreach (var failure in failures.Distinct())
                {
                    Console.Error.WriteLine($" - {failure}");
                }

                return 1;
            }

            return 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"policy-bench error: {ex.Message}");
            return 1;
        }
    }

    private static string ResolveRepoRoot(string? overridePath, string configPath)
    {
        if (!string.IsNullOrWhiteSpace(overridePath))
        {
            return Path.GetFullPath(overridePath);
        }

        var configDirectory = Path.GetDirectoryName(configPath);
        if (string.IsNullOrWhiteSpace(configDirectory))
        {
            return Directory.GetCurrentDirectory();
        }

        return Path.GetFullPath(Path.Combine(configDirectory, "..", "..", ".."));
    }

    private sealed record ProgramOptions(
        string ConfigPath,
        int? Iterations,
        double? ThresholdMs,
        double? MinThroughputPerSecond,
        double? MaxAllocatedMb,
        string? CsvOutPath,
        string? JsonOutPath,
        string? PrometheusOutPath,
        string? RepoRoot,
        string BaselinePath,
        DateTimeOffset? CapturedAtUtc,
        string? Commit,
        string? Environment,
        double? RegressionLimit)
    {
        public static ProgramOptions Parse(string[] args)
        {
            var configPath = DefaultConfigPath();
            var baselinePath = DefaultBaselinePath();

            int? iterations = null;
            double? thresholdMs = null;
            double? minThroughput = null;
            double? maxAllocated = null;
            string? csvOut = null;
            string? jsonOut = null;
            string? promOut = null;
            string? repoRoot = null;
            DateTimeOffset? capturedAt = null;
            string? commit = null;
            string? environment = null;
            double? regressionLimit = null;

            for (var index = 0; index < args.Length; index++)
            {
                var current = args[index];
                switch (current)
                {
                    case "--config":
                        EnsureNext(args, index);
                        configPath = Path.GetFullPath(args[++index]);
                        break;
                    case "--iterations":
                        EnsureNext(args, index);
                        iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--threshold-ms":
                        EnsureNext(args, index);
                        thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-throughput":
                        EnsureNext(args, index);
                        minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--max-allocated-mb":
                        EnsureNext(args, index);
                        maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--csv":
                        EnsureNext(args, index);
                        csvOut = args[++index];
                        break;
                    case "--json":
                        EnsureNext(args, index);
                        jsonOut = args[++index];
                        break;
                    case "--prometheus":
                        EnsureNext(args, index);
                        promOut = args[++index];
                        break;
                    case "--repo-root":
                        EnsureNext(args, index);
                        repoRoot = args[++index];
                        break;
                    case "--baseline":
                        EnsureNext(args, index);
                        baselinePath = Path.GetFullPath(args[++index]);
                        break;
                    case "--captured-at":
                        EnsureNext(args, index);
                        capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
                        break;
                    case "--commit":
                        EnsureNext(args, index);
                        commit = args[++index];
                        break;
                    case "--environment":
                        EnsureNext(args, index);
                        environment = args[++index];
                        break;
                    case "--regression-limit":
                        EnsureNext(args, index);
                        regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--help":
                    case "-h":
                        PrintUsage();
                        System.Environment.Exit(0);
                        break;
                    default:
                        throw new ArgumentException($"Unknown argument '{current}'.");
                }
            }

            return new ProgramOptions(
                configPath,
                iterations,
                thresholdMs,
                minThroughput,
                maxAllocated,
                csvOut,
                jsonOut,
                promOut,
                repoRoot,
                baselinePath,
                capturedAt,
                commit,
                environment,
                regressionLimit);
        }

        private static string DefaultConfigPath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "config.json");
        }

        private static string DefaultBaselinePath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "baseline.csv");
        }

        private static void EnsureNext(string[] args, int index)
        {
            if (index + 1 >= args.Length)
            {
                throw new ArgumentException("Missing value for argument.");
            }
        }

        private static void PrintUsage()
        {
            Console.WriteLine("Usage: policy-bench [options]");
            Console.WriteLine();
            Console.WriteLine("Options:");
            Console.WriteLine("  --config <path>              Path to benchmark configuration JSON.");
            Console.WriteLine("  --iterations <count>         Override iteration count.");
            Console.WriteLine("  --threshold-ms <value>       Global latency threshold in milliseconds.");
            Console.WriteLine("  --min-throughput <value>     Global throughput floor (findings/second).");
            Console.WriteLine("  --max-allocated-mb <value>   Global allocation ceiling (MB).");
            Console.WriteLine("  --csv <path>                 Write CSV results to path.");
            Console.WriteLine("  --json <path>                Write JSON results to path.");
            Console.WriteLine("  --prometheus <path>          Write Prometheus exposition metrics to path.");
            Console.WriteLine("  --repo-root <path>           Repository root override.");
            Console.WriteLine("  --baseline <path>            Baseline CSV path.");
            Console.WriteLine("  --captured-at <iso8601>      Timestamp to embed in JSON metadata.");
            Console.WriteLine("  --commit <sha>               Commit identifier for metadata.");
            Console.WriteLine("  --environment <name>         Environment label for metadata.");
            Console.WriteLine("  --regression-limit <value>   Regression multiplier (default 1.15).");
        }
    }
}

internal static class TablePrinter
{
    public static void Print(IEnumerable<ScenarioResult> results)
    {
        Console.WriteLine("Scenario | Findings | Mean(ms) | P95(ms) | Max(ms) | Min k/s | Alloc(MB)");
        Console.WriteLine("---------------------------- | ----------- | ---------- | ---------- | ---------- | -------- | --------");
        foreach (var row in results)
        {
            Console.WriteLine(string.Join(" | ", new[]
            {
                row.IdColumn,
                row.FindingsColumn,
                row.MeanColumn,
                row.P95Column,
                row.MaxColumn,
                row.MinThroughputColumn,
                row.AllocatedColumn
            }));
        }
    }
}

internal static class CsvWriter
{
    public static void Write(string path, IEnumerable<ScenarioResult> results)
    {
        var resolvedPath = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolvedPath);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None);
        using var writer = new StreamWriter(stream);
        writer.WriteLine("scenario,iterations,findings,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb");

        foreach (var row in results)
        {
            writer.Write(row.Id);
            writer.Write(',');
            writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.FindingCount.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MeanThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MinThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture));
            writer.WriteLine();
        }
    }
}
@@ -0,0 +1,125 @@
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Bench.PolicyEngine.Baseline;

namespace StellaOps.Bench.PolicyEngine.Reporting;

internal static class BenchmarkJsonWriter
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());

        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;
        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.FindingCount,
            report.Result.MeanMs,
            report.Result.P95Ms,
            report.Result.MaxMs,
            report.Result.MeanThroughputPerSecond,
            report.Result.MinThroughputPerSecond,
            report.Result.MaxAllocatedMb,
            report.Result.ThresholdMs,
            report.Result.MinThroughputThresholdPerSecond,
            report.Result.MaxAllocatedThresholdMb,
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.FindingCount,
                    baseline.MeanMs,
                    baseline.P95Ms,
                    baseline.MaxMs,
                    baseline.MeanThroughputPerSecond,
                    baseline.MinThroughputPerSecond,
                    baseline.MaxAllocatedMb),
            new BenchmarkJsonScenarioRegression(
                report.DurationRegressionRatio,
                report.ThroughputRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int FindingCount,
        double MeanMs,
        double P95Ms,
        double MaxMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MaxAllocatedMb,
        double? ThresholdMs,
        double? MinThroughputThresholdPerSecond,
        double? MaxAllocatedThresholdMb,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int FindingCount,
        double MeanMs,
        double P95Ms,
        double MaxMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MaxAllocatedMb);

    private sealed record BenchmarkJsonScenarioRegression(
        double? DurationRatio,
        double? ThroughputRatio,
        double Limit,
        bool Breached);
}

internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);
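With `JsonSerializerDefaults.Web` the records serialize with camelCase names, and `WhenWritingNull` drops absent fields (e.g. `commit`, `environment`, and `baseline` on a first run). A sketch of the resulting document, with illustrative values borrowed from the committed `baseline.csv`:

```json
{
  "schemaVersion": "policy-bench/1.0",
  "capturedAt": "2025-10-26T00:00:00+00:00",
  "scenarios": [
    {
      "id": "policy_eval_baseline",
      "label": "Policy evaluation (100k components, 1M findings)",
      "iterations": 3,
      "findingCount": 1000000,
      "meanMs": 1109.3542,
      "p95Ms": 1257.7493,
      "maxMs": 1280.1721,
      "meanThroughputPerSecond": 912094.5581,
      "minThroughputPerSecond": 781144.9726,
      "maxAllocatedMb": 563.6901,
      "thresholdMs": 20000,
      "minThroughputThresholdPerSecond": 60000,
      "maxAllocatedThresholdMb": 900,
      "regression": { "limit": 1.15, "breached": false }
    }
  ]
}
```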
@@ -0,0 +1,82 @@
using StellaOps.Bench.PolicyEngine.Baseline;

namespace StellaOps.Bench.PolicyEngine.Reporting;

internal sealed class BenchmarkScenarioReport
{
    private const double DefaultRegressionLimit = 1.15d;

    public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        Result = result ?? throw new ArgumentNullException(nameof(result));
        Baseline = baseline;
        RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
        DurationRegressionRatio = CalculateDurationRatio(result.MaxMs, baseline?.MaxMs);
        ThroughputRegressionRatio = CalculateThroughputRatio(result.MinThroughputPerSecond, baseline?.MinThroughputPerSecond);
    }

    public ScenarioResult Result { get; }

    public BaselineEntry? Baseline { get; }

    public double RegressionLimit { get; }

    public double? DurationRegressionRatio { get; }

    public double? ThroughputRegressionRatio { get; }

    public bool DurationRegressionBreached =>
        DurationRegressionRatio is { } ratio &&
        ratio >= RegressionLimit;

    public bool ThroughputRegressionBreached =>
        ThroughputRegressionRatio is { } ratio &&
        ratio >= RegressionLimit;

    public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached;

    public IEnumerable<string> BuildRegressionFailureMessages()
    {
        if (Baseline is null)
        {
            yield break;
        }

        if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
        {
            var delta = (durationRatio - 1d) * 100d;
            yield return $"{Result.Id} exceeded max duration budget: {Result.MaxMs:F2} ms vs baseline {Baseline.MaxMs:F2} ms (+{delta:F1}%).";
        }

        if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio)
        {
            var delta = (throughputRatio - 1d) * 100d;
            yield return $"{Result.Id} throughput regressed: min {Result.MinThroughputPerSecond:N0} /s vs baseline {Baseline.MinThroughputPerSecond:N0} /s (-{delta:F1}%).";
        }
    }

    private static double? CalculateDurationRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        return current / baseline.Value;
    }

    private static double? CalculateThroughputRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        if (current <= 0d)
        {
            return double.PositiveInfinity;
        }

        return baseline.Value / current;
    }
}
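Both ratios are oriented so that values at or above the limit signal a regression: duration compares current against baseline, throughput compares baseline against current. A worked sketch of the arithmetic, using the committed `baseline.csv` row; the "current" run values are invented for illustration:

```csharp
// Baseline numbers come from baseline.csv; 1500 ms max and 650k findings/s min are hypothetical.
var baselineMaxMs = 1280.1721;
var baselineMinThroughput = 781144.9726;
var limit = 1.15;

var durationRatio = 1500d / baselineMaxMs;               // ≈ 1.17 >= 1.15 -> duration regression breached
var throughputRatio = baselineMinThroughput / 650_000d;  // ≈ 1.20 >= 1.15 -> throughput regression breached
```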
@@ -0,0 +1,83 @@
using System.Globalization;
using System.Text;

namespace StellaOps.Bench.PolicyEngine.Reporting;

internal static class PrometheusWriter
{
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var builder = new StringBuilder();
        builder.AppendLine("# HELP policy_engine_bench_duration_ms Policy Engine benchmark duration metrics (milliseconds).");
        builder.AppendLine("# TYPE policy_engine_bench_duration_ms gauge");
        builder.AppendLine("# HELP policy_engine_bench_throughput_per_sec Policy Engine benchmark throughput metrics (findings per second).");
        builder.AppendLine("# TYPE policy_engine_bench_throughput_per_sec gauge");
        builder.AppendLine("# HELP policy_engine_bench_allocation_mb Policy Engine benchmark allocation metrics (megabytes).");
        builder.AppendLine("# TYPE policy_engine_bench_allocation_mb gauge");

        foreach (var report in reports)
        {
            var scenarioLabel = Escape(report.Result.Id);
            AppendMetric(builder, "policy_engine_bench_mean_ms", scenarioLabel, report.Result.MeanMs);
            AppendMetric(builder, "policy_engine_bench_p95_ms", scenarioLabel, report.Result.P95Ms);
            AppendMetric(builder, "policy_engine_bench_max_ms", scenarioLabel, report.Result.MaxMs);
            AppendMetric(builder, "policy_engine_bench_threshold_ms", scenarioLabel, report.Result.ThresholdMs);

            AppendMetric(builder, "policy_engine_bench_mean_throughput_per_sec", scenarioLabel, report.Result.MeanThroughputPerSecond);
            AppendMetric(builder, "policy_engine_bench_min_throughput_per_sec", scenarioLabel, report.Result.MinThroughputPerSecond);
            AppendMetric(builder, "policy_engine_bench_min_throughput_threshold_per_sec", scenarioLabel, report.Result.MinThroughputThresholdPerSecond);

            AppendMetric(builder, "policy_engine_bench_max_allocated_mb", scenarioLabel, report.Result.MaxAllocatedMb);
            AppendMetric(builder, "policy_engine_bench_max_allocated_threshold_mb", scenarioLabel, report.Result.MaxAllocatedThresholdMb);

            if (report.Baseline is { } baseline)
            {
                AppendMetric(builder, "policy_engine_bench_baseline_max_ms", scenarioLabel, baseline.MaxMs);
                AppendMetric(builder, "policy_engine_bench_baseline_mean_ms", scenarioLabel, baseline.MeanMs);
                AppendMetric(builder, "policy_engine_bench_baseline_min_throughput_per_sec", scenarioLabel, baseline.MinThroughputPerSecond);
            }

            if (report.DurationRegressionRatio is { } durationRatio)
            {
                AppendMetric(builder, "policy_engine_bench_duration_regression_ratio", scenarioLabel, durationRatio);
            }

            if (report.ThroughputRegressionRatio is { } throughputRatio)
            {
                AppendMetric(builder, "policy_engine_bench_throughput_regression_ratio", scenarioLabel, throughputRatio);
            }

            AppendMetric(builder, "policy_engine_bench_regression_limit", scenarioLabel, report.RegressionLimit);
            AppendMetric(builder, "policy_engine_bench_regression_breached", scenarioLabel, report.RegressionBreached ? 1 : 0);
        }

        File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
    }

    private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
    {
        if (!value.HasValue)
        {
            return;
        }

        builder.Append(metric);
        builder.Append("{scenario=\"");
        builder.Append(scenario);
        builder.Append("\"} ");
        builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture));
    }

    private static string Escape(string value) =>
        value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}
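The exposition output looks roughly like the excerpt below; the values reuse the committed baseline numbers for illustration, while the real file prints full `G17` precision and skips metrics whose values are null:

```text
policy_engine_bench_mean_ms{scenario="policy_eval_baseline"} 1109.3542
policy_engine_bench_p95_ms{scenario="policy_eval_baseline"} 1257.7493
policy_engine_bench_max_ms{scenario="policy_eval_baseline"} 1280.1721
policy_engine_bench_min_throughput_per_sec{scenario="policy_eval_baseline"} 781144.9726
policy_engine_bench_regression_limit{scenario="policy_eval_baseline"} 1.15
policy_engine_bench_regression_breached{scenario="policy_eval_baseline"} 0
```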
@@ -0,0 +1,110 @@
using System.Globalization;

namespace StellaOps.Bench.PolicyEngine;

internal sealed record ScenarioResult(
    string Id,
    string Label,
    int Iterations,
    int FindingCount,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MaxAllocatedMb,
    double? ThresholdMs,
    double? MinThroughputThresholdPerSecond,
    double? MaxAllocatedThresholdMb)
{
    public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];
    public string FindingsColumn => FindingCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);
    public string MeanColumn => MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    public string P95Column => P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    public string MaxColumn => MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    public string MinThroughputColumn => (MinThroughputPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
    public string AllocatedColumn => MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}

internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
    public static DurationStatistics From(IReadOnlyList<double> durations)
    {
        if (durations.Count == 0)
        {
            return new DurationStatistics(0, 0, 0);
        }

        var sorted = durations.ToArray();
        Array.Sort(sorted);

        var total = 0d;
        foreach (var value in durations)
        {
            total += value;
        }

        var mean = total / durations.Count;
        var p95 = Percentile(sorted, 95);
        var max = sorted[^1];

        return new DurationStatistics(mean, p95, max);
    }

    private static double Percentile(IReadOnlyList<double> sorted, double percentile)
    {
        if (sorted.Count == 0)
        {
            return 0;
        }

        var rank = (percentile / 100d) * (sorted.Count - 1);
        var lower = (int)Math.Floor(rank);
        var upper = (int)Math.Ceiling(rank);
        var weight = rank - lower;

        if (upper >= sorted.Count)
        {
            return sorted[lower];
        }

        return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
    }
}

internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
    public static ThroughputStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new ThroughputStatistics(0, 0);
        }

        var total = 0d;
        var min = double.MaxValue;

        foreach (var value in values)
        {
            total += value;
            min = Math.Min(min, value);
        }

        var mean = total / values.Count;
        return new ThroughputStatistics(mean, min);
    }
}

internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
    public static AllocationStatistics From(IReadOnlyList<double> values)
    {
        var max = 0d;
        foreach (var value in values)
        {
            max = Math.Max(max, value);
        }

        return new AllocationStatistics(max);
    }
}
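The percentile helper interpolates linearly between the two closest ranks. A quick sketch of the arithmetic on a toy three-sample run:

```csharp
// With sorted = [100, 110, 180] and p95: rank = 0.95 * (3 - 1) = 1.9,
// lower = 1, weight = 0.9, so the result is 110 + 0.9 * (180 - 110) = 173.
var sorted = new[] { 100d, 110d, 180d };
var rank = 0.95 * (sorted.Length - 1);
var lower = (int)Math.Floor(rank);
var weight = rank - lower;
var p95 = sorted[lower] + weight * (sorted[lower + 1] - sorted[lower]);
Console.WriteLine(p95); // 173
```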
@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\..\StellaOps.Policy\StellaOps.Policy.csproj" />
  </ItemGroup>
</Project>
2
src/StellaOps.Bench/PolicyEngine/baseline.csv
Normal file
@@ -0,0 +1,2 @@
scenario,iterations,findings,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb
policy_eval_baseline,3,1000000,1109.3542,1257.7493,1280.1721,912094.5581,781144.9726,563.6901
19
src/StellaOps.Bench/PolicyEngine/config.json
Normal file
@@ -0,0 +1,19 @@
{
  "iterations": 3,
  "thresholdMs": 20000,
  "minThroughputPerSecond": 60000,
  "maxAllocatedMb": 900,
  "scenarios": [
    {
      "id": "policy_eval_baseline",
      "label": "Policy evaluation (100k components, 1M findings)",
      "policyPath": "docs/examples/policies/baseline.yaml",
      "componentCount": 100000,
      "advisoriesPerComponent": 10,
      "seed": 20251026,
      "thresholdMs": 20000,
      "minThroughputPerSecond": 60000,
      "maxAllocatedMb": 900
    }
  ]
}
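Assuming the harness project sits next to these files at `src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/` (the project path itself is not shown in this commit), a local run against this config and baseline would look roughly like:

```bash
dotnet run \
  --project src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/StellaOps.Bench.PolicyEngine.csproj \
  -- \
  --config src/StellaOps.Bench/PolicyEngine/config.json \
  --baseline src/StellaOps.Bench/PolicyEngine/baseline.csv \
  --csv out/policy-bench.csv \
  --json out/policy-bench.json \
  --prometheus out/policy-bench.prom
```

The `--config`, `--baseline`, and output flags are the ones handled by `ProgramOptions.Parse` above; when omitted, the harness falls back to the `config.json` and `baseline.csv` next to the build output.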
@@ -5,35 +5,36 @@
| BENCH-SCANNER-10-001 | DONE | Bench Guild, Scanner Team | SCANNER-ANALYZERS-LANG-10-303 | Analyzer microbench harness (node_modules, site-packages) + baseline CSV. | Harness committed under `src/StellaOps.Bench/Scanner.Analyzers`; baseline CSV recorded; CI job publishes results. |
| BENCH-SCANNER-10-002 | DONE (2025-10-21) | Bench Guild, Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-301..309 | Wire real language analyzers into bench harness & refresh baselines post-implementation. | Harness executes analyzer assemblies end-to-end; updated baseline committed; CI trend doc linked. |
| BENCH-IMPACT-16-001 | TODO | Bench Guild, Scheduler Team | SCHED-IMPACT-16-301 | ImpactIndex throughput bench (resolve 10k productKeys) + RAM profile. | Benchmark script ready; baseline metrics recorded; alert thresholds defined. |
| BENCH-NOTIFY-15-001 | TODO | Bench Guild, Notify Team | NOTIFY-ENGINE-15-301 | Notify dispatch throughput bench (vary rule density) with results CSV. | Bench executed; results stored; regression alert configured. |
| BENCH-NOTIFY-15-001 | DONE (2025-10-26) | Bench Guild, Notify Team | NOTIFY-ENGINE-15-301 | Notify dispatch throughput bench (vary rule density) with results CSV. | Bench executed; results stored; regression alert configured. |

## Policy Engine v2

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| BENCH-POLICY-20-001 | TODO | Bench Guild, Policy Guild | POLICY-ENGINE-20-002, POLICY-ENGINE-20-006 | Build policy evaluation benchmark suite (100k components, 1M advisories) capturing latency, throughput, memory. | Bench harness committed; baseline metrics recorded; ties into CI dashboards. |
| BENCH-POLICY-20-002 | TODO | Bench Guild, Policy Guild, Scheduler Guild | BENCH-POLICY-20-001, SCHED-WORKER-20-302 | Add incremental run benchmark measuring delta evaluation vs full; capture SLA compliance. | Incremental bench executed; results stored; regression alerts configured. |
| BENCH-POLICY-20-001 | DONE (2025-10-26) | Bench Guild, Policy Guild | POLICY-ENGINE-20-002, POLICY-ENGINE-20-006 | Build policy evaluation benchmark suite (100k components, 1M advisories) capturing latency, throughput, memory. | Bench harness committed; baseline metrics recorded; ties into CI dashboards. |
> 2025-10-26: Added `StellaOps.Bench.PolicyEngine` harness, synthetic dataset generator, baseline + Prom/JSON outputs; default thresholds cover latency/throughput/allocation.
| BENCH-POLICY-20-002 | BLOCKED (waiting on SCHED-WORKER-20-302) | Bench Guild, Policy Guild, Scheduler Guild | BENCH-POLICY-20-001, SCHED-WORKER-20-302 | Add incremental run benchmark measuring delta evaluation vs full; capture SLA compliance. | Incremental bench executed; results stored; regression alerts configured. |
> 2025-10-26: Scheduler delta targeting (SCHED-WORKER-20-302) not implemented; incremental bench paused until worker emits delta input stream.

## Graph Explorer v1

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| BENCH-GRAPH-21-001 | TODO | Bench Guild, Cartographer Guild | CARTO-GRAPH-21-004, CARTO-GRAPH-21-006 | Build graph viewport/path benchmark harness simulating 50k/100k nodes; record latency, memory, tile cache hit rates. | Harness committed; baseline metrics logged; integrates with perf dashboards. |
| BENCH-GRAPH-21-002 | TODO | Bench Guild, UI Guild | BENCH-GRAPH-21-001, UI-GRAPH-21-001 | Add headless UI load benchmark (Playwright) for graph canvas interactions to track render times and FPS budgets. | Benchmark runs in CI; results exported; alert thresholds defined. |
| BENCH-GRAPH-21-001 | DOING (2025-10-27) | Bench Guild, Graph Platform Guild | GRAPH-API-28-003, GRAPH-INDEX-28-006 | Build graph viewport/path benchmark harness (50k/100k nodes) measuring Graph API/Indexer latency, memory, and tile cache hit rates. *(Executed within Sprint 28 Graph program).* | Harness committed; baseline metrics logged; integrates with perf dashboards. |
| BENCH-GRAPH-21-002 | TODO | Bench Guild, UI Guild | BENCH-GRAPH-21-001, UI-GRAPH-24-001 | Add headless UI load benchmark (Playwright) for graph canvas interactions to track render times and FPS budgets. *(Executed within Sprint 28 Graph program).* | Benchmark runs in CI; results exported; alert thresholds defined. |

## Link-Not-Merge v1

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| BENCH-LNM-22-001 | TODO | Bench Guild, Concelier Guild | CONCELIER-LNM-21-002 | Create ingest benchmark simulating 500 advisory observations/sec, measuring correlator latency and Mongo throughput; publish baseline metrics. | Harness added; baseline stored; alerts wired for SLA breach. |
| BENCH-LNM-22-002 | TODO | Bench Guild, Excititor Guild | EXCITITOR-LNM-21-002 | Build VEX ingestion/correlation perf test focusing on alias/product matching and event emission latency. | Benchmark executed; metrics captured; CI integration established. |
| BENCH-LNM-22-001 | DONE (2025-10-26) | Bench Guild, Concelier Guild | CONCELIER-LNM-21-002 | Create ingest benchmark simulating 500 advisory observations/sec, measuring correlator latency and Mongo throughput; publish baseline metrics. | Harness added; baseline stored; alerts wired for SLA breach. |
| BENCH-LNM-22-002 | DONE (2025-10-26) | Bench Guild, Excititor Guild | EXCITITOR-LNM-21-002 | Build VEX ingestion/correlation perf test focusing on alias/product matching and event emission latency. | Benchmark executed; metrics captured; CI integration established. |

## Graph & Vuln Explorer v1

| ID | Status | Owner(s) | Depends on | Description | Exit Criteria |
|----|--------|----------|------------|-------------|---------------|
| BENCH-GRAPH-24-001 | TODO | Bench Guild, SBOM Service Guild | SBOM-GRAPH-24-002 | Develop SBOM graph performance benchmark measuring build time, memory, and cache warm latency for 40k-node assets. | Benchmark runs in CI; baseline metrics recorded; alerts configured. |
| BENCH-GRAPH-24-002 | TODO | Bench Guild, UI Guild | UI-GRAPH-24-001..005 | Implement UI interaction benchmarks (filter/zoom/table operations) citing p95 latency; integrate with perf dashboards. | UI perf metrics collected; thresholds enforced; documentation updated. |
| BENCH-GRAPH-24-002 | TODO | Bench Guild, UI Guild | UI-GRAPH-24-001, UI-GRAPH-24-002 | Implement UI interaction benchmarks (filter/zoom/table operations) citing p95 latency; integrate with perf dashboards. | UI perf metrics collected; thresholds enforced; documentation updated. |

## Reachability v1