up
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Notify Smoke Test / Notify Unit Tests (push) Has been cancelled
Notify Smoke Test / Notifier Service Tests (push) Has been cancelled
Notify Smoke Test / Notification Smoke Test (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
Signals CI & Image / signals-ci (push) Has been cancelled
Signals Reachability Scoring & Events / reachability-smoke (push) Has been cancelled
Signals Reachability Scoring & Events / sign-and-upload (push) Has been cancelled

This commit is contained in:
StellaOps Bot
2025-12-13 00:20:26 +02:00
parent e1f1bef4c1
commit 564df71bfb
2376 changed files with 334389 additions and 328032 deletions

View File

@@ -1,37 +1,37 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Vex.Tests;
/// <summary>Exercises <see cref="BaselineLoader"/> CSV parsing against a real temp file.</summary>
public sealed class BaselineLoaderTests
{
    [Fact]
    public async Task LoadAsync_ReadsEntries()
    {
        // BaselineLoader reads from disk, so stage a scratch file with one scenario row.
        var tempFile = Path.GetTempFileName();
        try
        {
            var csv =
                "scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb\n" +
                "vex_ingest_baseline,5,4000,24000,12000,620.5,700.1,820.9,320.5,300.0,9800.0,9100.0,4200.0,3900.0,150.0\n";
            await File.WriteAllTextAsync(tempFile, csv);

            var entries = await BaselineLoader.LoadAsync(tempFile, CancellationToken.None);

            // Exactly one entry is expected, keyed by the scenario id in column 0.
            var row = Assert.Single(entries);
            Assert.Equal("vex_ingest_baseline", row.Key);
            Assert.Equal(4000, row.Value.Observations);
            Assert.Equal(24000, row.Value.Statements);
            Assert.Equal(12000, row.Value.Events);
            Assert.Equal(700.1, row.Value.P95TotalMs);
            Assert.Equal(3900.0, row.Value.MinEventThroughputPerSecond);
        }
        finally
        {
            // Remove the scratch file even when an assertion fails.
            File.Delete(tempFile);
        }
    }
}
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Vex.Tests;
// Verifies that BaselineLoader parses a baseline CSV (header + data rows) into
// entries keyed by scenario id.
public sealed class BaselineLoaderTests
{
    [Fact]
    public async Task LoadAsync_ReadsEntries()
    {
        // BaselineLoader reads from a real path, so stage a scratch file on disk.
        var path = Path.GetTempFileName();
        try
        {
            // Header row plus a single 15-column scenario row.
            await File.WriteAllTextAsync(
                path,
                "scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb\n" +
                "vex_ingest_baseline,5,4000,24000,12000,620.5,700.1,820.9,320.5,300.0,9800.0,9100.0,4200.0,3900.0,150.0\n");
            var baseline = await BaselineLoader.LoadAsync(path, CancellationToken.None);
            // Exactly one entry, keyed by the scenario id from column 0, with a spot-check
            // of integer and floating-point columns.
            var entry = Assert.Single(baseline);
            Assert.Equal("vex_ingest_baseline", entry.Key);
            Assert.Equal(4000, entry.Value.Observations);
            Assert.Equal(24000, entry.Value.Statements);
            Assert.Equal(12000, entry.Value.Events);
            Assert.Equal(700.1, entry.Value.P95TotalMs);
            Assert.Equal(3900.0, entry.Value.MinEventThroughputPerSecond);
        }
        finally
        {
            // Clean up the scratch file regardless of test outcome.
            File.Delete(path);
        }
    }
}

View File

@@ -1,83 +1,83 @@
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
using StellaOps.Bench.LinkNotMerge.Vex.Reporting;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Vex.Tests;
// Verifies regression detection in BenchmarkScenarioReport: breaches are flagged
// when a run is worse than its baseline, and no breaches occur without a baseline.
public sealed class BenchmarkScenarioReportTests
{
    [Fact]
    public void RegressionDetection_FlagsBreaches()
    {
        // Current run: slower (p95 700 vs baseline 560) and lower throughput
        // (min 7000 obs/s vs baseline min 8500; min 3200 events/s vs 3800).
        var result = new VexScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            ObservationCount: 1000,
            AliasGroups: 100,
            StatementCount: 6000,
            EventCount: 3200,
            TotalStatistics: new DurationStatistics(600, 700, 750),
            InsertStatistics: new DurationStatistics(320, 360, 380),
            CorrelationStatistics: new DurationStatistics(280, 320, 340),
            ObservationThroughputStatistics: new ThroughputStatistics(8000, 7000),
            EventThroughputStatistics: new ThroughputStatistics(3500, 3200),
            AllocationStatistics: new AllocationStatistics(180),
            ThresholdMs: null,
            MinObservationThroughputPerSecond: null,
            MinEventThroughputPerSecond: null,
            MaxAllocatedThresholdMb: null);
        // Baseline metrics the run above regresses against on every dimension.
        var baseline = new BaselineEntry(
            ScenarioId: "scenario",
            Iterations: 3,
            Observations: 1000,
            Statements: 6000,
            Events: 3200,
            MeanTotalMs: 520,
            P95TotalMs: 560,
            MaxTotalMs: 580,
            MeanInsertMs: 250,
            MeanCorrelationMs: 260,
            MeanObservationThroughputPerSecond: 9000,
            MinObservationThroughputPerSecond: 8500,
            MeanEventThroughputPerSecond: 4200,
            MinEventThroughputPerSecond: 3800,
            MaxAllocatedMb: 140);
        // At a 1.1x regression limit each regressed dimension must be reported,
        // including a human-readable "event throughput" failure message.
        var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1);
        Assert.True(report.DurationRegressionBreached);
        Assert.True(report.ObservationThroughputRegressionBreached);
        Assert.True(report.EventThroughputRegressionBreached);
        Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("event throughput"));
    }
    [Fact]
    public void RegressionDetection_NoBaseline_NoBreaches()
    {
        // With no baseline and no regression limit, nothing can be flagged.
        var result = new VexScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            ObservationCount: 1000,
            AliasGroups: 100,
            StatementCount: 6000,
            EventCount: 3200,
            TotalStatistics: new DurationStatistics(480, 520, 540),
            InsertStatistics: new DurationStatistics(260, 280, 300),
            CorrelationStatistics: new DurationStatistics(220, 240, 260),
            ObservationThroughputStatistics: new ThroughputStatistics(9000, 8800),
            EventThroughputStatistics: new ThroughputStatistics(4200, 4100),
            AllocationStatistics: new AllocationStatistics(150),
            ThresholdMs: null,
            MinObservationThroughputPerSecond: null,
            MinEventThroughputPerSecond: null,
            MaxAllocatedThresholdMb: null);
        var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null);
        Assert.False(report.RegressionBreached);
        Assert.Empty(report.BuildRegressionFailureMessages());
    }
}
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
using StellaOps.Bench.LinkNotMerge.Vex.Reporting;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Vex.Tests;
// Duplicate rendering of the BenchmarkScenarioReport regression tests: a run worse
// than its baseline trips every breach flag; no baseline means no breaches.
public sealed class BenchmarkScenarioReportTests
{
    [Fact]
    public void RegressionDetection_FlagsBreaches()
    {
        // Current run: slower (p95 700 vs baseline 560) and lower throughput
        // (min 7000 obs/s vs baseline min 8500; min 3200 events/s vs 3800).
        var result = new VexScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            ObservationCount: 1000,
            AliasGroups: 100,
            StatementCount: 6000,
            EventCount: 3200,
            TotalStatistics: new DurationStatistics(600, 700, 750),
            InsertStatistics: new DurationStatistics(320, 360, 380),
            CorrelationStatistics: new DurationStatistics(280, 320, 340),
            ObservationThroughputStatistics: new ThroughputStatistics(8000, 7000),
            EventThroughputStatistics: new ThroughputStatistics(3500, 3200),
            AllocationStatistics: new AllocationStatistics(180),
            ThresholdMs: null,
            MinObservationThroughputPerSecond: null,
            MinEventThroughputPerSecond: null,
            MaxAllocatedThresholdMb: null);
        // Baseline metrics the run above regresses against on every dimension.
        var baseline = new BaselineEntry(
            ScenarioId: "scenario",
            Iterations: 3,
            Observations: 1000,
            Statements: 6000,
            Events: 3200,
            MeanTotalMs: 520,
            P95TotalMs: 560,
            MaxTotalMs: 580,
            MeanInsertMs: 250,
            MeanCorrelationMs: 260,
            MeanObservationThroughputPerSecond: 9000,
            MinObservationThroughputPerSecond: 8500,
            MeanEventThroughputPerSecond: 4200,
            MinEventThroughputPerSecond: 3800,
            MaxAllocatedMb: 140);
        // At a 1.1x regression limit each regressed dimension must be reported.
        var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1);
        Assert.True(report.DurationRegressionBreached);
        Assert.True(report.ObservationThroughputRegressionBreached);
        Assert.True(report.EventThroughputRegressionBreached);
        Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("event throughput"));
    }
    [Fact]
    public void RegressionDetection_NoBaseline_NoBreaches()
    {
        // With no baseline and no regression limit, nothing can be flagged.
        var result = new VexScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            ObservationCount: 1000,
            AliasGroups: 100,
            StatementCount: 6000,
            EventCount: 3200,
            TotalStatistics: new DurationStatistics(480, 520, 540),
            InsertStatistics: new DurationStatistics(260, 280, 300),
            CorrelationStatistics: new DurationStatistics(220, 240, 260),
            ObservationThroughputStatistics: new ThroughputStatistics(9000, 8800),
            EventThroughputStatistics: new ThroughputStatistics(4200, 4100),
            AllocationStatistics: new AllocationStatistics(150),
            ThresholdMs: null,
            MinObservationThroughputPerSecond: null,
            MinEventThroughputPerSecond: null,
            MaxAllocatedThresholdMb: null);
        var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null);
        Assert.False(report.RegressionBreached);
        Assert.Empty(report.BuildRegressionFailureMessages());
    }
}

View File

@@ -1,34 +1,34 @@
using System.Linq;
using System.Threading;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Vex.Tests;
/// <summary>Smoke-tests <see cref="VexScenarioRunner"/> end-to-end execution.</summary>
public sealed class VexScenarioRunnerTests
{
    [Fact]
    public void Execute_ComputesEvents()
    {
        // Small synthetic scenario: 600 observations spread over 120 alias groups.
        var scenario = new VexScenarioConfig
        {
            Id = "unit",
            Observations = 600,
            AliasGroups = 120,
            StatementsPerObservation = 5,
            ProductsPerObservation = 3,
            Tenants = 2,
            BatchSize = 120,
            Seed = 12345,
        };

        var execution = new VexScenarioRunner(scenario).Execute(2, CancellationToken.None);

        // Observation count is exact; statements/events are derived and must be non-empty.
        Assert.Equal(600, execution.ObservationCount);
        Assert.True(execution.StatementCount > 0);
        Assert.True(execution.EventCount > 0);
        // Every iteration must record positive wall-clock time and event throughput.
        Assert.All(execution.TotalDurationsMs, elapsedMs => Assert.True(elapsedMs > 0));
        Assert.All(execution.EventThroughputsPerSecond, perSecond => Assert.True(perSecond > 0));
        // The aggregation result and the top-level result must agree on event totals.
        Assert.Equal(execution.AggregationResult.EventCount, execution.EventCount);
    }
}
using System.Linq;
using System.Threading;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Vex.Tests;
// Smoke test for VexScenarioRunner: a small synthetic scenario must produce
// observations, statements, events, and positive timing/throughput samples.
public sealed class VexScenarioRunnerTests
{
    [Fact]
    public void Execute_ComputesEvents()
    {
        // 600 observations over 120 alias groups, two tenants, fixed seed for determinism.
        var config = new VexScenarioConfig
        {
            Id = "unit",
            Observations = 600,
            AliasGroups = 120,
            StatementsPerObservation = 5,
            ProductsPerObservation = 3,
            Tenants = 2,
            BatchSize = 120,
            Seed = 12345,
        };
        var runner = new VexScenarioRunner(config);
        // Two iterations of the scenario.
        var result = runner.Execute(2, CancellationToken.None);
        Assert.Equal(600, result.ObservationCount);
        Assert.True(result.StatementCount > 0);
        Assert.True(result.EventCount > 0);
        // Every iteration must record positive wall-clock time and event throughput.
        Assert.All(result.TotalDurationsMs, duration => Assert.True(duration > 0));
        Assert.All(result.EventThroughputsPerSecond, throughput => Assert.True(throughput > 0));
        // Aggregation output and top-level result must agree on event totals.
        Assert.Equal(result.AggregationResult.EventCount, result.EventCount);
    }
}

View File

@@ -1,18 +1,18 @@
namespace StellaOps.Bench.LinkNotMerge.Vex.Baseline;
/// <summary>
/// One row of the benchmark baseline CSV: the recorded metrics for a single scenario,
/// later compared against fresh runs to detect regressions. Column order matches the
/// CSV layout consumed by <c>BaselineLoader</c>.
/// </summary>
internal sealed record BaselineEntry(
    string ScenarioId,
    int Iterations,
    int Observations,
    int Statements,
    int Events,
    double MeanTotalMs,
    double P95TotalMs,
    double MaxTotalMs,
    double MeanInsertMs,
    double MeanCorrelationMs,
    double MeanObservationThroughputPerSecond,
    double MinObservationThroughputPerSecond,
    double MeanEventThroughputPerSecond,
    double MinEventThroughputPerSecond,
    double MaxAllocatedMb);
namespace StellaOps.Bench.LinkNotMerge.Vex.Baseline;
/// <summary>
/// Duplicate rendering of the baseline record: metrics captured for one scenario,
/// in the same column order as the baseline CSV.
/// </summary>
internal sealed record BaselineEntry(
    string ScenarioId,
    int Iterations,
    int Observations,
    int Statements,
    int Events,
    double MeanTotalMs,
    double P95TotalMs,
    double MaxTotalMs,
    double MeanInsertMs,
    double MeanCorrelationMs,
    double MeanObservationThroughputPerSecond,
    double MinObservationThroughputPerSecond,
    double MeanEventThroughputPerSecond,
    double MinEventThroughputPerSecond,
    double MaxAllocatedMb);

View File

@@ -1,87 +1,87 @@
using System.Globalization;
namespace StellaOps.Bench.LinkNotMerge.Vex.Baseline;
/// <summary>
/// Loads benchmark baseline entries from a CSV file (header row + one row per scenario).
/// </summary>
internal static class BaselineLoader
{
    /// <summary>
    /// Reads the baseline CSV at <paramref name="path"/> into a case-insensitive
    /// dictionary keyed by scenario id. A missing file yields an empty dictionary
    /// (treated by callers as "no baseline yet").
    /// </summary>
    /// <param name="path">Path to the baseline CSV; resolved to a full path.</param>
    /// <param name="cancellationToken">Cancels the read between and during line reads.</param>
    /// <returns>Entries keyed by scenario id; last row wins on duplicate ids.</returns>
    /// <exception cref="InvalidOperationException">A data row has fewer than 15 columns or an unparsable value.</exception>
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }
        var result = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        // Open for truly asynchronous reads and hint sequential access: the file is
        // consumed exactly once, top to bottom.
        await using var stream = new FileStream(
            resolved,
            FileMode.Open,
            FileAccess.Read,
            FileShare.Read,
            bufferSize: 4096,
            FileOptions.Asynchronous | FileOptions.SequentialScan);
        using var reader = new StreamReader(stream);
        var lineNumber = 0;
        while (true)
        {
            // Flow the token into the read itself (previously it was only polled
            // between lines, so a pending read could not be cancelled).
            var line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false);
            if (line is null)
            {
                break;
            }
            lineNumber++;
            if (lineNumber == 1 || string.IsNullOrWhiteSpace(line))
            {
                // Skip the header row and any blank lines.
                continue;
            }
            var parts = line.Split(',', StringSplitOptions.TrimEntries);
            if (parts.Length < 15)
            {
                throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 15 columns, found {parts.Length}).");
            }
            // Column order mirrors the BaselineEntry positional parameters / CSV header.
            var entry = new BaselineEntry(
                ScenarioId: parts[0],
                Iterations: ParseInt(parts[1], resolved, lineNumber),
                Observations: ParseInt(parts[2], resolved, lineNumber),
                Statements: ParseInt(parts[3], resolved, lineNumber),
                Events: ParseInt(parts[4], resolved, lineNumber),
                MeanTotalMs: ParseDouble(parts[5], resolved, lineNumber),
                P95TotalMs: ParseDouble(parts[6], resolved, lineNumber),
                MaxTotalMs: ParseDouble(parts[7], resolved, lineNumber),
                MeanInsertMs: ParseDouble(parts[8], resolved, lineNumber),
                MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber),
                MeanObservationThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber),
                MinObservationThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber),
                MeanEventThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber),
                MinEventThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber),
                MaxAllocatedMb: ParseDouble(parts[14], resolved, lineNumber));
            result[entry.ScenarioId] = entry;
        }
        return result;
    }

    /// <summary>Parses a culture-invariant integer or throws with file/line context.</summary>
    private static int ParseInt(string value, string file, int line)
    {
        if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }
        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
    }

    /// <summary>Parses a culture-invariant floating-point value or throws with file/line context.</summary>
    private static double ParseDouble(string value, string file, int line)
    {
        if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }
        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
    }
}
using System.Globalization;
namespace StellaOps.Bench.LinkNotMerge.Vex.Baseline;
// Duplicate rendering of BaselineLoader: parses the baseline CSV into a dictionary
// of BaselineEntry keyed (case-insensitively) by scenario id.
internal static class BaselineLoader
{
    // Returns an empty dictionary when the file does not exist ("no baseline yet").
    // Throws InvalidOperationException when a data row is malformed.
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }
        var result = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var reader = new StreamReader(stream);
        var lineNumber = 0;
        while (true)
        {
            // Cancellation is only observed between line reads, not during one.
            cancellationToken.ThrowIfCancellationRequested();
            var line = await reader.ReadLineAsync().ConfigureAwait(false);
            if (line is null)
            {
                break;
            }
            lineNumber++;
            if (lineNumber == 1 || string.IsNullOrWhiteSpace(line))
            {
                // Skip the header row and blank lines.
                continue;
            }
            var parts = line.Split(',', StringSplitOptions.TrimEntries);
            if (parts.Length < 15)
            {
                throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 15 columns, found {parts.Length}).");
            }
            // Column order mirrors the BaselineEntry positional parameters / CSV header.
            var entry = new BaselineEntry(
                ScenarioId: parts[0],
                Iterations: ParseInt(parts[1], resolved, lineNumber),
                Observations: ParseInt(parts[2], resolved, lineNumber),
                Statements: ParseInt(parts[3], resolved, lineNumber),
                Events: ParseInt(parts[4], resolved, lineNumber),
                MeanTotalMs: ParseDouble(parts[5], resolved, lineNumber),
                P95TotalMs: ParseDouble(parts[6], resolved, lineNumber),
                MaxTotalMs: ParseDouble(parts[7], resolved, lineNumber),
                MeanInsertMs: ParseDouble(parts[8], resolved, lineNumber),
                MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber),
                MeanObservationThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber),
                MinObservationThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber),
                MeanEventThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber),
                MinEventThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber),
                MaxAllocatedMb: ParseDouble(parts[14], resolved, lineNumber));
            // Last row wins when a scenario id repeats.
            result[entry.ScenarioId] = entry;
        }
        return result;
    }
    // Parses a culture-invariant integer; throws with file/line context on failure.
    private static int ParseInt(string value, string file, int line)
    {
        if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }
        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
    }
    // Parses a culture-invariant double; throws with file/line context on failure.
    private static double ParseDouble(string value, string file, int line)
    {
        if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }
        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
    }
}

View File

@@ -1,376 +1,376 @@
using System.Globalization;
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
using StellaOps.Bench.LinkNotMerge.Vex.Reporting;
namespace StellaOps.Bench.LinkNotMerge.Vex;
/// <summary>
/// CLI entry point for the link-not-merge VEX benchmark harness: runs each configured
/// scenario, prints a summary table, optionally writes CSV/JSON/Prometheus outputs,
/// and compares results against a stored baseline.
/// </summary>
internal static class Program
{
    /// <summary>
    /// Runs all scenarios. Returns 0 on success, 1 when any threshold/regression
    /// check fails or an unhandled error occurs.
    /// </summary>
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = ProgramOptions.Parse(args);
            var config = await VexBenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
            var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);
            var results = new List<VexScenarioResult>();
            var reports = new List<BenchmarkScenarioReport>();
            var failures = new List<string>();
            foreach (var scenario in config.Scenarios)
            {
                // Fix: honor the --iterations CLI flag ("Override iteration count").
                // It was parsed into options.Iterations but never consumed; only the
                // config default reached ResolveIterations. Scenario-level overrides
                // still take precedence inside ResolveIterations.
                var iterations = scenario.ResolveIterations(options.Iterations ?? config.Iterations);
                var runner = new VexScenarioRunner(scenario);
                var execution = runner.Execute(iterations, CancellationToken.None);
                // Collapse per-iteration samples into summary statistics.
                var totalStats = DurationStatistics.From(execution.TotalDurationsMs);
                var insertStats = DurationStatistics.From(execution.InsertDurationsMs);
                var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs);
                var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
                var observationThroughputStats = ThroughputStatistics.From(execution.ObservationThroughputsPerSecond);
                var eventThroughputStats = ThroughputStatistics.From(execution.EventThroughputsPerSecond);
                // Limit precedence: per-scenario override > CLI flag > config default.
                var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
                var observationFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
                var eventFloor = scenario.MinEventThroughputPerSecond ?? options.MinEventThroughputPerSecond ?? config.MinEventThroughputPerSecond;
                var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;
                var result = new VexScenarioResult(
                    scenario.ScenarioId,
                    scenario.DisplayLabel,
                    iterations,
                    execution.ObservationCount,
                    execution.AliasGroups,
                    execution.StatementCount,
                    execution.EventCount,
                    totalStats,
                    insertStats,
                    correlationStats,
                    observationThroughputStats,
                    eventThroughputStats,
                    allocationStats,
                    thresholdMs,
                    observationFloor,
                    eventFloor,
                    allocationLimit);
                results.Add(result);
                // Absolute limit checks (independent of the baseline comparison below).
                if (thresholdMs is { } threshold && result.TotalStatistics.MaxMs > threshold)
                {
                    failures.Add($"{result.Id} exceeded total latency threshold: {result.TotalStatistics.MaxMs:F2} ms > {threshold:F2} ms");
                }
                if (observationFloor is { } obsFloor && result.ObservationThroughputStatistics.MinPerSecond < obsFloor)
                {
                    failures.Add($"{result.Id} fell below observation throughput floor: {result.ObservationThroughputStatistics.MinPerSecond:N0} obs/s < {obsFloor:N0} obs/s");
                }
                if (eventFloor is { } evtFloor && result.EventThroughputStatistics.MinPerSecond < evtFloor)
                {
                    failures.Add($"{result.Id} fell below event throughput floor: {result.EventThroughputStatistics.MinPerSecond:N0} events/s < {evtFloor:N0} events/s");
                }
                if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit)
                {
                    failures.Add($"{result.Id} exceeded allocation budget: {result.AllocationStatistics.MaxAllocatedMb:F2} MB > {limit:F2} MB");
                }
                // Baseline comparison; baselineEntry stays null for scenarios with no
                // recorded baseline, which the report treats as "nothing to compare".
                baseline.TryGetValue(result.Id, out var baselineEntry);
                var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit);
                reports.Add(report);
                failures.AddRange(report.BuildRegressionFailureMessages());
            }
            TablePrinter.Print(results);
            if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
            {
                CsvWriter.Write(options.CsvOutPath!, results);
            }
            if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
            {
                var metadata = new BenchmarkJsonMetadata(
                    SchemaVersion: "linknotmerge-vex-bench/1.0",
                    CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(),
                    Commit: options.Commit,
                    Environment: options.Environment);
                await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false);
            }
            if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
            {
                PrometheusWriter.Write(options.PrometheusOutPath!, reports);
            }
            if (failures.Count > 0)
            {
                Console.Error.WriteLine();
                Console.Error.WriteLine("Benchmark failures detected:");
                foreach (var failure in failures.Distinct())
                {
                    Console.Error.WriteLine($" - {failure}");
                }
                return 1;
            }
            return 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"linknotmerge-vex-bench error: {ex.Message}");
            return 1;
        }
    }

    /// <summary>Parsed command-line options; null members mean "not supplied".</summary>
    private sealed record ProgramOptions(
        string ConfigPath,
        int? Iterations,
        double? ThresholdMs,
        double? MinThroughputPerSecond,
        double? MinEventThroughputPerSecond,
        double? MaxAllocatedMb,
        string? CsvOutPath,
        string? JsonOutPath,
        string? PrometheusOutPath,
        string BaselinePath,
        DateTimeOffset? CapturedAtUtc,
        string? Commit,
        string? Environment,
        double? RegressionLimit)
    {
        /// <summary>
        /// Parses CLI arguments. Throws <see cref="ArgumentException"/> on an unknown
        /// flag or a flag missing its value; --help prints usage and exits the process.
        /// </summary>
        public static ProgramOptions Parse(string[] args)
        {
            var configPath = DefaultConfigPath();
            var baselinePath = DefaultBaselinePath();
            int? iterations = null;
            double? thresholdMs = null;
            double? minThroughput = null;
            double? minEventThroughput = null;
            double? maxAllocated = null;
            string? csvOut = null;
            string? jsonOut = null;
            string? promOut = null;
            DateTimeOffset? capturedAt = null;
            string? commit = null;
            string? environment = null;
            double? regressionLimit = null;
            for (var index = 0; index < args.Length; index++)
            {
                var current = args[index];
                switch (current)
                {
                    case "--config":
                        EnsureNext(args, index);
                        configPath = Path.GetFullPath(args[++index]);
                        break;
                    case "--iterations":
                        EnsureNext(args, index);
                        iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--threshold-ms":
                        EnsureNext(args, index);
                        thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-throughput":
                        EnsureNext(args, index);
                        minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-event-throughput":
                        EnsureNext(args, index);
                        minEventThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--max-allocated-mb":
                        EnsureNext(args, index);
                        maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--csv":
                        EnsureNext(args, index);
                        csvOut = args[++index];
                        break;
                    case "--json":
                        EnsureNext(args, index);
                        jsonOut = args[++index];
                        break;
                    case "--prometheus":
                        EnsureNext(args, index);
                        promOut = args[++index];
                        break;
                    case "--baseline":
                        EnsureNext(args, index);
                        baselinePath = Path.GetFullPath(args[++index]);
                        break;
                    case "--captured-at":
                        EnsureNext(args, index);
                        capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
                        break;
                    case "--commit":
                        EnsureNext(args, index);
                        commit = args[++index];
                        break;
                    case "--environment":
                        EnsureNext(args, index);
                        environment = args[++index];
                        break;
                    case "--regression-limit":
                        EnsureNext(args, index);
                        regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--help":
                    case "-h":
                        PrintUsage();
                        System.Environment.Exit(0);
                        break;
                    default:
                        throw new ArgumentException($"Unknown argument '{current}'.");
                }
            }
            return new ProgramOptions(
                configPath,
                iterations,
                thresholdMs,
                minThroughput,
                minEventThroughput,
                maxAllocated,
                csvOut,
                jsonOut,
                promOut,
                baselinePath,
                capturedAt,
                commit,
                environment,
                regressionLimit);
        }

        // Default config.json lives in the bench root: three levels up from the
        // binary (bin/<cfg>/<tfm> -> project dir), then one more level up.
        private static string DefaultConfigPath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "config.json");
        }

        // Default baseline.csv resolves the same way as DefaultConfigPath.
        private static string DefaultBaselinePath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "baseline.csv");
        }

        // Guards flags that require a value; the caller consumes args[index + 1] next.
        private static void EnsureNext(string[] args, int index)
        {
            if (index + 1 >= args.Length)
            {
                throw new ArgumentException("Missing value for argument.");
            }
        }

        // Writes the CLI help text to stdout.
        private static void PrintUsage()
        {
            Console.WriteLine("Usage: linknotmerge-vex-bench [options]");
            Console.WriteLine();
            Console.WriteLine("Options:");
            Console.WriteLine(" --config <path> Path to benchmark configuration JSON.");
            Console.WriteLine(" --iterations <count> Override iteration count.");
            Console.WriteLine(" --threshold-ms <value> Global latency threshold in milliseconds.");
            Console.WriteLine(" --min-throughput <value> Observation throughput floor (observations/second).");
            Console.WriteLine(" --min-event-throughput <value> Event emission throughput floor (events/second).");
            Console.WriteLine(" --max-allocated-mb <value> Global allocation ceiling (MB).");
            Console.WriteLine(" --csv <path> Write CSV results to path.");
            Console.WriteLine(" --json <path> Write JSON results to path.");
            Console.WriteLine(" --prometheus <path> Write Prometheus exposition metrics to path.");
            Console.WriteLine(" --baseline <path> Baseline CSV path.");
            Console.WriteLine(" --captured-at <iso8601> Timestamp to embed in JSON metadata.");
            Console.WriteLine(" --commit <sha> Commit identifier for metadata.");
            Console.WriteLine(" --environment <name> Environment label for metadata.");
            Console.WriteLine(" --regression-limit <value> Regression multiplier (default 1.15).");
        }
    }
}
/// <summary>Renders benchmark results as a fixed-header console table.</summary>
internal static class TablePrinter
{
    public static void Print(IEnumerable<VexScenarioResult> results)
    {
        // Header and separator rows, followed by one pipe-delimited row per scenario.
        Console.WriteLine("Scenario | Observations | Statements | Events | Total(ms) | Correl(ms) | Insert(ms) | Obs k/s | Evnt k/s | Alloc(MB)");
        Console.WriteLine("---------------------------- | ------------- | ---------- | ------- | ---------- | ---------- | ----------- | ------- | -------- | --------");
        foreach (var result in results)
        {
            // Each *Column property is already formatted/padded by the result type.
            var columns = new[]
            {
                result.IdColumn,
                result.ObservationsColumn,
                result.StatementColumn,
                result.EventColumn,
                result.TotalMeanColumn,
                result.CorrelationMeanColumn,
                result.InsertMeanColumn,
                result.ObservationThroughputColumn,
                result.EventThroughputColumn,
                result.AllocatedColumn,
            };
            Console.WriteLine(string.Join(" | ", columns));
        }
    }
}
/// <summary>Writes benchmark results as a CSV file in the baseline schema.</summary>
internal static class CsvWriter
{
    public static void Write(string path, IEnumerable<VexScenarioResult> results)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(results);

        // Resolve the target and make sure its directory exists before opening.
        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        using var writer = new StreamWriter(stream);
        writer.WriteLine("scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb");
        foreach (var result in results)
        {
            // Culture-invariant formatting keeps the file machine-readable everywhere;
            // field order matches the header above.
            var fields = new[]
            {
                result.Id,
                result.Iterations.ToString(CultureInfo.InvariantCulture),
                result.ObservationCount.ToString(CultureInfo.InvariantCulture),
                result.StatementCount.ToString(CultureInfo.InvariantCulture),
                result.EventCount.ToString(CultureInfo.InvariantCulture),
                result.TotalStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture),
                result.TotalStatistics.P95Ms.ToString("F4", CultureInfo.InvariantCulture),
                result.TotalStatistics.MaxMs.ToString("F4", CultureInfo.InvariantCulture),
                result.InsertStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture),
                result.CorrelationStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture),
                result.ObservationThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture),
                result.ObservationThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture),
                result.EventThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture),
                result.EventThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture),
                result.AllocationStatistics.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture),
            };
            writer.WriteLine(string.Join(',', fields));
        }
    }
}
using System.Globalization;
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
using StellaOps.Bench.LinkNotMerge.Vex.Reporting;
namespace StellaOps.Bench.LinkNotMerge.Vex;
internal static class Program
{
// Duplicate rendering of the benchmark entry point: runs every configured scenario,
// prints a summary table, optionally writes CSV/JSON/Prometheus outputs, compares
// against the baseline, and returns 0 (success) or 1 (any failure/error).
public static async Task<int> Main(string[] args)
{
    try
    {
        var options = ProgramOptions.Parse(args);
        var config = await VexBenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
        var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);
        var results = new List<VexScenarioResult>();
        var reports = new List<BenchmarkScenarioReport>();
        var failures = new List<string>();
        foreach (var scenario in config.Scenarios)
        {
            // NOTE(review): only config.Iterations reaches ResolveIterations here;
            // options.Iterations (the --iterations flag) appears unused — confirm.
            var iterations = scenario.ResolveIterations(config.Iterations);
            var runner = new VexScenarioRunner(scenario);
            var execution = runner.Execute(iterations, CancellationToken.None);
            // Collapse per-iteration samples into summary statistics.
            var totalStats = DurationStatistics.From(execution.TotalDurationsMs);
            var insertStats = DurationStatistics.From(execution.InsertDurationsMs);
            var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs);
            var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
            var observationThroughputStats = ThroughputStatistics.From(execution.ObservationThroughputsPerSecond);
            var eventThroughputStats = ThroughputStatistics.From(execution.EventThroughputsPerSecond);
            // Limit precedence: per-scenario override > CLI flag > config default.
            var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
            var observationFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
            var eventFloor = scenario.MinEventThroughputPerSecond ?? options.MinEventThroughputPerSecond ?? config.MinEventThroughputPerSecond;
            var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;
            var result = new VexScenarioResult(
                scenario.ScenarioId,
                scenario.DisplayLabel,
                iterations,
                execution.ObservationCount,
                execution.AliasGroups,
                execution.StatementCount,
                execution.EventCount,
                totalStats,
                insertStats,
                correlationStats,
                observationThroughputStats,
                eventThroughputStats,
                allocationStats,
                thresholdMs,
                observationFloor,
                eventFloor,
                allocationLimit);
            results.Add(result);
            // Absolute limit checks (independent of the baseline comparison below).
            if (thresholdMs is { } threshold && result.TotalStatistics.MaxMs > threshold)
            {
                failures.Add($"{result.Id} exceeded total latency threshold: {result.TotalStatistics.MaxMs:F2} ms > {threshold:F2} ms");
            }
            if (observationFloor is { } obsFloor && result.ObservationThroughputStatistics.MinPerSecond < obsFloor)
            {
                failures.Add($"{result.Id} fell below observation throughput floor: {result.ObservationThroughputStatistics.MinPerSecond:N0} obs/s < {obsFloor:N0} obs/s");
            }
            if (eventFloor is { } evtFloor && result.EventThroughputStatistics.MinPerSecond < evtFloor)
            {
                failures.Add($"{result.Id} fell below event throughput floor: {result.EventThroughputStatistics.MinPerSecond:N0} events/s < {evtFloor:N0} events/s");
            }
            if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit)
            {
                failures.Add($"{result.Id} exceeded allocation budget: {result.AllocationStatistics.MaxAllocatedMb:F2} MB > {limit:F2} MB");
            }
            // Baseline comparison; baselineEntry stays null for scenarios without one.
            baseline.TryGetValue(result.Id, out var baselineEntry);
            var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit);
            reports.Add(report);
            failures.AddRange(report.BuildRegressionFailureMessages());
        }
        TablePrinter.Print(results);
        if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
        {
            CsvWriter.Write(options.CsvOutPath!, results);
        }
        if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
        {
            var metadata = new BenchmarkJsonMetadata(
                SchemaVersion: "linknotmerge-vex-bench/1.0",
                CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(),
                Commit: options.Commit,
                Environment: options.Environment);
            await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false);
        }
        if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
        {
            PrometheusWriter.Write(options.PrometheusOutPath!, reports);
        }
        if (failures.Count > 0)
        {
            // De-duplicate messages (the same failure can arrive via both the
            // absolute checks and the regression report).
            Console.Error.WriteLine();
            Console.Error.WriteLine("Benchmark failures detected:");
            foreach (var failure in failures.Distinct())
            {
                Console.Error.WriteLine($" - {failure}");
            }
            return 1;
        }
        return 0;
    }
    catch (Exception ex)
    {
        // Any unhandled error is reported on stderr and mapped to exit code 1.
        Console.Error.WriteLine($"linknotmerge-vex-bench error: {ex.Message}");
        return 1;
    }
}
/// <summary>
/// Immutable bag of command-line settings for the benchmark runner.
/// Nullable members are optional overrides; when null the caller falls back
/// to per-scenario values or the config-file defaults.
/// </summary>
private sealed record ProgramOptions(
    string ConfigPath,
    int? Iterations,
    double? ThresholdMs,
    double? MinThroughputPerSecond,
    double? MinEventThroughputPerSecond,
    double? MaxAllocatedMb,
    string? CsvOutPath,
    string? JsonOutPath,
    string? PrometheusOutPath,
    string BaselinePath,
    DateTimeOffset? CapturedAtUtc,
    string? Commit,
    string? Environment,
    double? RegressionLimit)
{
    /// <summary>
    /// Parses <paramref name="args"/> into a <see cref="ProgramOptions"/>.
    /// Every value-taking switch consumes the following token via
    /// <c>args[++index]</c>; numbers are parsed with the invariant culture.
    /// "--help"/"-h" prints usage and exits the process with code 0; any
    /// unknown switch raises <see cref="ArgumentException"/>.
    /// </summary>
    public static ProgramOptions Parse(string[] args)
    {
        // Defaults are resolved relative to the build output directory.
        var configPath = DefaultConfigPath();
        var baselinePath = DefaultBaselinePath();
        int? iterations = null;
        double? thresholdMs = null;
        double? minThroughput = null;
        double? minEventThroughput = null;
        double? maxAllocated = null;
        string? csvOut = null;
        string? jsonOut = null;
        string? promOut = null;
        DateTimeOffset? capturedAt = null;
        string? commit = null;
        string? environment = null;
        double? regressionLimit = null;
        for (var index = 0; index < args.Length; index++)
        {
            var current = args[index];
            switch (current)
            {
                case "--config":
                    EnsureNext(args, index);
                    configPath = Path.GetFullPath(args[++index]);
                    break;
                case "--iterations":
                    EnsureNext(args, index);
                    iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                    break;
                case "--threshold-ms":
                    EnsureNext(args, index);
                    thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                    break;
                case "--min-throughput":
                    EnsureNext(args, index);
                    minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                    break;
                case "--min-event-throughput":
                    EnsureNext(args, index);
                    minEventThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                    break;
                case "--max-allocated-mb":
                    EnsureNext(args, index);
                    maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
                    break;
                case "--csv":
                    EnsureNext(args, index);
                    csvOut = args[++index];
                    break;
                case "--json":
                    EnsureNext(args, index);
                    jsonOut = args[++index];
                    break;
                case "--prometheus":
                    EnsureNext(args, index);
                    promOut = args[++index];
                    break;
                case "--baseline":
                    EnsureNext(args, index);
                    baselinePath = Path.GetFullPath(args[++index]);
                    break;
                case "--captured-at":
                    EnsureNext(args, index);
                    // Timestamps without an offset are treated as UTC.
                    capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
                    break;
                case "--commit":
                    EnsureNext(args, index);
                    commit = args[++index];
                    break;
                case "--environment":
                    EnsureNext(args, index);
                    environment = args[++index];
                    break;
                case "--regression-limit":
                    EnsureNext(args, index);
                    regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                    break;
                case "--help":
                case "-h":
                    PrintUsage();
                    // Fully qualified because the record's `Environment` member
                    // shadows System.Environment here.
                    System.Environment.Exit(0);
                    break;
                default:
                    throw new ArgumentException($"Unknown argument '{current}'.");
            }
        }
        return new ProgramOptions(
            configPath,
            iterations,
            thresholdMs,
            minThroughput,
            minEventThroughput,
            maxAllocated,
            csvOut,
            jsonOut,
            promOut,
            baselinePath,
            capturedAt,
            commit,
            environment,
            regressionLimit);
    }

    // Walks from bin/<cfg>/<tfm> up to the project directory, then one level
    // further to the bench root where config.json lives.
    private static string DefaultConfigPath()
    {
        var binaryDir = AppContext.BaseDirectory;
        var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
        var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
        return Path.Combine(benchRoot, "config.json");
    }

    // Same directory walk as DefaultConfigPath, targeting baseline.csv.
    private static string DefaultBaselinePath()
    {
        var binaryDir = AppContext.BaseDirectory;
        var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
        var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
        return Path.Combine(benchRoot, "baseline.csv");
    }

    // Guards value-taking switches: throws when the switch at `index` is the
    // last token and therefore has no value to consume.
    private static void EnsureNext(string[] args, int index)
    {
        if (index + 1 >= args.Length)
        {
            throw new ArgumentException("Missing value for argument.");
        }
    }

    // Writes the CLI help text to stdout (used by --help/-h).
    private static void PrintUsage()
    {
        Console.WriteLine("Usage: linknotmerge-vex-bench [options]");
        Console.WriteLine();
        Console.WriteLine("Options:");
        Console.WriteLine("  --config <path>                Path to benchmark configuration JSON.");
        Console.WriteLine("  --iterations <count>           Override iteration count.");
        Console.WriteLine("  --threshold-ms <value>         Global latency threshold in milliseconds.");
        Console.WriteLine("  --min-throughput <value>       Observation throughput floor (observations/second).");
        Console.WriteLine("  --min-event-throughput <value> Event emission throughput floor (events/second).");
        Console.WriteLine("  --max-allocated-mb <value>     Global allocation ceiling (MB).");
        Console.WriteLine("  --csv <path>                   Write CSV results to path.");
        Console.WriteLine("  --json <path>                  Write JSON results to path.");
        Console.WriteLine("  --prometheus <path>            Write Prometheus exposition metrics to path.");
        Console.WriteLine("  --baseline <path>              Baseline CSV path.");
        Console.WriteLine("  --captured-at <iso8601>        Timestamp to embed in JSON metadata.");
        Console.WriteLine("  --commit <sha>                 Commit identifier for metadata.");
        Console.WriteLine("  --environment <name>           Environment label for metadata.");
        Console.WriteLine("  --regression-limit <value>     Regression multiplier (default 1.15).");
    }
}
}
/// <summary>
/// Renders benchmark scenario results as a pipe-separated ASCII table on
/// standard output. Column values arrive pre-formatted from the result type.
/// </summary>
internal static class TablePrinter
{
    public static void Print(IEnumerable<VexScenarioResult> results)
    {
        Console.WriteLine("Scenario | Observations | Statements | Events | Total(ms) | Correl(ms) | Insert(ms) | Obs k/s | Evnt k/s | Alloc(MB)");
        Console.WriteLine("---------------------------- | ------------- | ---------- | ------- | ---------- | ---------- | ----------- | ------- | -------- | --------");
        foreach (var result in results)
        {
            // Same " | " separator as the header rows above.
            Console.WriteLine(
                $"{result.IdColumn} | {result.ObservationsColumn} | {result.StatementColumn} | {result.EventColumn} | " +
                $"{result.TotalMeanColumn} | {result.CorrelationMeanColumn} | {result.InsertMeanColumn} | " +
                $"{result.ObservationThroughputColumn} | {result.EventThroughputColumn} | {result.AllocatedColumn}");
        }
    }
}
/// <summary>
/// Writes benchmark scenario results to a CSV file (invariant culture,
/// "F4" precision for floating-point columns), overwriting any existing file.
/// </summary>
internal static class CsvWriter
{
    public static void Write(string path, IEnumerable<VexScenarioResult> results)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(results);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        // Culture-invariant field formatters shared by every row.
        static string I(int value) => value.ToString(CultureInfo.InvariantCulture);
        static string F(double value) => value.ToString("F4", CultureInfo.InvariantCulture);

        using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        using var writer = new StreamWriter(stream);
        writer.WriteLine("scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb");
        foreach (var result in results)
        {
            // Field order mirrors the header line above.
            var fields = new[]
            {
                result.Id,
                I(result.Iterations),
                I(result.ObservationCount),
                I(result.StatementCount),
                I(result.EventCount),
                F(result.TotalStatistics.MeanMs),
                F(result.TotalStatistics.P95Ms),
                F(result.TotalStatistics.MaxMs),
                F(result.InsertStatistics.MeanMs),
                F(result.CorrelationStatistics.MeanMs),
                F(result.ObservationThroughputStatistics.MeanPerSecond),
                F(result.ObservationThroughputStatistics.MinPerSecond),
                F(result.EventThroughputStatistics.MeanPerSecond),
                F(result.EventThroughputStatistics.MinPerSecond),
                F(result.AllocationStatistics.MaxAllocatedMb),
            };
            writer.WriteLine(string.Join(',', fields));
        }
    }
}

View File

@@ -1,3 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Bench.LinkNotMerge.Vex.Tests")]
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Bench.LinkNotMerge.Vex.Tests")]

View File

@@ -1,151 +1,151 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting;
/// <summary>
/// Serializes benchmark scenario reports plus capture metadata into an
/// indented JSON document on disk.
/// </summary>
internal static class BenchmarkJsonWriter
{
    // Web defaults (camelCase property names); nulls are omitted so optional
    // thresholds/baselines disappear from the payload rather than appearing
    // as explicit nulls.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    };

    /// <summary>
    /// Writes the benchmark document to <paramref name="path"/>, creating the
    /// parent directory if needed and overwriting any existing file.
    /// </summary>
    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);
        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }
        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());
        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    // Maps one scenario report onto its JSON DTO; the baseline section is
    // emitted only when a baseline entry was found for the scenario.
    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;
        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.ObservationCount,
            report.Result.StatementCount,
            report.Result.EventCount,
            report.Result.TotalStatistics.MeanMs,
            report.Result.TotalStatistics.P95Ms,
            report.Result.TotalStatistics.MaxMs,
            report.Result.InsertStatistics.MeanMs,
            report.Result.CorrelationStatistics.MeanMs,
            report.Result.ObservationThroughputStatistics.MeanPerSecond,
            report.Result.ObservationThroughputStatistics.MinPerSecond,
            report.Result.EventThroughputStatistics.MeanPerSecond,
            report.Result.EventThroughputStatistics.MinPerSecond,
            report.Result.AllocationStatistics.MaxAllocatedMb,
            report.Result.ThresholdMs,
            report.Result.MinObservationThroughputPerSecond,
            report.Result.MinEventThroughputPerSecond,
            report.Result.MaxAllocatedThresholdMb,
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.Observations,
                    baseline.Statements,
                    baseline.Events,
                    baseline.MeanTotalMs,
                    baseline.P95TotalMs,
                    baseline.MaxTotalMs,
                    baseline.MeanInsertMs,
                    baseline.MeanCorrelationMs,
                    baseline.MeanObservationThroughputPerSecond,
                    baseline.MinObservationThroughputPerSecond,
                    baseline.MeanEventThroughputPerSecond,
                    baseline.MinEventThroughputPerSecond,
                    baseline.MaxAllocatedMb),
            new BenchmarkJsonScenarioRegression(
                report.DurationRegressionRatio,
                report.ObservationThroughputRegressionRatio,
                report.EventThroughputRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    // --- DTOs below mirror the emitted JSON document shape 1:1. ---

    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int Observations,
        int Statements,
        int Events,
        double MeanTotalMs,
        double P95TotalMs,
        double MaxTotalMs,
        double MeanInsertMs,
        double MeanCorrelationMs,
        double MeanObservationThroughputPerSecond,
        double MinObservationThroughputPerSecond,
        double MeanEventThroughputPerSecond,
        double MinEventThroughputPerSecond,
        double MaxAllocatedMb,
        double? ThresholdMs,
        double? MinObservationThroughputThresholdPerSecond,
        double? MinEventThroughputThresholdPerSecond,
        double? MaxAllocatedThresholdMb,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int Observations,
        int Statements,
        int Events,
        double MeanTotalMs,
        double P95TotalMs,
        double MaxTotalMs,
        double MeanInsertMs,
        double MeanCorrelationMs,
        double MeanObservationThroughputPerSecond,
        double MinObservationThroughputPerSecond,
        double MeanEventThroughputPerSecond,
        double MinEventThroughputPerSecond,
        double MaxAllocatedMb);

    private sealed record BenchmarkJsonScenarioRegression(
        double? DurationRatio,
        double? ObservationThroughputRatio,
        double? EventThroughputRatio,
        double Limit,
        bool Breached);
}
/// <summary>
/// Capture metadata embedded at the top of the benchmark JSON document
/// (schema version, capture timestamp, optional commit/environment labels).
/// </summary>
internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting;
/// <summary>
/// Serializes benchmark scenario reports plus capture metadata into an
/// indented JSON document on disk.
/// </summary>
internal static class BenchmarkJsonWriter
{
    // Web defaults (camelCase property names); nulls are omitted so optional
    // thresholds/baselines disappear from the payload rather than appearing
    // as explicit nulls.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    };

    /// <summary>
    /// Writes the benchmark document to <paramref name="path"/>, creating the
    /// parent directory if needed and overwriting any existing file.
    /// </summary>
    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);
        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }
        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());
        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    // Maps one scenario report onto its JSON DTO; the baseline section is
    // emitted only when a baseline entry was found for the scenario.
    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;
        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.ObservationCount,
            report.Result.StatementCount,
            report.Result.EventCount,
            report.Result.TotalStatistics.MeanMs,
            report.Result.TotalStatistics.P95Ms,
            report.Result.TotalStatistics.MaxMs,
            report.Result.InsertStatistics.MeanMs,
            report.Result.CorrelationStatistics.MeanMs,
            report.Result.ObservationThroughputStatistics.MeanPerSecond,
            report.Result.ObservationThroughputStatistics.MinPerSecond,
            report.Result.EventThroughputStatistics.MeanPerSecond,
            report.Result.EventThroughputStatistics.MinPerSecond,
            report.Result.AllocationStatistics.MaxAllocatedMb,
            report.Result.ThresholdMs,
            report.Result.MinObservationThroughputPerSecond,
            report.Result.MinEventThroughputPerSecond,
            report.Result.MaxAllocatedThresholdMb,
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.Observations,
                    baseline.Statements,
                    baseline.Events,
                    baseline.MeanTotalMs,
                    baseline.P95TotalMs,
                    baseline.MaxTotalMs,
                    baseline.MeanInsertMs,
                    baseline.MeanCorrelationMs,
                    baseline.MeanObservationThroughputPerSecond,
                    baseline.MinObservationThroughputPerSecond,
                    baseline.MeanEventThroughputPerSecond,
                    baseline.MinEventThroughputPerSecond,
                    baseline.MaxAllocatedMb),
            new BenchmarkJsonScenarioRegression(
                report.DurationRegressionRatio,
                report.ObservationThroughputRegressionRatio,
                report.EventThroughputRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    // --- DTOs below mirror the emitted JSON document shape 1:1. ---

    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int Observations,
        int Statements,
        int Events,
        double MeanTotalMs,
        double P95TotalMs,
        double MaxTotalMs,
        double MeanInsertMs,
        double MeanCorrelationMs,
        double MeanObservationThroughputPerSecond,
        double MinObservationThroughputPerSecond,
        double MeanEventThroughputPerSecond,
        double MinEventThroughputPerSecond,
        double MaxAllocatedMb,
        double? ThresholdMs,
        double? MinObservationThroughputThresholdPerSecond,
        double? MinEventThroughputThresholdPerSecond,
        double? MaxAllocatedThresholdMb,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int Observations,
        int Statements,
        int Events,
        double MeanTotalMs,
        double P95TotalMs,
        double MaxTotalMs,
        double MeanInsertMs,
        double MeanCorrelationMs,
        double MeanObservationThroughputPerSecond,
        double MinObservationThroughputPerSecond,
        double MeanEventThroughputPerSecond,
        double MinEventThroughputPerSecond,
        double MaxAllocatedMb);

    private sealed record BenchmarkJsonScenarioRegression(
        double? DurationRatio,
        double? ObservationThroughputRatio,
        double? EventThroughputRatio,
        double Limit,
        bool Breached);
}
/// <summary>
/// Capture metadata embedded at the top of the benchmark JSON document
/// (schema version, capture timestamp, optional commit/environment labels).
/// </summary>
internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);

View File

@@ -1,89 +1,89 @@
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting;
/// <summary>
/// Compares one scenario's results against its recorded baseline and decides
/// whether any metric regressed past the configured multiplier.
/// </summary>
internal sealed class BenchmarkScenarioReport
{
    // Default allowance: up to +15% degradation before a regression is flagged.
    private const double DefaultRegressionLimit = 1.15d;

    public BenchmarkScenarioReport(VexScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        Result = result ?? throw new ArgumentNullException(nameof(result));
        Baseline = baseline;
        // Missing or non-positive limits fall back to the default.
        RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
        // Duration ratio is current/baseline (>1 means slower than baseline);
        // throughput ratios are inverted, baseline/current (>1 means lower
        // throughput than baseline) — so "ratio >= limit" means regression
        // uniformly across all three.
        DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs);
        ObservationThroughputRegressionRatio = CalculateInverseRatio(result.ObservationThroughputStatistics.MinPerSecond, baseline?.MinObservationThroughputPerSecond);
        EventThroughputRegressionRatio = CalculateInverseRatio(result.EventThroughputStatistics.MinPerSecond, baseline?.MinEventThroughputPerSecond);
    }

    public VexScenarioResult Result { get; }

    public BaselineEntry? Baseline { get; }

    // Multiplier above which a ratio counts as a regression.
    public double RegressionLimit { get; }

    // Null when no usable baseline value exists for the metric.
    public double? DurationRegressionRatio { get; }

    public double? ObservationThroughputRegressionRatio { get; }

    public double? EventThroughputRegressionRatio { get; }

    public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool ObservationThroughputRegressionBreached => ObservationThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool EventThroughputRegressionBreached => EventThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;

    // True when any of the three tracked metrics breached the limit.
    public bool RegressionBreached => DurationRegressionBreached || ObservationThroughputRegressionBreached || EventThroughputRegressionBreached;

    /// <summary>
    /// Yields one human-readable failure message per breached metric; yields
    /// nothing when no baseline exists for the scenario.
    /// </summary>
    public IEnumerable<string> BuildRegressionFailureMessages()
    {
        if (Baseline is null)
        {
            yield break;
        }
        if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
        {
            // Percentage over baseline, e.g. ratio 1.20 -> +20.0%.
            var delta = (durationRatio - 1d) * 100d;
            yield return $"{Result.Id} exceeded max duration budget: {Result.TotalStatistics.MaxMs:F2} ms vs baseline {Baseline.MaxTotalMs:F2} ms (+{delta:F1}%).";
        }
        if (ObservationThroughputRegressionBreached && ObservationThroughputRegressionRatio is { } obsRatio)
        {
            var delta = (obsRatio - 1d) * 100d;
            yield return $"{Result.Id} observation throughput regressed: min {Result.ObservationThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinObservationThroughputPerSecond:N0} obs/s (-{delta:F1}%).";
        }
        if (EventThroughputRegressionBreached && EventThroughputRegressionRatio is { } evtRatio)
        {
            var delta = (evtRatio - 1d) * 100d;
            yield return $"{Result.Id} event throughput regressed: min {Result.EventThroughputStatistics.MinPerSecond:N0} events/s vs baseline {Baseline.MinEventThroughputPerSecond:N0} events/s (-{delta:F1}%).";
        }
    }

    // current/baseline; null when the baseline is absent or non-positive.
    private static double? CalculateRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }
        return current / baseline.Value;
    }

    // baseline/current; null without a positive baseline. A non-positive
    // current throughput collapses to +infinity, which always breaches the limit.
    private static double? CalculateInverseRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }
        if (current <= 0d)
        {
            return double.PositiveInfinity;
        }
        return baseline.Value / current;
    }
}
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting;
/// <summary>
/// Compares one scenario's results against its recorded baseline and decides
/// whether any metric regressed past the configured multiplier.
/// </summary>
internal sealed class BenchmarkScenarioReport
{
    // Default allowance: up to +15% degradation before a regression is flagged.
    private const double DefaultRegressionLimit = 1.15d;

    public BenchmarkScenarioReport(VexScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        Result = result ?? throw new ArgumentNullException(nameof(result));
        Baseline = baseline;
        // Missing or non-positive limits fall back to the default.
        RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
        // Duration ratio is current/baseline (>1 means slower than baseline);
        // throughput ratios are inverted, baseline/current (>1 means lower
        // throughput than baseline) — so "ratio >= limit" means regression
        // uniformly across all three.
        DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs);
        ObservationThroughputRegressionRatio = CalculateInverseRatio(result.ObservationThroughputStatistics.MinPerSecond, baseline?.MinObservationThroughputPerSecond);
        EventThroughputRegressionRatio = CalculateInverseRatio(result.EventThroughputStatistics.MinPerSecond, baseline?.MinEventThroughputPerSecond);
    }

    public VexScenarioResult Result { get; }

    public BaselineEntry? Baseline { get; }

    // Multiplier above which a ratio counts as a regression.
    public double RegressionLimit { get; }

    // Null when no usable baseline value exists for the metric.
    public double? DurationRegressionRatio { get; }

    public double? ObservationThroughputRegressionRatio { get; }

    public double? EventThroughputRegressionRatio { get; }

    public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool ObservationThroughputRegressionBreached => ObservationThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool EventThroughputRegressionBreached => EventThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;

    // True when any of the three tracked metrics breached the limit.
    public bool RegressionBreached => DurationRegressionBreached || ObservationThroughputRegressionBreached || EventThroughputRegressionBreached;

    /// <summary>
    /// Yields one human-readable failure message per breached metric; yields
    /// nothing when no baseline exists for the scenario.
    /// </summary>
    public IEnumerable<string> BuildRegressionFailureMessages()
    {
        if (Baseline is null)
        {
            yield break;
        }
        if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
        {
            // Percentage over baseline, e.g. ratio 1.20 -> +20.0%.
            var delta = (durationRatio - 1d) * 100d;
            yield return $"{Result.Id} exceeded max duration budget: {Result.TotalStatistics.MaxMs:F2} ms vs baseline {Baseline.MaxTotalMs:F2} ms (+{delta:F1}%).";
        }
        if (ObservationThroughputRegressionBreached && ObservationThroughputRegressionRatio is { } obsRatio)
        {
            var delta = (obsRatio - 1d) * 100d;
            yield return $"{Result.Id} observation throughput regressed: min {Result.ObservationThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinObservationThroughputPerSecond:N0} obs/s (-{delta:F1}%).";
        }
        if (EventThroughputRegressionBreached && EventThroughputRegressionRatio is { } evtRatio)
        {
            var delta = (evtRatio - 1d) * 100d;
            yield return $"{Result.Id} event throughput regressed: min {Result.EventThroughputStatistics.MinPerSecond:N0} events/s vs baseline {Baseline.MinEventThroughputPerSecond:N0} events/s (-{delta:F1}%).";
        }
    }

    // current/baseline; null when the baseline is absent or non-positive.
    private static double? CalculateRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }
        return current / baseline.Value;
    }

    // baseline/current; null without a positive baseline. A non-positive
    // current throughput collapses to +infinity, which always breaches the limit.
    private static double? CalculateInverseRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }
        if (current <= 0d)
        {
            return double.PositiveInfinity;
        }
        return baseline.Value / current;
    }
}

View File

@@ -1,94 +1,94 @@
using System.Globalization;
using System.Text;
namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting;
/// <summary>
/// Writes benchmark results in the Prometheus text exposition format, one
/// gauge sample per metric, labelled with the scenario id.
/// </summary>
internal static class PrometheusWriter
{
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);
        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }
        var builder = new StringBuilder();
        // NOTE(review): these HELP/TYPE headers declare family names (e.g.
        // linknotmerge_vex_bench_total_ms) that differ from the sample names
        // emitted below (e.g. linknotmerge_vex_bench_mean_total_ms), so the
        // actual samples are effectively untyped — confirm whether that is
        // intentional.
        builder.AppendLine("# HELP linknotmerge_vex_bench_total_ms Link-Not-Merge VEX benchmark total duration (milliseconds).");
        builder.AppendLine("# TYPE linknotmerge_vex_bench_total_ms gauge");
        builder.AppendLine("# HELP linknotmerge_vex_bench_throughput_per_sec Link-Not-Merge VEX benchmark observation throughput (observations per second).");
        builder.AppendLine("# TYPE linknotmerge_vex_bench_throughput_per_sec gauge");
        builder.AppendLine("# HELP linknotmerge_vex_bench_event_throughput_per_sec Link-Not-Merge VEX benchmark event throughput (events per second).");
        builder.AppendLine("# TYPE linknotmerge_vex_bench_event_throughput_per_sec gauge");
        builder.AppendLine("# HELP linknotmerge_vex_bench_allocated_mb Link-Not-Merge VEX benchmark max allocations (megabytes).");
        builder.AppendLine("# TYPE linknotmerge_vex_bench_allocated_mb gauge");
        foreach (var report in reports)
        {
            var scenario = Escape(report.Result.Id);
            // Current-run statistics plus configured thresholds (null
            // thresholds are skipped by AppendMetric).
            AppendMetric(builder, "linknotmerge_vex_bench_mean_total_ms", scenario, report.Result.TotalStatistics.MeanMs);
            AppendMetric(builder, "linknotmerge_vex_bench_p95_total_ms", scenario, report.Result.TotalStatistics.P95Ms);
            AppendMetric(builder, "linknotmerge_vex_bench_max_total_ms", scenario, report.Result.TotalStatistics.MaxMs);
            AppendMetric(builder, "linknotmerge_vex_bench_threshold_ms", scenario, report.Result.ThresholdMs);
            AppendMetric(builder, "linknotmerge_vex_bench_mean_observation_throughput_per_sec", scenario, report.Result.ObservationThroughputStatistics.MeanPerSecond);
            AppendMetric(builder, "linknotmerge_vex_bench_min_observation_throughput_per_sec", scenario, report.Result.ObservationThroughputStatistics.MinPerSecond);
            AppendMetric(builder, "linknotmerge_vex_bench_observation_throughput_floor_per_sec", scenario, report.Result.MinObservationThroughputPerSecond);
            AppendMetric(builder, "linknotmerge_vex_bench_mean_event_throughput_per_sec", scenario, report.Result.EventThroughputStatistics.MeanPerSecond);
            AppendMetric(builder, "linknotmerge_vex_bench_min_event_throughput_per_sec", scenario, report.Result.EventThroughputStatistics.MinPerSecond);
            AppendMetric(builder, "linknotmerge_vex_bench_event_throughput_floor_per_sec", scenario, report.Result.MinEventThroughputPerSecond);
            AppendMetric(builder, "linknotmerge_vex_bench_max_allocated_mb", scenario, report.Result.AllocationStatistics.MaxAllocatedMb);
            AppendMetric(builder, "linknotmerge_vex_bench_max_allocated_threshold_mb", scenario, report.Result.MaxAllocatedThresholdMb);
            // Baseline reference points, only when a baseline entry exists.
            if (report.Baseline is { } baseline)
            {
                AppendMetric(builder, "linknotmerge_vex_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs);
                AppendMetric(builder, "linknotmerge_vex_bench_baseline_min_observation_throughput_per_sec", scenario, baseline.MinObservationThroughputPerSecond);
                AppendMetric(builder, "linknotmerge_vex_bench_baseline_min_event_throughput_per_sec", scenario, baseline.MinEventThroughputPerSecond);
            }
            // Regression ratios are nullable (absent without usable baselines).
            if (report.DurationRegressionRatio is { } durationRatio)
            {
                AppendMetric(builder, "linknotmerge_vex_bench_duration_regression_ratio", scenario, durationRatio);
            }
            if (report.ObservationThroughputRegressionRatio is { } obsRatio)
            {
                AppendMetric(builder, "linknotmerge_vex_bench_observation_regression_ratio", scenario, obsRatio);
            }
            if (report.EventThroughputRegressionRatio is { } evtRatio)
            {
                AppendMetric(builder, "linknotmerge_vex_bench_event_regression_ratio", scenario, evtRatio);
            }
            AppendMetric(builder, "linknotmerge_vex_bench_regression_limit", scenario, report.RegressionLimit);
            AppendMetric(builder, "linknotmerge_vex_bench_regression_breached", scenario, report.RegressionBreached ? 1 : 0);
        }
        File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
    }

    // Emits `metric{scenario="..."} value`; skipped entirely when value is
    // null, so optional thresholds/ratios never appear as empty samples.
    private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
    {
        if (!value.HasValue)
        {
            return;
        }
        builder.Append(metric);
        builder.Append("{scenario=\"");
        builder.Append(scenario);
        builder.Append("\"} ");
        // G17 renders doubles with round-trippable precision.
        builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture));
    }

    // Escapes backslashes and double quotes for use inside a Prometheus
    // label value.
    private static string Escape(string value) =>
        value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}
using System.Globalization;
using System.Text;
namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting;
/// <summary>
/// Writes Link-Not-Merge VEX benchmark results to a Prometheus text-format file.
/// One gauge sample per metric per scenario; optional values (thresholds,
/// baselines, regression ratios) are emitted only when present.
/// </summary>
internal static class PrometheusWriter
{
/// <summary>
/// Renders all scenario reports and writes them to <paramref name="path"/>
/// (parent directories are created as needed).
/// </summary>
public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
ArgumentNullException.ThrowIfNull(reports);
var resolved = Path.GetFullPath(path);
var directory = Path.GetDirectoryName(resolved);
if (!string.IsNullOrEmpty(directory))
{
Directory.CreateDirectory(directory);
}
var builder = new StringBuilder();
// HELP/TYPE headers are emitted once, before any samples.
builder.AppendLine("# HELP linknotmerge_vex_bench_total_ms Link-Not-Merge VEX benchmark total duration (milliseconds).");
builder.AppendLine("# TYPE linknotmerge_vex_bench_total_ms gauge");
builder.AppendLine("# HELP linknotmerge_vex_bench_throughput_per_sec Link-Not-Merge VEX benchmark observation throughput (observations per second).");
builder.AppendLine("# TYPE linknotmerge_vex_bench_throughput_per_sec gauge");
builder.AppendLine("# HELP linknotmerge_vex_bench_event_throughput_per_sec Link-Not-Merge VEX benchmark event throughput (events per second).");
builder.AppendLine("# TYPE linknotmerge_vex_bench_event_throughput_per_sec gauge");
builder.AppendLine("# HELP linknotmerge_vex_bench_allocated_mb Link-Not-Merge VEX benchmark max allocations (megabytes).");
builder.AppendLine("# TYPE linknotmerge_vex_bench_allocated_mb gauge");
foreach (var report in reports)
{
// Scenario id is escaped once and reused as the label value for all samples.
var scenario = Escape(report.Result.Id);
AppendMetric(builder, "linknotmerge_vex_bench_mean_total_ms", scenario, report.Result.TotalStatistics.MeanMs);
AppendMetric(builder, "linknotmerge_vex_bench_p95_total_ms", scenario, report.Result.TotalStatistics.P95Ms);
AppendMetric(builder, "linknotmerge_vex_bench_max_total_ms", scenario, report.Result.TotalStatistics.MaxMs);
AppendMetric(builder, "linknotmerge_vex_bench_threshold_ms", scenario, report.Result.ThresholdMs);
AppendMetric(builder, "linknotmerge_vex_bench_mean_observation_throughput_per_sec", scenario, report.Result.ObservationThroughputStatistics.MeanPerSecond);
AppendMetric(builder, "linknotmerge_vex_bench_min_observation_throughput_per_sec", scenario, report.Result.ObservationThroughputStatistics.MinPerSecond);
AppendMetric(builder, "linknotmerge_vex_bench_observation_throughput_floor_per_sec", scenario, report.Result.MinObservationThroughputPerSecond);
AppendMetric(builder, "linknotmerge_vex_bench_mean_event_throughput_per_sec", scenario, report.Result.EventThroughputStatistics.MeanPerSecond);
AppendMetric(builder, "linknotmerge_vex_bench_min_event_throughput_per_sec", scenario, report.Result.EventThroughputStatistics.MinPerSecond);
AppendMetric(builder, "linknotmerge_vex_bench_event_throughput_floor_per_sec", scenario, report.Result.MinEventThroughputPerSecond);
AppendMetric(builder, "linknotmerge_vex_bench_max_allocated_mb", scenario, report.Result.AllocationStatistics.MaxAllocatedMb);
AppendMetric(builder, "linknotmerge_vex_bench_max_allocated_threshold_mb", scenario, report.Result.MaxAllocatedThresholdMb);
// Baseline and regression samples only exist when a baseline was loaded.
if (report.Baseline is { } baseline)
{
AppendMetric(builder, "linknotmerge_vex_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs);
AppendMetric(builder, "linknotmerge_vex_bench_baseline_min_observation_throughput_per_sec", scenario, baseline.MinObservationThroughputPerSecond);
AppendMetric(builder, "linknotmerge_vex_bench_baseline_min_event_throughput_per_sec", scenario, baseline.MinEventThroughputPerSecond);
}
if (report.DurationRegressionRatio is { } durationRatio)
{
AppendMetric(builder, "linknotmerge_vex_bench_duration_regression_ratio", scenario, durationRatio);
}
if (report.ObservationThroughputRegressionRatio is { } obsRatio)
{
AppendMetric(builder, "linknotmerge_vex_bench_observation_regression_ratio", scenario, obsRatio);
}
if (report.EventThroughputRegressionRatio is { } evtRatio)
{
AppendMetric(builder, "linknotmerge_vex_bench_event_regression_ratio", scenario, evtRatio);
}
AppendMetric(builder, "linknotmerge_vex_bench_regression_limit", scenario, report.RegressionLimit);
AppendMetric(builder, "linknotmerge_vex_bench_regression_breached", scenario, report.RegressionBreached ? 1 : 0);
}
// NOTE(review): StringBuilder.AppendLine uses Environment.NewLine, so on
// Windows the file contains \r\n; Prometheus exposition format expects \n.
// Confirm whether the file is only consumed on Linux before changing.
File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
}
/// <summary>Appends one gauge sample line, or nothing when the value is null.</summary>
private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
{
if (!value.HasValue)
{
return;
}
builder.Append(metric);
builder.Append("{scenario=\"");
builder.Append(scenario);
builder.Append("\"} ");
// G17 round-trips doubles exactly; invariant culture keeps '.' decimals.
builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture));
}
/// <summary>Escapes backslashes and quotes for a Prometheus label value.</summary>
private static string Escape(string value) =>
value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}

View File

@@ -1,84 +1,84 @@
namespace StellaOps.Bench.LinkNotMerge.Vex;
/// <summary>Mean / 95th-percentile / max duration statistics, in milliseconds.</summary>
internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
    /// <summary>Computes statistics from raw duration samples; empty input yields zeros.</summary>
    public static DurationStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return default;
        }

        var ordered = values.ToArray();
        Array.Sort(ordered);

        var sum = 0d;
        for (var i = 0; i < ordered.Length; i++)
        {
            sum += ordered[i];
        }

        return new DurationStatistics(
            sum / ordered.Length,
            Percentile(ordered, 95),
            ordered[^1]);
    }

    // Percentile with linear interpolation between the two nearest ranks
    // (same convention as numpy's default "linear" method).
    private static double Percentile(IReadOnlyList<double> sorted, double percentile)
    {
        if (sorted.Count == 0)
        {
            return 0;
        }

        var rank = percentile / 100d * (sorted.Count - 1);
        var lower = (int)Math.Floor(rank);
        var upper = (int)Math.Ceiling(rank);

        if (upper >= sorted.Count)
        {
            return sorted[lower];
        }

        var weight = rank - lower;
        return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
    }
}
/// <summary>Mean / minimum throughput statistics (units per second).</summary>
internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
    /// <summary>Computes statistics from per-iteration samples; empty input yields zeros.</summary>
    public static ThroughputStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return default;
        }

        var sum = 0d;
        var minimum = double.MaxValue;
        for (var i = 0; i < values.Count; i++)
        {
            var sample = values[i];
            sum += sample;
            minimum = Math.Min(minimum, sample);
        }

        return new ThroughputStatistics(sum / values.Count, minimum);
    }
}
/// <summary>Peak allocation observed across benchmark iterations, in megabytes.</summary>
internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
    /// <summary>Returns the maximum sample; empty input (or all-negative) yields zero.</summary>
    public static AllocationStatistics From(IReadOnlyList<double> values)
    {
        var peak = 0d;
        for (var i = 0; i < values.Count; i++)
        {
            peak = Math.Max(peak, values[i]);
        }

        return new AllocationStatistics(peak);
    }
}
namespace StellaOps.Bench.LinkNotMerge.Vex;
/// <summary>Mean / 95th-percentile / max duration statistics, in milliseconds.</summary>
internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
/// <summary>Computes statistics from raw duration samples; empty input yields zeros.</summary>
public static DurationStatistics From(IReadOnlyList<double> values)
{
if (values.Count == 0)
{
return new DurationStatistics(0, 0, 0);
}
var sorted = values.ToArray();
Array.Sort(sorted);
var total = 0d;
foreach (var value in values)
{
total += value;
}
var mean = total / values.Count;
var p95 = Percentile(sorted, 95);
// Sorted ascending, so the last element is the maximum.
var max = sorted[^1];
return new DurationStatistics(mean, p95, max);
}
// Percentile with linear interpolation between the two nearest ranks.
private static double Percentile(IReadOnlyList<double> sorted, double percentile)
{
if (sorted.Count == 0)
{
return 0;
}
var rank = (percentile / 100d) * (sorted.Count - 1);
var lower = (int)Math.Floor(rank);
var upper = (int)Math.Ceiling(rank);
var weight = rank - lower;
if (upper >= sorted.Count)
{
return sorted[lower];
}
return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
}
}
/// <summary>Mean / minimum throughput statistics (units per second).</summary>
internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
/// <summary>Computes statistics from per-iteration samples; empty input yields zeros.</summary>
public static ThroughputStatistics From(IReadOnlyList<double> values)
{
if (values.Count == 0)
{
return new ThroughputStatistics(0, 0);
}
var total = 0d;
var min = double.MaxValue;
foreach (var value in values)
{
total += value;
min = Math.Min(min, value);
}
var mean = total / values.Count;
return new ThroughputStatistics(mean, min);
}
}
/// <summary>Peak allocation observed across benchmark iterations, in megabytes.</summary>
internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
/// <summary>Returns the maximum sample; empty input (or all-negative) yields zero.</summary>
public static AllocationStatistics From(IReadOnlyList<double> values)
{
var max = 0d;
foreach (var value in values)
{
max = Math.Max(max, value);
}
return new AllocationStatistics(max);
}
}
}

View File

@@ -1,150 +1,150 @@
namespace StellaOps.Bench.LinkNotMerge.Vex;
/// <summary>
/// Correlates VEX observation documents into "linksets" keyed by
/// (tenant, alias, product purl), accumulating per-status statement counts,
/// and produces an event per linkset that has actionable statuses.
/// </summary>
internal sealed class VexLinksetAggregator
{
/// <summary>
/// Groups every statement of every document by (tenant | alias | product)
/// and returns aggregate counts plus the emitted events.
/// </summary>
public VexAggregationResult Correlate(IEnumerable<VexObservationDocument> documents)
{
ArgumentNullException.ThrowIfNull(documents);
var groups = new Dictionary<string, VexAccumulator>(StringComparer.Ordinal);
var statementsSeen = 0;
foreach (var document in documents)
{
var tenant = document.Tenant;
var aliases = document.Aliases;
var statements = document.Statements;
foreach (var statementValue in statements)
{
statementsSeen++;
var status = statementValue.Status;
var justification = statementValue.Justification;
var lastUpdated = statementValue.LastUpdated;
var productKey = statementValue.Product.Purl;
// Each statement contributes once per document alias.
foreach (var alias in aliases)
{
// Allocation-free composite key "tenant|alias|product" built in place
// (length + 2 accounts for the two '|' separators).
var key = string.Create(alias.Length + tenant.Length + productKey.Length + 2, (tenant, alias, productKey), static (span, data) =>
{
var (tenantValue, aliasValue, productValue) = data;
var offset = 0;
tenantValue.AsSpan().CopyTo(span);
offset += tenantValue.Length;
span[offset++] = '|';
aliasValue.AsSpan().CopyTo(span[offset..]);
offset += aliasValue.Length;
span[offset++] = '|';
productValue.AsSpan().CopyTo(span[offset..]);
});
if (!groups.TryGetValue(key, out var accumulator))
{
accumulator = new VexAccumulator(tenant, alias, productKey);
groups[key] = accumulator;
}
accumulator.AddStatement(status, justification, lastUpdated);
}
}
}
var eventDocuments = new List<VexEvent>(groups.Count);
foreach (var accumulator in groups.Values)
{
if (accumulator.ShouldEmitEvent)
{
eventDocuments.Add(accumulator.ToEvent());
}
}
return new VexAggregationResult(
LinksetCount: groups.Count,
StatementCount: statementsSeen,
EventCount: eventDocuments.Count,
EventDocuments: eventDocuments);
}
// Mutable per-linkset accumulator: status histogram, distinct justifications,
// and the latest (UTC) statement timestamp.
private sealed class VexAccumulator
{
private readonly Dictionary<string, int> _statusCounts = new(StringComparer.Ordinal);
private readonly HashSet<string> _justifications = new(StringComparer.Ordinal);
private readonly string _tenant;
private readonly string _alias;
private readonly string _product;
private DateTimeOffset? _latest;
public VexAccumulator(string tenant, string alias, string product)
{
_tenant = tenant;
_alias = alias;
_product = product;
}
public void AddStatement(string status, string justification, DateTimeOffset updatedAt)
{
// TryAdd seeds the count at 1; on subsequent hits increment instead.
if (!_statusCounts.TryAdd(status, 1))
{
_statusCounts[status]++;
}
if (!string.IsNullOrEmpty(justification))
{
_justifications.Add(justification);
}
// default(DateTimeOffset) is treated as "no timestamp provided".
if (updatedAt != default)
{
var value = updatedAt.ToUniversalTime();
if (!_latest.HasValue || value > _latest.Value)
{
_latest = value;
}
}
}
// Events are only emitted for linksets with at least one "affected" or
// "under_investigation" statement.
public bool ShouldEmitEvent
{
get
{
if (_statusCounts.TryGetValue("affected", out var affected) && affected > 0)
{
return true;
}
if (_statusCounts.TryGetValue("under_investigation", out var investigating) && investigating > 0)
{
return true;
}
return false;
}
}
// Snapshots the accumulator into an immutable event (copies the histogram).
public VexEvent ToEvent()
{
return new VexEvent(
_tenant,
_alias,
_product,
new Dictionary<string, int>(_statusCounts, StringComparer.Ordinal),
_justifications.ToArray(),
_latest);
}
}
}
/// <summary>Aggregate output of a correlation pass over VEX observations.</summary>
internal sealed record VexAggregationResult(
// Number of distinct (tenant, alias, product) groups seen.
int LinksetCount,
// Total statements processed (counted once per document, not per alias).
int StatementCount,
int EventCount,
IReadOnlyList<VexEvent> EventDocuments);
/// <summary>One emitted linkset event with its status histogram and metadata.</summary>
internal sealed record VexEvent(
string Tenant,
string Alias,
string Product,
IReadOnlyDictionary<string, int> Statuses,
IReadOnlyCollection<string> Justifications,
// Latest statement timestamp (UTC); null when no statement carried one.
DateTimeOffset? LastUpdated);
namespace StellaOps.Bench.LinkNotMerge.Vex;
/// <summary>
/// Correlates VEX observation documents into "linksets" keyed by
/// (tenant, alias, product purl), accumulating per-status statement counts,
/// and produces an event per linkset that has actionable statuses.
/// </summary>
internal sealed class VexLinksetAggregator
{
/// <summary>
/// Groups every statement of every document by (tenant | alias | product)
/// and returns aggregate counts plus the emitted events.
/// </summary>
public VexAggregationResult Correlate(IEnumerable<VexObservationDocument> documents)
{
ArgumentNullException.ThrowIfNull(documents);
var groups = new Dictionary<string, VexAccumulator>(StringComparer.Ordinal);
var statementsSeen = 0;
foreach (var document in documents)
{
var tenant = document.Tenant;
var aliases = document.Aliases;
var statements = document.Statements;
foreach (var statementValue in statements)
{
statementsSeen++;
var status = statementValue.Status;
var justification = statementValue.Justification;
var lastUpdated = statementValue.LastUpdated;
var productKey = statementValue.Product.Purl;
// Each statement contributes once per document alias.
foreach (var alias in aliases)
{
// Allocation-free composite key "tenant|alias|product" built in place
// (length + 2 accounts for the two '|' separators).
var key = string.Create(alias.Length + tenant.Length + productKey.Length + 2, (tenant, alias, productKey), static (span, data) =>
{
var (tenantValue, aliasValue, productValue) = data;
var offset = 0;
tenantValue.AsSpan().CopyTo(span);
offset += tenantValue.Length;
span[offset++] = '|';
aliasValue.AsSpan().CopyTo(span[offset..]);
offset += aliasValue.Length;
span[offset++] = '|';
productValue.AsSpan().CopyTo(span[offset..]);
});
if (!groups.TryGetValue(key, out var accumulator))
{
accumulator = new VexAccumulator(tenant, alias, productKey);
groups[key] = accumulator;
}
accumulator.AddStatement(status, justification, lastUpdated);
}
}
}
var eventDocuments = new List<VexEvent>(groups.Count);
foreach (var accumulator in groups.Values)
{
if (accumulator.ShouldEmitEvent)
{
eventDocuments.Add(accumulator.ToEvent());
}
}
return new VexAggregationResult(
LinksetCount: groups.Count,
StatementCount: statementsSeen,
EventCount: eventDocuments.Count,
EventDocuments: eventDocuments);
}
// Mutable per-linkset accumulator: status histogram, distinct justifications,
// and the latest (UTC) statement timestamp.
private sealed class VexAccumulator
{
private readonly Dictionary<string, int> _statusCounts = new(StringComparer.Ordinal);
private readonly HashSet<string> _justifications = new(StringComparer.Ordinal);
private readonly string _tenant;
private readonly string _alias;
private readonly string _product;
private DateTimeOffset? _latest;
public VexAccumulator(string tenant, string alias, string product)
{
_tenant = tenant;
_alias = alias;
_product = product;
}
public void AddStatement(string status, string justification, DateTimeOffset updatedAt)
{
// TryAdd seeds the count at 1; on subsequent hits increment instead.
if (!_statusCounts.TryAdd(status, 1))
{
_statusCounts[status]++;
}
if (!string.IsNullOrEmpty(justification))
{
_justifications.Add(justification);
}
// default(DateTimeOffset) is treated as "no timestamp provided".
if (updatedAt != default)
{
var value = updatedAt.ToUniversalTime();
if (!_latest.HasValue || value > _latest.Value)
{
_latest = value;
}
}
}
// Events are only emitted for linksets with at least one "affected" or
// "under_investigation" statement.
public bool ShouldEmitEvent
{
get
{
if (_statusCounts.TryGetValue("affected", out var affected) && affected > 0)
{
return true;
}
if (_statusCounts.TryGetValue("under_investigation", out var investigating) && investigating > 0)
{
return true;
}
return false;
}
}
// Snapshots the accumulator into an immutable event (copies the histogram).
public VexEvent ToEvent()
{
return new VexEvent(
_tenant,
_alias,
_product,
new Dictionary<string, int>(_statusCounts, StringComparer.Ordinal),
_justifications.ToArray(),
_latest);
}
}
}
/// <summary>Aggregate output of a correlation pass over VEX observations.</summary>
internal sealed record VexAggregationResult(
// Number of distinct (tenant, alias, product) groups seen.
int LinksetCount,
// Total statements processed (counted once per document, not per alias).
int StatementCount,
int EventCount,
IReadOnlyList<VexEvent> EventDocuments);
/// <summary>One emitted linkset event with its status histogram and metadata.</summary>
internal sealed record VexEvent(
string Tenant,
string Alias,
string Product,
IReadOnlyDictionary<string, int> Statuses,
IReadOnlyCollection<string> Justifications,
// Latest statement timestamp (UTC); null when no statement carried one.
DateTimeOffset? LastUpdated);

View File

@@ -1,183 +1,183 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.LinkNotMerge.Vex;
/// <summary>
/// Root benchmark configuration: global thresholds/iterations plus the list
/// of scenarios. All threshold values are optional and may be overridden
/// per scenario.
/// </summary>
internal sealed record VexBenchmarkConfig(
double? ThresholdMs,
double? MinThroughputPerSecond,
double? MinEventThroughputPerSecond,
double? MaxAllocatedMb,
int? Iterations,
IReadOnlyList<VexScenarioConfig> Scenarios)
{
/// <summary>
/// Loads and validates a JSON benchmark configuration file.
/// </summary>
/// <exception cref="FileNotFoundException">The file does not exist.</exception>
/// <exception cref="InvalidOperationException">The file parses to null or has no scenarios.</exception>
public static async Task<VexBenchmarkConfig> LoadAsync(string path)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
var resolved = Path.GetFullPath(path);
if (!File.Exists(resolved))
{
throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
}
await using var stream = File.OpenRead(resolved);
// Web defaults + lenient parsing (comments, trailing commas) so hand-edited
// config files are accepted.
var model = await JsonSerializer.DeserializeAsync<VexBenchmarkConfigModel>(
stream,
new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true,
ReadCommentHandling = JsonCommentHandling.Skip,
AllowTrailingCommas = true,
}).ConfigureAwait(false);
if (model is null)
{
throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
}
if (model.Scenarios.Count == 0)
{
throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
}
foreach (var scenario in model.Scenarios)
{
scenario.Validate();
}
return new VexBenchmarkConfig(
model.ThresholdMs,
model.MinThroughputPerSecond,
model.MinEventThroughputPerSecond,
model.MaxAllocatedMb,
model.Iterations,
model.Scenarios);
}
// Private JSON binding model; mirrors the public record's shape.
private sealed class VexBenchmarkConfigModel
{
[JsonPropertyName("thresholdMs")]
public double? ThresholdMs { get; init; }
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("minEventThroughputPerSecond")]
public double? MinEventThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }
[JsonPropertyName("iterations")]
public int? Iterations { get; init; }
[JsonPropertyName("scenarios")]
public List<VexScenarioConfig> Scenarios { get; init; } = new();
}
}
/// <summary>
/// Per-scenario benchmark settings bound from JSON. All values are optional;
/// the Resolve* helpers substitute defaults for values that were not
/// explicitly configured as positive numbers.
/// </summary>
internal sealed class VexScenarioConfig
{
    private const int DefaultObservationCount = 4_000;
    private const int DefaultAliasGroups = 400;
    private const int DefaultStatementsPerObservation = 6;
    private const int DefaultProductsPerObservation = 3;
    private const int DefaultTenants = 3;
    private const int DefaultBatchSize = 250;
    private const int DefaultSeed = 520_025;

    [JsonPropertyName("id")]
    public string? Id { get; init; }

    [JsonPropertyName("label")]
    public string? Label { get; init; }

    [JsonPropertyName("observations")]
    public int? Observations { get; init; }

    [JsonPropertyName("aliasGroups")]
    public int? AliasGroups { get; init; }

    [JsonPropertyName("statementsPerObservation")]
    public int? StatementsPerObservation { get; init; }

    [JsonPropertyName("productsPerObservation")]
    public int? ProductsPerObservation { get; init; }

    [JsonPropertyName("tenants")]
    public int? Tenants { get; init; }

    [JsonPropertyName("batchSize")]
    public int? BatchSize { get; init; }

    [JsonPropertyName("seed")]
    public int? Seed { get; init; }

    [JsonPropertyName("iterations")]
    public int? Iterations { get; init; }

    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }

    [JsonPropertyName("minThroughputPerSecond")]
    public double? MinThroughputPerSecond { get; init; }

    [JsonPropertyName("minEventThroughputPerSecond")]
    public double? MinEventThroughputPerSecond { get; init; }

    [JsonPropertyName("maxAllocatedMb")]
    public double? MaxAllocatedMb { get; init; }

    /// <summary>Scenario identifier; falls back to "vex" when unset.</summary>
    public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "vex" : Id!.Trim();

    /// <summary>Human-readable label; falls back to <see cref="ScenarioId"/>.</summary>
    public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim();

    public int ResolveObservationCount() => Observations is > 0 ? Observations.Value : DefaultObservationCount;

    public int ResolveAliasGroups() => AliasGroups is > 0 ? AliasGroups.Value : DefaultAliasGroups;

    public int ResolveStatementsPerObservation() => StatementsPerObservation is > 0 ? StatementsPerObservation.Value : DefaultStatementsPerObservation;

    public int ResolveProductsPerObservation() => ProductsPerObservation is > 0 ? ProductsPerObservation.Value : DefaultProductsPerObservation;

    public int ResolveTenantCount() => Tenants is > 0 ? Tenants.Value : DefaultTenants;

    public int ResolveBatchSize() => BatchSize is > 0 ? BatchSize.Value : DefaultBatchSize;

    public int ResolveSeed() => Seed is > 0 ? Seed.Value : DefaultSeed;

    /// <summary>
    /// Resolves the iteration count (scenario override, then global default,
    /// then 3) and rejects non-positive results.
    /// </summary>
    public int ResolveIterations(int? defaultIterations)
    {
        var iterations = Iterations ?? defaultIterations ?? 3;
        if (iterations <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires iterations > 0.");
        }
        return iterations;
    }

    /// <summary>
    /// Rejects explicitly configured non-positive values.
    /// </summary>
    /// <remarks>
    /// Fix: the previous implementation checked the Resolve*() results, which
    /// silently coerce non-positive inputs to positive defaults — so validation
    /// could never fail. We now inspect the raw nullable values: null (unset)
    /// is fine, but an explicit value &lt;= 0 is a configuration error.
    /// </remarks>
    public void Validate()
    {
        if (Observations is <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires observations > 0.");
        }
        if (AliasGroups is <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires aliasGroups > 0.");
        }
        if (StatementsPerObservation is <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires statementsPerObservation > 0.");
        }
        if (ProductsPerObservation is <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires productsPerObservation > 0.");
        }
    }
}
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.LinkNotMerge.Vex;
/// <summary>
/// Root benchmark configuration: global thresholds/iterations plus the list
/// of scenarios. All threshold values are optional and may be overridden
/// per scenario.
/// </summary>
internal sealed record VexBenchmarkConfig(
double? ThresholdMs,
double? MinThroughputPerSecond,
double? MinEventThroughputPerSecond,
double? MaxAllocatedMb,
int? Iterations,
IReadOnlyList<VexScenarioConfig> Scenarios)
{
/// <summary>
/// Loads and validates a JSON benchmark configuration file.
/// </summary>
/// <exception cref="FileNotFoundException">The file does not exist.</exception>
/// <exception cref="InvalidOperationException">The file parses to null or has no scenarios.</exception>
public static async Task<VexBenchmarkConfig> LoadAsync(string path)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
var resolved = Path.GetFullPath(path);
if (!File.Exists(resolved))
{
throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
}
await using var stream = File.OpenRead(resolved);
// Web defaults + lenient parsing (comments, trailing commas) so hand-edited
// config files are accepted.
var model = await JsonSerializer.DeserializeAsync<VexBenchmarkConfigModel>(
stream,
new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true,
ReadCommentHandling = JsonCommentHandling.Skip,
AllowTrailingCommas = true,
}).ConfigureAwait(false);
if (model is null)
{
throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
}
if (model.Scenarios.Count == 0)
{
throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
}
foreach (var scenario in model.Scenarios)
{
scenario.Validate();
}
return new VexBenchmarkConfig(
model.ThresholdMs,
model.MinThroughputPerSecond,
model.MinEventThroughputPerSecond,
model.MaxAllocatedMb,
model.Iterations,
model.Scenarios);
}
// Private JSON binding model; mirrors the public record's shape.
private sealed class VexBenchmarkConfigModel
{
[JsonPropertyName("thresholdMs")]
public double? ThresholdMs { get; init; }
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("minEventThroughputPerSecond")]
public double? MinEventThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }
[JsonPropertyName("iterations")]
public int? Iterations { get; init; }
[JsonPropertyName("scenarios")]
public List<VexScenarioConfig> Scenarios { get; init; } = new();
}
}
/// <summary>
/// Per-scenario benchmark settings bound from JSON. All values are optional;
/// the Resolve* helpers substitute defaults for values that were not
/// explicitly configured as positive numbers.
/// </summary>
internal sealed class VexScenarioConfig
{
private const int DefaultObservationCount = 4_000;
private const int DefaultAliasGroups = 400;
private const int DefaultStatementsPerObservation = 6;
private const int DefaultProductsPerObservation = 3;
private const int DefaultTenants = 3;
private const int DefaultBatchSize = 250;
private const int DefaultSeed = 520_025;
[JsonPropertyName("id")]
public string? Id { get; init; }
[JsonPropertyName("label")]
public string? Label { get; init; }
[JsonPropertyName("observations")]
public int? Observations { get; init; }
[JsonPropertyName("aliasGroups")]
public int? AliasGroups { get; init; }
[JsonPropertyName("statementsPerObservation")]
public int? StatementsPerObservation { get; init; }
[JsonPropertyName("productsPerObservation")]
public int? ProductsPerObservation { get; init; }
[JsonPropertyName("tenants")]
public int? Tenants { get; init; }
[JsonPropertyName("batchSize")]
public int? BatchSize { get; init; }
[JsonPropertyName("seed")]
public int? Seed { get; init; }
[JsonPropertyName("iterations")]
public int? Iterations { get; init; }
[JsonPropertyName("thresholdMs")]
public double? ThresholdMs { get; init; }
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("minEventThroughputPerSecond")]
public double? MinEventThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }
/// <summary>Scenario identifier; falls back to "vex" when unset.</summary>
public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "vex" : Id!.Trim();
/// <summary>Human-readable label; falls back to the scenario id.</summary>
public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim();
public int ResolveObservationCount() => Observations is > 0 ? Observations.Value : DefaultObservationCount;
public int ResolveAliasGroups() => AliasGroups is > 0 ? AliasGroups.Value : DefaultAliasGroups;
public int ResolveStatementsPerObservation() => StatementsPerObservation is > 0 ? StatementsPerObservation.Value : DefaultStatementsPerObservation;
public int ResolveProductsPerObservation() => ProductsPerObservation is > 0 ? ProductsPerObservation.Value : DefaultProductsPerObservation;
public int ResolveTenantCount() => Tenants is > 0 ? Tenants.Value : DefaultTenants;
public int ResolveBatchSize() => BatchSize is > 0 ? BatchSize.Value : DefaultBatchSize;
public int ResolveSeed() => Seed is > 0 ? Seed.Value : DefaultSeed;
// Resolution order: scenario override, then global default, then 3.
public int ResolveIterations(int? defaultIterations)
{
var iterations = Iterations ?? defaultIterations ?? 3;
if (iterations <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires iterations > 0.");
}
return iterations;
}
// NOTE(review): these checks test the Resolve*() results, which already
// coerce non-positive inputs to positive defaults — so none of these
// branches can ever throw. Consider validating the raw nullable values
// (e.g. "Observations is <= 0") instead; confirm intended behavior.
public void Validate()
{
if (ResolveObservationCount() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires observations > 0.");
}
if (ResolveAliasGroups() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires aliasGroups > 0.");
}
if (ResolveStatementsPerObservation() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires statementsPerObservation > 0.");
}
if (ResolveProductsPerObservation() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires productsPerObservation > 0.");
}
}
}

View File

@@ -1,14 +1,14 @@
namespace StellaOps.Bench.LinkNotMerge.Vex;
/// <summary>
/// Raw per-iteration samples and final aggregation produced by one scenario
/// run; all lists are indexed by iteration.
/// </summary>
internal sealed record VexScenarioExecutionResult(
IReadOnlyList<double> TotalDurationsMs,
IReadOnlyList<double> InsertDurationsMs,
IReadOnlyList<double> CorrelationDurationsMs,
IReadOnlyList<double> AllocatedMb,
IReadOnlyList<double> ObservationThroughputsPerSecond,
IReadOnlyList<double> EventThroughputsPerSecond,
int ObservationCount,
int AliasGroups,
// Statement/event counts come from the last iteration's aggregation.
int StatementCount,
int EventCount,
VexAggregationResult AggregationResult);
namespace StellaOps.Bench.LinkNotMerge.Vex;
/// <summary>
/// Raw per-iteration samples and final aggregation produced by one scenario
/// run; all lists are indexed by iteration.
/// </summary>
internal sealed record VexScenarioExecutionResult(
IReadOnlyList<double> TotalDurationsMs,
IReadOnlyList<double> InsertDurationsMs,
IReadOnlyList<double> CorrelationDurationsMs,
IReadOnlyList<double> AllocatedMb,
IReadOnlyList<double> ObservationThroughputsPerSecond,
IReadOnlyList<double> EventThroughputsPerSecond,
int ObservationCount,
int AliasGroups,
// Statement/event counts come from the last iteration's aggregation.
int StatementCount,
int EventCount,
VexAggregationResult AggregationResult);

View File

@@ -1,43 +1,43 @@
using System.Globalization;
namespace StellaOps.Bench.LinkNotMerge.Vex;
/// <summary>
/// Summarized scenario result plus the (optional) configured thresholds.
/// The *Column properties render fixed-width, invariant-culture cells for a
/// console results table.
/// </summary>
internal sealed record VexScenarioResult(
string Id,
string Label,
int Iterations,
int ObservationCount,
int AliasGroups,
int StatementCount,
int EventCount,
DurationStatistics TotalStatistics,
DurationStatistics InsertStatistics,
DurationStatistics CorrelationStatistics,
ThroughputStatistics ObservationThroughputStatistics,
ThroughputStatistics EventThroughputStatistics,
AllocationStatistics AllocationStatistics,
double? ThresholdMs,
double? MinObservationThroughputPerSecond,
double? MinEventThroughputPerSecond,
double? MaxAllocatedThresholdMb)
{
// Id is truncated (not padded) past 28 characters to keep columns aligned.
public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];
public string ObservationsColumn => ObservationCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);
public string StatementColumn => StatementCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(10);
public string EventColumn => EventCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(8);
public string TotalMeanColumn => TotalStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
public string CorrelationMeanColumn => CorrelationStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
public string InsertMeanColumn => InsertStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
// Throughput columns display the worst-case (minimum) rate in thousands/sec.
public string ObservationThroughputColumn => (ObservationThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
public string EventThroughputColumn => (EventThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
public string AllocatedColumn => AllocationStatistics.MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}
using System.Globalization;
namespace StellaOps.Bench.LinkNotMerge.Vex;
internal sealed record VexScenarioResult(
string Id,
string Label,
int Iterations,
int ObservationCount,
int AliasGroups,
int StatementCount,
int EventCount,
DurationStatistics TotalStatistics,
DurationStatistics InsertStatistics,
DurationStatistics CorrelationStatistics,
ThroughputStatistics ObservationThroughputStatistics,
ThroughputStatistics EventThroughputStatistics,
AllocationStatistics AllocationStatistics,
double? ThresholdMs,
double? MinObservationThroughputPerSecond,
double? MinEventThroughputPerSecond,
double? MaxAllocatedThresholdMb)
{
public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];
public string ObservationsColumn => ObservationCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);
public string StatementColumn => StatementCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(10);
public string EventColumn => EventCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(8);
public string TotalMeanColumn => TotalStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
public string CorrelationMeanColumn => CorrelationStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
public string InsertMeanColumn => InsertStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
public string ObservationThroughputColumn => (ObservationThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
public string EventThroughputColumn => (EventThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
public string AllocatedColumn => AllocationStatistics.MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}

View File

@@ -1,98 +1,98 @@
using System.Diagnostics;
namespace StellaOps.Bench.LinkNotMerge.Vex;
/// <summary>
/// Executes one Link-Not-Merge VEX benchmark scenario: replays pre-generated
/// observation seeds in batches (the "insert" phase) and correlates them into
/// linksets/events (the "correlation" phase), capturing per-iteration timing,
/// throughput, and allocation samples.
/// </summary>
internal sealed class VexScenarioRunner
{
    private readonly VexScenarioConfig _config;
    private readonly IReadOnlyList<VexObservationSeed> _seeds;

    public VexScenarioRunner(VexScenarioConfig config)
    {
        _config = config ?? throw new ArgumentNullException(nameof(config));
        // Seeds are generated once so every iteration replays identical input.
        _seeds = VexObservationGenerator.Generate(config);
    }

    /// <summary>
    /// Runs the scenario <paramref name="iterations"/> times and returns the raw samples.
    /// </summary>
    /// <param name="iterations">Number of benchmark iterations; must be positive.</param>
    /// <param name="cancellationToken">Checked before each iteration and each batch.</param>
    /// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="iterations"/> is not positive.</exception>
    public VexScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }
        var totalDurations = new double[iterations];
        var insertDurations = new double[iterations];
        var correlationDurations = new double[iterations];
        var allocated = new double[iterations];
        var observationThroughputs = new double[iterations];
        var eventThroughputs = new double[iterations];
        VexAggregationResult lastAggregation = new(0, 0, 0, Array.Empty<VexEvent>());
        for (var iteration = 0; iteration < iterations; iteration++)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var beforeAllocated = GC.GetTotalAllocatedBytes();
            var insertStopwatch = Stopwatch.StartNew();
            var documents = InsertObservations(_seeds, _config.ResolveBatchSize(), cancellationToken);
            insertStopwatch.Stop();
            var correlationStopwatch = Stopwatch.StartNew();
            var aggregator = new VexLinksetAggregator();
            lastAggregation = aggregator.Correlate(documents);
            correlationStopwatch.Stop();
            var totalElapsed = insertStopwatch.Elapsed + correlationStopwatch.Elapsed;
            var afterAllocated = GC.GetTotalAllocatedBytes();
            totalDurations[iteration] = totalElapsed.TotalMilliseconds;
            insertDurations[iteration] = insertStopwatch.Elapsed.TotalMilliseconds;
            correlationDurations[iteration] = correlationStopwatch.Elapsed.TotalMilliseconds;
            // Max(0, ...) guards against counter anomalies across threads.
            allocated[iteration] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);
            // Clamp elapsed seconds away from zero to avoid division blow-ups
            // on extremely fast iterations.
            var totalSeconds = Math.Max(totalElapsed.TotalSeconds, 0.0001d);
            observationThroughputs[iteration] = _seeds.Count / totalSeconds;
            var eventSeconds = Math.Max(correlationStopwatch.Elapsed.TotalSeconds, 0.0001d);
            // An event count of 0 is reported as 1 so throughput stays non-zero.
            var eventCount = Math.Max(lastAggregation.EventCount, 1);
            eventThroughputs[iteration] = eventCount / eventSeconds;
        }
        return new VexScenarioExecutionResult(
            totalDurations,
            insertDurations,
            correlationDurations,
            allocated,
            observationThroughputs,
            eventThroughputs,
            ObservationCount: _seeds.Count,
            AliasGroups: _config.ResolveAliasGroups(),
            StatementCount: lastAggregation.StatementCount,
            EventCount: lastAggregation.EventCount,
            AggregationResult: lastAggregation);
    }

    // Materializes every seed into a document, honoring cancellation at batch
    // boundaries. Fix: the previous version built a temporary List per batch
    // and then AddRange'd it into the result — a needless allocation per batch
    // that did not change the produced sequence; documents are now appended
    // directly.
    private static IReadOnlyList<VexObservationDocument> InsertObservations(
        IReadOnlyList<VexObservationSeed> seeds,
        int batchSize,
        CancellationToken cancellationToken)
    {
        var documents = new List<VexObservationDocument>(seeds.Count);
        for (var offset = 0; offset < seeds.Count; offset += batchSize)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var remaining = Math.Min(batchSize, seeds.Count - offset);
            for (var index = 0; index < remaining; index++)
            {
                documents.Add(seeds[offset + index].ToDocument());
            }
        }
        return documents;
    }
}
using System.Diagnostics;
namespace StellaOps.Bench.LinkNotMerge.Vex;
internal sealed class VexScenarioRunner
{
private readonly VexScenarioConfig _config;
private readonly IReadOnlyList<VexObservationSeed> _seeds;
public VexScenarioRunner(VexScenarioConfig config)
{
_config = config ?? throw new ArgumentNullException(nameof(config));
// Seeds are generated once so every iteration replays identical input.
_seeds = VexObservationGenerator.Generate(config);
}
public VexScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
{
if (iterations <= 0)
{
throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
}
var totalDurations = new double[iterations];
var insertDurations = new double[iterations];
var correlationDurations = new double[iterations];
var allocated = new double[iterations];
var observationThroughputs = new double[iterations];
var eventThroughputs = new double[iterations];
VexAggregationResult lastAggregation = new(0, 0, 0, Array.Empty<VexEvent>());
for (var iteration = 0; iteration < iterations; iteration++)
{
cancellationToken.ThrowIfCancellationRequested();
var beforeAllocated = GC.GetTotalAllocatedBytes();
var insertStopwatch = Stopwatch.StartNew();
var documents = InsertObservations(_seeds, _config.ResolveBatchSize(), cancellationToken);
insertStopwatch.Stop();
var correlationStopwatch = Stopwatch.StartNew();
var aggregator = new VexLinksetAggregator();
lastAggregation = aggregator.Correlate(documents);
correlationStopwatch.Stop();
var totalElapsed = insertStopwatch.Elapsed + correlationStopwatch.Elapsed;
var afterAllocated = GC.GetTotalAllocatedBytes();
totalDurations[iteration] = totalElapsed.TotalMilliseconds;
insertDurations[iteration] = insertStopwatch.Elapsed.TotalMilliseconds;
correlationDurations[iteration] = correlationStopwatch.Elapsed.TotalMilliseconds;
allocated[iteration] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);
var totalSeconds = Math.Max(totalElapsed.TotalSeconds, 0.0001d);
observationThroughputs[iteration] = _seeds.Count / totalSeconds;
var eventSeconds = Math.Max(correlationStopwatch.Elapsed.TotalSeconds, 0.0001d);
var eventCount = Math.Max(lastAggregation.EventCount, 1);
eventThroughputs[iteration] = eventCount / eventSeconds;
}
return new VexScenarioExecutionResult(
totalDurations,
insertDurations,
correlationDurations,
allocated,
observationThroughputs,
eventThroughputs,
ObservationCount: _seeds.Count,
AliasGroups: _config.ResolveAliasGroups(),
StatementCount: lastAggregation.StatementCount,
EventCount: lastAggregation.EventCount,
AggregationResult: lastAggregation);
}
private static IReadOnlyList<VexObservationDocument> InsertObservations(
IReadOnlyList<VexObservationSeed> seeds,
int batchSize,
CancellationToken cancellationToken)
{
var documents = new List<VexObservationDocument>(seeds.Count);
for (var offset = 0; offset < seeds.Count; offset += batchSize)
{
cancellationToken.ThrowIfCancellationRequested();
var remaining = Math.Min(batchSize, seeds.Count - offset);
var batch = new List<VexObservationDocument>(remaining);
for (var index = 0; index < remaining; index++)
{
batch.Add(seeds[offset + index].ToDocument());
}
documents.AddRange(batch);
}
return documents;
}
}

View File

@@ -1,38 +1,38 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.LinkNotMerge.Baseline;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Tests;
// Verifies that BaselineLoader parses a well-formed baseline CSV into
// one BaselineEntry per data row.
public sealed class BaselineLoaderTests
{
[Fact]
public async Task LoadAsync_ReadsEntries()
{
// Arrange: write a header row plus one scenario row to a temp file.
var path = Path.GetTempFileName();
try
{
await File.WriteAllTextAsync(
path,
"scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb\n" +
"lnm_ingest_baseline,5,5000,500,450,320.5,340.1,360.9,120.2,210.3,15000.0,13500.0,18000.0,16500.0,96.5\n");
var baseline = await BaselineLoader.LoadAsync(path, CancellationToken.None);
// Assert: exactly one entry, keyed by scenario id, with selected
// columns round-tripped exactly.
var entry = Assert.Single(baseline);
Assert.Equal("lnm_ingest_baseline", entry.Key);
Assert.Equal(5, entry.Value.Iterations);
Assert.Equal(5000, entry.Value.Observations);
Assert.Equal(500, entry.Value.Aliases);
Assert.Equal(360.9, entry.Value.MaxTotalMs);
Assert.Equal(16500.0, entry.Value.MinMongoThroughputPerSecond);
Assert.Equal(96.5, entry.Value.MaxAllocatedMb);
}
finally
{
// Clean up the temp file even if an assertion failed.
File.Delete(path);
}
}
}
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.LinkNotMerge.Baseline;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Tests;
// Exercises BaselineLoader against a minimal, well-formed baseline CSV.
public sealed class BaselineLoaderTests
{
[Fact]
public async Task LoadAsync_ReadsEntries()
{
var baselinePath = Path.GetTempFileName();
try
{
// One header row plus a single scenario row.
var csv =
"scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb\n" +
"lnm_ingest_baseline,5,5000,500,450,320.5,340.1,360.9,120.2,210.3,15000.0,13500.0,18000.0,16500.0,96.5\n";
await File.WriteAllTextAsync(baselinePath, csv);
var entries = await BaselineLoader.LoadAsync(baselinePath, CancellationToken.None);
// The single data row must surface as one entry keyed by scenario id.
var parsed = Assert.Single(entries);
Assert.Equal("lnm_ingest_baseline", parsed.Key);
Assert.Equal(5, parsed.Value.Iterations);
Assert.Equal(5000, parsed.Value.Observations);
Assert.Equal(500, parsed.Value.Aliases);
Assert.Equal(360.9, parsed.Value.MaxTotalMs);
Assert.Equal(16500.0, parsed.Value.MinMongoThroughputPerSecond);
Assert.Equal(96.5, parsed.Value.MaxAllocatedMb);
}
finally
{
File.Delete(baselinePath);
}
}
}

View File

@@ -1,81 +1,81 @@
using StellaOps.Bench.LinkNotMerge.Baseline;
using StellaOps.Bench.LinkNotMerge.Reporting;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Tests;
// Covers the regression-detection behaviour of BenchmarkScenarioReport:
// breaches must be flagged against a baseline, and absence of a baseline
// must never report a regression.
public sealed class BenchmarkScenarioReportTests
{
[Fact]
public void RegressionDetection_FlagsBreaches()
{
// Result is deliberately slower (260 vs 180 max ms) and lower-throughput
// than the baseline so every regression axis trips the 1.1x limit.
var result = new ScenarioResult(
Id: "scenario",
Label: "Scenario",
Iterations: 3,
ObservationCount: 1000,
AliasGroups: 100,
LinksetCount: 90,
TotalStatistics: new DurationStatistics(200, 240, 260),
InsertStatistics: new DurationStatistics(80, 90, 100),
CorrelationStatistics: new DurationStatistics(120, 150, 170),
TotalThroughputStatistics: new ThroughputStatistics(8000, 7000),
InsertThroughputStatistics: new ThroughputStatistics(9000, 8000),
AllocationStatistics: new AllocationStatistics(120),
ThresholdMs: null,
MinThroughputThresholdPerSecond: null,
MinMongoThroughputThresholdPerSecond: null,
MaxAllocatedThresholdMb: null);
var baseline = new BaselineEntry(
ScenarioId: "scenario",
Iterations: 3,
Observations: 1000,
Aliases: 100,
Linksets: 90,
MeanTotalMs: 150,
P95TotalMs: 170,
MaxTotalMs: 180,
MeanInsertMs: 60,
MeanCorrelationMs: 90,
MeanThroughputPerSecond: 9000,
MinThroughputPerSecond: 8500,
MeanMongoThroughputPerSecond: 10000,
MinMongoThroughputPerSecond: 9500,
MaxAllocatedMb: 100);
var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1);
Assert.True(report.DurationRegressionBreached);
Assert.True(report.ThroughputRegressionBreached);
Assert.True(report.MongoThroughputRegressionBreached);
// At least one failure message must mention the duration breach.
Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("max duration"));
}
[Fact]
public void RegressionDetection_NoBaseline_NoBreaches()
{
var result = new ScenarioResult(
Id: "scenario",
Label: "Scenario",
Iterations: 3,
ObservationCount: 1000,
AliasGroups: 100,
LinksetCount: 90,
TotalStatistics: new DurationStatistics(200, 220, 230),
InsertStatistics: new DurationStatistics(90, 100, 110),
CorrelationStatistics: new DurationStatistics(110, 120, 130),
TotalThroughputStatistics: new ThroughputStatistics(8000, 7900),
InsertThroughputStatistics: new ThroughputStatistics(9000, 8900),
AllocationStatistics: new AllocationStatistics(64),
ThresholdMs: null,
MinThroughputThresholdPerSecond: null,
MinMongoThroughputThresholdPerSecond: null,
MaxAllocatedThresholdMb: null);
// No baseline and no regression limit: nothing can be breached.
var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null);
Assert.False(report.RegressionBreached);
Assert.Empty(report.BuildRegressionFailureMessages());
}
}
using StellaOps.Bench.LinkNotMerge.Baseline;
using StellaOps.Bench.LinkNotMerge.Reporting;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Tests;
// Regression-detection tests for BenchmarkScenarioReport.
public sealed class BenchmarkScenarioReportTests
{
[Fact]
public void RegressionDetection_FlagsBreaches()
{
// A scenario result that is strictly worse than its baseline on every axis.
var scenarioResult = new ScenarioResult(
Id: "scenario",
Label: "Scenario",
Iterations: 3,
ObservationCount: 1000,
AliasGroups: 100,
LinksetCount: 90,
TotalStatistics: new DurationStatistics(200, 240, 260),
InsertStatistics: new DurationStatistics(80, 90, 100),
CorrelationStatistics: new DurationStatistics(120, 150, 170),
TotalThroughputStatistics: new ThroughputStatistics(8000, 7000),
InsertThroughputStatistics: new ThroughputStatistics(9000, 8000),
AllocationStatistics: new AllocationStatistics(120),
ThresholdMs: null,
MinThroughputThresholdPerSecond: null,
MinMongoThroughputThresholdPerSecond: null,
MaxAllocatedThresholdMb: null);
var baselineEntry = new BaselineEntry(
ScenarioId: "scenario",
Iterations: 3,
Observations: 1000,
Aliases: 100,
Linksets: 90,
MeanTotalMs: 150,
P95TotalMs: 170,
MaxTotalMs: 180,
MeanInsertMs: 60,
MeanCorrelationMs: 90,
MeanThroughputPerSecond: 9000,
MinThroughputPerSecond: 8500,
MeanMongoThroughputPerSecond: 10000,
MinMongoThroughputPerSecond: 9500,
MaxAllocatedMb: 100);
var scenarioReport = new BenchmarkScenarioReport(scenarioResult, baselineEntry, regressionLimit: 1.1);
// Every regression axis should trip, and the messages should call out duration.
Assert.True(scenarioReport.DurationRegressionBreached);
Assert.True(scenarioReport.ThroughputRegressionBreached);
Assert.True(scenarioReport.MongoThroughputRegressionBreached);
Assert.Contains(scenarioReport.BuildRegressionFailureMessages(), failure => failure.Contains("max duration"));
}
[Fact]
public void RegressionDetection_NoBaseline_NoBreaches()
{
var scenarioResult = new ScenarioResult(
Id: "scenario",
Label: "Scenario",
Iterations: 3,
ObservationCount: 1000,
AliasGroups: 100,
LinksetCount: 90,
TotalStatistics: new DurationStatistics(200, 220, 230),
InsertStatistics: new DurationStatistics(90, 100, 110),
CorrelationStatistics: new DurationStatistics(110, 120, 130),
TotalThroughputStatistics: new ThroughputStatistics(8000, 7900),
InsertThroughputStatistics: new ThroughputStatistics(9000, 8900),
AllocationStatistics: new AllocationStatistics(64),
ThresholdMs: null,
MinThroughputThresholdPerSecond: null,
MinMongoThroughputThresholdPerSecond: null,
MaxAllocatedThresholdMb: null);
// Without a baseline there is nothing to regress against.
var scenarioReport = new BenchmarkScenarioReport(scenarioResult, baseline: null, regressionLimit: null);
Assert.False(scenarioReport.RegressionBreached);
Assert.Empty(scenarioReport.BuildRegressionFailureMessages());
}
}

View File

@@ -1,38 +1,38 @@
using System.Linq;
using System.Threading;
using StellaOps.Bench.LinkNotMerge.Baseline;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Tests;
// Smoke-tests the scenario runner: a small fixed-seed configuration must
// produce a result whose counts match the config and whose per-iteration
// metrics are all positive/non-negative.
public sealed class LinkNotMergeScenarioRunnerTests
{
[Fact]
public void Execute_BuildsDeterministicAggregation()
{
// Small workload with a fixed seed so the run is fast and repeatable.
var config = new LinkNotMergeScenarioConfig
{
Id = "unit",
Observations = 120,
AliasGroups = 24,
PurlsPerObservation = 3,
CpesPerObservation = 2,
ReferencesPerObservation = 2,
Tenants = 3,
BatchSize = 40,
Seed = 1337,
};
var runner = new LinkNotMergeScenarioRunner(config);
var result = runner.Execute(iterations: 2, CancellationToken.None);
Assert.Equal(120, result.ObservationCount);
Assert.Equal(24, result.AliasGroups);
// Sanity bounds: durations and throughputs strictly positive,
// allocation figures never negative.
Assert.True(result.TotalDurationsMs.All(value => value > 0));
Assert.True(result.InsertThroughputsPerSecond.All(value => value > 0));
Assert.True(result.TotalThroughputsPerSecond.All(value => value > 0));
Assert.True(result.AllocatedMb.All(value => value >= 0));
// The result's counts must mirror the aggregation it carries.
Assert.Equal(result.AggregationResult.LinksetCount, result.LinksetCount);
Assert.Equal(result.AggregationResult.ObservationCount, result.ObservationCount);
}
}
using System.Linq;
using System.Threading;
using StellaOps.Bench.LinkNotMerge.Baseline;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Tests;
// Smoke test for LinkNotMergeScenarioRunner over a tiny fixed-seed workload.
public sealed class LinkNotMergeScenarioRunnerTests
{
[Fact]
public void Execute_BuildsDeterministicAggregation()
{
var scenarioConfig = new LinkNotMergeScenarioConfig
{
Id = "unit",
Observations = 120,
AliasGroups = 24,
PurlsPerObservation = 3,
CpesPerObservation = 2,
ReferencesPerObservation = 2,
Tenants = 3,
BatchSize = 40,
Seed = 1337,
};
var sut = new LinkNotMergeScenarioRunner(scenarioConfig);
var execution = sut.Execute(iterations: 2, CancellationToken.None);
// Counts must reflect the configuration.
Assert.Equal(120, execution.ObservationCount);
Assert.Equal(24, execution.AliasGroups);
// All per-iteration metrics should be sane: positive timings and
// throughputs, non-negative allocations.
Assert.True(execution.TotalDurationsMs.All(sample => sample > 0));
Assert.True(execution.InsertThroughputsPerSecond.All(sample => sample > 0));
Assert.True(execution.TotalThroughputsPerSecond.All(sample => sample > 0));
Assert.True(execution.AllocatedMb.All(sample => sample >= 0));
// The surfaced counts mirror the embedded aggregation result.
Assert.Equal(execution.AggregationResult.LinksetCount, execution.LinksetCount);
Assert.Equal(execution.AggregationResult.ObservationCount, execution.ObservationCount);
}
}

View File

@@ -1,18 +1,18 @@
namespace StellaOps.Bench.LinkNotMerge.Baseline;
/// <summary>
/// One parsed row of the benchmark baseline CSV: the recorded workload shape
/// (observations/aliases/linksets), timing statistics, throughput figures,
/// and peak allocation for a single scenario.
/// </summary>
internal sealed record BaselineEntry(
string ScenarioId,
int Iterations,
int Observations,
int Aliases,
int Linksets,
double MeanTotalMs,
double P95TotalMs,
double MaxTotalMs,
double MeanInsertMs,
double MeanCorrelationMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MeanMongoThroughputPerSecond,
double MinMongoThroughputPerSecond,
double MaxAllocatedMb);
namespace StellaOps.Bench.LinkNotMerge.Baseline;
/// <summary>
/// Immutable baseline record for one scenario, mirroring the 15 columns of
/// the baseline CSV (workload counts, duration stats in ms, throughput in
/// items/sec, and max allocated MB).
/// </summary>
internal sealed record BaselineEntry(
string ScenarioId,
int Iterations,
int Observations,
int Aliases,
int Linksets,
double MeanTotalMs,
double P95TotalMs,
double MaxTotalMs,
double MeanInsertMs,
double MeanCorrelationMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MeanMongoThroughputPerSecond,
double MinMongoThroughputPerSecond,
double MaxAllocatedMb);

View File

@@ -1,87 +1,87 @@
using System.Globalization;
namespace StellaOps.Bench.LinkNotMerge.Baseline;
// Loads the baseline CSV into a scenario-id -> BaselineEntry map.
// Invariant-culture parsing keeps the file machine-readable regardless of
// the host locale.
internal static class BaselineLoader
{
// Reads the CSV at <paramref name="path"/>. A missing file yields an empty
// map (no baseline recorded yet); a malformed row throws
// InvalidOperationException with file/line context. The first line is
// always treated as a header and skipped.
public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
var resolved = Path.GetFullPath(path);
if (!File.Exists(resolved))
{
return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
}
var result = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
using var reader = new StreamReader(stream);
var lineNumber = 0;
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
var line = await reader.ReadLineAsync().ConfigureAwait(false);
if (line is null)
{
break;
}
lineNumber++;
// Skip the header row and any blank lines.
if (lineNumber == 1 || string.IsNullOrWhiteSpace(line))
{
continue;
}
var parts = line.Split(',', StringSplitOptions.TrimEntries);
if (parts.Length < 15)
{
throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 15 columns, found {parts.Length}).");
}
var entry = new BaselineEntry(
ScenarioId: parts[0],
Iterations: ParseInt(parts[1], resolved, lineNumber),
Observations: ParseInt(parts[2], resolved, lineNumber),
Aliases: ParseInt(parts[3], resolved, lineNumber),
Linksets: ParseInt(parts[4], resolved, lineNumber),
MeanTotalMs: ParseDouble(parts[5], resolved, lineNumber),
P95TotalMs: ParseDouble(parts[6], resolved, lineNumber),
MaxTotalMs: ParseDouble(parts[7], resolved, lineNumber),
MeanInsertMs: ParseDouble(parts[8], resolved, lineNumber),
MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber),
MeanThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber),
MinThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber),
MeanMongoThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber),
MinMongoThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber),
MaxAllocatedMb: ParseDouble(parts[14], resolved, lineNumber));
// Last row wins if a scenario id repeats.
result[entry.ScenarioId] = entry;
}
return result;
}
// Invariant-culture integer parse; throws with file/line context on failure.
private static int ParseInt(string value, string file, int line)
{
if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var result))
{
return result;
}
throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
}
// Invariant-culture floating-point parse; throws with file/line context on failure.
private static double ParseDouble(string value, string file, int line)
{
if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result))
{
return result;
}
throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
}
}
using System.Globalization;
namespace StellaOps.Bench.LinkNotMerge.Baseline;
// Parses the baseline CSV (15 columns, header on line 1) into a
// case-insensitive scenario-id map. Missing file => empty map; malformed
// row => InvalidOperationException with file/line context.
internal static class BaselineLoader
{
public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
var resolved = Path.GetFullPath(path);
if (!File.Exists(resolved))
{
// No baseline yet: callers get an empty map rather than an error.
return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
}
var result = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
using var reader = new StreamReader(stream);
var lineNumber = 0;
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
var line = await reader.ReadLineAsync().ConfigureAwait(false);
if (line is null)
{
break;
}
lineNumber++;
// Header row and blank lines carry no data.
if (lineNumber == 1 || string.IsNullOrWhiteSpace(line))
{
continue;
}
var parts = line.Split(',', StringSplitOptions.TrimEntries);
if (parts.Length < 15)
{
throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 15 columns, found {parts.Length}).");
}
var entry = new BaselineEntry(
ScenarioId: parts[0],
Iterations: ParseInt(parts[1], resolved, lineNumber),
Observations: ParseInt(parts[2], resolved, lineNumber),
Aliases: ParseInt(parts[3], resolved, lineNumber),
Linksets: ParseInt(parts[4], resolved, lineNumber),
MeanTotalMs: ParseDouble(parts[5], resolved, lineNumber),
P95TotalMs: ParseDouble(parts[6], resolved, lineNumber),
MaxTotalMs: ParseDouble(parts[7], resolved, lineNumber),
MeanInsertMs: ParseDouble(parts[8], resolved, lineNumber),
MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber),
MeanThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber),
MinThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber),
MeanMongoThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber),
MinMongoThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber),
MaxAllocatedMb: ParseDouble(parts[14], resolved, lineNumber));
// Duplicate scenario ids: the last row read takes precedence.
result[entry.ScenarioId] = entry;
}
return result;
}
private static int ParseInt(string value, string file, int line)
{
if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var result))
{
return result;
}
throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
}
private static double ParseDouble(string value, string file, int line)
{
if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result))
{
return result;
}
throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
}
}

View File

@@ -1,210 +1,210 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.LinkNotMerge;
// Top-level benchmark configuration: optional global thresholds plus the list
// of scenarios. Global values act as defaults that individual scenarios can
// override.
internal sealed record BenchmarkConfig(
double? ThresholdMs,
double? MinThroughputPerSecond,
double? MinMongoThroughputPerSecond,
double? MaxAllocatedMb,
int? Iterations,
IReadOnlyList<LinkNotMergeScenarioConfig> Scenarios)
{
// Deserializes and validates the JSON config at <paramref name="path"/>.
// Throws FileNotFoundException for a missing file and
// InvalidOperationException for unparsable content, an empty scenario
// list, or an invalid scenario.
public static async Task<BenchmarkConfig> LoadAsync(string path)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
var resolved = Path.GetFullPath(path);
if (!File.Exists(resolved))
{
throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
}
await using var stream = File.OpenRead(resolved);
// Lenient parsing: case-insensitive names, comments, trailing commas.
var model = await JsonSerializer.DeserializeAsync<BenchmarkConfigModel>(
stream,
new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true,
ReadCommentHandling = JsonCommentHandling.Skip,
AllowTrailingCommas = true,
}).ConfigureAwait(false);
if (model is null)
{
throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
}
if (model.Scenarios.Count == 0)
{
throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
}
// Fail fast on any invalid scenario before the benchmark starts.
foreach (var scenario in model.Scenarios)
{
scenario.Validate();
}
return new BenchmarkConfig(
model.ThresholdMs,
model.MinThroughputPerSecond,
model.MinMongoThroughputPerSecond,
model.MaxAllocatedMb,
model.Iterations,
model.Scenarios);
}
// Private deserialization shape mirroring the JSON document.
private sealed class BenchmarkConfigModel
{
[JsonPropertyName("thresholdMs")]
public double? ThresholdMs { get; init; }
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("minMongoThroughputPerSecond")]
public double? MinMongoThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }
[JsonPropertyName("iterations")]
public int? Iterations { get; init; }
[JsonPropertyName("scenarios")]
public List<LinkNotMergeScenarioConfig> Scenarios { get; init; } = new();
}
}
// Per-scenario configuration. All knobs are optional in JSON; the Resolve*
// accessors substitute the defaults below when a value is missing or out of
// range.
internal sealed class LinkNotMergeScenarioConfig
{
private const int DefaultObservationCount = 5_000;
private const int DefaultAliasGroups = 500;
private const int DefaultPurlsPerObservation = 4;
private const int DefaultCpesPerObservation = 2;
private const int DefaultReferencesPerObservation = 3;
private const int DefaultTenants = 4;
private const int DefaultBatchSize = 500;
private const int DefaultSeed = 42_022;
[JsonPropertyName("id")]
public string? Id { get; init; }
[JsonPropertyName("label")]
public string? Label { get; init; }
[JsonPropertyName("observations")]
public int? Observations { get; init; }
[JsonPropertyName("aliasGroups")]
public int? AliasGroups { get; init; }
[JsonPropertyName("purlsPerObservation")]
public int? PurlsPerObservation { get; init; }
[JsonPropertyName("cpesPerObservation")]
public int? CpesPerObservation { get; init; }
[JsonPropertyName("referencesPerObservation")]
public int? ReferencesPerObservation { get; init; }
[JsonPropertyName("tenants")]
public int? Tenants { get; init; }
[JsonPropertyName("batchSize")]
public int? BatchSize { get; init; }
[JsonPropertyName("seed")]
public int? Seed { get; init; }
[JsonPropertyName("iterations")]
public int? Iterations { get; init; }
[JsonPropertyName("thresholdMs")]
public double? ThresholdMs { get; init; }
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("minMongoThroughputPerSecond")]
public double? MinMongoThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }
// Falls back to "linknotmerge" when no id is configured.
public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "linknotmerge" : Id!.Trim();
// Label defaults to the scenario id.
public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim();
// Count knobs: non-positive configured values fall back to the default.
public int ResolveObservationCount() => Observations.HasValue && Observations.Value > 0
? Observations.Value
: DefaultObservationCount;
public int ResolveAliasGroups() => AliasGroups.HasValue && AliasGroups.Value > 0
? AliasGroups.Value
: DefaultAliasGroups;
public int ResolvePurlsPerObservation() => PurlsPerObservation.HasValue && PurlsPerObservation.Value > 0
? PurlsPerObservation.Value
: DefaultPurlsPerObservation;
// Cpes/references allow zero (an observation may have none).
public int ResolveCpesPerObservation() => CpesPerObservation.HasValue && CpesPerObservation.Value >= 0
? CpesPerObservation.Value
: DefaultCpesPerObservation;
public int ResolveReferencesPerObservation() => ReferencesPerObservation.HasValue && ReferencesPerObservation.Value >= 0
? ReferencesPerObservation.Value
: DefaultReferencesPerObservation;
public int ResolveTenantCount() => Tenants.HasValue && Tenants.Value > 0
? Tenants.Value
: DefaultTenants;
public int ResolveBatchSize() => BatchSize.HasValue && BatchSize.Value > 0
? BatchSize.Value
: DefaultBatchSize;
// NOTE(review): a configured seed <= 0 silently falls back to the default
// rather than being rejected — confirm this is intended.
public int ResolveSeed() => Seed.HasValue && Seed.Value > 0
? Seed.Value
: DefaultSeed;
// Scenario iterations override the global default; final fallback is 3.
public int ResolveIterations(int? defaultIterations)
{
var iterations = Iterations ?? defaultIterations ?? 3;
if (iterations <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires iterations > 0.");
}
return iterations;
}
// Cross-field validation, called once at config load time.
public void Validate()
{
if (ResolveObservationCount() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires observations > 0.");
}
if (ResolveAliasGroups() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires aliasGroups > 0.");
}
if (ResolvePurlsPerObservation() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires purlsPerObservation > 0.");
}
if (ResolveTenantCount() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires tenants > 0.");
}
if (ResolveBatchSize() > ResolveObservationCount())
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' batchSize cannot exceed observations.");
}
}
}
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.LinkNotMerge;
// Root benchmark configuration record: global thresholds/iterations plus the
// scenario list loaded from JSON.
internal sealed record BenchmarkConfig(
double? ThresholdMs,
double? MinThroughputPerSecond,
double? MinMongoThroughputPerSecond,
double? MaxAllocatedMb,
int? Iterations,
IReadOnlyList<LinkNotMergeScenarioConfig> Scenarios)
{
// Loads and validates the configuration file; every scenario is validated
// eagerly so bad configs fail before any benchmarking starts.
public static async Task<BenchmarkConfig> LoadAsync(string path)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
var resolved = Path.GetFullPath(path);
if (!File.Exists(resolved))
{
throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
}
await using var stream = File.OpenRead(resolved);
// Web defaults plus comment/trailing-comma tolerance for hand-edited files.
var model = await JsonSerializer.DeserializeAsync<BenchmarkConfigModel>(
stream,
new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true,
ReadCommentHandling = JsonCommentHandling.Skip,
AllowTrailingCommas = true,
}).ConfigureAwait(false);
if (model is null)
{
throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
}
if (model.Scenarios.Count == 0)
{
throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
}
foreach (var scenario in model.Scenarios)
{
scenario.Validate();
}
return new BenchmarkConfig(
model.ThresholdMs,
model.MinThroughputPerSecond,
model.MinMongoThroughputPerSecond,
model.MaxAllocatedMb,
model.Iterations,
model.Scenarios);
}
// JSON deserialization shape; kept private to the record.
private sealed class BenchmarkConfigModel
{
[JsonPropertyName("thresholdMs")]
public double? ThresholdMs { get; init; }
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("minMongoThroughputPerSecond")]
public double? MinMongoThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }
[JsonPropertyName("iterations")]
public int? Iterations { get; init; }
[JsonPropertyName("scenarios")]
public List<LinkNotMergeScenarioConfig> Scenarios { get; init; } = new();
}
}
// Scenario knobs as deserialized from JSON; Resolve* accessors apply the
// defaults below for missing or out-of-range values.
internal sealed class LinkNotMergeScenarioConfig
{
private const int DefaultObservationCount = 5_000;
private const int DefaultAliasGroups = 500;
private const int DefaultPurlsPerObservation = 4;
private const int DefaultCpesPerObservation = 2;
private const int DefaultReferencesPerObservation = 3;
private const int DefaultTenants = 4;
private const int DefaultBatchSize = 500;
private const int DefaultSeed = 42_022;
[JsonPropertyName("id")]
public string? Id { get; init; }
[JsonPropertyName("label")]
public string? Label { get; init; }
[JsonPropertyName("observations")]
public int? Observations { get; init; }
[JsonPropertyName("aliasGroups")]
public int? AliasGroups { get; init; }
[JsonPropertyName("purlsPerObservation")]
public int? PurlsPerObservation { get; init; }
[JsonPropertyName("cpesPerObservation")]
public int? CpesPerObservation { get; init; }
[JsonPropertyName("referencesPerObservation")]
public int? ReferencesPerObservation { get; init; }
[JsonPropertyName("tenants")]
public int? Tenants { get; init; }
[JsonPropertyName("batchSize")]
public int? BatchSize { get; init; }
[JsonPropertyName("seed")]
public int? Seed { get; init; }
[JsonPropertyName("iterations")]
public int? Iterations { get; init; }
[JsonPropertyName("thresholdMs")]
public double? ThresholdMs { get; init; }
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("minMongoThroughputPerSecond")]
public double? MinMongoThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }
// Id/label fall back to "linknotmerge" and the scenario id respectively.
public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "linknotmerge" : Id!.Trim();
public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim();
public int ResolveObservationCount() => Observations.HasValue && Observations.Value > 0
? Observations.Value
: DefaultObservationCount;
public int ResolveAliasGroups() => AliasGroups.HasValue && AliasGroups.Value > 0
? AliasGroups.Value
: DefaultAliasGroups;
public int ResolvePurlsPerObservation() => PurlsPerObservation.HasValue && PurlsPerObservation.Value > 0
? PurlsPerObservation.Value
: DefaultPurlsPerObservation;
// Zero is a legal value for cpes/references; only negatives fall back.
public int ResolveCpesPerObservation() => CpesPerObservation.HasValue && CpesPerObservation.Value >= 0
? CpesPerObservation.Value
: DefaultCpesPerObservation;
public int ResolveReferencesPerObservation() => ReferencesPerObservation.HasValue && ReferencesPerObservation.Value >= 0
? ReferencesPerObservation.Value
: DefaultReferencesPerObservation;
public int ResolveTenantCount() => Tenants.HasValue && Tenants.Value > 0
? Tenants.Value
: DefaultTenants;
public int ResolveBatchSize() => BatchSize.HasValue && BatchSize.Value > 0
? BatchSize.Value
: DefaultBatchSize;
public int ResolveSeed() => Seed.HasValue && Seed.Value > 0
? Seed.Value
: DefaultSeed;
// Precedence: scenario-level iterations, then global default, then 3.
public int ResolveIterations(int? defaultIterations)
{
var iterations = Iterations ?? defaultIterations ?? 3;
if (iterations <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires iterations > 0.");
}
return iterations;
}
// Invoked at config load; throws InvalidOperationException on bad values.
public void Validate()
{
if (ResolveObservationCount() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires observations > 0.");
}
if (ResolveAliasGroups() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires aliasGroups > 0.");
}
if (ResolvePurlsPerObservation() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires purlsPerObservation > 0.");
}
if (ResolveTenantCount() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires tenants > 0.");
}
if (ResolveBatchSize() > ResolveObservationCount())
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' batchSize cannot exceed observations.");
}
}
}

View File

@@ -1,96 +1,96 @@
using System.Diagnostics;
namespace StellaOps.Bench.LinkNotMerge;
// Drives one link-not-merge benchmark scenario: deterministic seeds are
// generated once, then each iteration times batched document creation
// ("insert") and linkset correlation separately.
internal sealed class LinkNotMergeScenarioRunner
{
private readonly LinkNotMergeScenarioConfig _config;
// Generated once so every iteration replays the identical workload.
private readonly IReadOnlyList<ObservationSeed> _seeds;
public LinkNotMergeScenarioRunner(LinkNotMergeScenarioConfig config)
{
_config = config ?? throw new ArgumentNullException(nameof(config));
_seeds = ObservationGenerator.Generate(config);
}
// Runs the scenario for the requested iterations, returning per-iteration
// durations, allocation deltas, and throughput samples. Throws
// ArgumentOutOfRangeException for non-positive iterations and
// OperationCanceledException when the token is signalled.
public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
{
if (iterations <= 0)
{
throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
}
var totalDurations = new double[iterations];
var insertDurations = new double[iterations];
var correlationDurations = new double[iterations];
var allocated = new double[iterations];
var totalThroughputs = new double[iterations];
var insertThroughputs = new double[iterations];
LinksetAggregationResult lastAggregation = new(0, 0, 0, 0, 0);
for (var iteration = 0; iteration < iterations; iteration++)
{
cancellationToken.ThrowIfCancellationRequested();
// Allocation delta spans both phases of the iteration.
var beforeAllocated = GC.GetTotalAllocatedBytes();
var insertStopwatch = Stopwatch.StartNew();
var documents = InsertObservations(_seeds, _config.ResolveBatchSize(), cancellationToken);
insertStopwatch.Stop();
var correlationStopwatch = Stopwatch.StartNew();
var correlator = new LinksetAggregator();
lastAggregation = correlator.Correlate(documents);
correlationStopwatch.Stop();
var totalElapsed = insertStopwatch.Elapsed + correlationStopwatch.Elapsed;
var afterAllocated = GC.GetTotalAllocatedBytes();
totalDurations[iteration] = totalElapsed.TotalMilliseconds;
insertDurations[iteration] = insertStopwatch.Elapsed.TotalMilliseconds;
correlationDurations[iteration] = correlationStopwatch.Elapsed.TotalMilliseconds;
allocated[iteration] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);
// Clamp to a small floor to avoid division by zero on very fast runs.
var totalSeconds = Math.Max(totalElapsed.TotalSeconds, 0.0001d);
totalThroughputs[iteration] = _seeds.Count / totalSeconds;
var insertSeconds = Math.Max(insertStopwatch.Elapsed.TotalSeconds, 0.0001d);
insertThroughputs[iteration] = _seeds.Count / insertSeconds;
}
// The linkset count reflects the last iteration's aggregation; each
// iteration processes the same seeds, so the value is stable.
return new ScenarioExecutionResult(
totalDurations,
insertDurations,
correlationDurations,
allocated,
totalThroughputs,
insertThroughputs,
ObservationCount: _seeds.Count,
AliasGroups: _config.ResolveAliasGroups(),
LinksetCount: lastAggregation.LinksetCount,
TenantCount: _config.ResolveTenantCount(),
AggregationResult: lastAggregation);
}
// Converts seeds to documents in fixed-size batches; cancellation is
// honored once per batch.
private static IReadOnlyList<ObservationDocument> InsertObservations(
IReadOnlyList<ObservationSeed> seeds,
int batchSize,
CancellationToken cancellationToken)
{
var documents = new List<ObservationDocument>(seeds.Count);
for (var offset = 0; offset < seeds.Count; offset += batchSize)
{
cancellationToken.ThrowIfCancellationRequested();
var remaining = Math.Min(batchSize, seeds.Count - offset);
var batch = new List<ObservationDocument>(remaining);
for (var index = 0; index < remaining; index++)
{
batch.Add(seeds[offset + index].ToDocument());
}
documents.AddRange(batch);
}
return documents;
}
}
using System.Diagnostics;

namespace StellaOps.Bench.LinkNotMerge;

/// <summary>
/// Executes one link-not-merge benchmark scenario: generates a deterministic observation
/// workload once, then on each iteration measures (a) the cost of materialising the
/// observation documents in batches (the simulated insert phase) and (b) the cost of
/// correlating those documents into linksets.
/// </summary>
internal sealed class LinkNotMergeScenarioRunner
{
    private readonly LinkNotMergeScenarioConfig _config;

    // Generated once so that every iteration replays exactly the same input.
    private readonly IReadOnlyList<ObservationSeed> _seeds;

    public LinkNotMergeScenarioRunner(LinkNotMergeScenarioConfig config)
    {
        _config = config ?? throw new ArgumentNullException(nameof(config));
        _seeds = ObservationGenerator.Generate(config);
    }

    /// <summary>
    /// Runs the scenario <paramref name="iterations"/> times, collecting per-iteration
    /// latency, allocation, and throughput samples.
    /// </summary>
    /// <param name="iterations">Number of measured iterations; must be positive.</param>
    /// <param name="cancellationToken">Observed at the start of each iteration and before each insert batch.</param>
    /// <returns>Raw per-iteration samples plus the aggregation result from the last iteration.</returns>
    /// <exception cref="ArgumentOutOfRangeException">When <paramref name="iterations"/> is not positive.</exception>
    public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }

        // One sample slot per iteration for each metric.
        var totalDurations = new double[iterations];
        var insertDurations = new double[iterations];
        var correlationDurations = new double[iterations];
        var allocated = new double[iterations];
        var totalThroughputs = new double[iterations];
        var insertThroughputs = new double[iterations];
        LinksetAggregationResult lastAggregation = new(0, 0, 0, 0, 0);

        for (var iteration = 0; iteration < iterations; iteration++)
        {
            cancellationToken.ThrowIfCancellationRequested();

            // Snapshot total managed allocations so the delta attributes allocation to this iteration.
            var beforeAllocated = GC.GetTotalAllocatedBytes();

            // Phase 1: batch-materialise the observation documents (simulated insert).
            var insertStopwatch = Stopwatch.StartNew();
            var documents = InsertObservations(_seeds, _config.ResolveBatchSize(), cancellationToken);
            insertStopwatch.Stop();

            // Phase 2: correlate the documents into linksets.
            var correlationStopwatch = Stopwatch.StartNew();
            var correlator = new LinksetAggregator();
            lastAggregation = correlator.Correlate(documents);
            correlationStopwatch.Stop();

            var totalElapsed = insertStopwatch.Elapsed + correlationStopwatch.Elapsed;
            var afterAllocated = GC.GetTotalAllocatedBytes();

            totalDurations[iteration] = totalElapsed.TotalMilliseconds;
            insertDurations[iteration] = insertStopwatch.Elapsed.TotalMilliseconds;
            correlationDurations[iteration] = correlationStopwatch.Elapsed.TotalMilliseconds;
            // Allocation delta reported in MB, clamped at zero.
            allocated[iteration] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);

            // Floor the elapsed seconds to avoid division by ~zero on very fast iterations.
            var totalSeconds = Math.Max(totalElapsed.TotalSeconds, 0.0001d);
            totalThroughputs[iteration] = _seeds.Count / totalSeconds;
            var insertSeconds = Math.Max(insertStopwatch.Elapsed.TotalSeconds, 0.0001d);
            insertThroughputs[iteration] = _seeds.Count / insertSeconds;
        }

        return new ScenarioExecutionResult(
            totalDurations,
            insertDurations,
            correlationDurations,
            allocated,
            totalThroughputs,
            insertThroughputs,
            ObservationCount: _seeds.Count,
            AliasGroups: _config.ResolveAliasGroups(),
            LinksetCount: lastAggregation.LinksetCount,
            TenantCount: _config.ResolveTenantCount(),
            AggregationResult: lastAggregation);
    }

    // Converts seeds to documents in batches of batchSize, honouring cancellation
    // before each batch. All documents are returned in seed order.
    private static IReadOnlyList<ObservationDocument> InsertObservations(
        IReadOnlyList<ObservationSeed> seeds,
        int batchSize,
        CancellationToken cancellationToken)
    {
        var documents = new List<ObservationDocument>(seeds.Count);
        for (var offset = 0; offset < seeds.Count; offset += batchSize)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var remaining = Math.Min(batchSize, seeds.Count - offset);
            var batch = new List<ObservationDocument>(remaining);
            for (var index = 0; index < remaining; index++)
            {
                batch.Add(seeds[offset + index].ToDocument());
            }
            documents.AddRange(batch);
        }
        return documents;
    }
}

View File

@@ -1,121 +1,121 @@
namespace StellaOps.Bench.LinkNotMerge;

/// <summary>
/// Correlates observation documents into linkset groups keyed by (tenant, alias) and
/// reports de-duplicated purl/CPE/reference counts across all groups.
/// </summary>
internal sealed class LinksetAggregator
{
    /// <summary>
    /// Groups <paramref name="documents"/> by a "tenant|alias" key (one group per alias
    /// per tenant) and accumulates the distinct purls, CPEs, and reference URLs per group.
    /// </summary>
    /// <param name="documents">Observation documents to correlate; must not be null.</param>
    /// <returns>Group count, observation count, and summed de-duplicated totals.</returns>
    public LinksetAggregationResult Correlate(IEnumerable<ObservationDocument> documents)
    {
        ArgumentNullException.ThrowIfNull(documents);

        var groups = new Dictionary<string, LinksetAccumulator>(StringComparer.Ordinal);
        var totalObservations = 0;

        foreach (var document in documents)
        {
            totalObservations++;
            var tenant = document.Tenant;
            var linkset = document.Linkset;
            var aliases = linkset.Aliases;
            var purls = linkset.Purls;
            var cpes = linkset.Cpes;
            var references = linkset.References;

            foreach (var aliasValue in aliases)
            {
                var alias = aliasValue;
                // Build the "tenant|alias" key in a single allocation via string.Create.
                // NOTE: the deconstructed aliasValue shadows the loop variable; the lambda is
                // static so it only ever reads the tuple passed in via `data`.
                var key = string.Create(alias.Length + tenant.Length + 1, (tenant, alias), static (span, data) =>
                {
                    var (tenantValue, aliasValue) = data;
                    tenantValue.AsSpan().CopyTo(span);
                    span[tenantValue.Length] = '|';
                    aliasValue.AsSpan().CopyTo(span[(tenantValue.Length + 1)..]);
                });

                if (!groups.TryGetValue(key, out var accumulator))
                {
                    accumulator = new LinksetAccumulator(tenant, alias);
                    groups[key] = accumulator;
                }

                accumulator.AddPurls(purls);
                accumulator.AddCpes(cpes);
                accumulator.AddReferences(references);
            }
        }

        // Sum the de-duplicated per-group counts into file-wide totals.
        var totalReferences = 0;
        var totalPurls = 0;
        var totalCpes = 0;
        foreach (var accumulator in groups.Values)
        {
            totalReferences += accumulator.ReferenceCount;
            totalPurls += accumulator.PurlCount;
            totalCpes += accumulator.CpeCount;
        }

        return new LinksetAggregationResult(
            LinksetCount: groups.Count,
            ObservationCount: totalObservations,
            TotalPurls: totalPurls,
            TotalCpes: totalCpes,
            TotalReferences: totalReferences);
    }

    /// <summary>
    /// Per-(tenant, alias) accumulator; ordinal hash sets provide de-duplication.
    /// </summary>
    private sealed class LinksetAccumulator
    {
        private readonly HashSet<string> _purls = new(StringComparer.Ordinal);
        private readonly HashSet<string> _cpes = new(StringComparer.Ordinal);
        private readonly HashSet<string> _references = new(StringComparer.Ordinal);

        public LinksetAccumulator(string tenant, string alias)
        {
            Tenant = tenant;
            Alias = alias;
        }

        public string Tenant { get; }
        public string Alias { get; }
        public int PurlCount => _purls.Count;
        public int CpeCount => _cpes.Count;
        public int ReferenceCount => _references.Count;

        // Null/empty entries are skipped rather than counted.
        public void AddPurls(IEnumerable<string> array)
        {
            foreach (var item in array)
            {
                if (!string.IsNullOrEmpty(item))
                    _purls.Add(item);
            }
        }

        // Null/empty entries are skipped rather than counted.
        public void AddCpes(IEnumerable<string> array)
        {
            foreach (var item in array)
            {
                if (!string.IsNullOrEmpty(item))
                    _cpes.Add(item);
            }
        }

        // References are de-duplicated by URL only; other reference fields are ignored.
        public void AddReferences(IEnumerable<ObservationReference> array)
        {
            foreach (var item in array)
            {
                if (!string.IsNullOrEmpty(item.Url))
                    _references.Add(item.Url);
            }
        }
    }
}

/// <summary>Summary of one correlation pass over a document stream.</summary>
internal sealed record LinksetAggregationResult(
    int LinksetCount,
    int ObservationCount,
    int TotalPurls,
    int TotalCpes,
    int TotalReferences);
namespace StellaOps.Bench.LinkNotMerge;

/// <summary>
/// Correlates observation documents into linkset groups keyed by (tenant, alias) and
/// reports de-duplicated purl/CPE/reference counts across all groups.
/// </summary>
internal sealed class LinksetAggregator
{
    /// <summary>
    /// Groups <paramref name="documents"/> by a "tenant|alias" key (one group per alias
    /// per tenant) and accumulates the distinct purls, CPEs, and reference URLs per group.
    /// </summary>
    /// <param name="documents">Observation documents to correlate; must not be null.</param>
    /// <returns>Group count, observation count, and summed de-duplicated totals.</returns>
    public LinksetAggregationResult Correlate(IEnumerable<ObservationDocument> documents)
    {
        ArgumentNullException.ThrowIfNull(documents);

        var groups = new Dictionary<string, LinksetAccumulator>(StringComparer.Ordinal);
        var totalObservations = 0;

        foreach (var document in documents)
        {
            totalObservations++;
            var tenant = document.Tenant;
            var linkset = document.Linkset;
            var aliases = linkset.Aliases;
            var purls = linkset.Purls;
            var cpes = linkset.Cpes;
            var references = linkset.References;

            foreach (var aliasValue in aliases)
            {
                var alias = aliasValue;
                // Build the "tenant|alias" key in a single allocation via string.Create.
                // NOTE: the deconstructed aliasValue shadows the loop variable; the lambda is
                // static so it only ever reads the tuple passed in via `data`.
                var key = string.Create(alias.Length + tenant.Length + 1, (tenant, alias), static (span, data) =>
                {
                    var (tenantValue, aliasValue) = data;
                    tenantValue.AsSpan().CopyTo(span);
                    span[tenantValue.Length] = '|';
                    aliasValue.AsSpan().CopyTo(span[(tenantValue.Length + 1)..]);
                });

                if (!groups.TryGetValue(key, out var accumulator))
                {
                    accumulator = new LinksetAccumulator(tenant, alias);
                    groups[key] = accumulator;
                }

                accumulator.AddPurls(purls);
                accumulator.AddCpes(cpes);
                accumulator.AddReferences(references);
            }
        }

        // Sum the de-duplicated per-group counts into file-wide totals.
        var totalReferences = 0;
        var totalPurls = 0;
        var totalCpes = 0;
        foreach (var accumulator in groups.Values)
        {
            totalReferences += accumulator.ReferenceCount;
            totalPurls += accumulator.PurlCount;
            totalCpes += accumulator.CpeCount;
        }

        return new LinksetAggregationResult(
            LinksetCount: groups.Count,
            ObservationCount: totalObservations,
            TotalPurls: totalPurls,
            TotalCpes: totalCpes,
            TotalReferences: totalReferences);
    }

    /// <summary>
    /// Per-(tenant, alias) accumulator; ordinal hash sets provide de-duplication.
    /// </summary>
    private sealed class LinksetAccumulator
    {
        private readonly HashSet<string> _purls = new(StringComparer.Ordinal);
        private readonly HashSet<string> _cpes = new(StringComparer.Ordinal);
        private readonly HashSet<string> _references = new(StringComparer.Ordinal);

        public LinksetAccumulator(string tenant, string alias)
        {
            Tenant = tenant;
            Alias = alias;
        }

        public string Tenant { get; }
        public string Alias { get; }
        public int PurlCount => _purls.Count;
        public int CpeCount => _cpes.Count;
        public int ReferenceCount => _references.Count;

        // Null/empty entries are skipped rather than counted.
        public void AddPurls(IEnumerable<string> array)
        {
            foreach (var item in array)
            {
                if (!string.IsNullOrEmpty(item))
                    _purls.Add(item);
            }
        }

        // Null/empty entries are skipped rather than counted.
        public void AddCpes(IEnumerable<string> array)
        {
            foreach (var item in array)
            {
                if (!string.IsNullOrEmpty(item))
                    _cpes.Add(item);
            }
        }

        // References are de-duplicated by URL only; other reference fields are ignored.
        public void AddReferences(IEnumerable<ObservationReference> array)
        {
            foreach (var item in array)
            {
                if (!string.IsNullOrEmpty(item.Url))
                    _references.Add(item.Url);
            }
        }
    }
}

/// <summary>Summary of one correlation pass over a document stream.</summary>
internal sealed record LinksetAggregationResult(
    int LinksetCount,
    int ObservationCount,
    int TotalPurls,
    int TotalCpes,
    int TotalReferences);

View File

@@ -1,375 +1,375 @@
using System.Globalization;
using StellaOps.Bench.LinkNotMerge.Baseline;
using StellaOps.Bench.LinkNotMerge.Reporting;

namespace StellaOps.Bench.LinkNotMerge;

/// <summary>
/// CLI entry point for the link-not-merge benchmark harness. Runs every configured
/// scenario, enforces latency/throughput/allocation budgets, compares results against a
/// baseline CSV, and optionally emits table/CSV/JSON/Prometheus outputs.
/// </summary>
internal static class Program
{
    /// <summary>
    /// Runs all scenarios. Returns 0 on success; 1 when any budget or regression check
    /// fails, or when an unhandled error occurs (CI treats non-zero as failure).
    /// </summary>
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = ProgramOptions.Parse(args);
            var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
            var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);

            var results = new List<ScenarioResult>();
            var reports = new List<BenchmarkScenarioReport>();
            // All violations are collected (not thrown) so every failure is reported at once.
            var failures = new List<string>();

            foreach (var scenario in config.Scenarios)
            {
                var iterations = scenario.ResolveIterations(config.Iterations);
                var runner = new LinkNotMergeScenarioRunner(scenario);
                var execution = runner.Execute(iterations, CancellationToken.None);

                // Reduce raw per-iteration samples to summary statistics.
                var totalStats = DurationStatistics.From(execution.TotalDurationsMs);
                var insertStats = DurationStatistics.From(execution.InsertDurationsMs);
                var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs);
                var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
                var throughputStats = ThroughputStatistics.From(execution.TotalThroughputsPerSecond);
                var mongoThroughputStats = ThroughputStatistics.From(execution.InsertThroughputsPerSecond);

                // Budget precedence: scenario override, then CLI flag, then config default.
                var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
                var throughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
                var mongoThroughputFloor = scenario.MinMongoThroughputPerSecond ?? options.MinMongoThroughputPerSecond ?? config.MinMongoThroughputPerSecond;
                var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;

                var result = new ScenarioResult(
                    scenario.ScenarioId,
                    scenario.DisplayLabel,
                    iterations,
                    execution.ObservationCount,
                    execution.AliasGroups,
                    execution.LinksetCount,
                    totalStats,
                    insertStats,
                    correlationStats,
                    throughputStats,
                    mongoThroughputStats,
                    allocationStats,
                    thresholdMs,
                    throughputFloor,
                    mongoThroughputFloor,
                    allocationLimit);
                results.Add(result);

                // Budget checks: each is optional (null means "no budget configured").
                if (thresholdMs is { } threshold && result.TotalStatistics.MaxMs > threshold)
                {
                    failures.Add($"{result.Id} exceeded total latency threshold: {result.TotalStatistics.MaxMs:F2} ms > {threshold:F2} ms");
                }
                if (throughputFloor is { } floor && result.TotalThroughputStatistics.MinPerSecond < floor)
                {
                    failures.Add($"{result.Id} fell below throughput floor: {result.TotalThroughputStatistics.MinPerSecond:N0} obs/s < {floor:N0} obs/s");
                }
                if (mongoThroughputFloor is { } mongoFloor && result.InsertThroughputStatistics.MinPerSecond < mongoFloor)
                {
                    failures.Add($"{result.Id} fell below Mongo throughput floor: {result.InsertThroughputStatistics.MinPerSecond:N0} ops/s < {mongoFloor:N0} ops/s");
                }
                if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit)
                {
                    failures.Add($"{result.Id} exceeded allocation budget: {result.AllocationStatistics.MaxAllocatedMb:F2} MB > {limit:F2} MB");
                }

                // Baseline comparison; a missing baseline entry yields a report without regression checks.
                baseline.TryGetValue(result.Id, out var baselineEntry);
                var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit);
                reports.Add(report);
                failures.AddRange(report.BuildRegressionFailureMessages());
            }

            TablePrinter.Print(results);

            // Optional report sinks, each enabled by its CLI flag.
            if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
            {
                CsvWriter.Write(options.CsvOutPath!, results);
            }
            if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
            {
                var metadata = new BenchmarkJsonMetadata(
                    SchemaVersion: "linknotmerge-bench/1.0",
                    CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(),
                    Commit: options.Commit,
                    Environment: options.Environment);
                await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false);
            }
            if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
            {
                PrometheusWriter.Write(options.PrometheusOutPath!, reports);
            }

            if (failures.Count > 0)
            {
                Console.Error.WriteLine();
                Console.Error.WriteLine("Benchmark failures detected:");
                foreach (var failure in failures.Distinct())
                {
                    Console.Error.WriteLine($" - {failure}");
                }
                return 1;
            }
            return 0;
        }
        catch (Exception ex)
        {
            // Any error maps to a non-zero exit code with a one-line diagnostic.
            Console.Error.WriteLine($"linknotmerge-bench error: {ex.Message}");
            return 1;
        }
    }

    /// <summary>
    /// Parsed CLI options. Null members mean "not specified"; callers fall back to
    /// per-scenario or config-file defaults.
    /// </summary>
    private sealed record ProgramOptions(
        string ConfigPath,
        int? Iterations,
        double? ThresholdMs,
        double? MinThroughputPerSecond,
        double? MinMongoThroughputPerSecond,
        double? MaxAllocatedMb,
        string? CsvOutPath,
        string? JsonOutPath,
        string? PrometheusOutPath,
        string BaselinePath,
        DateTimeOffset? CapturedAtUtc,
        string? Commit,
        string? Environment,
        double? RegressionLimit)
    {
        /// <summary>
        /// Parses command-line arguments. Throws <see cref="ArgumentException"/> on an
        /// unknown flag or a flag missing its value; exits the process on --help/-h.
        /// </summary>
        public static ProgramOptions Parse(string[] args)
        {
            var configPath = DefaultConfigPath();
            var baselinePath = DefaultBaselinePath();
            int? iterations = null;
            double? thresholdMs = null;
            double? minThroughput = null;
            double? minMongoThroughput = null;
            double? maxAllocated = null;
            string? csvOut = null;
            string? jsonOut = null;
            string? promOut = null;
            DateTimeOffset? capturedAt = null;
            string? commit = null;
            string? environment = null;
            double? regressionLimit = null;

            for (var index = 0; index < args.Length; index++)
            {
                var current = args[index];
                switch (current)
                {
                    case "--config":
                        EnsureNext(args, index);
                        configPath = Path.GetFullPath(args[++index]);
                        break;
                    case "--iterations":
                        EnsureNext(args, index);
                        iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--threshold-ms":
                        EnsureNext(args, index);
                        thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-throughput":
                        EnsureNext(args, index);
                        minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-mongo-throughput":
                        EnsureNext(args, index);
                        minMongoThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--max-allocated-mb":
                        EnsureNext(args, index);
                        maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--csv":
                        EnsureNext(args, index);
                        csvOut = args[++index];
                        break;
                    case "--json":
                        EnsureNext(args, index);
                        jsonOut = args[++index];
                        break;
                    case "--prometheus":
                        EnsureNext(args, index);
                        promOut = args[++index];
                        break;
                    case "--baseline":
                        EnsureNext(args, index);
                        baselinePath = Path.GetFullPath(args[++index]);
                        break;
                    case "--captured-at":
                        EnsureNext(args, index);
                        capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
                        break;
                    case "--commit":
                        EnsureNext(args, index);
                        commit = args[++index];
                        break;
                    case "--environment":
                        EnsureNext(args, index);
                        environment = args[++index];
                        break;
                    case "--regression-limit":
                        EnsureNext(args, index);
                        regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--help":
                    case "-h":
                        PrintUsage();
                        System.Environment.Exit(0);
                        break;
                    default:
                        throw new ArgumentException($"Unknown argument '{current}'.");
                }
            }

            return new ProgramOptions(
                configPath,
                iterations,
                thresholdMs,
                minThroughput,
                minMongoThroughput,
                maxAllocated,
                csvOut,
                jsonOut,
                promOut,
                baselinePath,
                capturedAt,
                commit,
                environment,
                regressionLimit);
        }

        // Locates config.json in the bench root. Walks up from AppContext.BaseDirectory;
        // assumes the standard bin/<configuration>/<tfm> output layout - TODO confirm.
        private static string DefaultConfigPath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "config.json");
        }

        // Locates baseline.csv in the bench root; same layout assumption as DefaultConfigPath.
        private static string DefaultBaselinePath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "baseline.csv");
        }

        // Fails fast when a flag that requires a value is the last argument.
        private static void EnsureNext(string[] args, int index)
        {
            if (index + 1 >= args.Length)
            {
                throw new ArgumentException("Missing value for argument.");
            }
        }

        // Writes CLI help text to stdout.
        private static void PrintUsage()
        {
            Console.WriteLine("Usage: linknotmerge-bench [options]");
            Console.WriteLine();
            Console.WriteLine("Options:");
            Console.WriteLine(" --config <path> Path to benchmark configuration JSON.");
            Console.WriteLine(" --iterations <count> Override iteration count.");
            Console.WriteLine(" --threshold-ms <value> Global latency threshold in milliseconds.");
            Console.WriteLine(" --min-throughput <value> Global throughput floor (observations/second).");
            Console.WriteLine(" --min-mongo-throughput <value> Mongo insert throughput floor (ops/second).");
            Console.WriteLine(" --max-allocated-mb <value> Global allocation ceiling (MB).");
            Console.WriteLine(" --csv <path> Write CSV results to path.");
            Console.WriteLine(" --json <path> Write JSON results to path.");
            Console.WriteLine(" --prometheus <path> Write Prometheus exposition metrics to path.");
            Console.WriteLine(" --baseline <path> Baseline CSV path.");
            Console.WriteLine(" --captured-at <iso8601> Timestamp to embed in JSON metadata.");
            Console.WriteLine(" --commit <sha> Commit identifier for metadata.");
            Console.WriteLine(" --environment <name> Environment label for metadata.");
            Console.WriteLine(" --regression-limit <value> Regression multiplier (default 1.15).");
        }
    }
}
/// <summary>
/// Prints a human-readable summary table of scenario results to stdout, one row per scenario.
/// </summary>
internal static class TablePrinter
{
    public static void Print(IEnumerable<ScenarioResult> results)
    {
        // Header and separator mirror the column order of each data row below.
        Console.WriteLine("Scenario | Observations | Aliases | Linksets | Total(ms) | Correl(ms) | Insert(ms) | Min k/s | Mongo k/s | Alloc(MB)");
        Console.WriteLine("---------------------------- | ------------- | ------- | -------- | ---------- | ---------- | ----------- | -------- | --------- | --------");
        foreach (var row in results)
        {
            // Pre-formatted column strings come from the result itself.
            Console.WriteLine(
                $"{row.IdColumn} | {row.ObservationsColumn} | {row.AliasColumn} | {row.LinksetColumn} | " +
                $"{row.TotalMeanColumn} | {row.CorrelationMeanColumn} | {row.InsertMeanColumn} | " +
                $"{row.ThroughputColumn} | {row.MongoThroughputColumn} | {row.AllocatedColumn}");
        }
    }
}
/// <summary>
/// Writes per-scenario benchmark results as a CSV file with a fixed column order,
/// formatting all numbers with the invariant culture.
/// </summary>
internal static class CsvWriter
{
    /// <summary>
    /// Writes <paramref name="results"/> to <paramref name="path"/>, creating parent
    /// directories as needed and overwriting any existing file.
    /// </summary>
    /// <exception cref="ArgumentException">When <paramref name="path"/> is null/whitespace.</exception>
    /// <exception cref="ArgumentNullException">When <paramref name="results"/> is null.</exception>
    public static void Write(string path, IEnumerable<ScenarioResult> results)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(results);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        using var writer = new StreamWriter(stream);
        // Header row; the per-result writes below must stay in this exact order.
        writer.WriteLine("scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb");
        foreach (var result in results)
        {
            writer.Write(result.Id);
            writer.Write(',');
            writer.Write(result.Iterations.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.ObservationCount.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.AliasGroups.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.LinksetCount.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            // Durations and throughputs are emitted with four decimal places.
            writer.Write(result.TotalStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalStatistics.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalStatistics.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.InsertStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.CorrelationStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.InsertThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.InsertThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.AllocationStatistics.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture));
            writer.WriteLine();
        }
    }
}
using System.Globalization;
using StellaOps.Bench.LinkNotMerge.Baseline;
using StellaOps.Bench.LinkNotMerge.Reporting;

namespace StellaOps.Bench.LinkNotMerge;

/// <summary>
/// CLI entry point for the link-not-merge benchmark harness. Runs every configured
/// scenario, enforces latency/throughput/allocation budgets, compares results against a
/// baseline CSV, and optionally emits table/CSV/JSON/Prometheus outputs.
/// </summary>
internal static class Program
{
    /// <summary>
    /// Runs all scenarios. Returns 0 on success; 1 when any budget or regression check
    /// fails, or when an unhandled error occurs (CI treats non-zero as failure).
    /// </summary>
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = ProgramOptions.Parse(args);
            var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
            var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);

            var results = new List<ScenarioResult>();
            var reports = new List<BenchmarkScenarioReport>();
            // All violations are collected (not thrown) so every failure is reported at once.
            var failures = new List<string>();

            foreach (var scenario in config.Scenarios)
            {
                var iterations = scenario.ResolveIterations(config.Iterations);
                var runner = new LinkNotMergeScenarioRunner(scenario);
                var execution = runner.Execute(iterations, CancellationToken.None);

                // Reduce raw per-iteration samples to summary statistics.
                var totalStats = DurationStatistics.From(execution.TotalDurationsMs);
                var insertStats = DurationStatistics.From(execution.InsertDurationsMs);
                var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs);
                var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
                var throughputStats = ThroughputStatistics.From(execution.TotalThroughputsPerSecond);
                var mongoThroughputStats = ThroughputStatistics.From(execution.InsertThroughputsPerSecond);

                // Budget precedence: scenario override, then CLI flag, then config default.
                var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
                var throughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
                var mongoThroughputFloor = scenario.MinMongoThroughputPerSecond ?? options.MinMongoThroughputPerSecond ?? config.MinMongoThroughputPerSecond;
                var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;

                var result = new ScenarioResult(
                    scenario.ScenarioId,
                    scenario.DisplayLabel,
                    iterations,
                    execution.ObservationCount,
                    execution.AliasGroups,
                    execution.LinksetCount,
                    totalStats,
                    insertStats,
                    correlationStats,
                    throughputStats,
                    mongoThroughputStats,
                    allocationStats,
                    thresholdMs,
                    throughputFloor,
                    mongoThroughputFloor,
                    allocationLimit);
                results.Add(result);

                // Budget checks: each is optional (null means "no budget configured").
                if (thresholdMs is { } threshold && result.TotalStatistics.MaxMs > threshold)
                {
                    failures.Add($"{result.Id} exceeded total latency threshold: {result.TotalStatistics.MaxMs:F2} ms > {threshold:F2} ms");
                }
                if (throughputFloor is { } floor && result.TotalThroughputStatistics.MinPerSecond < floor)
                {
                    failures.Add($"{result.Id} fell below throughput floor: {result.TotalThroughputStatistics.MinPerSecond:N0} obs/s < {floor:N0} obs/s");
                }
                if (mongoThroughputFloor is { } mongoFloor && result.InsertThroughputStatistics.MinPerSecond < mongoFloor)
                {
                    failures.Add($"{result.Id} fell below Mongo throughput floor: {result.InsertThroughputStatistics.MinPerSecond:N0} ops/s < {mongoFloor:N0} ops/s");
                }
                if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit)
                {
                    failures.Add($"{result.Id} exceeded allocation budget: {result.AllocationStatistics.MaxAllocatedMb:F2} MB > {limit:F2} MB");
                }

                // Baseline comparison; a missing baseline entry yields a report without regression checks.
                baseline.TryGetValue(result.Id, out var baselineEntry);
                var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit);
                reports.Add(report);
                failures.AddRange(report.BuildRegressionFailureMessages());
            }

            TablePrinter.Print(results);

            // Optional report sinks, each enabled by its CLI flag.
            if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
            {
                CsvWriter.Write(options.CsvOutPath!, results);
            }
            if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
            {
                var metadata = new BenchmarkJsonMetadata(
                    SchemaVersion: "linknotmerge-bench/1.0",
                    CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(),
                    Commit: options.Commit,
                    Environment: options.Environment);
                await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false);
            }
            if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
            {
                PrometheusWriter.Write(options.PrometheusOutPath!, reports);
            }

            if (failures.Count > 0)
            {
                Console.Error.WriteLine();
                Console.Error.WriteLine("Benchmark failures detected:");
                foreach (var failure in failures.Distinct())
                {
                    Console.Error.WriteLine($" - {failure}");
                }
                return 1;
            }
            return 0;
        }
        catch (Exception ex)
        {
            // Any error maps to a non-zero exit code with a one-line diagnostic.
            Console.Error.WriteLine($"linknotmerge-bench error: {ex.Message}");
            return 1;
        }
    }

    /// <summary>
    /// Parsed CLI options. Null members mean "not specified"; callers fall back to
    /// per-scenario or config-file defaults.
    /// </summary>
    private sealed record ProgramOptions(
        string ConfigPath,
        int? Iterations,
        double? ThresholdMs,
        double? MinThroughputPerSecond,
        double? MinMongoThroughputPerSecond,
        double? MaxAllocatedMb,
        string? CsvOutPath,
        string? JsonOutPath,
        string? PrometheusOutPath,
        string BaselinePath,
        DateTimeOffset? CapturedAtUtc,
        string? Commit,
        string? Environment,
        double? RegressionLimit)
    {
        /// <summary>
        /// Parses command-line arguments. Throws <see cref="ArgumentException"/> on an
        /// unknown flag or a flag missing its value; exits the process on --help/-h.
        /// </summary>
        public static ProgramOptions Parse(string[] args)
        {
            var configPath = DefaultConfigPath();
            var baselinePath = DefaultBaselinePath();
            int? iterations = null;
            double? thresholdMs = null;
            double? minThroughput = null;
            double? minMongoThroughput = null;
            double? maxAllocated = null;
            string? csvOut = null;
            string? jsonOut = null;
            string? promOut = null;
            DateTimeOffset? capturedAt = null;
            string? commit = null;
            string? environment = null;
            double? regressionLimit = null;

            for (var index = 0; index < args.Length; index++)
            {
                var current = args[index];
                switch (current)
                {
                    case "--config":
                        EnsureNext(args, index);
                        configPath = Path.GetFullPath(args[++index]);
                        break;
                    case "--iterations":
                        EnsureNext(args, index);
                        iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--threshold-ms":
                        EnsureNext(args, index);
                        thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-throughput":
                        EnsureNext(args, index);
                        minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-mongo-throughput":
                        EnsureNext(args, index);
                        minMongoThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--max-allocated-mb":
                        EnsureNext(args, index);
                        maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--csv":
                        EnsureNext(args, index);
                        csvOut = args[++index];
                        break;
                    case "--json":
                        EnsureNext(args, index);
                        jsonOut = args[++index];
                        break;
                    case "--prometheus":
                        EnsureNext(args, index);
                        promOut = args[++index];
                        break;
                    case "--baseline":
                        EnsureNext(args, index);
                        baselinePath = Path.GetFullPath(args[++index]);
                        break;
                    case "--captured-at":
                        EnsureNext(args, index);
                        capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
                        break;
                    case "--commit":
                        EnsureNext(args, index);
                        commit = args[++index];
                        break;
                    case "--environment":
                        EnsureNext(args, index);
                        environment = args[++index];
                        break;
                    case "--regression-limit":
                        EnsureNext(args, index);
                        regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--help":
                    case "-h":
                        PrintUsage();
                        System.Environment.Exit(0);
                        break;
                    default:
                        throw new ArgumentException($"Unknown argument '{current}'.");
                }
            }

            return new ProgramOptions(
                configPath,
                iterations,
                thresholdMs,
                minThroughput,
                minMongoThroughput,
                maxAllocated,
                csvOut,
                jsonOut,
                promOut,
                baselinePath,
                capturedAt,
                commit,
                environment,
                regressionLimit);
        }

        // Locates config.json in the bench root. Walks up from AppContext.BaseDirectory;
        // assumes the standard bin/<configuration>/<tfm> output layout - TODO confirm.
        private static string DefaultConfigPath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "config.json");
        }

        // Locates baseline.csv in the bench root; same layout assumption as DefaultConfigPath.
        private static string DefaultBaselinePath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "baseline.csv");
        }

        // Fails fast when a flag that requires a value is the last argument.
        private static void EnsureNext(string[] args, int index)
        {
            if (index + 1 >= args.Length)
            {
                throw new ArgumentException("Missing value for argument.");
            }
        }

        // Writes CLI help text to stdout.
        private static void PrintUsage()
        {
            Console.WriteLine("Usage: linknotmerge-bench [options]");
            Console.WriteLine();
            Console.WriteLine("Options:");
            Console.WriteLine(" --config <path> Path to benchmark configuration JSON.");
            Console.WriteLine(" --iterations <count> Override iteration count.");
            Console.WriteLine(" --threshold-ms <value> Global latency threshold in milliseconds.");
            Console.WriteLine(" --min-throughput <value> Global throughput floor (observations/second).");
            Console.WriteLine(" --min-mongo-throughput <value> Mongo insert throughput floor (ops/second).");
            Console.WriteLine(" --max-allocated-mb <value> Global allocation ceiling (MB).");
            Console.WriteLine(" --csv <path> Write CSV results to path.");
            Console.WriteLine(" --json <path> Write JSON results to path.");
            Console.WriteLine(" --prometheus <path> Write Prometheus exposition metrics to path.");
            Console.WriteLine(" --baseline <path> Baseline CSV path.");
            Console.WriteLine(" --captured-at <iso8601> Timestamp to embed in JSON metadata.");
            Console.WriteLine(" --commit <sha> Commit identifier for metadata.");
            Console.WriteLine(" --environment <name> Environment label for metadata.");
            Console.WriteLine(" --regression-limit <value> Regression multiplier (default 1.15).");
        }
    }
}
/// <summary>
/// Prints a human-readable summary table of scenario results to stdout, one row per scenario.
/// </summary>
internal static class TablePrinter
{
    public static void Print(IEnumerable<ScenarioResult> results)
    {
        // Header and separator mirror the column order of each data row below.
        Console.WriteLine("Scenario | Observations | Aliases | Linksets | Total(ms) | Correl(ms) | Insert(ms) | Min k/s | Mongo k/s | Alloc(MB)");
        Console.WriteLine("---------------------------- | ------------- | ------- | -------- | ---------- | ---------- | ----------- | -------- | --------- | --------");
        foreach (var row in results)
        {
            // Pre-formatted column strings come from the result itself.
            Console.WriteLine(
                $"{row.IdColumn} | {row.ObservationsColumn} | {row.AliasColumn} | {row.LinksetColumn} | " +
                $"{row.TotalMeanColumn} | {row.CorrelationMeanColumn} | {row.InsertMeanColumn} | " +
                $"{row.ThroughputColumn} | {row.MongoThroughputColumn} | {row.AllocatedColumn}");
        }
    }
}
/// <summary>
/// Serializes benchmark scenario results to a CSV file using invariant-culture formatting.
/// </summary>
internal static class CsvWriter
{
    /// <summary>
    /// Writes a header row plus one row per result to <paramref name="path"/>,
    /// creating parent directories and truncating any existing file.
    /// </summary>
    /// <param name="path">Destination file path; resolved to a full path.</param>
    /// <param name="results">Scenario results to serialize.</param>
    public static void Write(string path, IEnumerable<ScenarioResult> results)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(results);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        using var writer = new StreamWriter(stream);

        writer.WriteLine("scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb");

        foreach (var result in results)
        {
            // All numeric fields use invariant culture; durations/throughputs get four decimals.
            writer.WriteLine(string.Join(',', new[]
            {
                result.Id,
                result.Iterations.ToString(CultureInfo.InvariantCulture),
                result.ObservationCount.ToString(CultureInfo.InvariantCulture),
                result.AliasGroups.ToString(CultureInfo.InvariantCulture),
                result.LinksetCount.ToString(CultureInfo.InvariantCulture),
                result.TotalStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture),
                result.TotalStatistics.P95Ms.ToString("F4", CultureInfo.InvariantCulture),
                result.TotalStatistics.MaxMs.ToString("F4", CultureInfo.InvariantCulture),
                result.InsertStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture),
                result.CorrelationStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture),
                result.TotalThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture),
                result.TotalThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture),
                result.InsertThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture),
                result.InsertThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture),
                result.AllocationStatistics.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture),
            }));
        }
    }
}

View File

@@ -1,3 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Bench.LinkNotMerge.Tests")]
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Bench.LinkNotMerge.Tests")]

View File

@@ -1,151 +1,151 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.LinkNotMerge.Reporting;
/// <summary>
/// Serializes benchmark scenario reports plus capture metadata to an indented JSON file.
/// </summary>
internal static class BenchmarkJsonWriter
{
    // Web defaults (camelCase property names) with indentation; null properties are omitted.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    };

    /// <summary>
    /// Writes the benchmark document to <paramref name="path"/>, creating parent directories as needed.
    /// </summary>
    /// <param name="path">Destination file path; resolved to a full path.</param>
    /// <param name="metadata">Schema version, capture timestamp, and optional commit/environment labels.</param>
    /// <param name="reports">Per-scenario results with optional baseline/regression data.</param>
    /// <param name="cancellationToken">Cancels serialization and the final flush.</param>
    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());

        // FileMode.Create truncates an existing file; FileShare.None guards against concurrent writers.
        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    // Maps one scenario report onto the JSON DTO shape, including the optional
    // baseline block and the always-present regression block.
    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;
        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.ObservationCount,
            report.Result.AliasGroups,
            report.Result.LinksetCount,
            report.Result.TotalStatistics.MeanMs,
            report.Result.TotalStatistics.P95Ms,
            report.Result.TotalStatistics.MaxMs,
            report.Result.InsertStatistics.MeanMs,
            report.Result.CorrelationStatistics.MeanMs,
            report.Result.TotalThroughputStatistics.MeanPerSecond,
            report.Result.TotalThroughputStatistics.MinPerSecond,
            report.Result.InsertThroughputStatistics.MeanPerSecond,
            report.Result.InsertThroughputStatistics.MinPerSecond,
            report.Result.AllocationStatistics.MaxAllocatedMb,
            report.Result.ThresholdMs,
            report.Result.MinThroughputThresholdPerSecond,
            report.Result.MinMongoThroughputThresholdPerSecond,
            report.Result.MaxAllocatedThresholdMb,
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.Observations,
                    baseline.Aliases,
                    baseline.Linksets,
                    baseline.MeanTotalMs,
                    baseline.P95TotalMs,
                    baseline.MaxTotalMs,
                    baseline.MeanInsertMs,
                    baseline.MeanCorrelationMs,
                    baseline.MeanThroughputPerSecond,
                    baseline.MinThroughputPerSecond,
                    baseline.MeanMongoThroughputPerSecond,
                    baseline.MinMongoThroughputPerSecond,
                    baseline.MaxAllocatedMb),
            new BenchmarkJsonScenarioRegression(
                report.DurationRegressionRatio,
                report.ThroughputRegressionRatio,
                report.MongoThroughputRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    // DTO records below mirror the emitted JSON document structure verbatim.

    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int Observations,
        int Aliases,
        int Linksets,
        double MeanTotalMs,
        double P95TotalMs,
        double MaxTotalMs,
        double MeanInsertMs,
        double MeanCorrelationMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MeanMongoThroughputPerSecond,
        double MinMongoThroughputPerSecond,
        double MaxAllocatedMb,
        double? ThresholdMs,
        double? MinThroughputThresholdPerSecond,
        double? MinMongoThroughputThresholdPerSecond,
        double? MaxAllocatedThresholdMb,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int Observations,
        int Aliases,
        int Linksets,
        double MeanTotalMs,
        double P95TotalMs,
        double MaxTotalMs,
        double MeanInsertMs,
        double MeanCorrelationMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MeanMongoThroughputPerSecond,
        double MinMongoThroughputPerSecond,
        double MaxAllocatedMb);

    private sealed record BenchmarkJsonScenarioRegression(
        double? DurationRatio,
        double? ThroughputRatio,
        double? MongoThroughputRatio,
        double Limit,
        bool Breached);
}
/// <summary>Capture metadata embedded at the top of the benchmark JSON document.</summary>
internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.LinkNotMerge.Reporting;
internal static class BenchmarkJsonWriter
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
};
public static async Task WriteAsync(
string path,
BenchmarkJsonMetadata metadata,
IReadOnlyList<BenchmarkScenarioReport> reports,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
ArgumentNullException.ThrowIfNull(metadata);
ArgumentNullException.ThrowIfNull(reports);
var resolved = Path.GetFullPath(path);
var directory = Path.GetDirectoryName(resolved);
if (!string.IsNullOrEmpty(directory))
{
Directory.CreateDirectory(directory);
}
var document = new BenchmarkJsonDocument(
metadata.SchemaVersion,
metadata.CapturedAtUtc,
metadata.Commit,
metadata.Environment,
reports.Select(CreateScenario).ToArray());
await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
{
var baseline = report.Baseline;
return new BenchmarkJsonScenario(
report.Result.Id,
report.Result.Label,
report.Result.Iterations,
report.Result.ObservationCount,
report.Result.AliasGroups,
report.Result.LinksetCount,
report.Result.TotalStatistics.MeanMs,
report.Result.TotalStatistics.P95Ms,
report.Result.TotalStatistics.MaxMs,
report.Result.InsertStatistics.MeanMs,
report.Result.CorrelationStatistics.MeanMs,
report.Result.TotalThroughputStatistics.MeanPerSecond,
report.Result.TotalThroughputStatistics.MinPerSecond,
report.Result.InsertThroughputStatistics.MeanPerSecond,
report.Result.InsertThroughputStatistics.MinPerSecond,
report.Result.AllocationStatistics.MaxAllocatedMb,
report.Result.ThresholdMs,
report.Result.MinThroughputThresholdPerSecond,
report.Result.MinMongoThroughputThresholdPerSecond,
report.Result.MaxAllocatedThresholdMb,
baseline is null
? null
: new BenchmarkJsonScenarioBaseline(
baseline.Iterations,
baseline.Observations,
baseline.Aliases,
baseline.Linksets,
baseline.MeanTotalMs,
baseline.P95TotalMs,
baseline.MaxTotalMs,
baseline.MeanInsertMs,
baseline.MeanCorrelationMs,
baseline.MeanThroughputPerSecond,
baseline.MinThroughputPerSecond,
baseline.MeanMongoThroughputPerSecond,
baseline.MinMongoThroughputPerSecond,
baseline.MaxAllocatedMb),
new BenchmarkJsonScenarioRegression(
report.DurationRegressionRatio,
report.ThroughputRegressionRatio,
report.MongoThroughputRegressionRatio,
report.RegressionLimit,
report.RegressionBreached));
}
private sealed record BenchmarkJsonDocument(
string SchemaVersion,
DateTimeOffset CapturedAt,
string? Commit,
string? Environment,
IReadOnlyList<BenchmarkJsonScenario> Scenarios);
private sealed record BenchmarkJsonScenario(
string Id,
string Label,
int Iterations,
int Observations,
int Aliases,
int Linksets,
double MeanTotalMs,
double P95TotalMs,
double MaxTotalMs,
double MeanInsertMs,
double MeanCorrelationMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MeanMongoThroughputPerSecond,
double MinMongoThroughputPerSecond,
double MaxAllocatedMb,
double? ThresholdMs,
double? MinThroughputThresholdPerSecond,
double? MinMongoThroughputThresholdPerSecond,
double? MaxAllocatedThresholdMb,
BenchmarkJsonScenarioBaseline? Baseline,
BenchmarkJsonScenarioRegression Regression);
private sealed record BenchmarkJsonScenarioBaseline(
int Iterations,
int Observations,
int Aliases,
int Linksets,
double MeanTotalMs,
double P95TotalMs,
double MaxTotalMs,
double MeanInsertMs,
double MeanCorrelationMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MeanMongoThroughputPerSecond,
double MinMongoThroughputPerSecond,
double MaxAllocatedMb);
private sealed record BenchmarkJsonScenarioRegression(
double? DurationRatio,
double? ThroughputRatio,
double? MongoThroughputRatio,
double Limit,
bool Breached);
}
internal sealed record BenchmarkJsonMetadata(
string SchemaVersion,
DateTimeOffset CapturedAtUtc,
string? Commit,
string? Environment);

View File

@@ -1,89 +1,89 @@
using StellaOps.Bench.LinkNotMerge.Baseline;
namespace StellaOps.Bench.LinkNotMerge.Reporting;
/// <summary>
/// Pairs a scenario result with its optional baseline entry and derives regression
/// ratios for duration, observation throughput, and Mongo insert throughput.
/// </summary>
internal sealed class BenchmarkScenarioReport
{
    // Default multiplier: a metric must degrade by >= 15% before it counts as a regression.
    private const double DefaultRegressionLimit = 1.15d;

    /// <param name="result">Measured scenario result (required).</param>
    /// <param name="baseline">Baseline to compare against; null disables regression checks.</param>
    /// <param name="regressionLimit">Custom limit; null or non-positive falls back to the default.</param>
    public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        Result = result ?? throw new ArgumentNullException(nameof(result));
        Baseline = baseline;
        RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
        // Duration regresses as current/baseline grows; throughput regresses as baseline/current grows.
        DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs);
        ThroughputRegressionRatio = CalculateInverseRatio(result.TotalThroughputStatistics.MinPerSecond, baseline?.MinThroughputPerSecond);
        MongoThroughputRegressionRatio = CalculateInverseRatio(result.InsertThroughputStatistics.MinPerSecond, baseline?.MinMongoThroughputPerSecond);
    }

    public ScenarioResult Result { get; }

    public BaselineEntry? Baseline { get; }

    public double RegressionLimit { get; }

    // Each ratio is null when there is no (usable) baseline; values >= RegressionLimit breach.
    public double? DurationRegressionRatio { get; }

    public double? ThroughputRegressionRatio { get; }

    public double? MongoThroughputRegressionRatio { get; }

    public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool ThroughputRegressionBreached => ThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool MongoThroughputRegressionBreached => MongoThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;

    /// <summary>True when any tracked metric breached the regression limit.</summary>
    public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached || MongoThroughputRegressionBreached;

    /// <summary>
    /// Yields one human-readable failure message per breached metric; empty without a baseline.
    /// </summary>
    public IEnumerable<string> BuildRegressionFailureMessages()
    {
        if (Baseline is null)
        {
            yield break;
        }
        if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
        {
            var delta = (durationRatio - 1d) * 100d;
            yield return $"{Result.Id} exceeded max duration budget: {Result.TotalStatistics.MaxMs:F2} ms vs baseline {Baseline.MaxTotalMs:F2} ms (+{delta:F1}%).";
        }
        if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio)
        {
            var delta = (throughputRatio - 1d) * 100d;
            yield return $"{Result.Id} throughput regressed: min {Result.TotalThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinThroughputPerSecond:N0} obs/s (-{delta:F1}%).";
        }
        if (MongoThroughputRegressionBreached && MongoThroughputRegressionRatio is { } mongoRatio)
        {
            var delta = (mongoRatio - 1d) * 100d;
            yield return $"{Result.Id} Mongo throughput regressed: min {Result.InsertThroughputStatistics.MinPerSecond:N0} ops/s vs baseline {Baseline.MinMongoThroughputPerSecond:N0} ops/s (-{delta:F1}%).";
        }
    }

    // current / baseline; null when baseline is missing or non-positive.
    private static double? CalculateRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }
        return current / baseline.Value;
    }

    // baseline / current; null when baseline is missing or non-positive,
    // +infinity when current throughput collapsed to zero or below.
    private static double? CalculateInverseRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }
        if (current <= 0d)
        {
            return double.PositiveInfinity;
        }
        return baseline.Value / current;
    }
}
using StellaOps.Bench.LinkNotMerge.Baseline;
namespace StellaOps.Bench.LinkNotMerge.Reporting;
internal sealed class BenchmarkScenarioReport
{
private const double DefaultRegressionLimit = 1.15d;
public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
{
Result = result ?? throw new ArgumentNullException(nameof(result));
Baseline = baseline;
RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs);
ThroughputRegressionRatio = CalculateInverseRatio(result.TotalThroughputStatistics.MinPerSecond, baseline?.MinThroughputPerSecond);
MongoThroughputRegressionRatio = CalculateInverseRatio(result.InsertThroughputStatistics.MinPerSecond, baseline?.MinMongoThroughputPerSecond);
}
public ScenarioResult Result { get; }
public BaselineEntry? Baseline { get; }
public double RegressionLimit { get; }
public double? DurationRegressionRatio { get; }
public double? ThroughputRegressionRatio { get; }
public double? MongoThroughputRegressionRatio { get; }
public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit;
public bool ThroughputRegressionBreached => ThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;
public bool MongoThroughputRegressionBreached => MongoThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;
public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached || MongoThroughputRegressionBreached;
public IEnumerable<string> BuildRegressionFailureMessages()
{
if (Baseline is null)
{
yield break;
}
if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
{
var delta = (durationRatio - 1d) * 100d;
yield return $"{Result.Id} exceeded max duration budget: {Result.TotalStatistics.MaxMs:F2} ms vs baseline {Baseline.MaxTotalMs:F2} ms (+{delta:F1}%).";
}
if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio)
{
var delta = (throughputRatio - 1d) * 100d;
yield return $"{Result.Id} throughput regressed: min {Result.TotalThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinThroughputPerSecond:N0} obs/s (-{delta:F1}%).";
}
if (MongoThroughputRegressionBreached && MongoThroughputRegressionRatio is { } mongoRatio)
{
var delta = (mongoRatio - 1d) * 100d;
yield return $"{Result.Id} Mongo throughput regressed: min {Result.InsertThroughputStatistics.MinPerSecond:N0} ops/s vs baseline {Baseline.MinMongoThroughputPerSecond:N0} ops/s (-{delta:F1}%).";
}
}
private static double? CalculateRatio(double current, double? baseline)
{
if (!baseline.HasValue || baseline.Value <= 0d)
{
return null;
}
return current / baseline.Value;
}
private static double? CalculateInverseRatio(double current, double? baseline)
{
if (!baseline.HasValue || baseline.Value <= 0d)
{
return null;
}
if (current <= 0d)
{
return double.PositiveInfinity;
}
return baseline.Value / current;
}
}

View File

@@ -1,101 +1,101 @@
using System.Globalization;
using System.Text;
namespace StellaOps.Bench.LinkNotMerge.Reporting;
/// <summary>
/// Writes benchmark scenario reports in the Prometheus text exposition format.
/// </summary>
internal static class PrometheusWriter
{
    /// <summary>
    /// Renders all scenario gauges to <paramref name="path"/> as UTF-8,
    /// creating parent directories as needed.
    /// </summary>
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);
        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }
        var builder = new StringBuilder();
        // Metric-family HELP/TYPE headers are emitted once, before any samples.
        builder.AppendLine("# HELP linknotmerge_bench_total_ms Link-Not-Merge benchmark total duration metrics (milliseconds).");
        builder.AppendLine("# TYPE linknotmerge_bench_total_ms gauge");
        builder.AppendLine("# HELP linknotmerge_bench_correlation_ms Link-Not-Merge benchmark correlation duration metrics (milliseconds).");
        builder.AppendLine("# TYPE linknotmerge_bench_correlation_ms gauge");
        builder.AppendLine("# HELP linknotmerge_bench_insert_ms Link-Not-Merge benchmark Mongo insert duration metrics (milliseconds).");
        builder.AppendLine("# TYPE linknotmerge_bench_insert_ms gauge");
        builder.AppendLine("# HELP linknotmerge_bench_throughput_per_sec Link-Not-Merge benchmark throughput metrics (observations per second).");
        builder.AppendLine("# TYPE linknotmerge_bench_throughput_per_sec gauge");
        builder.AppendLine("# HELP linknotmerge_bench_mongo_throughput_per_sec Link-Not-Merge benchmark Mongo throughput metrics (operations per second).");
        builder.AppendLine("# TYPE linknotmerge_bench_mongo_throughput_per_sec gauge");
        builder.AppendLine("# HELP linknotmerge_bench_allocated_mb Link-Not-Merge benchmark allocation metrics (megabytes).");
        builder.AppendLine("# TYPE linknotmerge_bench_allocated_mb gauge");
        foreach (var report in reports)
        {
            // Scenario id is used as the single label; escape it for label-value syntax.
            var scenario = Escape(report.Result.Id);
            AppendMetric(builder, "linknotmerge_bench_mean_total_ms", scenario, report.Result.TotalStatistics.MeanMs);
            AppendMetric(builder, "linknotmerge_bench_p95_total_ms", scenario, report.Result.TotalStatistics.P95Ms);
            AppendMetric(builder, "linknotmerge_bench_max_total_ms", scenario, report.Result.TotalStatistics.MaxMs);
            AppendMetric(builder, "linknotmerge_bench_threshold_ms", scenario, report.Result.ThresholdMs);
            AppendMetric(builder, "linknotmerge_bench_mean_correlation_ms", scenario, report.Result.CorrelationStatistics.MeanMs);
            AppendMetric(builder, "linknotmerge_bench_mean_insert_ms", scenario, report.Result.InsertStatistics.MeanMs);
            AppendMetric(builder, "linknotmerge_bench_mean_throughput_per_sec", scenario, report.Result.TotalThroughputStatistics.MeanPerSecond);
            AppendMetric(builder, "linknotmerge_bench_min_throughput_per_sec", scenario, report.Result.TotalThroughputStatistics.MinPerSecond);
            AppendMetric(builder, "linknotmerge_bench_throughput_floor_per_sec", scenario, report.Result.MinThroughputThresholdPerSecond);
            AppendMetric(builder, "linknotmerge_bench_mean_mongo_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MeanPerSecond);
            AppendMetric(builder, "linknotmerge_bench_min_mongo_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MinPerSecond);
            AppendMetric(builder, "linknotmerge_bench_mongo_throughput_floor_per_sec", scenario, report.Result.MinMongoThroughputThresholdPerSecond);
            AppendMetric(builder, "linknotmerge_bench_max_allocated_mb", scenario, report.Result.AllocationStatistics.MaxAllocatedMb);
            AppendMetric(builder, "linknotmerge_bench_max_allocated_threshold_mb", scenario, report.Result.MaxAllocatedThresholdMb);
            // Baseline and regression gauges are emitted only when the data exists.
            if (report.Baseline is { } baseline)
            {
                AppendMetric(builder, "linknotmerge_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs);
                AppendMetric(builder, "linknotmerge_bench_baseline_min_throughput_per_sec", scenario, baseline.MinThroughputPerSecond);
                AppendMetric(builder, "linknotmerge_bench_baseline_min_mongo_throughput_per_sec", scenario, baseline.MinMongoThroughputPerSecond);
            }
            if (report.DurationRegressionRatio is { } durationRatio)
            {
                AppendMetric(builder, "linknotmerge_bench_duration_regression_ratio", scenario, durationRatio);
            }
            if (report.ThroughputRegressionRatio is { } throughputRatio)
            {
                AppendMetric(builder, "linknotmerge_bench_throughput_regression_ratio", scenario, throughputRatio);
            }
            if (report.MongoThroughputRegressionRatio is { } mongoRatio)
            {
                AppendMetric(builder, "linknotmerge_bench_mongo_throughput_regression_ratio", scenario, mongoRatio);
            }
            AppendMetric(builder, "linknotmerge_bench_regression_limit", scenario, report.RegressionLimit);
            AppendMetric(builder, "linknotmerge_bench_regression_breached", scenario, report.RegressionBreached ? 1 : 0);
        }
        File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
    }

    // Appends one sample line: metric{scenario="..."} value.
    // G17 guarantees round-trippable doubles; null values are skipped entirely.
    private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
    {
        if (!value.HasValue)
        {
            return;
        }
        builder.Append(metric);
        builder.Append("{scenario=\"");
        builder.Append(scenario);
        builder.Append("\"} ");
        builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture));
    }

    // Escapes backslashes and double quotes for use inside a Prometheus label value.
    private static string Escape(string value) =>
        value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}
using System.Globalization;
using System.Text;
namespace StellaOps.Bench.LinkNotMerge.Reporting;
internal static class PrometheusWriter
{
public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
ArgumentNullException.ThrowIfNull(reports);
var resolved = Path.GetFullPath(path);
var directory = Path.GetDirectoryName(resolved);
if (!string.IsNullOrEmpty(directory))
{
Directory.CreateDirectory(directory);
}
var builder = new StringBuilder();
builder.AppendLine("# HELP linknotmerge_bench_total_ms Link-Not-Merge benchmark total duration metrics (milliseconds).");
builder.AppendLine("# TYPE linknotmerge_bench_total_ms gauge");
builder.AppendLine("# HELP linknotmerge_bench_correlation_ms Link-Not-Merge benchmark correlation duration metrics (milliseconds).");
builder.AppendLine("# TYPE linknotmerge_bench_correlation_ms gauge");
builder.AppendLine("# HELP linknotmerge_bench_insert_ms Link-Not-Merge benchmark Mongo insert duration metrics (milliseconds).");
builder.AppendLine("# TYPE linknotmerge_bench_insert_ms gauge");
builder.AppendLine("# HELP linknotmerge_bench_throughput_per_sec Link-Not-Merge benchmark throughput metrics (observations per second).");
builder.AppendLine("# TYPE linknotmerge_bench_throughput_per_sec gauge");
builder.AppendLine("# HELP linknotmerge_bench_mongo_throughput_per_sec Link-Not-Merge benchmark Mongo throughput metrics (operations per second).");
builder.AppendLine("# TYPE linknotmerge_bench_mongo_throughput_per_sec gauge");
builder.AppendLine("# HELP linknotmerge_bench_allocated_mb Link-Not-Merge benchmark allocation metrics (megabytes).");
builder.AppendLine("# TYPE linknotmerge_bench_allocated_mb gauge");
foreach (var report in reports)
{
var scenario = Escape(report.Result.Id);
AppendMetric(builder, "linknotmerge_bench_mean_total_ms", scenario, report.Result.TotalStatistics.MeanMs);
AppendMetric(builder, "linknotmerge_bench_p95_total_ms", scenario, report.Result.TotalStatistics.P95Ms);
AppendMetric(builder, "linknotmerge_bench_max_total_ms", scenario, report.Result.TotalStatistics.MaxMs);
AppendMetric(builder, "linknotmerge_bench_threshold_ms", scenario, report.Result.ThresholdMs);
AppendMetric(builder, "linknotmerge_bench_mean_correlation_ms", scenario, report.Result.CorrelationStatistics.MeanMs);
AppendMetric(builder, "linknotmerge_bench_mean_insert_ms", scenario, report.Result.InsertStatistics.MeanMs);
AppendMetric(builder, "linknotmerge_bench_mean_throughput_per_sec", scenario, report.Result.TotalThroughputStatistics.MeanPerSecond);
AppendMetric(builder, "linknotmerge_bench_min_throughput_per_sec", scenario, report.Result.TotalThroughputStatistics.MinPerSecond);
AppendMetric(builder, "linknotmerge_bench_throughput_floor_per_sec", scenario, report.Result.MinThroughputThresholdPerSecond);
AppendMetric(builder, "linknotmerge_bench_mean_mongo_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MeanPerSecond);
AppendMetric(builder, "linknotmerge_bench_min_mongo_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MinPerSecond);
AppendMetric(builder, "linknotmerge_bench_mongo_throughput_floor_per_sec", scenario, report.Result.MinMongoThroughputThresholdPerSecond);
AppendMetric(builder, "linknotmerge_bench_max_allocated_mb", scenario, report.Result.AllocationStatistics.MaxAllocatedMb);
AppendMetric(builder, "linknotmerge_bench_max_allocated_threshold_mb", scenario, report.Result.MaxAllocatedThresholdMb);
if (report.Baseline is { } baseline)
{
AppendMetric(builder, "linknotmerge_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs);
AppendMetric(builder, "linknotmerge_bench_baseline_min_throughput_per_sec", scenario, baseline.MinThroughputPerSecond);
AppendMetric(builder, "linknotmerge_bench_baseline_min_mongo_throughput_per_sec", scenario, baseline.MinMongoThroughputPerSecond);
}
if (report.DurationRegressionRatio is { } durationRatio)
{
AppendMetric(builder, "linknotmerge_bench_duration_regression_ratio", scenario, durationRatio);
}
if (report.ThroughputRegressionRatio is { } throughputRatio)
{
AppendMetric(builder, "linknotmerge_bench_throughput_regression_ratio", scenario, throughputRatio);
}
if (report.MongoThroughputRegressionRatio is { } mongoRatio)
{
AppendMetric(builder, "linknotmerge_bench_mongo_throughput_regression_ratio", scenario, mongoRatio);
}
AppendMetric(builder, "linknotmerge_bench_regression_limit", scenario, report.RegressionLimit);
AppendMetric(builder, "linknotmerge_bench_regression_breached", scenario, report.RegressionBreached ? 1 : 0);
}
File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
}
private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
{
if (!value.HasValue)
{
return;
}
builder.Append(metric);
builder.Append("{scenario=\"");
builder.Append(scenario);
builder.Append("\"} ");
builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture));
}
private static string Escape(string value) =>
value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}

View File

@@ -1,14 +1,14 @@
namespace StellaOps.Bench.LinkNotMerge;
/// <summary>
/// Raw per-iteration measurements captured while executing one benchmark scenario,
/// plus the final aggregation output and dataset dimensions.
/// </summary>
internal sealed record ScenarioExecutionResult(
    IReadOnlyList<double> TotalDurationsMs,
    IReadOnlyList<double> InsertDurationsMs,
    IReadOnlyList<double> CorrelationDurationsMs,
    IReadOnlyList<double> AllocatedMb,
    IReadOnlyList<double> TotalThroughputsPerSecond,
    IReadOnlyList<double> InsertThroughputsPerSecond,
    int ObservationCount,
    int AliasGroups,
    int LinksetCount,
    int TenantCount,
    LinksetAggregationResult AggregationResult);
namespace StellaOps.Bench.LinkNotMerge;
internal sealed record ScenarioExecutionResult(
IReadOnlyList<double> TotalDurationsMs,
IReadOnlyList<double> InsertDurationsMs,
IReadOnlyList<double> CorrelationDurationsMs,
IReadOnlyList<double> AllocatedMb,
IReadOnlyList<double> TotalThroughputsPerSecond,
IReadOnlyList<double> InsertThroughputsPerSecond,
int ObservationCount,
int AliasGroups,
int LinksetCount,
int TenantCount,
LinksetAggregationResult AggregationResult);

View File

@@ -1,42 +1,42 @@
using System.Globalization;
namespace StellaOps.Bench.LinkNotMerge;
/// <summary>
/// Aggregated statistics and optional thresholds for one benchmark scenario,
/// plus pre-padded column strings consumed by <c>TablePrinter</c>.
/// </summary>
internal sealed record ScenarioResult(
    string Id,
    string Label,
    int Iterations,
    int ObservationCount,
    int AliasGroups,
    int LinksetCount,
    DurationStatistics TotalStatistics,
    DurationStatistics InsertStatistics,
    DurationStatistics CorrelationStatistics,
    ThroughputStatistics TotalThroughputStatistics,
    ThroughputStatistics InsertThroughputStatistics,
    AllocationStatistics AllocationStatistics,
    double? ThresholdMs,
    double? MinThroughputThresholdPerSecond,
    double? MinMongoThroughputThresholdPerSecond,
    double? MaxAllocatedThresholdMb)
{
    // Fixed-width table columns (invariant culture). Id is truncated/padded to 28 chars;
    // throughput columns are scaled to thousands per second.
    public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];

    public string ObservationsColumn => ObservationCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);

    public string AliasColumn => AliasGroups.ToString("N0", CultureInfo.InvariantCulture).PadLeft(8);

    public string LinksetColumn => LinksetCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(9);

    public string TotalMeanColumn => TotalStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string CorrelationMeanColumn => CorrelationStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string InsertMeanColumn => InsertStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string ThroughputColumn => (TotalThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);

    public string MongoThroughputColumn => (InsertThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);

    public string AllocatedColumn => AllocationStatistics.MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}
using System.Globalization;
namespace StellaOps.Bench.LinkNotMerge;
/// <summary>
/// Aggregated metrics for one link-not-merge benchmark scenario, plus helpers
/// that render each metric as a fixed-width column for the console report table.
/// </summary>
internal sealed record ScenarioResult(
    string Id,
    string Label,
    int Iterations,
    int ObservationCount,
    int AliasGroups,
    int LinksetCount,
    DurationStatistics TotalStatistics,
    DurationStatistics InsertStatistics,
    DurationStatistics CorrelationStatistics,
    ThroughputStatistics TotalThroughputStatistics,
    ThroughputStatistics InsertThroughputStatistics,
    AllocationStatistics AllocationStatistics,
    double? ThresholdMs,
    double? MinThroughputThresholdPerSecond,
    double? MinMongoThroughputThresholdPerSecond,
    double? MaxAllocatedThresholdMb)
{
    // Scenario id padded (or truncated) to a 28-character column.
    public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];
    public string ObservationsColumn => ObservationCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);
    public string AliasColumn => AliasGroups.ToString("N0", CultureInfo.InvariantCulture).PadLeft(8);
    public string LinksetColumn => LinksetCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(9);
    public string TotalMeanColumn => TotalStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    public string CorrelationMeanColumn => CorrelationStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    public string InsertMeanColumn => InsertStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    // Throughput columns divide by 1,000 so they read as thousands/sec.
    public string ThroughputColumn => (TotalThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
    public string MongoThroughputColumn => (InsertThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
    public string AllocatedColumn => AllocationStatistics.MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}

View File

@@ -1,84 +1,84 @@
namespace StellaOps.Bench.LinkNotMerge;
/// <summary>Latency summary (mean, p95, max) computed from raw duration samples in milliseconds.</summary>
internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
    /// <summary>Builds statistics from the sample set; all zeros when no samples exist.</summary>
    public static DurationStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new DurationStatistics(0, 0, 0);
        }
        // Sort a private copy so the caller's collection stays untouched.
        var ordered = values.ToArray();
        Array.Sort(ordered);
        var sum = 0d;
        for (var index = 0; index < values.Count; index++)
        {
            sum += values[index];
        }
        return new DurationStatistics(
            MeanMs: sum / values.Count,
            P95Ms: Percentile(ordered, 95),
            MaxMs: ordered[^1]);
    }

    // Percentile with linear interpolation between the two nearest 0-based ranks.
    private static double Percentile(IReadOnlyList<double> sorted, double percentile)
    {
        if (sorted.Count == 0)
        {
            return 0;
        }
        var position = percentile / 100d * (sorted.Count - 1);
        var lowerIndex = (int)Math.Floor(position);
        var upperIndex = (int)Math.Ceiling(position);
        if (upperIndex >= sorted.Count)
        {
            return sorted[lowerIndex];
        }
        var fraction = position - lowerIndex;
        return sorted[lowerIndex] + fraction * (sorted[upperIndex] - sorted[lowerIndex]);
    }
}
/// <summary>Throughput summary (mean and worst-case operations/second) across iterations.</summary>
internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
    /// <summary>Computes mean and minimum throughput; both are zero for an empty sample set.</summary>
    public static ThroughputStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new ThroughputStatistics(0, 0);
        }
        var sum = 0d;
        var smallest = double.MaxValue;
        for (var index = 0; index < values.Count; index++)
        {
            var sample = values[index];
            sum += sample;
            smallest = Math.Min(smallest, sample);
        }
        return new ThroughputStatistics(sum / values.Count, smallest);
    }
}
/// <summary>Peak managed allocation (MB) observed across iterations.</summary>
internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
    /// <summary>Returns the largest sample, or 0 for an empty set (result is never negative).</summary>
    public static AllocationStatistics From(IReadOnlyList<double> values)
    {
        var peak = 0d;
        for (var index = 0; index < values.Count; index++)
        {
            peak = Math.Max(peak, values[index]);
        }
        return new AllocationStatistics(peak);
    }
}
namespace StellaOps.Bench.LinkNotMerge;
/// <summary>Latency summary statistics (mean, p95, max) in milliseconds.</summary>
internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
    /// <summary>Computes statistics over the samples; returns zeros for an empty set.</summary>
    public static DurationStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new DurationStatistics(0, 0, 0);
        }
        // Sort a copy so the caller's list is left untouched.
        var sorted = values.ToArray();
        Array.Sort(sorted);
        var total = 0d;
        foreach (var value in values)
        {
            total += value;
        }
        var mean = total / values.Count;
        var p95 = Percentile(sorted, 95);
        var max = sorted[^1];
        return new DurationStatistics(mean, p95, max);
    }
    // Percentile with linear interpolation between the two nearest ranks.
    private static double Percentile(IReadOnlyList<double> sorted, double percentile)
    {
        if (sorted.Count == 0)
        {
            return 0;
        }
        var rank = (percentile / 100d) * (sorted.Count - 1);
        var lower = (int)Math.Floor(rank);
        var upper = (int)Math.Ceiling(rank);
        var weight = rank - lower;
        if (upper >= sorted.Count)
        {
            return sorted[lower];
        }
        return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
    }
}
/// <summary>Throughput summary (mean and worst-case operations/second) across iterations.</summary>
internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
    /// <summary>Computes mean and minimum throughput; returns zeros for an empty set.</summary>
    public static ThroughputStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new ThroughputStatistics(0, 0);
        }
        var total = 0d;
        var min = double.MaxValue;
        foreach (var value in values)
        {
            total += value;
            min = Math.Min(min, value);
        }
        var mean = total / values.Count;
        return new ThroughputStatistics(mean, min);
    }
}
/// <summary>Peak managed allocation (MB) observed across iterations.</summary>
internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
    /// <summary>Returns the largest sample; 0 for an empty set (seeded max is never negative).</summary>
    public static AllocationStatistics From(IReadOnlyList<double> values)
    {
        var max = 0d;
        foreach (var value in values)
        {
            max = Math.Max(max, value);
        }
        return new AllocationStatistics(max);
    }
}

View File

@@ -1,38 +1,38 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.Notify.Baseline;
using Xunit;
namespace StellaOps.Bench.Notify.Tests;
/// <summary>Unit tests for <see cref="BaselineLoader"/> CSV parsing.</summary>
public sealed class BaselineLoaderTests
{
    [Fact]
    public async Task LoadAsync_ReadsBaselineEntries()
    {
        // Arrange: write a header row plus one data row to a temp CSV file.
        var path = Path.GetTempFileName();
        try
        {
            await File.WriteAllTextAsync(
                path,
                "scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb\n" +
                "notify_dispatch_density_05,5,5000,25000,120.5,150.1,199.9,42000.5,39000.2,85.7\n");
            // Act.
            var entries = await BaselineLoader.LoadAsync(path, CancellationToken.None);
            // Assert: exactly one entry, keyed by scenario id, with columns parsed.
            var entry = Assert.Single(entries);
            Assert.Equal("notify_dispatch_density_05", entry.Key);
            Assert.Equal(5, entry.Value.Iterations);
            Assert.Equal(5000, entry.Value.EventCount);
            Assert.Equal(25000, entry.Value.DeliveryCount);
            Assert.Equal(120.5, entry.Value.MeanMs);
            Assert.Equal(39000.2, entry.Value.MinThroughputPerSecond);
            Assert.Equal(85.7, entry.Value.MaxAllocatedMb);
        }
        finally
        {
            // Always remove the temp file, even when an assertion fails.
            File.Delete(path);
        }
    }
}
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.Notify.Baseline;
using Xunit;
namespace StellaOps.Bench.Notify.Tests;
/// <summary>Unit tests for <see cref="BaselineLoader"/> CSV parsing.</summary>
public sealed class BaselineLoaderTests
{
    [Fact]
    public async Task LoadAsync_ReadsBaselineEntries()
    {
        // Arrange: write a header row plus one data row to a temp CSV file.
        var path = Path.GetTempFileName();
        try
        {
            await File.WriteAllTextAsync(
                path,
                "scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb\n" +
                "notify_dispatch_density_05,5,5000,25000,120.5,150.1,199.9,42000.5,39000.2,85.7\n");
            // Act.
            var entries = await BaselineLoader.LoadAsync(path, CancellationToken.None);
            // Assert: exactly one entry, keyed by scenario id, with columns parsed.
            var entry = Assert.Single(entries);
            Assert.Equal("notify_dispatch_density_05", entry.Key);
            Assert.Equal(5, entry.Value.Iterations);
            Assert.Equal(5000, entry.Value.EventCount);
            Assert.Equal(25000, entry.Value.DeliveryCount);
            Assert.Equal(120.5, entry.Value.MeanMs);
            Assert.Equal(39000.2, entry.Value.MinThroughputPerSecond);
            Assert.Equal(85.7, entry.Value.MaxAllocatedMb);
        }
        finally
        {
            // Always remove the temp file, even when an assertion fails.
            File.Delete(path);
        }
    }
}

View File

@@ -1,85 +1,85 @@
using System.Linq;
using StellaOps.Bench.Notify.Baseline;
using StellaOps.Bench.Notify.Reporting;
using Xunit;
namespace StellaOps.Bench.Notify.Tests;
/// <summary>Unit tests for <see cref="BenchmarkScenarioReport"/> regression detection.</summary>
public sealed class BenchmarkScenarioReportTests
{
    [Fact]
    public void RegressionDetection_FlagsLatencies()
    {
        // Result is slower (200 vs 150 mean ms) and lower-throughput than the baseline,
        // which should breach the 1.1x regression limit on both axes.
        var result = new ScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            TotalEvents: 1000,
            TotalRules: 100,
            ActionsPerRule: 2,
            AverageMatchesPerEvent: 10,
            MinMatchesPerEvent: 8,
            MaxMatchesPerEvent: 12,
            AverageDeliveriesPerEvent: 20,
            TotalDeliveries: 20000,
            MeanMs: 200,
            P95Ms: 250,
            MaxMs: 300,
            MeanThroughputPerSecond: 50000,
            MinThroughputPerSecond: 40000,
            MaxAllocatedMb: 100,
            ThresholdMs: null,
            MinThroughputThresholdPerSecond: null,
            MaxAllocatedThresholdMb: null);
        var baseline = new BaselineEntry(
            ScenarioId: "scenario",
            Iterations: 3,
            EventCount: 1000,
            DeliveryCount: 20000,
            MeanMs: 150,
            P95Ms: 180,
            MaxMs: 200,
            MeanThroughputPerSecond: 60000,
            MinThroughputPerSecond: 50000,
            MaxAllocatedMb: 90);
        var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1);
        Assert.True(report.DurationRegressionBreached);
        Assert.True(report.ThroughputRegressionBreached);
        Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("max duration"));
    }
    [Fact]
    public void RegressionDetection_NoBaseline_NoBreaches()
    {
        // Without a baseline there is nothing to regress against: no breaches, no messages.
        var result = new ScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            TotalEvents: 1000,
            TotalRules: 100,
            ActionsPerRule: 2,
            AverageMatchesPerEvent: 10,
            MinMatchesPerEvent: 8,
            MaxMatchesPerEvent: 12,
            AverageDeliveriesPerEvent: 20,
            TotalDeliveries: 20000,
            MeanMs: 200,
            P95Ms: 250,
            MaxMs: 300,
            MeanThroughputPerSecond: 50000,
            MinThroughputPerSecond: 40000,
            MaxAllocatedMb: 100,
            ThresholdMs: null,
            MinThroughputThresholdPerSecond: null,
            MaxAllocatedThresholdMb: null);
        var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null);
        Assert.False(report.DurationRegressionBreached);
        Assert.False(report.ThroughputRegressionBreached);
        Assert.Empty(report.BuildRegressionFailureMessages());
    }
}
using System.Linq;
using StellaOps.Bench.Notify.Baseline;
using StellaOps.Bench.Notify.Reporting;
using Xunit;
namespace StellaOps.Bench.Notify.Tests;
/// <summary>Unit tests for <see cref="BenchmarkScenarioReport"/> regression detection.</summary>
public sealed class BenchmarkScenarioReportTests
{
    [Fact]
    public void RegressionDetection_FlagsLatencies()
    {
        // Result is slower (200 vs 150 mean ms) and lower-throughput than the baseline,
        // which should breach the 1.1x regression limit on both axes.
        var result = new ScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            TotalEvents: 1000,
            TotalRules: 100,
            ActionsPerRule: 2,
            AverageMatchesPerEvent: 10,
            MinMatchesPerEvent: 8,
            MaxMatchesPerEvent: 12,
            AverageDeliveriesPerEvent: 20,
            TotalDeliveries: 20000,
            MeanMs: 200,
            P95Ms: 250,
            MaxMs: 300,
            MeanThroughputPerSecond: 50000,
            MinThroughputPerSecond: 40000,
            MaxAllocatedMb: 100,
            ThresholdMs: null,
            MinThroughputThresholdPerSecond: null,
            MaxAllocatedThresholdMb: null);
        var baseline = new BaselineEntry(
            ScenarioId: "scenario",
            Iterations: 3,
            EventCount: 1000,
            DeliveryCount: 20000,
            MeanMs: 150,
            P95Ms: 180,
            MaxMs: 200,
            MeanThroughputPerSecond: 60000,
            MinThroughputPerSecond: 50000,
            MaxAllocatedMb: 90);
        var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1);
        Assert.True(report.DurationRegressionBreached);
        Assert.True(report.ThroughputRegressionBreached);
        Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("max duration"));
    }
    [Fact]
    public void RegressionDetection_NoBaseline_NoBreaches()
    {
        // Without a baseline there is nothing to regress against: no breaches, no messages.
        var result = new ScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            TotalEvents: 1000,
            TotalRules: 100,
            ActionsPerRule: 2,
            AverageMatchesPerEvent: 10,
            MinMatchesPerEvent: 8,
            MaxMatchesPerEvent: 12,
            AverageDeliveriesPerEvent: 20,
            TotalDeliveries: 20000,
            MeanMs: 200,
            P95Ms: 250,
            MaxMs: 300,
            MeanThroughputPerSecond: 50000,
            MinThroughputPerSecond: 40000,
            MaxAllocatedMb: 100,
            ThresholdMs: null,
            MinThroughputThresholdPerSecond: null,
            MaxAllocatedThresholdMb: null);
        var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null);
        Assert.False(report.DurationRegressionBreached);
        Assert.False(report.ThroughputRegressionBreached);
        Assert.Empty(report.BuildRegressionFailureMessages());
    }
}

View File

@@ -1,33 +1,33 @@
using System.Threading;
using Xunit;
namespace StellaOps.Bench.Notify.Tests;
/// <summary>Unit tests for <see cref="NotifyScenarioRunner"/> metric computation.</summary>
public sealed class NotifyScenarioRunnerTests
{
    [Fact]
    public void Execute_ComputesDeterministicMetrics()
    {
        // Small synthetic scenario so the run stays fast.
        var config = new NotifyScenarioConfig
        {
            Id = "unit_test",
            EventCount = 500,
            RuleCount = 40,
            ActionsPerRule = 3,
            MatchRate = 0.25,
            TenantCount = 4,
            ChannelCount = 16
        };
        var runner = new NotifyScenarioRunner(config);
        // Run two iterations; each should produce a positive duration sample.
        var result = runner.Execute(2, CancellationToken.None);
        Assert.Equal(config.ResolveEventCount(), result.TotalEvents);
        Assert.Equal(config.ResolveRuleCount(), result.TotalRules);
        Assert.Equal(config.ResolveActionsPerRule(), result.ActionsPerRule);
        Assert.True(result.TotalMatches > 0);
        // Every match fans out into one delivery per configured action.
        Assert.Equal(result.TotalMatches * result.ActionsPerRule, result.TotalDeliveries);
        Assert.Equal(2, result.Durations.Count);
        Assert.All(result.Durations, value => Assert.True(value > 0));
    }
}
using System.Threading;
using Xunit;
namespace StellaOps.Bench.Notify.Tests;
/// <summary>Unit tests for <see cref="NotifyScenarioRunner"/> metric computation.</summary>
public sealed class NotifyScenarioRunnerTests
{
    [Fact]
    public void Execute_ComputesDeterministicMetrics()
    {
        // Small synthetic scenario so the run stays fast.
        var config = new NotifyScenarioConfig
        {
            Id = "unit_test",
            EventCount = 500,
            RuleCount = 40,
            ActionsPerRule = 3,
            MatchRate = 0.25,
            TenantCount = 4,
            ChannelCount = 16
        };
        var runner = new NotifyScenarioRunner(config);
        // Run two iterations; each should produce a positive duration sample.
        var result = runner.Execute(2, CancellationToken.None);
        Assert.Equal(config.ResolveEventCount(), result.TotalEvents);
        Assert.Equal(config.ResolveRuleCount(), result.TotalRules);
        Assert.Equal(config.ResolveActionsPerRule(), result.ActionsPerRule);
        Assert.True(result.TotalMatches > 0);
        // Every match fans out into one delivery per configured action.
        Assert.Equal(result.TotalMatches * result.ActionsPerRule, result.TotalDeliveries);
        Assert.Equal(2, result.Durations.Count);
        Assert.All(result.Durations, value => Assert.True(value > 0));
    }
}

View File

@@ -1,64 +1,64 @@
using System.IO;
using StellaOps.Bench.Notify.Baseline;
using StellaOps.Bench.Notify.Reporting;
using Xunit;
namespace StellaOps.Bench.Notify.Tests;
/// <summary>Unit tests for <see cref="PrometheusWriter"/> metric emission.</summary>
public sealed class PrometheusWriterTests
{
    [Fact]
    public void Write_EmitsScenarioMetrics()
    {
        // A fully-populated result with thresholds and a baseline attached.
        var result = new ScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            TotalEvents: 1000,
            TotalRules: 100,
            ActionsPerRule: 2,
            AverageMatchesPerEvent: 10,
            MinMatchesPerEvent: 8,
            MaxMatchesPerEvent: 12,
            AverageDeliveriesPerEvent: 20,
            TotalDeliveries: 20000,
            MeanMs: 200,
            P95Ms: 250,
            MaxMs: 300,
            MeanThroughputPerSecond: 50000,
            MinThroughputPerSecond: 40000,
            MaxAllocatedMb: 100,
            ThresholdMs: 900,
            MinThroughputThresholdPerSecond: 35000,
            MaxAllocatedThresholdMb: 150);
        var baseline = new BaselineEntry(
            ScenarioId: "scenario",
            Iterations: 3,
            EventCount: 1000,
            DeliveryCount: 20000,
            MeanMs: 180,
            P95Ms: 210,
            MaxMs: 240,
            MeanThroughputPerSecond: 52000,
            MinThroughputPerSecond: 41000,
            MaxAllocatedMb: 95);
        var report = new BenchmarkScenarioReport(result, baseline);
        var path = Path.GetTempFileName();
        try
        {
            PrometheusWriter.Write(path, new[] { report });
            var content = File.ReadAllText(path);
            // Output must contain current metrics, labeled values, and baseline metrics.
            Assert.Contains("notify_dispatch_bench_mean_ms", content);
            Assert.Contains("scenario\"} 200", content);
            Assert.Contains("notify_dispatch_bench_baseline_mean_ms", content);
        }
        finally
        {
            // Clean up the temp metrics file.
            File.Delete(path);
        }
    }
}
using System.IO;
using StellaOps.Bench.Notify.Baseline;
using StellaOps.Bench.Notify.Reporting;
using Xunit;
namespace StellaOps.Bench.Notify.Tests;
/// <summary>Unit tests for <see cref="PrometheusWriter"/> metric emission.</summary>
public sealed class PrometheusWriterTests
{
    [Fact]
    public void Write_EmitsScenarioMetrics()
    {
        // A fully-populated result with thresholds and a baseline attached.
        var result = new ScenarioResult(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            TotalEvents: 1000,
            TotalRules: 100,
            ActionsPerRule: 2,
            AverageMatchesPerEvent: 10,
            MinMatchesPerEvent: 8,
            MaxMatchesPerEvent: 12,
            AverageDeliveriesPerEvent: 20,
            TotalDeliveries: 20000,
            MeanMs: 200,
            P95Ms: 250,
            MaxMs: 300,
            MeanThroughputPerSecond: 50000,
            MinThroughputPerSecond: 40000,
            MaxAllocatedMb: 100,
            ThresholdMs: 900,
            MinThroughputThresholdPerSecond: 35000,
            MaxAllocatedThresholdMb: 150);
        var baseline = new BaselineEntry(
            ScenarioId: "scenario",
            Iterations: 3,
            EventCount: 1000,
            DeliveryCount: 20000,
            MeanMs: 180,
            P95Ms: 210,
            MaxMs: 240,
            MeanThroughputPerSecond: 52000,
            MinThroughputPerSecond: 41000,
            MaxAllocatedMb: 95);
        var report = new BenchmarkScenarioReport(result, baseline);
        var path = Path.GetTempFileName();
        try
        {
            PrometheusWriter.Write(path, new[] { report });
            var content = File.ReadAllText(path);
            // Output must contain current metrics, labeled values, and baseline metrics.
            Assert.Contains("notify_dispatch_bench_mean_ms", content);
            Assert.Contains("scenario\"} 200", content);
            Assert.Contains("notify_dispatch_bench_baseline_mean_ms", content);
        }
        finally
        {
            // Clean up the temp metrics file.
            File.Delete(path);
        }
    }
}

View File

@@ -1,13 +1,13 @@
namespace StellaOps.Bench.Notify.Baseline;
/// <summary>
/// One row of the committed baseline CSV: prior benchmark metrics for a single
/// scenario, used to detect regressions in later runs.
/// </summary>
internal sealed record BaselineEntry(
    string ScenarioId,
    int Iterations,
    int EventCount,
    int DeliveryCount,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MaxAllocatedMb);
namespace StellaOps.Bench.Notify.Baseline;
/// <summary>
/// One row of the committed baseline CSV: prior benchmark metrics for a single
/// scenario, used to detect regressions in later runs.
/// </summary>
internal sealed record BaselineEntry(
    string ScenarioId,
    int Iterations,
    int EventCount,
    int DeliveryCount,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MaxAllocatedMb);

View File

@@ -1,87 +1,87 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Bench.Notify.Baseline;
/// <summary>Loads baseline benchmark entries from a CSV file keyed by scenario id.</summary>
internal static class BaselineLoader
{
    /// <summary>
    /// Parses the baseline CSV at <paramref name="path"/>. Returns an empty map when
    /// the file does not exist; later rows overwrite earlier rows with the same
    /// scenario id. Throws <see cref="InvalidOperationException"/> on malformed rows.
    /// </summary>
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }

        var results = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);

        // FileOptions.Asynchronous makes the awaited reads truly asynchronous.
        await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read, bufferSize: 4096, FileOptions.Asynchronous | FileOptions.SequentialScan);
        using var reader = new StreamReader(stream);

        var lineNumber = 0;
        while (true)
        {
            // Pass the token so cancellation can interrupt an in-flight read,
            // instead of only being observed between lines.
            var line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false);
            if (line is null)
            {
                break;
            }

            lineNumber++;
            // Line 1 is the CSV header; blank lines are skipped.
            if (lineNumber == 1 || string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            var parts = line.Split(',', StringSplitOptions.TrimEntries);
            if (parts.Length < 10)
            {
                throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 10 columns, found {parts.Length}).");
            }

            var entry = new BaselineEntry(
                ScenarioId: parts[0],
                Iterations: ParseInt(parts[1], resolved, lineNumber),
                EventCount: ParseInt(parts[2], resolved, lineNumber),
                DeliveryCount: ParseInt(parts[3], resolved, lineNumber),
                MeanMs: ParseDouble(parts[4], resolved, lineNumber),
                P95Ms: ParseDouble(parts[5], resolved, lineNumber),
                MaxMs: ParseDouble(parts[6], resolved, lineNumber),
                MeanThroughputPerSecond: ParseDouble(parts[7], resolved, lineNumber),
                MinThroughputPerSecond: ParseDouble(parts[8], resolved, lineNumber),
                MaxAllocatedMb: ParseDouble(parts[9], resolved, lineNumber));

            results[entry.ScenarioId] = entry;
        }

        return results;
    }

    // Parses an invariant-culture integer, failing with file/line context.
    private static int ParseInt(string value, string file, int line)
    {
        if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
    }

    // Parses an invariant-culture floating-point value, failing with file/line context.
    private static double ParseDouble(string value, string file, int line)
    {
        if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
    }
}
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Bench.Notify.Baseline;
/// <summary>Loads baseline benchmark entries from a CSV file keyed by scenario id.</summary>
internal static class BaselineLoader
{
    /// <summary>
    /// Parses the baseline CSV at <paramref name="path"/>. Returns an empty map when
    /// the file does not exist; later rows overwrite earlier rows with the same
    /// scenario id. Throws <see cref="InvalidOperationException"/> on malformed rows.
    /// </summary>
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }
        var results = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var reader = new StreamReader(stream);
        var lineNumber = 0;
        while (true)
        {
            // Cancellation is only observed between lines, not during a read.
            cancellationToken.ThrowIfCancellationRequested();
            var line = await reader.ReadLineAsync().ConfigureAwait(false);
            if (line is null)
            {
                break;
            }
            lineNumber++;
            // Line 1 is the CSV header; blank lines are skipped.
            if (lineNumber == 1 || string.IsNullOrWhiteSpace(line))
            {
                continue;
            }
            var parts = line.Split(',', StringSplitOptions.TrimEntries);
            if (parts.Length < 10)
            {
                throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 10 columns, found {parts.Length}).");
            }
            var entry = new BaselineEntry(
                ScenarioId: parts[0],
                Iterations: ParseInt(parts[1], resolved, lineNumber),
                EventCount: ParseInt(parts[2], resolved, lineNumber),
                DeliveryCount: ParseInt(parts[3], resolved, lineNumber),
                MeanMs: ParseDouble(parts[4], resolved, lineNumber),
                P95Ms: ParseDouble(parts[5], resolved, lineNumber),
                MaxMs: ParseDouble(parts[6], resolved, lineNumber),
                MeanThroughputPerSecond: ParseDouble(parts[7], resolved, lineNumber),
                MinThroughputPerSecond: ParseDouble(parts[8], resolved, lineNumber),
                MaxAllocatedMb: ParseDouble(parts[9], resolved, lineNumber));
            // Last row wins for duplicate scenario ids.
            results[entry.ScenarioId] = entry;
        }
        return results;
    }
    // Parses an invariant-culture integer, failing with file/line context.
    private static int ParseInt(string value, string file, int line)
    {
        if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }
        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
    }
    // Parses an invariant-culture floating-point value, failing with file/line context.
    private static double ParseDouble(string value, string file, int line)
    {
        if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed))
        {
            return parsed;
        }
        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
    }
}

View File

@@ -1,220 +1,220 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.Notify;
/// <summary>
/// Root benchmark configuration: global thresholds plus the list of scenarios.
/// Loaded from a JSON file via <see cref="LoadAsync"/>.
/// </summary>
internal sealed record BenchmarkConfig(
    double? ThresholdMs,
    double? MinThroughputPerSecond,
    double? MaxAllocatedMb,
    int? Iterations,
    IReadOnlyList<NotifyScenarioConfig> Scenarios)
{
    // Cached: JsonSerializerOptions is expensive to construct and safe to reuse (CA1869).
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true,
        ReadCommentHandling = JsonCommentHandling.Skip,
        AllowTrailingCommas = true
    };

    /// <summary>
    /// Loads and validates the benchmark configuration at <paramref name="path"/>.
    /// Throws when the file is missing, unparsable, has no scenarios, or any
    /// scenario fails validation.
    /// </summary>
    public static async Task<BenchmarkConfig> LoadAsync(string path)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
        }

        await using var stream = File.OpenRead(resolved);
        var model = await JsonSerializer.DeserializeAsync<BenchmarkConfigModel>(stream, SerializerOptions).ConfigureAwait(false);

        if (model is null)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
        }

        if (model.Scenarios.Count == 0)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
        }

        // Fail fast on any invalid scenario before the benchmark starts.
        foreach (var scenario in model.Scenarios)
        {
            scenario.Validate();
        }

        return new BenchmarkConfig(
            model.ThresholdMs,
            model.MinThroughputPerSecond,
            model.MaxAllocatedMb,
            model.Iterations,
            model.Scenarios);
    }

    // JSON-shape mirror bound by System.Text.Json, then validated above.
    private sealed class BenchmarkConfigModel
    {
        [JsonPropertyName("thresholdMs")]
        public double? ThresholdMs { get; init; }

        [JsonPropertyName("minThroughputPerSecond")]
        public double? MinThroughputPerSecond { get; init; }

        [JsonPropertyName("maxAllocatedMb")]
        public double? MaxAllocatedMb { get; init; }

        [JsonPropertyName("iterations")]
        public int? Iterations { get; init; }

        [JsonPropertyName("scenarios")]
        public List<NotifyScenarioConfig> Scenarios { get; init; } = new();
    }
}
/// <summary>
/// One benchmark scenario's knobs (event/rule/action counts, match rate, tenants,
/// channels, thresholds). Bound from JSON; Resolve* methods apply defaults and
/// validate ranges, throwing <see cref="InvalidOperationException"/> on bad input.
/// </summary>
internal sealed class NotifyScenarioConfig
{
    // Defaults applied when the JSON omits the corresponding property.
    private const int DefaultEventCount = 10_000;
    private const int DefaultRuleCount = 200;
    private const int DefaultActionsPerRule = 3;
    private const double DefaultMatchRate = 0.25d;
    private const int DefaultTenantCount = 4;
    private const int DefaultChannelCount = 8;
    // Fallback seed used when the hash-derived seed collapses to zero.
    private const int BaseSeed = 2025_10_26;
    [JsonPropertyName("id")]
    public string? Id { get; init; }
    [JsonPropertyName("label")]
    public string? Label { get; init; }
    [JsonPropertyName("eventCount")]
    public int EventCount { get; init; } = DefaultEventCount;
    [JsonPropertyName("ruleCount")]
    public int RuleCount { get; init; } = DefaultRuleCount;
    [JsonPropertyName("actionsPerRule")]
    public int ActionsPerRule { get; init; } = DefaultActionsPerRule;
    [JsonPropertyName("matchRate")]
    public double? MatchRate { get; init; }
    [JsonPropertyName("tenantCount")]
    public int? TenantCount { get; init; }
    [JsonPropertyName("channelCount")]
    public int? ChannelCount { get; init; }
    [JsonPropertyName("seed")]
    public int? Seed { get; init; }
    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }
    [JsonPropertyName("minThroughputPerSecond")]
    public double? MinThroughputPerSecond { get; init; }
    [JsonPropertyName("maxAllocatedMb")]
    public double? MaxAllocatedMb { get; init; }
    [JsonPropertyName("iterations")]
    public int? Iterations { get; init; }
    // Trimmed id with a fixed fallback when absent.
    public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "notify_dispatch" : Id!.Trim();
    // Human-readable label; falls back to the scenario id.
    public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim();
    public int ResolveEventCount()
    {
        if (EventCount <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires eventCount > 0.");
        }
        return EventCount;
    }
    public int ResolveRuleCount()
    {
        if (RuleCount <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires ruleCount > 0.");
        }
        return RuleCount;
    }
    public int ResolveActionsPerRule()
    {
        if (ActionsPerRule <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires actionsPerRule > 0.");
        }
        return ActionsPerRule;
    }
    // Match rate must be in (0, 1]; defaults to 0.25 when omitted.
    public double ResolveMatchRate()
    {
        var rate = MatchRate ?? DefaultMatchRate;
        if (!double.IsFinite(rate) || rate <= 0d || rate > 1d)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires matchRate within (0, 1].");
        }
        return rate;
    }
    public int ResolveTenantCount()
    {
        var tenants = TenantCount ?? DefaultTenantCount;
        if (tenants <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires tenantCount > 0.");
        }
        return tenants;
    }
    public int ResolveChannelCount()
    {
        var channels = ChannelCount ?? DefaultChannelCount;
        if (channels <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires channelCount > 0.");
        }
        return channels;
    }
    // Explicit positive seed wins; otherwise derive a stable positive seed from
    // the scenario id so repeated runs of the same scenario are deterministic.
    public int ResolveSeed()
    {
        if (Seed is { } explicitSeed && explicitSeed > 0)
        {
            return explicitSeed;
        }
        var material = Encoding.UTF8.GetBytes($"stellaops-notify-bench::{ScenarioId}");
        var hash = SHA256.HashData(material);
        // Mask the sign bit so the derived seed is non-negative.
        var derived = BitConverter.ToInt32(hash, 0) & int.MaxValue;
        if (derived == 0)
        {
            derived = BaseSeed;
        }
        return derived;
    }
    // Runs every range check so invalid config fails before the benchmark starts.
    public void Validate()
    {
        ResolveEventCount();
        ResolveRuleCount();
        ResolveActionsPerRule();
        ResolveMatchRate();
        ResolveTenantCount();
        ResolveChannelCount();
    }
}
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.Notify;
/// <summary>
/// Root benchmark configuration: global thresholds plus the list of scenarios.
/// Loaded from a JSON file via <see cref="LoadAsync"/>.
/// </summary>
internal sealed record BenchmarkConfig(
    double? ThresholdMs,
    double? MinThroughputPerSecond,
    double? MaxAllocatedMb,
    int? Iterations,
    IReadOnlyList<NotifyScenarioConfig> Scenarios)
{
    /// <summary>
    /// Loads and validates the benchmark configuration at <paramref name="path"/>.
    /// Throws when the file is missing, unparsable, has no scenarios, or any
    /// scenario fails validation.
    /// </summary>
    public static async Task<BenchmarkConfig> LoadAsync(string path)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
        }
        await using var stream = File.OpenRead(resolved);
        var model = await JsonSerializer.DeserializeAsync<BenchmarkConfigModel>(
            stream,
            new JsonSerializerOptions(JsonSerializerDefaults.Web)
            {
                PropertyNameCaseInsensitive = true,
                ReadCommentHandling = JsonCommentHandling.Skip,
                AllowTrailingCommas = true
            }).ConfigureAwait(false);
        if (model is null)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
        }
        if (model.Scenarios.Count == 0)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
        }
        // Fail fast on any invalid scenario before the benchmark starts.
        foreach (var scenario in model.Scenarios)
        {
            scenario.Validate();
        }
        return new BenchmarkConfig(
            model.ThresholdMs,
            model.MinThroughputPerSecond,
            model.MaxAllocatedMb,
            model.Iterations,
            model.Scenarios);
    }
    // JSON-shape mirror bound by System.Text.Json, then validated above.
    private sealed class BenchmarkConfigModel
    {
        [JsonPropertyName("thresholdMs")]
        public double? ThresholdMs { get; init; }
        [JsonPropertyName("minThroughputPerSecond")]
        public double? MinThroughputPerSecond { get; init; }
        [JsonPropertyName("maxAllocatedMb")]
        public double? MaxAllocatedMb { get; init; }
        [JsonPropertyName("iterations")]
        public int? Iterations { get; init; }
        [JsonPropertyName("scenarios")]
        public List<NotifyScenarioConfig> Scenarios { get; init; } = new();
    }
}
/// <summary>
/// One benchmark scenario's knobs (event/rule/action counts, match rate, tenants,
/// channels, thresholds). Bound from JSON; Resolve* methods apply defaults and
/// validate ranges, throwing <see cref="InvalidOperationException"/> on bad input.
/// </summary>
internal sealed class NotifyScenarioConfig
{
    // Defaults applied when the JSON omits the corresponding property.
    private const int DefaultEventCount = 10_000;
    private const int DefaultRuleCount = 200;
    private const int DefaultActionsPerRule = 3;
    private const double DefaultMatchRate = 0.25d;
    private const int DefaultTenantCount = 4;
    private const int DefaultChannelCount = 8;
    // Fallback seed used when the hash-derived seed collapses to zero.
    private const int BaseSeed = 2025_10_26;
    [JsonPropertyName("id")]
    public string? Id { get; init; }
    [JsonPropertyName("label")]
    public string? Label { get; init; }
    [JsonPropertyName("eventCount")]
    public int EventCount { get; init; } = DefaultEventCount;
    [JsonPropertyName("ruleCount")]
    public int RuleCount { get; init; } = DefaultRuleCount;
    [JsonPropertyName("actionsPerRule")]
    public int ActionsPerRule { get; init; } = DefaultActionsPerRule;
    [JsonPropertyName("matchRate")]
    public double? MatchRate { get; init; }
    [JsonPropertyName("tenantCount")]
    public int? TenantCount { get; init; }
    [JsonPropertyName("channelCount")]
    public int? ChannelCount { get; init; }
    [JsonPropertyName("seed")]
    public int? Seed { get; init; }
    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }
    [JsonPropertyName("minThroughputPerSecond")]
    public double? MinThroughputPerSecond { get; init; }
    [JsonPropertyName("maxAllocatedMb")]
    public double? MaxAllocatedMb { get; init; }
    [JsonPropertyName("iterations")]
    public int? Iterations { get; init; }
    // Trimmed id with a fixed fallback when absent.
    public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "notify_dispatch" : Id!.Trim();
    // Human-readable label; falls back to the scenario id.
    public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim();
    public int ResolveEventCount()
    {
        if (EventCount <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires eventCount > 0.");
        }
        return EventCount;
    }
    public int ResolveRuleCount()
    {
        if (RuleCount <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires ruleCount > 0.");
        }
        return RuleCount;
    }
    public int ResolveActionsPerRule()
    {
        if (ActionsPerRule <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires actionsPerRule > 0.");
        }
        return ActionsPerRule;
    }
    // Match rate must be in (0, 1]; defaults to 0.25 when omitted.
    public double ResolveMatchRate()
    {
        var rate = MatchRate ?? DefaultMatchRate;
        if (!double.IsFinite(rate) || rate <= 0d || rate > 1d)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires matchRate within (0, 1].");
        }
        return rate;
    }
    public int ResolveTenantCount()
    {
        var tenants = TenantCount ?? DefaultTenantCount;
        if (tenants <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires tenantCount > 0.");
        }
        return tenants;
    }
    public int ResolveChannelCount()
    {
        var channels = ChannelCount ?? DefaultChannelCount;
        if (channels <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires channelCount > 0.");
        }
        return channels;
    }
    // Explicit positive seed wins; otherwise derive a stable positive seed from
    // the scenario id so repeated runs of the same scenario are deterministic.
    public int ResolveSeed()
    {
        if (Seed is { } explicitSeed && explicitSeed > 0)
        {
            return explicitSeed;
        }
        var material = Encoding.UTF8.GetBytes($"stellaops-notify-bench::{ScenarioId}");
        var hash = SHA256.HashData(material);
        // Mask the sign bit so the derived seed is non-negative.
        var derived = BitConverter.ToInt32(hash, 0) & int.MaxValue;
        if (derived == 0)
        {
            derived = BaseSeed;
        }
        return derived;
    }
    // Runs every range check so invalid config fails before the benchmark starts.
    public void Validate()
    {
        ResolveEventCount();
        ResolveRuleCount();
        ResolveActionsPerRule();
        ResolveMatchRate();
        ResolveTenantCount();
        ResolveChannelCount();
    }
}

View File

@@ -1,26 +1,26 @@
using System;
namespace StellaOps.Bench.Notify;
/// <summary>
/// Folds dispatch hashes into a rolling value so the benchmark's work cannot be
/// optimized away, and verifies at least one dispatch was recorded.
/// </summary>
internal sealed class DispatchAccumulator
{
    // Rolling hash seeded with 17; consumed by AssertConsumed's side-effect contract.
    private long _value = 17;

    // Tracks whether Add was ever invoked. The previous implementation inferred
    // this from `_value == 17`, which falsely reports "no values" whenever the
    // accumulated hash happens to land back on the seed value.
    private bool _consumed;

    /// <summary>Mixes one dispatch's rule/action/event hashes into the accumulator.</summary>
    public void Add(int ruleHash, int actionHash, int eventHash)
    {
        unchecked
        {
            _value = (_value * 31) ^ ruleHash;
            _value = (_value * 31) ^ actionHash;
            _value = (_value * 31) ^ eventHash;
        }

        _consumed = true;
    }

    /// <summary>Throws when no dispatch was ever added (i.e. the benchmark did no work).</summary>
    public void AssertConsumed()
    {
        if (!_consumed)
        {
            throw new InvalidOperationException("Dispatch accumulator did not receive any values.");
        }
    }
}
using System;
namespace StellaOps.Bench.Notify;
/// <summary>
/// Folds dispatch hashes into a rolling value so the benchmark's work cannot be
/// optimized away, and verifies at least one dispatch was recorded.
/// </summary>
internal sealed class DispatchAccumulator
{
    // Rolling hash seeded with 17; the seed also doubles as the "nothing added yet" sentinel.
    private long _value = 17;
    /// <summary>Mixes one dispatch's rule/action/event hashes into the accumulator.</summary>
    public void Add(int ruleHash, int actionHash, int eventHash)
    {
        unchecked
        {
            _value = (_value * 31) ^ ruleHash;
            _value = (_value * 31) ^ actionHash;
            _value = (_value * 31) ^ eventHash;
        }
    }
    // NOTE(review): a hash that lands back on 17 after adds would falsely trip this
    // sentinel check — an explicit "was Add called" flag would be collision-proof.
    public void AssertConsumed()
    {
        if (_value == 17)
        {
            throw new InvalidOperationException("Dispatch accumulator did not receive any values.");
        }
    }
}

View File

@@ -1,386 +1,386 @@
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using StellaOps.Notify.Models;
namespace StellaOps.Bench.Notify;
/// <summary>
/// Generates a deterministic synthetic corpus of notification rules and
/// events, then benchmarks the dispatch hot path: per-tenant rule matching
/// plus per-action delivery accounting.
/// </summary>
internal sealed class NotifyScenarioRunner
{
    // Fixed epoch keeps generated event timestamps identical across runs.
    private static readonly DateTimeOffset BaseTimestamp = new(2025, 10, 26, 0, 0, 0, TimeSpan.Zero);

    // All synthetic events share one kind; matching selectivity comes from scope.
    private const string EventKind = NotifyEventKinds.ScannerReportReady;

    private readonly NotifyScenarioConfig _config;
    // Pre-built event stream replayed on every iteration.
    private readonly EventDescriptor[] _events;
    // Rules partitioned by tenant so Execute only scans the event's tenant slice.
    private readonly RuleDescriptor[][] _rulesByTenant;
    private readonly int _totalEvents;
    private readonly int _ruleCount;
    private readonly int _actionsPerRule;
    // Expected totals computed at construction; Execute verifies it observed them.
    private readonly int _totalMatches;
    private readonly int _totalDeliveries;
    private readonly double _averageMatchesPerEvent;
    private readonly double _averageDeliveriesPerEvent;
    private readonly int _minMatchesPerEvent;
    private readonly int _maxMatchesPerEvent;

    /// <summary>
    /// Materializes tenants, channels, rules and the event stream described by
    /// <paramref name="config"/>. Construction is deterministic for a given
    /// config seed; the Random call order below must not be reordered.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="config"/> is null.</exception>
    public NotifyScenarioRunner(NotifyScenarioConfig config)
    {
        _config = config ?? throw new ArgumentNullException(nameof(config));
        var eventCount = config.ResolveEventCount();
        var ruleCount = config.ResolveRuleCount();
        var actionsPerRule = config.ResolveActionsPerRule();
        var matchRate = config.ResolveMatchRate();
        var tenantCount = config.ResolveTenantCount();
        var channelCount = config.ResolveChannelCount();
        var seed = config.ResolveSeed();
        // A tenant with zero rules could never match anything; clamp so every
        // tenant owns at least one rule group.
        if (tenantCount > ruleCount)
        {
            tenantCount = Math.Max(1, ruleCount);
        }
        _totalEvents = eventCount;
        _ruleCount = ruleCount;
        _actionsPerRule = actionsPerRule;
        var tenants = BuildTenants(tenantCount);
        var channels = BuildChannels(channelCount);
        var random = new Random(seed);
        // matchRate becomes a per-event match count, clamped to [1, ruleCount].
        var targetMatchesPerEvent = Math.Max(1, (int)Math.Round(ruleCount * matchRate));
        targetMatchesPerEvent = Math.Min(targetMatchesPerEvent, ruleCount);
        var ruleDescriptors = new List<RuleDescriptor>(ruleCount);
        var groups = new List<RuleGroup>();
        var ruleIndex = 0;
        var groupIndex = 0;
        var channelCursor = 0;
        // Rules are built in groups sharing one (namespace, repository, digest)
        // scope, so an event emitted for a group matches exactly that group's rules.
        while (ruleIndex < ruleCount)
        {
            var groupSize = Math.Min(targetMatchesPerEvent, ruleCount - ruleIndex);
            var tenantIndex = groupIndex % tenantCount;
            var tenantId = tenants[tenantIndex];
            var namespaceValue = $"svc-{tenantIndex:D2}-{groupIndex:D3}";
            var repositoryValue = $"registry.local/{tenantId}/service-{groupIndex:D3}";
            var digestValue = GenerateDigest(random, groupIndex);
            var rules = new RuleDescriptor[groupSize];
            for (var local = 0; local < groupSize && ruleIndex < ruleCount; local++, ruleIndex++)
            {
                var ruleId = $"rule-{groupIndex:D3}-{local:D3}";
                var actions = new ActionDescriptor[actionsPerRule];
                for (var actionIndex = 0; actionIndex < actionsPerRule; actionIndex++)
                {
                    // Channels are assigned round-robin across all rules/actions.
                    var channel = channels[channelCursor % channelCount];
                    channelCursor++;
                    var actionId = $"{ruleId}-act-{actionIndex:D2}";
                    actions[actionIndex] = new ActionDescriptor(
                        actionId,
                        channel,
                        StableHash($"{actionId}|{channel}"));
                }
                rules[local] = new RuleDescriptor(
                    ruleId,
                    StableHash(ruleId),
                    tenantIndex,
                    namespaceValue,
                    repositoryValue,
                    digestValue,
                    actions);
                ruleDescriptors.Add(rules[local]);
            }
            groups.Add(new RuleGroup(tenantIndex, namespaceValue, repositoryValue, digestValue, rules));
            groupIndex++;
        }
        _rulesByTenant = BuildRulesByTenant(tenantCount, ruleDescriptors);
        var events = new EventDescriptor[eventCount];
        long totalMatches = 0;
        var minMatches = int.MaxValue;
        var maxMatches = 0;
        for (var eventIndex = 0; eventIndex < eventCount; eventIndex++)
        {
            // Events cycle round-robin over the rule groups, so per-event match
            // counts are known up front.
            var group = groups[eventIndex % groups.Count];
            var matchingRules = group.Rules.Length;
            totalMatches += matchingRules;
            if (matchingRules < minMatches)
            {
                minMatches = matchingRules;
            }
            if (matchingRules > maxMatches)
            {
                maxMatches = matchingRules;
            }
            var eventId = GenerateEventId(random, group.TenantIndex, eventIndex);
            var timestamp = BaseTimestamp.AddMilliseconds(eventIndex * 10d);
            // Materialize NotifyEvent to reflect production payload shape.
            // The instance is discarded; only its construction cost matters here.
            _ = NotifyEvent.Create(
                eventId,
                EventKind,
                tenants[group.TenantIndex],
                timestamp,
                payload: null,
                scope: NotifyEventScope.Create(
                    @namespace: group.Namespace,
                    repo: group.Repository,
                    digest: group.Digest));
            events[eventIndex] = new EventDescriptor(
                group.TenantIndex,
                EventKind,
                group.Namespace,
                group.Repository,
                group.Digest,
                ComputeEventHash(eventId));
        }
        _events = events;
        // checked: an overflow here means the scenario is configured beyond int range.
        _totalMatches = checked((int)totalMatches);
        _totalDeliveries = checked(_totalMatches * actionsPerRule);
        _averageMatchesPerEvent = totalMatches / (double)eventCount;
        _averageDeliveriesPerEvent = _averageMatchesPerEvent * actionsPerRule;
        _minMatchesPerEvent = minMatches;
        _maxMatchesPerEvent = maxMatches;
    }

    /// <summary>
    /// Replays the pre-built event stream <paramref name="iterations"/> times,
    /// measuring wall-clock latency, delivery throughput and managed allocations.
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="iterations"/> is not positive.</exception>
    /// <exception cref="InvalidOperationException">Thrown when observed match/delivery counts diverge from the precomputed totals.</exception>
    public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }
        var durations = new double[iterations];
        var throughputs = new double[iterations];
        var allocations = new double[iterations];
        for (var index = 0; index < iterations; index++)
        {
            cancellationToken.ThrowIfCancellationRequested();
            // Allocation delta is measured across the whole iteration.
            var beforeAllocated = GC.GetTotalAllocatedBytes();
            var stopwatch = Stopwatch.StartNew();
            // Accumulator consumes every dispatch so the loop cannot be optimized away.
            var accumulator = new DispatchAccumulator();
            var observedMatches = 0;
            var observedDeliveries = 0;
            foreach (ref readonly var @event in _events.AsSpan())
            {
                // Only this tenant's rules are candidates.
                var tenantRules = _rulesByTenant[@event.TenantIndex];
                foreach (var rule in tenantRules)
                {
                    if (!Matches(rule, @event))
                    {
                        continue;
                    }
                    observedMatches++;
                    var actions = rule.Actions;
                    for (var actionIndex = 0; actionIndex < actions.Length; actionIndex++)
                    {
                        observedDeliveries++;
                        accumulator.Add(rule.RuleHash, actions[actionIndex].Hash, @event.EventHash);
                    }
                }
            }
            stopwatch.Stop();
            // Sanity checks: the synthetic corpus guarantees exact totals.
            if (observedMatches != _totalMatches)
            {
                throw new InvalidOperationException($"Scenario '{_config.ScenarioId}' expected {_totalMatches} matches but observed {observedMatches}.");
            }
            if (observedDeliveries != _totalDeliveries)
            {
                throw new InvalidOperationException($"Scenario '{_config.ScenarioId}' expected {_totalDeliveries} deliveries but observed {observedDeliveries}.");
            }
            accumulator.AssertConsumed();
            var elapsedMs = stopwatch.Elapsed.TotalMilliseconds;
            // Clamp so a sub-resolution run never records a zero duration.
            if (elapsedMs <= 0d)
            {
                elapsedMs = 0.0001d;
            }
            var afterAllocated = GC.GetTotalAllocatedBytes();
            durations[index] = elapsedMs;
            // Throughput is reported as deliveries per second.
            throughputs[index] = observedDeliveries / Math.Max(stopwatch.Elapsed.TotalSeconds, 0.0001d);
            allocations[index] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);
        }
        return new ScenarioExecutionResult(
            durations,
            throughputs,
            allocations,
            _totalEvents,
            _ruleCount,
            _actionsPerRule,
            _averageMatchesPerEvent,
            _minMatchesPerEvent,
            _maxMatchesPerEvent,
            _averageDeliveriesPerEvent,
            _totalMatches,
            _totalDeliveries);
    }

    // A rule matches when the kind and the full (namespace, repository, digest)
    // scope agree; comparisons are ordinal by design.
    private static bool Matches(in RuleDescriptor rule, in EventDescriptor @event)
    {
        if (!string.Equals(@event.Kind, EventKind, StringComparison.Ordinal))
        {
            return false;
        }
        if (!string.Equals(rule.Namespace, @event.Namespace, StringComparison.Ordinal))
        {
            return false;
        }
        if (!string.Equals(rule.Repository, @event.Repository, StringComparison.Ordinal))
        {
            return false;
        }
        if (!string.Equals(rule.Digest, @event.Digest, StringComparison.Ordinal))
        {
            return false;
        }
        return true;
    }

    // Derives a non-negative int from the GUID's first four bytes.
    private static int ComputeEventHash(Guid eventId)
    {
        var bytes = eventId.ToByteArray();
        var value = BitConverter.ToInt32(bytes, 0);
        return value & int.MaxValue;
    }

    // Produces a digest-like string. NOTE(review): the appended group index makes
    // the hex part longer than a real sha256 digest — presumably intentional for
    // uniqueness in bench data; confirm nothing downstream validates digest length.
    private static string GenerateDigest(Random random, int groupIndex)
    {
        var buffer = new byte[16];
        random.NextBytes(buffer);
        var hex = Convert.ToHexString(buffer).ToLowerInvariant();
        return $"sha256:{hex}{groupIndex:D3}";
    }

    // Random GUID with the last two bytes stamped from tenant/event indices
    // (low 8 bits only, so the event stamp repeats every 256 events).
    private static Guid GenerateEventId(Random random, int tenantIndex, int eventIndex)
    {
        Span<byte> buffer = stackalloc byte[16];
        random.NextBytes(buffer);
        buffer[^1] = (byte)(tenantIndex & 0xFF);
        buffer[^2] = (byte)(eventIndex & 0xFF);
        return new Guid(buffer);
    }

    // Groups the flat rule list into per-tenant arrays (array index == tenant index).
    private static RuleDescriptor[][] BuildRulesByTenant(int tenantCount, List<RuleDescriptor> rules)
    {
        var result = new RuleDescriptor[tenantCount][];
        for (var tenantIndex = 0; tenantIndex < tenantCount; tenantIndex++)
        {
            result[tenantIndex] = rules
                .Where(rule => rule.TenantIndex == tenantIndex)
                .ToArray();
        }
        return result;
    }

    // Synthetic tenant identifiers: "tenant-00", "tenant-01", ...
    private static string[] BuildTenants(int tenantCount)
    {
        var tenants = new string[tenantCount];
        for (var index = 0; index < tenantCount; index++)
        {
            tenants[index] = $"tenant-{index:D2}";
        }
        return tenants;
    }

    // Channel ids cycle through four kinds: slack, teams, email, webhook.
    private static string[] BuildChannels(int channelCount)
    {
        var channels = new string[channelCount];
        for (var index = 0; index < channelCount; index++)
        {
            var kind = (index % 4) switch
            {
                0 => "slack",
                1 => "teams",
                2 => "email",
                _ => "webhook"
            };
            channels[index] = $"{kind}:channel-{index:D2}";
        }
        return channels;
    }

    // FNV-1a over UTF-16 code units, masked non-negative so hashes are stable
    // across processes (unlike string.GetHashCode).
    private static int StableHash(string value)
    {
        unchecked
        {
            const int offset = unchecked((int)2166136261);
            const int prime = 16777619;
            var hash = offset;
            foreach (var ch in value.AsSpan())
            {
                hash ^= ch;
                hash *= prime;
            }
            return hash & int.MaxValue;
        }
    }

    // Immutable descriptors making up the synthetic corpus.
    private readonly record struct RuleDescriptor(
        string RuleId,
        int RuleHash,
        int TenantIndex,
        string Namespace,
        string Repository,
        string Digest,
        ActionDescriptor[] Actions);

    private readonly record struct ActionDescriptor(
        string ActionId,
        string Channel,
        int Hash);

    private readonly record struct RuleGroup(
        int TenantIndex,
        string Namespace,
        string Repository,
        string Digest,
        RuleDescriptor[] Rules);

    private readonly record struct EventDescriptor(
        int TenantIndex,
        string Kind,
        string Namespace,
        string Repository,
        string Digest,
        int EventHash);
}
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using StellaOps.Notify.Models;
namespace StellaOps.Bench.Notify;
/// <summary>
/// Generates a deterministic synthetic corpus of notification rules and
/// events, then benchmarks the dispatch hot path: per-tenant rule matching
/// plus per-action delivery accounting.
/// </summary>
internal sealed class NotifyScenarioRunner
{
    // Fixed epoch keeps generated event timestamps identical across runs.
    private static readonly DateTimeOffset BaseTimestamp = new(2025, 10, 26, 0, 0, 0, TimeSpan.Zero);

    // All synthetic events share one kind; matching selectivity comes from scope.
    private const string EventKind = NotifyEventKinds.ScannerReportReady;

    private readonly NotifyScenarioConfig _config;
    // Pre-built event stream replayed on every iteration.
    private readonly EventDescriptor[] _events;
    // Rules partitioned by tenant so Execute only scans the event's tenant slice.
    private readonly RuleDescriptor[][] _rulesByTenant;
    private readonly int _totalEvents;
    private readonly int _ruleCount;
    private readonly int _actionsPerRule;
    // Expected totals computed at construction; Execute verifies it observed them.
    private readonly int _totalMatches;
    private readonly int _totalDeliveries;
    private readonly double _averageMatchesPerEvent;
    private readonly double _averageDeliveriesPerEvent;
    private readonly int _minMatchesPerEvent;
    private readonly int _maxMatchesPerEvent;

    /// <summary>
    /// Materializes tenants, channels, rules and the event stream described by
    /// <paramref name="config"/>. Construction is deterministic for a given
    /// config seed; the Random call order below must not be reordered.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="config"/> is null.</exception>
    public NotifyScenarioRunner(NotifyScenarioConfig config)
    {
        _config = config ?? throw new ArgumentNullException(nameof(config));
        var eventCount = config.ResolveEventCount();
        var ruleCount = config.ResolveRuleCount();
        var actionsPerRule = config.ResolveActionsPerRule();
        var matchRate = config.ResolveMatchRate();
        var tenantCount = config.ResolveTenantCount();
        var channelCount = config.ResolveChannelCount();
        var seed = config.ResolveSeed();
        // A tenant with zero rules could never match anything; clamp so every
        // tenant owns at least one rule group.
        if (tenantCount > ruleCount)
        {
            tenantCount = Math.Max(1, ruleCount);
        }
        _totalEvents = eventCount;
        _ruleCount = ruleCount;
        _actionsPerRule = actionsPerRule;
        var tenants = BuildTenants(tenantCount);
        var channels = BuildChannels(channelCount);
        var random = new Random(seed);
        // matchRate becomes a per-event match count, clamped to [1, ruleCount].
        var targetMatchesPerEvent = Math.Max(1, (int)Math.Round(ruleCount * matchRate));
        targetMatchesPerEvent = Math.Min(targetMatchesPerEvent, ruleCount);
        var ruleDescriptors = new List<RuleDescriptor>(ruleCount);
        var groups = new List<RuleGroup>();
        var ruleIndex = 0;
        var groupIndex = 0;
        var channelCursor = 0;
        // Rules are built in groups sharing one (namespace, repository, digest)
        // scope, so an event emitted for a group matches exactly that group's rules.
        while (ruleIndex < ruleCount)
        {
            var groupSize = Math.Min(targetMatchesPerEvent, ruleCount - ruleIndex);
            var tenantIndex = groupIndex % tenantCount;
            var tenantId = tenants[tenantIndex];
            var namespaceValue = $"svc-{tenantIndex:D2}-{groupIndex:D3}";
            var repositoryValue = $"registry.local/{tenantId}/service-{groupIndex:D3}";
            var digestValue = GenerateDigest(random, groupIndex);
            var rules = new RuleDescriptor[groupSize];
            for (var local = 0; local < groupSize && ruleIndex < ruleCount; local++, ruleIndex++)
            {
                var ruleId = $"rule-{groupIndex:D3}-{local:D3}";
                var actions = new ActionDescriptor[actionsPerRule];
                for (var actionIndex = 0; actionIndex < actionsPerRule; actionIndex++)
                {
                    // Channels are assigned round-robin across all rules/actions.
                    var channel = channels[channelCursor % channelCount];
                    channelCursor++;
                    var actionId = $"{ruleId}-act-{actionIndex:D2}";
                    actions[actionIndex] = new ActionDescriptor(
                        actionId,
                        channel,
                        StableHash($"{actionId}|{channel}"));
                }
                rules[local] = new RuleDescriptor(
                    ruleId,
                    StableHash(ruleId),
                    tenantIndex,
                    namespaceValue,
                    repositoryValue,
                    digestValue,
                    actions);
                ruleDescriptors.Add(rules[local]);
            }
            groups.Add(new RuleGroup(tenantIndex, namespaceValue, repositoryValue, digestValue, rules));
            groupIndex++;
        }
        _rulesByTenant = BuildRulesByTenant(tenantCount, ruleDescriptors);
        var events = new EventDescriptor[eventCount];
        long totalMatches = 0;
        var minMatches = int.MaxValue;
        var maxMatches = 0;
        for (var eventIndex = 0; eventIndex < eventCount; eventIndex++)
        {
            // Events cycle round-robin over the rule groups, so per-event match
            // counts are known up front.
            var group = groups[eventIndex % groups.Count];
            var matchingRules = group.Rules.Length;
            totalMatches += matchingRules;
            if (matchingRules < minMatches)
            {
                minMatches = matchingRules;
            }
            if (matchingRules > maxMatches)
            {
                maxMatches = matchingRules;
            }
            var eventId = GenerateEventId(random, group.TenantIndex, eventIndex);
            var timestamp = BaseTimestamp.AddMilliseconds(eventIndex * 10d);
            // Materialize NotifyEvent to reflect production payload shape.
            // The instance is discarded; only its construction cost matters here.
            _ = NotifyEvent.Create(
                eventId,
                EventKind,
                tenants[group.TenantIndex],
                timestamp,
                payload: null,
                scope: NotifyEventScope.Create(
                    @namespace: group.Namespace,
                    repo: group.Repository,
                    digest: group.Digest));
            events[eventIndex] = new EventDescriptor(
                group.TenantIndex,
                EventKind,
                group.Namespace,
                group.Repository,
                group.Digest,
                ComputeEventHash(eventId));
        }
        _events = events;
        // checked: an overflow here means the scenario is configured beyond int range.
        _totalMatches = checked((int)totalMatches);
        _totalDeliveries = checked(_totalMatches * actionsPerRule);
        _averageMatchesPerEvent = totalMatches / (double)eventCount;
        _averageDeliveriesPerEvent = _averageMatchesPerEvent * actionsPerRule;
        _minMatchesPerEvent = minMatches;
        _maxMatchesPerEvent = maxMatches;
    }

    /// <summary>
    /// Replays the pre-built event stream <paramref name="iterations"/> times,
    /// measuring wall-clock latency, delivery throughput and managed allocations.
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="iterations"/> is not positive.</exception>
    /// <exception cref="InvalidOperationException">Thrown when observed match/delivery counts diverge from the precomputed totals.</exception>
    public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }
        var durations = new double[iterations];
        var throughputs = new double[iterations];
        var allocations = new double[iterations];
        for (var index = 0; index < iterations; index++)
        {
            cancellationToken.ThrowIfCancellationRequested();
            // Allocation delta is measured across the whole iteration.
            var beforeAllocated = GC.GetTotalAllocatedBytes();
            var stopwatch = Stopwatch.StartNew();
            // Accumulator consumes every dispatch so the loop cannot be optimized away.
            var accumulator = new DispatchAccumulator();
            var observedMatches = 0;
            var observedDeliveries = 0;
            foreach (ref readonly var @event in _events.AsSpan())
            {
                // Only this tenant's rules are candidates.
                var tenantRules = _rulesByTenant[@event.TenantIndex];
                foreach (var rule in tenantRules)
                {
                    if (!Matches(rule, @event))
                    {
                        continue;
                    }
                    observedMatches++;
                    var actions = rule.Actions;
                    for (var actionIndex = 0; actionIndex < actions.Length; actionIndex++)
                    {
                        observedDeliveries++;
                        accumulator.Add(rule.RuleHash, actions[actionIndex].Hash, @event.EventHash);
                    }
                }
            }
            stopwatch.Stop();
            // Sanity checks: the synthetic corpus guarantees exact totals.
            if (observedMatches != _totalMatches)
            {
                throw new InvalidOperationException($"Scenario '{_config.ScenarioId}' expected {_totalMatches} matches but observed {observedMatches}.");
            }
            if (observedDeliveries != _totalDeliveries)
            {
                throw new InvalidOperationException($"Scenario '{_config.ScenarioId}' expected {_totalDeliveries} deliveries but observed {observedDeliveries}.");
            }
            accumulator.AssertConsumed();
            var elapsedMs = stopwatch.Elapsed.TotalMilliseconds;
            // Clamp so a sub-resolution run never records a zero duration.
            if (elapsedMs <= 0d)
            {
                elapsedMs = 0.0001d;
            }
            var afterAllocated = GC.GetTotalAllocatedBytes();
            durations[index] = elapsedMs;
            // Throughput is reported as deliveries per second.
            throughputs[index] = observedDeliveries / Math.Max(stopwatch.Elapsed.TotalSeconds, 0.0001d);
            allocations[index] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);
        }
        return new ScenarioExecutionResult(
            durations,
            throughputs,
            allocations,
            _totalEvents,
            _ruleCount,
            _actionsPerRule,
            _averageMatchesPerEvent,
            _minMatchesPerEvent,
            _maxMatchesPerEvent,
            _averageDeliveriesPerEvent,
            _totalMatches,
            _totalDeliveries);
    }

    // A rule matches when the kind and the full (namespace, repository, digest)
    // scope agree; comparisons are ordinal by design.
    private static bool Matches(in RuleDescriptor rule, in EventDescriptor @event)
    {
        if (!string.Equals(@event.Kind, EventKind, StringComparison.Ordinal))
        {
            return false;
        }
        if (!string.Equals(rule.Namespace, @event.Namespace, StringComparison.Ordinal))
        {
            return false;
        }
        if (!string.Equals(rule.Repository, @event.Repository, StringComparison.Ordinal))
        {
            return false;
        }
        if (!string.Equals(rule.Digest, @event.Digest, StringComparison.Ordinal))
        {
            return false;
        }
        return true;
    }

    // Derives a non-negative int from the GUID's first four bytes.
    private static int ComputeEventHash(Guid eventId)
    {
        var bytes = eventId.ToByteArray();
        var value = BitConverter.ToInt32(bytes, 0);
        return value & int.MaxValue;
    }

    // Produces a digest-like string. NOTE(review): the appended group index makes
    // the hex part longer than a real sha256 digest — presumably intentional for
    // uniqueness in bench data; confirm nothing downstream validates digest length.
    private static string GenerateDigest(Random random, int groupIndex)
    {
        var buffer = new byte[16];
        random.NextBytes(buffer);
        var hex = Convert.ToHexString(buffer).ToLowerInvariant();
        return $"sha256:{hex}{groupIndex:D3}";
    }

    // Random GUID with the last two bytes stamped from tenant/event indices
    // (low 8 bits only, so the event stamp repeats every 256 events).
    private static Guid GenerateEventId(Random random, int tenantIndex, int eventIndex)
    {
        Span<byte> buffer = stackalloc byte[16];
        random.NextBytes(buffer);
        buffer[^1] = (byte)(tenantIndex & 0xFF);
        buffer[^2] = (byte)(eventIndex & 0xFF);
        return new Guid(buffer);
    }

    // Groups the flat rule list into per-tenant arrays (array index == tenant index).
    private static RuleDescriptor[][] BuildRulesByTenant(int tenantCount, List<RuleDescriptor> rules)
    {
        var result = new RuleDescriptor[tenantCount][];
        for (var tenantIndex = 0; tenantIndex < tenantCount; tenantIndex++)
        {
            result[tenantIndex] = rules
                .Where(rule => rule.TenantIndex == tenantIndex)
                .ToArray();
        }
        return result;
    }

    // Synthetic tenant identifiers: "tenant-00", "tenant-01", ...
    private static string[] BuildTenants(int tenantCount)
    {
        var tenants = new string[tenantCount];
        for (var index = 0; index < tenantCount; index++)
        {
            tenants[index] = $"tenant-{index:D2}";
        }
        return tenants;
    }

    // Channel ids cycle through four kinds: slack, teams, email, webhook.
    private static string[] BuildChannels(int channelCount)
    {
        var channels = new string[channelCount];
        for (var index = 0; index < channelCount; index++)
        {
            var kind = (index % 4) switch
            {
                0 => "slack",
                1 => "teams",
                2 => "email",
                _ => "webhook"
            };
            channels[index] = $"{kind}:channel-{index:D2}";
        }
        return channels;
    }

    // FNV-1a over UTF-16 code units, masked non-negative so hashes are stable
    // across processes (unlike string.GetHashCode).
    private static int StableHash(string value)
    {
        unchecked
        {
            const int offset = unchecked((int)2166136261);
            const int prime = 16777619;
            var hash = offset;
            foreach (var ch in value.AsSpan())
            {
                hash ^= ch;
                hash *= prime;
            }
            return hash & int.MaxValue;
        }
    }

    // Immutable descriptors making up the synthetic corpus.
    private readonly record struct RuleDescriptor(
        string RuleId,
        int RuleHash,
        int TenantIndex,
        string Namespace,
        string Repository,
        string Digest,
        ActionDescriptor[] Actions);

    private readonly record struct ActionDescriptor(
        string ActionId,
        string Channel,
        int Hash);

    private readonly record struct RuleGroup(
        int TenantIndex,
        string Namespace,
        string Repository,
        string Digest,
        RuleDescriptor[] Rules);

    private readonly record struct EventDescriptor(
        int TenantIndex,
        string Kind,
        string Namespace,
        string Repository,
        string Digest,
        int EventHash);
}

View File

@@ -1,364 +1,364 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Globalization;
using StellaOps.Bench.Notify.Baseline;
using StellaOps.Bench.Notify.Reporting;
namespace StellaOps.Bench.Notify;
/// <summary>
/// CLI entry point for the Notify dispatch benchmark: runs each configured
/// scenario, enforces latency/throughput/allocation budgets plus baseline
/// regression limits, and writes optional CSV/JSON/Prometheus reports.
/// </summary>
internal static class Program
{
    /// <summary>
    /// Executes all scenarios. Returns 0 on success, 1 when any budget or
    /// regression check fails or when an unrecoverable error occurs.
    /// </summary>
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = ProgramOptions.Parse(args);
            var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
            var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);
            var results = new List<ScenarioResult>();
            var reports = new List<BenchmarkScenarioReport>();
            var failures = new List<string>();
            foreach (var scenario in config.Scenarios)
            {
                // Iteration precedence: CLI override > per-scenario > global config > default 5.
                var iterations = options.Iterations
                    ?? scenario.Iterations
                    ?? config.Iterations
                    ?? 5;
                var runner = new NotifyScenarioRunner(scenario);
                var execution = runner.Execute(iterations, CancellationToken.None);
                var durationStats = DurationStatistics.From(execution.Durations);
                var throughputStats = ThroughputStatistics.From(execution.Throughputs);
                var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
                // Budget precedence: per-scenario > CLI override > global config.
                var scenarioThreshold = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
                var scenarioThroughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
                var scenarioAllocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;
                var result = new ScenarioResult(
                    scenario.ScenarioId,
                    scenario.DisplayLabel,
                    iterations,
                    execution.TotalEvents,
                    execution.TotalRules,
                    execution.ActionsPerRule,
                    execution.AverageMatchesPerEvent,
                    execution.MinMatchesPerEvent,
                    execution.MaxMatchesPerEvent,
                    execution.AverageDeliveriesPerEvent,
                    execution.TotalDeliveries,
                    durationStats.MeanMs,
                    durationStats.P95Ms,
                    durationStats.MaxMs,
                    throughputStats.MeanPerSecond,
                    throughputStats.MinPerSecond,
                    allocationStats.MaxAllocatedMb,
                    scenarioThreshold,
                    scenarioThroughputFloor,
                    scenarioAllocationLimit);
                results.Add(result);
                // Budget checks: failures are collected, not thrown, so every
                // scenario still runs and all violations are reported together.
                if (scenarioThreshold is { } threshold && result.MaxMs > threshold)
                {
                    failures.Add($"{result.Id} exceeded latency threshold: {result.MaxMs:F2} ms > {threshold:F2} ms");
                }
                if (scenarioThroughputFloor is { } floor && result.MinThroughputPerSecond < floor)
                {
                    failures.Add($"{result.Id} fell below throughput floor: {result.MinThroughputPerSecond:N0} deliveries/s < {floor:N0} deliveries/s");
                }
                if (scenarioAllocationLimit is { } limit && result.MaxAllocatedMb > limit)
                {
                    failures.Add($"{result.Id} exceeded allocation budget: {result.MaxAllocatedMb:F2} MB > {limit:F2} MB");
                }
                // Missing baseline leaves baselineEntry at its default; the report
                // is expected to treat that as "no baseline to compare against".
                baseline.TryGetValue(result.Id, out var baselineEntry);
                var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit);
                reports.Add(report);
                failures.AddRange(report.BuildRegressionFailureMessages());
            }
            TablePrinter.Print(results);
            // Optional sinks: each output format is written only when requested.
            if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
            {
                CsvWriter.Write(options.CsvOutPath!, results);
            }
            if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
            {
                var metadata = new BenchmarkJsonMetadata(
                    SchemaVersion: "notify-dispatch-bench/1.0",
                    CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(),
                    Commit: options.Commit,
                    Environment: options.Environment);
                await BenchmarkJsonWriter.WriteAsync(
                    options.JsonOutPath!,
                    metadata,
                    reports,
                    CancellationToken.None).ConfigureAwait(false);
            }
            if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
            {
                PrometheusWriter.Write(options.PrometheusOutPath!, reports);
            }
            if (failures.Count > 0)
            {
                Console.Error.WriteLine();
                Console.Error.WriteLine("Benchmark failures detected:");
                // Distinct(): a scenario may trip the same message more than once.
                foreach (var failure in failures.Distinct())
                {
                    Console.Error.WriteLine($" - {failure}");
                }
                return 1;
            }
            return 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"notify-bench error: {ex.Message}");
            return 1;
        }
    }

    /// <summary>
    /// Immutable snapshot of the parsed command line; null members mean the
    /// option was not supplied and config-level values apply.
    /// </summary>
    private sealed record ProgramOptions(
        string ConfigPath,
        int? Iterations,
        double? ThresholdMs,
        double? MinThroughputPerSecond,
        double? MaxAllocatedMb,
        string? CsvOutPath,
        string? JsonOutPath,
        string? PrometheusOutPath,
        string BaselinePath,
        DateTimeOffset? CapturedAtUtc,
        string? Commit,
        string? Environment,
        double? RegressionLimit)
    {
        /// <summary>
        /// Parses CLI arguments. Numeric values parse with the invariant
        /// culture; unknown flags throw.
        /// </summary>
        /// <exception cref="ArgumentException">Thrown on an unknown flag or a flag missing its value.</exception>
        public static ProgramOptions Parse(string[] args)
        {
            var configPath = DefaultConfigPath();
            var baselinePath = DefaultBaselinePath();
            int? iterations = null;
            double? thresholdMs = null;
            double? minThroughput = null;
            double? maxAllocated = null;
            string? csvOut = null;
            string? jsonOut = null;
            string? promOut = null;
            DateTimeOffset? capturedAt = null;
            string? commit = null;
            string? environment = null;
            double? regressionLimit = null;
            for (var index = 0; index < args.Length; index++)
            {
                var current = args[index];
                // Pattern for value-taking flags: EnsureNext validates a value
                // follows, then ++index consumes it.
                switch (current)
                {
                    case "--config":
                        EnsureNext(args, index);
                        configPath = Path.GetFullPath(args[++index]);
                        break;
                    case "--iterations":
                        EnsureNext(args, index);
                        iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--threshold-ms":
                        EnsureNext(args, index);
                        thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-throughput":
                        EnsureNext(args, index);
                        minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--max-allocated-mb":
                        EnsureNext(args, index);
                        maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--csv":
                        EnsureNext(args, index);
                        csvOut = args[++index];
                        break;
                    case "--json":
                        EnsureNext(args, index);
                        jsonOut = args[++index];
                        break;
                    case "--prometheus":
                        EnsureNext(args, index);
                        promOut = args[++index];
                        break;
                    case "--baseline":
                        EnsureNext(args, index);
                        baselinePath = Path.GetFullPath(args[++index]);
                        break;
                    case "--captured-at":
                        EnsureNext(args, index);
                        // Timestamps without an offset are interpreted as UTC.
                        capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
                        break;
                    case "--commit":
                        EnsureNext(args, index);
                        commit = args[++index];
                        break;
                    case "--environment":
                        EnsureNext(args, index);
                        environment = args[++index];
                        break;
                    case "--regression-limit":
                        EnsureNext(args, index);
                        regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--help":
                    case "-h":
                        // Help short-circuits the process entirely.
                        PrintUsage();
                        System.Environment.Exit(0);
                        break;
                    default:
                        throw new ArgumentException($"Unknown argument '{current}'.");
                }
            }
            return new ProgramOptions(
                configPath,
                iterations,
                thresholdMs,
                minThroughput,
                maxAllocated,
                csvOut,
                jsonOut,
                promOut,
                baselinePath,
                capturedAt,
                commit,
                environment,
                regressionLimit);
        }

        // Walks from the binary directory up to the bench root.
        // NOTE(review): assumes the standard bin/<configuration>/<tfm> output
        // layout (three levels below the project) — confirm for publish layouts.
        private static string DefaultConfigPath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "config.json");
        }

        // Same layout assumption as DefaultConfigPath, but for the baseline CSV.
        private static string DefaultBaselinePath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "baseline.csv");
        }

        // Guards flags that require a trailing value.
        private static void EnsureNext(string[] args, int index)
        {
            if (index + 1 >= args.Length)
            {
                throw new ArgumentException("Missing value for argument.");
            }
        }

        // Writes the flag reference to stdout; used by --help/-h.
        private static void PrintUsage()
        {
            Console.WriteLine("Usage: notify-bench [options]");
            Console.WriteLine();
            Console.WriteLine("Options:");
            Console.WriteLine(" --config <path> Path to benchmark configuration JSON.");
            Console.WriteLine(" --iterations <count> Override iteration count.");
            Console.WriteLine(" --threshold-ms <value> Global latency threshold in milliseconds.");
            Console.WriteLine(" --min-throughput <value> Global throughput floor (deliveries/second).");
            Console.WriteLine(" --max-allocated-mb <value> Global allocation ceiling (MB).");
            Console.WriteLine(" --csv <path> Write CSV results to path.");
            Console.WriteLine(" --json <path> Write JSON results to path.");
            Console.WriteLine(" --prometheus <path> Write Prometheus exposition metrics to path.");
            Console.WriteLine(" --baseline <path> Baseline CSV path.");
            Console.WriteLine(" --captured-at <iso8601> Timestamp to embed in JSON metadata.");
            Console.WriteLine(" --commit <sha> Commit identifier for metadata.");
            Console.WriteLine(" --environment <name> Environment label for metadata.");
            Console.WriteLine(" --regression-limit <value> Regression multiplier (default 1.15).");
        }
    }
}
/// <summary>
/// Renders benchmark scenario results to the console as a pipe-delimited
/// table with a fixed two-line header.
/// </summary>
internal static class TablePrinter
{
    public static void Print(IEnumerable<ScenarioResult> results)
    {
        Console.WriteLine("Scenario | Events | Rules | Match/Evt | Deliver/Evt | Mean(ms) | P95(ms) | Max(ms) | Min k/s | Alloc(MB)");
        Console.WriteLine("---------------------------- | ------------| -------- | --------- | ----------- | ---------- | ---------- | ---------- | -------- | --------");

        foreach (var result in results)
        {
            // Each ScenarioResult exposes pre-formatted column strings; join
            // them with the same delimiter the header uses.
            var cells = new[]
            {
                result.IdColumn,
                result.EventsColumn,
                result.RulesColumn,
                result.MatchesColumn,
                result.DeliveriesColumn,
                result.MeanColumn,
                result.P95Column,
                result.MaxColumn,
                result.MinThroughputColumn,
                result.AllocatedColumn,
            };
            Console.WriteLine(string.Join(" | ", cells));
        }
    }
}
/// <summary>
/// Persists benchmark scenario results as a CSV file, creating the target
/// directory when needed. Numbers use the invariant culture ("F4" for
/// floating-point columns) so output is machine-readable.
/// </summary>
internal static class CsvWriter
{
    public static void Write(string path, IEnumerable<ScenarioResult> results)
    {
        var resolvedPath = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolvedPath);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None);
        using var writer = new StreamWriter(stream);
        writer.WriteLine("scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb");

        foreach (var row in results)
        {
            // Build every field up front, then emit the record in one call.
            var fields = new[]
            {
                row.Id,
                row.Iterations.ToString(CultureInfo.InvariantCulture),
                row.TotalEvents.ToString(CultureInfo.InvariantCulture),
                row.TotalDeliveries.ToString(CultureInfo.InvariantCulture),
                row.MeanMs.ToString("F4", CultureInfo.InvariantCulture),
                row.P95Ms.ToString("F4", CultureInfo.InvariantCulture),
                row.MaxMs.ToString("F4", CultureInfo.InvariantCulture),
                row.MeanThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture),
                row.MinThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture),
                row.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture),
            };
            writer.WriteLine(string.Join(',', fields));
        }
    }
}
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Globalization;
using StellaOps.Bench.Notify.Baseline;
using StellaOps.Bench.Notify.Reporting;
namespace StellaOps.Bench.Notify;
internal static class Program
{
/// <summary>
/// Executes all configured benchmark scenarios, enforcing latency/throughput/
/// allocation budgets and baseline regression limits, and writing optional
/// CSV/JSON/Prometheus reports. Returns 0 on success, 1 on any failure.
/// </summary>
public static async Task<int> Main(string[] args)
{
    try
    {
        var options = ProgramOptions.Parse(args);
        var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
        var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);
        var results = new List<ScenarioResult>();
        var reports = new List<BenchmarkScenarioReport>();
        var failures = new List<string>();
        foreach (var scenario in config.Scenarios)
        {
            // Iteration precedence: CLI override > per-scenario > global config > default 5.
            var iterations = options.Iterations
                ?? scenario.Iterations
                ?? config.Iterations
                ?? 5;
            var runner = new NotifyScenarioRunner(scenario);
            var execution = runner.Execute(iterations, CancellationToken.None);
            var durationStats = DurationStatistics.From(execution.Durations);
            var throughputStats = ThroughputStatistics.From(execution.Throughputs);
            var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
            // Budget precedence: per-scenario > CLI override > global config.
            var scenarioThreshold = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
            var scenarioThroughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
            var scenarioAllocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;
            var result = new ScenarioResult(
                scenario.ScenarioId,
                scenario.DisplayLabel,
                iterations,
                execution.TotalEvents,
                execution.TotalRules,
                execution.ActionsPerRule,
                execution.AverageMatchesPerEvent,
                execution.MinMatchesPerEvent,
                execution.MaxMatchesPerEvent,
                execution.AverageDeliveriesPerEvent,
                execution.TotalDeliveries,
                durationStats.MeanMs,
                durationStats.P95Ms,
                durationStats.MaxMs,
                throughputStats.MeanPerSecond,
                throughputStats.MinPerSecond,
                allocationStats.MaxAllocatedMb,
                scenarioThreshold,
                scenarioThroughputFloor,
                scenarioAllocationLimit);
            results.Add(result);
            // Budget checks: failures are collected, not thrown, so every
            // scenario still runs and all violations are reported together.
            if (scenarioThreshold is { } threshold && result.MaxMs > threshold)
            {
                failures.Add($"{result.Id} exceeded latency threshold: {result.MaxMs:F2} ms > {threshold:F2} ms");
            }
            if (scenarioThroughputFloor is { } floor && result.MinThroughputPerSecond < floor)
            {
                failures.Add($"{result.Id} fell below throughput floor: {result.MinThroughputPerSecond:N0} deliveries/s < {floor:N0} deliveries/s");
            }
            if (scenarioAllocationLimit is { } limit && result.MaxAllocatedMb > limit)
            {
                failures.Add($"{result.Id} exceeded allocation budget: {result.MaxAllocatedMb:F2} MB > {limit:F2} MB");
            }
            // Missing baseline leaves baselineEntry at its default; the report
            // is expected to treat that as "no baseline to compare against".
            baseline.TryGetValue(result.Id, out var baselineEntry);
            var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit);
            reports.Add(report);
            failures.AddRange(report.BuildRegressionFailureMessages());
        }
        TablePrinter.Print(results);
        // Optional sinks: each output format is written only when requested.
        if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
        {
            CsvWriter.Write(options.CsvOutPath!, results);
        }
        if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
        {
            var metadata = new BenchmarkJsonMetadata(
                SchemaVersion: "notify-dispatch-bench/1.0",
                CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(),
                Commit: options.Commit,
                Environment: options.Environment);
            await BenchmarkJsonWriter.WriteAsync(
                options.JsonOutPath!,
                metadata,
                reports,
                CancellationToken.None).ConfigureAwait(false);
        }
        if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
        {
            PrometheusWriter.Write(options.PrometheusOutPath!, reports);
        }
        if (failures.Count > 0)
        {
            Console.Error.WriteLine();
            Console.Error.WriteLine("Benchmark failures detected:");
            // Distinct(): a scenario may trip the same message more than once.
            foreach (var failure in failures.Distinct())
            {
                Console.Error.WriteLine($" - {failure}");
            }
            return 1;
        }
        return 0;
    }
    catch (Exception ex)
    {
        Console.Error.WriteLine($"notify-bench error: {ex.Message}");
        return 1;
    }
}
/// <summary>
/// Parsed command-line options for the benchmark harness. Null members mean
/// "not supplied on the command line" and defer to configuration/scenario defaults.
/// </summary>
private sealed record ProgramOptions(
    string ConfigPath,
    int? Iterations,
    double? ThresholdMs,
    double? MinThroughputPerSecond,
    double? MaxAllocatedMb,
    string? CsvOutPath,
    string? JsonOutPath,
    string? PrometheusOutPath,
    string BaselinePath,
    DateTimeOffset? CapturedAtUtc,
    string? Commit,
    string? Environment,
    double? RegressionLimit)
{
    /// <summary>
    /// Parses CLI arguments. Throws <see cref="ArgumentException"/> on an unknown flag
    /// or a flag missing its value; prints usage and exits the process for --help/-h.
    /// </summary>
    public static ProgramOptions Parse(string[] args)
    {
        var configPath = DefaultConfigPath();
        var baselinePath = DefaultBaselinePath();
        int? iterations = null;
        double? thresholdMs = null;
        double? minThroughput = null;
        double? maxAllocated = null;
        string? csvOut = null;
        string? jsonOut = null;
        string? promOut = null;
        DateTimeOffset? capturedAt = null;
        string? commit = null;
        string? environment = null;
        double? regressionLimit = null;

        for (var index = 0; index < args.Length; index++)
        {
            var current = args[index];
            switch (current)
            {
                case "--config":
                    EnsureNext(args, index);
                    configPath = Path.GetFullPath(args[++index]);
                    break;
                case "--iterations":
                    EnsureNext(args, index);
                    iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                    break;
                case "--threshold-ms":
                    EnsureNext(args, index);
                    thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                    break;
                case "--min-throughput":
                    EnsureNext(args, index);
                    minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                    break;
                case "--max-allocated-mb":
                    EnsureNext(args, index);
                    maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
                    break;
                case "--csv":
                    EnsureNext(args, index);
                    csvOut = args[++index];
                    break;
                case "--json":
                    EnsureNext(args, index);
                    jsonOut = args[++index];
                    break;
                case "--prometheus":
                    EnsureNext(args, index);
                    promOut = args[++index];
                    break;
                case "--baseline":
                    EnsureNext(args, index);
                    baselinePath = Path.GetFullPath(args[++index]);
                    break;
                case "--captured-at":
                    EnsureNext(args, index);
                    // AssumeUniversal: a timestamp without an explicit offset is treated as UTC.
                    capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
                    break;
                case "--commit":
                    EnsureNext(args, index);
                    commit = args[++index];
                    break;
                case "--environment":
                    EnsureNext(args, index);
                    environment = args[++index];
                    break;
                case "--regression-limit":
                    EnsureNext(args, index);
                    regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                    break;
                case "--help":
                case "-h":
                    PrintUsage();
                    // Fully qualified because this record declares an "Environment" parameter.
                    System.Environment.Exit(0);
                    break;
                default:
                    throw new ArgumentException($"Unknown argument '{current}'.");
            }
        }

        return new ProgramOptions(
            configPath,
            iterations,
            thresholdMs,
            minThroughput,
            maxAllocated,
            csvOut,
            jsonOut,
            promOut,
            baselinePath,
            capturedAt,
            commit,
            environment,
            regressionLimit);
    }

    // Default config.json lives in the bench root (one level above the project directory,
    // which itself is three levels above the binary output directory).
    private static string DefaultConfigPath()
    {
        var binaryDir = AppContext.BaseDirectory;
        var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
        var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
        return Path.Combine(benchRoot, "config.json");
    }

    // Default baseline.csv sits next to config.json in the bench root.
    private static string DefaultBaselinePath()
    {
        var binaryDir = AppContext.BaseDirectory;
        var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
        var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
        return Path.Combine(benchRoot, "baseline.csv");
    }

    // Guards flags that require a trailing value; index points at the flag itself.
    private static void EnsureNext(string[] args, int index)
    {
        if (index + 1 >= args.Length)
        {
            throw new ArgumentException("Missing value for argument.");
        }
    }

    // Prints the CLI help text shown for --help/-h.
    private static void PrintUsage()
    {
        Console.WriteLine("Usage: notify-bench [options]");
        Console.WriteLine();
        Console.WriteLine("Options:");
        Console.WriteLine(" --config <path> Path to benchmark configuration JSON.");
        Console.WriteLine(" --iterations <count> Override iteration count.");
        Console.WriteLine(" --threshold-ms <value> Global latency threshold in milliseconds.");
        Console.WriteLine(" --min-throughput <value> Global throughput floor (deliveries/second).");
        Console.WriteLine(" --max-allocated-mb <value> Global allocation ceiling (MB).");
        Console.WriteLine(" --csv <path> Write CSV results to path.");
        Console.WriteLine(" --json <path> Write JSON results to path.");
        Console.WriteLine(" --prometheus <path> Write Prometheus exposition metrics to path.");
        Console.WriteLine(" --baseline <path> Baseline CSV path.");
        Console.WriteLine(" --captured-at <iso8601> Timestamp to embed in JSON metadata.");
        Console.WriteLine(" --commit <sha> Commit identifier for metadata.");
        Console.WriteLine(" --environment <name> Environment label for metadata.");
        Console.WriteLine(" --regression-limit <value> Regression multiplier (default 1.15).");
    }
}
}
/// <summary>
/// Renders scenario results as a pipe-separated console table.
/// </summary>
internal static class TablePrinter
{
    public static void Print(IEnumerable<ScenarioResult> results)
    {
        // Header and separator rows are fixed; data rows use the pre-padded
        // column properties exposed by ScenarioResult.
        Console.WriteLine("Scenario | Events | Rules | Match/Evt | Deliver/Evt | Mean(ms) | P95(ms) | Max(ms) | Min k/s | Alloc(MB)");
        Console.WriteLine("---------------------------- | ------------| -------- | --------- | ----------- | ---------- | ---------- | ---------- | -------- | --------");

        foreach (var row in results)
        {
            var cells = new[]
            {
                row.IdColumn,
                row.EventsColumn,
                row.RulesColumn,
                row.MatchesColumn,
                row.DeliveriesColumn,
                row.MeanColumn,
                row.P95Column,
                row.MaxColumn,
                row.MinThroughputColumn,
                row.AllocatedColumn
            };
            Console.WriteLine(string.Join(" | ", cells));
        }
    }
}
/// <summary>
/// Writes scenario results to a CSV file, creating the parent directory when missing.
/// </summary>
internal static class CsvWriter
{
    public static void Write(string path, IEnumerable<ScenarioResult> results)
    {
        var resolvedPath = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolvedPath);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        // FileMode.Create truncates any existing file; FileShare.None blocks concurrent writers.
        using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None);
        using var writer = new StreamWriter(stream);
        writer.WriteLine("scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb");

        foreach (var row in results)
        {
            // Invariant culture keeps the numeric formatting machine-stable.
            var fields = new[]
            {
                row.Id,
                row.Iterations.ToString(CultureInfo.InvariantCulture),
                row.TotalEvents.ToString(CultureInfo.InvariantCulture),
                row.TotalDeliveries.ToString(CultureInfo.InvariantCulture),
                row.MeanMs.ToString("F4", CultureInfo.InvariantCulture),
                row.P95Ms.ToString("F4", CultureInfo.InvariantCulture),
                row.MaxMs.ToString("F4", CultureInfo.InvariantCulture),
                row.MeanThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture),
                row.MinThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture),
                row.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture)
            };
            writer.WriteLine(string.Join(",", fields));
        }
    }
}

View File

@@ -1,3 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Bench.Notify.Tests")]
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Bench.Notify.Tests")]

View File

@@ -1,147 +1,147 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.Notify.Baseline;
namespace StellaOps.Bench.Notify.Reporting;
/// <summary>
/// Serializes benchmark scenario reports, together with run metadata, to an indented JSON document.
/// </summary>
internal static class BenchmarkJsonWriter
{
    // Web defaults give camelCase property names; WhenWritingNull drops optional
    // baseline/threshold fields from the output instead of emitting explicit nulls.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    /// <summary>
    /// Writes <paramref name="reports"/> as a JSON document to <paramref name="path"/>,
    /// creating the parent directory when missing and overwriting any existing file.
    /// </summary>
    /// <param name="path">Destination file path; resolved to a full path before writing.</param>
    /// <param name="metadata">Run-level metadata embedded at the top of the document.</param>
    /// <param name="reports">Per-scenario reports to serialize.</param>
    /// <param name="cancellationToken">Cancels serialization and the final flush.</param>
    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);
        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }
        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());
        // FileMode.Create truncates an existing file; FileShare.None blocks concurrent writers.
        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    // Flattens one report (result + optional baseline + regression verdict) into the JSON DTO.
    // NOTE: the DTO constructors are positional; the argument order here must match the
    // record parameter order declared below.
    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;
        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.TotalEvents,
            report.Result.TotalRules,
            report.Result.ActionsPerRule,
            report.Result.AverageMatchesPerEvent,
            report.Result.MinMatchesPerEvent,
            report.Result.MaxMatchesPerEvent,
            report.Result.AverageDeliveriesPerEvent,
            report.Result.TotalDeliveries,
            report.Result.MeanMs,
            report.Result.P95Ms,
            report.Result.MaxMs,
            report.Result.MeanThroughputPerSecond,
            report.Result.MinThroughputPerSecond,
            report.Result.MaxAllocatedMb,
            report.Result.ThresholdMs,
            report.Result.MinThroughputThresholdPerSecond,
            report.Result.MaxAllocatedThresholdMb,
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.EventCount,
                    baseline.DeliveryCount,
                    baseline.MeanMs,
                    baseline.P95Ms,
                    baseline.MaxMs,
                    baseline.MeanThroughputPerSecond,
                    baseline.MinThroughputPerSecond,
                    baseline.MaxAllocatedMb),
            new BenchmarkJsonScenarioRegression(
                report.DurationRegressionRatio,
                report.ThroughputRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    // DTO records mirroring the on-disk JSON schema (serialized via SerializerOptions above).
    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int TotalEvents,
        int TotalRules,
        int ActionsPerRule,
        double AverageMatchesPerEvent,
        int MinMatchesPerEvent,
        int MaxMatchesPerEvent,
        double AverageDeliveriesPerEvent,
        int TotalDeliveries,
        double MeanMs,
        double P95Ms,
        double MaxMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MaxAllocatedMb,
        double? ThresholdMs,
        double? MinThroughputThresholdPerSecond,
        double? MaxAllocatedThresholdMb,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int EventCount,
        int DeliveryCount,
        double MeanMs,
        double P95Ms,
        double MaxMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MaxAllocatedMb);

    private sealed record BenchmarkJsonScenarioRegression(
        double? DurationRatio,
        double? ThroughputRatio,
        double Limit,
        bool Breached);
}
/// <summary>Run-level metadata embedded at the top of the benchmark JSON document.</summary>
internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.Notify.Baseline;
namespace StellaOps.Bench.Notify.Reporting;
/// <summary>
/// Serializes benchmark scenario reports, together with run metadata, to an indented JSON document.
/// </summary>
internal static class BenchmarkJsonWriter
{
    // Web defaults give camelCase property names; WhenWritingNull drops optional
    // baseline/threshold fields from the output instead of emitting explicit nulls.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    /// <summary>
    /// Writes <paramref name="reports"/> as a JSON document to <paramref name="path"/>,
    /// creating the parent directory when missing and overwriting any existing file.
    /// </summary>
    /// <param name="path">Destination file path; resolved to a full path before writing.</param>
    /// <param name="metadata">Run-level metadata embedded at the top of the document.</param>
    /// <param name="reports">Per-scenario reports to serialize.</param>
    /// <param name="cancellationToken">Cancels serialization and the final flush.</param>
    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);
        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }
        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());
        // FileMode.Create truncates an existing file; FileShare.None blocks concurrent writers.
        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    // Flattens one report (result + optional baseline + regression verdict) into the JSON DTO.
    // NOTE: the DTO constructors are positional; the argument order here must match the
    // record parameter order declared below.
    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;
        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.TotalEvents,
            report.Result.TotalRules,
            report.Result.ActionsPerRule,
            report.Result.AverageMatchesPerEvent,
            report.Result.MinMatchesPerEvent,
            report.Result.MaxMatchesPerEvent,
            report.Result.AverageDeliveriesPerEvent,
            report.Result.TotalDeliveries,
            report.Result.MeanMs,
            report.Result.P95Ms,
            report.Result.MaxMs,
            report.Result.MeanThroughputPerSecond,
            report.Result.MinThroughputPerSecond,
            report.Result.MaxAllocatedMb,
            report.Result.ThresholdMs,
            report.Result.MinThroughputThresholdPerSecond,
            report.Result.MaxAllocatedThresholdMb,
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.EventCount,
                    baseline.DeliveryCount,
                    baseline.MeanMs,
                    baseline.P95Ms,
                    baseline.MaxMs,
                    baseline.MeanThroughputPerSecond,
                    baseline.MinThroughputPerSecond,
                    baseline.MaxAllocatedMb),
            new BenchmarkJsonScenarioRegression(
                report.DurationRegressionRatio,
                report.ThroughputRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    // DTO records mirroring the on-disk JSON schema (serialized via SerializerOptions above).
    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int TotalEvents,
        int TotalRules,
        int ActionsPerRule,
        double AverageMatchesPerEvent,
        int MinMatchesPerEvent,
        int MaxMatchesPerEvent,
        double AverageDeliveriesPerEvent,
        int TotalDeliveries,
        double MeanMs,
        double P95Ms,
        double MaxMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MaxAllocatedMb,
        double? ThresholdMs,
        double? MinThroughputThresholdPerSecond,
        double? MaxAllocatedThresholdMb,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int EventCount,
        int DeliveryCount,
        double MeanMs,
        double P95Ms,
        double MaxMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MaxAllocatedMb);

    private sealed record BenchmarkJsonScenarioRegression(
        double? DurationRatio,
        double? ThroughputRatio,
        double Limit,
        bool Breached);
}
/// <summary>Run-level metadata embedded at the top of the benchmark JSON document.</summary>
internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);

View File

@@ -1,84 +1,84 @@
using System;
using System.Collections.Generic;
using StellaOps.Bench.Notify.Baseline;
namespace StellaOps.Bench.Notify.Reporting;
/// <summary>
/// Couples a scenario result with its optional baseline and evaluates
/// duration/throughput regressions against a configurable limit.
/// </summary>
internal sealed class BenchmarkScenarioReport
{
    private const double DefaultRegressionLimit = 1.15d;

    /// <param name="result">Measured scenario result; must not be null.</param>
    /// <param name="baseline">Baseline to compare against, or null when none exists.</param>
    /// <param name="regressionLimit">Ratio above which a metric counts as regressed; null or non-positive falls back to 1.15.</param>
    public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        Result = result ?? throw new ArgumentNullException(nameof(result));
        Baseline = baseline;
        RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
        DurationRegressionRatio = CalculateDurationRatio(result.MaxMs, baseline?.MaxMs);
        ThroughputRegressionRatio = CalculateThroughputRatio(result.MinThroughputPerSecond, baseline?.MinThroughputPerSecond);
    }

    public ScenarioResult Result { get; }

    public BaselineEntry? Baseline { get; }

    public double RegressionLimit { get; }

    /// <summary>current max duration / baseline max duration; null without a usable baseline.</summary>
    public double? DurationRegressionRatio { get; }

    /// <summary>baseline min throughput / current min throughput; null without a usable baseline.</summary>
    public double? ThroughputRegressionRatio { get; }

    public bool DurationRegressionBreached =>
        DurationRegressionRatio is { } ratio &&
        ratio >= RegressionLimit;

    public bool ThroughputRegressionBreached =>
        ThroughputRegressionRatio is { } ratio &&
        ratio >= RegressionLimit;

    public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached;

    /// <summary>
    /// Yields one human-readable message per breached regression; empty when no baseline exists.
    /// </summary>
    public IEnumerable<string> BuildRegressionFailureMessages()
    {
        if (Baseline is null)
        {
            yield break;
        }

        if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
        {
            // Ratio is current/baseline, so (ratio - 1) is the percentage increase.
            var delta = (durationRatio - 1d) * 100d;
            yield return $"{Result.Id} exceeded max duration budget: {Result.MaxMs:F2} ms vs baseline {Baseline.MaxMs:F2} ms (+{delta:F1}%).";
        }

        if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio)
        {
            // Fix: the ratio is baseline/current, so the percentage DROP relative to the
            // baseline is (1 - 1/ratio), not (ratio - 1). E.g. 100 -> 80 /s is a 20% drop;
            // the old formula reported 25%.
            var delta = (1d - 1d / throughputRatio) * 100d;
            yield return $"{Result.Id} throughput regressed: min {Result.MinThroughputPerSecond:N0} /s vs baseline {Baseline.MinThroughputPerSecond:N0} /s (-{delta:F1}%).";
        }
    }

    // current/baseline; > 1 means slower than baseline. Null when the baseline is absent or non-positive.
    private static double? CalculateDurationRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        return current / baseline.Value;
    }

    // baseline/current; > 1 means lower throughput than baseline. A non-positive current
    // throughput is treated as infinitely regressed.
    private static double? CalculateThroughputRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        if (current <= 0d)
        {
            return double.PositiveInfinity;
        }

        return baseline.Value / current;
    }
}
using System;
using System.Collections.Generic;
using StellaOps.Bench.Notify.Baseline;
namespace StellaOps.Bench.Notify.Reporting;
/// <summary>
/// Couples a scenario result with its optional baseline and evaluates
/// duration/throughput regressions against a configurable limit.
/// </summary>
internal sealed class BenchmarkScenarioReport
{
    private const double DefaultRegressionLimit = 1.15d;

    /// <param name="result">Measured scenario result; must not be null.</param>
    /// <param name="baseline">Baseline to compare against, or null when none exists.</param>
    /// <param name="regressionLimit">Ratio above which a metric counts as regressed; null or non-positive falls back to 1.15.</param>
    public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        Result = result ?? throw new ArgumentNullException(nameof(result));
        Baseline = baseline;
        RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
        DurationRegressionRatio = CalculateDurationRatio(result.MaxMs, baseline?.MaxMs);
        ThroughputRegressionRatio = CalculateThroughputRatio(result.MinThroughputPerSecond, baseline?.MinThroughputPerSecond);
    }

    public ScenarioResult Result { get; }

    public BaselineEntry? Baseline { get; }

    public double RegressionLimit { get; }

    /// <summary>current max duration / baseline max duration; null without a usable baseline.</summary>
    public double? DurationRegressionRatio { get; }

    /// <summary>baseline min throughput / current min throughput; null without a usable baseline.</summary>
    public double? ThroughputRegressionRatio { get; }

    public bool DurationRegressionBreached =>
        DurationRegressionRatio is { } ratio &&
        ratio >= RegressionLimit;

    public bool ThroughputRegressionBreached =>
        ThroughputRegressionRatio is { } ratio &&
        ratio >= RegressionLimit;

    public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached;

    /// <summary>
    /// Yields one human-readable message per breached regression; empty when no baseline exists.
    /// </summary>
    public IEnumerable<string> BuildRegressionFailureMessages()
    {
        if (Baseline is null)
        {
            yield break;
        }

        if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
        {
            // Ratio is current/baseline, so (ratio - 1) is the percentage increase.
            var delta = (durationRatio - 1d) * 100d;
            yield return $"{Result.Id} exceeded max duration budget: {Result.MaxMs:F2} ms vs baseline {Baseline.MaxMs:F2} ms (+{delta:F1}%).";
        }

        if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio)
        {
            // Fix: the ratio is baseline/current, so the percentage DROP relative to the
            // baseline is (1 - 1/ratio), not (ratio - 1). E.g. 100 -> 80 /s is a 20% drop;
            // the old formula reported 25%.
            var delta = (1d - 1d / throughputRatio) * 100d;
            yield return $"{Result.Id} throughput regressed: min {Result.MinThroughputPerSecond:N0} /s vs baseline {Baseline.MinThroughputPerSecond:N0} /s (-{delta:F1}%).";
        }
    }

    // current/baseline; > 1 means slower than baseline. Null when the baseline is absent or non-positive.
    private static double? CalculateDurationRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        return current / baseline.Value;
    }

    // baseline/current; > 1 means lower throughput than baseline. A non-positive current
    // throughput is treated as infinitely regressed.
    private static double? CalculateThroughputRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        if (current <= 0d)
        {
            return double.PositiveInfinity;
        }

        return baseline.Value / current;
    }
}

View File

@@ -1,86 +1,86 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
namespace StellaOps.Bench.Notify.Reporting;
/// <summary>
/// Writes benchmark reports to a file in the Prometheus text exposition format.
/// </summary>
internal static class PrometheusWriter
{
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var output = new StringBuilder();
        output.AppendLine("# HELP notify_dispatch_bench_duration_ms Notify dispatch benchmark duration metrics (milliseconds).");
        output.AppendLine("# TYPE notify_dispatch_bench_duration_ms gauge");
        output.AppendLine("# HELP notify_dispatch_bench_throughput_per_sec Notify dispatch benchmark throughput metrics (deliveries per second).");
        output.AppendLine("# TYPE notify_dispatch_bench_throughput_per_sec gauge");
        output.AppendLine("# HELP notify_dispatch_bench_allocation_mb Notify dispatch benchmark allocation metrics (megabytes).");
        output.AppendLine("# TYPE notify_dispatch_bench_allocation_mb gauge");

        foreach (var report in reports)
        {
            var result = report.Result;
            var label = Escape(result.Id);

            AppendMetric(output, "notify_dispatch_bench_mean_ms", label, result.MeanMs);
            AppendMetric(output, "notify_dispatch_bench_p95_ms", label, result.P95Ms);
            AppendMetric(output, "notify_dispatch_bench_max_ms", label, result.MaxMs);
            AppendMetric(output, "notify_dispatch_bench_threshold_ms", label, result.ThresholdMs);
            AppendMetric(output, "notify_dispatch_bench_mean_throughput_per_sec", label, result.MeanThroughputPerSecond);
            AppendMetric(output, "notify_dispatch_bench_min_throughput_per_sec", label, result.MinThroughputPerSecond);
            AppendMetric(output, "notify_dispatch_bench_min_throughput_threshold_per_sec", label, result.MinThroughputThresholdPerSecond);
            AppendMetric(output, "notify_dispatch_bench_max_allocated_mb", label, result.MaxAllocatedMb);
            AppendMetric(output, "notify_dispatch_bench_max_allocated_threshold_mb", label, result.MaxAllocatedThresholdMb);

            if (report.Baseline is { } baseline)
            {
                AppendMetric(output, "notify_dispatch_bench_baseline_max_ms", label, baseline.MaxMs);
                AppendMetric(output, "notify_dispatch_bench_baseline_mean_ms", label, baseline.MeanMs);
                AppendMetric(output, "notify_dispatch_bench_baseline_min_throughput_per_sec", label, baseline.MinThroughputPerSecond);
            }

            if (report.DurationRegressionRatio is { } durationRatio)
            {
                AppendMetric(output, "notify_dispatch_bench_duration_regression_ratio", label, durationRatio);
            }

            if (report.ThroughputRegressionRatio is { } throughputRatio)
            {
                AppendMetric(output, "notify_dispatch_bench_throughput_regression_ratio", label, throughputRatio);
            }

            AppendMetric(output, "notify_dispatch_bench_regression_limit", label, report.RegressionLimit);
            AppendMetric(output, "notify_dispatch_bench_regression_breached", label, report.RegressionBreached ? 1 : 0);
        }

        File.WriteAllText(resolved, output.ToString(), Encoding.UTF8);
    }

    // Emits one sample line: metric{scenario="<label>"} <value>. Skipped entirely for null values.
    private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
    {
        if (value is not { } number)
        {
            return;
        }

        builder
            .Append(metric)
            .Append("{scenario=\"")
            .Append(scenario)
            .Append("\"} ")
            .AppendLine(number.ToString("G17", CultureInfo.InvariantCulture));
    }

    // Escapes backslashes and double quotes for use inside a Prometheus label value.
    private static string Escape(string value)
    {
        var escaped = value.Replace("\\", "\\\\", StringComparison.Ordinal);
        return escaped.Replace("\"", "\\\"", StringComparison.Ordinal);
    }
}
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
namespace StellaOps.Bench.Notify.Reporting;
/// <summary>
/// Writes benchmark reports to a file in the Prometheus text exposition format.
/// </summary>
internal static class PrometheusWriter
{
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var output = new StringBuilder();
        output.AppendLine("# HELP notify_dispatch_bench_duration_ms Notify dispatch benchmark duration metrics (milliseconds).");
        output.AppendLine("# TYPE notify_dispatch_bench_duration_ms gauge");
        output.AppendLine("# HELP notify_dispatch_bench_throughput_per_sec Notify dispatch benchmark throughput metrics (deliveries per second).");
        output.AppendLine("# TYPE notify_dispatch_bench_throughput_per_sec gauge");
        output.AppendLine("# HELP notify_dispatch_bench_allocation_mb Notify dispatch benchmark allocation metrics (megabytes).");
        output.AppendLine("# TYPE notify_dispatch_bench_allocation_mb gauge");

        foreach (var report in reports)
        {
            var result = report.Result;
            var label = Escape(result.Id);

            AppendMetric(output, "notify_dispatch_bench_mean_ms", label, result.MeanMs);
            AppendMetric(output, "notify_dispatch_bench_p95_ms", label, result.P95Ms);
            AppendMetric(output, "notify_dispatch_bench_max_ms", label, result.MaxMs);
            AppendMetric(output, "notify_dispatch_bench_threshold_ms", label, result.ThresholdMs);
            AppendMetric(output, "notify_dispatch_bench_mean_throughput_per_sec", label, result.MeanThroughputPerSecond);
            AppendMetric(output, "notify_dispatch_bench_min_throughput_per_sec", label, result.MinThroughputPerSecond);
            AppendMetric(output, "notify_dispatch_bench_min_throughput_threshold_per_sec", label, result.MinThroughputThresholdPerSecond);
            AppendMetric(output, "notify_dispatch_bench_max_allocated_mb", label, result.MaxAllocatedMb);
            AppendMetric(output, "notify_dispatch_bench_max_allocated_threshold_mb", label, result.MaxAllocatedThresholdMb);

            if (report.Baseline is { } baseline)
            {
                AppendMetric(output, "notify_dispatch_bench_baseline_max_ms", label, baseline.MaxMs);
                AppendMetric(output, "notify_dispatch_bench_baseline_mean_ms", label, baseline.MeanMs);
                AppendMetric(output, "notify_dispatch_bench_baseline_min_throughput_per_sec", label, baseline.MinThroughputPerSecond);
            }

            if (report.DurationRegressionRatio is { } durationRatio)
            {
                AppendMetric(output, "notify_dispatch_bench_duration_regression_ratio", label, durationRatio);
            }

            if (report.ThroughputRegressionRatio is { } throughputRatio)
            {
                AppendMetric(output, "notify_dispatch_bench_throughput_regression_ratio", label, throughputRatio);
            }

            AppendMetric(output, "notify_dispatch_bench_regression_limit", label, report.RegressionLimit);
            AppendMetric(output, "notify_dispatch_bench_regression_breached", label, report.RegressionBreached ? 1 : 0);
        }

        File.WriteAllText(resolved, output.ToString(), Encoding.UTF8);
    }

    // Emits one sample line: metric{scenario="<label>"} <value>. Skipped entirely for null values.
    private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
    {
        if (value is not { } number)
        {
            return;
        }

        builder
            .Append(metric)
            .Append("{scenario=\"")
            .Append(scenario)
            .Append("\"} ")
            .AppendLine(number.ToString("G17", CultureInfo.InvariantCulture));
    }

    // Escapes backslashes and double quotes for use inside a Prometheus label value.
    private static string Escape(string value)
    {
        var escaped = value.Replace("\\", "\\\\", StringComparison.Ordinal);
        return escaped.Replace("\"", "\\\"", StringComparison.Ordinal);
    }
}

View File

@@ -1,17 +1,17 @@
using System.Collections.Generic;
namespace StellaOps.Bench.Notify;
/// <summary>
/// Raw measurements captured while executing one benchmark scenario:
/// per-iteration samples plus aggregate matching/delivery counters.
/// </summary>
internal sealed record ScenarioExecutionResult(
    IReadOnlyList<double> Durations,      // per-iteration duration samples (ms)
    IReadOnlyList<double> Throughputs,    // per-iteration throughput samples (per second)
    IReadOnlyList<double> AllocatedMb,    // per-iteration allocation samples (MB)
    int TotalEvents,
    int TotalRules,
    int ActionsPerRule,
    double AverageMatchesPerEvent,
    int MinMatchesPerEvent,
    int MaxMatchesPerEvent,
    double AverageDeliveriesPerEvent,
    int TotalMatches,
    int TotalDeliveries);
using System.Collections.Generic;
namespace StellaOps.Bench.Notify;
/// <summary>
/// Raw measurements captured while executing one benchmark scenario:
/// per-iteration samples plus aggregate matching/delivery counters.
/// </summary>
internal sealed record ScenarioExecutionResult(
    IReadOnlyList<double> Durations,      // per-iteration duration samples (ms)
    IReadOnlyList<double> Throughputs,    // per-iteration throughput samples (per second)
    IReadOnlyList<double> AllocatedMb,    // per-iteration allocation samples (MB)
    int TotalEvents,
    int TotalRules,
    int ActionsPerRule,
    double AverageMatchesPerEvent,
    int MinMatchesPerEvent,
    int MaxMatchesPerEvent,
    double AverageDeliveriesPerEvent,
    int TotalMatches,
    int TotalDeliveries);

View File

@@ -1,46 +1,46 @@
using System.Globalization;
namespace StellaOps.Bench.Notify;
/// <summary>
/// Aggregated outcome for a single benchmark scenario, including optional
/// thresholds (null = not enforced) and pre-padded cell renderings for the
/// console table.
/// </summary>
internal sealed record ScenarioResult(
    string Id,
    string Label,
    int Iterations,
    int TotalEvents,
    int TotalRules,
    int ActionsPerRule,
    double AverageMatchesPerEvent,
    int MinMatchesPerEvent,
    int MaxMatchesPerEvent,
    double AverageDeliveriesPerEvent,
    int TotalDeliveries,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MaxAllocatedMb,
    double? ThresholdMs,
    double? MinThroughputThresholdPerSecond,
    double? MaxAllocatedThresholdMb)
{
    // Fixed-width table cells; invariant culture keeps the formatting machine-stable.
    public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];  // pad or truncate to 28 chars
    public string EventsColumn => TotalEvents.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);
    public string RulesColumn => TotalRules.ToString("N0", CultureInfo.InvariantCulture).PadLeft(9);
    public string MatchesColumn => AverageMatchesPerEvent.ToString("F1", CultureInfo.InvariantCulture).PadLeft(8);
    public string DeliveriesColumn => AverageDeliveriesPerEvent.ToString("F1", CultureInfo.InvariantCulture).PadLeft(10);
    public string MeanColumn => MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    public string P95Column => P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    public string MaxColumn => MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    // Rendered in thousands per second (the "Min k/s" column).
    public string MinThroughputColumn => (MinThroughputPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
    public string AllocatedColumn => MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}
using System.Globalization;
namespace StellaOps.Bench.Notify;
/// <summary>
/// Aggregated outcome for a single benchmark scenario, including optional
/// thresholds (null = not enforced) and pre-padded cell renderings for the
/// console table.
/// </summary>
internal sealed record ScenarioResult(
    string Id,
    string Label,
    int Iterations,
    int TotalEvents,
    int TotalRules,
    int ActionsPerRule,
    double AverageMatchesPerEvent,
    int MinMatchesPerEvent,
    int MaxMatchesPerEvent,
    double AverageDeliveriesPerEvent,
    int TotalDeliveries,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MaxAllocatedMb,
    double? ThresholdMs,
    double? MinThroughputThresholdPerSecond,
    double? MaxAllocatedThresholdMb)
{
    // Fixed-width table cells; invariant culture keeps the formatting machine-stable.
    public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];  // pad or truncate to 28 chars
    public string EventsColumn => TotalEvents.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);
    public string RulesColumn => TotalRules.ToString("N0", CultureInfo.InvariantCulture).PadLeft(9);
    public string MatchesColumn => AverageMatchesPerEvent.ToString("F1", CultureInfo.InvariantCulture).PadLeft(8);
    public string DeliveriesColumn => AverageDeliveriesPerEvent.ToString("F1", CultureInfo.InvariantCulture).PadLeft(10);
    public string MeanColumn => MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    public string P95Column => P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    public string MaxColumn => MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    // Rendered in thousands per second (the "Min k/s" column).
    public string MinThroughputColumn => (MinThroughputPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
    public string AllocatedColumn => MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}

View File

@@ -1,87 +1,87 @@
using System;
using System.Collections.Generic;
namespace StellaOps.Bench.Notify;
/// <summary>Mean / p95 / max summary over a set of duration samples (milliseconds).</summary>
internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
    /// <summary>Computes the summary; an empty sample set yields all zeros.</summary>
    public static DurationStatistics From(IReadOnlyList<double> durations)
    {
        if (durations.Count == 0)
        {
            return new DurationStatistics(0, 0, 0);
        }

        // Copy into a mutable array for sorting while accumulating the sum in one pass.
        var sorted = new double[durations.Count];
        var sum = 0d;
        for (var i = 0; i < durations.Count; i++)
        {
            sorted[i] = durations[i];
            sum += durations[i];
        }

        Array.Sort(sorted);

        return new DurationStatistics(sum / durations.Count, Percentile(sorted, 95), sorted[^1]);
    }

    // Percentile with linear interpolation between the two nearest ranks.
    private static double Percentile(IReadOnlyList<double> sorted, double percentile)
    {
        if (sorted.Count == 0)
        {
            return 0;
        }

        var rank = (percentile / 100d) * (sorted.Count - 1);
        var lower = (int)Math.Floor(rank);
        var upper = (int)Math.Ceiling(rank);
        if (upper >= sorted.Count)
        {
            return sorted[lower];
        }

        var weight = rank - lower;
        return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
    }
}
/// <summary>Mean and minimum over per-iteration throughput samples (per second).</summary>
internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
    /// <summary>Aggregates the samples; an empty set yields zeros.</summary>
    public static ThroughputStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new ThroughputStatistics(0, 0);
        }

        var sum = 0d;
        var minimum = values[0];
        foreach (var sample in values)
        {
            sum += sample;
            if (sample < minimum)
            {
                minimum = sample;
            }
        }

        return new ThroughputStatistics(sum / values.Count, minimum);
    }
}
/// <summary>Peak managed-allocation figure (MB) observed across iterations.</summary>
internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
    /// <summary>Takes the largest sample, never below zero (also covers the empty set).</summary>
    public static AllocationStatistics From(IReadOnlyList<double> values)
    {
        // Seeding the fold with 0 reproduces the original loop exactly: empty input
        // and all-negative input both report 0.
        return new AllocationStatistics(values.Aggregate(0d, Math.Max));
    }
}
using System;
using System.Collections.Generic;
namespace StellaOps.Bench.Notify;
/// <summary>Mean / p95 / max summary of per-iteration durations, in milliseconds.</summary>
internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
    /// <summary>Builds summary statistics from raw samples; all zeroes when empty.</summary>
    public static DurationStatistics From(IReadOnlyList<double> durations)
    {
        if (durations.Count == 0)
        {
            return new DurationStatistics(0, 0, 0);
        }

        var ordered = durations.ToArray();
        Array.Sort(ordered);

        var sum = 0d;
        for (var i = 0; i < ordered.Length; i++)
        {
            sum += ordered[i];
        }

        return new DurationStatistics(
            sum / ordered.Length,
            Percentile(ordered, 95),
            ordered[^1]);
    }

    /// <summary>Linear-interpolation percentile over an already-sorted sample.</summary>
    private static double Percentile(IReadOnlyList<double> sorted, double percentile)
    {
        if (sorted.Count == 0)
        {
            return 0;
        }

        var rank = percentile / 100d * (sorted.Count - 1);
        var lowerIndex = (int)Math.Floor(rank);
        var upperIndex = (int)Math.Ceiling(rank);
        if (upperIndex >= sorted.Count)
        {
            return sorted[lowerIndex];
        }

        var fraction = rank - lowerIndex;
        return sorted[lowerIndex] + fraction * (sorted[upperIndex] - sorted[lowerIndex]);
    }
}
/// <summary>Mean and minimum throughput (operations per second) across iterations.</summary>
internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
    /// <summary>Summarises raw throughput samples; zeroes when no samples were taken.</summary>
    public static ThroughputStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new ThroughputStatistics(0, 0);
        }

        // Sample sets are tiny (one entry per iteration), so the double enumeration is fine.
        return new ThroughputStatistics(values.Average(), values.Min());
    }
}
/// <summary>Peak managed-allocation figure (MB) observed across iterations.</summary>
internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
    /// <summary>Takes the largest sample, never below zero (also covers the empty set).</summary>
    public static AllocationStatistics From(IReadOnlyList<double> values)
    {
        // Seeding the fold with 0 reproduces the original loop exactly: empty input
        // and all-negative input both report 0.
        return new AllocationStatistics(values.Aggregate(0d, Math.Max));
    }
}

View File

@@ -1,12 +1,12 @@
namespace StellaOps.Bench.PolicyEngine.Baseline;
/// <summary>
/// One persisted baseline row for a benchmark scenario: the run shape
/// (iterations, findings) plus the recorded duration, throughput and
/// allocation figures that later runs are compared against.
/// Column order mirrors the baseline CSV consumed by <c>BaselineLoader</c>.
/// </summary>
internal sealed record BaselineEntry(
    string ScenarioId,
    int Iterations,
    int FindingCount,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MaxAllocatedMb);
namespace StellaOps.Bench.PolicyEngine.Baseline;
/// <summary>
/// One persisted baseline row for a benchmark scenario: the run shape
/// (iterations, findings) plus the recorded duration, throughput and
/// allocation figures that later runs are compared against.
/// Column order mirrors the baseline CSV consumed by <c>BaselineLoader</c>.
/// </summary>
internal sealed record BaselineEntry(
    string ScenarioId,
    int Iterations,
    int FindingCount,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MaxAllocatedMb);

View File

@@ -1,86 +1,86 @@
using System.Globalization;
namespace StellaOps.Bench.PolicyEngine.Baseline;
/// <summary>Reads the baseline CSV produced by earlier benchmark runs.</summary>
internal static class BaselineLoader
{
    /// <summary>
    /// Loads baseline entries keyed by scenario id (case-insensitive).
    /// A missing file yields an empty map; a malformed row throws.
    /// </summary>
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        var resolved = Path.GetFullPath(path);
        var entries = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        if (!File.Exists(resolved))
        {
            return entries;
        }

        var lines = await File.ReadAllLinesAsync(resolved, cancellationToken).ConfigureAwait(false);
        for (var lineNumber = 1; lineNumber <= lines.Length; lineNumber++)
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (lineNumber == 1)
            {
                continue; // header row
            }

            var line = lines[lineNumber - 1];
            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            var parts = line.Split(',', StringSplitOptions.TrimEntries);
            if (parts.Length < 9)
            {
                throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 9 columns, found {parts.Length}).");
            }

            var entry = new BaselineEntry(
                ScenarioId: parts[0],
                Iterations: ParseInt(parts[1], resolved, lineNumber),
                FindingCount: ParseInt(parts[2], resolved, lineNumber),
                MeanMs: ParseDouble(parts[3], resolved, lineNumber),
                P95Ms: ParseDouble(parts[4], resolved, lineNumber),
                MaxMs: ParseDouble(parts[5], resolved, lineNumber),
                MeanThroughputPerSecond: ParseDouble(parts[6], resolved, lineNumber),
                MinThroughputPerSecond: ParseDouble(parts[7], resolved, lineNumber),
                MaxAllocatedMb: ParseDouble(parts[8], resolved, lineNumber));

            // Later rows for the same scenario overwrite earlier ones.
            entries[entry.ScenarioId] = entry;
        }

        return entries;
    }

    /// <summary>Invariant-culture integer parse with a file/line-specific error.</summary>
    private static int ParseInt(string value, string file, int line) =>
        int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var result)
            ? result
            : throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");

    /// <summary>Invariant-culture floating-point parse with a file/line-specific error.</summary>
    private static double ParseDouble(string value, string file, int line) =>
        double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result)
            ? result
            : throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
}
using System.Globalization;
namespace StellaOps.Bench.PolicyEngine.Baseline;
/// <summary>Reads the baseline CSV produced by earlier benchmark runs.</summary>
internal static class BaselineLoader
{
    /// <summary>
    /// Loads baseline entries keyed by scenario id (case-insensitive).
    /// A missing file yields an empty map; a malformed row throws.
    /// </summary>
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        var resolved = Path.GetFullPath(path);
        var entries = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        if (!File.Exists(resolved))
        {
            return entries;
        }

        var lines = await File.ReadAllLinesAsync(resolved, cancellationToken).ConfigureAwait(false);
        for (var lineNumber = 1; lineNumber <= lines.Length; lineNumber++)
        {
            cancellationToken.ThrowIfCancellationRequested();
            if (lineNumber == 1)
            {
                continue; // header row
            }

            var line = lines[lineNumber - 1];
            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            var parts = line.Split(',', StringSplitOptions.TrimEntries);
            if (parts.Length < 9)
            {
                throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 9 columns, found {parts.Length}).");
            }

            var entry = new BaselineEntry(
                ScenarioId: parts[0],
                Iterations: ParseInt(parts[1], resolved, lineNumber),
                FindingCount: ParseInt(parts[2], resolved, lineNumber),
                MeanMs: ParseDouble(parts[3], resolved, lineNumber),
                P95Ms: ParseDouble(parts[4], resolved, lineNumber),
                MaxMs: ParseDouble(parts[5], resolved, lineNumber),
                MeanThroughputPerSecond: ParseDouble(parts[6], resolved, lineNumber),
                MinThroughputPerSecond: ParseDouble(parts[7], resolved, lineNumber),
                MaxAllocatedMb: ParseDouble(parts[8], resolved, lineNumber));

            // Later rows for the same scenario overwrite earlier ones.
            entries[entry.ScenarioId] = entry;
        }

        return entries;
    }

    /// <summary>Invariant-culture integer parse with a file/line-specific error.</summary>
    private static int ParseInt(string value, string file, int line) =>
        int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var result)
            ? result
            : throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");

    /// <summary>Invariant-culture floating-point parse with a file/line-specific error.</summary>
    private static double ParseDouble(string value, string file, int line) =>
        double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result)
            ? result
            : throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
}

View File

@@ -1,155 +1,155 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.PolicyEngine;
/// <summary>
/// Root benchmark configuration: optional global regression limits plus the
/// list of scenarios to execute. Read from a JSON file by <see cref="LoadAsync"/>.
/// </summary>
internal sealed record BenchmarkConfig(
    double? ThresholdMs,
    double? MinThroughputPerSecond,
    double? MaxAllocatedMb,
    int? Iterations,
    IReadOnlyList<PolicyScenarioConfig> Scenarios)
{
    /// <summary>
    /// Loads and validates the configuration at <paramref name="path"/>.
    /// Throws when the file is missing, unparsable, or declares no scenarios.
    /// </summary>
    public static async Task<BenchmarkConfig> LoadAsync(string path)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
        }
        await using var stream = File.OpenRead(resolved);
        // Web defaults plus comment/trailing-comma tolerance so hand-edited config files load.
        var model = await JsonSerializer.DeserializeAsync<BenchmarkConfigModel>(
            stream,
            new JsonSerializerOptions(JsonSerializerDefaults.Web)
            {
                PropertyNameCaseInsensitive = true,
                ReadCommentHandling = JsonCommentHandling.Skip,
                AllowTrailingCommas = true
            }).ConfigureAwait(false);
        if (model is null)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
        }
        if (model.Scenarios.Count == 0)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
        }
        // Fail fast: reject any invalid scenario before running anything.
        foreach (var scenario in model.Scenarios)
        {
            scenario.Validate();
        }
        return new BenchmarkConfig(
            model.ThresholdMs,
            model.MinThroughputPerSecond,
            model.MaxAllocatedMb,
            model.Iterations,
            model.Scenarios);
    }

    // Private JSON-shaped mirror of the record; deserialized first, then copied across.
    private sealed class BenchmarkConfigModel
    {
        [JsonPropertyName("thresholdMs")]
        public double? ThresholdMs { get; init; }
        [JsonPropertyName("minThroughputPerSecond")]
        public double? MinThroughputPerSecond { get; init; }
        [JsonPropertyName("maxAllocatedMb")]
        public double? MaxAllocatedMb { get; init; }
        [JsonPropertyName("iterations")]
        public int? Iterations { get; init; }
        [JsonPropertyName("scenarios")]
        public List<PolicyScenarioConfig> Scenarios { get; init; } = new();
    }
}
/// <summary>
/// JSON-bound description of a single benchmark scenario: which policy to
/// evaluate, how large the synthetic dataset is, and optional per-scenario
/// regression limits that override the global ones.
/// </summary>
internal sealed class PolicyScenarioConfig
{
    private const int FallbackComponentCount = 100_000;
    private const int FallbackAdvisoriesPerComponent = 10;

    [JsonPropertyName("id")]
    public string? Id { get; init; }

    [JsonPropertyName("label")]
    public string? Label { get; init; }

    [JsonPropertyName("policyPath")]
    public string PolicyPath { get; init; } = "docs/examples/policies/baseline.yaml";

    [JsonPropertyName("scoringConfig")]
    public string? ScoringConfigPath { get; init; }

    [JsonPropertyName("componentCount")]
    public int ComponentCount { get; init; } = FallbackComponentCount;

    [JsonPropertyName("advisoriesPerComponent")]
    public int AdvisoriesPerComponent { get; init; } = FallbackAdvisoriesPerComponent;

    [JsonPropertyName("totalFindings")]
    public int? TotalFindings { get; init; }

    [JsonPropertyName("seed")]
    public int? Seed { get; init; }

    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }

    [JsonPropertyName("minThroughputPerSecond")]
    public double? MinThroughputPerSecond { get; init; }

    [JsonPropertyName("maxAllocatedMb")]
    public double? MaxAllocatedMb { get; init; }

    /// <summary>Trimmed scenario id, falling back to "policy_eval" when absent.</summary>
    public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "policy_eval" : Id.Trim();

    /// <summary>
    /// Total number of synthetic findings: an explicit totalFindings wins;
    /// otherwise componentCount × advisoriesPerComponent (overflow-checked).
    /// </summary>
    public int ResolveFindingCount()
    {
        if (TotalFindings is { } explicitTotal)
        {
            return explicitTotal > 0
                ? explicitTotal
                : throw new InvalidOperationException($"Scenario '{ScenarioId}' requires totalFindings > 0.");
        }

        if (ComponentCount <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires componentCount > 0.");
        }

        if (AdvisoriesPerComponent <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires advisoriesPerComponent > 0.");
        }

        return checked(ComponentCount * AdvisoriesPerComponent);
    }

    /// <summary>Random seed for the synthetic dataset; defaults to 2025-10-26.</summary>
    public int ResolveSeed() => Seed ?? 2025_10_26;

    /// <summary>Rejects scenarios with no policy path or an invalid dataset size.</summary>
    public void Validate()
    {
        if (string.IsNullOrWhiteSpace(PolicyPath))
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires a policyPath.");
        }

        ResolveFindingCount();
    }
}
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.PolicyEngine;
/// <summary>
/// Root benchmark configuration: optional global regression limits plus the
/// list of scenarios to execute. Read from a JSON file by <see cref="LoadAsync"/>.
/// </summary>
internal sealed record BenchmarkConfig(
    double? ThresholdMs,
    double? MinThroughputPerSecond,
    double? MaxAllocatedMb,
    int? Iterations,
    IReadOnlyList<PolicyScenarioConfig> Scenarios)
{
    /// <summary>
    /// Loads and validates the configuration at <paramref name="path"/>.
    /// Throws when the file is missing, unparsable, or declares no scenarios.
    /// </summary>
    public static async Task<BenchmarkConfig> LoadAsync(string path)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
        }
        await using var stream = File.OpenRead(resolved);
        // Web defaults plus comment/trailing-comma tolerance so hand-edited config files load.
        var model = await JsonSerializer.DeserializeAsync<BenchmarkConfigModel>(
            stream,
            new JsonSerializerOptions(JsonSerializerDefaults.Web)
            {
                PropertyNameCaseInsensitive = true,
                ReadCommentHandling = JsonCommentHandling.Skip,
                AllowTrailingCommas = true
            }).ConfigureAwait(false);
        if (model is null)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
        }
        if (model.Scenarios.Count == 0)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
        }
        // Fail fast: reject any invalid scenario before running anything.
        foreach (var scenario in model.Scenarios)
        {
            scenario.Validate();
        }
        return new BenchmarkConfig(
            model.ThresholdMs,
            model.MinThroughputPerSecond,
            model.MaxAllocatedMb,
            model.Iterations,
            model.Scenarios);
    }

    // Private JSON-shaped mirror of the record; deserialized first, then copied across.
    private sealed class BenchmarkConfigModel
    {
        [JsonPropertyName("thresholdMs")]
        public double? ThresholdMs { get; init; }
        [JsonPropertyName("minThroughputPerSecond")]
        public double? MinThroughputPerSecond { get; init; }
        [JsonPropertyName("maxAllocatedMb")]
        public double? MaxAllocatedMb { get; init; }
        [JsonPropertyName("iterations")]
        public int? Iterations { get; init; }
        [JsonPropertyName("scenarios")]
        public List<PolicyScenarioConfig> Scenarios { get; init; } = new();
    }
}
/// <summary>
/// JSON-bound description of a single benchmark scenario: which policy to
/// evaluate, how large the synthetic dataset is, and optional per-scenario
/// regression limits that override the global ones.
/// </summary>
internal sealed class PolicyScenarioConfig
{
    private const int FallbackComponentCount = 100_000;
    private const int FallbackAdvisoriesPerComponent = 10;

    [JsonPropertyName("id")]
    public string? Id { get; init; }

    [JsonPropertyName("label")]
    public string? Label { get; init; }

    [JsonPropertyName("policyPath")]
    public string PolicyPath { get; init; } = "docs/examples/policies/baseline.yaml";

    [JsonPropertyName("scoringConfig")]
    public string? ScoringConfigPath { get; init; }

    [JsonPropertyName("componentCount")]
    public int ComponentCount { get; init; } = FallbackComponentCount;

    [JsonPropertyName("advisoriesPerComponent")]
    public int AdvisoriesPerComponent { get; init; } = FallbackAdvisoriesPerComponent;

    [JsonPropertyName("totalFindings")]
    public int? TotalFindings { get; init; }

    [JsonPropertyName("seed")]
    public int? Seed { get; init; }

    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }

    [JsonPropertyName("minThroughputPerSecond")]
    public double? MinThroughputPerSecond { get; init; }

    [JsonPropertyName("maxAllocatedMb")]
    public double? MaxAllocatedMb { get; init; }

    /// <summary>Trimmed scenario id, falling back to "policy_eval" when absent.</summary>
    public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "policy_eval" : Id.Trim();

    /// <summary>
    /// Total number of synthetic findings: an explicit totalFindings wins;
    /// otherwise componentCount × advisoriesPerComponent (overflow-checked).
    /// </summary>
    public int ResolveFindingCount()
    {
        if (TotalFindings is { } explicitTotal)
        {
            return explicitTotal > 0
                ? explicitTotal
                : throw new InvalidOperationException($"Scenario '{ScenarioId}' requires totalFindings > 0.");
        }

        if (ComponentCount <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires componentCount > 0.");
        }

        if (AdvisoriesPerComponent <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires advisoriesPerComponent > 0.");
        }

        return checked(ComponentCount * AdvisoriesPerComponent);
    }

    /// <summary>Random seed for the synthetic dataset; defaults to 2025-10-26.</summary>
    public int ResolveSeed() => Seed ?? 2025_10_26;

    /// <summary>Rejects scenarios with no policy path or an invalid dataset size.</summary>
    public void Validate()
    {
        if (string.IsNullOrWhiteSpace(PolicyPath))
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires a policyPath.");
        }

        ResolveFindingCount();
    }
}

View File

@@ -1,15 +1,15 @@
namespace StellaOps.Bench.PolicyEngine;
/// <summary>Filesystem path helpers shared by the benchmark tooling.</summary>
internal static class PathUtilities
{
    /// <summary>
    /// True when <paramref name="candidate"/> resolves to <paramref name="root"/>
    /// itself or to a location underneath it (no '..' escape, no rooted jump elsewhere).
    /// </summary>
    public static bool IsWithinRoot(string root, string candidate)
    {
        var relative = Path.GetRelativePath(root, candidate);
        if (relative.Length == 0 || relative == ".")
        {
            return true;
        }

        if (Path.IsPathRooted(relative))
        {
            return false;
        }

        return !relative.StartsWith("..", StringComparison.Ordinal);
    }
}
namespace StellaOps.Bench.PolicyEngine;
/// <summary>Filesystem path helpers shared by the benchmark tooling.</summary>
internal static class PathUtilities
{
    /// <summary>
    /// True when <paramref name="candidate"/> resolves to <paramref name="root"/>
    /// itself or to a location underneath it (no '..' escape, no rooted jump elsewhere).
    /// </summary>
    public static bool IsWithinRoot(string root, string candidate)
    {
        var relative = Path.GetRelativePath(root, candidate);
        if (relative.Length == 0 || relative == ".")
        {
            return true;
        }

        if (Path.IsPathRooted(relative))
        {
            return false;
        }

        return !relative.StartsWith("..", StringComparison.Ordinal);
    }
}

View File

@@ -1,249 +1,249 @@
using System.Collections.Immutable;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using StellaOps.Policy;
namespace StellaOps.Bench.PolicyEngine;
/// <summary>
/// Executes one benchmark scenario: binds the policy document, loads the
/// scoring configuration, synthesises findings, then times repeated full
/// evaluations of every finding.
/// </summary>
internal sealed class PolicyScenarioRunner
{
    private readonly PolicyScenarioConfig _config;   // retained but only read in the constructor
    private readonly PolicyDocument _document;
    private readonly PolicyScoringConfig _scoringConfig;
    private readonly PolicyFinding[] _findings;

    /// <summary>
    /// Resolves and validates all scenario inputs up front so <see cref="Execute"/>
    /// measures evaluation only, not I/O or parsing.
    /// </summary>
    public PolicyScenarioRunner(PolicyScenarioConfig config, string repoRoot)
    {
        _config = config ?? throw new ArgumentNullException(nameof(config));
        ArgumentException.ThrowIfNullOrWhiteSpace(repoRoot);
        // The policy file must live inside the repository root (guards against config typos).
        var policyPath = ResolvePathWithinRoot(repoRoot, config.PolicyPath);
        var policyContent = File.ReadAllText(policyPath);
        var policyFormat = PolicySchema.DetectFormat(policyPath);
        var binding = PolicyBinder.Bind(policyContent, policyFormat);
        if (!binding.Success)
        {
            var issues = string.Join(", ", binding.Issues.Select(issue => issue.Code));
            throw new InvalidOperationException($"Policy '{config.PolicyPath}' failed validation: {issues}.");
        }
        _document = binding.Document;
        _scoringConfig = LoadScoringConfig(repoRoot, config.ScoringConfigPath);
        _findings = SyntheticFindingGenerator.Create(config, repoRoot);
    }

    /// <summary>
    /// Runs the scenario <paramref name="iterations"/> times and returns raw
    /// per-iteration durations (ms), throughputs (findings/s) and allocations (MB).
    /// </summary>
    public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }
        var durations = new double[iterations];
        var throughputs = new double[iterations];
        var allocations = new double[iterations];
        // The accumulator consumes every verdict so the loop cannot be dead-code eliminated.
        var hashingAccumulator = new EvaluationAccumulator();
        for (var index = 0; index < iterations; index++)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var beforeAllocated = GC.GetTotalAllocatedBytes();
            var stopwatch = Stopwatch.StartNew();
            hashingAccumulator.Reset();
            foreach (var finding in _findings)
            {
                var verdict = PolicyEvaluation.EvaluateFinding(_document, _scoringConfig, finding);
                hashingAccumulator.Add(verdict);
            }
            stopwatch.Stop();
            var afterAllocated = GC.GetTotalAllocatedBytes();
            var elapsedMs = stopwatch.Elapsed.TotalMilliseconds;
            if (elapsedMs <= 0)
            {
                // Clamp so downstream statistics never see a zero/negative duration.
                elapsedMs = 0.0001;
            }
            durations[index] = elapsedMs;
            // NOTE(review): this divides by the unclamped elapsed seconds; an exact
            // zero reading would yield Infinity here — confirm that is acceptable.
            throughputs[index] = _findings.Length / stopwatch.Elapsed.TotalSeconds;
            allocations[index] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);
            hashingAccumulator.AssertConsumed();
        }
        return new ScenarioExecutionResult(
            durations,
            throughputs,
            allocations,
            _findings.Length);
    }

    /// <summary>Loads the optional scoring configuration; default config when no path is set.</summary>
    private static PolicyScoringConfig LoadScoringConfig(string repoRoot, string? scoringPath)
    {
        if (string.IsNullOrWhiteSpace(scoringPath))
        {
            return PolicyScoringConfig.Default;
        }
        var resolved = ResolvePathWithinRoot(repoRoot, scoringPath);
        var format = PolicySchema.DetectFormat(resolved);
        var content = File.ReadAllText(resolved);
        var binding = PolicyScoringConfigBinder.Bind(content, format);
        if (!binding.Success || binding.Config is null)
        {
            var issues = binding.Issues.Length == 0
                ? "unknown"
                : string.Join(", ", binding.Issues.Select(issue => issue.Code));
            throw new InvalidOperationException($"Scoring configuration '{scoringPath}' failed validation: {issues}.");
        }
        return binding.Config;
    }

    /// <summary>Resolves a repo-relative path, rejecting escapes and missing files.</summary>
    private static string ResolvePathWithinRoot(string repoRoot, string relativePath)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(repoRoot);
        ArgumentException.ThrowIfNullOrWhiteSpace(relativePath);
        var combined = Path.GetFullPath(Path.Combine(repoRoot, relativePath));
        if (!PathUtilities.IsWithinRoot(repoRoot, combined))
        {
            throw new InvalidOperationException($"Path '{relativePath}' escapes repository root '{repoRoot}'.");
        }
        if (!File.Exists(combined))
        {
            throw new FileNotFoundException($"Path '{relativePath}' resolved to '{combined}' but does not exist.", combined);
        }
        return combined;
    }
}
/// <summary>
/// Raw per-iteration measurements from one scenario run: durations (ms),
/// throughputs (findings/second), managed allocations (MB), plus the number
/// of findings evaluated each iteration.
/// </summary>
internal sealed record ScenarioExecutionResult(
    IReadOnlyList<double> Durations,
    IReadOnlyList<double> Throughputs,
    IReadOnlyList<double> AllocatedMb,
    int FindingCount);
/// <summary>
/// Fabricates synthetic findings for benchmark scenarios. Most fields derive
/// from a seeded RNG, so the same configuration reproduces the same dataset
/// (layer digests are GUID-based — see the note inside <see cref="Create"/>).
/// </summary>
internal static class SyntheticFindingGenerator
{
    private static readonly ImmutableArray<string> Environments = ImmutableArray.Create("prod", "staging", "dev");
    private static readonly ImmutableArray<string> Sources = ImmutableArray.Create("concelier", "excitor", "sbom");
    private static readonly ImmutableArray<string> Vendors = ImmutableArray.Create("acme", "contoso", "globex", "initech", "umbrella");
    private static readonly ImmutableArray<string> Licenses = ImmutableArray.Create("MIT", "Apache-2.0", "GPL-3.0", "BSD-3-Clause", "Proprietary");
    private static readonly ImmutableArray<string> Repositories = ImmutableArray.Create("acme/service-api", "acme/web", "acme/worker", "acme/mobile", "acme/cli");
    private static readonly ImmutableArray<string> Images = ImmutableArray.Create("registry.local/worker:2025.10", "registry.local/api:2025.10", "registry.local/cli:2025.10");
    // Pre-built tag combinations; one is drawn at random per finding.
    // (An unused TagPool field and an unused tagsBuffer local were removed.)
    private static readonly ImmutableArray<ImmutableArray<string>> TagSets = BuildTagSets();

    private static readonly PolicySeverity[] SeverityPool =
    {
        PolicySeverity.Critical,
        PolicySeverity.High,
        PolicySeverity.Medium,
        PolicySeverity.Low,
        PolicySeverity.Informational
    };

    /// <summary>
    /// Builds the findings described by <paramref name="config"/>. The
    /// <paramref name="repoRoot"/> argument is currently unused and kept only
    /// for call-site compatibility with the scenario runner.
    /// </summary>
    public static PolicyFinding[] Create(PolicyScenarioConfig config, string repoRoot)
    {
        var totalFindings = config.ResolveFindingCount();
        if (totalFindings <= 0)
        {
            return Array.Empty<PolicyFinding>();
        }

        // Seeded RNG keeps the randomized fields reproducible across runs.
        var random = new Random(config.ResolveSeed());
        var findings = new PolicyFinding[totalFindings];
        var componentCount = Math.Max(1, config.ComponentCount);
        for (var index = 0; index < totalFindings; index++)
        {
            var componentIndex = index % componentCount;
            var findingId = $"F-{componentIndex:D5}-{index:D6}";
            var severity = SeverityPool[random.Next(SeverityPool.Length)];
            var environment = Environments[componentIndex % Environments.Length];
            var source = Sources[random.Next(Sources.Length)];
            var vendor = Vendors[random.Next(Vendors.Length)];
            var license = Licenses[random.Next(Licenses.Length)];
            var repository = Repositories[componentIndex % Repositories.Length];
            var image = Images[(componentIndex + index) % Images.Length];
            var packageName = $"pkg{componentIndex % 1000}";
            var purl = $"pkg:generic/{packageName}@{1 + (index % 20)}.{1 + (componentIndex % 10)}.{index % 5}";
            var cve = index % 7 == 0 ? $"CVE-2025-{1000 + index % 9000:D4}" : null;
            // NOTE(review): digests come from Guid.NewGuid, so they are NOT
            // deterministic across runs even with a fixed seed — confirm intended.
            var layerDigest = $"sha256:{Convert.ToHexString(Guid.NewGuid().ToByteArray())[..32].ToLowerInvariant()}";
            var tags = TagSets[random.Next(TagSets.Length)];
            findings[index] = PolicyFinding.Create(
                findingId,
                severity,
                environment: environment,
                source: source,
                vendor: vendor,
                license: license,
                image: image,
                repository: repository,
                package: packageName,
                purl: purl,
                cve: cve,
                path: $"/app/{packageName}/{index % 50}.so",
                layerDigest: layerDigest,
                tags: tags);
        }

        return findings;
    }

    /// <summary>Fixed menu of tag combinations, including the empty set.</summary>
    private static ImmutableArray<ImmutableArray<string>> BuildTagSets()
    {
        var builder = ImmutableArray.CreateBuilder<ImmutableArray<string>>();
        builder.Add(ImmutableArray<string>.Empty);
        builder.Add(ImmutableArray.Create("kev"));
        builder.Add(ImmutableArray.Create("runtime"));
        builder.Add(ImmutableArray.Create("reachable"));
        builder.Add(ImmutableArray.Create("third-party"));
        builder.Add(ImmutableArray.Create("kev", "runtime"));
        builder.Add(ImmutableArray.Create("kev", "third-party"));
        builder.Add(ImmutableArray.Create("runtime", "public"));
        builder.Add(ImmutableArray.Create("reachable", "critical-path"));
        return builder.ToImmutable();
    }
}
/// <summary>
/// Consumes verdicts during a benchmark pass so the evaluation work cannot be
/// optimised away, and sanity-checks that each pass actually did something.
/// </summary>
internal sealed class EvaluationAccumulator
{
    private double _totalScore;
    private int _quietVerdicts;

    /// <summary>Clears the running totals at the start of an iteration.</summary>
    public void Reset()
    {
        _totalScore = 0;
        _quietVerdicts = 0;
    }

    /// <summary>Folds one verdict into the running totals.</summary>
    public void Add(PolicyVerdict verdict)
    {
        _totalScore += verdict.Score;
        _quietVerdicts += verdict.Quiet ? 1 : 0;
    }

    /// <summary>Throws when an iteration produced no observable work at all.</summary>
    public void AssertConsumed()
    {
        var sawWork = _totalScore != 0 || _quietVerdicts != 0;
        if (!sawWork)
        {
            throw new InvalidOperationException("Evaluation accumulator detected zero work; dataset may be empty.");
        }
    }
}
using System.Collections.Immutable;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using StellaOps.Policy;
namespace StellaOps.Bench.PolicyEngine;
/// <summary>
/// Executes one benchmark scenario: binds the policy document, loads the
/// scoring configuration, synthesises findings, then times repeated full
/// evaluations of every finding.
/// </summary>
internal sealed class PolicyScenarioRunner
{
    private readonly PolicyScenarioConfig _config;   // retained but only read in the constructor
    private readonly PolicyDocument _document;
    private readonly PolicyScoringConfig _scoringConfig;
    private readonly PolicyFinding[] _findings;

    /// <summary>
    /// Resolves and validates all scenario inputs up front so <see cref="Execute"/>
    /// measures evaluation only, not I/O or parsing.
    /// </summary>
    public PolicyScenarioRunner(PolicyScenarioConfig config, string repoRoot)
    {
        _config = config ?? throw new ArgumentNullException(nameof(config));
        ArgumentException.ThrowIfNullOrWhiteSpace(repoRoot);
        // The policy file must live inside the repository root (guards against config typos).
        var policyPath = ResolvePathWithinRoot(repoRoot, config.PolicyPath);
        var policyContent = File.ReadAllText(policyPath);
        var policyFormat = PolicySchema.DetectFormat(policyPath);
        var binding = PolicyBinder.Bind(policyContent, policyFormat);
        if (!binding.Success)
        {
            var issues = string.Join(", ", binding.Issues.Select(issue => issue.Code));
            throw new InvalidOperationException($"Policy '{config.PolicyPath}' failed validation: {issues}.");
        }
        _document = binding.Document;
        _scoringConfig = LoadScoringConfig(repoRoot, config.ScoringConfigPath);
        _findings = SyntheticFindingGenerator.Create(config, repoRoot);
    }

    /// <summary>
    /// Runs the scenario <paramref name="iterations"/> times and returns raw
    /// per-iteration durations (ms), throughputs (findings/s) and allocations (MB).
    /// </summary>
    public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }
        var durations = new double[iterations];
        var throughputs = new double[iterations];
        var allocations = new double[iterations];
        // The accumulator consumes every verdict so the loop cannot be dead-code eliminated.
        var hashingAccumulator = new EvaluationAccumulator();
        for (var index = 0; index < iterations; index++)
        {
            cancellationToken.ThrowIfCancellationRequested();
            var beforeAllocated = GC.GetTotalAllocatedBytes();
            var stopwatch = Stopwatch.StartNew();
            hashingAccumulator.Reset();
            foreach (var finding in _findings)
            {
                var verdict = PolicyEvaluation.EvaluateFinding(_document, _scoringConfig, finding);
                hashingAccumulator.Add(verdict);
            }
            stopwatch.Stop();
            var afterAllocated = GC.GetTotalAllocatedBytes();
            var elapsedMs = stopwatch.Elapsed.TotalMilliseconds;
            if (elapsedMs <= 0)
            {
                // Clamp so downstream statistics never see a zero/negative duration.
                elapsedMs = 0.0001;
            }
            durations[index] = elapsedMs;
            // NOTE(review): this divides by the unclamped elapsed seconds; an exact
            // zero reading would yield Infinity here — confirm that is acceptable.
            throughputs[index] = _findings.Length / stopwatch.Elapsed.TotalSeconds;
            allocations[index] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);
            hashingAccumulator.AssertConsumed();
        }
        return new ScenarioExecutionResult(
            durations,
            throughputs,
            allocations,
            _findings.Length);
    }

    /// <summary>Loads the optional scoring configuration; default config when no path is set.</summary>
    private static PolicyScoringConfig LoadScoringConfig(string repoRoot, string? scoringPath)
    {
        if (string.IsNullOrWhiteSpace(scoringPath))
        {
            return PolicyScoringConfig.Default;
        }
        var resolved = ResolvePathWithinRoot(repoRoot, scoringPath);
        var format = PolicySchema.DetectFormat(resolved);
        var content = File.ReadAllText(resolved);
        var binding = PolicyScoringConfigBinder.Bind(content, format);
        if (!binding.Success || binding.Config is null)
        {
            var issues = binding.Issues.Length == 0
                ? "unknown"
                : string.Join(", ", binding.Issues.Select(issue => issue.Code));
            throw new InvalidOperationException($"Scoring configuration '{scoringPath}' failed validation: {issues}.");
        }
        return binding.Config;
    }

    /// <summary>Resolves a repo-relative path, rejecting escapes and missing files.</summary>
    private static string ResolvePathWithinRoot(string repoRoot, string relativePath)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(repoRoot);
        ArgumentException.ThrowIfNullOrWhiteSpace(relativePath);
        var combined = Path.GetFullPath(Path.Combine(repoRoot, relativePath));
        if (!PathUtilities.IsWithinRoot(repoRoot, combined))
        {
            throw new InvalidOperationException($"Path '{relativePath}' escapes repository root '{repoRoot}'.");
        }
        if (!File.Exists(combined))
        {
            throw new FileNotFoundException($"Path '{relativePath}' resolved to '{combined}' but does not exist.", combined);
        }
        return combined;
    }
}
/// <summary>
/// Raw per-iteration measurements from one scenario run: durations (ms),
/// throughputs (findings/second), managed allocations (MB), plus the number
/// of findings evaluated each iteration.
/// </summary>
internal sealed record ScenarioExecutionResult(
    IReadOnlyList<double> Durations,
    IReadOnlyList<double> Throughputs,
    IReadOnlyList<double> AllocatedMb,
    int FindingCount);
/// <summary>
/// Fabricates synthetic findings for benchmark scenarios. Most fields derive
/// from a seeded RNG, so the same configuration reproduces the same dataset
/// (layer digests are GUID-based — see the note inside <see cref="Create"/>).
/// </summary>
internal static class SyntheticFindingGenerator
{
    private static readonly ImmutableArray<string> Environments = ImmutableArray.Create("prod", "staging", "dev");
    private static readonly ImmutableArray<string> Sources = ImmutableArray.Create("concelier", "excitor", "sbom");
    private static readonly ImmutableArray<string> Vendors = ImmutableArray.Create("acme", "contoso", "globex", "initech", "umbrella");
    private static readonly ImmutableArray<string> Licenses = ImmutableArray.Create("MIT", "Apache-2.0", "GPL-3.0", "BSD-3-Clause", "Proprietary");
    private static readonly ImmutableArray<string> Repositories = ImmutableArray.Create("acme/service-api", "acme/web", "acme/worker", "acme/mobile", "acme/cli");
    private static readonly ImmutableArray<string> Images = ImmutableArray.Create("registry.local/worker:2025.10", "registry.local/api:2025.10", "registry.local/cli:2025.10");
    // Pre-built tag combinations; one is drawn at random per finding.
    // (An unused TagPool field and an unused tagsBuffer local were removed.)
    private static readonly ImmutableArray<ImmutableArray<string>> TagSets = BuildTagSets();

    private static readonly PolicySeverity[] SeverityPool =
    {
        PolicySeverity.Critical,
        PolicySeverity.High,
        PolicySeverity.Medium,
        PolicySeverity.Low,
        PolicySeverity.Informational
    };

    /// <summary>
    /// Builds the findings described by <paramref name="config"/>. The
    /// <paramref name="repoRoot"/> argument is currently unused and kept only
    /// for call-site compatibility with the scenario runner.
    /// </summary>
    public static PolicyFinding[] Create(PolicyScenarioConfig config, string repoRoot)
    {
        var totalFindings = config.ResolveFindingCount();
        if (totalFindings <= 0)
        {
            return Array.Empty<PolicyFinding>();
        }

        // Seeded RNG keeps the randomized fields reproducible across runs.
        var random = new Random(config.ResolveSeed());
        var findings = new PolicyFinding[totalFindings];
        var componentCount = Math.Max(1, config.ComponentCount);
        for (var index = 0; index < totalFindings; index++)
        {
            var componentIndex = index % componentCount;
            var findingId = $"F-{componentIndex:D5}-{index:D6}";
            var severity = SeverityPool[random.Next(SeverityPool.Length)];
            var environment = Environments[componentIndex % Environments.Length];
            var source = Sources[random.Next(Sources.Length)];
            var vendor = Vendors[random.Next(Vendors.Length)];
            var license = Licenses[random.Next(Licenses.Length)];
            var repository = Repositories[componentIndex % Repositories.Length];
            var image = Images[(componentIndex + index) % Images.Length];
            var packageName = $"pkg{componentIndex % 1000}";
            var purl = $"pkg:generic/{packageName}@{1 + (index % 20)}.{1 + (componentIndex % 10)}.{index % 5}";
            var cve = index % 7 == 0 ? $"CVE-2025-{1000 + index % 9000:D4}" : null;
            // NOTE(review): digests come from Guid.NewGuid, so they are NOT
            // deterministic across runs even with a fixed seed — confirm intended.
            var layerDigest = $"sha256:{Convert.ToHexString(Guid.NewGuid().ToByteArray())[..32].ToLowerInvariant()}";
            var tags = TagSets[random.Next(TagSets.Length)];
            findings[index] = PolicyFinding.Create(
                findingId,
                severity,
                environment: environment,
                source: source,
                vendor: vendor,
                license: license,
                image: image,
                repository: repository,
                package: packageName,
                purl: purl,
                cve: cve,
                path: $"/app/{packageName}/{index % 50}.so",
                layerDigest: layerDigest,
                tags: tags);
        }

        return findings;
    }

    /// <summary>Fixed menu of tag combinations, including the empty set.</summary>
    private static ImmutableArray<ImmutableArray<string>> BuildTagSets()
    {
        var builder = ImmutableArray.CreateBuilder<ImmutableArray<string>>();
        builder.Add(ImmutableArray<string>.Empty);
        builder.Add(ImmutableArray.Create("kev"));
        builder.Add(ImmutableArray.Create("runtime"));
        builder.Add(ImmutableArray.Create("reachable"));
        builder.Add(ImmutableArray.Create("third-party"));
        builder.Add(ImmutableArray.Create("kev", "runtime"));
        builder.Add(ImmutableArray.Create("kev", "third-party"));
        builder.Add(ImmutableArray.Create("runtime", "public"));
        builder.Add(ImmutableArray.Create("reachable", "critical-path"));
        return builder.ToImmutable();
    }
}
/// <summary>
/// Consumes verdicts during a benchmark pass so the evaluation work cannot be
/// optimised away, and sanity-checks that each pass actually did something.
/// </summary>
internal sealed class EvaluationAccumulator
{
    private double _totalScore;
    private int _quietVerdicts;

    /// <summary>Clears the running totals at the start of an iteration.</summary>
    public void Reset()
    {
        _totalScore = 0;
        _quietVerdicts = 0;
    }

    /// <summary>Folds one verdict into the running totals.</summary>
    public void Add(PolicyVerdict verdict)
    {
        _totalScore += verdict.Score;
        _quietVerdicts += verdict.Quiet ? 1 : 0;
    }

    /// <summary>Throws when an iteration produced no observable work at all.</summary>
    public void AssertConsumed()
    {
        var sawWork = _totalScore != 0 || _quietVerdicts != 0;
        if (!sawWork)
        {
            throw new InvalidOperationException("Evaluation accumulator detected zero work; dataset may be empty.");
        }
    }
}

View File

@@ -1,373 +1,373 @@
using System.Globalization;
using System.Linq;
using StellaOps.Bench.PolicyEngine.Baseline;
using StellaOps.Bench.PolicyEngine.Reporting;
namespace StellaOps.Bench.PolicyEngine;
/// <summary>
/// Entry point for the policy-engine benchmark harness: loads config and baseline,
/// runs each scenario, enforces latency/throughput/allocation budgets and regression
/// limits, then emits table/CSV/JSON/Prometheus output.
/// </summary>
internal static class Program
{
    /// <summary>Runs the benchmark; returns 0 on success, 1 on any failure.</summary>
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = ProgramOptions.Parse(args);
            var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
            // CLI flags override config values; iteration count defaults to 3.
            var iterations = options.Iterations ?? config.Iterations ?? 3;
            var repoRoot = ResolveRepoRoot(options.RepoRoot, options.ConfigPath);
            var thresholdMs = options.ThresholdMs ?? config.ThresholdMs;
            var throughputFloor = options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
            var allocationLimit = options.MaxAllocatedMb ?? config.MaxAllocatedMb;
            var regressionLimit = options.RegressionLimit;
            var capturedAt = (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime();
            var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);
            var results = new List<ScenarioResult>();
            var reports = new List<BenchmarkScenarioReport>();
            var failures = new List<string>();
            foreach (var scenario in config.Scenarios)
            {
                var runner = new PolicyScenarioRunner(scenario, repoRoot);
                var execution = runner.Execute(iterations, CancellationToken.None);
                var durationStats = DurationStatistics.From(execution.Durations);
                var throughputStats = ThroughputStatistics.From(execution.Throughputs);
                var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
                // Per-scenario limits fall back to the global values resolved above.
                var scenarioThreshold = scenario.ThresholdMs ?? thresholdMs;
                var scenarioThroughputFloor = scenario.MinThroughputPerSecond ?? throughputFloor;
                var scenarioAllocationLimit = scenario.MaxAllocatedMb ?? allocationLimit;
                var result = new ScenarioResult(
                    scenario.ScenarioId,
                    scenario.Label ?? scenario.ScenarioId,
                    iterations,
                    execution.FindingCount,
                    durationStats.MeanMs,
                    durationStats.P95Ms,
                    durationStats.MaxMs,
                    throughputStats.MeanPerSecond,
                    throughputStats.MinPerSecond,
                    allocationStats.MaxAllocatedMb,
                    scenarioThreshold,
                    scenarioThroughputFloor,
                    scenarioAllocationLimit);
                results.Add(result);
                // Budget checks: worst-case latency, minimum throughput, peak allocation.
                if (scenarioThreshold is { } threshold && result.MaxMs > threshold)
                {
                    failures.Add($"{result.Id} exceeded latency threshold: {result.MaxMs:F2} ms > {threshold:F2} ms");
                }
                if (scenarioThroughputFloor is { } floor && result.MinThroughputPerSecond < floor)
                {
                    failures.Add($"{result.Id} fell below throughput floor: {result.MinThroughputPerSecond:N0} findings/s < {floor:N0} findings/s");
                }
                if (scenarioAllocationLimit is { } limit && result.MaxAllocatedMb > limit)
                {
                    failures.Add($"{result.Id} exceeded allocation budget: {result.MaxAllocatedMb:F2} MB > {limit:F2} MB");
                }
                // Missing baseline entry yields null; the report then skips regression checks.
                baseline.TryGetValue(result.Id, out var baselineEntry);
                var report = new BenchmarkScenarioReport(result, baselineEntry, regressionLimit);
                reports.Add(report);
                failures.AddRange(report.BuildRegressionFailureMessages());
            }
            TablePrinter.Print(results);
            if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
            {
                CsvWriter.Write(options.CsvOutPath!, results);
            }
            if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
            {
                var metadata = new BenchmarkJsonMetadata(
                    SchemaVersion: "policy-bench/1.0",
                    CapturedAtUtc: capturedAt,
                    Commit: options.Commit,
                    Environment: options.Environment);
                await BenchmarkJsonWriter.WriteAsync(
                    options.JsonOutPath!,
                    metadata,
                    reports,
                    CancellationToken.None).ConfigureAwait(false);
            }
            if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
            {
                PrometheusWriter.Write(options.PrometheusOutPath!, reports);
            }
            if (failures.Count > 0)
            {
                Console.Error.WriteLine();
                Console.Error.WriteLine("Benchmark failures detected:");
                // Distinct() avoids repeating a message that both the budget
                // check and the regression check may have produced.
                foreach (var failure in failures.Distinct())
                {
                    Console.Error.WriteLine($" - {failure}");
                }
                return 1;
            }
            return 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"policy-bench error: {ex.Message}");
            return 1;
        }
    }

    /// <summary>
    /// Resolves the repository root: explicit override wins; otherwise assumed to be
    /// three levels above the config file's directory.
    /// </summary>
    private static string ResolveRepoRoot(string? overridePath, string configPath)
    {
        if (!string.IsNullOrWhiteSpace(overridePath))
        {
            return Path.GetFullPath(overridePath);
        }
        var configDirectory = Path.GetDirectoryName(configPath);
        if (string.IsNullOrWhiteSpace(configDirectory))
        {
            return Directory.GetCurrentDirectory();
        }
        return Path.GetFullPath(Path.Combine(configDirectory, "..", "..", ".."));
    }

    /// <summary>Parsed command-line options; null members mean "not supplied".</summary>
    private sealed record ProgramOptions(
        string ConfigPath,
        int? Iterations,
        double? ThresholdMs,
        double? MinThroughputPerSecond,
        double? MaxAllocatedMb,
        string? CsvOutPath,
        string? JsonOutPath,
        string? PrometheusOutPath,
        string? RepoRoot,
        string BaselinePath,
        DateTimeOffset? CapturedAtUtc,
        string? Commit,
        string? Environment,
        double? RegressionLimit)
    {
        /// <summary>
        /// Parses CLI arguments. Unknown flags throw; <c>--help</c> prints usage and
        /// exits the process with code 0.
        /// </summary>
        public static ProgramOptions Parse(string[] args)
        {
            var configPath = DefaultConfigPath();
            var baselinePath = DefaultBaselinePath();
            int? iterations = null;
            double? thresholdMs = null;
            double? minThroughput = null;
            double? maxAllocated = null;
            string? csvOut = null;
            string? jsonOut = null;
            string? promOut = null;
            string? repoRoot = null;
            DateTimeOffset? capturedAt = null;
            string? commit = null;
            string? environment = null;
            double? regressionLimit = null;
            for (var index = 0; index < args.Length; index++)
            {
                var current = args[index];
                switch (current)
                {
                    case "--config":
                        EnsureNext(args, index);
                        configPath = Path.GetFullPath(args[++index]);
                        break;
                    case "--iterations":
                        EnsureNext(args, index);
                        iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--threshold-ms":
                        EnsureNext(args, index);
                        thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-throughput":
                        EnsureNext(args, index);
                        minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--max-allocated-mb":
                        EnsureNext(args, index);
                        maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--csv":
                        EnsureNext(args, index);
                        csvOut = args[++index];
                        break;
                    case "--json":
                        EnsureNext(args, index);
                        jsonOut = args[++index];
                        break;
                    case "--prometheus":
                        EnsureNext(args, index);
                        promOut = args[++index];
                        break;
                    case "--repo-root":
                        EnsureNext(args, index);
                        repoRoot = args[++index];
                        break;
                    case "--baseline":
                        EnsureNext(args, index);
                        baselinePath = Path.GetFullPath(args[++index]);
                        break;
                    case "--captured-at":
                        EnsureNext(args, index);
                        capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
                        break;
                    case "--commit":
                        EnsureNext(args, index);
                        commit = args[++index];
                        break;
                    case "--environment":
                        EnsureNext(args, index);
                        environment = args[++index];
                        break;
                    case "--regression-limit":
                        EnsureNext(args, index);
                        regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--help":
                    case "-h":
                        PrintUsage();
                        // Fully-qualified because the record declares an
                        // `Environment` parameter that shadows System.Environment.
                        System.Environment.Exit(0);
                        break;
                    default:
                        throw new ArgumentException($"Unknown argument '{current}'.");
                }
            }
            return new ProgramOptions(
                configPath,
                iterations,
                thresholdMs,
                minThroughput,
                maxAllocated,
                csvOut,
                jsonOut,
                promOut,
                repoRoot,
                baselinePath,
                capturedAt,
                commit,
                environment,
                regressionLimit);
        }

        /// <summary>Default config path: &lt;bench root&gt;/config.json relative to the build output.</summary>
        private static string DefaultConfigPath()
        {
            var binaryDir = AppContext.BaseDirectory;
            // bin/<config>/<tfm> -> project dir -> bench root.
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "config.json");
        }

        /// <summary>Default baseline path: &lt;bench root&gt;/baseline.csv relative to the build output.</summary>
        private static string DefaultBaselinePath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "baseline.csv");
        }

        /// <summary>Throws when a flag at <paramref name="index"/> has no following value.</summary>
        private static void EnsureNext(string[] args, int index)
        {
            if (index + 1 >= args.Length)
            {
                throw new ArgumentException("Missing value for argument.");
            }
        }

        /// <summary>Prints the CLI usage summary to stdout.</summary>
        private static void PrintUsage()
        {
            Console.WriteLine("Usage: policy-bench [options]");
            Console.WriteLine();
            Console.WriteLine("Options:");
            Console.WriteLine(" --config <path> Path to benchmark configuration JSON.");
            Console.WriteLine(" --iterations <count> Override iteration count.");
            Console.WriteLine(" --threshold-ms <value> Global latency threshold in milliseconds.");
            Console.WriteLine(" --min-throughput <value> Global throughput floor (findings/second).");
            Console.WriteLine(" --max-allocated-mb <value> Global allocation ceiling (MB).");
            Console.WriteLine(" --csv <path> Write CSV results to path.");
            Console.WriteLine(" --json <path> Write JSON results to path.");
            Console.WriteLine(" --prometheus <path> Write Prometheus exposition metrics to path.");
            Console.WriteLine(" --repo-root <path> Repository root override.");
            Console.WriteLine(" --baseline <path> Baseline CSV path.");
            Console.WriteLine(" --captured-at <iso8601> Timestamp to embed in JSON metadata.");
            Console.WriteLine(" --commit <sha> Commit identifier for metadata.");
            Console.WriteLine(" --environment <name> Environment label for metadata.");
            Console.WriteLine(" --regression-limit <value> Regression multiplier (default 1.15).");
        }
    }
}
/// <summary>Renders scenario results as a pipe-separated summary table on stdout.</summary>
internal static class TablePrinter
{
    public static void Print(IEnumerable<ScenarioResult> results)
    {
        // Header and separator match the column widths produced by the
        // ScenarioResult *Column properties.
        Console.WriteLine("Scenario | Findings | Mean(ms) | P95(ms) | Max(ms) | Min k/s | Alloc(MB)");
        Console.WriteLine("---------------------------- | ----------- | ---------- | ---------- | ---------- | -------- | --------");
        foreach (var row in results)
        {
            var columns = new[]
            {
                row.IdColumn,
                row.FindingsColumn,
                row.MeanColumn,
                row.P95Column,
                row.MaxColumn,
                row.MinThroughputColumn,
                row.AllocatedColumn
            };
            Console.WriteLine(string.Join(" | ", columns));
        }
    }
}
/// <summary>
/// Writes scenario results to a CSV file using the invariant culture and a
/// fixed column order (matching the header row below). The file is always
/// overwritten; parent directories are created on demand.
/// </summary>
internal static class CsvWriter
{
    public static void Write(string path, IEnumerable<ScenarioResult> results)
    {
        var resolvedPath = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolvedPath);
        if (!string.IsNullOrEmpty(directory))
        {
            // Ensure the target directory exists before opening the file.
            Directory.CreateDirectory(directory);
        }
        using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None);
        using var writer = new StreamWriter(stream);
        writer.WriteLine("scenario,iterations,findings,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb");
        foreach (var row in results)
        {
            // Fields are written individually (no quoting); scenario ids are
            // assumed not to contain commas — TODO confirm against config schema.
            writer.Write(row.Id);
            writer.Write(',');
            writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.FindingCount.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MeanThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MinThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture));
            writer.WriteLine();
        }
    }
}
using System.Globalization;
using System.Linq;
using StellaOps.Bench.PolicyEngine.Baseline;
using StellaOps.Bench.PolicyEngine.Reporting;
namespace StellaOps.Bench.PolicyEngine;
internal static class Program
{
public static async Task<int> Main(string[] args)
{
try
{
var options = ProgramOptions.Parse(args);
var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
var iterations = options.Iterations ?? config.Iterations ?? 3;
var repoRoot = ResolveRepoRoot(options.RepoRoot, options.ConfigPath);
var thresholdMs = options.ThresholdMs ?? config.ThresholdMs;
var throughputFloor = options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
var allocationLimit = options.MaxAllocatedMb ?? config.MaxAllocatedMb;
var regressionLimit = options.RegressionLimit;
var capturedAt = (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime();
var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);
var results = new List<ScenarioResult>();
var reports = new List<BenchmarkScenarioReport>();
var failures = new List<string>();
foreach (var scenario in config.Scenarios)
{
var runner = new PolicyScenarioRunner(scenario, repoRoot);
var execution = runner.Execute(iterations, CancellationToken.None);
var durationStats = DurationStatistics.From(execution.Durations);
var throughputStats = ThroughputStatistics.From(execution.Throughputs);
var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
var scenarioThreshold = scenario.ThresholdMs ?? thresholdMs;
var scenarioThroughputFloor = scenario.MinThroughputPerSecond ?? throughputFloor;
var scenarioAllocationLimit = scenario.MaxAllocatedMb ?? allocationLimit;
var result = new ScenarioResult(
scenario.ScenarioId,
scenario.Label ?? scenario.ScenarioId,
iterations,
execution.FindingCount,
durationStats.MeanMs,
durationStats.P95Ms,
durationStats.MaxMs,
throughputStats.MeanPerSecond,
throughputStats.MinPerSecond,
allocationStats.MaxAllocatedMb,
scenarioThreshold,
scenarioThroughputFloor,
scenarioAllocationLimit);
results.Add(result);
if (scenarioThreshold is { } threshold && result.MaxMs > threshold)
{
failures.Add($"{result.Id} exceeded latency threshold: {result.MaxMs:F2} ms > {threshold:F2} ms");
}
if (scenarioThroughputFloor is { } floor && result.MinThroughputPerSecond < floor)
{
failures.Add($"{result.Id} fell below throughput floor: {result.MinThroughputPerSecond:N0} findings/s < {floor:N0} findings/s");
}
if (scenarioAllocationLimit is { } limit && result.MaxAllocatedMb > limit)
{
failures.Add($"{result.Id} exceeded allocation budget: {result.MaxAllocatedMb:F2} MB > {limit:F2} MB");
}
baseline.TryGetValue(result.Id, out var baselineEntry);
var report = new BenchmarkScenarioReport(result, baselineEntry, regressionLimit);
reports.Add(report);
failures.AddRange(report.BuildRegressionFailureMessages());
}
TablePrinter.Print(results);
if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
{
CsvWriter.Write(options.CsvOutPath!, results);
}
if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
{
var metadata = new BenchmarkJsonMetadata(
SchemaVersion: "policy-bench/1.0",
CapturedAtUtc: capturedAt,
Commit: options.Commit,
Environment: options.Environment);
await BenchmarkJsonWriter.WriteAsync(
options.JsonOutPath!,
metadata,
reports,
CancellationToken.None).ConfigureAwait(false);
}
if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
{
PrometheusWriter.Write(options.PrometheusOutPath!, reports);
}
if (failures.Count > 0)
{
Console.Error.WriteLine();
Console.Error.WriteLine("Benchmark failures detected:");
foreach (var failure in failures.Distinct())
{
Console.Error.WriteLine($" - {failure}");
}
return 1;
}
return 0;
}
catch (Exception ex)
{
Console.Error.WriteLine($"policy-bench error: {ex.Message}");
return 1;
}
}
private static string ResolveRepoRoot(string? overridePath, string configPath)
{
if (!string.IsNullOrWhiteSpace(overridePath))
{
return Path.GetFullPath(overridePath);
}
var configDirectory = Path.GetDirectoryName(configPath);
if (string.IsNullOrWhiteSpace(configDirectory))
{
return Directory.GetCurrentDirectory();
}
return Path.GetFullPath(Path.Combine(configDirectory, "..", "..", ".."));
}
private sealed record ProgramOptions(
string ConfigPath,
int? Iterations,
double? ThresholdMs,
double? MinThroughputPerSecond,
double? MaxAllocatedMb,
string? CsvOutPath,
string? JsonOutPath,
string? PrometheusOutPath,
string? RepoRoot,
string BaselinePath,
DateTimeOffset? CapturedAtUtc,
string? Commit,
string? Environment,
double? RegressionLimit)
{
public static ProgramOptions Parse(string[] args)
{
var configPath = DefaultConfigPath();
var baselinePath = DefaultBaselinePath();
int? iterations = null;
double? thresholdMs = null;
double? minThroughput = null;
double? maxAllocated = null;
string? csvOut = null;
string? jsonOut = null;
string? promOut = null;
string? repoRoot = null;
DateTimeOffset? capturedAt = null;
string? commit = null;
string? environment = null;
double? regressionLimit = null;
for (var index = 0; index < args.Length; index++)
{
var current = args[index];
switch (current)
{
case "--config":
EnsureNext(args, index);
configPath = Path.GetFullPath(args[++index]);
break;
case "--iterations":
EnsureNext(args, index);
iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--threshold-ms":
EnsureNext(args, index);
thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--min-throughput":
EnsureNext(args, index);
minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--max-allocated-mb":
EnsureNext(args, index);
maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--csv":
EnsureNext(args, index);
csvOut = args[++index];
break;
case "--json":
EnsureNext(args, index);
jsonOut = args[++index];
break;
case "--prometheus":
EnsureNext(args, index);
promOut = args[++index];
break;
case "--repo-root":
EnsureNext(args, index);
repoRoot = args[++index];
break;
case "--baseline":
EnsureNext(args, index);
baselinePath = Path.GetFullPath(args[++index]);
break;
case "--captured-at":
EnsureNext(args, index);
capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
break;
case "--commit":
EnsureNext(args, index);
commit = args[++index];
break;
case "--environment":
EnsureNext(args, index);
environment = args[++index];
break;
case "--regression-limit":
EnsureNext(args, index);
regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--help":
case "-h":
PrintUsage();
System.Environment.Exit(0);
break;
default:
throw new ArgumentException($"Unknown argument '{current}'.");
}
}
return new ProgramOptions(
configPath,
iterations,
thresholdMs,
minThroughput,
maxAllocated,
csvOut,
jsonOut,
promOut,
repoRoot,
baselinePath,
capturedAt,
commit,
environment,
regressionLimit);
}
private static string DefaultConfigPath()
{
var binaryDir = AppContext.BaseDirectory;
var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
return Path.Combine(benchRoot, "config.json");
}
private static string DefaultBaselinePath()
{
var binaryDir = AppContext.BaseDirectory;
var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
return Path.Combine(benchRoot, "baseline.csv");
}
private static void EnsureNext(string[] args, int index)
{
if (index + 1 >= args.Length)
{
throw new ArgumentException("Missing value for argument.");
}
}
private static void PrintUsage()
{
Console.WriteLine("Usage: policy-bench [options]");
Console.WriteLine();
Console.WriteLine("Options:");
Console.WriteLine(" --config <path> Path to benchmark configuration JSON.");
Console.WriteLine(" --iterations <count> Override iteration count.");
Console.WriteLine(" --threshold-ms <value> Global latency threshold in milliseconds.");
Console.WriteLine(" --min-throughput <value> Global throughput floor (findings/second).");
Console.WriteLine(" --max-allocated-mb <value> Global allocation ceiling (MB).");
Console.WriteLine(" --csv <path> Write CSV results to path.");
Console.WriteLine(" --json <path> Write JSON results to path.");
Console.WriteLine(" --prometheus <path> Write Prometheus exposition metrics to path.");
Console.WriteLine(" --repo-root <path> Repository root override.");
Console.WriteLine(" --baseline <path> Baseline CSV path.");
Console.WriteLine(" --captured-at <iso8601> Timestamp to embed in JSON metadata.");
Console.WriteLine(" --commit <sha> Commit identifier for metadata.");
Console.WriteLine(" --environment <name> Environment label for metadata.");
Console.WriteLine(" --regression-limit <value> Regression multiplier (default 1.15).");
}
}
}
internal static class TablePrinter
{
public static void Print(IEnumerable<ScenarioResult> results)
{
Console.WriteLine("Scenario | Findings | Mean(ms) | P95(ms) | Max(ms) | Min k/s | Alloc(MB)");
Console.WriteLine("---------------------------- | ----------- | ---------- | ---------- | ---------- | -------- | --------");
foreach (var row in results)
{
Console.WriteLine(string.Join(" | ", new[]
{
row.IdColumn,
row.FindingsColumn,
row.MeanColumn,
row.P95Column,
row.MaxColumn,
row.MinThroughputColumn,
row.AllocatedColumn
}));
}
}
}
internal static class CsvWriter
{
public static void Write(string path, IEnumerable<ScenarioResult> results)
{
var resolvedPath = Path.GetFullPath(path);
var directory = Path.GetDirectoryName(resolvedPath);
if (!string.IsNullOrEmpty(directory))
{
Directory.CreateDirectory(directory);
}
using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None);
using var writer = new StreamWriter(stream);
writer.WriteLine("scenario,iterations,findings,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb");
foreach (var row in results)
{
writer.Write(row.Id);
writer.Write(',');
writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.FindingCount.ToString(CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.MeanThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.MinThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture));
writer.WriteLine();
}
}
}

View File

@@ -1,125 +1,125 @@
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Bench.PolicyEngine.Baseline;
namespace StellaOps.Bench.PolicyEngine.Reporting;
/// <summary>
/// Serializes benchmark reports to a JSON document using web defaults
/// (camelCase property names), indented output, and omission of null values.
/// </summary>
internal static class BenchmarkJsonWriter
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    /// <summary>
    /// Writes metadata plus one entry per report to <paramref name="path"/>,
    /// creating parent directories and overwriting any existing file.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="path"/> is null or whitespace.</exception>
    /// <exception cref="ArgumentNullException">Thrown when metadata or reports is null.</exception>
    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);
        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }
        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());
        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    // Maps one report (result + optional baseline + regression data) onto the JSON DTOs.
    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;
        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.FindingCount,
            report.Result.MeanMs,
            report.Result.P95Ms,
            report.Result.MaxMs,
            report.Result.MeanThroughputPerSecond,
            report.Result.MinThroughputPerSecond,
            report.Result.MaxAllocatedMb,
            report.Result.ThresholdMs,
            report.Result.MinThroughputThresholdPerSecond,
            report.Result.MaxAllocatedThresholdMb,
            // A scenario without a baseline entry serializes with Baseline omitted
            // (WhenWritingNull above).
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.FindingCount,
                    baseline.MeanMs,
                    baseline.P95Ms,
                    baseline.MaxMs,
                    baseline.MeanThroughputPerSecond,
                    baseline.MinThroughputPerSecond,
                    baseline.MaxAllocatedMb),
            new BenchmarkJsonScenarioRegression(
                report.DurationRegressionRatio,
                report.ThroughputRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    // Top-level JSON document shape.
    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    // Per-scenario measurements and thresholds.
    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int FindingCount,
        double MeanMs,
        double P95Ms,
        double MaxMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MaxAllocatedMb,
        double? ThresholdMs,
        double? MinThroughputThresholdPerSecond,
        double? MaxAllocatedThresholdMb,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    // Baseline measurements mirrored from the baseline CSV entry.
    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int FindingCount,
        double MeanMs,
        double P95Ms,
        double MaxMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MaxAllocatedMb);

    // Regression ratios vs. baseline; ratios are null when no baseline exists.
    private sealed record BenchmarkJsonScenarioRegression(
        double? DurationRatio,
        double? ThroughputRatio,
        double Limit,
        bool Breached);
}
/// <summary>
/// Metadata embedded at the top of the benchmark JSON document
/// (schema version, capture time, optional commit/environment labels).
/// </summary>
internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Bench.PolicyEngine.Baseline;
namespace StellaOps.Bench.PolicyEngine.Reporting;
internal static class BenchmarkJsonWriter
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
};
public static async Task WriteAsync(
string path,
BenchmarkJsonMetadata metadata,
IReadOnlyList<BenchmarkScenarioReport> reports,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
ArgumentNullException.ThrowIfNull(metadata);
ArgumentNullException.ThrowIfNull(reports);
var resolved = Path.GetFullPath(path);
var directory = Path.GetDirectoryName(resolved);
if (!string.IsNullOrEmpty(directory))
{
Directory.CreateDirectory(directory);
}
var document = new BenchmarkJsonDocument(
metadata.SchemaVersion,
metadata.CapturedAtUtc,
metadata.Commit,
metadata.Environment,
reports.Select(CreateScenario).ToArray());
await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
{
var baseline = report.Baseline;
return new BenchmarkJsonScenario(
report.Result.Id,
report.Result.Label,
report.Result.Iterations,
report.Result.FindingCount,
report.Result.MeanMs,
report.Result.P95Ms,
report.Result.MaxMs,
report.Result.MeanThroughputPerSecond,
report.Result.MinThroughputPerSecond,
report.Result.MaxAllocatedMb,
report.Result.ThresholdMs,
report.Result.MinThroughputThresholdPerSecond,
report.Result.MaxAllocatedThresholdMb,
baseline is null
? null
: new BenchmarkJsonScenarioBaseline(
baseline.Iterations,
baseline.FindingCount,
baseline.MeanMs,
baseline.P95Ms,
baseline.MaxMs,
baseline.MeanThroughputPerSecond,
baseline.MinThroughputPerSecond,
baseline.MaxAllocatedMb),
new BenchmarkJsonScenarioRegression(
report.DurationRegressionRatio,
report.ThroughputRegressionRatio,
report.RegressionLimit,
report.RegressionBreached));
}
private sealed record BenchmarkJsonDocument(
string SchemaVersion,
DateTimeOffset CapturedAt,
string? Commit,
string? Environment,
IReadOnlyList<BenchmarkJsonScenario> Scenarios);
private sealed record BenchmarkJsonScenario(
string Id,
string Label,
int Iterations,
int FindingCount,
double MeanMs,
double P95Ms,
double MaxMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MaxAllocatedMb,
double? ThresholdMs,
double? MinThroughputThresholdPerSecond,
double? MaxAllocatedThresholdMb,
BenchmarkJsonScenarioBaseline? Baseline,
BenchmarkJsonScenarioRegression Regression);
private sealed record BenchmarkJsonScenarioBaseline(
int Iterations,
int FindingCount,
double MeanMs,
double P95Ms,
double MaxMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MaxAllocatedMb);
private sealed record BenchmarkJsonScenarioRegression(
double? DurationRatio,
double? ThroughputRatio,
double Limit,
bool Breached);
}
internal sealed record BenchmarkJsonMetadata(
string SchemaVersion,
DateTimeOffset CapturedAtUtc,
string? Commit,
string? Environment);

View File

@@ -1,82 +1,82 @@
using StellaOps.Bench.PolicyEngine.Baseline;
namespace StellaOps.Bench.PolicyEngine.Reporting;
/// <summary>
/// Pairs a scenario result with its (optional) baseline entry and computes
/// regression ratios: duration = current/baseline max ms, throughput =
/// baseline/current min findings-per-second. A ratio at or above
/// <see cref="RegressionLimit"/> counts as a regression.
/// </summary>
internal sealed class BenchmarkScenarioReport
{
    // Default: a metric may degrade up to 15% before being flagged.
    private const double DefaultRegressionLimit = 1.15d;

    public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        Result = result ?? throw new ArgumentNullException(nameof(result));
        Baseline = baseline;
        // Non-positive or absent limits fall back to the default.
        RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
        DurationRegressionRatio = CalculateDurationRatio(result.MaxMs, baseline?.MaxMs);
        ThroughputRegressionRatio = CalculateThroughputRatio(result.MinThroughputPerSecond, baseline?.MinThroughputPerSecond);
    }

    public ScenarioResult Result { get; }

    // Null when the scenario has no entry in the baseline CSV.
    public BaselineEntry? Baseline { get; }

    public double RegressionLimit { get; }

    // current/baseline max duration; null when no usable baseline value.
    public double? DurationRegressionRatio { get; }

    // baseline/current min throughput; null when no usable baseline value.
    public double? ThroughputRegressionRatio { get; }

    public bool DurationRegressionBreached =>
        DurationRegressionRatio is { } ratio &&
        ratio >= RegressionLimit;

    public bool ThroughputRegressionBreached =>
        ThroughputRegressionRatio is { } ratio &&
        ratio >= RegressionLimit;

    public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached;

    /// <summary>
    /// Yields human-readable failure messages for any breached regression;
    /// empty when there is no baseline.
    /// </summary>
    public IEnumerable<string> BuildRegressionFailureMessages()
    {
        if (Baseline is null)
        {
            yield break;
        }
        if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
        {
            var delta = (durationRatio - 1d) * 100d;
            yield return $"{Result.Id} exceeded max duration budget: {Result.MaxMs:F2} ms vs baseline {Baseline.MaxMs:F2} ms (+{delta:F1}%).";
        }
        if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio)
        {
            // NOTE(review): delta here is (baseline/current - 1)*100, which is not
            // the same as the absolute percentage drop in throughput
            // ((1 - current/baseline)*100) — confirm the intended semantics.
            var delta = (throughputRatio - 1d) * 100d;
            yield return $"{Result.Id} throughput regressed: min {Result.MinThroughputPerSecond:N0} /s vs baseline {Baseline.MinThroughputPerSecond:N0} /s (-{delta:F1}%).";
        }
    }

    // Returns current/baseline, or null when the baseline is missing or non-positive.
    private static double? CalculateDurationRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }
        return current / baseline.Value;
    }

    // Returns baseline/current; +Infinity when current throughput is zero or
    // negative (total regression), null when the baseline is missing/non-positive.
    private static double? CalculateThroughputRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }
        if (current <= 0d)
        {
            return double.PositiveInfinity;
        }
        return baseline.Value / current;
    }
}
using StellaOps.Bench.PolicyEngine.Baseline;
namespace StellaOps.Bench.PolicyEngine.Reporting;
/// <summary>
/// Joins a scenario's measured results with its stored baseline and exposes
/// regression ratios plus pass/fail signals against a configurable limit.
/// </summary>
internal sealed class BenchmarkScenarioReport
{
    // A run may be up to 15% worse than baseline before it is flagged.
    private const double DefaultRegressionLimit = 1.15d;

    public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        Result = result ?? throw new ArgumentNullException(nameof(result));
        Baseline = baseline;
        // Lifted comparison: a null or non-positive limit falls back to the default.
        RegressionLimit = regressionLimit > 0d ? regressionLimit.Value : DefaultRegressionLimit;
        DurationRegressionRatio = DurationRatio(result.MaxMs, baseline?.MaxMs);
        ThroughputRegressionRatio = ThroughputRatio(result.MinThroughputPerSecond, baseline?.MinThroughputPerSecond);
    }

    public ScenarioResult Result { get; }

    public BaselineEntry? Baseline { get; }

    public double RegressionLimit { get; }

    /// <summary>Current max duration divided by baseline max; null without a usable baseline.</summary>
    public double? DurationRegressionRatio { get; }

    /// <summary>Baseline min throughput divided by current min; null without a usable baseline.</summary>
    public double? ThroughputRegressionRatio { get; }

    // Lifted >=: a null ratio never counts as breached.
    public bool DurationRegressionBreached => DurationRegressionRatio >= RegressionLimit;

    public bool ThroughputRegressionBreached => ThroughputRegressionRatio >= RegressionLimit;

    public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached;

    /// <summary>Yields one human-readable message per breached budget; empty when no baseline exists.</summary>
    public IEnumerable<string> BuildRegressionFailureMessages()
    {
        if (Baseline is not { } baseline)
        {
            yield break;
        }

        if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
        {
            var delta = (durationRatio - 1d) * 100d;
            yield return $"{Result.Id} exceeded max duration budget: {Result.MaxMs:F2} ms vs baseline {baseline.MaxMs:F2} ms (+{delta:F1}%).";
        }

        if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio)
        {
            var delta = (throughputRatio - 1d) * 100d;
            yield return $"{Result.Id} throughput regressed: min {Result.MinThroughputPerSecond:N0} /s vs baseline {baseline.MinThroughputPerSecond:N0} /s (-{delta:F1}%).";
        }
    }

    // Null when the baseline value is absent or non-positive.
    private static double? DurationRatio(double current, double? baseline) =>
        baseline > 0d ? current / baseline.Value : null;

    // Inverted (baseline / current) so "bigger is worse", mirroring the duration
    // ratio; a non-positive current throughput maps to positive infinity.
    private static double? ThroughputRatio(double current, double? baseline)
    {
        if (!(baseline > 0d))
        {
            return null;
        }

        return current <= 0d ? double.PositiveInfinity : baseline.Value / current;
    }
}

View File

@@ -1,83 +1,83 @@
using System.Globalization;
using System.Text;
namespace StellaOps.Bench.PolicyEngine.Reporting;
/// <summary>
/// Writes benchmark reports to disk in the Prometheus text exposition format,
/// emitting one gauge sample per metric with a scenario label.
/// </summary>
internal static class PrometheusWriter
{
    /// <summary>Renders all reports and writes them to <paramref name="path"/>, creating directories as needed.</summary>
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var builder = new StringBuilder();
        // NOTE(review): these HELP/TYPE headers name metric families that differ
        // from the sample names emitted below - confirm whether per-metric
        // headers are intended.
        builder.AppendLine("# HELP policy_engine_bench_duration_ms Policy Engine benchmark duration metrics (milliseconds).");
        builder.AppendLine("# TYPE policy_engine_bench_duration_ms gauge");
        builder.AppendLine("# HELP policy_engine_bench_throughput_per_sec Policy Engine benchmark throughput metrics (findings per second).");
        builder.AppendLine("# TYPE policy_engine_bench_throughput_per_sec gauge");
        builder.AppendLine("# HELP policy_engine_bench_allocation_mb Policy Engine benchmark allocation metrics (megabytes).");
        builder.AppendLine("# TYPE policy_engine_bench_allocation_mb gauge");

        foreach (var report in reports)
        {
            var scenarioLabel = Escape(report.Result.Id);
            AppendMetric(builder, "policy_engine_bench_mean_ms", scenarioLabel, report.Result.MeanMs);
            AppendMetric(builder, "policy_engine_bench_p95_ms", scenarioLabel, report.Result.P95Ms);
            AppendMetric(builder, "policy_engine_bench_max_ms", scenarioLabel, report.Result.MaxMs);
            AppendMetric(builder, "policy_engine_bench_threshold_ms", scenarioLabel, report.Result.ThresholdMs);
            AppendMetric(builder, "policy_engine_bench_mean_throughput_per_sec", scenarioLabel, report.Result.MeanThroughputPerSecond);
            AppendMetric(builder, "policy_engine_bench_min_throughput_per_sec", scenarioLabel, report.Result.MinThroughputPerSecond);
            AppendMetric(builder, "policy_engine_bench_min_throughput_threshold_per_sec", scenarioLabel, report.Result.MinThroughputThresholdPerSecond);
            AppendMetric(builder, "policy_engine_bench_max_allocated_mb", scenarioLabel, report.Result.MaxAllocatedMb);
            AppendMetric(builder, "policy_engine_bench_max_allocated_threshold_mb", scenarioLabel, report.Result.MaxAllocatedThresholdMb);

            if (report.Baseline is { } baseline)
            {
                AppendMetric(builder, "policy_engine_bench_baseline_max_ms", scenarioLabel, baseline.MaxMs);
                AppendMetric(builder, "policy_engine_bench_baseline_mean_ms", scenarioLabel, baseline.MeanMs);
                AppendMetric(builder, "policy_engine_bench_baseline_min_throughput_per_sec", scenarioLabel, baseline.MinThroughputPerSecond);
            }

            if (report.DurationRegressionRatio is { } durationRatio)
            {
                AppendMetric(builder, "policy_engine_bench_duration_regression_ratio", scenarioLabel, durationRatio);
            }

            if (report.ThroughputRegressionRatio is { } throughputRatio)
            {
                AppendMetric(builder, "policy_engine_bench_throughput_regression_ratio", scenarioLabel, throughputRatio);
            }

            AppendMetric(builder, "policy_engine_bench_regression_limit", scenarioLabel, report.RegressionLimit);
            AppendMetric(builder, "policy_engine_bench_regression_breached", scenarioLabel, report.RegressionBreached ? 1 : 0);
        }

        File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
    }

    // Emits `metric{scenario="..."} value` with round-trip formatting; null values are skipped.
    private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
    {
        if (!value.HasValue)
        {
            return;
        }

        builder.Append(metric);
        builder.Append("{scenario=\"");
        builder.Append(scenario);
        builder.Append("\"} ");
        builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture));
    }

    // Escape label values per the Prometheus text format: backslash, double
    // quote, and line feed must all be escaped (the original missed "\n",
    // which would corrupt the exposition output).
    private static string Escape(string value) =>
        value
            .Replace("\\", "\\\\", StringComparison.Ordinal)
            .Replace("\"", "\\\"", StringComparison.Ordinal)
            .Replace("\n", "\\n", StringComparison.Ordinal);
}
using System.Globalization;
using System.Text;
namespace StellaOps.Bench.PolicyEngine.Reporting;
/// <summary>
/// Writes benchmark reports to disk in the Prometheus text exposition format,
/// emitting one gauge sample per metric with a scenario label.
/// </summary>
internal static class PrometheusWriter
{
    /// <summary>Renders all reports and writes them to <paramref name="path"/>, creating directories as needed.</summary>
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var builder = new StringBuilder();
        // NOTE(review): these HELP/TYPE headers name metric families that differ
        // from the sample names emitted below - confirm whether per-metric
        // headers are intended.
        builder.AppendLine("# HELP policy_engine_bench_duration_ms Policy Engine benchmark duration metrics (milliseconds).");
        builder.AppendLine("# TYPE policy_engine_bench_duration_ms gauge");
        builder.AppendLine("# HELP policy_engine_bench_throughput_per_sec Policy Engine benchmark throughput metrics (findings per second).");
        builder.AppendLine("# TYPE policy_engine_bench_throughput_per_sec gauge");
        builder.AppendLine("# HELP policy_engine_bench_allocation_mb Policy Engine benchmark allocation metrics (megabytes).");
        builder.AppendLine("# TYPE policy_engine_bench_allocation_mb gauge");

        foreach (var report in reports)
        {
            var scenarioLabel = Escape(report.Result.Id);
            AppendMetric(builder, "policy_engine_bench_mean_ms", scenarioLabel, report.Result.MeanMs);
            AppendMetric(builder, "policy_engine_bench_p95_ms", scenarioLabel, report.Result.P95Ms);
            AppendMetric(builder, "policy_engine_bench_max_ms", scenarioLabel, report.Result.MaxMs);
            AppendMetric(builder, "policy_engine_bench_threshold_ms", scenarioLabel, report.Result.ThresholdMs);
            AppendMetric(builder, "policy_engine_bench_mean_throughput_per_sec", scenarioLabel, report.Result.MeanThroughputPerSecond);
            AppendMetric(builder, "policy_engine_bench_min_throughput_per_sec", scenarioLabel, report.Result.MinThroughputPerSecond);
            AppendMetric(builder, "policy_engine_bench_min_throughput_threshold_per_sec", scenarioLabel, report.Result.MinThroughputThresholdPerSecond);
            AppendMetric(builder, "policy_engine_bench_max_allocated_mb", scenarioLabel, report.Result.MaxAllocatedMb);
            AppendMetric(builder, "policy_engine_bench_max_allocated_threshold_mb", scenarioLabel, report.Result.MaxAllocatedThresholdMb);

            if (report.Baseline is { } baseline)
            {
                AppendMetric(builder, "policy_engine_bench_baseline_max_ms", scenarioLabel, baseline.MaxMs);
                AppendMetric(builder, "policy_engine_bench_baseline_mean_ms", scenarioLabel, baseline.MeanMs);
                AppendMetric(builder, "policy_engine_bench_baseline_min_throughput_per_sec", scenarioLabel, baseline.MinThroughputPerSecond);
            }

            if (report.DurationRegressionRatio is { } durationRatio)
            {
                AppendMetric(builder, "policy_engine_bench_duration_regression_ratio", scenarioLabel, durationRatio);
            }

            if (report.ThroughputRegressionRatio is { } throughputRatio)
            {
                AppendMetric(builder, "policy_engine_bench_throughput_regression_ratio", scenarioLabel, throughputRatio);
            }

            AppendMetric(builder, "policy_engine_bench_regression_limit", scenarioLabel, report.RegressionLimit);
            AppendMetric(builder, "policy_engine_bench_regression_breached", scenarioLabel, report.RegressionBreached ? 1 : 0);
        }

        File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
    }

    // Emits `metric{scenario="..."} value` with round-trip formatting; null values are skipped.
    private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
    {
        if (!value.HasValue)
        {
            return;
        }

        builder.Append(metric);
        builder.Append("{scenario=\"");
        builder.Append(scenario);
        builder.Append("\"} ");
        builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture));
    }

    // Escape label values per the Prometheus text format: backslash, double
    // quote, and line feed must all be escaped (the original missed "\n",
    // which would corrupt the exposition output).
    private static string Escape(string value) =>
        value
            .Replace("\\", "\\\\", StringComparison.Ordinal)
            .Replace("\"", "\\\"", StringComparison.Ordinal)
            .Replace("\n", "\\n", StringComparison.Ordinal);
}

View File

@@ -1,110 +1,110 @@
using System.Globalization;
namespace StellaOps.Bench.PolicyEngine;
/// <summary>
/// Aggregated measurements for one benchmark scenario run, plus the optional
/// thresholds the run is judged against.
/// </summary>
internal sealed record ScenarioResult(
    string Id,
    string Label,
    int Iterations,
    int FindingCount,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MaxAllocatedMb,
    double? ThresholdMs,
    double? MinThroughputThresholdPerSecond,
    double? MaxAllocatedThresholdMb)
{
    // Fixed-width columns for a console summary table; ids longer than
    // 28 characters are truncated so the table stays aligned.
    public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];

    // Numeric columns are rendered with the invariant culture so output is
    // stable regardless of the host locale.
    public string FindingsColumn => FindingCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);

    public string MeanColumn => MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string P95Column => P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string MaxColumn => MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    // Rendered in thousands per second.
    public string MinThroughputColumn => (MinThroughputPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);

    public string AllocatedColumn => MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}
/// <summary>Summary statistics (mean / p95 / max) over per-iteration durations, in milliseconds.</summary>
internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
    /// <summary>Computes duration statistics; an empty sample set yields all zeros.</summary>
    public static DurationStatistics From(IReadOnlyList<double> durations)
    {
        if (durations.Count == 0)
        {
            return new DurationStatistics(0, 0, 0);
        }

        var ordered = durations.ToArray();
        Array.Sort(ordered);

        return new DurationStatistics(durations.Average(), Percentile(ordered, 95), ordered[^1]);
    }

    // Linear-interpolation percentile over an ascending-sorted sample.
    private static double Percentile(IReadOnlyList<double> sorted, double percentile)
    {
        if (sorted.Count == 0)
        {
            return 0;
        }

        var rank = percentile / 100d * (sorted.Count - 1);
        var lower = (int)Math.Floor(rank);
        var upper = (int)Math.Ceiling(rank);
        if (upper >= sorted.Count)
        {
            return sorted[lower];
        }

        var weight = rank - lower;
        return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
    }
}
/// <summary>Mean and minimum throughput across iterations (operations per second).</summary>
internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
    /// <summary>Computes throughput statistics; an empty sample set yields zeros.</summary>
    public static ThroughputStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new ThroughputStatistics(0, 0);
        }

        var sum = 0d;
        var minimum = double.MaxValue;
        foreach (var sample in values)
        {
            sum += sample;
            minimum = Math.Min(minimum, sample);
        }

        return new ThroughputStatistics(sum / values.Count, minimum);
    }
}
/// <summary>Peak allocation (in MB) observed across iterations.</summary>
internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
    /// <summary>Folds the samples with Math.Max from a zero seed; an empty set yields zero.</summary>
    public static AllocationStatistics From(IReadOnlyList<double> values) =>
        new(values.Aggregate(0d, Math.Max));
}
using System.Globalization;
namespace StellaOps.Bench.PolicyEngine;
/// <summary>
/// Aggregated measurements for one benchmark scenario run, plus the optional
/// thresholds the run is judged against.
/// </summary>
internal sealed record ScenarioResult(
    string Id,
    string Label,
    int Iterations,
    int FindingCount,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MaxAllocatedMb,
    double? ThresholdMs,
    double? MinThroughputThresholdPerSecond,
    double? MaxAllocatedThresholdMb)
{
    // Fixed-width columns for a console summary table; ids longer than
    // 28 characters are truncated so the table stays aligned.
    public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];

    // Numeric columns are rendered with the invariant culture so output is
    // stable regardless of the host locale.
    public string FindingsColumn => FindingCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);

    public string MeanColumn => MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string P95Column => P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    public string MaxColumn => MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);

    // Rendered in thousands per second.
    public string MinThroughputColumn => (MinThroughputPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);

    public string AllocatedColumn => MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}
/// <summary>Summary statistics (mean / p95 / max) over per-iteration durations, in milliseconds.</summary>
internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
    /// <summary>Computes duration statistics; an empty sample set yields all zeros.</summary>
    public static DurationStatistics From(IReadOnlyList<double> durations)
    {
        if (durations.Count == 0)
        {
            return new DurationStatistics(0, 0, 0);
        }

        var ordered = durations.ToArray();
        Array.Sort(ordered);

        return new DurationStatistics(durations.Average(), Percentile(ordered, 95), ordered[^1]);
    }

    // Linear-interpolation percentile over an ascending-sorted sample.
    private static double Percentile(IReadOnlyList<double> sorted, double percentile)
    {
        if (sorted.Count == 0)
        {
            return 0;
        }

        var rank = percentile / 100d * (sorted.Count - 1);
        var lower = (int)Math.Floor(rank);
        var upper = (int)Math.Ceiling(rank);
        if (upper >= sorted.Count)
        {
            return sorted[lower];
        }

        var weight = rank - lower;
        return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
    }
}
/// <summary>Mean and minimum throughput across iterations (operations per second).</summary>
internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
    /// <summary>Computes throughput statistics; an empty sample set yields zeros.</summary>
    public static ThroughputStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new ThroughputStatistics(0, 0);
        }

        var sum = 0d;
        var minimum = double.MaxValue;
        foreach (var sample in values)
        {
            sum += sample;
            minimum = Math.Min(minimum, sample);
        }

        return new ThroughputStatistics(sum / values.Count, minimum);
    }
}
/// <summary>Peak allocation (in MB) observed across iterations.</summary>
internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
    /// <summary>Folds the samples with Math.Max from a zero seed; an empty set yields zero.</summary>
    public static AllocationStatistics From(IReadOnlyList<double> values) =>
        new(values.Aggregate(0d, Math.Max));
}

View File

@@ -1,37 +1,37 @@
using System.Text;
using StellaOps.Bench.ScannerAnalyzers.Baseline;
using Xunit;
namespace StellaOps.Bench.ScannerAnalyzers.Tests;
public sealed class BaselineLoaderTests
{
    /// <summary>
    /// Verifies the CSV baseline loader parses header-prefixed rows into a
    /// dictionary keyed by scenario id.
    /// </summary>
    [Fact]
    public async Task LoadAsync_ReadsCsvIntoDictionary()
    {
        var csv = """
            scenario,iterations,sample_count,mean_ms,p95_ms,max_ms
            node_monorepo_walk,5,4,9.4303,36.1354,45.0012
            python_site_packages_walk,5,10,12.1000,18.2000,26.3000
            """;
        var path = await WriteTempFileAsync(csv);
        try
        {
            var result = await BaselineLoader.LoadAsync(path, CancellationToken.None);

            Assert.Equal(2, result.Count);
            var entry = Assert.Contains("node_monorepo_walk", result);
            Assert.Equal(5, entry.Iterations);
            Assert.Equal(4, entry.SampleCount);
            Assert.Equal(9.4303, entry.MeanMs, 4);
            Assert.Equal(36.1354, entry.P95Ms, 4);
            Assert.Equal(45.0012, entry.MaxMs, 4);
        }
        finally
        {
            // Fix: the original leaked the temp fixture on every run.
            File.Delete(path);
        }
    }

    // Writes the CSV content to a unique temp file and returns its path.
    private static async Task<string> WriteTempFileAsync(string content)
    {
        var path = Path.Combine(Path.GetTempPath(), $"baseline-{Guid.NewGuid():N}.csv");
        await File.WriteAllTextAsync(path, content, Encoding.UTF8);
        return path;
    }
}
using System.Text;
using StellaOps.Bench.ScannerAnalyzers.Baseline;
using Xunit;
namespace StellaOps.Bench.ScannerAnalyzers.Tests;
public sealed class BaselineLoaderTests
{
    /// <summary>
    /// Verifies the CSV baseline loader parses header-prefixed rows into a
    /// dictionary keyed by scenario id.
    /// </summary>
    [Fact]
    public async Task LoadAsync_ReadsCsvIntoDictionary()
    {
        var csv = """
            scenario,iterations,sample_count,mean_ms,p95_ms,max_ms
            node_monorepo_walk,5,4,9.4303,36.1354,45.0012
            python_site_packages_walk,5,10,12.1000,18.2000,26.3000
            """;
        var path = await WriteTempFileAsync(csv);
        try
        {
            var result = await BaselineLoader.LoadAsync(path, CancellationToken.None);

            Assert.Equal(2, result.Count);
            var entry = Assert.Contains("node_monorepo_walk", result);
            Assert.Equal(5, entry.Iterations);
            Assert.Equal(4, entry.SampleCount);
            Assert.Equal(9.4303, entry.MeanMs, 4);
            Assert.Equal(36.1354, entry.P95Ms, 4);
            Assert.Equal(45.0012, entry.MaxMs, 4);
        }
        finally
        {
            // Fix: the original leaked the temp fixture on every run.
            File.Delete(path);
        }
    }

    // Writes the CSV content to a unique temp file and returns its path.
    private static async Task<string> WriteTempFileAsync(string content)
    {
        var path = Path.Combine(Path.GetTempPath(), $"baseline-{Guid.NewGuid():N}.csv");
        await File.WriteAllTextAsync(path, content, Encoding.UTF8);
        return path;
    }
}

View File

@@ -1,41 +1,41 @@
using System.Text.Json;
using StellaOps.Bench.ScannerAnalyzers;
using StellaOps.Bench.ScannerAnalyzers.Baseline;
using StellaOps.Bench.ScannerAnalyzers.Reporting;
using Xunit;
namespace StellaOps.Bench.ScannerAnalyzers.Tests;
public sealed class BenchmarkJsonWriterTests
{
    /// <summary>
    /// Ensures the JSON report contains run metadata and per-scenario details,
    /// including baseline and regression sections.
    /// </summary>
    [Fact]
    public async Task WriteAsync_EmitsMetadataAndScenarioDetails()
    {
        var metadata = new BenchmarkJsonMetadata("1.0", DateTimeOffset.Parse("2025-10-23T12:00:00Z"), "abc123", "ci");
        var result = new ScenarioResult(
            "scenario",
            "Scenario",
            SampleCount: 5,
            MeanMs: 10,
            P95Ms: 12,
            MaxMs: 20,
            Iterations: 5,
            ThresholdMs: 5000);
        var baseline = new BaselineEntry("scenario", 5, 5, 9, 11, 10);
        var report = new BenchmarkScenarioReport(result, baseline, 1.2);
        var path = Path.Combine(Path.GetTempPath(), $"bench-{Guid.NewGuid():N}.json");
        try
        {
            await BenchmarkJsonWriter.WriteAsync(path, metadata, new[] { report }, CancellationToken.None);

            using var document = JsonDocument.Parse(await File.ReadAllTextAsync(path));
            var root = document.RootElement;
            Assert.Equal("1.0", root.GetProperty("schemaVersion").GetString());
            Assert.Equal("abc123", root.GetProperty("commit").GetString());
            var scenario = root.GetProperty("scenarios")[0];
            Assert.Equal("scenario", scenario.GetProperty("id").GetString());
            Assert.Equal(20, scenario.GetProperty("maxMs").GetDouble());
            Assert.Equal(10, scenario.GetProperty("baseline").GetProperty("maxMs").GetDouble());
            Assert.True(scenario.GetProperty("regression").GetProperty("breached").GetBoolean());
        }
        finally
        {
            // Fix: the original leaked the generated report file on every run.
            File.Delete(path);
        }
    }
}
using System.Text.Json;
using StellaOps.Bench.ScannerAnalyzers;
using StellaOps.Bench.ScannerAnalyzers.Baseline;
using StellaOps.Bench.ScannerAnalyzers.Reporting;
using Xunit;
namespace StellaOps.Bench.ScannerAnalyzers.Tests;
public sealed class BenchmarkJsonWriterTests
{
    /// <summary>
    /// Ensures the JSON report contains run metadata and per-scenario details,
    /// including baseline and regression sections.
    /// </summary>
    [Fact]
    public async Task WriteAsync_EmitsMetadataAndScenarioDetails()
    {
        var metadata = new BenchmarkJsonMetadata("1.0", DateTimeOffset.Parse("2025-10-23T12:00:00Z"), "abc123", "ci");
        var result = new ScenarioResult(
            "scenario",
            "Scenario",
            SampleCount: 5,
            MeanMs: 10,
            P95Ms: 12,
            MaxMs: 20,
            Iterations: 5,
            ThresholdMs: 5000);
        var baseline = new BaselineEntry("scenario", 5, 5, 9, 11, 10);
        var report = new BenchmarkScenarioReport(result, baseline, 1.2);
        var path = Path.Combine(Path.GetTempPath(), $"bench-{Guid.NewGuid():N}.json");
        try
        {
            await BenchmarkJsonWriter.WriteAsync(path, metadata, new[] { report }, CancellationToken.None);

            using var document = JsonDocument.Parse(await File.ReadAllTextAsync(path));
            var root = document.RootElement;
            Assert.Equal("1.0", root.GetProperty("schemaVersion").GetString());
            Assert.Equal("abc123", root.GetProperty("commit").GetString());
            var scenario = root.GetProperty("scenarios")[0];
            Assert.Equal("scenario", scenario.GetProperty("id").GetString());
            Assert.Equal(20, scenario.GetProperty("maxMs").GetDouble());
            Assert.Equal(10, scenario.GetProperty("baseline").GetProperty("maxMs").GetDouble());
            Assert.True(scenario.GetProperty("regression").GetProperty("breached").GetBoolean());
        }
        finally
        {
            // Fix: the original leaked the generated report file on every run.
            File.Delete(path);
        }
    }
}

View File

@@ -1,58 +1,58 @@
using StellaOps.Bench.ScannerAnalyzers;
using StellaOps.Bench.ScannerAnalyzers.Baseline;
using StellaOps.Bench.ScannerAnalyzers.Reporting;
using Xunit;
namespace StellaOps.Bench.ScannerAnalyzers.Tests;
public sealed class BenchmarkScenarioReportTests
{
    /// <summary>
    /// With a baseline present, the report computes the max-duration ratio
    /// (current max / baseline max) and flags a breach past the limit.
    /// </summary>
    [Fact]
    public void RegressionRatio_ComputedWhenBaselinePresent()
    {
        var result = new ScenarioResult(
            "scenario",
            "Scenario",
            SampleCount: 5,
            MeanMs: 10,
            P95Ms: 12,
            MaxMs: 20,
            Iterations: 5,
            ThresholdMs: 5000);
        var baseline = new BaselineEntry(
            "scenario",
            Iterations: 5,
            SampleCount: 5,
            MeanMs: 8,
            P95Ms: 11,
            MaxMs: 15);

        var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.2);

        // 20 / 15 ~ 1.333 exceeds the 1.2 limit, i.e. +33.3% over baseline.
        Assert.True(report.MaxRegressionRatio.HasValue);
        Assert.Equal(20d / 15d, report.MaxRegressionRatio.Value, 6);
        Assert.True(report.RegressionBreached);
        Assert.Contains("+33.3%", report.BuildRegressionFailureMessage());
    }

    /// <summary>Without a baseline there is no ratio, no breach, and no failure message.</summary>
    [Fact]
    public void RegressionRatio_NullWhenBaselineMissing()
    {
        var result = new ScenarioResult(
            "scenario",
            "Scenario",
            SampleCount: 5,
            MeanMs: 10,
            P95Ms: 12,
            MaxMs: 20,
            Iterations: 5,
            ThresholdMs: 5000);

        var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: 1.2);

        Assert.Null(report.MaxRegressionRatio);
        Assert.False(report.RegressionBreached);
        Assert.Null(report.BuildRegressionFailureMessage());
    }
}
using StellaOps.Bench.ScannerAnalyzers;
using StellaOps.Bench.ScannerAnalyzers.Baseline;
using StellaOps.Bench.ScannerAnalyzers.Reporting;
using Xunit;
namespace StellaOps.Bench.ScannerAnalyzers.Tests;
public sealed class BenchmarkScenarioReportTests
{
    /// <summary>
    /// With a baseline present, the report computes the max-duration ratio
    /// (current max / baseline max) and flags a breach past the limit.
    /// </summary>
    [Fact]
    public void RegressionRatio_ComputedWhenBaselinePresent()
    {
        var result = new ScenarioResult(
            "scenario",
            "Scenario",
            SampleCount: 5,
            MeanMs: 10,
            P95Ms: 12,
            MaxMs: 20,
            Iterations: 5,
            ThresholdMs: 5000);
        var baseline = new BaselineEntry(
            "scenario",
            Iterations: 5,
            SampleCount: 5,
            MeanMs: 8,
            P95Ms: 11,
            MaxMs: 15);

        var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.2);

        // 20 / 15 ~ 1.333 exceeds the 1.2 limit, i.e. +33.3% over baseline.
        Assert.True(report.MaxRegressionRatio.HasValue);
        Assert.Equal(20d / 15d, report.MaxRegressionRatio.Value, 6);
        Assert.True(report.RegressionBreached);
        Assert.Contains("+33.3%", report.BuildRegressionFailureMessage());
    }

    /// <summary>Without a baseline there is no ratio, no breach, and no failure message.</summary>
    [Fact]
    public void RegressionRatio_NullWhenBaselineMissing()
    {
        var result = new ScenarioResult(
            "scenario",
            "Scenario",
            SampleCount: 5,
            MeanMs: 10,
            P95Ms: 12,
            MaxMs: 20,
            Iterations: 5,
            ThresholdMs: 5000);

        var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: 1.2);

        Assert.Null(report.MaxRegressionRatio);
        Assert.False(report.RegressionBreached);
        Assert.Null(report.BuildRegressionFailureMessage());
    }
}

View File

@@ -1,32 +1,32 @@
using StellaOps.Bench.ScannerAnalyzers;
using StellaOps.Bench.ScannerAnalyzers.Baseline;
using StellaOps.Bench.ScannerAnalyzers.Reporting;
using Xunit;
namespace StellaOps.Bench.ScannerAnalyzers.Tests;
public sealed class PrometheusWriterTests
{
    /// <summary>Verifies the Prometheus writer emits gauge samples labelled by scenario.</summary>
    [Fact]
    public void Write_EmitsMetricsForScenario()
    {
        var result = new ScenarioResult(
            "scenario_a",
            "Scenario A",
            SampleCount: 5,
            MeanMs: 10,
            P95Ms: 12,
            MaxMs: 20,
            Iterations: 5,
            ThresholdMs: 5000);
        var baseline = new BaselineEntry("scenario_a", 5, 5, 9, 11, 18);
        var report = new BenchmarkScenarioReport(result, baseline, 1.2);
        var path = Path.Combine(Path.GetTempPath(), $"metrics-{Guid.NewGuid():N}.prom");
        try
        {
            PrometheusWriter.Write(path, new[] { report });

            var contents = File.ReadAllText(path);
            Assert.Contains("scanner_analyzer_bench_max_ms{scenario=\"scenario_a\"} 20", contents);
            Assert.Contains("scanner_analyzer_bench_regression_ratio{scenario=\"scenario_a\"}", contents);
        }
        finally
        {
            // Fix: the original leaked the generated metrics file on every run.
            File.Delete(path);
        }
    }
}
using StellaOps.Bench.ScannerAnalyzers;
using StellaOps.Bench.ScannerAnalyzers.Baseline;
using StellaOps.Bench.ScannerAnalyzers.Reporting;
using Xunit;
namespace StellaOps.Bench.ScannerAnalyzers.Tests;
public sealed class PrometheusWriterTests
{
    /// <summary>Verifies the Prometheus writer emits gauge samples labelled by scenario.</summary>
    [Fact]
    public void Write_EmitsMetricsForScenario()
    {
        var result = new ScenarioResult(
            "scenario_a",
            "Scenario A",
            SampleCount: 5,
            MeanMs: 10,
            P95Ms: 12,
            MaxMs: 20,
            Iterations: 5,
            ThresholdMs: 5000);
        var baseline = new BaselineEntry("scenario_a", 5, 5, 9, 11, 18);
        var report = new BenchmarkScenarioReport(result, baseline, 1.2);
        var path = Path.Combine(Path.GetTempPath(), $"metrics-{Guid.NewGuid():N}.prom");
        try
        {
            PrometheusWriter.Write(path, new[] { report });

            var contents = File.ReadAllText(path);
            Assert.Contains("scanner_analyzer_bench_max_ms{scenario=\"scenario_a\"} 20", contents);
            Assert.Contains("scanner_analyzer_bench_regression_ratio{scenario=\"scenario_a\"}", contents);
        }
        finally
        {
            // Fix: the original leaked the generated metrics file on every run.
            File.Delete(path);
        }
    }
}

View File

@@ -1,9 +1,9 @@
namespace StellaOps.Bench.ScannerAnalyzers.Baseline;
/// <summary>
/// One baseline row from the benchmark CSV: the recorded duration statistics
/// a subsequent run is compared against.
/// </summary>
internal sealed record BaselineEntry(
    string ScenarioId,
    int Iterations,
    int SampleCount,
    double MeanMs,
    double P95Ms,
    double MaxMs);
namespace StellaOps.Bench.ScannerAnalyzers.Baseline;
/// <summary>
/// One baseline row from the benchmark CSV: the recorded duration statistics
/// a subsequent run is compared against.
/// </summary>
internal sealed record BaselineEntry(
    string ScenarioId,
    int Iterations,
    int SampleCount,
    double MeanMs,
    double P95Ms,
    double MaxMs);

View File

@@ -1,88 +1,88 @@
using System.Globalization;
namespace StellaOps.Bench.ScannerAnalyzers.Baseline;
/// <summary>
/// Loads benchmark baseline rows from a CSV file
/// (scenario,iterations,sample_count,mean_ms,p95_ms,max_ms).
/// </summary>
internal static class BaselineLoader
{
    /// <summary>
    /// Reads the baseline CSV at <paramref name="path"/> into a dictionary keyed
    /// by scenario id (case-insensitive). Blank lines and a leading
    /// "scenario,..." header row are skipped; a duplicate scenario id keeps the
    /// last row seen.
    /// </summary>
    /// <exception cref="ArgumentException">The path is null or whitespace.</exception>
    /// <exception cref="FileNotFoundException">No file exists at the resolved path.</exception>
    /// <exception cref="InvalidDataException">A row is malformed or a field does not parse.</exception>
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(path))
        {
            throw new ArgumentException("Baseline path must be provided.", nameof(path));
        }

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            throw new FileNotFoundException($"Baseline file not found at {resolved}", resolved);
        }

        var result = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var reader = new StreamReader(stream);

        string? line;
        var isFirst = true;
        // Fix: pass the token into ReadLineAsync so a pending read is
        // cancellable, instead of only polling between lines.
        while ((line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false)) is not null)
        {
            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            if (isFirst)
            {
                isFirst = false;
                if (line.StartsWith("scenario,", StringComparison.OrdinalIgnoreCase))
                {
                    continue;
                }
            }

            var entry = ParseLine(line);
            result[entry.ScenarioId] = entry;
        }

        return result;
    }

    // Splits one CSV row into a BaselineEntry; extra columns beyond six are ignored.
    private static BaselineEntry ParseLine(string line)
    {
        var parts = line.Split(',', StringSplitOptions.TrimEntries);
        if (parts.Length < 6)
        {
            throw new InvalidDataException($"Baseline CSV row malformed: '{line}'");
        }

        var scenarioId = parts[0];
        var iterations = ParseInt(parts[1], nameof(BaselineEntry.Iterations));
        var sampleCount = ParseInt(parts[2], nameof(BaselineEntry.SampleCount));
        var meanMs = ParseDouble(parts[3], nameof(BaselineEntry.MeanMs));
        var p95Ms = ParseDouble(parts[4], nameof(BaselineEntry.P95Ms));
        var maxMs = ParseDouble(parts[5], nameof(BaselineEntry.MaxMs));
        return new BaselineEntry(scenarioId, iterations, sampleCount, meanMs, p95Ms, maxMs);
    }

    // Invariant-culture integer parse with a descriptive failure message.
    private static int ParseInt(string value, string field)
    {
        if (!int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
        {
            throw new InvalidDataException($"Failed to parse integer {field} from '{value}'.");
        }

        return parsed;
    }

    // Invariant-culture floating-point parse with a descriptive failure message.
    private static double ParseDouble(string value, string field)
    {
        if (!double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed))
        {
            throw new InvalidDataException($"Failed to parse double {field} from '{value}'.");
        }

        return parsed;
    }
}
using System.Globalization;
namespace StellaOps.Bench.ScannerAnalyzers.Baseline;
/// <summary>
/// Loads benchmark baseline rows from a CSV file
/// (scenario,iterations,sample_count,mean_ms,p95_ms,max_ms).
/// </summary>
internal static class BaselineLoader
{
    /// <summary>
    /// Reads the baseline CSV at <paramref name="path"/> into a dictionary keyed
    /// by scenario id (case-insensitive). Blank lines and a leading
    /// "scenario,..." header row are skipped; a duplicate scenario id keeps the
    /// last row seen.
    /// </summary>
    /// <exception cref="ArgumentException">The path is null or whitespace.</exception>
    /// <exception cref="FileNotFoundException">No file exists at the resolved path.</exception>
    /// <exception cref="InvalidDataException">A row is malformed or a field does not parse.</exception>
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(path))
        {
            throw new ArgumentException("Baseline path must be provided.", nameof(path));
        }

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            throw new FileNotFoundException($"Baseline file not found at {resolved}", resolved);
        }

        var result = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var reader = new StreamReader(stream);

        string? line;
        var isFirst = true;
        // Fix: pass the token into ReadLineAsync so a pending read is
        // cancellable, instead of only polling between lines.
        while ((line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false)) is not null)
        {
            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            if (isFirst)
            {
                isFirst = false;
                if (line.StartsWith("scenario,", StringComparison.OrdinalIgnoreCase))
                {
                    continue;
                }
            }

            var entry = ParseLine(line);
            result[entry.ScenarioId] = entry;
        }

        return result;
    }

    // Splits one CSV row into a BaselineEntry; extra columns beyond six are ignored.
    private static BaselineEntry ParseLine(string line)
    {
        var parts = line.Split(',', StringSplitOptions.TrimEntries);
        if (parts.Length < 6)
        {
            throw new InvalidDataException($"Baseline CSV row malformed: '{line}'");
        }

        var scenarioId = parts[0];
        var iterations = ParseInt(parts[1], nameof(BaselineEntry.Iterations));
        var sampleCount = ParseInt(parts[2], nameof(BaselineEntry.SampleCount));
        var meanMs = ParseDouble(parts[3], nameof(BaselineEntry.MeanMs));
        var p95Ms = ParseDouble(parts[4], nameof(BaselineEntry.P95Ms));
        var maxMs = ParseDouble(parts[5], nameof(BaselineEntry.MaxMs));
        return new BaselineEntry(scenarioId, iterations, sampleCount, meanMs, p95Ms, maxMs);
    }

    // Invariant-culture integer parse with a descriptive failure message.
    private static int ParseInt(string value, string field)
    {
        if (!int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
        {
            throw new InvalidDataException($"Failed to parse integer {field} from '{value}'.");
        }

        return parsed;
    }

    // Invariant-culture floating-point parse with a descriptive failure message.
    private static double ParseDouble(string value, string field)
    {
        if (!double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed))
        {
            throw new InvalidDataException($"Failed to parse double {field} from '{value}'.");
        }

        return parsed;
    }
}

View File

@@ -1,104 +1,104 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.ScannerAnalyzers;
/// <summary>Root benchmark configuration deserialized from JSON.</summary>
internal sealed record BenchmarkConfig
{
    // Fix (CA1869): the original exposed this as a get-only property that
    // allocated a fresh JsonSerializerOptions on every access, defeating the
    // serializer's metadata caching. Cache a single instance instead.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        ReadCommentHandling = JsonCommentHandling.Skip,
        AllowTrailingCommas = true,
    };

    /// <summary>Default iteration count applied when a run does not override it.</summary>
    [JsonPropertyName("iterations")]
    public int? Iterations { get; init; }

    /// <summary>Default duration threshold (ms) applied when a run does not override it.</summary>
    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }

    [JsonPropertyName("scenarios")]
    public List<BenchmarkScenarioConfig> Scenarios { get; init; } = new();

    /// <summary>
    /// Loads and validates the benchmark config at <paramref name="path"/>.
    /// </summary>
    /// <exception cref="ArgumentException">The path is null or whitespace.</exception>
    /// <exception cref="InvalidOperationException">
    /// The file deserializes to null, declares no scenarios, or contains an invalid scenario.
    /// </exception>
    public static async Task<BenchmarkConfig> LoadAsync(string path)
    {
        if (string.IsNullOrWhiteSpace(path))
        {
            throw new ArgumentException("Config path is required.", nameof(path));
        }

        await using var stream = File.OpenRead(path);
        var config = await JsonSerializer.DeserializeAsync<BenchmarkConfig>(stream, SerializerOptions).ConfigureAwait(false);
        if (config is null)
        {
            throw new InvalidOperationException($"Failed to parse benchmark config '{path}'.");
        }

        if (config.Scenarios.Count == 0)
        {
            throw new InvalidOperationException("config.scenarios must declare at least one scenario.");
        }

        foreach (var scenario in config.Scenarios)
        {
            scenario.Validate();
        }

        return config;
    }
}
/// <summary>Configuration for a single benchmark scenario.</summary>
internal sealed record BenchmarkScenarioConfig
{
    [JsonPropertyName("id")]
    public string? Id { get; init; }

    [JsonPropertyName("label")]
    public string? Label { get; init; }

    [JsonPropertyName("root")]
    public string? Root { get; init; }

    [JsonPropertyName("analyzers")]
    public List<string>? Analyzers { get; init; }

    [JsonPropertyName("matcher")]
    public string? Matcher { get; init; }

    [JsonPropertyName("parser")]
    public string? Parser { get; init; }

    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }

    /// <summary>True when the scenario names at least one analyzer.</summary>
    public bool HasAnalyzers => Analyzers is { Count: > 0 };

    /// <summary>
    /// Validates required fields: id and root always; parser and matcher only
    /// when no analyzers are declared.
    /// </summary>
    /// <exception cref="InvalidOperationException">A required field is missing.</exception>
    public void Validate()
    {
        Require(!string.IsNullOrWhiteSpace(Id), "scenario.id is required.");
        Require(!string.IsNullOrWhiteSpace(Root), $"Scenario '{Id}' must specify a root path.");

        if (HasAnalyzers)
        {
            return;
        }

        Require(!string.IsNullOrWhiteSpace(Parser), $"Scenario '{Id}' must specify parser or analyzers.");
        Require(!string.IsNullOrWhiteSpace(Matcher), $"Scenario '{Id}' must specify matcher when parser is used.");

        static void Require(bool condition, string message)
        {
            if (!condition)
            {
                throw new InvalidOperationException(message);
            }
        }
    }
}
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.ScannerAnalyzers;
/// <summary>
/// Root benchmark configuration loaded from config.json: global iteration count,
/// global latency threshold, and the list of scenarios to run.
/// </summary>
internal sealed record BenchmarkConfig
{
    // Cached once: JsonSerializerOptions caches reflection metadata internally,
    // so allocating a fresh instance per access (as the old get-only property
    // did) discards that cache and wastes allocations.
    private static readonly JsonSerializerOptions SerializerOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        ReadCommentHandling = JsonCommentHandling.Skip,
        AllowTrailingCommas = true,
    };

    [JsonPropertyName("iterations")]
    public int? Iterations { get; init; }

    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }

    [JsonPropertyName("scenarios")]
    public List<BenchmarkScenarioConfig> Scenarios { get; init; } = new();

    /// <summary>
    /// Loads and validates a benchmark configuration.
    /// </summary>
    /// <param name="path">Path to the JSON config file.</param>
    /// <exception cref="ArgumentException">Thrown when <paramref name="path"/> is blank.</exception>
    /// <exception cref="InvalidOperationException">Thrown when the file parses to null, declares no scenarios, or contains an invalid scenario.</exception>
    public static async Task<BenchmarkConfig> LoadAsync(string path)
    {
        if (string.IsNullOrWhiteSpace(path))
        {
            throw new ArgumentException("Config path is required.", nameof(path));
        }

        await using var stream = File.OpenRead(path);
        var config = await JsonSerializer.DeserializeAsync<BenchmarkConfig>(stream, SerializerOptions).ConfigureAwait(false);
        if (config is null)
        {
            throw new InvalidOperationException($"Failed to parse benchmark config '{path}'.");
        }

        if (config.Scenarios.Count == 0)
        {
            throw new InvalidOperationException("config.scenarios must declare at least one scenario.");
        }

        foreach (var scenario in config.Scenarios)
        {
            scenario.Validate();
        }

        return config;
    }
}
/// <summary>
/// One benchmark scenario: a sample tree to walk plus either a set of analyzer
/// ids or a parser/matcher pair that processes it.
/// </summary>
internal sealed record BenchmarkScenarioConfig
{
    [JsonPropertyName("id")]
    public string? Id { get; init; }

    [JsonPropertyName("label")]
    public string? Label { get; init; }

    [JsonPropertyName("root")]
    public string? Root { get; init; }

    [JsonPropertyName("analyzers")]
    public List<string>? Analyzers { get; init; }

    [JsonPropertyName("matcher")]
    public string? Matcher { get; init; }

    [JsonPropertyName("parser")]
    public string? Parser { get; init; }

    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }

    /// <summary>True when at least one analyzer id is configured.</summary>
    public bool HasAnalyzers => Analyzers is not null && Analyzers.Count > 0;

    /// <summary>
    /// Verifies the scenario is well-formed: id and root are mandatory, and a
    /// scenario without analyzers must name both a parser and a matcher.
    /// </summary>
    public void Validate()
    {
        if (string.IsNullOrWhiteSpace(Id))
        {
            throw new InvalidOperationException("scenario.id is required.");
        }

        if (string.IsNullOrWhiteSpace(Root))
        {
            throw new InvalidOperationException($"Scenario '{Id}' must specify a root path.");
        }

        if (!HasAnalyzers)
        {
            if (string.IsNullOrWhiteSpace(Parser))
            {
                throw new InvalidOperationException($"Scenario '{Id}' must specify parser or analyzers.");
            }

            if (string.IsNullOrWhiteSpace(Matcher))
            {
                throw new InvalidOperationException($"Scenario '{Id}' must specify matcher when parser is used.");
            }
        }
    }
}

View File

@@ -1,393 +1,393 @@
using System.Globalization;
using StellaOps.Bench.ScannerAnalyzers.Baseline;
using StellaOps.Bench.ScannerAnalyzers.Reporting;
using StellaOps.Bench.ScannerAnalyzers.Scenarios;
namespace StellaOps.Bench.ScannerAnalyzers;
internal static class Program
{
    /// <summary>
    /// Benchmark driver: runs every configured scenario, prints a summary table,
    /// optionally writes CSV/JSON/Prometheus artifacts, and returns a non-zero
    /// exit code when a latency threshold or baseline regression is breached.
    /// </summary>
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = ProgramOptions.Parse(args);
            var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);

            // Precedence: CLI flag > config file value > hard-coded default.
            var iterations = options.Iterations ?? config.Iterations ?? 5;
            var thresholdMs = options.ThresholdMs ?? config.ThresholdMs ?? 5000;
            var repoRoot = ResolveRepoRoot(options.RepoRoot, options.ConfigPath);
            var regressionLimit = options.RegressionLimit ?? 1.2d;
            var capturedAt = (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime();

            var baseline = await LoadBaselineDictionaryAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);

            var results = new List<ScenarioResult>();
            var reports = new List<BenchmarkScenarioReport>();
            var failures = new List<string>();

            foreach (var scenario in config.Scenarios)
            {
                var runner = ScenarioRunnerFactory.Create(scenario);
                var scenarioRoot = ResolveScenarioRoot(repoRoot, scenario.Root!);
                var execution = await runner.ExecuteAsync(scenarioRoot, iterations, CancellationToken.None).ConfigureAwait(false);
                var stats = ScenarioStatistics.FromDurations(execution.Durations);
                var scenarioThreshold = scenario.ThresholdMs ?? thresholdMs;

                var result = new ScenarioResult(
                    scenario.Id!,
                    scenario.Label ?? scenario.Id!,
                    execution.SampleCount,
                    stats.MeanMs,
                    stats.P95Ms,
                    stats.MaxMs,
                    iterations,
                    scenarioThreshold);
                results.Add(result);

                if (stats.MaxMs > scenarioThreshold)
                {
                    failures.Add($"{scenario.Id} exceeded threshold: {stats.MaxMs:F2} ms > {scenarioThreshold:F2} ms");
                }

                // Missing baseline entry -> null -> report skips regression checks.
                baseline.TryGetValue(result.Id, out var baselineEntry);
                var report = new BenchmarkScenarioReport(result, baselineEntry, regressionLimit);
                if (report.BuildRegressionFailureMessage() is { } regressionFailure)
                {
                    failures.Add(regressionFailure);
                }

                reports.Add(report);
            }

            TablePrinter.Print(results);

            if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
            {
                CsvWriter.Write(options.CsvOutPath!, results);
            }

            if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
            {
                var metadata = new BenchmarkJsonMetadata(
                    "1.0",
                    capturedAt,
                    options.Commit,
                    options.Environment);
                await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false);
            }

            if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
            {
                PrometheusWriter.Write(options.PrometheusOutPath!, reports);
            }

            if (failures.Count > 0)
            {
                Console.Error.WriteLine();
                Console.Error.WriteLine("Performance threshold exceeded:");
                foreach (var failure in failures)
                {
                    Console.Error.WriteLine($" - {failure}");
                }

                return 1;
            }

            return 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine(ex.Message);
            return 1;
        }
    }

    /// <summary>
    /// Loads baseline metrics from CSV. Returns an empty map when no baseline
    /// path was supplied or the file does not exist (first run / fresh checkout).
    /// </summary>
    private static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadBaselineDictionaryAsync(string? baselinePath, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(baselinePath))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }

        var resolved = Path.GetFullPath(baselinePath);
        if (!File.Exists(resolved))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }

        return await BaselineLoader.LoadAsync(resolved, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>Resolves the repository root: explicit override if given, else two levels above the config file.</summary>
    private static string ResolveRepoRoot(string? overridePath, string configPath)
    {
        if (!string.IsNullOrWhiteSpace(overridePath))
        {
            return Path.GetFullPath(overridePath);
        }

        var configDirectory = Path.GetDirectoryName(configPath);
        if (string.IsNullOrWhiteSpace(configDirectory))
        {
            return Directory.GetCurrentDirectory();
        }

        return Path.GetFullPath(Path.Combine(configDirectory, "..", ".."));
    }

    /// <summary>
    /// Resolves a scenario root relative to the repo root, rejecting paths that
    /// escape the repository (guards against "../.." in config) and paths that
    /// do not exist on disk.
    /// </summary>
    private static string ResolveScenarioRoot(string repoRoot, string relativeRoot)
    {
        if (string.IsNullOrWhiteSpace(relativeRoot))
        {
            throw new InvalidOperationException("Scenario root is required.");
        }

        var combined = Path.GetFullPath(Path.Combine(repoRoot, relativeRoot));
        if (!PathUtilities.IsWithinRoot(repoRoot, combined))
        {
            throw new InvalidOperationException($"Scenario root '{relativeRoot}' escapes repository root '{repoRoot}'.");
        }

        if (!Directory.Exists(combined))
        {
            throw new DirectoryNotFoundException($"Scenario root '{combined}' does not exist.");
        }

        return combined;
    }

    /// <summary>Parsed command-line options; defaults are derived from the binary location.</summary>
    private sealed record ProgramOptions(
        string ConfigPath,
        int? Iterations,
        double? ThresholdMs,
        string? CsvOutPath,
        string? JsonOutPath,
        string? PrometheusOutPath,
        string? RepoRoot,
        string? BaselinePath,
        DateTimeOffset? CapturedAtUtc,
        string? Commit,
        string? Environment,
        double? RegressionLimit)
    {
        public static ProgramOptions Parse(string[] args)
        {
            var configPath = DefaultConfigPath();
            var baselinePath = DefaultBaselinePath();
            int? iterations = null;
            double? thresholdMs = null;
            string? csvOut = null;
            string? jsonOut = null;
            string? promOut = null;
            string? repoRoot = null;
            DateTimeOffset? capturedAt = null;
            string? commit = null;
            string? environment = null;
            double? regressionLimit = null;

            for (var index = 0; index < args.Length; index++)
            {
                var current = args[index];
                switch (current)
                {
                    case "--config":
                        EnsureNext(args, index);
                        configPath = Path.GetFullPath(args[++index]);
                        break;
                    case "--iterations":
                        EnsureNext(args, index);
                        iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--threshold-ms":
                        EnsureNext(args, index);
                        thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--out":
                    case "--csv":
                        EnsureNext(args, index);
                        csvOut = args[++index];
                        break;
                    case "--json":
                        EnsureNext(args, index);
                        jsonOut = args[++index];
                        break;
                    case "--prom":
                    case "--prometheus":
                        EnsureNext(args, index);
                        promOut = args[++index];
                        break;
                    case "--baseline":
                        EnsureNext(args, index);
                        baselinePath = args[++index];
                        break;
                    case "--repo-root":
                    case "--samples":
                        EnsureNext(args, index);
                        repoRoot = args[++index];
                        break;
                    case "--captured-at":
                        EnsureNext(args, index);
                        capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal);
                        break;
                    case "--commit":
                        EnsureNext(args, index);
                        commit = args[++index];
                        break;
                    case "--environment":
                        EnsureNext(args, index);
                        environment = args[++index];
                        break;
                    case "--regression-limit":
                        EnsureNext(args, index);
                        regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    default:
                        throw new ArgumentException($"Unknown argument: {current}", nameof(args));
                }
            }

            return new ProgramOptions(configPath, iterations, thresholdMs, csvOut, jsonOut, promOut, repoRoot, baselinePath, capturedAt, commit, environment, regressionLimit);
        }

        private static string DefaultConfigPath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectRoot = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var configDirectory = Path.GetFullPath(Path.Combine(projectRoot, ".."));
            return Path.Combine(configDirectory, "config.json");
        }

        private static string? DefaultBaselinePath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectRoot = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectRoot, ".."));
            // Return the conventional location unconditionally. The previous
            // `File.Exists(p) ? p : p` ternary was a no-op (both branches were
            // identical), and a missing file is already tolerated by
            // LoadBaselineDictionaryAsync, which falls back to an empty map.
            return Path.Combine(benchRoot, "baseline.csv");
        }

        private static void EnsureNext(string[] args, int index)
        {
            if (index + 1 >= args.Length)
            {
                throw new ArgumentException("Missing value for argument.", nameof(args));
            }
        }
    }

    /// <summary>Mean / p95 / max aggregates over per-iteration durations (milliseconds).</summary>
    private sealed record ScenarioStatistics(double MeanMs, double P95Ms, double MaxMs)
    {
        public static ScenarioStatistics FromDurations(IReadOnlyList<double> durations)
        {
            if (durations.Count == 0)
            {
                return new ScenarioStatistics(0, 0, 0);
            }

            var sorted = durations.ToArray();
            Array.Sort(sorted);

            var total = 0d;
            foreach (var value in durations)
            {
                total += value;
            }

            var mean = total / durations.Count;
            var p95 = Percentile(sorted, 95);
            var max = sorted[^1];
            return new ScenarioStatistics(mean, p95, max);
        }

        // Linear-interpolated percentile over an already-sorted sample.
        private static double Percentile(IReadOnlyList<double> sorted, double percentile)
        {
            if (sorted.Count == 0)
            {
                return 0;
            }

            var rank = (percentile / 100d) * (sorted.Count - 1);
            var lower = (int)Math.Floor(rank);
            var upper = (int)Math.Ceiling(rank);
            var weight = rank - lower;

            if (upper >= sorted.Count)
            {
                return sorted[lower];
            }

            return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
        }
    }

    /// <summary>Writes the human-readable summary table to stdout.</summary>
    private static class TablePrinter
    {
        public static void Print(IEnumerable<ScenarioResult> results)
        {
            Console.WriteLine("Scenario | Count | Mean(ms) | P95(ms) | Max(ms)");
            Console.WriteLine("---------------------------- | ----- | --------- | --------- | ----------");
            foreach (var row in results)
            {
                Console.WriteLine(string.Join(" | ", new[]
                {
                    row.IdColumn,
                    row.SampleCountColumn,
                    row.MeanColumn,
                    row.P95Column,
                    row.MaxColumn
                }));
            }
        }
    }

    /// <summary>Writes results as invariant-culture CSV, creating parent directories as needed.</summary>
    private static class CsvWriter
    {
        public static void Write(string path, IEnumerable<ScenarioResult> results)
        {
            var resolvedPath = Path.GetFullPath(path);
            var directory = Path.GetDirectoryName(resolvedPath);
            if (!string.IsNullOrEmpty(directory))
            {
                Directory.CreateDirectory(directory);
            }

            using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None);
            using var writer = new StreamWriter(stream);
            writer.WriteLine("scenario,iterations,sample_count,mean_ms,p95_ms,max_ms");
            foreach (var row in results)
            {
                writer.Write(row.Id);
                writer.Write(',');
                writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture));
                writer.Write(',');
                writer.Write(row.SampleCount.ToString(CultureInfo.InvariantCulture));
                writer.Write(',');
                writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
                writer.Write(',');
                writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
                writer.Write(',');
                writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
                writer.WriteLine();
            }
        }
    }

    /// <summary>Path containment check used to keep scenario roots inside the repository.</summary>
    internal static class PathUtilities
    {
        public static bool IsWithinRoot(string root, string candidate)
        {
            var relative = Path.GetRelativePath(root, candidate);
            if (string.IsNullOrEmpty(relative) || relative == ".")
            {
                return true;
            }

            return !relative.StartsWith("..", StringComparison.Ordinal) && !Path.IsPathRooted(relative);
        }
    }
}
using System.Globalization;
using StellaOps.Bench.ScannerAnalyzers.Baseline;
using StellaOps.Bench.ScannerAnalyzers.Reporting;
using StellaOps.Bench.ScannerAnalyzers.Scenarios;
namespace StellaOps.Bench.ScannerAnalyzers;
internal static class Program
{
    /// <summary>
    /// Benchmark driver: runs every configured scenario, prints a summary table,
    /// optionally writes CSV/JSON/Prometheus artifacts, and returns a non-zero
    /// exit code when a latency threshold or baseline regression is breached.
    /// </summary>
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = ProgramOptions.Parse(args);
            var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);

            // Precedence: CLI flag > config file value > hard-coded default.
            var iterations = options.Iterations ?? config.Iterations ?? 5;
            var thresholdMs = options.ThresholdMs ?? config.ThresholdMs ?? 5000;
            var repoRoot = ResolveRepoRoot(options.RepoRoot, options.ConfigPath);
            var regressionLimit = options.RegressionLimit ?? 1.2d;
            var capturedAt = (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime();

            var baseline = await LoadBaselineDictionaryAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);

            var results = new List<ScenarioResult>();
            var reports = new List<BenchmarkScenarioReport>();
            var failures = new List<string>();

            foreach (var scenario in config.Scenarios)
            {
                var runner = ScenarioRunnerFactory.Create(scenario);
                var scenarioRoot = ResolveScenarioRoot(repoRoot, scenario.Root!);
                var execution = await runner.ExecuteAsync(scenarioRoot, iterations, CancellationToken.None).ConfigureAwait(false);
                var stats = ScenarioStatistics.FromDurations(execution.Durations);
                var scenarioThreshold = scenario.ThresholdMs ?? thresholdMs;

                var result = new ScenarioResult(
                    scenario.Id!,
                    scenario.Label ?? scenario.Id!,
                    execution.SampleCount,
                    stats.MeanMs,
                    stats.P95Ms,
                    stats.MaxMs,
                    iterations,
                    scenarioThreshold);
                results.Add(result);

                if (stats.MaxMs > scenarioThreshold)
                {
                    failures.Add($"{scenario.Id} exceeded threshold: {stats.MaxMs:F2} ms > {scenarioThreshold:F2} ms");
                }

                // Missing baseline entry -> null -> report skips regression checks.
                baseline.TryGetValue(result.Id, out var baselineEntry);
                var report = new BenchmarkScenarioReport(result, baselineEntry, regressionLimit);
                if (report.BuildRegressionFailureMessage() is { } regressionFailure)
                {
                    failures.Add(regressionFailure);
                }

                reports.Add(report);
            }

            TablePrinter.Print(results);

            if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
            {
                CsvWriter.Write(options.CsvOutPath!, results);
            }

            if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
            {
                var metadata = new BenchmarkJsonMetadata(
                    "1.0",
                    capturedAt,
                    options.Commit,
                    options.Environment);
                await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false);
            }

            if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
            {
                PrometheusWriter.Write(options.PrometheusOutPath!, reports);
            }

            if (failures.Count > 0)
            {
                Console.Error.WriteLine();
                Console.Error.WriteLine("Performance threshold exceeded:");
                foreach (var failure in failures)
                {
                    Console.Error.WriteLine($" - {failure}");
                }

                return 1;
            }

            return 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine(ex.Message);
            return 1;
        }
    }

    /// <summary>
    /// Loads baseline metrics from CSV. Returns an empty map when no baseline
    /// path was supplied or the file does not exist (first run / fresh checkout).
    /// </summary>
    private static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadBaselineDictionaryAsync(string? baselinePath, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(baselinePath))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }

        var resolved = Path.GetFullPath(baselinePath);
        if (!File.Exists(resolved))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }

        return await BaselineLoader.LoadAsync(resolved, cancellationToken).ConfigureAwait(false);
    }

    /// <summary>Resolves the repository root: explicit override if given, else two levels above the config file.</summary>
    private static string ResolveRepoRoot(string? overridePath, string configPath)
    {
        if (!string.IsNullOrWhiteSpace(overridePath))
        {
            return Path.GetFullPath(overridePath);
        }

        var configDirectory = Path.GetDirectoryName(configPath);
        if (string.IsNullOrWhiteSpace(configDirectory))
        {
            return Directory.GetCurrentDirectory();
        }

        return Path.GetFullPath(Path.Combine(configDirectory, "..", ".."));
    }

    /// <summary>
    /// Resolves a scenario root relative to the repo root, rejecting paths that
    /// escape the repository (guards against "../.." in config) and paths that
    /// do not exist on disk.
    /// </summary>
    private static string ResolveScenarioRoot(string repoRoot, string relativeRoot)
    {
        if (string.IsNullOrWhiteSpace(relativeRoot))
        {
            throw new InvalidOperationException("Scenario root is required.");
        }

        var combined = Path.GetFullPath(Path.Combine(repoRoot, relativeRoot));
        if (!PathUtilities.IsWithinRoot(repoRoot, combined))
        {
            throw new InvalidOperationException($"Scenario root '{relativeRoot}' escapes repository root '{repoRoot}'.");
        }

        if (!Directory.Exists(combined))
        {
            throw new DirectoryNotFoundException($"Scenario root '{combined}' does not exist.");
        }

        return combined;
    }

    /// <summary>Parsed command-line options; defaults are derived from the binary location.</summary>
    private sealed record ProgramOptions(
        string ConfigPath,
        int? Iterations,
        double? ThresholdMs,
        string? CsvOutPath,
        string? JsonOutPath,
        string? PrometheusOutPath,
        string? RepoRoot,
        string? BaselinePath,
        DateTimeOffset? CapturedAtUtc,
        string? Commit,
        string? Environment,
        double? RegressionLimit)
    {
        public static ProgramOptions Parse(string[] args)
        {
            var configPath = DefaultConfigPath();
            var baselinePath = DefaultBaselinePath();
            int? iterations = null;
            double? thresholdMs = null;
            string? csvOut = null;
            string? jsonOut = null;
            string? promOut = null;
            string? repoRoot = null;
            DateTimeOffset? capturedAt = null;
            string? commit = null;
            string? environment = null;
            double? regressionLimit = null;

            for (var index = 0; index < args.Length; index++)
            {
                var current = args[index];
                switch (current)
                {
                    case "--config":
                        EnsureNext(args, index);
                        configPath = Path.GetFullPath(args[++index]);
                        break;
                    case "--iterations":
                        EnsureNext(args, index);
                        iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--threshold-ms":
                        EnsureNext(args, index);
                        thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--out":
                    case "--csv":
                        EnsureNext(args, index);
                        csvOut = args[++index];
                        break;
                    case "--json":
                        EnsureNext(args, index);
                        jsonOut = args[++index];
                        break;
                    case "--prom":
                    case "--prometheus":
                        EnsureNext(args, index);
                        promOut = args[++index];
                        break;
                    case "--baseline":
                        EnsureNext(args, index);
                        baselinePath = args[++index];
                        break;
                    case "--repo-root":
                    case "--samples":
                        EnsureNext(args, index);
                        repoRoot = args[++index];
                        break;
                    case "--captured-at":
                        EnsureNext(args, index);
                        capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal);
                        break;
                    case "--commit":
                        EnsureNext(args, index);
                        commit = args[++index];
                        break;
                    case "--environment":
                        EnsureNext(args, index);
                        environment = args[++index];
                        break;
                    case "--regression-limit":
                        EnsureNext(args, index);
                        regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    default:
                        throw new ArgumentException($"Unknown argument: {current}", nameof(args));
                }
            }

            return new ProgramOptions(configPath, iterations, thresholdMs, csvOut, jsonOut, promOut, repoRoot, baselinePath, capturedAt, commit, environment, regressionLimit);
        }

        private static string DefaultConfigPath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectRoot = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var configDirectory = Path.GetFullPath(Path.Combine(projectRoot, ".."));
            return Path.Combine(configDirectory, "config.json");
        }

        private static string? DefaultBaselinePath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectRoot = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectRoot, ".."));
            // Return the conventional location unconditionally. The previous
            // `File.Exists(p) ? p : p` ternary was a no-op (both branches were
            // identical), and a missing file is already tolerated by
            // LoadBaselineDictionaryAsync, which falls back to an empty map.
            return Path.Combine(benchRoot, "baseline.csv");
        }

        private static void EnsureNext(string[] args, int index)
        {
            if (index + 1 >= args.Length)
            {
                throw new ArgumentException("Missing value for argument.", nameof(args));
            }
        }
    }

    /// <summary>Mean / p95 / max aggregates over per-iteration durations (milliseconds).</summary>
    private sealed record ScenarioStatistics(double MeanMs, double P95Ms, double MaxMs)
    {
        public static ScenarioStatistics FromDurations(IReadOnlyList<double> durations)
        {
            if (durations.Count == 0)
            {
                return new ScenarioStatistics(0, 0, 0);
            }

            var sorted = durations.ToArray();
            Array.Sort(sorted);

            var total = 0d;
            foreach (var value in durations)
            {
                total += value;
            }

            var mean = total / durations.Count;
            var p95 = Percentile(sorted, 95);
            var max = sorted[^1];
            return new ScenarioStatistics(mean, p95, max);
        }

        // Linear-interpolated percentile over an already-sorted sample.
        private static double Percentile(IReadOnlyList<double> sorted, double percentile)
        {
            if (sorted.Count == 0)
            {
                return 0;
            }

            var rank = (percentile / 100d) * (sorted.Count - 1);
            var lower = (int)Math.Floor(rank);
            var upper = (int)Math.Ceiling(rank);
            var weight = rank - lower;

            if (upper >= sorted.Count)
            {
                return sorted[lower];
            }

            return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
        }
    }

    /// <summary>Writes the human-readable summary table to stdout.</summary>
    private static class TablePrinter
    {
        public static void Print(IEnumerable<ScenarioResult> results)
        {
            Console.WriteLine("Scenario | Count | Mean(ms) | P95(ms) | Max(ms)");
            Console.WriteLine("---------------------------- | ----- | --------- | --------- | ----------");
            foreach (var row in results)
            {
                Console.WriteLine(string.Join(" | ", new[]
                {
                    row.IdColumn,
                    row.SampleCountColumn,
                    row.MeanColumn,
                    row.P95Column,
                    row.MaxColumn
                }));
            }
        }
    }

    /// <summary>Writes results as invariant-culture CSV, creating parent directories as needed.</summary>
    private static class CsvWriter
    {
        public static void Write(string path, IEnumerable<ScenarioResult> results)
        {
            var resolvedPath = Path.GetFullPath(path);
            var directory = Path.GetDirectoryName(resolvedPath);
            if (!string.IsNullOrEmpty(directory))
            {
                Directory.CreateDirectory(directory);
            }

            using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None);
            using var writer = new StreamWriter(stream);
            writer.WriteLine("scenario,iterations,sample_count,mean_ms,p95_ms,max_ms");
            foreach (var row in results)
            {
                writer.Write(row.Id);
                writer.Write(',');
                writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture));
                writer.Write(',');
                writer.Write(row.SampleCount.ToString(CultureInfo.InvariantCulture));
                writer.Write(',');
                writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
                writer.Write(',');
                writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
                writer.Write(',');
                writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
                writer.WriteLine();
            }
        }
    }

    /// <summary>Path containment check used to keep scenario roots inside the repository.</summary>
    internal static class PathUtilities
    {
        public static bool IsWithinRoot(string root, string candidate)
        {
            var relative = Path.GetRelativePath(root, candidate);
            if (string.IsNullOrEmpty(relative) || relative == ".")
            {
                return true;
            }

            return !relative.StartsWith("..", StringComparison.Ordinal) && !Path.IsPathRooted(relative);
        }
    }
}

View File

@@ -1,108 +1,108 @@
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Bench.ScannerAnalyzers.Baseline;
namespace StellaOps.Bench.ScannerAnalyzers.Reporting;
/// <summary>
/// Serializes benchmark reports plus capture metadata into an indented JSON
/// document, creating parent directories as needed. Left byte-identical in a
/// restyle sense: the declaration order of the private records below defines
/// the member order of the emitted JSON.
/// </summary>
internal static class BenchmarkJsonWriter
{
    // Web defaults give camelCase property names; nulls (e.g. a scenario with
    // no baseline) are omitted from the output entirely.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    /// <summary>
    /// Writes the JSON report document to <paramref name="path"/>.
    /// </summary>
    /// <param name="path">Destination file; parent directories are created, existing content is overwritten.</param>
    /// <param name="metadata">Schema version, capture time, commit and environment stamps.</param>
    /// <param name="reports">Per-scenario results with optional baseline/regression data.</param>
    /// <param name="cancellationToken">Aborts serialization and the final flush.</param>
    /// <exception cref="ArgumentException">Thrown when <paramref name="path"/> is blank.</exception>
    /// <exception cref="ArgumentNullException">Thrown when metadata or reports is null.</exception>
    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());

        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    // Maps a scenario report onto the JSON DTO shape; Baseline is null (and thus
    // omitted from output) when the scenario had no entry in the baseline CSV.
    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;
        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.SampleCount,
            report.Result.MeanMs,
            report.Result.P95Ms,
            report.Result.MaxMs,
            report.Result.ThresholdMs,
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.SampleCount,
                    baseline.MeanMs,
                    baseline.P95Ms,
                    baseline.MaxMs),
            new BenchmarkJsonScenarioRegression(
                report.MaxRegressionRatio,
                report.MeanRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    // JSON DTOs. NOTE: positional parameter order here is the JSON member order.
    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int SampleCount,
        double MeanMs,
        double P95Ms,
        double MaxMs,
        double ThresholdMs,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int SampleCount,
        double MeanMs,
        double P95Ms,
        double MaxMs);

    private sealed record BenchmarkJsonScenarioRegression(
        double? MaxRatio,
        double? MeanRatio,
        double Limit,
        bool Breached);
}
/// <summary>
/// Metadata stamped into the header of the benchmark JSON document:
/// schema version, UTC capture timestamp, and optional commit/environment tags.
/// </summary>
internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Bench.ScannerAnalyzers.Baseline;
namespace StellaOps.Bench.ScannerAnalyzers.Reporting;
/// <summary>
/// Serializes benchmark reports plus capture metadata into an indented JSON
/// document, creating parent directories as needed. Left byte-identical in a
/// restyle sense: the declaration order of the private records below defines
/// the member order of the emitted JSON.
/// </summary>
internal static class BenchmarkJsonWriter
{
    // Web defaults give camelCase property names; nulls (e.g. a scenario with
    // no baseline) are omitted from the output entirely.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    /// <summary>
    /// Writes the JSON report document to <paramref name="path"/>.
    /// </summary>
    /// <param name="path">Destination file; parent directories are created, existing content is overwritten.</param>
    /// <param name="metadata">Schema version, capture time, commit and environment stamps.</param>
    /// <param name="reports">Per-scenario results with optional baseline/regression data.</param>
    /// <param name="cancellationToken">Aborts serialization and the final flush.</param>
    /// <exception cref="ArgumentException">Thrown when <paramref name="path"/> is blank.</exception>
    /// <exception cref="ArgumentNullException">Thrown when metadata or reports is null.</exception>
    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());

        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    // Maps a scenario report onto the JSON DTO shape; Baseline is null (and thus
    // omitted from output) when the scenario had no entry in the baseline CSV.
    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;
        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.SampleCount,
            report.Result.MeanMs,
            report.Result.P95Ms,
            report.Result.MaxMs,
            report.Result.ThresholdMs,
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.SampleCount,
                    baseline.MeanMs,
                    baseline.P95Ms,
                    baseline.MaxMs),
            new BenchmarkJsonScenarioRegression(
                report.MaxRegressionRatio,
                report.MeanRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    // JSON DTOs. NOTE: positional parameter order here is the JSON member order.
    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int SampleCount,
        double MeanMs,
        double P95Ms,
        double MaxMs,
        double ThresholdMs,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int SampleCount,
        double MeanMs,
        double P95Ms,
        double MaxMs);

    private sealed record BenchmarkJsonScenarioRegression(
        double? MaxRatio,
        double? MeanRatio,
        double Limit,
        bool Breached);
}
/// <summary>
/// Metadata stamped into the header of the benchmark JSON document:
/// schema version, UTC capture timestamp, and optional commit/environment tags.
/// </summary>
internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);

View File

@@ -1,55 +1,55 @@
using StellaOps.Bench.ScannerAnalyzers.Baseline;
namespace StellaOps.Bench.ScannerAnalyzers.Reporting;
/// <summary>
/// Couples a scenario's measured result with its optional baseline entry and
/// evaluates the regression budget: a breach occurs when the max duration grew
/// to the configured ratio of the baseline or beyond.
/// </summary>
internal sealed class BenchmarkScenarioReport
{
    private const double RegressionLimitDefault = 1.2d;

    public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        if (result is null)
        {
            throw new ArgumentNullException(nameof(result));
        }

        Result = result;
        Baseline = baseline;

        // Absent or non-positive limits fall back to the default budget (+20%).
        // Lifted comparison: null > 0 evaluates to false.
        RegressionLimit = regressionLimit > 0 ? regressionLimit.Value : RegressionLimitDefault;
        MaxRegressionRatio = CalculateRatio(result.MaxMs, baseline?.MaxMs);
        MeanRegressionRatio = CalculateRatio(result.MeanMs, baseline?.MeanMs);
    }

    public ScenarioResult Result { get; }

    public BaselineEntry? Baseline { get; }

    public double RegressionLimit { get; }

    public double? MaxRegressionRatio { get; }

    public double? MeanRegressionRatio { get; }

    // Lifted >=: false whenever there is no baseline ratio.
    public bool RegressionBreached => MaxRegressionRatio >= RegressionLimit;

    /// <summary>
    /// Human-readable failure line when the budget is breached; null otherwise.
    /// </summary>
    public string? BuildRegressionFailureMessage()
    {
        if (RegressionBreached && MaxRegressionRatio is double ratio)
        {
            var percentage = (ratio - 1d) * 100d;
            return $"{Result.Id} exceeded regression budget: max {Result.MaxMs:F2} ms vs baseline {Baseline!.MaxMs:F2} ms (+{percentage:F1}%)";
        }

        return null;
    }

    // Current/baseline ratio; null when no baseline exists or it is non-positive.
    private static double? CalculateRatio(double current, double? baseline) =>
        baseline is double b && b > 0d ? current / b : null;
}
using StellaOps.Bench.ScannerAnalyzers.Baseline;
namespace StellaOps.Bench.ScannerAnalyzers.Reporting;
/// <summary>
/// Couples a scenario's measured result with its optional baseline entry and
/// evaluates the regression budget: a breach occurs when the max duration grew
/// to the configured ratio of the baseline or beyond.
/// </summary>
internal sealed class BenchmarkScenarioReport
{
    private const double RegressionLimitDefault = 1.2d;

    public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        if (result is null)
        {
            throw new ArgumentNullException(nameof(result));
        }

        Result = result;
        Baseline = baseline;

        // Absent or non-positive limits fall back to the default budget (+20%).
        // Lifted comparison: null > 0 evaluates to false.
        RegressionLimit = regressionLimit > 0 ? regressionLimit.Value : RegressionLimitDefault;
        MaxRegressionRatio = CalculateRatio(result.MaxMs, baseline?.MaxMs);
        MeanRegressionRatio = CalculateRatio(result.MeanMs, baseline?.MeanMs);
    }

    public ScenarioResult Result { get; }

    public BaselineEntry? Baseline { get; }

    public double RegressionLimit { get; }

    public double? MaxRegressionRatio { get; }

    public double? MeanRegressionRatio { get; }

    // Lifted >=: false whenever there is no baseline ratio.
    public bool RegressionBreached => MaxRegressionRatio >= RegressionLimit;

    /// <summary>
    /// Human-readable failure line when the budget is breached; null otherwise.
    /// </summary>
    public string? BuildRegressionFailureMessage()
    {
        if (RegressionBreached && MaxRegressionRatio is double ratio)
        {
            var percentage = (ratio - 1d) * 100d;
            return $"{Result.Id} exceeded regression budget: max {Result.MaxMs:F2} ms vs baseline {Baseline!.MaxMs:F2} ms (+{percentage:F1}%)";
        }

        return null;
    }

    // Current/baseline ratio; null when no baseline exists or it is non-positive.
    private static double? CalculateRatio(double current, double? baseline) =>
        baseline is double b && b > 0d ? current / b : null;
}

View File

@@ -1,59 +1,59 @@
using System.Globalization;
using System.Text;
namespace StellaOps.Bench.ScannerAnalyzers.Reporting;
/// <summary>
/// Emits benchmark metrics in the Prometheus text exposition format. Samples
/// are grouped per metric family so each family is declared with its own
/// "# TYPE" header immediately followed by all of its samples, as the format
/// requires. (Previously a single HELP/TYPE pair declared a family named
/// scanner_analyzer_bench_duration_ms that was never emitted, while the
/// families actually written had no TYPE headers and were interleaved.)
/// </summary>
internal static class PrometheusWriter
{
    /// <summary>
    /// Writes the metrics file to <paramref name="path"/>, creating parent
    /// directories as needed and overwriting existing content.
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="path"/> is blank.</exception>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="reports"/> is null.</exception>
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        // Family name -> samples, kept in first-seen order for deterministic output.
        var familyOrder = new List<string>();
        var families = new Dictionary<string, List<(string ScenarioLabel, double Value)>>(StringComparer.Ordinal);

        void Add(string metric, string scenarioLabel, double value)
        {
            if (!families.TryGetValue(metric, out var samples))
            {
                samples = new List<(string, double)>();
                families[metric] = samples;
                familyOrder.Add(metric);
            }

            samples.Add((scenarioLabel, value));
        }

        foreach (var report in reports)
        {
            var scenarioLabel = Escape(report.Result.Id);
            Add("scanner_analyzer_bench_mean_ms", scenarioLabel, report.Result.MeanMs);
            Add("scanner_analyzer_bench_p95_ms", scenarioLabel, report.Result.P95Ms);
            Add("scanner_analyzer_bench_max_ms", scenarioLabel, report.Result.MaxMs);
            Add("scanner_analyzer_bench_threshold_ms", scenarioLabel, report.Result.ThresholdMs);

            if (report.Baseline is { } baseline)
            {
                Add("scanner_analyzer_bench_baseline_max_ms", scenarioLabel, baseline.MaxMs);
                Add("scanner_analyzer_bench_baseline_mean_ms", scenarioLabel, baseline.MeanMs);
            }

            if (report.MaxRegressionRatio is { } ratio)
            {
                Add("scanner_analyzer_bench_regression_ratio", scenarioLabel, ratio);
                Add("scanner_analyzer_bench_regression_limit", scenarioLabel, report.RegressionLimit);
                Add("scanner_analyzer_bench_regression_breached", scenarioLabel, report.RegressionBreached ? 1 : 0);
            }
        }

        var builder = new StringBuilder();
        builder.AppendLine("# Analyzer benchmark duration metrics in milliseconds.");
        foreach (var metric in familyOrder)
        {
            builder.Append("# TYPE ").Append(metric).AppendLine(" gauge");
            foreach (var (scenarioLabel, value) in families[metric])
            {
                AppendMetric(builder, metric, scenarioLabel, value);
            }
        }

        File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
    }

    // One sample line: metric{scenario="..."} value (round-trippable G17 format).
    private static void AppendMetric(StringBuilder builder, string metric, string scenarioLabel, double value)
    {
        builder.Append(metric);
        builder.Append("{scenario=\"");
        builder.Append(scenarioLabel);
        builder.Append("\"} ");
        builder.AppendLine(value.ToString("G17", CultureInfo.InvariantCulture));
    }

    // Escapes backslash and double-quote for use inside a Prometheus label value.
    private static string Escape(string value) => value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}
using System.Globalization;
using System.Text;
namespace StellaOps.Bench.ScannerAnalyzers.Reporting;
/// <summary>
/// Renders analyzer benchmark reports in the Prometheus text exposition format and
/// writes them to a file, creating the target directory when necessary.
/// </summary>
internal static class PrometheusWriter
{
    /// <summary>
    /// Writes one gauge line per metric for every report to <paramref name="path"/> (UTF-8).
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="path"/> is null or whitespace.</exception>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="reports"/> is null.</exception>
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);

        var target = Path.GetFullPath(path);
        EnsureDirectory(target);

        var sb = new StringBuilder();
        sb.AppendLine("# HELP scanner_analyzer_bench_duration_ms Analyzer benchmark duration metrics in milliseconds.");
        sb.AppendLine("# TYPE scanner_analyzer_bench_duration_ms gauge");

        foreach (var report in reports)
        {
            AppendScenario(sb, report);
        }

        File.WriteAllText(target, sb.ToString(), Encoding.UTF8);
    }

    // Emits every gauge line for one scenario report (baseline and regression
    // metrics only when the report carries them).
    private static void AppendScenario(StringBuilder sb, BenchmarkScenarioReport report)
    {
        var label = Escape(report.Result.Id);

        AppendMetric(sb, "scanner_analyzer_bench_mean_ms", label, report.Result.MeanMs);
        AppendMetric(sb, "scanner_analyzer_bench_p95_ms", label, report.Result.P95Ms);
        AppendMetric(sb, "scanner_analyzer_bench_max_ms", label, report.Result.MaxMs);
        AppendMetric(sb, "scanner_analyzer_bench_threshold_ms", label, report.Result.ThresholdMs);

        if (report.Baseline is { } baseline)
        {
            AppendMetric(sb, "scanner_analyzer_bench_baseline_max_ms", label, baseline.MaxMs);
            AppendMetric(sb, "scanner_analyzer_bench_baseline_mean_ms", label, baseline.MeanMs);
        }

        if (report.MaxRegressionRatio is { } ratio)
        {
            AppendMetric(sb, "scanner_analyzer_bench_regression_ratio", label, ratio);
            AppendMetric(sb, "scanner_analyzer_bench_regression_limit", label, report.RegressionLimit);
            AppendMetric(sb, "scanner_analyzer_bench_regression_breached", label, report.RegressionBreached ? 1 : 0);
        }
    }

    // Creates the parent directory of the resolved output path if one is present.
    private static void EnsureDirectory(string resolvedPath)
    {
        var directory = Path.GetDirectoryName(resolvedPath);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }
    }

    // One exposition line: metric{scenario="<label>"} <value>, G17 for round-trip precision.
    private static void AppendMetric(StringBuilder sb, string metric, string scenarioLabel, double value)
    {
        sb.Append(metric)
          .Append("{scenario=\"")
          .Append(scenarioLabel)
          .Append("\"} ")
          .AppendLine(value.ToString("G17", CultureInfo.InvariantCulture));
    }

    // Prometheus label values require backslash and double-quote escaping.
    private static string Escape(string value) =>
        value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}

View File

@@ -1,24 +1,24 @@
using System.Globalization;
namespace StellaOps.Bench.ScannerAnalyzers;
/// <summary>
/// Aggregated timing result for one benchmark scenario, with fixed-width column
/// renderings used by the console table output.
/// </summary>
internal sealed record ScenarioResult(
    string Id,
    string Label,
    int SampleCount,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    int Iterations,
    double ThresholdMs)
{
    // Width of the scenario-id column in the rendered table.
    private const int IdWidth = 28;

    /// <summary>Scenario id right-padded to 28 characters, or truncated if longer.</summary>
    public string IdColumn => Id.Length > IdWidth ? Id[..IdWidth] : Id.PadRight(IdWidth);

    /// <summary>Sample count right-aligned in a 5-character column.</summary>
    public string SampleCountColumn => SampleCount.ToString(CultureInfo.InvariantCulture).PadLeft(5);

    /// <summary>Mean duration (ms, two decimals) right-aligned in 9 characters.</summary>
    public string MeanColumn => FormatMs(MeanMs, 9);

    /// <summary>95th-percentile duration (ms, two decimals) right-aligned in 9 characters.</summary>
    public string P95Column => FormatMs(P95Ms, 9);

    /// <summary>Maximum duration (ms, two decimals) right-aligned in 10 characters.</summary>
    public string MaxColumn => FormatMs(MaxMs, 10);

    // Invariant-culture "F2" rendering padded to the requested column width.
    private static string FormatMs(double value, int width) =>
        value.ToString("F2", CultureInfo.InvariantCulture).PadLeft(width);
}
using System.Globalization;
namespace StellaOps.Bench.ScannerAnalyzers;
/// <summary>
/// Aggregated timing result for one benchmark scenario, with fixed-width column
/// renderings used by the console table output.
/// </summary>
internal sealed record ScenarioResult(
    string Id,
    string Label,
    int SampleCount,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    int Iterations,
    double ThresholdMs)
{
    // Width of the scenario-id column in the rendered table.
    private const int IdWidth = 28;

    /// <summary>Scenario id right-padded to 28 characters, or truncated if longer.</summary>
    public string IdColumn => Id.Length > IdWidth ? Id[..IdWidth] : Id.PadRight(IdWidth);

    /// <summary>Sample count right-aligned in a 5-character column.</summary>
    public string SampleCountColumn => SampleCount.ToString(CultureInfo.InvariantCulture).PadLeft(5);

    /// <summary>Mean duration (ms, two decimals) right-aligned in 9 characters.</summary>
    public string MeanColumn => FormatMs(MeanMs, 9);

    /// <summary>95th-percentile duration (ms, two decimals) right-aligned in 9 characters.</summary>
    public string P95Column => FormatMs(P95Ms, 9);

    /// <summary>Maximum duration (ms, two decimals) right-aligned in 10 characters.</summary>
    public string MaxColumn => FormatMs(MaxMs, 10);

    // Invariant-culture "F2" rendering padded to the requested column width.
    private static string FormatMs(double value, int width) =>
        value.ToString("F2", CultureInfo.InvariantCulture).PadLeft(width);
}

View File

@@ -1,285 +1,285 @@
using System.Diagnostics;
using System.Text;
using System.Linq;
using System.Text.Json;
using System.Text.RegularExpressions;
using StellaOps.Scanner.Analyzers.Lang;
using StellaOps.Scanner.Analyzers.Lang.Go;
using StellaOps.Scanner.Analyzers.Lang.Java;
using StellaOps.Scanner.Analyzers.Lang.Node;
using StellaOps.Scanner.Analyzers.Lang.DotNet;
using StellaOps.Scanner.Analyzers.Lang.Python;
namespace StellaOps.Bench.ScannerAnalyzers.Scenarios;
/// <summary>
/// Execution strategy for one benchmark scenario.
/// </summary>
internal interface IScenarioRunner
{
    /// <summary>
    /// Runs the scenario <paramref name="iterations"/> times against <paramref name="rootPath"/>,
    /// returning per-iteration durations plus the observed sample count.
    /// </summary>
    Task<ScenarioExecutionResult> ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken);
}
/// <summary>
/// Outcome of a scenario run: wall-clock duration per iteration (milliseconds) and the
/// number of samples (analyzer components or matched files) observed each iteration.
/// </summary>
internal sealed record ScenarioExecutionResult(double[] Durations, int SampleCount);
/// <summary>
/// Builds the runner matching a scenario configuration: analyzer-based scenarios get a
/// <see cref="LanguageAnalyzerScenarioRunner"/>, otherwise a metadata walk runner driven
/// by the configured parser kind and glob matcher.
/// </summary>
internal static class ScenarioRunnerFactory
{
    /// <exception cref="InvalidOperationException">
    /// Thrown when a non-analyzer scenario lacks a parser or matcher.
    /// </exception>
    public static IScenarioRunner Create(BenchmarkScenarioConfig scenario)
    {
        if (scenario.HasAnalyzers)
        {
            return new LanguageAnalyzerScenarioRunner(scenario.Analyzers!);
        }

        var hasParser = !string.IsNullOrWhiteSpace(scenario.Parser);
        var hasMatcher = !string.IsNullOrWhiteSpace(scenario.Matcher);
        if (hasParser && hasMatcher)
        {
            return new MetadataWalkScenarioRunner(scenario.Parser, scenario.Matcher);
        }

        throw new InvalidOperationException($"Scenario '{scenario.Id}' missing parser or matcher configuration.");
    }
}
/// <summary>
/// Benchmarks the language analyzer engine: fresh analyzer instances are constructed per
/// run, and each iteration analyzes <c>rootPath</c> while asserting a stable component count.
/// </summary>
internal sealed class LanguageAnalyzerScenarioRunner : IScenarioRunner
{
    // One factory per configured analyzer id; invoked at execution time so every
    // run starts from fresh analyzer instances.
    private readonly IReadOnlyList<Func<ILanguageAnalyzer>> _analyzerFactories;

    /// <summary>
    /// Creates a runner for the given analyzer ids; blank entries are ignored.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="analyzerIds"/> is null.</exception>
    /// <exception cref="InvalidOperationException">Thrown when no usable analyzer id remains.</exception>
    public LanguageAnalyzerScenarioRunner(IEnumerable<string> analyzerIds)
    {
        ArgumentNullException.ThrowIfNull(analyzerIds);

        var factories = new List<Func<ILanguageAnalyzer>>();
        foreach (var id in analyzerIds)
        {
            if (!string.IsNullOrWhiteSpace(id))
            {
                factories.Add(CreateFactory(id));
            }
        }

        if (factories.Count == 0)
        {
            throw new InvalidOperationException("At least one analyzer id must be provided.");
        }

        _analyzerFactories = factories;
    }

    /// <summary>
    /// Runs the analyzer engine <paramref name="iterations"/> times and records each
    /// iteration's duration; throws if the component count varies between iterations.
    /// </summary>
    public async Task<ScenarioExecutionResult> ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }

        var analyzers = new ILanguageAnalyzer[_analyzerFactories.Count];
        for (var i = 0; i < analyzers.Length; i++)
        {
            analyzers[i] = _analyzerFactories[i]();
        }

        var engine = new LanguageAnalyzerEngine(analyzers);
        var durations = new double[iterations];
        int? componentCount = null;

        for (var iteration = 0; iteration < iterations; iteration++)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var context = new LanguageAnalyzerContext(rootPath, TimeProvider.System);
            var stopwatch = Stopwatch.StartNew();
            var result = await engine.AnalyzeAsync(context, cancellationToken).ConfigureAwait(false);
            stopwatch.Stop();

            durations[iteration] = stopwatch.Elapsed.TotalMilliseconds;

            // Determinism guard: every iteration must produce the same component count.
            var observed = result.Components.Count;
            if (componentCount is null)
            {
                componentCount = observed;
            }
            else if (componentCount.Value != observed)
            {
                throw new InvalidOperationException($"Analyzer output count changed between iterations ({componentCount.Value} vs {observed}).");
            }
        }

        return new ScenarioExecutionResult(durations, componentCount ?? 0);
    }

    // Maps a (case-insensitive) analyzer id to a factory for its analyzer type.
    private static Func<ILanguageAnalyzer> CreateFactory(string analyzerId)
    {
        switch (analyzerId.Trim().ToLowerInvariant())
        {
            case "java":
                return static () => new JavaLanguageAnalyzer();
            case "go":
                return static () => new GoLanguageAnalyzer();
            case "node":
                return static () => new NodeLanguageAnalyzer();
            case "dotnet":
                return static () => new DotNetLanguageAnalyzer();
            case "python":
                return static () => new PythonLanguageAnalyzer();
            default:
                throw new InvalidOperationException($"Unsupported analyzer '{analyzerId}'.");
        }
    }
}
/// <summary>
/// Benchmarks a raw metadata walk: finds files matching a glob under the scenario root and
/// parses each one with a lightweight parser ("node" package.json or "python" METADATA).
/// </summary>
internal sealed class MetadataWalkScenarioRunner : IScenarioRunner
{
    private readonly Regex _matcher;
    private readonly string _parserKind;

    /// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
    public MetadataWalkScenarioRunner(string parserKind, string globPattern)
    {
        ArgumentNullException.ThrowIfNull(parserKind);
        ArgumentNullException.ThrowIfNull(globPattern);

        _parserKind = parserKind.Trim().ToLowerInvariant();
        _matcher = GlobToRegex(globPattern);
    }

    /// <summary>
    /// Walks and parses the matching files <paramref name="iterations"/> times, timing each
    /// full pass; throws if zero files match or the file count varies between iterations.
    /// </summary>
    public async Task<ScenarioExecutionResult> ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }

        var durations = new double[iterations];
        int? sampleCount = null;

        for (var iteration = 0; iteration < iterations; iteration++)
        {
            cancellationToken.ThrowIfCancellationRequested();

            // Enumeration is intentionally timed together with parsing.
            var stopwatch = Stopwatch.StartNew();
            var files = EnumerateMatchingFiles(rootPath);
            if (files.Count == 0)
            {
                throw new InvalidOperationException($"Parser '{_parserKind}' matched zero files under '{rootPath}'.");
            }

            foreach (var file in files)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await ParseAsync(file).ConfigureAwait(false);
            }

            stopwatch.Stop();
            durations[iteration] = stopwatch.Elapsed.TotalMilliseconds;

            // Determinism guard: every iteration must visit the same number of files.
            if (sampleCount is null)
            {
                sampleCount = files.Count;
            }
            else if (sampleCount.Value != files.Count)
            {
                throw new InvalidOperationException($"File count changed between iterations ({sampleCount.Value} vs {files.Count}).");
            }
        }

        return new ScenarioExecutionResult(durations, sampleCount ?? 0);
    }

    // Dispatches one file to the parser selected at construction time.
    private async ValueTask ParseAsync(string filePath)
    {
        if (_parserKind == "node")
        {
            await ParseNodePackageAsync(filePath).ConfigureAwait(false);
        }
        else if (_parserKind == "python")
        {
            var (name, version) = await ParsePythonMetadataAsync(filePath).ConfigureAwait(false);
            if (string.IsNullOrEmpty(name) || string.IsNullOrEmpty(version))
            {
                throw new InvalidOperationException($"METADATA '{filePath}' missing Name/Version.");
            }
        }
        else
        {
            throw new InvalidOperationException($"Unknown parser '{_parserKind}'.");
        }
    }

    // Parses a package.json and requires string "name" and "version" properties.
    private static async ValueTask ParseNodePackageAsync(string filePath)
    {
        using var stream = File.OpenRead(filePath);
        using var document = await JsonDocument.ParseAsync(stream).ConfigureAwait(false);

        if (!document.RootElement.TryGetProperty("name", out var name) || name.ValueKind != JsonValueKind.String)
        {
            throw new InvalidOperationException($"package.json '{filePath}' missing name.");
        }

        if (!document.RootElement.TryGetProperty("version", out var version) || version.ValueKind != JsonValueKind.String)
        {
            throw new InvalidOperationException($"package.json '{filePath}' missing version.");
        }
    }

    // Scans a Python wheel METADATA file for the first "Name:" and "Version:" headers,
    // stopping early once both are found. Returns nulls for headers never seen.
    private static async Task<(string? Name, string? Version)> ParsePythonMetadataAsync(string filePath)
    {
        using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read | FileShare.Delete);
        using var reader = new StreamReader(stream);

        string? name = null;
        string? version = null;

        for (var line = await reader.ReadLineAsync().ConfigureAwait(false);
             line is not null;
             line = await reader.ReadLineAsync().ConfigureAwait(false))
        {
            if (line.StartsWith("Name:", StringComparison.OrdinalIgnoreCase))
            {
                name ??= line[5..].Trim();
            }
            else if (line.StartsWith("Version:", StringComparison.OrdinalIgnoreCase))
            {
                version ??= line[8..].Trim();
            }

            if (!string.IsNullOrEmpty(name) && !string.IsNullOrEmpty(version))
            {
                break;
            }
        }

        return (name, version);
    }

    // Depth-first directory walk collecting files whose root-relative path (with '/'
    // separators) matches the compiled glob.
    private IReadOnlyList<string> EnumerateMatchingFiles(string rootPath)
    {
        var matches = new List<string>();
        var pending = new Stack<string>();
        pending.Push(rootPath);

        while (pending.Count > 0)
        {
            var directory = pending.Pop();

            foreach (var child in Directory.EnumerateDirectories(directory))
            {
                pending.Push(child);
            }

            foreach (var candidate in Directory.EnumerateFiles(directory))
            {
                var relative = Path.GetRelativePath(rootPath, candidate).Replace('\\', '/');
                if (_matcher.IsMatch(relative))
                {
                    matches.Add(candidate);
                }
            }
        }

        return matches;
    }

    // Converts a glob to an anchored regex: "**/" spans any directory depth (or none),
    // bare "**" matches anything, "*" matches within a single path segment. Placeholder
    // characters protect the wildcards from Regex.Escape.
    private static Regex GlobToRegex(string pattern)
    {
        if (string.IsNullOrWhiteSpace(pattern))
        {
            throw new ArgumentException("Glob pattern is required.", nameof(pattern));
        }

        var normalized = pattern
            .Replace("\\", "/")
            .Replace("**", "\u0001")
            .Replace("*", "\u0002");

        var regexBody = Regex.Escape(normalized)
            .Replace("\u0001/", "(?:.*/)?", StringComparison.Ordinal)
            .Replace("\u0001", ".*", StringComparison.Ordinal)
            .Replace("\u0002", "[^/]*", StringComparison.Ordinal);

        return new Regex("^" + regexBody + "$", RegexOptions.Compiled | RegexOptions.CultureInvariant);
    }
}
using System.Diagnostics;
using System.Text;
using System.Linq;
using System.Text.Json;
using System.Text.RegularExpressions;
using StellaOps.Scanner.Analyzers.Lang;
using StellaOps.Scanner.Analyzers.Lang.Go;
using StellaOps.Scanner.Analyzers.Lang.Java;
using StellaOps.Scanner.Analyzers.Lang.Node;
using StellaOps.Scanner.Analyzers.Lang.DotNet;
using StellaOps.Scanner.Analyzers.Lang.Python;
namespace StellaOps.Bench.ScannerAnalyzers.Scenarios;
/// <summary>
/// Execution strategy for one benchmark scenario.
/// </summary>
internal interface IScenarioRunner
{
    /// <summary>
    /// Runs the scenario <paramref name="iterations"/> times against <paramref name="rootPath"/>,
    /// returning per-iteration durations plus the observed sample count.
    /// </summary>
    Task<ScenarioExecutionResult> ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken);
}
/// <summary>
/// Outcome of a scenario run: wall-clock duration per iteration (milliseconds) and the
/// number of samples (analyzer components or matched files) observed each iteration.
/// </summary>
internal sealed record ScenarioExecutionResult(double[] Durations, int SampleCount);
/// <summary>
/// Builds the runner matching a scenario configuration: analyzer-based scenarios get a
/// <see cref="LanguageAnalyzerScenarioRunner"/>, otherwise a metadata walk runner driven
/// by the configured parser kind and glob matcher.
/// </summary>
internal static class ScenarioRunnerFactory
{
    /// <exception cref="InvalidOperationException">
    /// Thrown when a non-analyzer scenario lacks a parser or matcher.
    /// </exception>
    public static IScenarioRunner Create(BenchmarkScenarioConfig scenario)
    {
        if (scenario.HasAnalyzers)
        {
            return new LanguageAnalyzerScenarioRunner(scenario.Analyzers!);
        }

        var hasParser = !string.IsNullOrWhiteSpace(scenario.Parser);
        var hasMatcher = !string.IsNullOrWhiteSpace(scenario.Matcher);
        if (hasParser && hasMatcher)
        {
            return new MetadataWalkScenarioRunner(scenario.Parser, scenario.Matcher);
        }

        throw new InvalidOperationException($"Scenario '{scenario.Id}' missing parser or matcher configuration.");
    }
}
/// <summary>
/// Benchmarks the language analyzer engine: fresh analyzer instances are constructed per
/// run, and each iteration analyzes <c>rootPath</c> while asserting a stable component count.
/// </summary>
internal sealed class LanguageAnalyzerScenarioRunner : IScenarioRunner
{
    // One factory per configured analyzer id; invoked at execution time so every
    // run starts from fresh analyzer instances.
    private readonly IReadOnlyList<Func<ILanguageAnalyzer>> _analyzerFactories;

    /// <summary>
    /// Creates a runner for the given analyzer ids; blank entries are ignored.
    /// </summary>
    /// <exception cref="ArgumentNullException">Thrown when <paramref name="analyzerIds"/> is null.</exception>
    /// <exception cref="InvalidOperationException">Thrown when no usable analyzer id remains.</exception>
    public LanguageAnalyzerScenarioRunner(IEnumerable<string> analyzerIds)
    {
        ArgumentNullException.ThrowIfNull(analyzerIds);

        var factories = new List<Func<ILanguageAnalyzer>>();
        foreach (var id in analyzerIds)
        {
            if (!string.IsNullOrWhiteSpace(id))
            {
                factories.Add(CreateFactory(id));
            }
        }

        if (factories.Count == 0)
        {
            throw new InvalidOperationException("At least one analyzer id must be provided.");
        }

        _analyzerFactories = factories;
    }

    /// <summary>
    /// Runs the analyzer engine <paramref name="iterations"/> times and records each
    /// iteration's duration; throws if the component count varies between iterations.
    /// </summary>
    public async Task<ScenarioExecutionResult> ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }

        var analyzers = new ILanguageAnalyzer[_analyzerFactories.Count];
        for (var i = 0; i < analyzers.Length; i++)
        {
            analyzers[i] = _analyzerFactories[i]();
        }

        var engine = new LanguageAnalyzerEngine(analyzers);
        var durations = new double[iterations];
        int? componentCount = null;

        for (var iteration = 0; iteration < iterations; iteration++)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var context = new LanguageAnalyzerContext(rootPath, TimeProvider.System);
            var stopwatch = Stopwatch.StartNew();
            var result = await engine.AnalyzeAsync(context, cancellationToken).ConfigureAwait(false);
            stopwatch.Stop();

            durations[iteration] = stopwatch.Elapsed.TotalMilliseconds;

            // Determinism guard: every iteration must produce the same component count.
            var observed = result.Components.Count;
            if (componentCount is null)
            {
                componentCount = observed;
            }
            else if (componentCount.Value != observed)
            {
                throw new InvalidOperationException($"Analyzer output count changed between iterations ({componentCount.Value} vs {observed}).");
            }
        }

        return new ScenarioExecutionResult(durations, componentCount ?? 0);
    }

    // Maps a (case-insensitive) analyzer id to a factory for its analyzer type.
    private static Func<ILanguageAnalyzer> CreateFactory(string analyzerId)
    {
        switch (analyzerId.Trim().ToLowerInvariant())
        {
            case "java":
                return static () => new JavaLanguageAnalyzer();
            case "go":
                return static () => new GoLanguageAnalyzer();
            case "node":
                return static () => new NodeLanguageAnalyzer();
            case "dotnet":
                return static () => new DotNetLanguageAnalyzer();
            case "python":
                return static () => new PythonLanguageAnalyzer();
            default:
                throw new InvalidOperationException($"Unsupported analyzer '{analyzerId}'.");
        }
    }
}
/// <summary>
/// Benchmarks a raw metadata walk: finds files matching a glob under the scenario root and
/// parses each one with a lightweight parser ("node" package.json or "python" METADATA).
/// </summary>
internal sealed class MetadataWalkScenarioRunner : IScenarioRunner
{
    private readonly Regex _matcher;
    private readonly string _parserKind;

    /// <exception cref="ArgumentNullException">Thrown when either argument is null.</exception>
    public MetadataWalkScenarioRunner(string parserKind, string globPattern)
    {
        ArgumentNullException.ThrowIfNull(parserKind);
        ArgumentNullException.ThrowIfNull(globPattern);

        _parserKind = parserKind.Trim().ToLowerInvariant();
        _matcher = GlobToRegex(globPattern);
    }

    /// <summary>
    /// Walks and parses the matching files <paramref name="iterations"/> times, timing each
    /// full pass; throws if zero files match or the file count varies between iterations.
    /// </summary>
    public async Task<ScenarioExecutionResult> ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }

        var durations = new double[iterations];
        int? sampleCount = null;

        for (var iteration = 0; iteration < iterations; iteration++)
        {
            cancellationToken.ThrowIfCancellationRequested();

            // Enumeration is intentionally timed together with parsing.
            var stopwatch = Stopwatch.StartNew();
            var files = EnumerateMatchingFiles(rootPath);
            if (files.Count == 0)
            {
                throw new InvalidOperationException($"Parser '{_parserKind}' matched zero files under '{rootPath}'.");
            }

            foreach (var file in files)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await ParseAsync(file).ConfigureAwait(false);
            }

            stopwatch.Stop();
            durations[iteration] = stopwatch.Elapsed.TotalMilliseconds;

            // Determinism guard: every iteration must visit the same number of files.
            if (sampleCount is null)
            {
                sampleCount = files.Count;
            }
            else if (sampleCount.Value != files.Count)
            {
                throw new InvalidOperationException($"File count changed between iterations ({sampleCount.Value} vs {files.Count}).");
            }
        }

        return new ScenarioExecutionResult(durations, sampleCount ?? 0);
    }

    // Dispatches one file to the parser selected at construction time.
    private async ValueTask ParseAsync(string filePath)
    {
        if (_parserKind == "node")
        {
            await ParseNodePackageAsync(filePath).ConfigureAwait(false);
        }
        else if (_parserKind == "python")
        {
            var (name, version) = await ParsePythonMetadataAsync(filePath).ConfigureAwait(false);
            if (string.IsNullOrEmpty(name) || string.IsNullOrEmpty(version))
            {
                throw new InvalidOperationException($"METADATA '{filePath}' missing Name/Version.");
            }
        }
        else
        {
            throw new InvalidOperationException($"Unknown parser '{_parserKind}'.");
        }
    }

    // Parses a package.json and requires string "name" and "version" properties.
    private static async ValueTask ParseNodePackageAsync(string filePath)
    {
        using var stream = File.OpenRead(filePath);
        using var document = await JsonDocument.ParseAsync(stream).ConfigureAwait(false);

        if (!document.RootElement.TryGetProperty("name", out var name) || name.ValueKind != JsonValueKind.String)
        {
            throw new InvalidOperationException($"package.json '{filePath}' missing name.");
        }

        if (!document.RootElement.TryGetProperty("version", out var version) || version.ValueKind != JsonValueKind.String)
        {
            throw new InvalidOperationException($"package.json '{filePath}' missing version.");
        }
    }

    // Scans a Python wheel METADATA file for the first "Name:" and "Version:" headers,
    // stopping early once both are found. Returns nulls for headers never seen.
    private static async Task<(string? Name, string? Version)> ParsePythonMetadataAsync(string filePath)
    {
        using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read | FileShare.Delete);
        using var reader = new StreamReader(stream);

        string? name = null;
        string? version = null;

        for (var line = await reader.ReadLineAsync().ConfigureAwait(false);
             line is not null;
             line = await reader.ReadLineAsync().ConfigureAwait(false))
        {
            if (line.StartsWith("Name:", StringComparison.OrdinalIgnoreCase))
            {
                name ??= line[5..].Trim();
            }
            else if (line.StartsWith("Version:", StringComparison.OrdinalIgnoreCase))
            {
                version ??= line[8..].Trim();
            }

            if (!string.IsNullOrEmpty(name) && !string.IsNullOrEmpty(version))
            {
                break;
            }
        }

        return (name, version);
    }

    // Depth-first directory walk collecting files whose root-relative path (with '/'
    // separators) matches the compiled glob.
    private IReadOnlyList<string> EnumerateMatchingFiles(string rootPath)
    {
        var matches = new List<string>();
        var pending = new Stack<string>();
        pending.Push(rootPath);

        while (pending.Count > 0)
        {
            var directory = pending.Pop();

            foreach (var child in Directory.EnumerateDirectories(directory))
            {
                pending.Push(child);
            }

            foreach (var candidate in Directory.EnumerateFiles(directory))
            {
                var relative = Path.GetRelativePath(rootPath, candidate).Replace('\\', '/');
                if (_matcher.IsMatch(relative))
                {
                    matches.Add(candidate);
                }
            }
        }

        return matches;
    }

    // Converts a glob to an anchored regex: "**/" spans any directory depth (or none),
    // bare "**" matches anything, "*" matches within a single path segment. Placeholder
    // characters protect the wildcards from Regex.Escape.
    private static Regex GlobToRegex(string pattern)
    {
        if (string.IsNullOrWhiteSpace(pattern))
        {
            throw new ArgumentException("Glob pattern is required.", nameof(pattern));
        }

        var normalized = pattern
            .Replace("\\", "/")
            .Replace("**", "\u0001")
            .Replace("*", "\u0002");

        var regexBody = Regex.Escape(normalized)
            .Replace("\u0001/", "(?:.*/)?", StringComparison.Ordinal)
            .Replace("\u0001", ".*", StringComparison.Ordinal)
            .Replace("\u0002", "[^/]*", StringComparison.Ordinal);

        return new Regex("^" + regexBody + "$", RegexOptions.Compiled | RegexOptions.CultureInvariant);
    }
}