up
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Notify Smoke Test / Notify Unit Tests (push) Has been cancelled
Notify Smoke Test / Notifier Service Tests (push) Has been cancelled
Notify Smoke Test / Notification Smoke Test (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
Signals CI & Image / signals-ci (push) Has been cancelled
Signals Reachability Scoring & Events / reachability-smoke (push) Has been cancelled
Signals Reachability Scoring & Events / sign-and-upload (push) Has been cancelled
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
AOC Guard CI / aoc-guard (push) Has been cancelled
AOC Guard CI / aoc-verify (push) Has been cancelled
Concelier Attestation Tests / attestation-tests (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Notify Smoke Test / Notify Unit Tests (push) Has been cancelled
Notify Smoke Test / Notifier Service Tests (push) Has been cancelled
Notify Smoke Test / Notification Smoke Test (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Scanner Analyzers / Discover Analyzers (push) Has been cancelled
Scanner Analyzers / Build Analyzers (push) Has been cancelled
Scanner Analyzers / Test Language Analyzers (push) Has been cancelled
Scanner Analyzers / Validate Test Fixtures (push) Has been cancelled
Scanner Analyzers / Verify Deterministic Output (push) Has been cancelled
Signals CI & Image / signals-ci (push) Has been cancelled
Signals Reachability Scoring & Events / reachability-smoke (push) Has been cancelled
Signals Reachability Scoring & Events / sign-and-upload (push) Has been cancelled
This commit is contained in:
@@ -1,38 +1,38 @@
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Bench.LinkNotMerge.Baseline;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge.Tests;
|
||||
|
||||
public sealed class BaselineLoaderTests
{
    // Verifies that LoadAsync parses a benchmark baseline CSV (header + one row)
    // into a single keyed entry with the expected numeric fields.
    [Fact]
    public async Task LoadAsync_ReadsEntries()
    {
        var csvPath = Path.GetTempFileName();
        try
        {
            const string header =
                "scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb\n";
            const string row =
                "lnm_ingest_baseline,5,5000,500,450,320.5,340.1,360.9,120.2,210.3,15000.0,13500.0,18000.0,16500.0,96.5\n";
            await File.WriteAllTextAsync(csvPath, header + row);

            var baseline = await BaselineLoader.LoadAsync(csvPath, CancellationToken.None);

            var entry = Assert.Single(baseline);
            Assert.Equal("lnm_ingest_baseline", entry.Key);
            Assert.Equal(5, entry.Value.Iterations);
            Assert.Equal(5000, entry.Value.Observations);
            Assert.Equal(500, entry.Value.Aliases);
            Assert.Equal(360.9, entry.Value.MaxTotalMs);
            Assert.Equal(16500.0, entry.Value.MinMongoThroughputPerSecond);
            Assert.Equal(96.5, entry.Value.MaxAllocatedMb);
        }
        finally
        {
            File.Delete(csvPath);
        }
    }
}
|
||||
using System.IO;
|
||||
using System.Threading;
|
||||
using System.Threading.Tasks;
|
||||
using StellaOps.Bench.LinkNotMerge.Baseline;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge.Tests;
|
||||
|
||||
public sealed class BaselineLoaderTests
{
    // Verifies that LoadAsync parses a benchmark baseline CSV (header + one row)
    // into a single keyed entry with the expected numeric fields.
    [Fact]
    public async Task LoadAsync_ReadsEntries()
    {
        var csvPath = Path.GetTempFileName();
        try
        {
            const string header =
                "scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb\n";
            const string row =
                "lnm_ingest_baseline,5,5000,500,450,320.5,340.1,360.9,120.2,210.3,15000.0,13500.0,18000.0,16500.0,96.5\n";
            await File.WriteAllTextAsync(csvPath, header + row);

            var baseline = await BaselineLoader.LoadAsync(csvPath, CancellationToken.None);

            var entry = Assert.Single(baseline);
            Assert.Equal("lnm_ingest_baseline", entry.Key);
            Assert.Equal(5, entry.Value.Iterations);
            Assert.Equal(5000, entry.Value.Observations);
            Assert.Equal(500, entry.Value.Aliases);
            Assert.Equal(360.9, entry.Value.MaxTotalMs);
            Assert.Equal(16500.0, entry.Value.MinMongoThroughputPerSecond);
            Assert.Equal(96.5, entry.Value.MaxAllocatedMb);
        }
        finally
        {
            File.Delete(csvPath);
        }
    }
}
|
||||
|
||||
@@ -1,81 +1,81 @@
|
||||
using StellaOps.Bench.LinkNotMerge.Baseline;
|
||||
using StellaOps.Bench.LinkNotMerge.Reporting;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge.Tests;
|
||||
|
||||
public sealed class BenchmarkScenarioReportTests
{
    // A slower result compared against a faster baseline with a 1.1x limit must
    // flag duration, throughput, and Mongo-throughput regressions.
    [Fact]
    public void RegressionDetection_FlagsBreaches()
    {
        var result = CreateResult(
            total: new DurationStatistics(200, 240, 260),
            insert: new DurationStatistics(80, 90, 100),
            correlation: new DurationStatistics(120, 150, 170),
            totalThroughput: new ThroughputStatistics(8000, 7000),
            insertThroughput: new ThroughputStatistics(9000, 8000),
            allocation: new AllocationStatistics(120));

        var baseline = new BaselineEntry(
            ScenarioId: "scenario",
            Iterations: 3,
            Observations: 1000,
            Aliases: 100,
            Linksets: 90,
            MeanTotalMs: 150,
            P95TotalMs: 170,
            MaxTotalMs: 180,
            MeanInsertMs: 60,
            MeanCorrelationMs: 90,
            MeanThroughputPerSecond: 9000,
            MinThroughputPerSecond: 8500,
            MeanMongoThroughputPerSecond: 10000,
            MinMongoThroughputPerSecond: 9500,
            MaxAllocatedMb: 100);

        var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1);

        Assert.True(report.DurationRegressionBreached);
        Assert.True(report.ThroughputRegressionBreached);
        Assert.True(report.MongoThroughputRegressionBreached);
        Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("max duration"));
    }

    // Without a baseline there is nothing to regress against, so no breaches
    // and no failure messages are produced.
    [Fact]
    public void RegressionDetection_NoBaseline_NoBreaches()
    {
        var result = CreateResult(
            total: new DurationStatistics(200, 220, 230),
            insert: new DurationStatistics(90, 100, 110),
            correlation: new DurationStatistics(110, 120, 130),
            totalThroughput: new ThroughputStatistics(8000, 7900),
            insertThroughput: new ThroughputStatistics(9000, 8900),
            allocation: new AllocationStatistics(64));

        var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null);

        Assert.False(report.RegressionBreached);
        Assert.Empty(report.BuildRegressionFailureMessages());
    }

    // Builds a ScenarioResult with the fixed identity fields shared by both tests;
    // only the statistics vary per test case.
    private static ScenarioResult CreateResult(
        DurationStatistics total,
        DurationStatistics insert,
        DurationStatistics correlation,
        ThroughputStatistics totalThroughput,
        ThroughputStatistics insertThroughput,
        AllocationStatistics allocation) =>
        new(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            ObservationCount: 1000,
            AliasGroups: 100,
            LinksetCount: 90,
            TotalStatistics: total,
            InsertStatistics: insert,
            CorrelationStatistics: correlation,
            TotalThroughputStatistics: totalThroughput,
            InsertThroughputStatistics: insertThroughput,
            AllocationStatistics: allocation,
            ThresholdMs: null,
            MinThroughputThresholdPerSecond: null,
            MinMongoThroughputThresholdPerSecond: null,
            MaxAllocatedThresholdMb: null);
}
|
||||
using StellaOps.Bench.LinkNotMerge.Baseline;
|
||||
using StellaOps.Bench.LinkNotMerge.Reporting;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge.Tests;
|
||||
|
||||
public sealed class BenchmarkScenarioReportTests
{
    // A slower result compared against a faster baseline with a 1.1x limit must
    // flag duration, throughput, and Mongo-throughput regressions.
    [Fact]
    public void RegressionDetection_FlagsBreaches()
    {
        var result = CreateResult(
            total: new DurationStatistics(200, 240, 260),
            insert: new DurationStatistics(80, 90, 100),
            correlation: new DurationStatistics(120, 150, 170),
            totalThroughput: new ThroughputStatistics(8000, 7000),
            insertThroughput: new ThroughputStatistics(9000, 8000),
            allocation: new AllocationStatistics(120));

        var baseline = new BaselineEntry(
            ScenarioId: "scenario",
            Iterations: 3,
            Observations: 1000,
            Aliases: 100,
            Linksets: 90,
            MeanTotalMs: 150,
            P95TotalMs: 170,
            MaxTotalMs: 180,
            MeanInsertMs: 60,
            MeanCorrelationMs: 90,
            MeanThroughputPerSecond: 9000,
            MinThroughputPerSecond: 8500,
            MeanMongoThroughputPerSecond: 10000,
            MinMongoThroughputPerSecond: 9500,
            MaxAllocatedMb: 100);

        var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1);

        Assert.True(report.DurationRegressionBreached);
        Assert.True(report.ThroughputRegressionBreached);
        Assert.True(report.MongoThroughputRegressionBreached);
        Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("max duration"));
    }

    // Without a baseline there is nothing to regress against, so no breaches
    // and no failure messages are produced.
    [Fact]
    public void RegressionDetection_NoBaseline_NoBreaches()
    {
        var result = CreateResult(
            total: new DurationStatistics(200, 220, 230),
            insert: new DurationStatistics(90, 100, 110),
            correlation: new DurationStatistics(110, 120, 130),
            totalThroughput: new ThroughputStatistics(8000, 7900),
            insertThroughput: new ThroughputStatistics(9000, 8900),
            allocation: new AllocationStatistics(64));

        var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null);

        Assert.False(report.RegressionBreached);
        Assert.Empty(report.BuildRegressionFailureMessages());
    }

    // Builds a ScenarioResult with the fixed identity fields shared by both tests;
    // only the statistics vary per test case.
    private static ScenarioResult CreateResult(
        DurationStatistics total,
        DurationStatistics insert,
        DurationStatistics correlation,
        ThroughputStatistics totalThroughput,
        ThroughputStatistics insertThroughput,
        AllocationStatistics allocation) =>
        new(
            Id: "scenario",
            Label: "Scenario",
            Iterations: 3,
            ObservationCount: 1000,
            AliasGroups: 100,
            LinksetCount: 90,
            TotalStatistics: total,
            InsertStatistics: insert,
            CorrelationStatistics: correlation,
            TotalThroughputStatistics: totalThroughput,
            InsertThroughputStatistics: insertThroughput,
            AllocationStatistics: allocation,
            ThresholdMs: null,
            MinThroughputThresholdPerSecond: null,
            MinMongoThroughputThresholdPerSecond: null,
            MaxAllocatedThresholdMb: null);
}
|
||||
|
||||
@@ -1,38 +1,38 @@
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using StellaOps.Bench.LinkNotMerge.Baseline;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge.Tests;
|
||||
|
||||
public sealed class LinkNotMergeScenarioRunnerTests
{
    // Runs a small fixed-seed scenario and checks that the measured counters are
    // positive and that the aggregation totals agree with the per-run counters.
    [Fact]
    public void Execute_BuildsDeterministicAggregation()
    {
        var config = new LinkNotMergeScenarioConfig
        {
            Id = "unit",
            Observations = 120,
            AliasGroups = 24,
            PurlsPerObservation = 3,
            CpesPerObservation = 2,
            ReferencesPerObservation = 2,
            Tenants = 3,
            BatchSize = 40,
            Seed = 1337,
        };

        var result = new LinkNotMergeScenarioRunner(config).Execute(iterations: 2, CancellationToken.None);

        Assert.Equal(120, result.ObservationCount);
        Assert.Equal(24, result.AliasGroups);
        Assert.True(result.TotalDurationsMs.All(duration => duration > 0));
        Assert.True(result.InsertThroughputsPerSecond.All(throughput => throughput > 0));
        Assert.True(result.TotalThroughputsPerSecond.All(throughput => throughput > 0));
        Assert.True(result.AllocatedMb.All(allocated => allocated >= 0));
        Assert.Equal(result.AggregationResult.LinksetCount, result.LinksetCount);
        Assert.Equal(result.AggregationResult.ObservationCount, result.ObservationCount);
    }
}
|
||||
using System.Linq;
|
||||
using System.Threading;
|
||||
using StellaOps.Bench.LinkNotMerge.Baseline;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge.Tests;
|
||||
|
||||
public sealed class LinkNotMergeScenarioRunnerTests
{
    // Runs a small fixed-seed scenario and checks that the measured counters are
    // positive and that the aggregation totals agree with the per-run counters.
    [Fact]
    public void Execute_BuildsDeterministicAggregation()
    {
        var config = new LinkNotMergeScenarioConfig
        {
            Id = "unit",
            Observations = 120,
            AliasGroups = 24,
            PurlsPerObservation = 3,
            CpesPerObservation = 2,
            ReferencesPerObservation = 2,
            Tenants = 3,
            BatchSize = 40,
            Seed = 1337,
        };

        var result = new LinkNotMergeScenarioRunner(config).Execute(iterations: 2, CancellationToken.None);

        Assert.Equal(120, result.ObservationCount);
        Assert.Equal(24, result.AliasGroups);
        Assert.True(result.TotalDurationsMs.All(duration => duration > 0));
        Assert.True(result.InsertThroughputsPerSecond.All(throughput => throughput > 0));
        Assert.True(result.TotalThroughputsPerSecond.All(throughput => throughput > 0));
        Assert.True(result.AllocatedMb.All(allocated => allocated >= 0));
        Assert.Equal(result.AggregationResult.LinksetCount, result.LinksetCount);
        Assert.Equal(result.AggregationResult.ObservationCount, result.ObservationCount);
    }
}
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
namespace StellaOps.Bench.LinkNotMerge.Baseline;
|
||||
|
||||
/// <summary>
/// One recorded baseline row for a benchmark scenario, as loaded from the
/// baseline CSV. Field order mirrors the CSV column order.
/// </summary>
/// <param name="ScenarioId">Scenario identifier; the lookup key.</param>
/// <param name="Iterations">Number of benchmark iterations recorded.</param>
/// <param name="Observations">Observation documents ingested per iteration.</param>
/// <param name="Aliases">Alias groups used by the scenario.</param>
/// <param name="Linksets">Linksets produced by correlation.</param>
/// <param name="MeanTotalMs">Mean end-to-end duration in milliseconds.</param>
/// <param name="P95TotalMs">95th-percentile end-to-end duration in milliseconds.</param>
/// <param name="MaxTotalMs">Maximum end-to-end duration in milliseconds.</param>
/// <param name="MeanInsertMs">Mean insert-phase duration in milliseconds.</param>
/// <param name="MeanCorrelationMs">Mean correlation-phase duration in milliseconds.</param>
/// <param name="MeanThroughputPerSecond">Mean overall throughput (items/sec).</param>
/// <param name="MinThroughputPerSecond">Minimum overall throughput (items/sec).</param>
/// <param name="MeanMongoThroughputPerSecond">Mean Mongo insert throughput (items/sec).</param>
/// <param name="MinMongoThroughputPerSecond">Minimum Mongo insert throughput (items/sec).</param>
/// <param name="MaxAllocatedMb">Peak managed allocations in megabytes.</param>
internal sealed record BaselineEntry(
    string ScenarioId,
    int Iterations,
    int Observations,
    int Aliases,
    int Linksets,
    double MeanTotalMs,
    double P95TotalMs,
    double MaxTotalMs,
    double MeanInsertMs,
    double MeanCorrelationMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MeanMongoThroughputPerSecond,
    double MinMongoThroughputPerSecond,
    double MaxAllocatedMb);
|
||||
namespace StellaOps.Bench.LinkNotMerge.Baseline;
|
||||
|
||||
/// <summary>
/// One recorded baseline row for a benchmark scenario, as loaded from the
/// baseline CSV. Field order mirrors the CSV column order.
/// </summary>
internal sealed record BaselineEntry(
    string ScenarioId,          // scenario identifier; the lookup key
    int Iterations,             // benchmark iterations recorded
    int Observations,           // observation documents ingested per iteration
    int Aliases,                // alias groups used by the scenario
    int Linksets,               // linksets produced by correlation
    double MeanTotalMs,         // mean end-to-end duration (ms)
    double P95TotalMs,          // 95th-percentile end-to-end duration (ms)
    double MaxTotalMs,          // maximum end-to-end duration (ms)
    double MeanInsertMs,        // mean insert-phase duration (ms)
    double MeanCorrelationMs,   // mean correlation-phase duration (ms)
    double MeanThroughputPerSecond,      // mean overall throughput (items/sec)
    double MinThroughputPerSecond,       // minimum overall throughput (items/sec)
    double MeanMongoThroughputPerSecond, // mean Mongo insert throughput (items/sec)
    double MinMongoThroughputPerSecond,  // minimum Mongo insert throughput (items/sec)
    double MaxAllocatedMb);     // peak managed allocations (MB)
|
||||
|
||||
@@ -1,87 +1,87 @@
|
||||
using System.Globalization;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge.Baseline;
|
||||
|
||||
internal static class BaselineLoader
{
    /// <summary>
    /// Loads baseline entries from a CSV file, keyed by scenario id
    /// (case-insensitive). Returns an empty map when the file does not exist —
    /// a missing baseline simply means there is nothing to compare against.
    /// The first line is treated as a header and skipped; blank lines are
    /// ignored; a later row with a duplicate scenario id overwrites the earlier one.
    /// </summary>
    /// <param name="path">Path to the baseline CSV; resolved to a full path.</param>
    /// <param name="cancellationToken">Token observed for each line read.</param>
    /// <exception cref="InvalidOperationException">A row has fewer than 15 columns or a malformed number.</exception>
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }

        var result = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);

        await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var reader = new StreamReader(stream);

        var lineNumber = 0;
        while (true)
        {
            cancellationToken.ThrowIfCancellationRequested();

            // Fix: flow the token into ReadLineAsync (.NET 7+ overload) so a
            // pending read can be cancelled, instead of only checking between lines.
            var line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false);
            if (line is null)
            {
                break;
            }

            lineNumber++;
            if (lineNumber == 1 || string.IsNullOrWhiteSpace(line))
            {
                // Skip the header row and any blank lines.
                continue;
            }

            var parts = line.Split(',', StringSplitOptions.TrimEntries);
            if (parts.Length < 15)
            {
                throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 15 columns, found {parts.Length}).");
            }

            var entry = new BaselineEntry(
                ScenarioId: parts[0],
                Iterations: ParseInt(parts[1], resolved, lineNumber),
                Observations: ParseInt(parts[2], resolved, lineNumber),
                Aliases: ParseInt(parts[3], resolved, lineNumber),
                Linksets: ParseInt(parts[4], resolved, lineNumber),
                MeanTotalMs: ParseDouble(parts[5], resolved, lineNumber),
                P95TotalMs: ParseDouble(parts[6], resolved, lineNumber),
                MaxTotalMs: ParseDouble(parts[7], resolved, lineNumber),
                MeanInsertMs: ParseDouble(parts[8], resolved, lineNumber),
                MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber),
                MeanThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber),
                MinThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber),
                MeanMongoThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber),
                MinMongoThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber),
                MaxAllocatedMb: ParseDouble(parts[14], resolved, lineNumber));

            result[entry.ScenarioId] = entry;
        }

        return result;
    }

    // Parses an invariant-culture integer; rethrows with file/line context on failure.
    private static int ParseInt(string value, string file, int line)
    {
        if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var result))
        {
            return result;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
    }

    // Parses an invariant-culture floating-point number; rethrows with file/line context on failure.
    private static double ParseDouble(string value, string file, int line)
    {
        if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result))
        {
            return result;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
    }
}
|
||||
using System.Globalization;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge.Baseline;
|
||||
|
||||
internal static class BaselineLoader
{
    /// <summary>
    /// Loads baseline entries from a CSV file, keyed by scenario id
    /// (case-insensitive). Returns an empty map when the file does not exist —
    /// a missing baseline simply means there is nothing to compare against.
    /// The first line is treated as a header and skipped; blank lines are
    /// ignored; a later row with a duplicate scenario id overwrites the earlier one.
    /// </summary>
    /// <param name="path">Path to the baseline CSV; resolved to a full path.</param>
    /// <param name="cancellationToken">Token observed for each line read.</param>
    /// <exception cref="InvalidOperationException">A row has fewer than 15 columns or a malformed number.</exception>
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }

        var result = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);

        await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var reader = new StreamReader(stream);

        var lineNumber = 0;
        while (true)
        {
            cancellationToken.ThrowIfCancellationRequested();

            // Fix: flow the token into ReadLineAsync (.NET 7+ overload) so a
            // pending read can be cancelled, instead of only checking between lines.
            var line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false);
            if (line is null)
            {
                break;
            }

            lineNumber++;
            if (lineNumber == 1 || string.IsNullOrWhiteSpace(line))
            {
                // Skip the header row and any blank lines.
                continue;
            }

            var parts = line.Split(',', StringSplitOptions.TrimEntries);
            if (parts.Length < 15)
            {
                throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 15 columns, found {parts.Length}).");
            }

            var entry = new BaselineEntry(
                ScenarioId: parts[0],
                Iterations: ParseInt(parts[1], resolved, lineNumber),
                Observations: ParseInt(parts[2], resolved, lineNumber),
                Aliases: ParseInt(parts[3], resolved, lineNumber),
                Linksets: ParseInt(parts[4], resolved, lineNumber),
                MeanTotalMs: ParseDouble(parts[5], resolved, lineNumber),
                P95TotalMs: ParseDouble(parts[6], resolved, lineNumber),
                MaxTotalMs: ParseDouble(parts[7], resolved, lineNumber),
                MeanInsertMs: ParseDouble(parts[8], resolved, lineNumber),
                MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber),
                MeanThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber),
                MinThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber),
                MeanMongoThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber),
                MinMongoThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber),
                MaxAllocatedMb: ParseDouble(parts[14], resolved, lineNumber));

            result[entry.ScenarioId] = entry;
        }

        return result;
    }

    // Parses an invariant-culture integer; rethrows with file/line context on failure.
    private static int ParseInt(string value, string file, int line)
    {
        if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var result))
        {
            return result;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
    }

    // Parses an invariant-culture floating-point number; rethrows with file/line context on failure.
    private static double ParseDouble(string value, string file, int line)
    {
        if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result))
        {
            return result;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
    }
}
|
||||
|
||||
@@ -1,210 +1,210 @@
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge;
|
||||
|
||||
/// <summary>
/// Root benchmark configuration: file-wide default thresholds/iterations plus
/// the list of scenarios to run.
/// </summary>
internal sealed record BenchmarkConfig(
    double? ThresholdMs,
    double? MinThroughputPerSecond,
    double? MinMongoThroughputPerSecond,
    double? MaxAllocatedMb,
    int? Iterations,
    IReadOnlyList<LinkNotMergeScenarioConfig> Scenarios)
{
    // Fix: reuse one options instance. JsonSerializerOptions caches type
    // metadata internally, so allocating a fresh instance per LoadAsync call
    // discards that cache; System.Text.Json guidance is to cache and reuse.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true,
        ReadCommentHandling = JsonCommentHandling.Skip,
        AllowTrailingCommas = true,
    };

    /// <summary>
    /// Loads and validates the benchmark configuration from a JSON file.
    /// </summary>
    /// <param name="path">Path to the JSON configuration; resolved to a full path.</param>
    /// <exception cref="FileNotFoundException">The file does not exist.</exception>
    /// <exception cref="InvalidOperationException">The file cannot be parsed, has no scenarios, or a scenario fails validation.</exception>
    public static async Task<BenchmarkConfig> LoadAsync(string path)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
        }

        await using var stream = File.OpenRead(resolved);
        var model = await JsonSerializer.DeserializeAsync<BenchmarkConfigModel>(stream, SerializerOptions).ConfigureAwait(false);

        if (model is null)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
        }

        if (model.Scenarios.Count == 0)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
        }

        // Fail fast on any invalid scenario before the benchmark starts running.
        foreach (var scenario in model.Scenarios)
        {
            scenario.Validate();
        }

        return new BenchmarkConfig(
            model.ThresholdMs,
            model.MinThroughputPerSecond,
            model.MinMongoThroughputPerSecond,
            model.MaxAllocatedMb,
            model.Iterations,
            model.Scenarios);
    }

    // JSON-shaped mutable mirror of BenchmarkConfig used only during deserialization.
    private sealed class BenchmarkConfigModel
    {
        [JsonPropertyName("thresholdMs")]
        public double? ThresholdMs { get; init; }

        [JsonPropertyName("minThroughputPerSecond")]
        public double? MinThroughputPerSecond { get; init; }

        [JsonPropertyName("minMongoThroughputPerSecond")]
        public double? MinMongoThroughputPerSecond { get; init; }

        [JsonPropertyName("maxAllocatedMb")]
        public double? MaxAllocatedMb { get; init; }

        [JsonPropertyName("iterations")]
        public int? Iterations { get; init; }

        [JsonPropertyName("scenarios")]
        public List<LinkNotMergeScenarioConfig> Scenarios { get; init; } = new();
    }
}
|
||||
|
||||
/// <summary>
/// One benchmark scenario as declared in the JSON configuration. All knobs are
/// optional; the Resolve* accessors substitute the documented defaults when a
/// value is absent or out of range.
/// </summary>
internal sealed class LinkNotMergeScenarioConfig
{
    private const int DefaultObservationCount = 5_000;
    private const int DefaultAliasGroups = 500;
    private const int DefaultPurlsPerObservation = 4;
    private const int DefaultCpesPerObservation = 2;
    private const int DefaultReferencesPerObservation = 3;
    private const int DefaultTenants = 4;
    private const int DefaultBatchSize = 500;
    private const int DefaultSeed = 42_022;

    [JsonPropertyName("id")]
    public string? Id { get; init; }

    [JsonPropertyName("label")]
    public string? Label { get; init; }

    [JsonPropertyName("observations")]
    public int? Observations { get; init; }

    [JsonPropertyName("aliasGroups")]
    public int? AliasGroups { get; init; }

    [JsonPropertyName("purlsPerObservation")]
    public int? PurlsPerObservation { get; init; }

    [JsonPropertyName("cpesPerObservation")]
    public int? CpesPerObservation { get; init; }

    [JsonPropertyName("referencesPerObservation")]
    public int? ReferencesPerObservation { get; init; }

    [JsonPropertyName("tenants")]
    public int? Tenants { get; init; }

    [JsonPropertyName("batchSize")]
    public int? BatchSize { get; init; }

    [JsonPropertyName("seed")]
    public int? Seed { get; init; }

    [JsonPropertyName("iterations")]
    public int? Iterations { get; init; }

    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }

    [JsonPropertyName("minThroughputPerSecond")]
    public double? MinThroughputPerSecond { get; init; }

    [JsonPropertyName("minMongoThroughputPerSecond")]
    public double? MinMongoThroughputPerSecond { get; init; }

    [JsonPropertyName("maxAllocatedMb")]
    public double? MaxAllocatedMb { get; init; }

    // Trimmed scenario id; falls back to "linknotmerge" when Id is absent/blank.
    public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "linknotmerge" : Id!.Trim();

    // Trimmed display label; falls back to the scenario id when Label is absent/blank.
    public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim();

    // Positive knobs fall back to their default when unset or non-positive;
    // the two *PerObservation counts below that may legitimately be zero only
    // fall back when unset or negative.
    public int ResolveObservationCount() =>
        Observations is > 0 ? Observations.Value : DefaultObservationCount;

    public int ResolveAliasGroups() =>
        AliasGroups is > 0 ? AliasGroups.Value : DefaultAliasGroups;

    public int ResolvePurlsPerObservation() =>
        PurlsPerObservation is > 0 ? PurlsPerObservation.Value : DefaultPurlsPerObservation;

    public int ResolveCpesPerObservation() =>
        CpesPerObservation is >= 0 ? CpesPerObservation.Value : DefaultCpesPerObservation;

    public int ResolveReferencesPerObservation() =>
        ReferencesPerObservation is >= 0 ? ReferencesPerObservation.Value : DefaultReferencesPerObservation;

    public int ResolveTenantCount() =>
        Tenants is > 0 ? Tenants.Value : DefaultTenants;

    public int ResolveBatchSize() =>
        BatchSize is > 0 ? BatchSize.Value : DefaultBatchSize;

    public int ResolveSeed() =>
        Seed is > 0 ? Seed.Value : DefaultSeed;

    /// <summary>
    /// Resolves the iteration count: scenario value, then the file-wide default,
    /// then 3. Throws when the resolved value is not positive.
    /// </summary>
    public int ResolveIterations(int? defaultIterations)
    {
        var iterations = Iterations ?? defaultIterations ?? 3;
        return iterations > 0
            ? iterations
            : throw new InvalidOperationException($"Scenario '{ScenarioId}' requires iterations > 0.");
    }

    /// <summary>
    /// Validates the resolved knobs; throws <see cref="InvalidOperationException"/>
    /// on the first violation.
    /// </summary>
    public void Validate()
    {
        // Builds the exception with the shared "Scenario '<id>' ..." prefix.
        InvalidOperationException Fail(string requirement) =>
            new($"Scenario '{ScenarioId}' {requirement}");

        if (ResolveObservationCount() <= 0)
        {
            throw Fail("requires observations > 0.");
        }

        if (ResolveAliasGroups() <= 0)
        {
            throw Fail("requires aliasGroups > 0.");
        }

        if (ResolvePurlsPerObservation() <= 0)
        {
            throw Fail("requires purlsPerObservation > 0.");
        }

        if (ResolveTenantCount() <= 0)
        {
            throw Fail("requires tenants > 0.");
        }

        if (ResolveBatchSize() > ResolveObservationCount())
        {
            throw Fail("batchSize cannot exceed observations.");
        }
    }
}
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge;
|
||||
|
||||
/// <summary>
/// Root benchmark configuration: file-wide default thresholds/iterations plus
/// the list of scenarios to run.
/// </summary>
internal sealed record BenchmarkConfig(
    double? ThresholdMs,
    double? MinThroughputPerSecond,
    double? MinMongoThroughputPerSecond,
    double? MaxAllocatedMb,
    int? Iterations,
    IReadOnlyList<LinkNotMergeScenarioConfig> Scenarios)
{
    // Fix: reuse one options instance. JsonSerializerOptions caches type
    // metadata internally, so allocating a fresh instance per LoadAsync call
    // discards that cache; System.Text.Json guidance is to cache and reuse.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        PropertyNameCaseInsensitive = true,
        ReadCommentHandling = JsonCommentHandling.Skip,
        AllowTrailingCommas = true,
    };

    /// <summary>
    /// Loads and validates the benchmark configuration from a JSON file.
    /// </summary>
    /// <param name="path">Path to the JSON configuration; resolved to a full path.</param>
    /// <exception cref="FileNotFoundException">The file does not exist.</exception>
    /// <exception cref="InvalidOperationException">The file cannot be parsed, has no scenarios, or a scenario fails validation.</exception>
    public static async Task<BenchmarkConfig> LoadAsync(string path)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
        }

        await using var stream = File.OpenRead(resolved);
        var model = await JsonSerializer.DeserializeAsync<BenchmarkConfigModel>(stream, SerializerOptions).ConfigureAwait(false);

        if (model is null)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
        }

        if (model.Scenarios.Count == 0)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
        }

        // Fail fast on any invalid scenario before the benchmark starts running.
        foreach (var scenario in model.Scenarios)
        {
            scenario.Validate();
        }

        return new BenchmarkConfig(
            model.ThresholdMs,
            model.MinThroughputPerSecond,
            model.MinMongoThroughputPerSecond,
            model.MaxAllocatedMb,
            model.Iterations,
            model.Scenarios);
    }

    // JSON-shaped mutable mirror of BenchmarkConfig used only during deserialization.
    private sealed class BenchmarkConfigModel
    {
        [JsonPropertyName("thresholdMs")]
        public double? ThresholdMs { get; init; }

        [JsonPropertyName("minThroughputPerSecond")]
        public double? MinThroughputPerSecond { get; init; }

        [JsonPropertyName("minMongoThroughputPerSecond")]
        public double? MinMongoThroughputPerSecond { get; init; }

        [JsonPropertyName("maxAllocatedMb")]
        public double? MaxAllocatedMb { get; init; }

        [JsonPropertyName("iterations")]
        public int? Iterations { get; init; }

        [JsonPropertyName("scenarios")]
        public List<LinkNotMergeScenarioConfig> Scenarios { get; init; } = new();
    }
}
|
||||
|
||||
/// <summary>
/// JSON-bound settings for one link-not-merge benchmark scenario, plus
/// defaulting accessors for every tunable knob and a consistency check.
/// </summary>
internal sealed class LinkNotMergeScenarioConfig
{
    private const int DefaultObservationCount = 5_000;
    private const int DefaultAliasGroups = 500;
    private const int DefaultPurlsPerObservation = 4;
    private const int DefaultCpesPerObservation = 2;
    private const int DefaultReferencesPerObservation = 3;
    private const int DefaultTenants = 4;
    private const int DefaultBatchSize = 500;
    private const int DefaultSeed = 42_022;

    [JsonPropertyName("id")]
    public string? Id { get; init; }

    [JsonPropertyName("label")]
    public string? Label { get; init; }

    [JsonPropertyName("observations")]
    public int? Observations { get; init; }

    [JsonPropertyName("aliasGroups")]
    public int? AliasGroups { get; init; }

    [JsonPropertyName("purlsPerObservation")]
    public int? PurlsPerObservation { get; init; }

    [JsonPropertyName("cpesPerObservation")]
    public int? CpesPerObservation { get; init; }

    [JsonPropertyName("referencesPerObservation")]
    public int? ReferencesPerObservation { get; init; }

    [JsonPropertyName("tenants")]
    public int? Tenants { get; init; }

    [JsonPropertyName("batchSize")]
    public int? BatchSize { get; init; }

    [JsonPropertyName("seed")]
    public int? Seed { get; init; }

    [JsonPropertyName("iterations")]
    public int? Iterations { get; init; }

    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }

    [JsonPropertyName("minThroughputPerSecond")]
    public double? MinThroughputPerSecond { get; init; }

    [JsonPropertyName("minMongoThroughputPerSecond")]
    public double? MinMongoThroughputPerSecond { get; init; }

    [JsonPropertyName("maxAllocatedMb")]
    public double? MaxAllocatedMb { get; init; }

    /// <summary>Trimmed scenario identifier; falls back to "linknotmerge" when unset or blank.</summary>
    public string ScenarioId => Id is { } id && !string.IsNullOrWhiteSpace(id) ? id.Trim() : "linknotmerge";

    /// <summary>Trimmed display label; falls back to <see cref="ScenarioId"/> when unset or blank.</summary>
    public string DisplayLabel => Label is { } label && !string.IsNullOrWhiteSpace(label) ? label.Trim() : ScenarioId;

    // For the accessors below, a positive override wins; null, zero, or a
    // negative value resolves to the built-in default.
    public int ResolveObservationCount() => Observations is > 0 ? Observations.Value : DefaultObservationCount;

    public int ResolveAliasGroups() => AliasGroups is > 0 ? AliasGroups.Value : DefaultAliasGroups;

    public int ResolvePurlsPerObservation() => PurlsPerObservation is > 0 ? PurlsPerObservation.Value : DefaultPurlsPerObservation;

    // Zero is a legal override here: a scenario may emit no CPEs at all.
    public int ResolveCpesPerObservation() => CpesPerObservation is >= 0 ? CpesPerObservation.Value : DefaultCpesPerObservation;

    // Zero is a legal override here as well.
    public int ResolveReferencesPerObservation() => ReferencesPerObservation is >= 0 ? ReferencesPerObservation.Value : DefaultReferencesPerObservation;

    public int ResolveTenantCount() => Tenants is > 0 ? Tenants.Value : DefaultTenants;

    public int ResolveBatchSize() => BatchSize is > 0 ? BatchSize.Value : DefaultBatchSize;

    public int ResolveSeed() => Seed is > 0 ? Seed.Value : DefaultSeed;

    /// <summary>
    /// Picks the iteration count: scenario override, then the global default, then 3.
    /// </summary>
    /// <exception cref="InvalidOperationException">The resolved count is not positive.</exception>
    public int ResolveIterations(int? defaultIterations)
    {
        var resolved = Iterations ?? defaultIterations ?? 3;
        return resolved > 0
            ? resolved
            : throw new InvalidOperationException($"Scenario '{ScenarioId}' requires iterations > 0.");
    }

    /// <summary>Rejects configurations whose resolved values cannot drive a benchmark run.</summary>
    /// <exception cref="InvalidOperationException">A resolved value is out of range.</exception>
    public void Validate()
    {
        if (ResolveObservationCount() <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires observations > 0.");
        }

        if (ResolveAliasGroups() <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires aliasGroups > 0.");
        }

        if (ResolvePurlsPerObservation() <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires purlsPerObservation > 0.");
        }

        if (ResolveTenantCount() <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires tenants > 0.");
        }

        if (ResolveBatchSize() > ResolveObservationCount())
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' batchSize cannot exceed observations.");
        }
    }
}
|
||||
|
||||
@@ -1,96 +1,96 @@
|
||||
using System.Diagnostics;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge;
|
||||
|
||||
/// <summary>
/// Executes the link-not-merge workload (document materialization followed by
/// linkset correlation) and samples latency, throughput, and allocation per iteration.
/// </summary>
internal sealed class LinkNotMergeScenarioRunner
{
    private readonly LinkNotMergeScenarioConfig _config;
    private readonly IReadOnlyList<ObservationSeed> _seeds;

    /// <summary>Pre-generates the observation seeds for the configured scenario.</summary>
    public LinkNotMergeScenarioRunner(LinkNotMergeScenarioConfig config)
    {
        _config = config ?? throw new ArgumentNullException(nameof(config));
        _seeds = ObservationGenerator.Generate(config);
    }

    /// <summary>
    /// Runs the insert + correlation pipeline <paramref name="iterations"/> times.
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="iterations"/> is not positive.</exception>
    public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }

        var totalMs = new double[iterations];
        var insertMs = new double[iterations];
        var correlateMs = new double[iterations];
        var allocatedMb = new double[iterations];
        var totalRates = new double[iterations];
        var insertRates = new double[iterations];
        LinksetAggregationResult aggregation = new(0, 0, 0, 0, 0);

        for (var i = 0; i < iterations; i++)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var allocatedBefore = GC.GetTotalAllocatedBytes();

            var insertTimer = Stopwatch.StartNew();
            var documents = InsertObservations(_seeds, _config.ResolveBatchSize(), cancellationToken);
            insertTimer.Stop();

            var correlateTimer = Stopwatch.StartNew();
            aggregation = new LinksetAggregator().Correlate(documents);
            correlateTimer.Stop();

            var elapsed = insertTimer.Elapsed + correlateTimer.Elapsed;
            var allocatedAfter = GC.GetTotalAllocatedBytes();

            totalMs[i] = elapsed.TotalMilliseconds;
            insertMs[i] = insertTimer.Elapsed.TotalMilliseconds;
            correlateMs[i] = correlateTimer.Elapsed.TotalMilliseconds;
            allocatedMb[i] = Math.Max(0, allocatedAfter - allocatedBefore) / (1024d * 1024d);

            // Clamp the denominator so a sub-100µs run cannot produce an absurd rate.
            totalRates[i] = _seeds.Count / Math.Max(elapsed.TotalSeconds, 0.0001d);
            insertRates[i] = _seeds.Count / Math.Max(insertTimer.Elapsed.TotalSeconds, 0.0001d);
        }

        return new ScenarioExecutionResult(
            totalMs,
            insertMs,
            correlateMs,
            allocatedMb,
            totalRates,
            insertRates,
            ObservationCount: _seeds.Count,
            AliasGroups: _config.ResolveAliasGroups(),
            LinksetCount: aggregation.LinksetCount,
            TenantCount: _config.ResolveTenantCount(),
            AggregationResult: aggregation);
    }

    /// <summary>
    /// Materializes seeds into documents in cancellable batches of <paramref name="batchSize"/>.
    /// The per-batch staging list mirrors a batched insert's allocation profile.
    /// </summary>
    private static IReadOnlyList<ObservationDocument> InsertObservations(
        IReadOnlyList<ObservationSeed> seeds,
        int batchSize,
        CancellationToken cancellationToken)
    {
        var documents = new List<ObservationDocument>(seeds.Count);
        for (var start = 0; start < seeds.Count; start += batchSize)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var count = Math.Min(batchSize, seeds.Count - start);
            var batch = new List<ObservationDocument>(count);
            for (var offset = 0; offset < count; offset++)
            {
                batch.Add(seeds[start + offset].ToDocument());
            }

            documents.AddRange(batch);
        }

        return documents;
    }
}
|
||||
using System.Diagnostics;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge;
|
||||
|
||||
/// <summary>
/// Drives one benchmark scenario: builds observation documents in batches, then
/// correlates them into linksets, recording timing and allocation samples.
/// NOTE(review): this class appears twice in the file (merge/diff residue) — deduplicate.
/// </summary>
internal sealed class LinkNotMergeScenarioRunner
{
    private readonly LinkNotMergeScenarioConfig _config;
    private readonly IReadOnlyList<ObservationSeed> _seeds;

    public LinkNotMergeScenarioRunner(LinkNotMergeScenarioConfig config)
    {
        _config = config ?? throw new ArgumentNullException(nameof(config));
        _seeds = ObservationGenerator.Generate(config);
    }

    /// <summary>Executes the workload for the given number of iterations.</summary>
    /// <exception cref="ArgumentOutOfRangeException"><paramref name="iterations"/> is not positive.</exception>
    public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }

        double[] totalDurations = new double[iterations];
        double[] insertDurations = new double[iterations];
        double[] correlationDurations = new double[iterations];
        double[] allocations = new double[iterations];
        double[] totalThroughputs = new double[iterations];
        double[] insertThroughputs = new double[iterations];
        var lastAggregation = new LinksetAggregationResult(0, 0, 0, 0, 0);

        var iteration = 0;
        while (iteration < iterations)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var bytesBefore = GC.GetTotalAllocatedBytes();

            var insertWatch = Stopwatch.StartNew();
            var documents = InsertObservations(_seeds, _config.ResolveBatchSize(), cancellationToken);
            insertWatch.Stop();

            var correlationWatch = Stopwatch.StartNew();
            lastAggregation = new LinksetAggregator().Correlate(documents);
            correlationWatch.Stop();

            var combined = insertWatch.Elapsed + correlationWatch.Elapsed;
            var bytesAfter = GC.GetTotalAllocatedBytes();

            totalDurations[iteration] = combined.TotalMilliseconds;
            insertDurations[iteration] = insertWatch.Elapsed.TotalMilliseconds;
            correlationDurations[iteration] = correlationWatch.Elapsed.TotalMilliseconds;
            allocations[iteration] = Math.Max(0, bytesAfter - bytesBefore) / (1024d * 1024d);

            // Floor the elapsed seconds to avoid division blow-ups on very fast runs.
            totalThroughputs[iteration] = _seeds.Count / Math.Max(combined.TotalSeconds, 0.0001d);
            insertThroughputs[iteration] = _seeds.Count / Math.Max(insertWatch.Elapsed.TotalSeconds, 0.0001d);

            iteration++;
        }

        return new ScenarioExecutionResult(
            totalDurations,
            insertDurations,
            correlationDurations,
            allocations,
            totalThroughputs,
            insertThroughputs,
            ObservationCount: _seeds.Count,
            AliasGroups: _config.ResolveAliasGroups(),
            LinksetCount: lastAggregation.LinksetCount,
            TenantCount: _config.ResolveTenantCount(),
            AggregationResult: lastAggregation);
    }

    /// <summary>Converts seeds into documents batch by batch, honoring cancellation between batches.</summary>
    private static IReadOnlyList<ObservationDocument> InsertObservations(
        IReadOnlyList<ObservationSeed> seeds,
        int batchSize,
        CancellationToken cancellationToken)
    {
        var documents = new List<ObservationDocument>(seeds.Count);
        var position = 0;
        while (position < seeds.Count)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var size = Math.Min(batchSize, seeds.Count - position);
            var staging = new List<ObservationDocument>(size);
            for (var i = 0; i < size; i++)
            {
                staging.Add(seeds[position + i].ToDocument());
            }

            documents.AddRange(staging);
            position += size;
        }

        return documents;
    }
}
|
||||
|
||||
@@ -1,121 +1,121 @@
|
||||
namespace StellaOps.Bench.LinkNotMerge;
|
||||
|
||||
/// <summary>
/// Groups observations by (tenant, alias) and tallies distinct purls, CPEs, and
/// reference URLs per group.
/// </summary>
internal sealed class LinksetAggregator
{
    /// <summary>Correlates the documents into per-(tenant, alias) linksets and returns the totals.</summary>
    /// <exception cref="ArgumentNullException"><paramref name="documents"/> is null.</exception>
    public LinksetAggregationResult Correlate(IEnumerable<ObservationDocument> documents)
    {
        ArgumentNullException.ThrowIfNull(documents);

        var buckets = new Dictionary<string, LinksetAccumulator>(StringComparer.Ordinal);
        var observationCount = 0;

        foreach (var document in documents)
        {
            observationCount++;

            var tenant = document.Tenant;
            var linkset = document.Linkset;

            foreach (var alias in linkset.Aliases)
            {
                // "tenant|alias" keys keep groups isolated per tenant.
                var key = string.Concat(tenant, "|", alias);
                if (!buckets.TryGetValue(key, out var bucket))
                {
                    bucket = new LinksetAccumulator(tenant, alias);
                    buckets[key] = bucket;
                }

                bucket.AddPurls(linkset.Purls);
                bucket.AddCpes(linkset.Cpes);
                bucket.AddReferences(linkset.References);
            }
        }

        var purlTotal = 0;
        var cpeTotal = 0;
        var referenceTotal = 0;
        foreach (var bucket in buckets.Values)
        {
            purlTotal += bucket.PurlCount;
            cpeTotal += bucket.CpeCount;
            referenceTotal += bucket.ReferenceCount;
        }

        return new LinksetAggregationResult(
            LinksetCount: buckets.Count,
            ObservationCount: observationCount,
            TotalPurls: purlTotal,
            TotalCpes: cpeTotal,
            TotalReferences: referenceTotal);
    }

    /// <summary>Per-(tenant, alias) bucket tracking distinct purls, CPEs, and reference URLs.</summary>
    private sealed class LinksetAccumulator
    {
        private readonly HashSet<string> _purls = new(StringComparer.Ordinal);
        private readonly HashSet<string> _cpes = new(StringComparer.Ordinal);
        private readonly HashSet<string> _references = new(StringComparer.Ordinal);

        public LinksetAccumulator(string tenant, string alias)
        {
            Tenant = tenant;
            Alias = alias;
        }

        public string Tenant { get; }

        public string Alias { get; }

        public int PurlCount => _purls.Count;

        public int CpeCount => _cpes.Count;

        public int ReferenceCount => _references.Count;

        // Null/empty entries are skipped rather than counted.
        public void AddPurls(IEnumerable<string> values)
        {
            foreach (var value in values)
            {
                if (!string.IsNullOrEmpty(value))
                {
                    _purls.Add(value);
                }
            }
        }

        public void AddCpes(IEnumerable<string> values)
        {
            foreach (var value in values)
            {
                if (!string.IsNullOrEmpty(value))
                {
                    _cpes.Add(value);
                }
            }
        }

        // References are deduplicated by URL only.
        public void AddReferences(IEnumerable<ObservationReference> values)
        {
            foreach (var value in values)
            {
                if (!string.IsNullOrEmpty(value.Url))
                {
                    _references.Add(value.Url);
                }
            }
        }
    }
}
|
||||
|
||||
/// <summary>
/// Totals produced by <see cref="LinksetAggregator.Correlate"/>: the number of
/// distinct (tenant, alias) linksets, the observations processed, and the
/// distinct purl/CPE/reference counts summed across all linksets.
/// </summary>
internal sealed record LinksetAggregationResult(
    int LinksetCount,
    int ObservationCount,
    int TotalPurls,
    int TotalCpes,
    int TotalReferences);
|
||||
namespace StellaOps.Bench.LinkNotMerge;
|
||||
|
||||
/// <summary>
/// Correlates observation documents into per-(tenant, alias) linksets with
/// distinct purl/CPE/reference counts.
/// NOTE(review): this class appears twice in the file (merge/diff residue) — deduplicate.
/// </summary>
internal sealed class LinksetAggregator
{
    /// <exception cref="ArgumentNullException"><paramref name="documents"/> is null.</exception>
    public LinksetAggregationResult Correlate(IEnumerable<ObservationDocument> documents)
    {
        ArgumentNullException.ThrowIfNull(documents);

        var accumulators = new Dictionary<string, LinksetAccumulator>(StringComparer.Ordinal);
        var seen = 0;

        foreach (var document in documents)
        {
            seen++;

            var tenant = document.Tenant;
            var linkset = document.Linkset;

            foreach (var alias in linkset.Aliases)
            {
                // Composite key scopes each alias group to its tenant.
                var key = $"{tenant}|{alias}";
                if (!accumulators.TryGetValue(key, out var accumulator))
                {
                    accumulator = new LinksetAccumulator(tenant, alias);
                    accumulators[key] = accumulator;
                }

                accumulator.AddPurls(linkset.Purls);
                accumulator.AddCpes(linkset.Cpes);
                accumulator.AddReferences(linkset.References);
            }
        }

        var purls = 0;
        var cpes = 0;
        var references = 0;
        foreach (var accumulator in accumulators.Values)
        {
            purls += accumulator.PurlCount;
            cpes += accumulator.CpeCount;
            references += accumulator.ReferenceCount;
        }

        return new LinksetAggregationResult(
            LinksetCount: accumulators.Count,
            ObservationCount: seen,
            TotalPurls: purls,
            TotalCpes: cpes,
            TotalReferences: references);
    }

    /// <summary>Tracks distinct purls, CPEs, and reference URLs for one (tenant, alias) pair.</summary>
    private sealed class LinksetAccumulator
    {
        private readonly HashSet<string> _purls = new(StringComparer.Ordinal);
        private readonly HashSet<string> _cpes = new(StringComparer.Ordinal);
        private readonly HashSet<string> _references = new(StringComparer.Ordinal);

        public LinksetAccumulator(string tenant, string alias)
        {
            Tenant = tenant;
            Alias = alias;
        }

        public string Tenant { get; }

        public string Alias { get; }

        public int PurlCount => _purls.Count;

        public int CpeCount => _cpes.Count;

        public int ReferenceCount => _references.Count;

        // Blank entries are ignored; HashSet handles deduplication.
        public void AddPurls(IEnumerable<string> items)
        {
            foreach (var item in items)
            {
                if (!string.IsNullOrEmpty(item))
                {
                    _purls.Add(item);
                }
            }
        }

        public void AddCpes(IEnumerable<string> items)
        {
            foreach (var item in items)
            {
                if (!string.IsNullOrEmpty(item))
                {
                    _cpes.Add(item);
                }
            }
        }

        public void AddReferences(IEnumerable<ObservationReference> items)
        {
            foreach (var item in items)
            {
                if (!string.IsNullOrEmpty(item.Url))
                {
                    _references.Add(item.Url);
                }
            }
        }
    }
}
|
||||
|
||||
/// <summary>
/// Aggregation totals returned by <see cref="LinksetAggregator.Correlate"/>.
/// NOTE(review): duplicate declaration — this record also appears earlier in the file
/// (merge/diff residue); deduplicate before compiling.
/// </summary>
internal sealed record LinksetAggregationResult(
    int LinksetCount,
    int ObservationCount,
    int TotalPurls,
    int TotalCpes,
    int TotalReferences);
|
||||
|
||||
@@ -1,375 +1,375 @@
|
||||
using System.Globalization;
|
||||
using StellaOps.Bench.LinkNotMerge.Baseline;
|
||||
using StellaOps.Bench.LinkNotMerge.Reporting;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge;
|
||||
|
||||
internal static class Program
{
    /// <summary>
    /// Entry point: loads config and baseline, runs every scenario, prints the
    /// summary table, writes optional CSV/JSON/Prometheus outputs, and returns 1
    /// when any latency threshold, throughput floor, allocation budget, or
    /// baseline regression check fails; otherwise 0.
    /// </summary>
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = ProgramOptions.Parse(args);
            var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
            var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);

            var results = new List<ScenarioResult>();
            var reports = new List<BenchmarkScenarioReport>();
            var failures = new List<string>();

            foreach (var scenario in config.Scenarios)
            {
                var iterations = scenario.ResolveIterations(config.Iterations);
                var runner = new LinkNotMergeScenarioRunner(scenario);
                var execution = runner.Execute(iterations, CancellationToken.None);

                var totalStats = DurationStatistics.From(execution.TotalDurationsMs);
                var insertStats = DurationStatistics.From(execution.InsertDurationsMs);
                var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs);
                var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
                var throughputStats = ThroughputStatistics.From(execution.TotalThroughputsPerSecond);
                var mongoThroughputStats = ThroughputStatistics.From(execution.InsertThroughputsPerSecond);

                // Precedence: per-scenario override, then CLI flag, then config-wide value.
                var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
                var throughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
                var mongoThroughputFloor = scenario.MinMongoThroughputPerSecond ?? options.MinMongoThroughputPerSecond ?? config.MinMongoThroughputPerSecond;
                var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;

                var result = new ScenarioResult(
                    scenario.ScenarioId,
                    scenario.DisplayLabel,
                    iterations,
                    execution.ObservationCount,
                    execution.AliasGroups,
                    execution.LinksetCount,
                    totalStats,
                    insertStats,
                    correlationStats,
                    throughputStats,
                    mongoThroughputStats,
                    allocationStats,
                    thresholdMs,
                    throughputFloor,
                    mongoThroughputFloor,
                    allocationLimit);

                results.Add(result);
                CollectBudgetFailures(result, thresholdMs, throughputFloor, mongoThroughputFloor, allocationLimit, failures);

                baseline.TryGetValue(result.Id, out var baselineEntry);
                var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit);
                reports.Add(report);
                failures.AddRange(report.BuildRegressionFailureMessages());
            }

            TablePrinter.Print(results);

            if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
            {
                CsvWriter.Write(options.CsvOutPath!, results);
            }

            if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
            {
                var metadata = new BenchmarkJsonMetadata(
                    SchemaVersion: "linknotmerge-bench/1.0",
                    CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(),
                    Commit: options.Commit,
                    Environment: options.Environment);

                await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false);
            }

            if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
            {
                PrometheusWriter.Write(options.PrometheusOutPath!, reports);
            }

            if (failures.Count > 0)
            {
                Console.Error.WriteLine();
                Console.Error.WriteLine("Benchmark failures detected:");
                foreach (var failure in failures.Distinct())
                {
                    Console.Error.WriteLine($" - {failure}");
                }

                return 1;
            }

            return 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"linknotmerge-bench error: {ex.Message}");
            return 1;
        }
    }

    /// <summary>Appends one failure message per budget the scenario result exceeds.</summary>
    private static void CollectBudgetFailures(
        ScenarioResult result,
        double? thresholdMs,
        double? throughputFloor,
        double? mongoThroughputFloor,
        double? allocationLimit,
        List<string> failures)
    {
        if (thresholdMs is { } threshold && result.TotalStatistics.MaxMs > threshold)
        {
            failures.Add($"{result.Id} exceeded total latency threshold: {result.TotalStatistics.MaxMs:F2} ms > {threshold:F2} ms");
        }

        if (throughputFloor is { } floor && result.TotalThroughputStatistics.MinPerSecond < floor)
        {
            failures.Add($"{result.Id} fell below throughput floor: {result.TotalThroughputStatistics.MinPerSecond:N0} obs/s < {floor:N0} obs/s");
        }

        if (mongoThroughputFloor is { } mongoFloor && result.InsertThroughputStatistics.MinPerSecond < mongoFloor)
        {
            failures.Add($"{result.Id} fell below Mongo throughput floor: {result.InsertThroughputStatistics.MinPerSecond:N0} ops/s < {mongoFloor:N0} ops/s");
        }

        if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit)
        {
            failures.Add($"{result.Id} exceeded allocation budget: {result.AllocationStatistics.MaxAllocatedMb:F2} MB > {limit:F2} MB");
        }
    }

    /// <summary>Parsed command-line options; all thresholds are optional overrides.</summary>
    private sealed record ProgramOptions(
        string ConfigPath,
        int? Iterations,
        double? ThresholdMs,
        double? MinThroughputPerSecond,
        double? MinMongoThroughputPerSecond,
        double? MaxAllocatedMb,
        string? CsvOutPath,
        string? JsonOutPath,
        string? PrometheusOutPath,
        string BaselinePath,
        DateTimeOffset? CapturedAtUtc,
        string? Commit,
        string? Environment,
        double? RegressionLimit)
    {
        /// <summary>Parses CLI arguments; exits the process for --help/-h.</summary>
        /// <exception cref="ArgumentException">An unknown flag or a flag missing its value.</exception>
        /// <exception cref="FormatException">A numeric or timestamp value fails to parse.</exception>
        public static ProgramOptions Parse(string[] args)
        {
            var configPath = DefaultConfigPath();
            var baselinePath = DefaultBaselinePath();

            int? iterations = null;
            double? thresholdMs = null;
            double? minThroughput = null;
            double? minMongoThroughput = null;
            double? maxAllocated = null;
            string? csvOut = null;
            string? jsonOut = null;
            string? promOut = null;
            DateTimeOffset? capturedAt = null;
            string? commit = null;
            string? environment = null;
            double? regressionLimit = null;

            for (var index = 0; index < args.Length; index++)
            {
                var current = args[index];
                switch (current)
                {
                    case "--config":
                        EnsureNext(args, index);
                        configPath = Path.GetFullPath(args[++index]);
                        break;
                    case "--iterations":
                        EnsureNext(args, index);
                        iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--threshold-ms":
                        EnsureNext(args, index);
                        thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-throughput":
                        EnsureNext(args, index);
                        minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-mongo-throughput":
                        EnsureNext(args, index);
                        minMongoThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--max-allocated-mb":
                        EnsureNext(args, index);
                        maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--csv":
                        EnsureNext(args, index);
                        csvOut = args[++index];
                        break;
                    case "--json":
                        EnsureNext(args, index);
                        jsonOut = args[++index];
                        break;
                    case "--prometheus":
                        EnsureNext(args, index);
                        promOut = args[++index];
                        break;
                    case "--baseline":
                        EnsureNext(args, index);
                        baselinePath = Path.GetFullPath(args[++index]);
                        break;
                    case "--captured-at":
                        EnsureNext(args, index);
                        capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
                        break;
                    case "--commit":
                        EnsureNext(args, index);
                        commit = args[++index];
                        break;
                    case "--environment":
                        EnsureNext(args, index);
                        environment = args[++index];
                        break;
                    case "--regression-limit":
                        EnsureNext(args, index);
                        regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--help":
                    case "-h":
                        PrintUsage();
                        // Fully qualified: the record's Environment property shadows System.Environment here.
                        System.Environment.Exit(0);
                        break;
                    default:
                        throw new ArgumentException($"Unknown argument '{current}'.");
                }
            }

            return new ProgramOptions(
                configPath,
                iterations,
                thresholdMs,
                minThroughput,
                minMongoThroughput,
                maxAllocated,
                csvOut,
                jsonOut,
                promOut,
                baselinePath,
                capturedAt,
                commit,
                environment,
                regressionLimit);
        }

        private static string DefaultConfigPath() => DefaultBenchRootFile("config.json");

        private static string DefaultBaselinePath() => DefaultBenchRootFile("baseline.csv");

        /// <summary>
        /// Resolves a default artifact path in the bench root: three levels above the
        /// binaries (the project directory), then one level up.
        /// </summary>
        private static string DefaultBenchRootFile(string fileName)
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, fileName);
        }

        /// <summary>Throws when a flag at <paramref name="index"/> has no following value.</summary>
        private static void EnsureNext(string[] args, int index)
        {
            if (index + 1 >= args.Length)
            {
                // Name the offending flag so the user can fix the command line.
                throw new ArgumentException($"Missing value for argument '{args[index]}'.");
            }
        }

        private static void PrintUsage()
        {
            Console.WriteLine("Usage: linknotmerge-bench [options]");
            Console.WriteLine();
            Console.WriteLine("Options:");
            Console.WriteLine("  --config <path>                 Path to benchmark configuration JSON.");
            Console.WriteLine("  --iterations <count>            Override iteration count.");
            Console.WriteLine("  --threshold-ms <value>          Global latency threshold in milliseconds.");
            Console.WriteLine("  --min-throughput <value>        Global throughput floor (observations/second).");
            Console.WriteLine("  --min-mongo-throughput <value>  Mongo insert throughput floor (ops/second).");
            Console.WriteLine("  --max-allocated-mb <value>      Global allocation ceiling (MB).");
            Console.WriteLine("  --csv <path>                    Write CSV results to path.");
            Console.WriteLine("  --json <path>                   Write JSON results to path.");
            Console.WriteLine("  --prometheus <path>             Write Prometheus exposition metrics to path.");
            Console.WriteLine("  --baseline <path>               Baseline CSV path.");
            Console.WriteLine("  --captured-at <iso8601>         Timestamp to embed in JSON metadata.");
            Console.WriteLine("  --commit <sha>                  Commit identifier for metadata.");
            Console.WriteLine("  --environment <name>            Environment label for metadata.");
            Console.WriteLine("  --regression-limit <value>      Regression multiplier (default 1.15).");
        }
    }
}
|
||||
|
||||
/// <summary>Writes a pipe-delimited summary table of scenario results to stdout.</summary>
internal static class TablePrinter
{
    public static void Print(IEnumerable<ScenarioResult> results)
    {
        Console.WriteLine("Scenario | Observations | Aliases | Linksets | Total(ms) | Correl(ms) | Insert(ms) | Min k/s | Mongo k/s | Alloc(MB)");
        Console.WriteLine("---------------------------- | ------------- | ------- | -------- | ---------- | ---------- | ----------- | -------- | --------- | --------");
        foreach (var row in results)
        {
            // Columns are pre-formatted by ScenarioResult; this only joins them.
            string[] columns =
            {
                row.IdColumn,
                row.ObservationsColumn,
                row.AliasColumn,
                row.LinksetColumn,
                row.TotalMeanColumn,
                row.CorrelationMeanColumn,
                row.InsertMeanColumn,
                row.ThroughputColumn,
                row.MongoThroughputColumn,
                row.AllocatedColumn,
            };

            Console.WriteLine(string.Join(" | ", columns));
        }
    }
}
|
||||
|
||||
/// <summary>
/// Writes scenario results to a CSV file using invariant-culture formatting
/// (counts as plain integers, durations/throughputs/allocations with 4 decimals).
/// </summary>
internal static class CsvWriter
{
    /// <summary>Writes <paramref name="results"/> to <paramref name="path"/>, creating parent directories as needed.</summary>
    public static void Write(string path, IEnumerable<ScenarioResult> results)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(results);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        // Create/truncate the target; exclusive access while writing.
        using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        using var writer = new StreamWriter(stream);
        writer.WriteLine("scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb");

        var invariant = CultureInfo.InvariantCulture;
        foreach (var result in results)
        {
            // Field order mirrors the header row above exactly.
            string[] fields =
            {
                result.Id,
                result.Iterations.ToString(invariant),
                result.ObservationCount.ToString(invariant),
                result.AliasGroups.ToString(invariant),
                result.LinksetCount.ToString(invariant),
                result.TotalStatistics.MeanMs.ToString("F4", invariant),
                result.TotalStatistics.P95Ms.ToString("F4", invariant),
                result.TotalStatistics.MaxMs.ToString("F4", invariant),
                result.InsertStatistics.MeanMs.ToString("F4", invariant),
                result.CorrelationStatistics.MeanMs.ToString("F4", invariant),
                result.TotalThroughputStatistics.MeanPerSecond.ToString("F4", invariant),
                result.TotalThroughputStatistics.MinPerSecond.ToString("F4", invariant),
                result.InsertThroughputStatistics.MeanPerSecond.ToString("F4", invariant),
                result.InsertThroughputStatistics.MinPerSecond.ToString("F4", invariant),
                result.AllocationStatistics.MaxAllocatedMb.ToString("F4", invariant),
            };

            writer.WriteLine(string.Join(",", fields));
        }
    }
}
|
||||
using System.Globalization;
|
||||
using StellaOps.Bench.LinkNotMerge.Baseline;
|
||||
using StellaOps.Bench.LinkNotMerge.Reporting;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge;
|
||||
|
||||
/// <summary>
/// linknotmerge-bench entry point: runs every configured scenario, prints a summary
/// table, optionally emits CSV/JSON/Prometheus artifacts, and returns exit code 1
/// when any latency/throughput/allocation threshold or baseline regression is breached.
/// </summary>
internal static class Program
{
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = ProgramOptions.Parse(args);
            var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
            var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);

            var results = new List<ScenarioResult>();
            var reports = new List<BenchmarkScenarioReport>();
            var failures = new List<string>();

            foreach (var scenario in config.Scenarios)
            {
                // FIX: honor the --iterations CLI override. It was previously parsed into
                // options.Iterations but never consulted, so the flag had no effect.
                var iterations = scenario.ResolveIterations(options.Iterations ?? config.Iterations);
                var runner = new LinkNotMergeScenarioRunner(scenario);
                var execution = runner.Execute(iterations, CancellationToken.None);

                var totalStats = DurationStatistics.From(execution.TotalDurationsMs);
                var insertStats = DurationStatistics.From(execution.InsertDurationsMs);
                var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs);
                var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
                var throughputStats = ThroughputStatistics.From(execution.TotalThroughputsPerSecond);
                var mongoThroughputStats = ThroughputStatistics.From(execution.InsertThroughputsPerSecond);

                // Limit precedence: per-scenario override > CLI flag > config default.
                var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
                var throughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
                var mongoThroughputFloor = scenario.MinMongoThroughputPerSecond ?? options.MinMongoThroughputPerSecond ?? config.MinMongoThroughputPerSecond;
                var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;

                var result = new ScenarioResult(
                    scenario.ScenarioId,
                    scenario.DisplayLabel,
                    iterations,
                    execution.ObservationCount,
                    execution.AliasGroups,
                    execution.LinksetCount,
                    totalStats,
                    insertStats,
                    correlationStats,
                    throughputStats,
                    mongoThroughputStats,
                    allocationStats,
                    thresholdMs,
                    throughputFloor,
                    mongoThroughputFloor,
                    allocationLimit);

                results.Add(result);
                CollectThresholdFailures(result, thresholdMs, throughputFloor, mongoThroughputFloor, allocationLimit, failures);

                // Baseline comparison: missing entries simply produce no regression checks.
                baseline.TryGetValue(result.Id, out var baselineEntry);
                var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit);
                reports.Add(report);
                failures.AddRange(report.BuildRegressionFailureMessages());
            }

            TablePrinter.Print(results);

            if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
            {
                CsvWriter.Write(options.CsvOutPath!, results);
            }

            if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
            {
                var metadata = new BenchmarkJsonMetadata(
                    SchemaVersion: "linknotmerge-bench/1.0",
                    CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(),
                    Commit: options.Commit,
                    Environment: options.Environment);

                await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false);
            }

            if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
            {
                PrometheusWriter.Write(options.PrometheusOutPath!, reports);
            }

            if (failures.Count > 0)
            {
                Console.Error.WriteLine();
                Console.Error.WriteLine("Benchmark failures detected:");
                foreach (var failure in failures.Distinct())
                {
                    Console.Error.WriteLine($" - {failure}");
                }

                return 1;
            }

            return 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"linknotmerge-bench error: {ex.Message}");
            return 1;
        }
    }

    /// <summary>
    /// Appends one human-readable failure message per hard limit the result violates.
    /// Null limits mean "not configured" and are skipped.
    /// </summary>
    private static void CollectThresholdFailures(
        ScenarioResult result,
        double? thresholdMs,
        double? throughputFloor,
        double? mongoThroughputFloor,
        double? allocationLimit,
        List<string> failures)
    {
        if (thresholdMs is { } threshold && result.TotalStatistics.MaxMs > threshold)
        {
            failures.Add($"{result.Id} exceeded total latency threshold: {result.TotalStatistics.MaxMs:F2} ms > {threshold:F2} ms");
        }

        if (throughputFloor is { } floor && result.TotalThroughputStatistics.MinPerSecond < floor)
        {
            failures.Add($"{result.Id} fell below throughput floor: {result.TotalThroughputStatistics.MinPerSecond:N0} obs/s < {floor:N0} obs/s");
        }

        if (mongoThroughputFloor is { } mongoFloor && result.InsertThroughputStatistics.MinPerSecond < mongoFloor)
        {
            failures.Add($"{result.Id} fell below Mongo throughput floor: {result.InsertThroughputStatistics.MinPerSecond:N0} ops/s < {mongoFloor:N0} ops/s");
        }

        if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit)
        {
            failures.Add($"{result.Id} exceeded allocation budget: {result.AllocationStatistics.MaxAllocatedMb:F2} MB > {limit:F2} MB");
        }
    }

    /// <summary>Parsed command-line options; null members mean "flag not supplied".</summary>
    private sealed record ProgramOptions(
        string ConfigPath,
        int? Iterations,
        double? ThresholdMs,
        double? MinThroughputPerSecond,
        double? MinMongoThroughputPerSecond,
        double? MaxAllocatedMb,
        string? CsvOutPath,
        string? JsonOutPath,
        string? PrometheusOutPath,
        string BaselinePath,
        DateTimeOffset? CapturedAtUtc,
        string? Commit,
        string? Environment,
        double? RegressionLimit)
    {
        /// <summary>
        /// Parses CLI arguments. Throws <see cref="ArgumentException"/> on an unknown flag
        /// or a flag missing its value; exits the process (code 0) on --help/-h.
        /// </summary>
        public static ProgramOptions Parse(string[] args)
        {
            var configPath = DefaultConfigPath();
            var baselinePath = DefaultBaselinePath();

            int? iterations = null;
            double? thresholdMs = null;
            double? minThroughput = null;
            double? minMongoThroughput = null;
            double? maxAllocated = null;
            string? csvOut = null;
            string? jsonOut = null;
            string? promOut = null;
            DateTimeOffset? capturedAt = null;
            string? commit = null;
            string? environment = null;
            double? regressionLimit = null;

            for (var index = 0; index < args.Length; index++)
            {
                var current = args[index];
                switch (current)
                {
                    case "--config":
                        EnsureNext(args, index);
                        configPath = Path.GetFullPath(args[++index]);
                        break;
                    case "--iterations":
                        EnsureNext(args, index);
                        iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--threshold-ms":
                        EnsureNext(args, index);
                        thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-throughput":
                        EnsureNext(args, index);
                        minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-mongo-throughput":
                        EnsureNext(args, index);
                        minMongoThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--max-allocated-mb":
                        EnsureNext(args, index);
                        maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--csv":
                        EnsureNext(args, index);
                        csvOut = args[++index];
                        break;
                    case "--json":
                        EnsureNext(args, index);
                        jsonOut = args[++index];
                        break;
                    case "--prometheus":
                        EnsureNext(args, index);
                        promOut = args[++index];
                        break;
                    case "--baseline":
                        EnsureNext(args, index);
                        baselinePath = Path.GetFullPath(args[++index]);
                        break;
                    case "--captured-at":
                        EnsureNext(args, index);
                        // Timestamps without an offset are treated as UTC.
                        capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
                        break;
                    case "--commit":
                        EnsureNext(args, index);
                        commit = args[++index];
                        break;
                    case "--environment":
                        EnsureNext(args, index);
                        environment = args[++index];
                        break;
                    case "--regression-limit":
                        EnsureNext(args, index);
                        regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--help":
                    case "-h":
                        PrintUsage();
                        // "Environment" is shadowed by the record member, hence the qualification.
                        System.Environment.Exit(0);
                        break;
                    default:
                        throw new ArgumentException($"Unknown argument '{current}'.");
                }
            }

            return new ProgramOptions(
                configPath,
                iterations,
                thresholdMs,
                minThroughput,
                minMongoThroughput,
                maxAllocated,
                csvOut,
                jsonOut,
                promOut,
                baselinePath,
                capturedAt,
                commit,
                environment,
                regressionLimit);
        }

        // Walks up from bin/<config>/<tfm> to the bench root directory.
        // NOTE(review): assumes the standard dotnet output layout — confirm for publish scenarios.
        private static string BenchRootDirectory()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            return Path.GetFullPath(Path.Combine(projectDir, ".."));
        }

        private static string DefaultConfigPath() => Path.Combine(BenchRootDirectory(), "config.json");

        private static string DefaultBaselinePath() => Path.Combine(BenchRootDirectory(), "baseline.csv");

        // Guards the ++index lookahead: the flag at `index` requires a value argument.
        private static void EnsureNext(string[] args, int index)
        {
            if (index + 1 >= args.Length)
            {
                // Name the offending flag so the user knows which value is missing.
                throw new ArgumentException($"Missing value for argument '{args[index]}'.");
            }
        }

        private static void PrintUsage()
        {
            Console.WriteLine("Usage: linknotmerge-bench [options]");
            Console.WriteLine();
            Console.WriteLine("Options:");
            Console.WriteLine(" --config <path> Path to benchmark configuration JSON.");
            Console.WriteLine(" --iterations <count> Override iteration count.");
            Console.WriteLine(" --threshold-ms <value> Global latency threshold in milliseconds.");
            Console.WriteLine(" --min-throughput <value> Global throughput floor (observations/second).");
            Console.WriteLine(" --min-mongo-throughput <value> Mongo insert throughput floor (ops/second).");
            Console.WriteLine(" --max-allocated-mb <value> Global allocation ceiling (MB).");
            Console.WriteLine(" --csv <path> Write CSV results to path.");
            Console.WriteLine(" --json <path> Write JSON results to path.");
            Console.WriteLine(" --prometheus <path> Write Prometheus exposition metrics to path.");
            Console.WriteLine(" --baseline <path> Baseline CSV path.");
            Console.WriteLine(" --captured-at <iso8601> Timestamp to embed in JSON metadata.");
            Console.WriteLine(" --commit <sha> Commit identifier for metadata.");
            Console.WriteLine(" --environment <name> Environment label for metadata.");
            Console.WriteLine(" --regression-limit <value> Regression multiplier (default 1.15).");
        }
    }
}
|
||||
|
||||
/// <summary>
/// Renders benchmark scenario results as a pipe-delimited ASCII table on stdout.
/// </summary>
internal static class TablePrinter
{
    /// <summary>Prints a fixed header followed by one row per scenario result.</summary>
    public static void Print(IEnumerable<ScenarioResult> results)
    {
        Console.WriteLine("Scenario | Observations | Aliases | Linksets | Total(ms) | Correl(ms) | Insert(ms) | Min k/s | Mongo k/s | Alloc(MB)");
        Console.WriteLine("---------------------------- | ------------- | ------- | -------- | ---------- | ---------- | ----------- | -------- | --------- | --------");
        foreach (var row in results)
        {
            // Each column is a pre-formatted string exposed by ScenarioResult.
            Console.WriteLine(string.Join(" | ", new[]
            {
                row.IdColumn,
                row.ObservationsColumn,
                row.AliasColumn,
                row.LinksetColumn,
                row.TotalMeanColumn,
                row.CorrelationMeanColumn,
                row.InsertMeanColumn,
                row.ThroughputColumn,
                row.MongoThroughputColumn,
                row.AllocatedColumn,
            }));
        }
    }
}
|
||||
|
||||
/// <summary>
/// Writes scenario results to a CSV file using invariant-culture formatting
/// (counts as plain integers, durations/throughputs/allocations with 4 decimals).
/// </summary>
internal static class CsvWriter
{
    /// <summary>Writes <paramref name="results"/> to <paramref name="path"/>, creating parent directories as needed.</summary>
    public static void Write(string path, IEnumerable<ScenarioResult> results)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(results);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        // Create/truncate the target; exclusive access while writing.
        using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        using var writer = new StreamWriter(stream);
        writer.WriteLine("scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb");

        // Field order below mirrors the header row above exactly.
        foreach (var result in results)
        {
            writer.Write(result.Id);
            writer.Write(',');
            writer.Write(result.Iterations.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.ObservationCount.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.AliasGroups.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.LinksetCount.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalStatistics.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalStatistics.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.InsertStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.CorrelationStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.TotalThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.InsertThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.InsertThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(result.AllocationStatistics.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture));
            writer.WriteLine();
        }
    }
}
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
using System.Runtime.CompilerServices;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Bench.LinkNotMerge.Tests")]
|
||||
using System.Runtime.CompilerServices;
|
||||
|
||||
[assembly: InternalsVisibleTo("StellaOps.Bench.LinkNotMerge.Tests")]
|
||||
|
||||
@@ -1,151 +1,151 @@
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge.Reporting;
|
||||
|
||||
/// <summary>
/// Serializes benchmark scenario reports (plus capture metadata) to an indented
/// JSON document on disk.
/// </summary>
internal static class BenchmarkJsonWriter
{
    // Web defaults (camelCase names) with indentation; null properties are omitted,
    // so baseline-less scenarios simply lack a "baseline" object.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    };

    /// <summary>
    /// Writes <paramref name="reports"/> with <paramref name="metadata"/> to
    /// <paramref name="path"/>, creating the parent directory if needed.
    /// </summary>
    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());

        // FileMode.Create truncates any existing file; FileShare.None blocks concurrent writers.
        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    // Maps one report (result + optional baseline + regression ratios) onto the JSON shape.
    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;
        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.ObservationCount,
            report.Result.AliasGroups,
            report.Result.LinksetCount,
            report.Result.TotalStatistics.MeanMs,
            report.Result.TotalStatistics.P95Ms,
            report.Result.TotalStatistics.MaxMs,
            report.Result.InsertStatistics.MeanMs,
            report.Result.CorrelationStatistics.MeanMs,
            report.Result.TotalThroughputStatistics.MeanPerSecond,
            report.Result.TotalThroughputStatistics.MinPerSecond,
            report.Result.InsertThroughputStatistics.MeanPerSecond,
            report.Result.InsertThroughputStatistics.MinPerSecond,
            report.Result.AllocationStatistics.MaxAllocatedMb,
            report.Result.ThresholdMs,
            report.Result.MinThroughputThresholdPerSecond,
            report.Result.MinMongoThroughputThresholdPerSecond,
            report.Result.MaxAllocatedThresholdMb,
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.Observations,
                    baseline.Aliases,
                    baseline.Linksets,
                    baseline.MeanTotalMs,
                    baseline.P95TotalMs,
                    baseline.MaxTotalMs,
                    baseline.MeanInsertMs,
                    baseline.MeanCorrelationMs,
                    baseline.MeanThroughputPerSecond,
                    baseline.MinThroughputPerSecond,
                    baseline.MeanMongoThroughputPerSecond,
                    baseline.MinMongoThroughputPerSecond,
                    baseline.MaxAllocatedMb),
            new BenchmarkJsonScenarioRegression(
                report.DurationRegressionRatio,
                report.ThroughputRegressionRatio,
                report.MongoThroughputRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    // --- Private JSON document shapes; property names become camelCase on the wire. ---

    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int Observations,
        int Aliases,
        int Linksets,
        double MeanTotalMs,
        double P95TotalMs,
        double MaxTotalMs,
        double MeanInsertMs,
        double MeanCorrelationMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MeanMongoThroughputPerSecond,
        double MinMongoThroughputPerSecond,
        double MaxAllocatedMb,
        double? ThresholdMs,
        double? MinThroughputThresholdPerSecond,
        double? MinMongoThroughputThresholdPerSecond,
        double? MaxAllocatedThresholdMb,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int Observations,
        int Aliases,
        int Linksets,
        double MeanTotalMs,
        double P95TotalMs,
        double MaxTotalMs,
        double MeanInsertMs,
        double MeanCorrelationMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MeanMongoThroughputPerSecond,
        double MinMongoThroughputPerSecond,
        double MaxAllocatedMb);

    private sealed record BenchmarkJsonScenarioRegression(
        double? DurationRatio,
        double? ThroughputRatio,
        double? MongoThroughputRatio,
        double Limit,
        bool Breached);
}
|
||||
|
||||
/// <summary>Capture metadata embedded at the top of the benchmark JSON document.</summary>
internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge.Reporting;
|
||||
|
||||
/// <summary>
/// Serializes benchmark scenario reports (plus capture metadata) to an indented
/// JSON document on disk.
/// </summary>
internal static class BenchmarkJsonWriter
{
    // Web defaults (camelCase names) with indentation; null properties are omitted,
    // so baseline-less scenarios simply lack a "baseline" object.
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
    };

    /// <summary>
    /// Writes <paramref name="reports"/> with <paramref name="metadata"/> to
    /// <paramref name="path"/>, creating the parent directory if needed.
    /// </summary>
    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());

        // FileMode.Create truncates any existing file; FileShare.None blocks concurrent writers.
        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    // Maps one report (result + optional baseline + regression ratios) onto the JSON shape.
    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;
        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.ObservationCount,
            report.Result.AliasGroups,
            report.Result.LinksetCount,
            report.Result.TotalStatistics.MeanMs,
            report.Result.TotalStatistics.P95Ms,
            report.Result.TotalStatistics.MaxMs,
            report.Result.InsertStatistics.MeanMs,
            report.Result.CorrelationStatistics.MeanMs,
            report.Result.TotalThroughputStatistics.MeanPerSecond,
            report.Result.TotalThroughputStatistics.MinPerSecond,
            report.Result.InsertThroughputStatistics.MeanPerSecond,
            report.Result.InsertThroughputStatistics.MinPerSecond,
            report.Result.AllocationStatistics.MaxAllocatedMb,
            report.Result.ThresholdMs,
            report.Result.MinThroughputThresholdPerSecond,
            report.Result.MinMongoThroughputThresholdPerSecond,
            report.Result.MaxAllocatedThresholdMb,
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.Observations,
                    baseline.Aliases,
                    baseline.Linksets,
                    baseline.MeanTotalMs,
                    baseline.P95TotalMs,
                    baseline.MaxTotalMs,
                    baseline.MeanInsertMs,
                    baseline.MeanCorrelationMs,
                    baseline.MeanThroughputPerSecond,
                    baseline.MinThroughputPerSecond,
                    baseline.MeanMongoThroughputPerSecond,
                    baseline.MinMongoThroughputPerSecond,
                    baseline.MaxAllocatedMb),
            new BenchmarkJsonScenarioRegression(
                report.DurationRegressionRatio,
                report.ThroughputRegressionRatio,
                report.MongoThroughputRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    // --- Private JSON document shapes; property names become camelCase on the wire. ---

    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int Observations,
        int Aliases,
        int Linksets,
        double MeanTotalMs,
        double P95TotalMs,
        double MaxTotalMs,
        double MeanInsertMs,
        double MeanCorrelationMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MeanMongoThroughputPerSecond,
        double MinMongoThroughputPerSecond,
        double MaxAllocatedMb,
        double? ThresholdMs,
        double? MinThroughputThresholdPerSecond,
        double? MinMongoThroughputThresholdPerSecond,
        double? MaxAllocatedThresholdMb,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int Observations,
        int Aliases,
        int Linksets,
        double MeanTotalMs,
        double P95TotalMs,
        double MaxTotalMs,
        double MeanInsertMs,
        double MeanCorrelationMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MeanMongoThroughputPerSecond,
        double MinMongoThroughputPerSecond,
        double MaxAllocatedMb);

    private sealed record BenchmarkJsonScenarioRegression(
        double? DurationRatio,
        double? ThroughputRatio,
        double? MongoThroughputRatio,
        double Limit,
        bool Breached);
}
|
||||
|
||||
/// <summary>Capture metadata embedded at the top of the benchmark JSON document.</summary>
internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);
|
||||
|
||||
@@ -1,89 +1,89 @@
|
||||
using StellaOps.Bench.LinkNotMerge.Baseline;
|
||||
|
||||
namespace StellaOps.Bench.LinkNotMerge.Reporting;
|
||||
|
||||
/// <summary>
/// Compares one scenario result against its recorded baseline and flags regressions
/// whose ratio meets or exceeds a configurable multiplier (default 1.15x).
/// </summary>
internal sealed class BenchmarkScenarioReport
{
    private const double DefaultRegressionLimit = 1.15d;

    public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        Result = result ?? throw new ArgumentNullException(nameof(result));
        Baseline = baseline;
        // Missing or non-positive limits fall back to the default multiplier.
        RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
        // Duration regresses when current/baseline grows; throughput regresses when
        // baseline/current grows (inverse ratio), so both breach at ratio >= limit.
        DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs);
        ThroughputRegressionRatio = CalculateInverseRatio(result.TotalThroughputStatistics.MinPerSecond, baseline?.MinThroughputPerSecond);
        MongoThroughputRegressionRatio = CalculateInverseRatio(result.InsertThroughputStatistics.MinPerSecond, baseline?.MinMongoThroughputPerSecond);
    }

    public ScenarioResult Result { get; }

    // Null when the scenario has no recorded baseline; all regression checks then no-op.
    public BaselineEntry? Baseline { get; }

    // Multiplier at or above which a ratio counts as a regression.
    public double RegressionLimit { get; }

    // current max total duration / baseline max; null without a usable baseline.
    public double? DurationRegressionRatio { get; }

    // baseline min throughput / current min; null without a usable baseline.
    public double? ThroughputRegressionRatio { get; }

    // baseline min Mongo throughput / current min; null without a usable baseline.
    public double? MongoThroughputRegressionRatio { get; }

    public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool ThroughputRegressionBreached => ThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool MongoThroughputRegressionBreached => MongoThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;

    // True when any of the three individual checks breached.
    public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached || MongoThroughputRegressionBreached;

    /// <summary>
    /// Yields one human-readable message per breached regression; empty when no baseline exists.
    /// NOTE(review): for throughput messages, delta is (baseline/current - 1), i.e. the drop
    /// expressed relative to the CURRENT value rather than the baseline — confirm intended.
    /// </summary>
    public IEnumerable<string> BuildRegressionFailureMessages()
    {
        if (Baseline is null)
        {
            yield break;
        }

        if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
        {
            var delta = (durationRatio - 1d) * 100d;
            yield return $"{Result.Id} exceeded max duration budget: {Result.TotalStatistics.MaxMs:F2} ms vs baseline {Baseline.MaxTotalMs:F2} ms (+{delta:F1}%).";
        }

        if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio)
        {
            var delta = (throughputRatio - 1d) * 100d;
            yield return $"{Result.Id} throughput regressed: min {Result.TotalThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinThroughputPerSecond:N0} obs/s (-{delta:F1}%).";
        }

        if (MongoThroughputRegressionBreached && MongoThroughputRegressionRatio is { } mongoRatio)
        {
            var delta = (mongoRatio - 1d) * 100d;
            yield return $"{Result.Id} Mongo throughput regressed: min {Result.InsertThroughputStatistics.MinPerSecond:N0} ops/s vs baseline {Baseline.MinMongoThroughputPerSecond:N0} ops/s (-{delta:F1}%).";
        }
    }

    // Returns current/baseline, or null when the baseline is missing or non-positive.
    private static double? CalculateRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        return current / baseline.Value;
    }

    // Returns baseline/current; +infinity when current is zero or negative
    // (total collapse always breaches), null when the baseline is missing/non-positive.
    private static double? CalculateInverseRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        if (current <= 0d)
        {
            return double.PositiveInfinity;
        }

        return baseline.Value / current;
    }
}
|
||||
using StellaOps.Bench.LinkNotMerge.Baseline;

namespace StellaOps.Bench.LinkNotMerge.Reporting;

/// <summary>
/// Compares one scenario's results against its stored baseline entry and
/// exposes regression ratios plus pass/fail flags for the benchmark gates.
/// </summary>
internal sealed class BenchmarkScenarioReport
{
    // A metric may regress by up to 15% before its gate trips.
    private const double DefaultRegressionLimit = 1.15d;

    public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        Result = result ?? throw new ArgumentNullException(nameof(result));
        Baseline = baseline;

        // Non-positive or missing caller limits fall back to the default.
        RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;

        // Duration regresses when it grows; throughput regresses when it shrinks,
        // hence the inverse ratio for the two throughput metrics.
        DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs);
        ThroughputRegressionRatio = CalculateInverseRatio(result.TotalThroughputStatistics.MinPerSecond, baseline?.MinThroughputPerSecond);
        MongoThroughputRegressionRatio = CalculateInverseRatio(result.InsertThroughputStatistics.MinPerSecond, baseline?.MinMongoThroughputPerSecond);
    }

    public ScenarioResult Result { get; }

    public BaselineEntry? Baseline { get; }

    /// <summary>Ratio at or above which a metric counts as breached.</summary>
    public double RegressionLimit { get; }

    /// <summary>Current max duration / baseline max duration; null without a usable baseline.</summary>
    public double? DurationRegressionRatio { get; }

    /// <summary>Baseline min throughput / current min throughput; null without a usable baseline.</summary>
    public double? ThroughputRegressionRatio { get; }

    /// <summary>Baseline min Mongo throughput / current min Mongo throughput; null without a usable baseline.</summary>
    public double? MongoThroughputRegressionRatio { get; }

    public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool ThroughputRegressionBreached => ThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;

    public bool MongoThroughputRegressionBreached => MongoThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;

    /// <summary>True when any individual gate tripped.</summary>
    public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached || MongoThroughputRegressionBreached;

    /// <summary>
    /// Yields one human-readable message per breached gate; empty without a baseline.
    /// </summary>
    public IEnumerable<string> BuildRegressionFailureMessages()
    {
        if (Baseline is null)
        {
            yield break;
        }

        if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
        {
            var durationDelta = (durationRatio - 1d) * 100d;
            yield return $"{Result.Id} exceeded max duration budget: {Result.TotalStatistics.MaxMs:F2} ms vs baseline {Baseline.MaxTotalMs:F2} ms (+{durationDelta:F1}%).";
        }

        if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio)
        {
            var throughputDelta = (throughputRatio - 1d) * 100d;
            yield return $"{Result.Id} throughput regressed: min {Result.TotalThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinThroughputPerSecond:N0} obs/s (-{throughputDelta:F1}%).";
        }

        if (MongoThroughputRegressionBreached && MongoThroughputRegressionRatio is { } mongoRatio)
        {
            var mongoDelta = (mongoRatio - 1d) * 100d;
            yield return $"{Result.Id} Mongo throughput regressed: min {Result.InsertThroughputStatistics.MinPerSecond:N0} ops/s vs baseline {Baseline.MinMongoThroughputPerSecond:N0} ops/s (-{mongoDelta:F1}%).";
        }
    }

    // Current / baseline; null when the baseline is absent or non-positive.
    private static double? CalculateRatio(double current, double? baseline)
    {
        if (baseline is not { } b || b <= 0d)
        {
            return null;
        }

        return current / b;
    }

    // Baseline / current; null without a usable baseline, +Inf when current is non-positive.
    private static double? CalculateInverseRatio(double current, double? baseline)
    {
        if (baseline is not { } b || b <= 0d)
        {
            return null;
        }

        return current <= 0d ? double.PositiveInfinity : b / current;
    }
}
|
||||
|
||||
@@ -1,101 +1,101 @@
|
||||
using System.Globalization;
using System.Text;

namespace StellaOps.Bench.LinkNotMerge.Reporting;

/// <summary>
/// Serializes benchmark scenario reports into the Prometheus text exposition format.
/// </summary>
internal static class PrometheusWriter
{
    /// <summary>
    /// Writes all metrics for <paramref name="reports"/> to <paramref name="path"/>,
    /// creating the target directory when necessary.
    /// </summary>
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var buffer = new StringBuilder();
        AppendHeaders(buffer);

        foreach (var report in reports)
        {
            AppendScenario(buffer, report);
        }

        File.WriteAllText(resolved, buffer.ToString(), Encoding.UTF8);
    }

    // Emits the static HELP/TYPE preamble once per file.
    private static void AppendHeaders(StringBuilder buffer)
    {
        var headerLines = new[]
        {
            "# HELP linknotmerge_bench_total_ms Link-Not-Merge benchmark total duration metrics (milliseconds).",
            "# TYPE linknotmerge_bench_total_ms gauge",
            "# HELP linknotmerge_bench_correlation_ms Link-Not-Merge benchmark correlation duration metrics (milliseconds).",
            "# TYPE linknotmerge_bench_correlation_ms gauge",
            "# HELP linknotmerge_bench_insert_ms Link-Not-Merge benchmark Mongo insert duration metrics (milliseconds).",
            "# TYPE linknotmerge_bench_insert_ms gauge",
            "# HELP linknotmerge_bench_throughput_per_sec Link-Not-Merge benchmark throughput metrics (observations per second).",
            "# TYPE linknotmerge_bench_throughput_per_sec gauge",
            "# HELP linknotmerge_bench_mongo_throughput_per_sec Link-Not-Merge benchmark Mongo throughput metrics (operations per second).",
            "# TYPE linknotmerge_bench_mongo_throughput_per_sec gauge",
            "# HELP linknotmerge_bench_allocated_mb Link-Not-Merge benchmark allocation metrics (megabytes).",
            "# TYPE linknotmerge_bench_allocated_mb gauge",
        };

        foreach (var line in headerLines)
        {
            buffer.AppendLine(line);
        }
    }

    // Emits every metric series for a single scenario, in a stable order.
    private static void AppendScenario(StringBuilder buffer, BenchmarkScenarioReport report)
    {
        var result = report.Result;
        var scenario = Escape(result.Id);

        AppendMetric(buffer, "linknotmerge_bench_mean_total_ms", scenario, result.TotalStatistics.MeanMs);
        AppendMetric(buffer, "linknotmerge_bench_p95_total_ms", scenario, result.TotalStatistics.P95Ms);
        AppendMetric(buffer, "linknotmerge_bench_max_total_ms", scenario, result.TotalStatistics.MaxMs);
        AppendMetric(buffer, "linknotmerge_bench_threshold_ms", scenario, result.ThresholdMs);

        AppendMetric(buffer, "linknotmerge_bench_mean_correlation_ms", scenario, result.CorrelationStatistics.MeanMs);
        AppendMetric(buffer, "linknotmerge_bench_mean_insert_ms", scenario, result.InsertStatistics.MeanMs);

        AppendMetric(buffer, "linknotmerge_bench_mean_throughput_per_sec", scenario, result.TotalThroughputStatistics.MeanPerSecond);
        AppendMetric(buffer, "linknotmerge_bench_min_throughput_per_sec", scenario, result.TotalThroughputStatistics.MinPerSecond);
        AppendMetric(buffer, "linknotmerge_bench_throughput_floor_per_sec", scenario, result.MinThroughputThresholdPerSecond);

        AppendMetric(buffer, "linknotmerge_bench_mean_mongo_throughput_per_sec", scenario, result.InsertThroughputStatistics.MeanPerSecond);
        AppendMetric(buffer, "linknotmerge_bench_min_mongo_throughput_per_sec", scenario, result.InsertThroughputStatistics.MinPerSecond);
        AppendMetric(buffer, "linknotmerge_bench_mongo_throughput_floor_per_sec", scenario, result.MinMongoThroughputThresholdPerSecond);

        AppendMetric(buffer, "linknotmerge_bench_max_allocated_mb", scenario, result.AllocationStatistics.MaxAllocatedMb);
        AppendMetric(buffer, "linknotmerge_bench_max_allocated_threshold_mb", scenario, result.MaxAllocatedThresholdMb);

        if (report.Baseline is { } baseline)
        {
            AppendMetric(buffer, "linknotmerge_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs);
            AppendMetric(buffer, "linknotmerge_bench_baseline_min_throughput_per_sec", scenario, baseline.MinThroughputPerSecond);
            AppendMetric(buffer, "linknotmerge_bench_baseline_min_mongo_throughput_per_sec", scenario, baseline.MinMongoThroughputPerSecond);
        }

        if (report.DurationRegressionRatio is { } durationRatio)
        {
            AppendMetric(buffer, "linknotmerge_bench_duration_regression_ratio", scenario, durationRatio);
        }

        if (report.ThroughputRegressionRatio is { } throughputRatio)
        {
            AppendMetric(buffer, "linknotmerge_bench_throughput_regression_ratio", scenario, throughputRatio);
        }

        if (report.MongoThroughputRegressionRatio is { } mongoRatio)
        {
            AppendMetric(buffer, "linknotmerge_bench_mongo_throughput_regression_ratio", scenario, mongoRatio);
        }

        AppendMetric(buffer, "linknotmerge_bench_regression_limit", scenario, report.RegressionLimit);
        AppendMetric(buffer, "linknotmerge_bench_regression_breached", scenario, report.RegressionBreached ? 1 : 0);
    }

    // Appends `metric{scenario="..."} value`; skipped entirely for absent values.
    private static void AppendMetric(StringBuilder buffer, string metric, string scenario, double? value)
    {
        if (value is not { } measurement)
        {
            return;
        }

        buffer.Append(metric)
            .Append("{scenario=\"")
            .Append(scenario)
            .Append("\"} ")
            .AppendLine(measurement.ToString("G17", CultureInfo.InvariantCulture));
    }

    // Escapes backslashes and quotes for use inside a Prometheus label value.
    private static string Escape(string value) =>
        value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}
|
||||
using System.Globalization;
using System.Text;

namespace StellaOps.Bench.LinkNotMerge.Reporting;

/// <summary>
/// Serializes benchmark scenario reports into the Prometheus text exposition format.
/// </summary>
internal static class PrometheusWriter
{
    /// <summary>
    /// Writes all metrics for <paramref name="reports"/> to <paramref name="path"/>,
    /// creating the target directory when necessary.
    /// </summary>
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var buffer = new StringBuilder();
        AppendHeaders(buffer);

        foreach (var report in reports)
        {
            AppendScenario(buffer, report);
        }

        File.WriteAllText(resolved, buffer.ToString(), Encoding.UTF8);
    }

    // Emits the static HELP/TYPE preamble once per file.
    private static void AppendHeaders(StringBuilder buffer)
    {
        var headerLines = new[]
        {
            "# HELP linknotmerge_bench_total_ms Link-Not-Merge benchmark total duration metrics (milliseconds).",
            "# TYPE linknotmerge_bench_total_ms gauge",
            "# HELP linknotmerge_bench_correlation_ms Link-Not-Merge benchmark correlation duration metrics (milliseconds).",
            "# TYPE linknotmerge_bench_correlation_ms gauge",
            "# HELP linknotmerge_bench_insert_ms Link-Not-Merge benchmark Mongo insert duration metrics (milliseconds).",
            "# TYPE linknotmerge_bench_insert_ms gauge",
            "# HELP linknotmerge_bench_throughput_per_sec Link-Not-Merge benchmark throughput metrics (observations per second).",
            "# TYPE linknotmerge_bench_throughput_per_sec gauge",
            "# HELP linknotmerge_bench_mongo_throughput_per_sec Link-Not-Merge benchmark Mongo throughput metrics (operations per second).",
            "# TYPE linknotmerge_bench_mongo_throughput_per_sec gauge",
            "# HELP linknotmerge_bench_allocated_mb Link-Not-Merge benchmark allocation metrics (megabytes).",
            "# TYPE linknotmerge_bench_allocated_mb gauge",
        };

        foreach (var line in headerLines)
        {
            buffer.AppendLine(line);
        }
    }

    // Emits every metric series for a single scenario, in a stable order.
    private static void AppendScenario(StringBuilder buffer, BenchmarkScenarioReport report)
    {
        var result = report.Result;
        var scenario = Escape(result.Id);

        AppendMetric(buffer, "linknotmerge_bench_mean_total_ms", scenario, result.TotalStatistics.MeanMs);
        AppendMetric(buffer, "linknotmerge_bench_p95_total_ms", scenario, result.TotalStatistics.P95Ms);
        AppendMetric(buffer, "linknotmerge_bench_max_total_ms", scenario, result.TotalStatistics.MaxMs);
        AppendMetric(buffer, "linknotmerge_bench_threshold_ms", scenario, result.ThresholdMs);

        AppendMetric(buffer, "linknotmerge_bench_mean_correlation_ms", scenario, result.CorrelationStatistics.MeanMs);
        AppendMetric(buffer, "linknotmerge_bench_mean_insert_ms", scenario, result.InsertStatistics.MeanMs);

        AppendMetric(buffer, "linknotmerge_bench_mean_throughput_per_sec", scenario, result.TotalThroughputStatistics.MeanPerSecond);
        AppendMetric(buffer, "linknotmerge_bench_min_throughput_per_sec", scenario, result.TotalThroughputStatistics.MinPerSecond);
        AppendMetric(buffer, "linknotmerge_bench_throughput_floor_per_sec", scenario, result.MinThroughputThresholdPerSecond);

        AppendMetric(buffer, "linknotmerge_bench_mean_mongo_throughput_per_sec", scenario, result.InsertThroughputStatistics.MeanPerSecond);
        AppendMetric(buffer, "linknotmerge_bench_min_mongo_throughput_per_sec", scenario, result.InsertThroughputStatistics.MinPerSecond);
        AppendMetric(buffer, "linknotmerge_bench_mongo_throughput_floor_per_sec", scenario, result.MinMongoThroughputThresholdPerSecond);

        AppendMetric(buffer, "linknotmerge_bench_max_allocated_mb", scenario, result.AllocationStatistics.MaxAllocatedMb);
        AppendMetric(buffer, "linknotmerge_bench_max_allocated_threshold_mb", scenario, result.MaxAllocatedThresholdMb);

        if (report.Baseline is { } baseline)
        {
            AppendMetric(buffer, "linknotmerge_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs);
            AppendMetric(buffer, "linknotmerge_bench_baseline_min_throughput_per_sec", scenario, baseline.MinThroughputPerSecond);
            AppendMetric(buffer, "linknotmerge_bench_baseline_min_mongo_throughput_per_sec", scenario, baseline.MinMongoThroughputPerSecond);
        }

        if (report.DurationRegressionRatio is { } durationRatio)
        {
            AppendMetric(buffer, "linknotmerge_bench_duration_regression_ratio", scenario, durationRatio);
        }

        if (report.ThroughputRegressionRatio is { } throughputRatio)
        {
            AppendMetric(buffer, "linknotmerge_bench_throughput_regression_ratio", scenario, throughputRatio);
        }

        if (report.MongoThroughputRegressionRatio is { } mongoRatio)
        {
            AppendMetric(buffer, "linknotmerge_bench_mongo_throughput_regression_ratio", scenario, mongoRatio);
        }

        AppendMetric(buffer, "linknotmerge_bench_regression_limit", scenario, report.RegressionLimit);
        AppendMetric(buffer, "linknotmerge_bench_regression_breached", scenario, report.RegressionBreached ? 1 : 0);
    }

    // Appends `metric{scenario="..."} value`; skipped entirely for absent values.
    private static void AppendMetric(StringBuilder buffer, string metric, string scenario, double? value)
    {
        if (value is not { } measurement)
        {
            return;
        }

        buffer.Append(metric)
            .Append("{scenario=\"")
            .Append(scenario)
            .Append("\"} ")
            .AppendLine(measurement.ToString("G17", CultureInfo.InvariantCulture));
    }

    // Escapes backslashes and quotes for use inside a Prometheus label value.
    private static string Escape(string value) =>
        value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}
|
||||
|
||||
@@ -1,14 +1,14 @@
|
||||
namespace StellaOps.Bench.LinkNotMerge;

/// <summary>
/// Raw per-iteration measurements captured while executing one benchmark scenario.
/// </summary>
/// <param name="TotalDurationsMs">End-to-end duration of each iteration, in milliseconds.</param>
/// <param name="InsertDurationsMs">Mongo insert duration of each iteration, in milliseconds.</param>
/// <param name="CorrelationDurationsMs">Correlation-phase duration of each iteration, in milliseconds.</param>
/// <param name="AllocatedMb">Managed allocations per iteration, in megabytes.</param>
/// <param name="TotalThroughputsPerSecond">Overall throughput per iteration (observations per second).</param>
/// <param name="InsertThroughputsPerSecond">Insert throughput per iteration (operations per second).</param>
/// <param name="ObservationCount">Observations processed in the scenario workload.</param>
/// <param name="AliasGroups">Alias groups present in the workload.</param>
/// <param name="LinksetCount">Linksets produced by the run.</param>
/// <param name="TenantCount">Tenants in the workload — presumably from scenario config; confirm against caller.</param>
/// <param name="AggregationResult">Final linkset aggregation output of the run.</param>
internal sealed record ScenarioExecutionResult(
    IReadOnlyList<double> TotalDurationsMs,
    IReadOnlyList<double> InsertDurationsMs,
    IReadOnlyList<double> CorrelationDurationsMs,
    IReadOnlyList<double> AllocatedMb,
    IReadOnlyList<double> TotalThroughputsPerSecond,
    IReadOnlyList<double> InsertThroughputsPerSecond,
    int ObservationCount,
    int AliasGroups,
    int LinksetCount,
    int TenantCount,
    LinksetAggregationResult AggregationResult);
|
||||
namespace StellaOps.Bench.LinkNotMerge;

/// <summary>
/// Raw per-iteration measurements captured while executing one benchmark scenario.
/// </summary>
/// <param name="TotalDurationsMs">End-to-end duration of each iteration, in milliseconds.</param>
/// <param name="InsertDurationsMs">Mongo insert duration of each iteration, in milliseconds.</param>
/// <param name="CorrelationDurationsMs">Correlation-phase duration of each iteration, in milliseconds.</param>
/// <param name="AllocatedMb">Managed allocations per iteration, in megabytes.</param>
/// <param name="TotalThroughputsPerSecond">Overall throughput per iteration (observations per second).</param>
/// <param name="InsertThroughputsPerSecond">Insert throughput per iteration (operations per second).</param>
/// <param name="ObservationCount">Observations processed in the scenario workload.</param>
/// <param name="AliasGroups">Alias groups present in the workload.</param>
/// <param name="LinksetCount">Linksets produced by the run.</param>
/// <param name="TenantCount">Tenants in the workload — presumably from scenario config; confirm against caller.</param>
/// <param name="AggregationResult">Final linkset aggregation output of the run.</param>
internal sealed record ScenarioExecutionResult(
    IReadOnlyList<double> TotalDurationsMs,
    IReadOnlyList<double> InsertDurationsMs,
    IReadOnlyList<double> CorrelationDurationsMs,
    IReadOnlyList<double> AllocatedMb,
    IReadOnlyList<double> TotalThroughputsPerSecond,
    IReadOnlyList<double> InsertThroughputsPerSecond,
    int ObservationCount,
    int AliasGroups,
    int LinksetCount,
    int TenantCount,
    LinksetAggregationResult AggregationResult);
|
||||
|
||||
@@ -1,42 +1,42 @@
|
||||
using System.Globalization;

namespace StellaOps.Bench.LinkNotMerge;

/// <summary>
/// Aggregated measurements for one benchmark scenario plus the fixed-width
/// column renderings used by the console summary table.
/// </summary>
internal sealed record ScenarioResult(
    string Id,
    string Label,
    int Iterations,
    int ObservationCount,
    int AliasGroups,
    int LinksetCount,
    DurationStatistics TotalStatistics,
    DurationStatistics InsertStatistics,
    DurationStatistics CorrelationStatistics,
    ThroughputStatistics TotalThroughputStatistics,
    ThroughputStatistics InsertThroughputStatistics,
    AllocationStatistics AllocationStatistics,
    double? ThresholdMs,
    double? MinThroughputThresholdPerSecond,
    double? MinMongoThroughputThresholdPerSecond,
    double? MaxAllocatedThresholdMb)
{
    /// <summary>Scenario id truncated or padded to a 28-character column.</summary>
    public string IdColumn => Id.Length > 28 ? Id[..28] : Id.PadRight(28);

    public string ObservationsColumn => Count(ObservationCount, 12);

    public string AliasColumn => Count(AliasGroups, 8);

    public string LinksetColumn => Count(LinksetCount, 9);

    public string TotalMeanColumn => Fixed(TotalStatistics.MeanMs, 10);

    public string CorrelationMeanColumn => Fixed(CorrelationStatistics.MeanMs, 10);

    public string InsertMeanColumn => Fixed(InsertStatistics.MeanMs, 10);

    // Throughput columns are rendered in thousands per second.
    public string ThroughputColumn => Fixed(TotalThroughputStatistics.MinPerSecond / 1_000d, 11);

    public string MongoThroughputColumn => Fixed(InsertThroughputStatistics.MinPerSecond / 1_000d, 11);

    public string AllocatedColumn => Fixed(AllocationStatistics.MaxAllocatedMb, 9);

    // Right-aligned integer with thousands separators (invariant culture).
    private static string Count(int value, int width) => value.ToString("N0", CultureInfo.InvariantCulture).PadLeft(width);

    // Right-aligned fixed-point value with two decimals (invariant culture).
    private static string Fixed(double value, int width) => value.ToString("F2", CultureInfo.InvariantCulture).PadLeft(width);
}
|
||||
using System.Globalization;

namespace StellaOps.Bench.LinkNotMerge;

/// <summary>
/// Aggregated measurements for one benchmark scenario plus the fixed-width
/// column renderings used by the console summary table.
/// </summary>
internal sealed record ScenarioResult(
    string Id,
    string Label,
    int Iterations,
    int ObservationCount,
    int AliasGroups,
    int LinksetCount,
    DurationStatistics TotalStatistics,
    DurationStatistics InsertStatistics,
    DurationStatistics CorrelationStatistics,
    ThroughputStatistics TotalThroughputStatistics,
    ThroughputStatistics InsertThroughputStatistics,
    AllocationStatistics AllocationStatistics,
    double? ThresholdMs,
    double? MinThroughputThresholdPerSecond,
    double? MinMongoThroughputThresholdPerSecond,
    double? MaxAllocatedThresholdMb)
{
    /// <summary>Scenario id truncated or padded to a 28-character column.</summary>
    public string IdColumn => Id.Length > 28 ? Id[..28] : Id.PadRight(28);

    public string ObservationsColumn => Count(ObservationCount, 12);

    public string AliasColumn => Count(AliasGroups, 8);

    public string LinksetColumn => Count(LinksetCount, 9);

    public string TotalMeanColumn => Fixed(TotalStatistics.MeanMs, 10);

    public string CorrelationMeanColumn => Fixed(CorrelationStatistics.MeanMs, 10);

    public string InsertMeanColumn => Fixed(InsertStatistics.MeanMs, 10);

    // Throughput columns are rendered in thousands per second.
    public string ThroughputColumn => Fixed(TotalThroughputStatistics.MinPerSecond / 1_000d, 11);

    public string MongoThroughputColumn => Fixed(InsertThroughputStatistics.MinPerSecond / 1_000d, 11);

    public string AllocatedColumn => Fixed(AllocationStatistics.MaxAllocatedMb, 9);

    // Right-aligned integer with thousands separators (invariant culture).
    private static string Count(int value, int width) => value.ToString("N0", CultureInfo.InvariantCulture).PadLeft(width);

    // Right-aligned fixed-point value with two decimals (invariant culture).
    private static string Fixed(double value, int width) => value.ToString("F2", CultureInfo.InvariantCulture).PadLeft(width);
}
|
||||
|
||||
@@ -1,84 +1,84 @@
|
||||
namespace StellaOps.Bench.LinkNotMerge;

/// <summary>
/// Duration summary (mean / p95 / max, milliseconds) over a sample set.
/// </summary>
internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
    /// <summary>Builds the summary; all-zero when <paramref name="values"/> is empty.</summary>
    public static DurationStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new DurationStatistics(0, 0, 0);
        }

        var ordered = values.ToArray();
        Array.Sort(ordered);

        return new DurationStatistics(values.Average(), Percentile(ordered, 95), ordered[^1]);
    }

    // Linear-interpolated percentile over an ascending-sorted sample set.
    private static double Percentile(IReadOnlyList<double> sorted, double percentile)
    {
        if (sorted.Count == 0)
        {
            return 0;
        }

        var rank = (percentile / 100d) * (sorted.Count - 1);
        var lowerIndex = (int)Math.Floor(rank);
        var upperIndex = (int)Math.Ceiling(rank);

        if (upperIndex >= sorted.Count)
        {
            return sorted[lowerIndex];
        }

        var fraction = rank - lowerIndex;
        return sorted[lowerIndex] + fraction * (sorted[upperIndex] - sorted[lowerIndex]);
    }
}

/// <summary>
/// Throughput summary (mean and worst-case per-second rates).
/// </summary>
internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
    /// <summary>Builds the summary; all-zero when <paramref name="values"/> is empty.</summary>
    public static ThroughputStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new ThroughputStatistics(0, 0);
        }

        return new ThroughputStatistics(values.Average(), values.Min());
    }
}

/// <summary>
/// Peak allocation summary in megabytes.
/// </summary>
internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
    /// <summary>Largest sample, or zero for an empty set.</summary>
    public static AllocationStatistics From(IReadOnlyList<double> values)
    {
        var peak = 0d;
        foreach (var sample in values)
        {
            peak = Math.Max(peak, sample);
        }

        return new AllocationStatistics(peak);
    }
}
|
||||
namespace StellaOps.Bench.LinkNotMerge;

/// <summary>
/// Duration summary (mean / p95 / max, milliseconds) over a sample set.
/// </summary>
internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
    /// <summary>Builds the summary; all-zero when <paramref name="values"/> is empty.</summary>
    public static DurationStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new DurationStatistics(0, 0, 0);
        }

        var ordered = values.ToArray();
        Array.Sort(ordered);

        return new DurationStatistics(values.Average(), Percentile(ordered, 95), ordered[^1]);
    }

    // Linear-interpolated percentile over an ascending-sorted sample set.
    private static double Percentile(IReadOnlyList<double> sorted, double percentile)
    {
        if (sorted.Count == 0)
        {
            return 0;
        }

        var rank = (percentile / 100d) * (sorted.Count - 1);
        var lowerIndex = (int)Math.Floor(rank);
        var upperIndex = (int)Math.Ceiling(rank);

        if (upperIndex >= sorted.Count)
        {
            return sorted[lowerIndex];
        }

        var fraction = rank - lowerIndex;
        return sorted[lowerIndex] + fraction * (sorted[upperIndex] - sorted[lowerIndex]);
    }
}

/// <summary>
/// Throughput summary (mean and worst-case per-second rates).
/// </summary>
internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
    /// <summary>Builds the summary; all-zero when <paramref name="values"/> is empty.</summary>
    public static ThroughputStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new ThroughputStatistics(0, 0);
        }

        return new ThroughputStatistics(values.Average(), values.Min());
    }
}

/// <summary>
/// Peak allocation summary in megabytes.
/// </summary>
internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
    /// <summary>Largest sample, or zero for an empty set.</summary>
    public static AllocationStatistics From(IReadOnlyList<double> values)
    {
        var peak = 0d;
        foreach (var sample in values)
        {
            peak = Math.Max(peak, sample);
        }

        return new AllocationStatistics(peak);
    }
}
|
||||
|
||||
Reference in New Issue
Block a user