Restructure solution layout by module

This commit is contained in:
master
2025-10-28 15:10:40 +02:00
parent 95daa159c4
commit d870da18ce
4103 changed files with 192899 additions and 187024 deletions

View File

@@ -0,0 +1,26 @@
# Link-Not-Merge Bench
Synthetic workload that measures advisory observation ingestion and linkset correlation throughput for the Link-Not-Merge program.
## Scenarios
`config.json` defines three scenarios that vary observation volume, alias density, and correlation fan-out; an example entry follows the list below. Each scenario captures:
- Total latency (ingest + correlation) as mean, p95, and max
- Insert latency against an ephemeral MongoDB instance
- Correlator-only latency, tracking fan-out costs
- Observation and Mongo insert throughput (ops/sec)
- Peak managed heap allocations
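For reference, here is the baseline scenario entry from `config.json`; the field names map one-to-one onto `LinkNotMergeScenarioConfig`:
```json
{
  "id": "lnm_ingest_baseline",
  "label": "5k observations, 500 aliases",
  "observations": 5000,
  "aliasGroups": 500,
  "purlsPerObservation": 4,
  "cpesPerObservation": 2,
  "referencesPerObservation": 3,
  "tenants": 4,
  "batchSize": 250,
  "seed": 42022,
  "thresholdMs": 900,
  "minThroughputPerSecond": 5500,
  "minMongoThroughputPerSecond": 8000,
  "maxAllocatedMb": 160
}
```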
## Running locally
```bash
dotnet run \
--project src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/StellaOps.Bench.LinkNotMerge.csproj \
-- \
--csv out/linknotmerge-bench.csv \
--json out/linknotmerge-bench.json \
--prometheus out/linknotmerge-bench.prom
```
The benchmark exits with a non-zero status if total latency exceeds a configured threshold, throughput falls below its floor, Mongo insert throughput falls below its floor, allocations exceed the ceiling, or any regression ratio breaches the baseline limit.
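Thresholds and the regression baseline can also be supplied on the command line. A sketch, assuming the repo-relative layout above (the default `baseline.csv` sits next to `config.json` in the bench root):
```bash
dotnet run \
  --project src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/StellaOps.Bench.LinkNotMerge.csproj \
  -- \
  --baseline src/Bench/StellaOps.Bench/LinkNotMerge/baseline.csv \
  --regression-limit 1.15 \
  --threshold-ms 2000
```
`--regression-limit 1.15` matches the built-in default multiplier; run with `--help` for the full option list.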

View File

@@ -0,0 +1,38 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.LinkNotMerge.Baseline;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Tests;
public sealed class BaselineLoaderTests
{
[Fact]
public async Task LoadAsync_ReadsEntries()
{
var path = Path.GetTempFileName();
try
{
await File.WriteAllTextAsync(
path,
"scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb\n" +
"lnm_ingest_baseline,5,5000,500,450,320.5,340.1,360.9,120.2,210.3,15000.0,13500.0,18000.0,16500.0,96.5\n");
var baseline = await BaselineLoader.LoadAsync(path, CancellationToken.None);
var entry = Assert.Single(baseline);
Assert.Equal("lnm_ingest_baseline", entry.Key);
Assert.Equal(5, entry.Value.Iterations);
Assert.Equal(5000, entry.Value.Observations);
Assert.Equal(500, entry.Value.Aliases);
Assert.Equal(360.9, entry.Value.MaxTotalMs);
Assert.Equal(16500.0, entry.Value.MinMongoThroughputPerSecond);
Assert.Equal(96.5, entry.Value.MaxAllocatedMb);
}
finally
{
File.Delete(path);
}
}
}

View File

@@ -0,0 +1,81 @@
using StellaOps.Bench.LinkNotMerge.Baseline;
using StellaOps.Bench.LinkNotMerge.Reporting;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Tests;
public sealed class BenchmarkScenarioReportTests
{
[Fact]
public void RegressionDetection_FlagsBreaches()
{
var result = new ScenarioResult(
Id: "scenario",
Label: "Scenario",
Iterations: 3,
ObservationCount: 1000,
AliasGroups: 100,
LinksetCount: 90,
TotalStatistics: new DurationStatistics(200, 240, 260),
InsertStatistics: new DurationStatistics(80, 90, 100),
CorrelationStatistics: new DurationStatistics(120, 150, 170),
TotalThroughputStatistics: new ThroughputStatistics(8000, 7000),
InsertThroughputStatistics: new ThroughputStatistics(9000, 8000),
AllocationStatistics: new AllocationStatistics(120),
ThresholdMs: null,
MinThroughputThresholdPerSecond: null,
MinMongoThroughputThresholdPerSecond: null,
MaxAllocatedThresholdMb: null);
var baseline = new BaselineEntry(
ScenarioId: "scenario",
Iterations: 3,
Observations: 1000,
Aliases: 100,
Linksets: 90,
MeanTotalMs: 150,
P95TotalMs: 170,
MaxTotalMs: 180,
MeanInsertMs: 60,
MeanCorrelationMs: 90,
MeanThroughputPerSecond: 9000,
MinThroughputPerSecond: 8500,
MeanMongoThroughputPerSecond: 10000,
MinMongoThroughputPerSecond: 9500,
MaxAllocatedMb: 100);
var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1);
Assert.True(report.DurationRegressionBreached);
Assert.True(report.ThroughputRegressionBreached);
Assert.True(report.MongoThroughputRegressionBreached);
Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("max duration"));
}
[Fact]
public void RegressionDetection_NoBaseline_NoBreaches()
{
var result = new ScenarioResult(
Id: "scenario",
Label: "Scenario",
Iterations: 3,
ObservationCount: 1000,
AliasGroups: 100,
LinksetCount: 90,
TotalStatistics: new DurationStatistics(200, 220, 230),
InsertStatistics: new DurationStatistics(90, 100, 110),
CorrelationStatistics: new DurationStatistics(110, 120, 130),
TotalThroughputStatistics: new ThroughputStatistics(8000, 7900),
InsertThroughputStatistics: new ThroughputStatistics(9000, 8900),
AllocationStatistics: new AllocationStatistics(64),
ThresholdMs: null,
MinThroughputThresholdPerSecond: null,
MinMongoThroughputThresholdPerSecond: null,
MaxAllocatedThresholdMb: null);
var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null);
Assert.False(report.RegressionBreached);
Assert.Empty(report.BuildRegressionFailureMessages());
}
}

View File

@@ -0,0 +1,38 @@
using System.Linq;
using System.Threading;
using StellaOps.Bench.LinkNotMerge.Baseline;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Tests;
public sealed class LinkNotMergeScenarioRunnerTests
{
[Fact]
public void Execute_BuildsDeterministicAggregation()
{
var config = new LinkNotMergeScenarioConfig
{
Id = "unit",
Observations = 120,
AliasGroups = 24,
PurlsPerObservation = 3,
CpesPerObservation = 2,
ReferencesPerObservation = 2,
Tenants = 3,
BatchSize = 40,
Seed = 1337,
};
var runner = new LinkNotMergeScenarioRunner(config);
var result = runner.Execute(iterations: 2, CancellationToken.None);
Assert.Equal(120, result.ObservationCount);
Assert.Equal(24, result.AliasGroups);
Assert.True(result.TotalDurationsMs.All(value => value > 0));
Assert.True(result.InsertThroughputsPerSecond.All(value => value > 0));
Assert.True(result.TotalThroughputsPerSecond.All(value => value > 0));
Assert.True(result.AllocatedMb.All(value => value >= 0));
Assert.Equal(result.AggregationResult.LinksetCount, result.LinksetCount);
Assert.Equal(result.AggregationResult.ObservationCount, result.ObservationCount);
}
}

View File

@@ -0,0 +1,28 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<UseConcelierTestInfra>false</UseConcelierTestInfra>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" Version="6.0.4">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="EphemeralMongo" Version="3.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Bench.LinkNotMerge\StellaOps.Bench.LinkNotMerge.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,18 @@
namespace StellaOps.Bench.LinkNotMerge.Baseline;
internal sealed record BaselineEntry(
string ScenarioId,
int Iterations,
int Observations,
int Aliases,
int Linksets,
double MeanTotalMs,
double P95TotalMs,
double MaxTotalMs,
double MeanInsertMs,
double MeanCorrelationMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MeanMongoThroughputPerSecond,
double MinMongoThroughputPerSecond,
double MaxAllocatedMb);

View File

@@ -0,0 +1,87 @@
using System.Globalization;
namespace StellaOps.Bench.LinkNotMerge.Baseline;
internal static class BaselineLoader
{
public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
var resolved = Path.GetFullPath(path);
if (!File.Exists(resolved))
{
return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
}
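// A missing baseline file is not an error: the loader returns an empty map and regression comparisons are simply skipped.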
var result = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
using var reader = new StreamReader(stream);
var lineNumber = 0;
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
var line = await reader.ReadLineAsync().ConfigureAwait(false);
if (line is null)
{
break;
}
lineNumber++;
if (lineNumber == 1 || string.IsNullOrWhiteSpace(line))
{
continue;
}
var parts = line.Split(',', StringSplitOptions.TrimEntries);
if (parts.Length < 15)
{
throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 15 columns, found {parts.Length}).");
}
var entry = new BaselineEntry(
ScenarioId: parts[0],
Iterations: ParseInt(parts[1], resolved, lineNumber),
Observations: ParseInt(parts[2], resolved, lineNumber),
Aliases: ParseInt(parts[3], resolved, lineNumber),
Linksets: ParseInt(parts[4], resolved, lineNumber),
MeanTotalMs: ParseDouble(parts[5], resolved, lineNumber),
P95TotalMs: ParseDouble(parts[6], resolved, lineNumber),
MaxTotalMs: ParseDouble(parts[7], resolved, lineNumber),
MeanInsertMs: ParseDouble(parts[8], resolved, lineNumber),
MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber),
MeanThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber),
MinThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber),
MeanMongoThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber),
MinMongoThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber),
MaxAllocatedMb: ParseDouble(parts[14], resolved, lineNumber));
result[entry.ScenarioId] = entry;
}
return result;
}
private static int ParseInt(string value, string file, int line)
{
if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var result))
{
return result;
}
throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
}
private static double ParseDouble(string value, string file, int line)
{
if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result))
{
return result;
}
throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
}
}

View File

@@ -0,0 +1,210 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.LinkNotMerge;
internal sealed record BenchmarkConfig(
double? ThresholdMs,
double? MinThroughputPerSecond,
double? MinMongoThroughputPerSecond,
double? MaxAllocatedMb,
int? Iterations,
IReadOnlyList<LinkNotMergeScenarioConfig> Scenarios)
{
public static async Task<BenchmarkConfig> LoadAsync(string path)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
var resolved = Path.GetFullPath(path);
if (!File.Exists(resolved))
{
throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
}
await using var stream = File.OpenRead(resolved);
var model = await JsonSerializer.DeserializeAsync<BenchmarkConfigModel>(
stream,
new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true,
ReadCommentHandling = JsonCommentHandling.Skip,
AllowTrailingCommas = true,
}).ConfigureAwait(false);
if (model is null)
{
throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
}
if (model.Scenarios.Count == 0)
{
throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
}
foreach (var scenario in model.Scenarios)
{
scenario.Validate();
}
return new BenchmarkConfig(
model.ThresholdMs,
model.MinThroughputPerSecond,
model.MinMongoThroughputPerSecond,
model.MaxAllocatedMb,
model.Iterations,
model.Scenarios);
}
private sealed class BenchmarkConfigModel
{
[JsonPropertyName("thresholdMs")]
public double? ThresholdMs { get; init; }
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("minMongoThroughputPerSecond")]
public double? MinMongoThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }
[JsonPropertyName("iterations")]
public int? Iterations { get; init; }
[JsonPropertyName("scenarios")]
public List<LinkNotMergeScenarioConfig> Scenarios { get; init; } = new();
}
}
internal sealed class LinkNotMergeScenarioConfig
{
private const int DefaultObservationCount = 5_000;
private const int DefaultAliasGroups = 500;
private const int DefaultPurlsPerObservation = 4;
private const int DefaultCpesPerObservation = 2;
private const int DefaultReferencesPerObservation = 3;
private const int DefaultTenants = 4;
private const int DefaultBatchSize = 500;
private const int DefaultSeed = 42_022;
[JsonPropertyName("id")]
public string? Id { get; init; }
[JsonPropertyName("label")]
public string? Label { get; init; }
[JsonPropertyName("observations")]
public int? Observations { get; init; }
[JsonPropertyName("aliasGroups")]
public int? AliasGroups { get; init; }
[JsonPropertyName("purlsPerObservation")]
public int? PurlsPerObservation { get; init; }
[JsonPropertyName("cpesPerObservation")]
public int? CpesPerObservation { get; init; }
[JsonPropertyName("referencesPerObservation")]
public int? ReferencesPerObservation { get; init; }
[JsonPropertyName("tenants")]
public int? Tenants { get; init; }
[JsonPropertyName("batchSize")]
public int? BatchSize { get; init; }
[JsonPropertyName("seed")]
public int? Seed { get; init; }
[JsonPropertyName("iterations")]
public int? Iterations { get; init; }
[JsonPropertyName("thresholdMs")]
public double? ThresholdMs { get; init; }
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("minMongoThroughputPerSecond")]
public double? MinMongoThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }
public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "linknotmerge" : Id!.Trim();
public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim();
public int ResolveObservationCount() => Observations.HasValue && Observations.Value > 0
? Observations.Value
: DefaultObservationCount;
public int ResolveAliasGroups() => AliasGroups.HasValue && AliasGroups.Value > 0
? AliasGroups.Value
: DefaultAliasGroups;
public int ResolvePurlsPerObservation() => PurlsPerObservation.HasValue && PurlsPerObservation.Value > 0
? PurlsPerObservation.Value
: DefaultPurlsPerObservation;
public int ResolveCpesPerObservation() => CpesPerObservation.HasValue && CpesPerObservation.Value >= 0
? CpesPerObservation.Value
: DefaultCpesPerObservation;
public int ResolveReferencesPerObservation() => ReferencesPerObservation.HasValue && ReferencesPerObservation.Value >= 0
? ReferencesPerObservation.Value
: DefaultReferencesPerObservation;
public int ResolveTenantCount() => Tenants.HasValue && Tenants.Value > 0
? Tenants.Value
: DefaultTenants;
public int ResolveBatchSize() => BatchSize.HasValue && BatchSize.Value > 0
? BatchSize.Value
: DefaultBatchSize;
public int ResolveSeed() => Seed.HasValue && Seed.Value > 0
? Seed.Value
: DefaultSeed;
public int ResolveIterations(int? defaultIterations)
{
var iterations = Iterations ?? defaultIterations ?? 3;
if (iterations <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires iterations > 0.");
}
return iterations;
}
public void Validate()
{
if (ResolveObservationCount() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires observations > 0.");
}
if (ResolveAliasGroups() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires aliasGroups > 0.");
}
if (ResolvePurlsPerObservation() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires purlsPerObservation > 0.");
}
if (ResolveTenantCount() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires tenants > 0.");
}
if (ResolveBatchSize() > ResolveObservationCount())
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' batchSize cannot exceed observations.");
}
}
}

View File

@@ -0,0 +1,135 @@
using System.Diagnostics;
using EphemeralMongo;
using MongoDB.Bson;
using MongoDB.Driver;
namespace StellaOps.Bench.LinkNotMerge;
internal sealed class LinkNotMergeScenarioRunner
{
private readonly LinkNotMergeScenarioConfig _config;
private readonly IReadOnlyList<ObservationSeed> _seeds;
public LinkNotMergeScenarioRunner(LinkNotMergeScenarioConfig config)
{
_config = config ?? throw new ArgumentNullException(nameof(config));
_seeds = ObservationGenerator.Generate(config);
}
public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
{
if (iterations <= 0)
{
throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
}
var totalDurations = new double[iterations];
var insertDurations = new double[iterations];
var correlationDurations = new double[iterations];
var allocated = new double[iterations];
var totalThroughputs = new double[iterations];
var insertThroughputs = new double[iterations];
LinksetAggregationResult lastAggregation = new(0, 0, 0, 0, 0);
for (var iteration = 0; iteration < iterations; iteration++)
{
cancellationToken.ThrowIfCancellationRequested();
using var runner = MongoRunner.Run(new MongoRunnerOptions
{
UseSingleNodeReplicaSet = false,
});
var client = new MongoClient(runner.ConnectionString);
var database = client.GetDatabase("linknotmerge_bench");
var collection = database.GetCollection<BsonDocument>("advisory_observations");
CreateIndexes(collection, cancellationToken);
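// Allocations are sampled around the full ingest + correlation pass for this iteration.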
var beforeAllocated = GC.GetTotalAllocatedBytes();
var insertStopwatch = Stopwatch.StartNew();
InsertObservations(collection, _seeds, _config.ResolveBatchSize(), cancellationToken);
insertStopwatch.Stop();
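// Correlation phase reads back only the tenant and linkset fields, mimicking the correlator's projection.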
var correlationStopwatch = Stopwatch.StartNew();
var documents = collection
.Find(FilterDefinition<BsonDocument>.Empty)
.Project(Builders<BsonDocument>.Projection
.Include("tenant")
.Include("linkset"))
.ToList(cancellationToken);
var correlator = new LinksetAggregator();
lastAggregation = correlator.Correlate(documents);
correlationStopwatch.Stop();
var totalElapsed = insertStopwatch.Elapsed + correlationStopwatch.Elapsed;
var afterAllocated = GC.GetTotalAllocatedBytes();
totalDurations[iteration] = totalElapsed.TotalMilliseconds;
insertDurations[iteration] = insertStopwatch.Elapsed.TotalMilliseconds;
correlationDurations[iteration] = correlationStopwatch.Elapsed.TotalMilliseconds;
allocated[iteration] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);
var totalSeconds = Math.Max(totalElapsed.TotalSeconds, 0.0001d);
totalThroughputs[iteration] = _seeds.Count / totalSeconds;
var insertSeconds = Math.Max(insertStopwatch.Elapsed.TotalSeconds, 0.0001d);
insertThroughputs[iteration] = _seeds.Count / insertSeconds;
}
return new ScenarioExecutionResult(
totalDurations,
insertDurations,
correlationDurations,
allocated,
totalThroughputs,
insertThroughputs,
ObservationCount: _seeds.Count,
AliasGroups: _config.ResolveAliasGroups(),
LinksetCount: lastAggregation.LinksetCount,
TenantCount: _config.ResolveTenantCount(),
AggregationResult: lastAggregation);
}
private static void InsertObservations(
IMongoCollection<BsonDocument> collection,
IReadOnlyList<ObservationSeed> seeds,
int batchSize,
CancellationToken cancellationToken)
{
for (var offset = 0; offset < seeds.Count; offset += batchSize)
{
cancellationToken.ThrowIfCancellationRequested();
var remaining = Math.Min(batchSize, seeds.Count - offset);
var batch = new List<BsonDocument>(remaining);
for (var index = 0; index < remaining; index++)
{
batch.Add(seeds[offset + index].ToBsonDocument());
}
collection.InsertMany(batch, new InsertManyOptions
{
IsOrdered = false,
BypassDocumentValidation = true,
}, cancellationToken);
}
}
private static void CreateIndexes(IMongoCollection<BsonDocument> collection, CancellationToken cancellationToken)
{
var indexKeys = Builders<BsonDocument>.IndexKeys
.Ascending("tenant")
.Ascending("identifiers.aliases");
try
{
collection.Indexes.CreateOne(new CreateIndexModel<BsonDocument>(indexKeys), cancellationToken: cancellationToken);
}
catch
{
// Index creation failures should not abort the benchmark; they may occur when running multiple iterations concurrently.
}
}
}

View File

@@ -0,0 +1,140 @@
using MongoDB.Bson;
namespace StellaOps.Bench.LinkNotMerge;
internal sealed class LinksetAggregator
{
public LinksetAggregationResult Correlate(IEnumerable<BsonDocument> documents)
{
ArgumentNullException.ThrowIfNull(documents);
var groups = new Dictionary<string, LinksetAccumulator>(StringComparer.Ordinal);
var totalObservations = 0;
foreach (var document in documents)
{
totalObservations++;
var tenant = document.GetValue("tenant", "unknown").AsString;
var linkset = document.GetValue("linkset", new BsonDocument()).AsBsonDocument;
var aliases = linkset.GetValue("aliases", new BsonArray()).AsBsonArray;
var purls = linkset.GetValue("purls", new BsonArray()).AsBsonArray;
var cpes = linkset.GetValue("cpes", new BsonArray()).AsBsonArray;
var references = linkset.GetValue("references", new BsonArray()).AsBsonArray;
foreach (var aliasValue in aliases)
{
if (!aliasValue.IsString)
{
continue;
}
var alias = aliasValue.AsString;
// Composite "tenant|alias" key, built with string.Create to avoid an intermediate concatenation allocation.
var key = string.Create(alias.Length + tenant.Length + 1, (tenant, alias), static (span, data) =>
{
var (tenantValue, aliasText) = data;
tenantValue.AsSpan().CopyTo(span);
span[tenantValue.Length] = '|';
aliasText.AsSpan().CopyTo(span[(tenantValue.Length + 1)..]);
});
if (!groups.TryGetValue(key, out var accumulator))
{
accumulator = new LinksetAccumulator(tenant, alias);
groups[key] = accumulator;
}
accumulator.AddPurls(purls);
accumulator.AddCpes(cpes);
accumulator.AddReferences(references);
}
}
var totalReferences = 0;
var totalPurls = 0;
var totalCpes = 0;
foreach (var accumulator in groups.Values)
{
totalReferences += accumulator.ReferenceCount;
totalPurls += accumulator.PurlCount;
totalCpes += accumulator.CpeCount;
}
return new LinksetAggregationResult(
LinksetCount: groups.Count,
ObservationCount: totalObservations,
TotalPurls: totalPurls,
TotalCpes: totalCpes,
TotalReferences: totalReferences);
}
private sealed class LinksetAccumulator
{
private readonly HashSet<string> _purls = new(StringComparer.Ordinal);
private readonly HashSet<string> _cpes = new(StringComparer.Ordinal);
private readonly HashSet<string> _references = new(StringComparer.Ordinal);
public LinksetAccumulator(string tenant, string alias)
{
Tenant = tenant;
Alias = alias;
}
public string Tenant { get; }
public string Alias { get; }
public int PurlCount => _purls.Count;
public int CpeCount => _cpes.Count;
public int ReferenceCount => _references.Count;
public void AddPurls(BsonArray array)
{
foreach (var item in array)
{
if (item.IsString)
{
_purls.Add(item.AsString);
}
}
}
public void AddCpes(BsonArray array)
{
foreach (var item in array)
{
if (item.IsString)
{
_cpes.Add(item.AsString);
}
}
}
public void AddReferences(BsonArray array)
{
foreach (var item in array)
{
if (!item.IsBsonDocument)
{
continue;
}
var document = item.AsBsonDocument;
if (document.TryGetValue("url", out var urlValue) && urlValue.IsString)
{
_references.Add(urlValue.AsString);
}
}
}
}
}
internal sealed record LinksetAggregationResult(
int LinksetCount,
int ObservationCount,
int TotalPurls,
int TotalCpes,
int TotalReferences);

View File

@@ -0,0 +1,270 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using MongoDB.Bson;
namespace StellaOps.Bench.LinkNotMerge;
internal static class ObservationGenerator
{
public static IReadOnlyList<ObservationSeed> Generate(LinkNotMergeScenarioConfig config)
{
ArgumentNullException.ThrowIfNull(config);
var observationCount = config.ResolveObservationCount();
var aliasGroups = config.ResolveAliasGroups();
var purlsPerObservation = config.ResolvePurlsPerObservation();
var cpesPerObservation = config.ResolveCpesPerObservation();
var referencesPerObservation = config.ResolveReferencesPerObservation();
var tenantCount = config.ResolveTenantCount();
var seed = config.ResolveSeed();
var seeds = new ObservationSeed[observationCount];
var random = new Random(seed);
var baseTime = new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero);
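// Every generated field derives deterministically from the loop index, so a given config always yields identical seeds.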
for (var index = 0; index < observationCount; index++)
{
var tenantIndex = index % tenantCount;
var tenant = $"tenant-{tenantIndex:D2}";
var group = index % aliasGroups;
var revision = index / aliasGroups;
var primaryAlias = $"CVE-2025-{group:D4}";
var vendorAlias = $"VENDOR-{group:D4}";
var thirdAlias = $"GHSA-{group:D4}-{(char)('a' + revision % 26)}{(char)('a' + revision % 26)}";
var aliases = ImmutableArray.Create(primaryAlias, vendorAlias, thirdAlias);
var observationId = $"{tenant}:advisory:{group:D5}:{revision:D6}";
var upstreamId = primaryAlias;
var documentVersion = baseTime.AddMinutes(revision).ToString("O");
var fetchedAt = baseTime.AddSeconds(index % 1_800);
var receivedAt = fetchedAt.AddSeconds(1);
var purls = CreatePurls(group, revision, purlsPerObservation);
var cpes = CreateCpes(group, revision, cpesPerObservation);
var references = CreateReferences(primaryAlias, referencesPerObservation);
var rawPayload = CreateRawPayload(primaryAlias, vendorAlias, purls, cpes, references);
var contentHash = ComputeContentHash(rawPayload, tenant, group, revision);
seeds[index] = new ObservationSeed(
ObservationId: observationId,
Tenant: tenant,
Vendor: "concelier-bench",
Stream: "simulated",
Api: $"https://bench.stella/{group:D4}/{revision:D2}",
CollectorVersion: "1.0.0-bench",
UpstreamId: upstreamId,
DocumentVersion: documentVersion,
FetchedAt: fetchedAt,
ReceivedAt: receivedAt,
ContentHash: contentHash,
Aliases: aliases,
Purls: purls,
Cpes: cpes,
References: references,
ContentFormat: "CSAF",
SpecVersion: "2.0",
RawPayload: rawPayload);
}
return seeds;
}
private static ImmutableArray<string> CreatePurls(int group, int revision, int count)
{
if (count <= 0)
{
return ImmutableArray<string>.Empty;
}
var builder = ImmutableArray.CreateBuilder<string>(count);
for (var index = 0; index < count; index++)
{
var version = $"{revision % 9 + 1}.{index + 1}.{group % 10}";
builder.Add($"pkg:generic/stella/sample-{group:D4}-{index}@{version}");
}
return builder.MoveToImmutable();
}
private static ImmutableArray<string> CreateCpes(int group, int revision, int count)
{
if (count <= 0)
{
return ImmutableArray<string>.Empty;
}
var builder = ImmutableArray.CreateBuilder<string>(count);
for (var index = 0; index < count; index++)
{
var component = $"benchtool{group % 50:D2}";
var version = $"{revision % 5}.{index}";
builder.Add($"cpe:2.3:a:stellaops:{component}:{version}:*:*:*:*:*:*:*");
}
return builder.MoveToImmutable();
}
private static ImmutableArray<ObservationReference> CreateReferences(string primaryAlias, int count)
{
if (count <= 0)
{
return ImmutableArray<ObservationReference>.Empty;
}
var builder = ImmutableArray.CreateBuilder<ObservationReference>(count);
for (var index = 0; index < count; index++)
{
builder.Add(new ObservationReference(
Type: index % 2 == 0 ? "advisory" : "patch",
Url: $"https://vendor.example/{primaryAlias.ToLowerInvariant()}/ref/{index:D2}"));
}
return builder.MoveToImmutable();
}
private static BsonDocument CreateRawPayload(
string primaryAlias,
string vendorAlias,
IReadOnlyCollection<string> purls,
IReadOnlyCollection<string> cpes,
IReadOnlyCollection<ObservationReference> references)
{
var document = new BsonDocument
{
["id"] = primaryAlias,
["vendorId"] = vendorAlias,
["title"] = $"Simulated advisory {primaryAlias}",
["summary"] = "Synthetic payload produced by Link-Not-Merge benchmark.",
["metrics"] = new BsonArray
{
new BsonDocument
{
["kind"] = "cvss:v3.1",
["score"] = 7.5,
["vector"] = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N",
},
},
};
if (purls.Count > 0)
{
document["purls"] = new BsonArray(purls);
}
if (cpes.Count > 0)
{
document["cpes"] = new BsonArray(cpes);
}
if (references.Count > 0)
{
document["references"] = new BsonArray(references.Select(reference => new BsonDocument
{
["type"] = reference.Type,
["url"] = reference.Url,
}));
}
return document;
}
private static string ComputeContentHash(BsonDocument rawPayload, string tenant, int group, int revision)
{
using var sha256 = SHA256.Create();
var seed = $"{tenant}|{group}|{revision}";
var rawBytes = rawPayload.ToBson();
var seedBytes = System.Text.Encoding.UTF8.GetBytes(seed);
var combined = new byte[rawBytes.Length + seedBytes.Length];
Buffer.BlockCopy(rawBytes, 0, combined, 0, rawBytes.Length);
Buffer.BlockCopy(seedBytes, 0, combined, rawBytes.Length, seedBytes.Length);
var hash = sha256.ComputeHash(combined);
return $"sha256:{Convert.ToHexString(hash)}";
}
}
internal sealed record ObservationSeed(
string ObservationId,
string Tenant,
string Vendor,
string Stream,
string Api,
string CollectorVersion,
string UpstreamId,
string DocumentVersion,
DateTimeOffset FetchedAt,
DateTimeOffset ReceivedAt,
string ContentHash,
ImmutableArray<string> Aliases,
ImmutableArray<string> Purls,
ImmutableArray<string> Cpes,
ImmutableArray<ObservationReference> References,
string ContentFormat,
string SpecVersion,
BsonDocument RawPayload)
{
public BsonDocument ToBsonDocument()
{
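// Document shape mirrors the advisory_observations collection: source, upstream, content, identifiers, and linkset blocks.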
var aliases = new BsonArray(Aliases.Select(alias => alias));
var purls = new BsonArray(Purls.Select(purl => purl));
var cpes = new BsonArray(Cpes.Select(cpe => cpe));
var references = new BsonArray(References.Select(reference => new BsonDocument
{
["type"] = reference.Type,
["url"] = reference.Url,
}));
var document = new BsonDocument
{
["_id"] = ObservationId,
["tenant"] = Tenant,
["source"] = new BsonDocument
{
["vendor"] = Vendor,
["stream"] = Stream,
["api"] = Api,
["collector_version"] = CollectorVersion,
},
["upstream"] = new BsonDocument
{
["upstream_id"] = UpstreamId,
["document_version"] = DocumentVersion,
["fetched_at"] = FetchedAt.UtcDateTime,
["received_at"] = ReceivedAt.UtcDateTime,
["content_hash"] = ContentHash,
["signature"] = new BsonDocument
{
["present"] = false,
["format"] = BsonNull.Value,
["key_id"] = BsonNull.Value,
["signature"] = BsonNull.Value,
},
},
["content"] = new BsonDocument
{
["format"] = ContentFormat,
["spec_version"] = SpecVersion,
["raw"] = RawPayload,
},
["identifiers"] = new BsonDocument
{
["aliases"] = aliases,
["primary"] = UpstreamId,
["cve"] = Aliases.FirstOrDefault(alias => alias.StartsWith("CVE-", StringComparison.Ordinal)) ?? UpstreamId,
},
["linkset"] = new BsonDocument
{
["aliases"] = aliases,
["purls"] = purls,
["cpes"] = cpes,
["references"] = references,
["reconciled_from"] = new BsonArray { "/content/product_tree" },
},
["supersedes"] = BsonNull.Value,
};
return document;
}
}
internal sealed record ObservationReference(string Type, string Url);

View File

@@ -0,0 +1,375 @@
using System.Globalization;
using StellaOps.Bench.LinkNotMerge.Baseline;
using StellaOps.Bench.LinkNotMerge.Reporting;
namespace StellaOps.Bench.LinkNotMerge;
internal static class Program
{
public static async Task<int> Main(string[] args)
{
try
{
var options = ProgramOptions.Parse(args);
var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);
var results = new List<ScenarioResult>();
var reports = new List<BenchmarkScenarioReport>();
var failures = new List<string>();
foreach (var scenario in config.Scenarios)
{
var iterations = scenario.ResolveIterations(config.Iterations);
var runner = new LinkNotMergeScenarioRunner(scenario);
var execution = runner.Execute(iterations, CancellationToken.None);
var totalStats = DurationStatistics.From(execution.TotalDurationsMs);
var insertStats = DurationStatistics.From(execution.InsertDurationsMs);
var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs);
var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
var throughputStats = ThroughputStatistics.From(execution.TotalThroughputsPerSecond);
var mongoThroughputStats = ThroughputStatistics.From(execution.InsertThroughputsPerSecond);
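// Guardrail resolution order: per-scenario value, then CLI override, then global config default.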
var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
var throughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
var mongoThroughputFloor = scenario.MinMongoThroughputPerSecond ?? options.MinMongoThroughputPerSecond ?? config.MinMongoThroughputPerSecond;
var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;
var result = new ScenarioResult(
scenario.ScenarioId,
scenario.DisplayLabel,
iterations,
execution.ObservationCount,
execution.AliasGroups,
execution.LinksetCount,
totalStats,
insertStats,
correlationStats,
throughputStats,
mongoThroughputStats,
allocationStats,
thresholdMs,
throughputFloor,
mongoThroughputFloor,
allocationLimit);
results.Add(result);
if (thresholdMs is { } threshold && result.TotalStatistics.MaxMs > threshold)
{
failures.Add($"{result.Id} exceeded total latency threshold: {result.TotalStatistics.MaxMs:F2} ms > {threshold:F2} ms");
}
if (throughputFloor is { } floor && result.TotalThroughputStatistics.MinPerSecond < floor)
{
failures.Add($"{result.Id} fell below throughput floor: {result.TotalThroughputStatistics.MinPerSecond:N0} obs/s < {floor:N0} obs/s");
}
if (mongoThroughputFloor is { } mongoFloor && result.InsertThroughputStatistics.MinPerSecond < mongoFloor)
{
failures.Add($"{result.Id} fell below Mongo throughput floor: {result.InsertThroughputStatistics.MinPerSecond:N0} ops/s < {mongoFloor:N0} ops/s");
}
if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit)
{
failures.Add($"{result.Id} exceeded allocation budget: {result.AllocationStatistics.MaxAllocatedMb:F2} MB > {limit:F2} MB");
}
baseline.TryGetValue(result.Id, out var baselineEntry);
var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit);
reports.Add(report);
failures.AddRange(report.BuildRegressionFailureMessages());
}
TablePrinter.Print(results);
if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
{
CsvWriter.Write(options.CsvOutPath!, results);
}
if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
{
var metadata = new BenchmarkJsonMetadata(
SchemaVersion: "linknotmerge-bench/1.0",
CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(),
Commit: options.Commit,
Environment: options.Environment);
await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false);
}
if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
{
PrometheusWriter.Write(options.PrometheusOutPath!, reports);
}
if (failures.Count > 0)
{
Console.Error.WriteLine();
Console.Error.WriteLine("Benchmark failures detected:");
foreach (var failure in failures.Distinct())
{
Console.Error.WriteLine($" - {failure}");
}
return 1;
}
return 0;
}
catch (Exception ex)
{
Console.Error.WriteLine($"linknotmerge-bench error: {ex.Message}");
return 1;
}
}
private sealed record ProgramOptions(
string ConfigPath,
int? Iterations,
double? ThresholdMs,
double? MinThroughputPerSecond,
double? MinMongoThroughputPerSecond,
double? MaxAllocatedMb,
string? CsvOutPath,
string? JsonOutPath,
string? PrometheusOutPath,
string BaselinePath,
DateTimeOffset? CapturedAtUtc,
string? Commit,
string? Environment,
double? RegressionLimit)
{
public static ProgramOptions Parse(string[] args)
{
var configPath = DefaultConfigPath();
var baselinePath = DefaultBaselinePath();
int? iterations = null;
double? thresholdMs = null;
double? minThroughput = null;
double? minMongoThroughput = null;
double? maxAllocated = null;
string? csvOut = null;
string? jsonOut = null;
string? promOut = null;
DateTimeOffset? capturedAt = null;
string? commit = null;
string? environment = null;
double? regressionLimit = null;
for (var index = 0; index < args.Length; index++)
{
var current = args[index];
switch (current)
{
case "--config":
EnsureNext(args, index);
configPath = Path.GetFullPath(args[++index]);
break;
case "--iterations":
EnsureNext(args, index);
iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--threshold-ms":
EnsureNext(args, index);
thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--min-throughput":
EnsureNext(args, index);
minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--min-mongo-throughput":
EnsureNext(args, index);
minMongoThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--max-allocated-mb":
EnsureNext(args, index);
maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--csv":
EnsureNext(args, index);
csvOut = args[++index];
break;
case "--json":
EnsureNext(args, index);
jsonOut = args[++index];
break;
case "--prometheus":
EnsureNext(args, index);
promOut = args[++index];
break;
case "--baseline":
EnsureNext(args, index);
baselinePath = Path.GetFullPath(args[++index]);
break;
case "--captured-at":
EnsureNext(args, index);
capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
break;
case "--commit":
EnsureNext(args, index);
commit = args[++index];
break;
case "--environment":
EnsureNext(args, index);
environment = args[++index];
break;
case "--regression-limit":
EnsureNext(args, index);
regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--help":
case "-h":
PrintUsage();
System.Environment.Exit(0);
break;
default:
throw new ArgumentException($"Unknown argument '{current}'.");
}
}
return new ProgramOptions(
configPath,
iterations,
thresholdMs,
minThroughput,
minMongoThroughput,
maxAllocated,
csvOut,
jsonOut,
promOut,
baselinePath,
capturedAt,
commit,
environment,
regressionLimit);
}
private static string DefaultConfigPath()
{
var binaryDir = AppContext.BaseDirectory;
var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
return Path.Combine(benchRoot, "config.json");
}
private static string DefaultBaselinePath()
{
var binaryDir = AppContext.BaseDirectory;
var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
return Path.Combine(benchRoot, "baseline.csv");
}
private static void EnsureNext(string[] args, int index)
{
if (index + 1 >= args.Length)
{
throw new ArgumentException("Missing value for argument.");
}
}
private static void PrintUsage()
{
Console.WriteLine("Usage: linknotmerge-bench [options]");
Console.WriteLine();
Console.WriteLine("Options:");
Console.WriteLine(" --config <path> Path to benchmark configuration JSON.");
Console.WriteLine(" --iterations <count> Override iteration count.");
Console.WriteLine(" --threshold-ms <value> Global latency threshold in milliseconds.");
Console.WriteLine(" --min-throughput <value> Global throughput floor (observations/second).");
Console.WriteLine(" --min-mongo-throughput <value> Mongo insert throughput floor (ops/second).");
Console.WriteLine(" --max-allocated-mb <value> Global allocation ceiling (MB).");
Console.WriteLine(" --csv <path> Write CSV results to path.");
Console.WriteLine(" --json <path> Write JSON results to path.");
Console.WriteLine(" --prometheus <path> Write Prometheus exposition metrics to path.");
Console.WriteLine(" --baseline <path> Baseline CSV path.");
Console.WriteLine(" --captured-at <iso8601> Timestamp to embed in JSON metadata.");
Console.WriteLine(" --commit <sha> Commit identifier for metadata.");
Console.WriteLine(" --environment <name> Environment label for metadata.");
Console.WriteLine(" --regression-limit <value> Regression multiplier (default 1.15).");
}
}
}
internal static class TablePrinter
{
public static void Print(IEnumerable<ScenarioResult> results)
{
Console.WriteLine("Scenario | Observations | Aliases | Linksets | Total(ms) | Correl(ms) | Insert(ms) | Min k/s | Mongo k/s | Alloc(MB)");
Console.WriteLine("---------------------------- | ------------- | ------- | -------- | ---------- | ---------- | ----------- | -------- | --------- | --------");
foreach (var row in results)
{
Console.WriteLine(string.Join(" | ", new[]
{
row.IdColumn,
row.ObservationsColumn,
row.AliasColumn,
row.LinksetColumn,
row.TotalMeanColumn,
row.CorrelationMeanColumn,
row.InsertMeanColumn,
row.ThroughputColumn,
row.MongoThroughputColumn,
row.AllocatedColumn,
}));
}
}
}
internal static class CsvWriter
{
public static void Write(string path, IEnumerable<ScenarioResult> results)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
ArgumentNullException.ThrowIfNull(results);
var resolved = Path.GetFullPath(path);
var directory = Path.GetDirectoryName(resolved);
if (!string.IsNullOrEmpty(directory))
{
Directory.CreateDirectory(directory);
}
using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
using var writer = new StreamWriter(stream);
writer.WriteLine("scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb");
foreach (var result in results)
{
writer.Write(result.Id);
writer.Write(',');
writer.Write(result.Iterations.ToString(CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.ObservationCount.ToString(CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.AliasGroups.ToString(CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.LinksetCount.ToString(CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.TotalStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.TotalStatistics.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.TotalStatistics.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.InsertStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.CorrelationStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.TotalThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.TotalThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.InsertThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.InsertThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.AllocationStatistics.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture));
writer.WriteLine();
}
}
}

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Bench.LinkNotMerge.Tests")]

View File

@@ -0,0 +1,151 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.LinkNotMerge.Reporting;
internal static class BenchmarkJsonWriter
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
};
public static async Task WriteAsync(
string path,
BenchmarkJsonMetadata metadata,
IReadOnlyList<BenchmarkScenarioReport> reports,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
ArgumentNullException.ThrowIfNull(metadata);
ArgumentNullException.ThrowIfNull(reports);
var resolved = Path.GetFullPath(path);
var directory = Path.GetDirectoryName(resolved);
if (!string.IsNullOrEmpty(directory))
{
Directory.CreateDirectory(directory);
}
var document = new BenchmarkJsonDocument(
metadata.SchemaVersion,
metadata.CapturedAtUtc,
metadata.Commit,
metadata.Environment,
reports.Select(CreateScenario).ToArray());
await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
{
var baseline = report.Baseline;
return new BenchmarkJsonScenario(
report.Result.Id,
report.Result.Label,
report.Result.Iterations,
report.Result.ObservationCount,
report.Result.AliasGroups,
report.Result.LinksetCount,
report.Result.TotalStatistics.MeanMs,
report.Result.TotalStatistics.P95Ms,
report.Result.TotalStatistics.MaxMs,
report.Result.InsertStatistics.MeanMs,
report.Result.CorrelationStatistics.MeanMs,
report.Result.TotalThroughputStatistics.MeanPerSecond,
report.Result.TotalThroughputStatistics.MinPerSecond,
report.Result.InsertThroughputStatistics.MeanPerSecond,
report.Result.InsertThroughputStatistics.MinPerSecond,
report.Result.AllocationStatistics.MaxAllocatedMb,
report.Result.ThresholdMs,
report.Result.MinThroughputThresholdPerSecond,
report.Result.MinMongoThroughputThresholdPerSecond,
report.Result.MaxAllocatedThresholdMb,
baseline is null
? null
: new BenchmarkJsonScenarioBaseline(
baseline.Iterations,
baseline.Observations,
baseline.Aliases,
baseline.Linksets,
baseline.MeanTotalMs,
baseline.P95TotalMs,
baseline.MaxTotalMs,
baseline.MeanInsertMs,
baseline.MeanCorrelationMs,
baseline.MeanThroughputPerSecond,
baseline.MinThroughputPerSecond,
baseline.MeanMongoThroughputPerSecond,
baseline.MinMongoThroughputPerSecond,
baseline.MaxAllocatedMb),
new BenchmarkJsonScenarioRegression(
report.DurationRegressionRatio,
report.ThroughputRegressionRatio,
report.MongoThroughputRegressionRatio,
report.RegressionLimit,
report.RegressionBreached));
}
private sealed record BenchmarkJsonDocument(
string SchemaVersion,
DateTimeOffset CapturedAt,
string? Commit,
string? Environment,
IReadOnlyList<BenchmarkJsonScenario> Scenarios);
private sealed record BenchmarkJsonScenario(
string Id,
string Label,
int Iterations,
int Observations,
int Aliases,
int Linksets,
double MeanTotalMs,
double P95TotalMs,
double MaxTotalMs,
double MeanInsertMs,
double MeanCorrelationMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MeanMongoThroughputPerSecond,
double MinMongoThroughputPerSecond,
double MaxAllocatedMb,
double? ThresholdMs,
double? MinThroughputThresholdPerSecond,
double? MinMongoThroughputThresholdPerSecond,
double? MaxAllocatedThresholdMb,
BenchmarkJsonScenarioBaseline? Baseline,
BenchmarkJsonScenarioRegression Regression);
private sealed record BenchmarkJsonScenarioBaseline(
int Iterations,
int Observations,
int Aliases,
int Linksets,
double MeanTotalMs,
double P95TotalMs,
double MaxTotalMs,
double MeanInsertMs,
double MeanCorrelationMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MeanMongoThroughputPerSecond,
double MinMongoThroughputPerSecond,
double MaxAllocatedMb);
private sealed record BenchmarkJsonScenarioRegression(
double? DurationRatio,
double? ThroughputRatio,
double? MongoThroughputRatio,
double Limit,
bool Breached);
}
internal sealed record BenchmarkJsonMetadata(
string SchemaVersion,
DateTimeOffset CapturedAtUtc,
string? Commit,
string? Environment);

View File

@@ -0,0 +1,89 @@
using StellaOps.Bench.LinkNotMerge.Baseline;
namespace StellaOps.Bench.LinkNotMerge.Reporting;
internal sealed class BenchmarkScenarioReport
{
private const double DefaultRegressionLimit = 1.15d;
public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
{
Result = result ?? throw new ArgumentNullException(nameof(result));
Baseline = baseline;
RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs);
ThroughputRegressionRatio = CalculateInverseRatio(result.TotalThroughputStatistics.MinPerSecond, baseline?.MinThroughputPerSecond);
MongoThroughputRegressionRatio = CalculateInverseRatio(result.InsertThroughputStatistics.MinPerSecond, baseline?.MinMongoThroughputPerSecond);
}
public ScenarioResult Result { get; }
public BaselineEntry? Baseline { get; }
public double RegressionLimit { get; }
public double? DurationRegressionRatio { get; }
public double? ThroughputRegressionRatio { get; }
public double? MongoThroughputRegressionRatio { get; }
public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit;
public bool ThroughputRegressionBreached => ThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;
public bool MongoThroughputRegressionBreached => MongoThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;
public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached || MongoThroughputRegressionBreached;
public IEnumerable<string> BuildRegressionFailureMessages()
{
if (Baseline is null)
{
yield break;
}
if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
{
var delta = (durationRatio - 1d) * 100d;
yield return $"{Result.Id} exceeded max duration budget: {Result.TotalStatistics.MaxMs:F2} ms vs baseline {Baseline.MaxTotalMs:F2} ms (+{delta:F1}%).";
}
if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio)
{
var delta = (throughputRatio - 1d) * 100d;
yield return $"{Result.Id} throughput regressed: min {Result.TotalThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinThroughputPerSecond:N0} obs/s (-{delta:F1}%).";
}
if (MongoThroughputRegressionBreached && MongoThroughputRegressionRatio is { } mongoRatio)
{
var delta = (mongoRatio - 1d) * 100d;
yield return $"{Result.Id} Mongo throughput regressed: min {Result.InsertThroughputStatistics.MinPerSecond:N0} ops/s vs baseline {Baseline.MinMongoThroughputPerSecond:N0} ops/s (-{delta:F1}%).";
}
}
private static double? CalculateRatio(double current, double? baseline)
{
if (!baseline.HasValue || baseline.Value <= 0d)
{
return null;
}
return current / baseline.Value;
}
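// Throughput ratios are inverted (baseline / current) so that values above 1.0 always signal a regression.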
private static double? CalculateInverseRatio(double current, double? baseline)
{
if (!baseline.HasValue || baseline.Value <= 0d)
{
return null;
}
if (current <= 0d)
{
return double.PositiveInfinity;
}
return baseline.Value / current;
}
}

View File

@@ -0,0 +1,101 @@
using System.Globalization;
using System.Text;
namespace StellaOps.Bench.LinkNotMerge.Reporting;
internal static class PrometheusWriter
{
public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
ArgumentNullException.ThrowIfNull(reports);
var resolved = Path.GetFullPath(path);
var directory = Path.GetDirectoryName(resolved);
if (!string.IsNullOrEmpty(directory))
{
Directory.CreateDirectory(directory);
}
var builder = new StringBuilder();
builder.AppendLine("# HELP linknotmerge_bench_total_ms Link-Not-Merge benchmark total duration metrics (milliseconds).");
builder.AppendLine("# TYPE linknotmerge_bench_total_ms gauge");
builder.AppendLine("# HELP linknotmerge_bench_correlation_ms Link-Not-Merge benchmark correlation duration metrics (milliseconds).");
builder.AppendLine("# TYPE linknotmerge_bench_correlation_ms gauge");
builder.AppendLine("# HELP linknotmerge_bench_insert_ms Link-Not-Merge benchmark Mongo insert duration metrics (milliseconds).");
builder.AppendLine("# TYPE linknotmerge_bench_insert_ms gauge");
builder.AppendLine("# HELP linknotmerge_bench_throughput_per_sec Link-Not-Merge benchmark throughput metrics (observations per second).");
builder.AppendLine("# TYPE linknotmerge_bench_throughput_per_sec gauge");
builder.AppendLine("# HELP linknotmerge_bench_mongo_throughput_per_sec Link-Not-Merge benchmark Mongo throughput metrics (operations per second).");
builder.AppendLine("# TYPE linknotmerge_bench_mongo_throughput_per_sec gauge");
builder.AppendLine("# HELP linknotmerge_bench_allocated_mb Link-Not-Merge benchmark allocation metrics (megabytes).");
builder.AppendLine("# TYPE linknotmerge_bench_allocated_mb gauge");
foreach (var report in reports)
{
var scenario = Escape(report.Result.Id);
AppendMetric(builder, "linknotmerge_bench_mean_total_ms", scenario, report.Result.TotalStatistics.MeanMs);
AppendMetric(builder, "linknotmerge_bench_p95_total_ms", scenario, report.Result.TotalStatistics.P95Ms);
AppendMetric(builder, "linknotmerge_bench_max_total_ms", scenario, report.Result.TotalStatistics.MaxMs);
AppendMetric(builder, "linknotmerge_bench_threshold_ms", scenario, report.Result.ThresholdMs);
AppendMetric(builder, "linknotmerge_bench_mean_correlation_ms", scenario, report.Result.CorrelationStatistics.MeanMs);
AppendMetric(builder, "linknotmerge_bench_mean_insert_ms", scenario, report.Result.InsertStatistics.MeanMs);
AppendMetric(builder, "linknotmerge_bench_mean_throughput_per_sec", scenario, report.Result.TotalThroughputStatistics.MeanPerSecond);
AppendMetric(builder, "linknotmerge_bench_min_throughput_per_sec", scenario, report.Result.TotalThroughputStatistics.MinPerSecond);
AppendMetric(builder, "linknotmerge_bench_throughput_floor_per_sec", scenario, report.Result.MinThroughputThresholdPerSecond);
AppendMetric(builder, "linknotmerge_bench_mean_mongo_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MeanPerSecond);
AppendMetric(builder, "linknotmerge_bench_min_mongo_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MinPerSecond);
AppendMetric(builder, "linknotmerge_bench_mongo_throughput_floor_per_sec", scenario, report.Result.MinMongoThroughputThresholdPerSecond);
AppendMetric(builder, "linknotmerge_bench_max_allocated_mb", scenario, report.Result.AllocationStatistics.MaxAllocatedMb);
AppendMetric(builder, "linknotmerge_bench_max_allocated_threshold_mb", scenario, report.Result.MaxAllocatedThresholdMb);
if (report.Baseline is { } baseline)
{
AppendMetric(builder, "linknotmerge_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs);
AppendMetric(builder, "linknotmerge_bench_baseline_min_throughput_per_sec", scenario, baseline.MinThroughputPerSecond);
AppendMetric(builder, "linknotmerge_bench_baseline_min_mongo_throughput_per_sec", scenario, baseline.MinMongoThroughputPerSecond);
}
if (report.DurationRegressionRatio is { } durationRatio)
{
AppendMetric(builder, "linknotmerge_bench_duration_regression_ratio", scenario, durationRatio);
}
if (report.ThroughputRegressionRatio is { } throughputRatio)
{
AppendMetric(builder, "linknotmerge_bench_throughput_regression_ratio", scenario, throughputRatio);
}
if (report.MongoThroughputRegressionRatio is { } mongoRatio)
{
AppendMetric(builder, "linknotmerge_bench_mongo_throughput_regression_ratio", scenario, mongoRatio);
}
AppendMetric(builder, "linknotmerge_bench_regression_limit", scenario, report.RegressionLimit);
AppendMetric(builder, "linknotmerge_bench_regression_breached", scenario, report.RegressionBreached ? 1 : 0);
}
File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
}
private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
{
if (!value.HasValue)
{
return;
}
builder.Append(metric);
builder.Append("{scenario=\"");
builder.Append(scenario);
builder.Append("\"} ");
builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture));
}
private static string Escape(string value) =>
value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}

View File

@@ -0,0 +1,14 @@
namespace StellaOps.Bench.LinkNotMerge;
internal sealed record ScenarioExecutionResult(
IReadOnlyList<double> TotalDurationsMs,
IReadOnlyList<double> InsertDurationsMs,
IReadOnlyList<double> CorrelationDurationsMs,
IReadOnlyList<double> AllocatedMb,
IReadOnlyList<double> TotalThroughputsPerSecond,
IReadOnlyList<double> InsertThroughputsPerSecond,
int ObservationCount,
int AliasGroups,
int LinksetCount,
int TenantCount,
LinksetAggregationResult AggregationResult);

View File

@@ -0,0 +1,42 @@
using System.Globalization;
namespace StellaOps.Bench.LinkNotMerge;
internal sealed record ScenarioResult(
string Id,
string Label,
int Iterations,
int ObservationCount,
int AliasGroups,
int LinksetCount,
DurationStatistics TotalStatistics,
DurationStatistics InsertStatistics,
DurationStatistics CorrelationStatistics,
ThroughputStatistics TotalThroughputStatistics,
ThroughputStatistics InsertThroughputStatistics,
AllocationStatistics AllocationStatistics,
double? ThresholdMs,
double? MinThroughputThresholdPerSecond,
double? MinMongoThroughputThresholdPerSecond,
double? MaxAllocatedThresholdMb)
{
public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];
public string ObservationsColumn => ObservationCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);
public string AliasColumn => AliasGroups.ToString("N0", CultureInfo.InvariantCulture).PadLeft(8);
public string LinksetColumn => LinksetCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(9);
public string TotalMeanColumn => TotalStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
public string CorrelationMeanColumn => CorrelationStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
public string InsertMeanColumn => InsertStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
public string ThroughputColumn => (TotalThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
public string MongoThroughputColumn => (InsertThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
public string AllocatedColumn => AllocationStatistics.MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}

View File

@@ -0,0 +1,84 @@
namespace StellaOps.Bench.LinkNotMerge;
internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
public static DurationStatistics From(IReadOnlyList<double> values)
{
if (values.Count == 0)
{
return new DurationStatistics(0, 0, 0);
}
var sorted = values.ToArray();
Array.Sort(sorted);
var total = 0d;
foreach (var value in values)
{
total += value;
}
var mean = total / values.Count;
var p95 = Percentile(sorted, 95);
var max = sorted[^1];
return new DurationStatistics(mean, p95, max);
}
private static double Percentile(IReadOnlyList<double> sorted, double percentile)
{
if (sorted.Count == 0)
{
return 0;
}
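// Interpolated percentile: linearly blend the two nearest sorted samples around the fractional rank.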
var rank = (percentile / 100d) * (sorted.Count - 1);
var lower = (int)Math.Floor(rank);
var upper = (int)Math.Ceiling(rank);
var weight = rank - lower;
if (upper >= sorted.Count)
{
return sorted[lower];
}
return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
}
}
internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
public static ThroughputStatistics From(IReadOnlyList<double> values)
{
if (values.Count == 0)
{
return new ThroughputStatistics(0, 0);
}
var total = 0d;
var min = double.MaxValue;
foreach (var value in values)
{
total += value;
min = Math.Min(min, value);
}
var mean = total / values.Count;
return new ThroughputStatistics(mean, min);
}
}
internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
public static AllocationStatistics From(IReadOnlyList<double> values)
{
var max = 0d;
foreach (var value in values)
{
max = Math.Max(max, value);
}
return new AllocationStatistics(max);
}
}

View File

@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="EphemeralMongo" Version="3.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,4 @@
scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb
lnm_ingest_baseline,5,5000,500,6000,555.1984,823.4957,866.6236,366.2635,188.9349,9877.7916,5769.5175,15338.0851,8405.1257,62.4477
lnm_ingest_fanout_medium,5,10000,800,14800,785.8909,841.6247,842.8815,453.5087,332.3822,12794.9550,11864.0639,22086.0320,20891.0579,145.8328
lnm_ingest_fanout_high,5,15000,1200,17400,1299.3458,1367.0934,1369.9430,741.6265,557.7193,11571.0991,10949.3607,20232.5180,19781.6762,238.3450

View File

@@ -0,0 +1,57 @@
{
"thresholdMs": 2000,
"minThroughputPerSecond": 7000,
"minMongoThroughputPerSecond": 12000,
"maxAllocatedMb": 600,
"iterations": 5,
"scenarios": [
{
"id": "lnm_ingest_baseline",
"label": "5k observations, 500 aliases",
"observations": 5000,
"aliasGroups": 500,
"purlsPerObservation": 4,
"cpesPerObservation": 2,
"referencesPerObservation": 3,
"tenants": 4,
"batchSize": 250,
"seed": 42022,
"thresholdMs": 900,
"minThroughputPerSecond": 5500,
"minMongoThroughputPerSecond": 8000,
"maxAllocatedMb": 160
},
{
"id": "lnm_ingest_fanout_medium",
"label": "10k observations, 800 aliases",
"observations": 10000,
"aliasGroups": 800,
"purlsPerObservation": 6,
"cpesPerObservation": 3,
"referencesPerObservation": 4,
"tenants": 6,
"batchSize": 400,
"seed": 52022,
"thresholdMs": 1300,
"minThroughputPerSecond": 8000,
"minMongoThroughputPerSecond": 13000,
"maxAllocatedMb": 220
},
{
"id": "lnm_ingest_fanout_high",
"label": "15k observations, 1200 aliases",
"observations": 15000,
"aliasGroups": 1200,
"purlsPerObservation": 8,
"cpesPerObservation": 4,
"referencesPerObservation": 5,
"tenants": 8,
"batchSize": 500,
"seed": 62022,
"thresholdMs": 2200,
"minThroughputPerSecond": 7000,
"minMongoThroughputPerSecond": 13000,
"maxAllocatedMb": 300
}
]
}