Add Policy DSL Validator, Schema Exporter, and Simulation Smoke tools

- Implemented PolicyDslValidator with command-line options for strict mode and JSON output (see the illustrative invocation below).
- Created PolicySchemaExporter to generate JSON schemas for policy-related models.
- Developed PolicySimulationSmoke tool to validate policy simulations against expected outcomes.
- Added project files and necessary dependencies for each tool.
- Ensured proper error handling and usage instructions across tools.
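An illustrative invocation of the validator, for orientation only — the project path and exact flag spellings below are hypothetical; the commit message only states that strict mode and JSON output exist:

```bash
# Hypothetical path and flags -- consult the tool's usage text for the real options.
dotnet run --project tools/PolicyDslValidator -- --strict --json policies/example.policy
```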
2025-10-27 08:00:11 +02:00
parent 651b8e0fa3
commit 96d52884e8
712 changed files with 49449 additions and 6124 deletions


@@ -0,0 +1,26 @@
# Link-Not-Merge VEX Bench
Measures synthetic VEX observation ingest and event emission throughput for the Link-Not-Merge program.
## Scenarios
`config.json` defines the workloads, varying statement density and tenant fan-out; a sample scenario follows the list. Metrics captured per scenario:
- Total latency (ingest + correlation) and p95/max percentiles
- Correlator-only latency and Mongo insert latency
- Observation throughput (observations/sec)
- Event emission throughput (events/sec)
- Peak managed heap allocations
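
A minimal `config.json` sketch, inferred from the `VexScenarioConfig` loader added in this commit; the values mirror its defaults and are illustrative only:

```json
{
  "iterations": 5,
  "thresholdMs": 900,
  "minThroughputPerSecond": 8000,
  "minEventThroughputPerSecond": 3500,
  "maxAllocatedMb": 200,
  "scenarios": [
    {
      "id": "vex_ingest_baseline",
      "label": "VEX ingest baseline",
      "observations": 4000,
      "aliasGroups": 400,
      "statementsPerObservation": 6,
      "productsPerObservation": 3,
      "tenants": 3,
      "batchSize": 250,
      "seed": 520025
    }
  ]
}
```

Top-level `thresholdMs`, `minThroughputPerSecond`, `minEventThroughputPerSecond`, and `maxAllocatedMb` act as global gates; per-scenario values and CLI flags override them.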
## Running locally
```bash
dotnet run \
--project src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.csproj \
-- \
--csv out/linknotmerge-vex-bench.csv \
--json out/linknotmerge-vex-bench.json \
--prometheus out/linknotmerge-vex-bench.prom
```
The benchmark exits non-zero if latency thresholds are exceeded, observation or event throughput drops below configured floors, allocations exceed the ceiling, or regression ratios breach the baseline.
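
To gate a run against a stored baseline, pass the baseline CSV and a regression multiplier (both flags are handled by `Program.cs` later in this diff; the baseline path shown is its default location next to `config.json`):

```bash
dotnet run \
  --project src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.csproj \
  -- \
  --baseline src/StellaOps.Bench/LinkNotMerge.Vex/baseline.csv \
  --regression-limit 1.15
```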


@@ -0,0 +1,37 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Vex.Tests;
public sealed class BaselineLoaderTests
{
[Fact]
public async Task LoadAsync_ReadsEntries()
{
var path = Path.GetTempFileName();
try
{
await File.WriteAllTextAsync(
path,
"scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb\n" +
"vex_ingest_baseline,5,4000,24000,12000,620.5,700.1,820.9,320.5,300.0,9800.0,9100.0,4200.0,3900.0,150.0\n");
var baseline = await BaselineLoader.LoadAsync(path, CancellationToken.None);
var entry = Assert.Single(baseline);
Assert.Equal("vex_ingest_baseline", entry.Key);
Assert.Equal(4000, entry.Value.Observations);
Assert.Equal(24000, entry.Value.Statements);
Assert.Equal(12000, entry.Value.Events);
Assert.Equal(700.1, entry.Value.P95TotalMs);
Assert.Equal(3900.0, entry.Value.MinEventThroughputPerSecond);
}
finally
{
File.Delete(path);
}
}
}


@@ -0,0 +1,83 @@
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
using StellaOps.Bench.LinkNotMerge.Vex.Reporting;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Vex.Tests;
public sealed class BenchmarkScenarioReportTests
{
[Fact]
public void RegressionDetection_FlagsBreaches()
{
var result = new VexScenarioResult(
Id: "scenario",
Label: "Scenario",
Iterations: 3,
ObservationCount: 1000,
AliasGroups: 100,
StatementCount: 6000,
EventCount: 3200,
TotalStatistics: new DurationStatistics(600, 700, 750),
InsertStatistics: new DurationStatistics(320, 360, 380),
CorrelationStatistics: new DurationStatistics(280, 320, 340),
ObservationThroughputStatistics: new ThroughputStatistics(8000, 7000),
EventThroughputStatistics: new ThroughputStatistics(3500, 3200),
AllocationStatistics: new AllocationStatistics(180),
ThresholdMs: null,
MinObservationThroughputPerSecond: null,
MinEventThroughputPerSecond: null,
MaxAllocatedThresholdMb: null);
var baseline = new BaselineEntry(
ScenarioId: "scenario",
Iterations: 3,
Observations: 1000,
Statements: 6000,
Events: 3200,
MeanTotalMs: 520,
P95TotalMs: 560,
MaxTotalMs: 580,
MeanInsertMs: 250,
MeanCorrelationMs: 260,
MeanObservationThroughputPerSecond: 9000,
MinObservationThroughputPerSecond: 8500,
MeanEventThroughputPerSecond: 4200,
MinEventThroughputPerSecond: 3800,
MaxAllocatedMb: 140);
var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1);
Assert.True(report.DurationRegressionBreached);
Assert.True(report.ObservationThroughputRegressionBreached);
Assert.True(report.EventThroughputRegressionBreached);
Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("event throughput"));
}
[Fact]
public void RegressionDetection_NoBaseline_NoBreaches()
{
var result = new VexScenarioResult(
Id: "scenario",
Label: "Scenario",
Iterations: 3,
ObservationCount: 1000,
AliasGroups: 100,
StatementCount: 6000,
EventCount: 3200,
TotalStatistics: new DurationStatistics(480, 520, 540),
InsertStatistics: new DurationStatistics(260, 280, 300),
CorrelationStatistics: new DurationStatistics(220, 240, 260),
ObservationThroughputStatistics: new ThroughputStatistics(9000, 8800),
EventThroughputStatistics: new ThroughputStatistics(4200, 4100),
AllocationStatistics: new AllocationStatistics(150),
ThresholdMs: null,
MinObservationThroughputPerSecond: null,
MinEventThroughputPerSecond: null,
MaxAllocatedThresholdMb: null);
var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null);
Assert.False(report.RegressionBreached);
Assert.Empty(report.BuildRegressionFailureMessages());
}
}


@@ -0,0 +1,28 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<UseConcelierTestInfra>false</UseConcelierTestInfra>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" Version="6.0.4">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="EphemeralMongo" Version="3.0.0" />
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Bench.LinkNotMerge.Vex\StellaOps.Bench.LinkNotMerge.Vex.csproj" />
</ItemGroup>
</Project>


@@ -0,0 +1,34 @@
using System.Linq;
using System.Threading;
using Xunit;
namespace StellaOps.Bench.LinkNotMerge.Vex.Tests;
public sealed class VexScenarioRunnerTests
{
[Fact]
public void Execute_ComputesEvents()
{
var config = new VexScenarioConfig
{
Id = "unit",
Observations = 600,
AliasGroups = 120,
StatementsPerObservation = 5,
ProductsPerObservation = 3,
Tenants = 2,
BatchSize = 120,
Seed = 12345,
};
var runner = new VexScenarioRunner(config);
var result = runner.Execute(2, CancellationToken.None);
Assert.Equal(600, result.ObservationCount);
Assert.True(result.StatementCount > 0);
Assert.True(result.EventCount > 0);
Assert.All(result.TotalDurationsMs, duration => Assert.True(duration > 0));
Assert.All(result.EventThroughputsPerSecond, throughput => Assert.True(throughput > 0));
Assert.Equal(result.AggregationResult.EventCount, result.EventCount);
}
}


@@ -0,0 +1,18 @@
namespace StellaOps.Bench.LinkNotMerge.Vex.Baseline;
internal sealed record BaselineEntry(
string ScenarioId,
int Iterations,
int Observations,
int Statements,
int Events,
double MeanTotalMs,
double P95TotalMs,
double MaxTotalMs,
double MeanInsertMs,
double MeanCorrelationMs,
double MeanObservationThroughputPerSecond,
double MinObservationThroughputPerSecond,
double MeanEventThroughputPerSecond,
double MinEventThroughputPerSecond,
double MaxAllocatedMb);
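
The 15 CSV columns map positionally onto this record; the row below is the fixture exercised by `BaselineLoaderTests` earlier in this diff:

```
scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb
vex_ingest_baseline,5,4000,24000,12000,620.5,700.1,820.9,320.5,300.0,9800.0,9100.0,4200.0,3900.0,150.0
```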


@@ -0,0 +1,87 @@
using System.Globalization;
namespace StellaOps.Bench.LinkNotMerge.Vex.Baseline;
internal static class BaselineLoader
{
public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
var resolved = Path.GetFullPath(path);
if (!File.Exists(resolved))
{
return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
}
var result = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
using var reader = new StreamReader(stream);
var lineNumber = 0;
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
var line = await reader.ReadLineAsync().ConfigureAwait(false);
if (line is null)
{
break;
}
lineNumber++;
if (lineNumber == 1 || string.IsNullOrWhiteSpace(line))
{
continue;
}
var parts = line.Split(',', StringSplitOptions.TrimEntries);
if (parts.Length < 15)
{
throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 15 columns, found {parts.Length}).");
}
var entry = new BaselineEntry(
ScenarioId: parts[0],
Iterations: ParseInt(parts[1], resolved, lineNumber),
Observations: ParseInt(parts[2], resolved, lineNumber),
Statements: ParseInt(parts[3], resolved, lineNumber),
Events: ParseInt(parts[4], resolved, lineNumber),
MeanTotalMs: ParseDouble(parts[5], resolved, lineNumber),
P95TotalMs: ParseDouble(parts[6], resolved, lineNumber),
MaxTotalMs: ParseDouble(parts[7], resolved, lineNumber),
MeanInsertMs: ParseDouble(parts[8], resolved, lineNumber),
MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber),
MeanObservationThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber),
MinObservationThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber),
MeanEventThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber),
MinEventThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber),
MaxAllocatedMb: ParseDouble(parts[14], resolved, lineNumber));
result[entry.ScenarioId] = entry;
}
return result;
}
private static int ParseInt(string value, string file, int line)
{
if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
{
return parsed;
}
throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
}
private static double ParseDouble(string value, string file, int line)
{
if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed))
{
return parsed;
}
throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
}
}


@@ -0,0 +1,376 @@
using System.Globalization;
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
using StellaOps.Bench.LinkNotMerge.Vex.Reporting;
namespace StellaOps.Bench.LinkNotMerge.Vex;
internal static class Program
{
public static async Task<int> Main(string[] args)
{
try
{
var options = ProgramOptions.Parse(args);
var config = await VexBenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);
var results = new List<VexScenarioResult>();
var reports = new List<BenchmarkScenarioReport>();
var failures = new List<string>();
foreach (var scenario in config.Scenarios)
{
var iterations = scenario.ResolveIterations(config.Iterations);
var runner = new VexScenarioRunner(scenario);
var execution = runner.Execute(iterations, CancellationToken.None);
var totalStats = DurationStatistics.From(execution.TotalDurationsMs);
var insertStats = DurationStatistics.From(execution.InsertDurationsMs);
var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs);
var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
var observationThroughputStats = ThroughputStatistics.From(execution.ObservationThroughputsPerSecond);
var eventThroughputStats = ThroughputStatistics.From(execution.EventThroughputsPerSecond);
var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
var observationFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
var eventFloor = scenario.MinEventThroughputPerSecond ?? options.MinEventThroughputPerSecond ?? config.MinEventThroughputPerSecond;
var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;
var result = new VexScenarioResult(
scenario.ScenarioId,
scenario.DisplayLabel,
iterations,
execution.ObservationCount,
execution.AliasGroups,
execution.StatementCount,
execution.EventCount,
totalStats,
insertStats,
correlationStats,
observationThroughputStats,
eventThroughputStats,
allocationStats,
thresholdMs,
observationFloor,
eventFloor,
allocationLimit);
results.Add(result);
if (thresholdMs is { } threshold && result.TotalStatistics.MaxMs > threshold)
{
failures.Add($"{result.Id} exceeded total latency threshold: {result.TotalStatistics.MaxMs:F2} ms > {threshold:F2} ms");
}
if (observationFloor is { } obsFloor && result.ObservationThroughputStatistics.MinPerSecond < obsFloor)
{
failures.Add($"{result.Id} fell below observation throughput floor: {result.ObservationThroughputStatistics.MinPerSecond:N0} obs/s < {obsFloor:N0} obs/s");
}
if (eventFloor is { } evtFloor && result.EventThroughputStatistics.MinPerSecond < evtFloor)
{
failures.Add($"{result.Id} fell below event throughput floor: {result.EventThroughputStatistics.MinPerSecond:N0} events/s < {evtFloor:N0} events/s");
}
if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit)
{
failures.Add($"{result.Id} exceeded allocation budget: {result.AllocationStatistics.MaxAllocatedMb:F2} MB > {limit:F2} MB");
}
baseline.TryGetValue(result.Id, out var baselineEntry);
var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit);
reports.Add(report);
failures.AddRange(report.BuildRegressionFailureMessages());
}
TablePrinter.Print(results);
if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
{
CsvWriter.Write(options.CsvOutPath!, results);
}
if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
{
var metadata = new BenchmarkJsonMetadata(
SchemaVersion: "linknotmerge-vex-bench/1.0",
CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(),
Commit: options.Commit,
Environment: options.Environment);
await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false);
}
if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
{
PrometheusWriter.Write(options.PrometheusOutPath!, reports);
}
if (failures.Count > 0)
{
Console.Error.WriteLine();
Console.Error.WriteLine("Benchmark failures detected:");
foreach (var failure in failures.Distinct())
{
Console.Error.WriteLine($" - {failure}");
}
return 1;
}
return 0;
}
catch (Exception ex)
{
Console.Error.WriteLine($"linknotmerge-vex-bench error: {ex.Message}");
return 1;
}
}
private sealed record ProgramOptions(
string ConfigPath,
int? Iterations,
double? ThresholdMs,
double? MinThroughputPerSecond,
double? MinEventThroughputPerSecond,
double? MaxAllocatedMb,
string? CsvOutPath,
string? JsonOutPath,
string? PrometheusOutPath,
string BaselinePath,
DateTimeOffset? CapturedAtUtc,
string? Commit,
string? Environment,
double? RegressionLimit)
{
public static ProgramOptions Parse(string[] args)
{
var configPath = DefaultConfigPath();
var baselinePath = DefaultBaselinePath();
int? iterations = null;
double? thresholdMs = null;
double? minThroughput = null;
double? minEventThroughput = null;
double? maxAllocated = null;
string? csvOut = null;
string? jsonOut = null;
string? promOut = null;
DateTimeOffset? capturedAt = null;
string? commit = null;
string? environment = null;
double? regressionLimit = null;
for (var index = 0; index < args.Length; index++)
{
var current = args[index];
switch (current)
{
case "--config":
EnsureNext(args, index);
configPath = Path.GetFullPath(args[++index]);
break;
case "--iterations":
EnsureNext(args, index);
iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--threshold-ms":
EnsureNext(args, index);
thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--min-throughput":
EnsureNext(args, index);
minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--min-event-throughput":
EnsureNext(args, index);
minEventThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--max-allocated-mb":
EnsureNext(args, index);
maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--csv":
EnsureNext(args, index);
csvOut = args[++index];
break;
case "--json":
EnsureNext(args, index);
jsonOut = args[++index];
break;
case "--prometheus":
EnsureNext(args, index);
promOut = args[++index];
break;
case "--baseline":
EnsureNext(args, index);
baselinePath = Path.GetFullPath(args[++index]);
break;
case "--captured-at":
EnsureNext(args, index);
capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
break;
case "--commit":
EnsureNext(args, index);
commit = args[++index];
break;
case "--environment":
EnsureNext(args, index);
environment = args[++index];
break;
case "--regression-limit":
EnsureNext(args, index);
regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--help":
case "-h":
PrintUsage();
System.Environment.Exit(0);
break;
default:
throw new ArgumentException($"Unknown argument '{current}'.");
}
}
return new ProgramOptions(
configPath,
iterations,
thresholdMs,
minThroughput,
minEventThroughput,
maxAllocated,
csvOut,
jsonOut,
promOut,
baselinePath,
capturedAt,
commit,
environment,
regressionLimit);
}
private static string DefaultConfigPath()
{
var binaryDir = AppContext.BaseDirectory;
var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
return Path.Combine(benchRoot, "config.json");
}
private static string DefaultBaselinePath()
{
var binaryDir = AppContext.BaseDirectory;
var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
return Path.Combine(benchRoot, "baseline.csv");
}
private static void EnsureNext(string[] args, int index)
{
if (index + 1 >= args.Length)
{
throw new ArgumentException("Missing value for argument.");
}
}
private static void PrintUsage()
{
Console.WriteLine("Usage: linknotmerge-vex-bench [options]");
Console.WriteLine();
Console.WriteLine("Options:");
Console.WriteLine(" --config <path> Path to benchmark configuration JSON.");
Console.WriteLine(" --iterations <count> Override iteration count.");
Console.WriteLine(" --threshold-ms <value> Global latency threshold in milliseconds.");
Console.WriteLine(" --min-throughput <value> Observation throughput floor (observations/second).");
Console.WriteLine(" --min-event-throughput <value> Event emission throughput floor (events/second).");
Console.WriteLine(" --max-allocated-mb <value> Global allocation ceiling (MB).");
Console.WriteLine(" --csv <path> Write CSV results to path.");
Console.WriteLine(" --json <path> Write JSON results to path.");
Console.WriteLine(" --prometheus <path> Write Prometheus exposition metrics to path.");
Console.WriteLine(" --baseline <path> Baseline CSV path.");
Console.WriteLine(" --captured-at <iso8601> Timestamp to embed in JSON metadata.");
Console.WriteLine(" --commit <sha> Commit identifier for metadata.");
Console.WriteLine(" --environment <name> Environment label for metadata.");
Console.WriteLine(" --regression-limit <value> Regression multiplier (default 1.15).");
}
}
}
internal static class TablePrinter
{
public static void Print(IEnumerable<VexScenarioResult> results)
{
Console.WriteLine("Scenario | Observations | Statements | Events | Total(ms) | Correl(ms) | Insert(ms) | Obs k/s | Evnt k/s | Alloc(MB)");
Console.WriteLine("---------------------------- | ------------- | ---------- | ------- | ---------- | ---------- | ----------- | ------- | -------- | --------");
foreach (var row in results)
{
Console.WriteLine(string.Join(" | ", new[]
{
row.IdColumn,
row.ObservationsColumn,
row.StatementColumn,
row.EventColumn,
row.TotalMeanColumn,
row.CorrelationMeanColumn,
row.InsertMeanColumn,
row.ObservationThroughputColumn,
row.EventThroughputColumn,
row.AllocatedColumn,
}));
}
}
}
internal static class CsvWriter
{
public static void Write(string path, IEnumerable<VexScenarioResult> results)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
ArgumentNullException.ThrowIfNull(results);
var resolved = Path.GetFullPath(path);
var directory = Path.GetDirectoryName(resolved);
if (!string.IsNullOrEmpty(directory))
{
Directory.CreateDirectory(directory);
}
using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
using var writer = new StreamWriter(stream);
writer.WriteLine("scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb");
foreach (var result in results)
{
writer.Write(result.Id);
writer.Write(',');
writer.Write(result.Iterations.ToString(CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.ObservationCount.ToString(CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.StatementCount.ToString(CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.EventCount.ToString(CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.TotalStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.TotalStatistics.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.TotalStatistics.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.InsertStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.CorrelationStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.ObservationThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.ObservationThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.EventThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.EventThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(result.AllocationStatistics.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture));
writer.WriteLine();
}
}
}


@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Bench.LinkNotMerge.Vex.Tests")]


@@ -0,0 +1,151 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting;
internal static class BenchmarkJsonWriter
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull,
};
public static async Task WriteAsync(
string path,
BenchmarkJsonMetadata metadata,
IReadOnlyList<BenchmarkScenarioReport> reports,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
ArgumentNullException.ThrowIfNull(metadata);
ArgumentNullException.ThrowIfNull(reports);
var resolved = Path.GetFullPath(path);
var directory = Path.GetDirectoryName(resolved);
if (!string.IsNullOrEmpty(directory))
{
Directory.CreateDirectory(directory);
}
var document = new BenchmarkJsonDocument(
metadata.SchemaVersion,
metadata.CapturedAtUtc,
metadata.Commit,
metadata.Environment,
reports.Select(CreateScenario).ToArray());
await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
{
var baseline = report.Baseline;
return new BenchmarkJsonScenario(
report.Result.Id,
report.Result.Label,
report.Result.Iterations,
report.Result.ObservationCount,
report.Result.StatementCount,
report.Result.EventCount,
report.Result.TotalStatistics.MeanMs,
report.Result.TotalStatistics.P95Ms,
report.Result.TotalStatistics.MaxMs,
report.Result.InsertStatistics.MeanMs,
report.Result.CorrelationStatistics.MeanMs,
report.Result.ObservationThroughputStatistics.MeanPerSecond,
report.Result.ObservationThroughputStatistics.MinPerSecond,
report.Result.EventThroughputStatistics.MeanPerSecond,
report.Result.EventThroughputStatistics.MinPerSecond,
report.Result.AllocationStatistics.MaxAllocatedMb,
report.Result.ThresholdMs,
report.Result.MinObservationThroughputPerSecond,
report.Result.MinEventThroughputPerSecond,
report.Result.MaxAllocatedThresholdMb,
baseline is null
? null
: new BenchmarkJsonScenarioBaseline(
baseline.Iterations,
baseline.Observations,
baseline.Statements,
baseline.Events,
baseline.MeanTotalMs,
baseline.P95TotalMs,
baseline.MaxTotalMs,
baseline.MeanInsertMs,
baseline.MeanCorrelationMs,
baseline.MeanObservationThroughputPerSecond,
baseline.MinObservationThroughputPerSecond,
baseline.MeanEventThroughputPerSecond,
baseline.MinEventThroughputPerSecond,
baseline.MaxAllocatedMb),
new BenchmarkJsonScenarioRegression(
report.DurationRegressionRatio,
report.ObservationThroughputRegressionRatio,
report.EventThroughputRegressionRatio,
report.RegressionLimit,
report.RegressionBreached));
}
private sealed record BenchmarkJsonDocument(
string SchemaVersion,
DateTimeOffset CapturedAt,
string? Commit,
string? Environment,
IReadOnlyList<BenchmarkJsonScenario> Scenarios);
private sealed record BenchmarkJsonScenario(
string Id,
string Label,
int Iterations,
int Observations,
int Statements,
int Events,
double MeanTotalMs,
double P95TotalMs,
double MaxTotalMs,
double MeanInsertMs,
double MeanCorrelationMs,
double MeanObservationThroughputPerSecond,
double MinObservationThroughputPerSecond,
double MeanEventThroughputPerSecond,
double MinEventThroughputPerSecond,
double MaxAllocatedMb,
double? ThresholdMs,
double? MinObservationThroughputThresholdPerSecond,
double? MinEventThroughputThresholdPerSecond,
double? MaxAllocatedThresholdMb,
BenchmarkJsonScenarioBaseline? Baseline,
BenchmarkJsonScenarioRegression Regression);
private sealed record BenchmarkJsonScenarioBaseline(
int Iterations,
int Observations,
int Statements,
int Events,
double MeanTotalMs,
double P95TotalMs,
double MaxTotalMs,
double MeanInsertMs,
double MeanCorrelationMs,
double MeanObservationThroughputPerSecond,
double MinObservationThroughputPerSecond,
double MeanEventThroughputPerSecond,
double MinEventThroughputPerSecond,
double MaxAllocatedMb);
private sealed record BenchmarkJsonScenarioRegression(
double? DurationRatio,
double? ObservationThroughputRatio,
double? EventThroughputRatio,
double Limit,
bool Breached);
}
internal sealed record BenchmarkJsonMetadata(
string SchemaVersion,
DateTimeOffset CapturedAtUtc,
string? Commit,
string? Environment);
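
For orientation: the writer above serializes with web defaults (camelCase) and skips null fields, so the emitted document looks roughly like this — values illustrative, trimmed to a few fields per scenario:

```json
{
  "schemaVersion": "linknotmerge-vex-bench/1.0",
  "capturedAt": "2025-10-27T06:00:11+00:00",
  "commit": "96d52884e8",
  "scenarios": [
    {
      "id": "vex_ingest_baseline",
      "meanTotalMs": 620.5,
      "p95TotalMs": 700.1,
      "maxTotalMs": 820.9,
      "regression": { "limit": 1.15, "breached": false }
    }
  ]
}
```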


@@ -0,0 +1,89 @@
using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting;
internal sealed class BenchmarkScenarioReport
{
private const double DefaultRegressionLimit = 1.15d;
public BenchmarkScenarioReport(VexScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
{
Result = result ?? throw new ArgumentNullException(nameof(result));
Baseline = baseline;
RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs);
ObservationThroughputRegressionRatio = CalculateInverseRatio(result.ObservationThroughputStatistics.MinPerSecond, baseline?.MinObservationThroughputPerSecond);
EventThroughputRegressionRatio = CalculateInverseRatio(result.EventThroughputStatistics.MinPerSecond, baseline?.MinEventThroughputPerSecond);
}
public VexScenarioResult Result { get; }
public BaselineEntry? Baseline { get; }
public double RegressionLimit { get; }
public double? DurationRegressionRatio { get; }
public double? ObservationThroughputRegressionRatio { get; }
public double? EventThroughputRegressionRatio { get; }
public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit;
public bool ObservationThroughputRegressionBreached => ObservationThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;
public bool EventThroughputRegressionBreached => EventThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit;
public bool RegressionBreached => DurationRegressionBreached || ObservationThroughputRegressionBreached || EventThroughputRegressionBreached;
public IEnumerable<string> BuildRegressionFailureMessages()
{
if (Baseline is null)
{
yield break;
}
if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
{
var delta = (durationRatio - 1d) * 100d;
yield return $"{Result.Id} exceeded max duration budget: {Result.TotalStatistics.MaxMs:F2} ms vs baseline {Baseline.MaxTotalMs:F2} ms (+{delta:F1}%).";
}
if (ObservationThroughputRegressionBreached && ObservationThroughputRegressionRatio is { } obsRatio)
{
var delta = (obsRatio - 1d) * 100d;
yield return $"{Result.Id} observation throughput regressed: min {Result.ObservationThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinObservationThroughputPerSecond:N0} obs/s (-{delta:F1}%).";
}
if (EventThroughputRegressionBreached && EventThroughputRegressionRatio is { } evtRatio)
{
var delta = (evtRatio - 1d) * 100d;
yield return $"{Result.Id} event throughput regressed: min {Result.EventThroughputStatistics.MinPerSecond:N0} events/s vs baseline {Baseline.MinEventThroughputPerSecond:N0} events/s (-{delta:F1}%).";
}
}
private static double? CalculateRatio(double current, double? baseline)
{
if (!baseline.HasValue || baseline.Value <= 0d)
{
return null;
}
return current / baseline.Value;
}
private static double? CalculateInverseRatio(double current, double? baseline)
{
if (!baseline.HasValue || baseline.Value <= 0d)
{
return null;
}
if (current <= 0d)
{
return double.PositiveInfinity;
}
return baseline.Value / current;
}
}
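
Concretely, with the fixture values from the report tests earlier in this diff: a baseline min event throughput of 3,800 events/s against a current min of 3,200 events/s gives an inverse ratio of 3800 / 3200 ≈ 1.19, breaching the 1.1 limit used there; the duration ratio 750 / 580 ≈ 1.29 breaches it as well.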


@@ -0,0 +1,94 @@
using System.Globalization;
using System.Text;
namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting;
internal static class PrometheusWriter
{
public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
ArgumentNullException.ThrowIfNull(reports);
var resolved = Path.GetFullPath(path);
var directory = Path.GetDirectoryName(resolved);
if (!string.IsNullOrEmpty(directory))
{
Directory.CreateDirectory(directory);
}
var builder = new StringBuilder();
builder.AppendLine("# HELP linknotmerge_vex_bench_total_ms Link-Not-Merge VEX benchmark total duration (milliseconds).");
builder.AppendLine("# TYPE linknotmerge_vex_bench_total_ms gauge");
builder.AppendLine("# HELP linknotmerge_vex_bench_throughput_per_sec Link-Not-Merge VEX benchmark observation throughput (observations per second).");
builder.AppendLine("# TYPE linknotmerge_vex_bench_throughput_per_sec gauge");
builder.AppendLine("# HELP linknotmerge_vex_bench_event_throughput_per_sec Link-Not-Merge VEX benchmark event throughput (events per second).");
builder.AppendLine("# TYPE linknotmerge_vex_bench_event_throughput_per_sec gauge");
builder.AppendLine("# HELP linknotmerge_vex_bench_allocated_mb Link-Not-Merge VEX benchmark max allocations (megabytes).");
builder.AppendLine("# TYPE linknotmerge_vex_bench_allocated_mb gauge");
foreach (var report in reports)
{
var scenario = Escape(report.Result.Id);
AppendMetric(builder, "linknotmerge_vex_bench_mean_total_ms", scenario, report.Result.TotalStatistics.MeanMs);
AppendMetric(builder, "linknotmerge_vex_bench_p95_total_ms", scenario, report.Result.TotalStatistics.P95Ms);
AppendMetric(builder, "linknotmerge_vex_bench_max_total_ms", scenario, report.Result.TotalStatistics.MaxMs);
AppendMetric(builder, "linknotmerge_vex_bench_threshold_ms", scenario, report.Result.ThresholdMs);
AppendMetric(builder, "linknotmerge_vex_bench_mean_observation_throughput_per_sec", scenario, report.Result.ObservationThroughputStatistics.MeanPerSecond);
AppendMetric(builder, "linknotmerge_vex_bench_min_observation_throughput_per_sec", scenario, report.Result.ObservationThroughputStatistics.MinPerSecond);
AppendMetric(builder, "linknotmerge_vex_bench_observation_throughput_floor_per_sec", scenario, report.Result.MinObservationThroughputPerSecond);
AppendMetric(builder, "linknotmerge_vex_bench_mean_event_throughput_per_sec", scenario, report.Result.EventThroughputStatistics.MeanPerSecond);
AppendMetric(builder, "linknotmerge_vex_bench_min_event_throughput_per_sec", scenario, report.Result.EventThroughputStatistics.MinPerSecond);
AppendMetric(builder, "linknotmerge_vex_bench_event_throughput_floor_per_sec", scenario, report.Result.MinEventThroughputPerSecond);
AppendMetric(builder, "linknotmerge_vex_bench_max_allocated_mb", scenario, report.Result.AllocationStatistics.MaxAllocatedMb);
AppendMetric(builder, "linknotmerge_vex_bench_max_allocated_threshold_mb", scenario, report.Result.MaxAllocatedThresholdMb);
if (report.Baseline is { } baseline)
{
AppendMetric(builder, "linknotmerge_vex_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs);
AppendMetric(builder, "linknotmerge_vex_bench_baseline_min_observation_throughput_per_sec", scenario, baseline.MinObservationThroughputPerSecond);
AppendMetric(builder, "linknotmerge_vex_bench_baseline_min_event_throughput_per_sec", scenario, baseline.MinEventThroughputPerSecond);
}
if (report.DurationRegressionRatio is { } durationRatio)
{
AppendMetric(builder, "linknotmerge_vex_bench_duration_regression_ratio", scenario, durationRatio);
}
if (report.ObservationThroughputRegressionRatio is { } obsRatio)
{
AppendMetric(builder, "linknotmerge_vex_bench_observation_regression_ratio", scenario, obsRatio);
}
if (report.EventThroughputRegressionRatio is { } evtRatio)
{
AppendMetric(builder, "linknotmerge_vex_bench_event_regression_ratio", scenario, evtRatio);
}
AppendMetric(builder, "linknotmerge_vex_bench_regression_limit", scenario, report.RegressionLimit);
AppendMetric(builder, "linknotmerge_vex_bench_regression_breached", scenario, report.RegressionBreached ? 1 : 0);
}
File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
}
private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
{
if (!value.HasValue)
{
return;
}
builder.Append(metric);
builder.Append("{scenario=\"");
builder.Append(scenario);
builder.Append("\"} ");
builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture));
}
private static string Escape(string value) =>
value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}


@@ -0,0 +1,84 @@
namespace StellaOps.Bench.LinkNotMerge.Vex;
internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
public static DurationStatistics From(IReadOnlyList<double> values)
{
if (values.Count == 0)
{
return new DurationStatistics(0, 0, 0);
}
var sorted = values.ToArray();
Array.Sort(sorted);
var total = 0d;
foreach (var value in values)
{
total += value;
}
var mean = total / values.Count;
var p95 = Percentile(sorted, 95);
var max = sorted[^1];
return new DurationStatistics(mean, p95, max);
}
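// Worked example of the interpolation below: for sorted [600, 700, 750],
// the p95 rank is 0.95 * (3 - 1) = 1.9, so the result is
// 700 + 0.9 * (750 - 700) = 745.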
private static double Percentile(IReadOnlyList<double> sorted, double percentile)
{
if (sorted.Count == 0)
{
return 0;
}
var rank = (percentile / 100d) * (sorted.Count - 1);
var lower = (int)Math.Floor(rank);
var upper = (int)Math.Ceiling(rank);
var weight = rank - lower;
if (upper >= sorted.Count)
{
return sorted[lower];
}
return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
}
}
internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
public static ThroughputStatistics From(IReadOnlyList<double> values)
{
if (values.Count == 0)
{
return new ThroughputStatistics(0, 0);
}
var total = 0d;
var min = double.MaxValue;
foreach (var value in values)
{
total += value;
min = Math.Min(min, value);
}
var mean = total / values.Count;
return new ThroughputStatistics(mean, min);
}
}
internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
public static AllocationStatistics From(IReadOnlyList<double> values)
{
var max = 0d;
foreach (var value in values)
{
max = Math.Max(max, value);
}
return new AllocationStatistics(max);
}
}


@@ -0,0 +1,16 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MongoDB.Driver" Version="3.5.0" />
<PackageReference Include="EphemeralMongo" Version="3.0.0" />
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
</ItemGroup>
</Project>


@@ -0,0 +1,166 @@
using MongoDB.Bson;
namespace StellaOps.Bench.LinkNotMerge.Vex;
internal sealed class VexLinksetAggregator
{
public VexAggregationResult Correlate(IEnumerable<BsonDocument> documents)
{
ArgumentNullException.ThrowIfNull(documents);
var groups = new Dictionary<string, VexAccumulator>(StringComparer.Ordinal);
var statementsSeen = 0;
foreach (var document in documents)
{
var tenant = document.GetValue("tenant", "unknown").AsString;
var linksetValue = document.GetValue("linkset", new BsonDocument());
var linkset = linksetValue.IsBsonDocument ? linksetValue.AsBsonDocument : new BsonDocument();
var aliases = linkset.GetValue("aliases", new BsonArray()).AsBsonArray;
var statementsValue = document.GetValue("statements", new BsonArray());
var statements = statementsValue.IsBsonArray ? statementsValue.AsBsonArray : new BsonArray();
foreach (var statementValue in statements)
{
if (!statementValue.IsBsonDocument)
{
continue;
}
statementsSeen++;
var statement = statementValue.AsBsonDocument;
var status = statement.GetValue("status", "unknown").AsString;
var justification = statement.GetValue("justification", BsonNull.Value);
var lastUpdated = statement.GetValue("last_updated", BsonNull.Value);
var productValue = statement.GetValue("product", new BsonDocument());
var product = productValue.IsBsonDocument ? productValue.AsBsonDocument : new BsonDocument();
var productKey = product.GetValue("purl", "unknown").AsString;
foreach (var aliasValue in aliases)
{
if (!aliasValue.IsString)
{
continue;
}
var alias = aliasValue.AsString;
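// Composite group key "tenant|alias|purl", built allocation-free via string.Create.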
var key = string.Create(alias.Length + tenant.Length + productKey.Length + 2, (tenant, alias, productKey), static (span, data) =>
{
var (tenantValue, aliasValue, productValue) = data;
var offset = 0;
tenantValue.AsSpan().CopyTo(span);
offset += tenantValue.Length;
span[offset++] = '|';
aliasValue.AsSpan().CopyTo(span[offset..]);
offset += aliasValue.Length;
span[offset++] = '|';
productValue.AsSpan().CopyTo(span[offset..]);
});
if (!groups.TryGetValue(key, out var accumulator))
{
accumulator = new VexAccumulator(tenant, alias, productKey);
groups[key] = accumulator;
}
accumulator.AddStatement(status, justification, lastUpdated);
}
}
}
var eventDocuments = new List<BsonDocument>(groups.Count);
foreach (var accumulator in groups.Values)
{
if (accumulator.ShouldEmitEvent)
{
eventDocuments.Add(accumulator.ToEvent());
}
}
return new VexAggregationResult(
LinksetCount: groups.Count,
StatementCount: statementsSeen,
EventCount: eventDocuments.Count,
EventDocuments: eventDocuments);
}
private sealed class VexAccumulator
{
private readonly Dictionary<string, int> _statusCounts = new(StringComparer.Ordinal);
private readonly HashSet<string> _justifications = new(StringComparer.Ordinal);
private readonly string _tenant;
private readonly string _alias;
private readonly string _product;
private DateTime? _latest;
public VexAccumulator(string tenant, string alias, string product)
{
_tenant = tenant;
_alias = alias;
_product = product;
}
public void AddStatement(string status, BsonValue justification, BsonValue updatedAt)
{
if (!_statusCounts.TryAdd(status, 1))
{
_statusCounts[status]++;
}
if (justification.IsString)
{
_justifications.Add(justification.AsString);
}
if (updatedAt.IsValidDateTime)
{
var value = updatedAt.ToUniversalTime();
if (!_latest.HasValue || value > _latest)
{
_latest = value;
}
}
}
public bool ShouldEmitEvent
{
get
{
if (_statusCounts.TryGetValue("affected", out var affected) && affected > 0)
{
return true;
}
if (_statusCounts.TryGetValue("under_investigation", out var investigating) && investigating > 0)
{
return true;
}
return false;
}
}
public BsonDocument ToEvent()
{
var payload = new BsonDocument
{
["tenant"] = _tenant,
["alias"] = _alias,
["product"] = _product,
["statuses"] = new BsonDocument(_statusCounts.Select(kvp => new BsonElement(kvp.Key, kvp.Value))),
["justifications"] = new BsonArray(_justifications.Select(justification => justification)),
["last_updated"] = _latest.HasValue ? _latest.Value : (BsonValue)BsonNull.Value,
};
return payload;
}
}
}
internal sealed record VexAggregationResult(
int LinksetCount,
int StatementCount,
int EventCount,
IReadOnlyList<BsonDocument> EventDocuments);


@@ -0,0 +1,252 @@
using System.Collections.Immutable;
using System.Security.Cryptography;
using MongoDB.Bson;
namespace StellaOps.Bench.LinkNotMerge.Vex;
internal static class VexObservationGenerator
{
private static readonly ImmutableArray<string> StatusPool = ImmutableArray.Create(
"affected",
"not_affected",
"under_investigation");
private static readonly ImmutableArray<string> JustificationPool = ImmutableArray.Create(
"exploitation_mitigated",
"component_not_present",
"vulnerable_code_not_present",
"vulnerable_code_not_in_execute_path");
public static IReadOnlyList<VexObservationSeed> Generate(VexScenarioConfig config)
{
ArgumentNullException.ThrowIfNull(config);
var observationCount = config.ResolveObservationCount();
var aliasGroups = config.ResolveAliasGroups();
var statementsPerObservation = config.ResolveStatementsPerObservation();
var tenantCount = config.ResolveTenantCount();
var productsPerObservation = config.ResolveProductsPerObservation();
var seed = config.ResolveSeed();
var seeds = new VexObservationSeed[observationCount];
var random = new Random(seed);
var baseTime = new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero);
for (var index = 0; index < observationCount; index++)
{
var tenantIndex = index % tenantCount;
var tenant = $"tenant-{tenantIndex:D2}";
var group = index % aliasGroups;
var revision = index / aliasGroups;
var vulnerabilityAlias = $"CVE-2025-{group:D4}";
var upstreamId = $"VEX-{group:D4}-{revision:D3}";
var observationId = $"{tenant}:vex:{group:D5}:{revision:D6}";
var fetchedAt = baseTime.AddMinutes(revision);
var receivedAt = fetchedAt.AddSeconds(2);
var documentVersion = fetchedAt.AddSeconds(15).ToString("O");
var products = CreateProducts(group, revision, productsPerObservation);
var statements = CreateStatements(vulnerabilityAlias, products, statementsPerObservation, random, fetchedAt);
var rawPayload = CreateRawPayload(upstreamId, vulnerabilityAlias, statements);
var contentHash = ComputeContentHash(rawPayload, tenant, group, revision);
var aliases = ImmutableArray.Create(vulnerabilityAlias, $"GHSA-{group:D4}-{(char)('a' + revision % 26)}{(char)('a' + revision % 26)}"); // cast to char so the suffix renders as letters rather than their numeric codes
var references = ImmutableArray.Create(
new VexReference("advisory", $"https://vendor.example/advisories/{vulnerabilityAlias.ToLowerInvariant()}"),
new VexReference("fix", $"https://vendor.example/patch/{vulnerabilityAlias.ToLowerInvariant()}"));
seeds[index] = new VexObservationSeed(
ObservationId: observationId,
Tenant: tenant,
Vendor: "excititor-bench",
Stream: "simulated",
Api: $"https://bench.stella/vex/{group:D4}/{revision:D3}",
CollectorVersion: "1.0.0-bench",
UpstreamId: upstreamId,
DocumentVersion: documentVersion,
FetchedAt: fetchedAt,
ReceivedAt: receivedAt,
ContentHash: contentHash,
VulnerabilityAlias: vulnerabilityAlias,
Aliases: aliases,
Products: products,
Statements: statements,
References: references,
ContentFormat: "CycloneDX-VEX",
SpecVersion: "1.4",
RawPayload: rawPayload);
}
return seeds;
}
private static ImmutableArray<VexProduct> CreateProducts(int group, int revision, int count)
{
var builder = ImmutableArray.CreateBuilder<VexProduct>(count);
for (var index = 0; index < count; index++)
{
var purl = $"pkg:generic/stella/product-{group:D4}-{index}@{1 + revision % 5}.{index + 1}.{revision % 9}";
builder.Add(new VexProduct(purl, $"component-{group % 30:D2}", $"namespace-{group % 10:D2}"));
}
return builder.MoveToImmutable();
}
private static ImmutableArray<BsonDocument> CreateStatements(
string vulnerabilityAlias,
ImmutableArray<VexProduct> products,
int statementsPerObservation,
Random random,
DateTimeOffset baseTime)
{
var builder = ImmutableArray.CreateBuilder<BsonDocument>(statementsPerObservation);
for (var index = 0; index < statementsPerObservation; index++)
{
var statusIndex = random.Next(StatusPool.Length);
var status = StatusPool[statusIndex];
var justification = JustificationPool[random.Next(JustificationPool.Length)];
var product = products[index % products.Length];
var statementId = $"stmt-{vulnerabilityAlias}-{index:D2}";
var document = new BsonDocument
{
["statement_id"] = statementId,
["vulnerability_alias"] = vulnerabilityAlias,
["product"] = new BsonDocument
{
["purl"] = product.Purl,
["component"] = product.Component,
["namespace"] = product.Namespace,
},
["status"] = status,
["justification"] = justification,
["impact"] = status == "affected" ? "high" : "none",
["last_updated"] = baseTime.AddMinutes(index).UtcDateTime,
};
builder.Add(document);
}
return builder.MoveToImmutable();
}
private static BsonDocument CreateRawPayload(string upstreamId, string vulnerabilityAlias, ImmutableArray<BsonDocument> statements)
{
var doc = new BsonDocument
{
["documentId"] = upstreamId,
["title"] = $"Simulated VEX report {upstreamId}",
["summary"] = $"Synthetic VEX payload for {vulnerabilityAlias}.",
["statements"] = new BsonArray(statements),
};
return doc;
}
private static string ComputeContentHash(BsonDocument rawPayload, string tenant, int group, int revision)
{
using var sha256 = SHA256.Create();
var seed = $"{tenant}|{group}|{revision}";
var rawBytes = rawPayload.ToBson();
var seedBytes = System.Text.Encoding.UTF8.GetBytes(seed);
var combined = new byte[rawBytes.Length + seedBytes.Length];
Buffer.BlockCopy(rawBytes, 0, combined, 0, rawBytes.Length);
Buffer.BlockCopy(seedBytes, 0, combined, rawBytes.Length, seedBytes.Length);
var hash = sha256.ComputeHash(combined);
return $"sha256:{Convert.ToHexString(hash)}";
}
}
internal sealed record VexObservationSeed(
string ObservationId,
string Tenant,
string Vendor,
string Stream,
string Api,
string CollectorVersion,
string UpstreamId,
string DocumentVersion,
DateTimeOffset FetchedAt,
DateTimeOffset ReceivedAt,
string ContentHash,
string VulnerabilityAlias,
ImmutableArray<string> Aliases,
ImmutableArray<VexProduct> Products,
ImmutableArray<BsonDocument> Statements,
ImmutableArray<VexReference> References,
string ContentFormat,
string SpecVersion,
BsonDocument RawPayload)
{
public BsonDocument ToBsonDocument()
{
var aliases = new BsonArray(Aliases);
var statements = new BsonArray(Statements);
var productsArray = new BsonArray(Products.Select(product => new BsonDocument
{
["purl"] = product.Purl,
["component"] = product.Component,
["namespace"] = product.Namespace,
}));
var references = new BsonArray(References.Select(reference => new BsonDocument
{
["type"] = reference.Type,
["url"] = reference.Url,
}));
var document = new BsonDocument
{
["_id"] = ObservationId,
["tenant"] = Tenant,
["source"] = new BsonDocument
{
["vendor"] = Vendor,
["stream"] = Stream,
["api"] = Api,
["collector_version"] = CollectorVersion,
},
["upstream"] = new BsonDocument
{
["upstream_id"] = UpstreamId,
["document_version"] = DocumentVersion,
["fetched_at"] = FetchedAt.UtcDateTime,
["received_at"] = ReceivedAt.UtcDateTime,
["content_hash"] = ContentHash,
["signature"] = new BsonDocument
{
["present"] = false,
["format"] = BsonNull.Value,
["key_id"] = BsonNull.Value,
["signature"] = BsonNull.Value,
},
},
["content"] = new BsonDocument
{
["format"] = ContentFormat,
["spec_version"] = SpecVersion,
["raw"] = RawPayload,
},
["identifiers"] = new BsonDocument
{
["aliases"] = aliases,
["primary"] = VulnerabilityAlias,
},
["statements"] = statements,
["linkset"] = new BsonDocument
{
["aliases"] = aliases,
["products"] = productsArray,
["references"] = references,
["reconciled_from"] = new BsonArray { "/statements" },
},
["supersedes"] = BsonNull.Value,
};
return document;
}
}
internal sealed record VexProduct(string Purl, string Component, string Namespace);
internal sealed record VexReference(string Type, string Url);


@@ -0,0 +1,183 @@
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.LinkNotMerge.Vex;
internal sealed record VexBenchmarkConfig(
double? ThresholdMs,
double? MinThroughputPerSecond,
double? MinEventThroughputPerSecond,
double? MaxAllocatedMb,
int? Iterations,
IReadOnlyList<VexScenarioConfig> Scenarios)
{
public static async Task<VexBenchmarkConfig> LoadAsync(string path)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
var resolved = Path.GetFullPath(path);
if (!File.Exists(resolved))
{
throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
}
await using var stream = File.OpenRead(resolved);
var model = await JsonSerializer.DeserializeAsync<VexBenchmarkConfigModel>(
stream,
new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true,
ReadCommentHandling = JsonCommentHandling.Skip,
AllowTrailingCommas = true,
}).ConfigureAwait(false);
if (model is null)
{
throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
}
if (model.Scenarios.Count == 0)
{
throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
}
foreach (var scenario in model.Scenarios)
{
scenario.Validate();
}
return new VexBenchmarkConfig(
model.ThresholdMs,
model.MinThroughputPerSecond,
model.MinEventThroughputPerSecond,
model.MaxAllocatedMb,
model.Iterations,
model.Scenarios);
}
private sealed class VexBenchmarkConfigModel
{
[JsonPropertyName("thresholdMs")]
public double? ThresholdMs { get; init; }
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("minEventThroughputPerSecond")]
public double? MinEventThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }
[JsonPropertyName("iterations")]
public int? Iterations { get; init; }
[JsonPropertyName("scenarios")]
public List<VexScenarioConfig> Scenarios { get; init; } = new();
}
}
internal sealed class VexScenarioConfig
{
private const int DefaultObservationCount = 4_000;
private const int DefaultAliasGroups = 400;
private const int DefaultStatementsPerObservation = 6;
private const int DefaultProductsPerObservation = 3;
private const int DefaultTenants = 3;
private const int DefaultBatchSize = 250;
private const int DefaultSeed = 520_025;
[JsonPropertyName("id")]
public string? Id { get; init; }
[JsonPropertyName("label")]
public string? Label { get; init; }
[JsonPropertyName("observations")]
public int? Observations { get; init; }
[JsonPropertyName("aliasGroups")]
public int? AliasGroups { get; init; }
[JsonPropertyName("statementsPerObservation")]
public int? StatementsPerObservation { get; init; }
[JsonPropertyName("productsPerObservation")]
public int? ProductsPerObservation { get; init; }
[JsonPropertyName("tenants")]
public int? Tenants { get; init; }
[JsonPropertyName("batchSize")]
public int? BatchSize { get; init; }
[JsonPropertyName("seed")]
public int? Seed { get; init; }
[JsonPropertyName("iterations")]
public int? Iterations { get; init; }
[JsonPropertyName("thresholdMs")]
public double? ThresholdMs { get; init; }
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("minEventThroughputPerSecond")]
public double? MinEventThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }
public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "vex" : Id!.Trim();
public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim();
public int ResolveObservationCount() => Observations is > 0 ? Observations.Value : DefaultObservationCount;
public int ResolveAliasGroups() => AliasGroups is > 0 ? AliasGroups.Value : DefaultAliasGroups;
public int ResolveStatementsPerObservation() => StatementsPerObservation is > 0 ? StatementsPerObservation.Value : DefaultStatementsPerObservation;
public int ResolveProductsPerObservation() => ProductsPerObservation is > 0 ? ProductsPerObservation.Value : DefaultProductsPerObservation;
public int ResolveTenantCount() => Tenants is > 0 ? Tenants.Value : DefaultTenants;
public int ResolveBatchSize() => BatchSize is > 0 ? BatchSize.Value : DefaultBatchSize;
public int ResolveSeed() => Seed is > 0 ? Seed.Value : DefaultSeed;
public int ResolveIterations(int? defaultIterations)
{
var iterations = Iterations ?? defaultIterations ?? 3;
if (iterations <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires iterations > 0.");
}
return iterations;
}
public void Validate()
{
if (ResolveObservationCount() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires observations > 0.");
}
if (ResolveAliasGroups() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires aliasGroups > 0.");
}
if (ResolveStatementsPerObservation() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires statementsPerObservation > 0.");
}
if (ResolveProductsPerObservation() <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires productsPerObservation > 0.");
}
}
}


@@ -0,0 +1,14 @@
namespace StellaOps.Bench.LinkNotMerge.Vex;
internal sealed record VexScenarioExecutionResult(
IReadOnlyList<double> TotalDurationsMs,
IReadOnlyList<double> InsertDurationsMs,
IReadOnlyList<double> CorrelationDurationsMs,
IReadOnlyList<double> AllocatedMb,
IReadOnlyList<double> ObservationThroughputsPerSecond,
IReadOnlyList<double> EventThroughputsPerSecond,
int ObservationCount,
int AliasGroups,
int StatementCount,
int EventCount,
VexAggregationResult AggregationResult);


@@ -0,0 +1,43 @@
using System.Globalization;
namespace StellaOps.Bench.LinkNotMerge.Vex;
internal sealed record VexScenarioResult(
string Id,
string Label,
int Iterations,
int ObservationCount,
int AliasGroups,
int StatementCount,
int EventCount,
DurationStatistics TotalStatistics,
DurationStatistics InsertStatistics,
DurationStatistics CorrelationStatistics,
ThroughputStatistics ObservationThroughputStatistics,
ThroughputStatistics EventThroughputStatistics,
AllocationStatistics AllocationStatistics,
double? ThresholdMs,
double? MinObservationThroughputPerSecond,
double? MinEventThroughputPerSecond,
double? MaxAllocatedThresholdMb)
{
public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];
public string ObservationsColumn => ObservationCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);
public string StatementColumn => StatementCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(10);
public string EventColumn => EventCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(8);
public string TotalMeanColumn => TotalStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
public string CorrelationMeanColumn => CorrelationStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
public string InsertMeanColumn => InsertStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
public string ObservationThroughputColumn => (ObservationThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
public string EventThroughputColumn => (EventThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
public string AllocatedColumn => AllocationStatistics.MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}

View File

@@ -0,0 +1,138 @@
using System.Diagnostics;
using EphemeralMongo;
using MongoDB.Bson;
using MongoDB.Driver;

namespace StellaOps.Bench.LinkNotMerge.Vex;

internal sealed class VexScenarioRunner
{
private readonly VexScenarioConfig _config;
    private readonly IReadOnlyList<VexObservationSeed> _seeds;

    public VexScenarioRunner(VexScenarioConfig config)
{
_config = config ?? throw new ArgumentNullException(nameof(config));
_seeds = VexObservationGenerator.Generate(config);
}
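
    // Runs the configured number of iterations, timing the insert and correlation
    // phases separately and capturing allocation and throughput figures per pass.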
public VexScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
{
if (iterations <= 0)
{
throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
}
var totalDurations = new double[iterations];
var insertDurations = new double[iterations];
var correlationDurations = new double[iterations];
var allocated = new double[iterations];
var observationThroughputs = new double[iterations];
var eventThroughputs = new double[iterations];
VexAggregationResult lastAggregation = new(0, 0, 0, Array.Empty<BsonDocument>());
for (var iteration = 0; iteration < iterations; iteration++)
{
cancellationToken.ThrowIfCancellationRequested();
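
            // A fresh ephemeral mongod per iteration keeps collections, caches, and
            // indexes from earlier passes out of the measurements.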
using var runner = MongoRunner.Run(new MongoRunnerOptions
{
UseSingleNodeReplicaSet = false,
});
var client = new MongoClient(runner.ConnectionString);
var database = client.GetDatabase("linknotmerge_vex_bench");
var collection = database.GetCollection<BsonDocument>("vex_observations");
CreateIndexes(collection, cancellationToken);
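
            // Total managed allocations are sampled before and after the run; the
            // delta is reported below in MB for the allocation ceiling check.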
var beforeAllocated = GC.GetTotalAllocatedBytes();
var insertStopwatch = Stopwatch.StartNew();
InsertObservations(collection, _seeds, _config.ResolveBatchSize(), cancellationToken);
insertStopwatch.Stop();
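
            // Correlation phase: read back the tenant/statements/linkset projection
            // and aggregate it into linkset events in memory.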
var correlationStopwatch = Stopwatch.StartNew();
var documents = collection
.Find(FilterDefinition<BsonDocument>.Empty)
.Project(Builders<BsonDocument>.Projection
.Include("tenant")
.Include("statements")
.Include("linkset"))
.ToList(cancellationToken);
var aggregator = new VexLinksetAggregator();
lastAggregation = aggregator.Correlate(documents);
correlationStopwatch.Stop();
var totalElapsed = insertStopwatch.Elapsed + correlationStopwatch.Elapsed;
var afterAllocated = GC.GetTotalAllocatedBytes();
totalDurations[iteration] = totalElapsed.TotalMilliseconds;
insertDurations[iteration] = insertStopwatch.Elapsed.TotalMilliseconds;
correlationDurations[iteration] = correlationStopwatch.Elapsed.TotalMilliseconds;
allocated[iteration] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);
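
            // Clamp the denominators so a near-zero elapsed time or an empty event
            // set cannot produce division-by-zero or runaway throughput values.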
var totalSeconds = Math.Max(totalElapsed.TotalSeconds, 0.0001d);
observationThroughputs[iteration] = _seeds.Count / totalSeconds;
var eventSeconds = Math.Max(correlationStopwatch.Elapsed.TotalSeconds, 0.0001d);
var eventCount = Math.Max(lastAggregation.EventCount, 1);
eventThroughputs[iteration] = eventCount / eventSeconds;
}
return new VexScenarioExecutionResult(
totalDurations,
insertDurations,
correlationDurations,
allocated,
observationThroughputs,
eventThroughputs,
ObservationCount: _seeds.Count,
AliasGroups: _config.ResolveAliasGroups(),
StatementCount: lastAggregation.StatementCount,
EventCount: lastAggregation.EventCount,
AggregationResult: lastAggregation);
}
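
    // Writes the seed documents in fixed-size, unordered batches; document validation
    // is bypassed because the generator already emits well-formed BSON.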
private static void InsertObservations(
IMongoCollection<BsonDocument> collection,
IReadOnlyList<VexObservationSeed> seeds,
int batchSize,
CancellationToken cancellationToken)
{
for (var offset = 0; offset < seeds.Count; offset += batchSize)
{
cancellationToken.ThrowIfCancellationRequested();
var remaining = Math.Min(batchSize, seeds.Count - offset);
var batch = new List<BsonDocument>(remaining);
for (var index = 0; index < remaining; index++)
{
batch.Add(seeds[offset + index].ToBsonDocument());
}
collection.InsertMany(batch, new InsertManyOptions
{
IsOrdered = false,
BypassDocumentValidation = true,
}, cancellationToken);
}
}
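
    // Creates the tenant + linkset.aliases compound index before ingest so insert
    // timings include index maintenance.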
private static void CreateIndexes(IMongoCollection<BsonDocument> collection, CancellationToken cancellationToken)
{
var indexKeys = Builders<BsonDocument>.IndexKeys
.Ascending("tenant")
.Ascending("linkset.aliases");
try
{
collection.Indexes.CreateOne(new CreateIndexModel<BsonDocument>(indexKeys), cancellationToken: cancellationToken);
}
catch
{
            // Index creation is best-effort; the benchmark still runs without it.
}
}
}

View File

@@ -0,0 +1,4 @@
scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb
vex_ingest_baseline,5,4000,24000,21326,842.8191,1319.3038,1432.7675,346.7277,496.0915,5349.8940,2791.7998,48942.4901,24653.0556,138.6365
vex_ingest_medium,5,8000,64000,56720,1525.9929,1706.8900,1748.9056,533.3378,992.6552,5274.5883,4574.2892,57654.9190,48531.7353,326.8638
vex_ingest_high,5,12000,120000,106910,2988.5094,3422.1728,3438.9364,903.3927,2085.1167,4066.2300,3489.4510,52456.9493,42358.0556,583.9903

View File

@@ -0,0 +1,54 @@
{
"thresholdMs": 4200,
"minThroughputPerSecond": 1800,
"minEventThroughputPerSecond": 2000,
"maxAllocatedMb": 800,
"iterations": 5,
"scenarios": [
{
"id": "vex_ingest_baseline",
"label": "4k observations, 400 aliases",
"observations": 4000,
"aliasGroups": 400,
"statementsPerObservation": 6,
"productsPerObservation": 3,
"tenants": 3,
"batchSize": 200,
"seed": 420020,
"thresholdMs": 2300,
"minThroughputPerSecond": 1800,
"minEventThroughputPerSecond": 2000,
"maxAllocatedMb": 220
},
{
"id": "vex_ingest_medium",
"label": "8k observations, 700 aliases",
"observations": 8000,
"aliasGroups": 700,
"statementsPerObservation": 8,
"productsPerObservation": 4,
"tenants": 5,
"batchSize": 300,
"seed": 520020,
"thresholdMs": 3200,
"minThroughputPerSecond": 2200,
"minEventThroughputPerSecond": 2500,
"maxAllocatedMb": 400
},
{
"id": "vex_ingest_high",
"label": "12k observations, 1100 aliases",
"observations": 12000,
"aliasGroups": 1100,
"statementsPerObservation": 10,
"productsPerObservation": 5,
"tenants": 7,
"batchSize": 400,
"seed": 620020,
"thresholdMs": 4200,
"minThroughputPerSecond": 2200,
"minEventThroughputPerSecond": 2500,
"maxAllocatedMb": 700
}
]
}