Restructure solution layout by module

This commit is contained in:
master
2025-10-28 15:10:40 +02:00
parent 95daa159c4
commit d870da18ce
4103 changed files with 192899 additions and 187024 deletions

View File

@@ -0,0 +1,25 @@
# Notify Dispatch Bench

Synthetic workload measuring rule evaluation and channel dispatch fan-out under varying rule densities.

## Scenarios

`config.json` defines three density profiles (5%, 20%, and 40% match rates). Each scenario synthesizes deterministic tenants, rules, and delivery actions to measure:
- Latency (mean/p95/max milliseconds)
- Throughput (deliveries per second)
- Managed heap allocations (megabytes)
- Match fan-out statistics (matches and deliveries per event)

## Running locally

```bash
dotnet run \
--project src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/StellaOps.Bench.Notify.csproj \
-- \
--csv out/notify-bench.csv \
--json out/notify-bench.json \
--prometheus out/notify-bench.prom
```

The benchmark exits non-zero if latency exceeds the configured thresholds, throughput drops below the floor, allocations exceed the ceiling, or results regress beyond the allowed limit relative to `baseline.csv`.
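
To gate a run against a recorded baseline, point the benchmark at a baseline CSV and optionally tighten the regression multiplier. The flags below are defined in `Program.cs`; the baseline path is illustrative (by default the tool picks up the `baseline.csv` stored next to `config.json`):

```bash
dotnet run \
  --project src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/StellaOps.Bench.Notify.csproj \
  -- \
  --baseline src/Bench/StellaOps.Bench/Notify/baseline.csv \
  --regression-limit 1.15 \
  --csv out/notify-bench.csv
```

`--regression-limit` defaults to 1.15, meaning a scenario fails when its max duration grows, or its min throughput shrinks, by more than that ratio against the baseline entry.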

View File

@@ -0,0 +1,38 @@
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.Notify.Baseline;
using Xunit;
namespace StellaOps.Bench.Notify.Tests;
public sealed class BaselineLoaderTests
{
[Fact]
public async Task LoadAsync_ReadsBaselineEntries()
{
var path = Path.GetTempFileName();
try
{
await File.WriteAllTextAsync(
path,
"scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb\n" +
"notify_dispatch_density_05,5,5000,25000,120.5,150.1,199.9,42000.5,39000.2,85.7\n");
var entries = await BaselineLoader.LoadAsync(path, CancellationToken.None);
var entry = Assert.Single(entries);
Assert.Equal("notify_dispatch_density_05", entry.Key);
Assert.Equal(5, entry.Value.Iterations);
Assert.Equal(5000, entry.Value.EventCount);
Assert.Equal(25000, entry.Value.DeliveryCount);
Assert.Equal(120.5, entry.Value.MeanMs);
Assert.Equal(39000.2, entry.Value.MinThroughputPerSecond);
Assert.Equal(85.7, entry.Value.MaxAllocatedMb);
}
finally
{
File.Delete(path);
}
}
}

View File

@@ -0,0 +1,85 @@
using System.Linq;
using StellaOps.Bench.Notify.Baseline;
using StellaOps.Bench.Notify.Reporting;
using Xunit;
namespace StellaOps.Bench.Notify.Tests;
public sealed class BenchmarkScenarioReportTests
{
[Fact]
public void RegressionDetection_FlagsLatencies()
{
var result = new ScenarioResult(
Id: "scenario",
Label: "Scenario",
Iterations: 3,
TotalEvents: 1000,
TotalRules: 100,
ActionsPerRule: 2,
AverageMatchesPerEvent: 10,
MinMatchesPerEvent: 8,
MaxMatchesPerEvent: 12,
AverageDeliveriesPerEvent: 20,
TotalDeliveries: 20000,
MeanMs: 200,
P95Ms: 250,
MaxMs: 300,
MeanThroughputPerSecond: 50000,
MinThroughputPerSecond: 40000,
MaxAllocatedMb: 100,
ThresholdMs: null,
MinThroughputThresholdPerSecond: null,
MaxAllocatedThresholdMb: null);
var baseline = new BaselineEntry(
ScenarioId: "scenario",
Iterations: 3,
EventCount: 1000,
DeliveryCount: 20000,
MeanMs: 150,
P95Ms: 180,
MaxMs: 200,
MeanThroughputPerSecond: 60000,
MinThroughputPerSecond: 50000,
MaxAllocatedMb: 90);
var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1);
Assert.True(report.DurationRegressionBreached);
Assert.True(report.ThroughputRegressionBreached);
Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("max duration"));
}
[Fact]
public void RegressionDetection_NoBaseline_NoBreaches()
{
var result = new ScenarioResult(
Id: "scenario",
Label: "Scenario",
Iterations: 3,
TotalEvents: 1000,
TotalRules: 100,
ActionsPerRule: 2,
AverageMatchesPerEvent: 10,
MinMatchesPerEvent: 8,
MaxMatchesPerEvent: 12,
AverageDeliveriesPerEvent: 20,
TotalDeliveries: 20000,
MeanMs: 200,
P95Ms: 250,
MaxMs: 300,
MeanThroughputPerSecond: 50000,
MinThroughputPerSecond: 40000,
MaxAllocatedMb: 100,
ThresholdMs: null,
MinThroughputThresholdPerSecond: null,
MaxAllocatedThresholdMb: null);
var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null);
Assert.False(report.DurationRegressionBreached);
Assert.False(report.ThroughputRegressionBreached);
Assert.Empty(report.BuildRegressionFailureMessages());
}
}

View File

@@ -0,0 +1,33 @@
using System.Threading;
using Xunit;
namespace StellaOps.Bench.Notify.Tests;
public sealed class NotifyScenarioRunnerTests
{
[Fact]
public void Execute_ComputesDeterministicMetrics()
{
var config = new NotifyScenarioConfig
{
Id = "unit_test",
EventCount = 500,
RuleCount = 40,
ActionsPerRule = 3,
MatchRate = 0.25,
TenantCount = 4,
ChannelCount = 16
};
var runner = new NotifyScenarioRunner(config);
var result = runner.Execute(2, CancellationToken.None);
Assert.Equal(config.ResolveEventCount(), result.TotalEvents);
Assert.Equal(config.ResolveRuleCount(), result.TotalRules);
Assert.Equal(config.ResolveActionsPerRule(), result.ActionsPerRule);
Assert.True(result.TotalMatches > 0);
Assert.Equal(result.TotalMatches * result.ActionsPerRule, result.TotalDeliveries);
Assert.Equal(2, result.Durations.Count);
Assert.All(result.Durations, value => Assert.True(value > 0));
}
}

View File

@@ -0,0 +1,64 @@
using System.IO;
using StellaOps.Bench.Notify.Baseline;
using StellaOps.Bench.Notify.Reporting;
using Xunit;
namespace StellaOps.Bench.Notify.Tests;
public sealed class PrometheusWriterTests
{
[Fact]
public void Write_EmitsScenarioMetrics()
{
var result = new ScenarioResult(
Id: "scenario",
Label: "Scenario",
Iterations: 3,
TotalEvents: 1000,
TotalRules: 100,
ActionsPerRule: 2,
AverageMatchesPerEvent: 10,
MinMatchesPerEvent: 8,
MaxMatchesPerEvent: 12,
AverageDeliveriesPerEvent: 20,
TotalDeliveries: 20000,
MeanMs: 200,
P95Ms: 250,
MaxMs: 300,
MeanThroughputPerSecond: 50000,
MinThroughputPerSecond: 40000,
MaxAllocatedMb: 100,
ThresholdMs: 900,
MinThroughputThresholdPerSecond: 35000,
MaxAllocatedThresholdMb: 150);
var baseline = new BaselineEntry(
ScenarioId: "scenario",
Iterations: 3,
EventCount: 1000,
DeliveryCount: 20000,
MeanMs: 180,
P95Ms: 210,
MaxMs: 240,
MeanThroughputPerSecond: 52000,
MinThroughputPerSecond: 41000,
MaxAllocatedMb: 95);
var report = new BenchmarkScenarioReport(result, baseline);
var path = Path.GetTempFileName();
try
{
PrometheusWriter.Write(path, new[] { report });
var content = File.ReadAllText(path);
Assert.Contains("notify_dispatch_bench_mean_ms", content);
Assert.Contains("scenario\"} 200", content);
Assert.Contains("notify_dispatch_bench_baseline_mean_ms", content);
}
finally
{
File.Delete(path);
}
}
}

View File

@@ -0,0 +1,27 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
<UseConcelierTestInfra>false</UseConcelierTestInfra>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" />
<PackageReference Include="xunit" Version="2.9.2" />
<PackageReference Include="xunit.runner.visualstudio" Version="2.8.2">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
<PackageReference Include="coverlet.collector" Version="6.0.4">
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
<PrivateAssets>all</PrivateAssets>
</PackageReference>
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\StellaOps.Bench.Notify\StellaOps.Bench.Notify.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,13 @@
namespace StellaOps.Bench.Notify.Baseline;
internal sealed record BaselineEntry(
string ScenarioId,
int Iterations,
int EventCount,
int DeliveryCount,
double MeanMs,
double P95Ms,
double MaxMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MaxAllocatedMb);

View File

@@ -0,0 +1,87 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
namespace StellaOps.Bench.Notify.Baseline;
internal static class BaselineLoader
{
public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
var resolved = Path.GetFullPath(path);
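// A missing baseline file yields an empty map, so first runs (with no baseline recorded yet) still succeed.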
if (!File.Exists(resolved))
{
return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
}
var results = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
using var reader = new StreamReader(stream);
var lineNumber = 0;
while (true)
{
cancellationToken.ThrowIfCancellationRequested();
var line = await reader.ReadLineAsync().ConfigureAwait(false);
if (line is null)
{
break;
}
lineNumber++;
if (lineNumber == 1 || string.IsNullOrWhiteSpace(line))
{
continue;
}
var parts = line.Split(',', StringSplitOptions.TrimEntries);
if (parts.Length < 10)
{
throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 10 columns, found {parts.Length}).");
}
var entry = new BaselineEntry(
ScenarioId: parts[0],
Iterations: ParseInt(parts[1], resolved, lineNumber),
EventCount: ParseInt(parts[2], resolved, lineNumber),
DeliveryCount: ParseInt(parts[3], resolved, lineNumber),
MeanMs: ParseDouble(parts[4], resolved, lineNumber),
P95Ms: ParseDouble(parts[5], resolved, lineNumber),
MaxMs: ParseDouble(parts[6], resolved, lineNumber),
MeanThroughputPerSecond: ParseDouble(parts[7], resolved, lineNumber),
MinThroughputPerSecond: ParseDouble(parts[8], resolved, lineNumber),
MaxAllocatedMb: ParseDouble(parts[9], resolved, lineNumber));
results[entry.ScenarioId] = entry;
}
return results;
}
private static int ParseInt(string value, string file, int line)
{
if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
{
return parsed;
}
throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
}
private static double ParseDouble(string value, string file, int line)
{
if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed))
{
return parsed;
}
throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
}
}

View File

@@ -0,0 +1,220 @@
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Bench.Notify;
internal sealed record BenchmarkConfig(
double? ThresholdMs,
double? MinThroughputPerSecond,
double? MaxAllocatedMb,
int? Iterations,
IReadOnlyList<NotifyScenarioConfig> Scenarios)
{
public static async Task<BenchmarkConfig> LoadAsync(string path)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
var resolved = Path.GetFullPath(path);
if (!File.Exists(resolved))
{
throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
}
await using var stream = File.OpenRead(resolved);
var model = await JsonSerializer.DeserializeAsync<BenchmarkConfigModel>(
stream,
new JsonSerializerOptions(JsonSerializerDefaults.Web)
{
PropertyNameCaseInsensitive = true,
ReadCommentHandling = JsonCommentHandling.Skip,
AllowTrailingCommas = true
}).ConfigureAwait(false);
if (model is null)
{
throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
}
if (model.Scenarios.Count == 0)
{
throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
}
foreach (var scenario in model.Scenarios)
{
scenario.Validate();
}
return new BenchmarkConfig(
model.ThresholdMs,
model.MinThroughputPerSecond,
model.MaxAllocatedMb,
model.Iterations,
model.Scenarios);
}
private sealed class BenchmarkConfigModel
{
[JsonPropertyName("thresholdMs")]
public double? ThresholdMs { get; init; }
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }
[JsonPropertyName("iterations")]
public int? Iterations { get; init; }
[JsonPropertyName("scenarios")]
public List<NotifyScenarioConfig> Scenarios { get; init; } = new();
}
}
internal sealed class NotifyScenarioConfig
{
private const int DefaultEventCount = 10_000;
private const int DefaultRuleCount = 200;
private const int DefaultActionsPerRule = 3;
private const double DefaultMatchRate = 0.25d;
private const int DefaultTenantCount = 4;
private const int DefaultChannelCount = 8;
private const int BaseSeed = 2025_10_26;
[JsonPropertyName("id")]
public string? Id { get; init; }
[JsonPropertyName("label")]
public string? Label { get; init; }
[JsonPropertyName("eventCount")]
public int EventCount { get; init; } = DefaultEventCount;
[JsonPropertyName("ruleCount")]
public int RuleCount { get; init; } = DefaultRuleCount;
[JsonPropertyName("actionsPerRule")]
public int ActionsPerRule { get; init; } = DefaultActionsPerRule;
[JsonPropertyName("matchRate")]
public double? MatchRate { get; init; }
[JsonPropertyName("tenantCount")]
public int? TenantCount { get; init; }
[JsonPropertyName("channelCount")]
public int? ChannelCount { get; init; }
[JsonPropertyName("seed")]
public int? Seed { get; init; }
[JsonPropertyName("thresholdMs")]
public double? ThresholdMs { get; init; }
[JsonPropertyName("minThroughputPerSecond")]
public double? MinThroughputPerSecond { get; init; }
[JsonPropertyName("maxAllocatedMb")]
public double? MaxAllocatedMb { get; init; }
[JsonPropertyName("iterations")]
public int? Iterations { get; init; }
public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "notify_dispatch" : Id!.Trim();
public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim();
public int ResolveEventCount()
{
if (EventCount <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires eventCount > 0.");
}
return EventCount;
}
public int ResolveRuleCount()
{
if (RuleCount <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires ruleCount > 0.");
}
return RuleCount;
}
public int ResolveActionsPerRule()
{
if (ActionsPerRule <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires actionsPerRule > 0.");
}
return ActionsPerRule;
}
public double ResolveMatchRate()
{
var rate = MatchRate ?? DefaultMatchRate;
if (!double.IsFinite(rate) || rate <= 0d || rate > 1d)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires matchRate within (0, 1].");
}
return rate;
}
public int ResolveTenantCount()
{
var tenants = TenantCount ?? DefaultTenantCount;
if (tenants <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires tenantCount > 0.");
}
return tenants;
}
public int ResolveChannelCount()
{
var channels = ChannelCount ?? DefaultChannelCount;
if (channels <= 0)
{
throw new InvalidOperationException($"Scenario '{ScenarioId}' requires channelCount > 0.");
}
return channels;
}
public int ResolveSeed()
{
if (Seed is { } explicitSeed && explicitSeed > 0)
{
return explicitSeed;
}
var material = Encoding.UTF8.GetBytes($"stellaops-notify-bench::{ScenarioId}");
var hash = SHA256.HashData(material);
var derived = BitConverter.ToInt32(hash, 0) & int.MaxValue;
if (derived == 0)
{
derived = BaseSeed;
}
return derived;
}
public void Validate()
{
ResolveEventCount();
ResolveRuleCount();
ResolveActionsPerRule();
ResolveMatchRate();
ResolveTenantCount();
ResolveChannelCount();
}
}

View File

@@ -0,0 +1,26 @@
using System;
namespace StellaOps.Bench.Notify;
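// Folds every dispatched (rule, action, event) hash into a rolling value so the benchmark's hot loop has an
// observable side effect; AssertConsumed throws if no deliveries were ever recorded.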
internal sealed class DispatchAccumulator
{
private long _value = 17;
public void Add(int ruleHash, int actionHash, int eventHash)
{
unchecked
{
_value = (_value * 31) ^ ruleHash;
_value = (_value * 31) ^ actionHash;
_value = (_value * 31) ^ eventHash;
}
}
public void AssertConsumed()
{
if (_value == 17)
{
throw new InvalidOperationException("Dispatch accumulator did not receive any values.");
}
}
}

View File

@@ -0,0 +1,386 @@
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using StellaOps.Notify.Models;
namespace StellaOps.Bench.Notify;
internal sealed class NotifyScenarioRunner
{
private static readonly DateTimeOffset BaseTimestamp = new(2025, 10, 26, 0, 0, 0, TimeSpan.Zero);
private const string EventKind = NotifyEventKinds.ScannerReportReady;
private readonly NotifyScenarioConfig _config;
private readonly EventDescriptor[] _events;
private readonly RuleDescriptor[][] _rulesByTenant;
private readonly int _totalEvents;
private readonly int _ruleCount;
private readonly int _actionsPerRule;
private readonly int _totalMatches;
private readonly int _totalDeliveries;
private readonly double _averageMatchesPerEvent;
private readonly double _averageDeliveriesPerEvent;
private readonly int _minMatchesPerEvent;
private readonly int _maxMatchesPerEvent;
public NotifyScenarioRunner(NotifyScenarioConfig config)
{
_config = config ?? throw new ArgumentNullException(nameof(config));
var eventCount = config.ResolveEventCount();
var ruleCount = config.ResolveRuleCount();
var actionsPerRule = config.ResolveActionsPerRule();
var matchRate = config.ResolveMatchRate();
var tenantCount = config.ResolveTenantCount();
var channelCount = config.ResolveChannelCount();
var seed = config.ResolveSeed();
if (tenantCount > ruleCount)
{
tenantCount = Math.Max(1, ruleCount);
}
_totalEvents = eventCount;
_ruleCount = ruleCount;
_actionsPerRule = actionsPerRule;
var tenants = BuildTenants(tenantCount);
var channels = BuildChannels(channelCount);
var random = new Random(seed);
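// Target fan-out per event: ruleCount * matchRate, rounded and clamped to [1, ruleCount].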
var targetMatchesPerEvent = Math.Max(1, (int)Math.Round(ruleCount * matchRate));
targetMatchesPerEvent = Math.Min(targetMatchesPerEvent, ruleCount);
var ruleDescriptors = new List<RuleDescriptor>(ruleCount);
var groups = new List<RuleGroup>();
var ruleIndex = 0;
var groupIndex = 0;
var channelCursor = 0;
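// Partition rules into groups that share one (tenant, namespace, repository, digest) tuple; each synthetic
// event targets a single group, so it matches exactly that group's rules.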
while (ruleIndex < ruleCount)
{
var groupSize = Math.Min(targetMatchesPerEvent, ruleCount - ruleIndex);
var tenantIndex = groupIndex % tenantCount;
var tenantId = tenants[tenantIndex];
var namespaceValue = $"svc-{tenantIndex:D2}-{groupIndex:D3}";
var repositoryValue = $"registry.local/{tenantId}/service-{groupIndex:D3}";
var digestValue = GenerateDigest(random, groupIndex);
var rules = new RuleDescriptor[groupSize];
for (var local = 0; local < groupSize && ruleIndex < ruleCount; local++, ruleIndex++)
{
var ruleId = $"rule-{groupIndex:D3}-{local:D3}";
var actions = new ActionDescriptor[actionsPerRule];
for (var actionIndex = 0; actionIndex < actionsPerRule; actionIndex++)
{
var channel = channels[channelCursor % channelCount];
channelCursor++;
var actionId = $"{ruleId}-act-{actionIndex:D2}";
actions[actionIndex] = new ActionDescriptor(
actionId,
channel,
StableHash($"{actionId}|{channel}"));
}
rules[local] = new RuleDescriptor(
ruleId,
StableHash(ruleId),
tenantIndex,
namespaceValue,
repositoryValue,
digestValue,
actions);
ruleDescriptors.Add(rules[local]);
}
groups.Add(new RuleGroup(tenantIndex, namespaceValue, repositoryValue, digestValue, rules));
groupIndex++;
}
_rulesByTenant = BuildRulesByTenant(tenantCount, ruleDescriptors);
var events = new EventDescriptor[eventCount];
long totalMatches = 0;
var minMatches = int.MaxValue;
var maxMatches = 0;
for (var eventIndex = 0; eventIndex < eventCount; eventIndex++)
{
var group = groups[eventIndex % groups.Count];
var matchingRules = group.Rules.Length;
totalMatches += matchingRules;
if (matchingRules < minMatches)
{
minMatches = matchingRules;
}
if (matchingRules > maxMatches)
{
maxMatches = matchingRules;
}
var eventId = GenerateEventId(random, group.TenantIndex, eventIndex);
var timestamp = BaseTimestamp.AddMilliseconds(eventIndex * 10d);
// Materialize NotifyEvent to reflect production payload shape.
_ = NotifyEvent.Create(
eventId,
EventKind,
tenants[group.TenantIndex],
timestamp,
payload: null,
scope: NotifyEventScope.Create(
@namespace: group.Namespace,
repo: group.Repository,
digest: group.Digest));
events[eventIndex] = new EventDescriptor(
group.TenantIndex,
EventKind,
group.Namespace,
group.Repository,
group.Digest,
ComputeEventHash(eventId));
}
_events = events;
_totalMatches = checked((int)totalMatches);
_totalDeliveries = checked(_totalMatches * actionsPerRule);
_averageMatchesPerEvent = totalMatches / (double)eventCount;
_averageDeliveriesPerEvent = _averageMatchesPerEvent * actionsPerRule;
_minMatchesPerEvent = minMatches;
_maxMatchesPerEvent = maxMatches;
}
public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
{
if (iterations <= 0)
{
throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
}
var durations = new double[iterations];
var throughputs = new double[iterations];
var allocations = new double[iterations];
for (var index = 0; index < iterations; index++)
{
cancellationToken.ThrowIfCancellationRequested();
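// Allocation figure is the GC.GetTotalAllocatedBytes delta across this iteration (converted to MB below).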
var beforeAllocated = GC.GetTotalAllocatedBytes();
var stopwatch = Stopwatch.StartNew();
var accumulator = new DispatchAccumulator();
var observedMatches = 0;
var observedDeliveries = 0;
foreach (ref readonly var @event in _events.AsSpan())
{
var tenantRules = _rulesByTenant[@event.TenantIndex];
foreach (var rule in tenantRules)
{
if (!Matches(rule, @event))
{
continue;
}
observedMatches++;
var actions = rule.Actions;
for (var actionIndex = 0; actionIndex < actions.Length; actionIndex++)
{
observedDeliveries++;
accumulator.Add(rule.RuleHash, actions[actionIndex].Hash, @event.EventHash);
}
}
}
stopwatch.Stop();
if (observedMatches != _totalMatches)
{
throw new InvalidOperationException($"Scenario '{_config.ScenarioId}' expected {_totalMatches} matches but observed {observedMatches}.");
}
if (observedDeliveries != _totalDeliveries)
{
throw new InvalidOperationException($"Scenario '{_config.ScenarioId}' expected {_totalDeliveries} deliveries but observed {observedDeliveries}.");
}
accumulator.AssertConsumed();
var elapsedMs = stopwatch.Elapsed.TotalMilliseconds;
if (elapsedMs <= 0d)
{
elapsedMs = 0.0001d;
}
var afterAllocated = GC.GetTotalAllocatedBytes();
durations[index] = elapsedMs;
throughputs[index] = observedDeliveries / Math.Max(stopwatch.Elapsed.TotalSeconds, 0.0001d);
allocations[index] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);
}
return new ScenarioExecutionResult(
durations,
throughputs,
allocations,
_totalEvents,
_ruleCount,
_actionsPerRule,
_averageMatchesPerEvent,
_minMatchesPerEvent,
_maxMatchesPerEvent,
_averageDeliveriesPerEvent,
_totalMatches,
_totalDeliveries);
}
private static bool Matches(in RuleDescriptor rule, in EventDescriptor @event)
{
if (!string.Equals(@event.Kind, EventKind, StringComparison.Ordinal))
{
return false;
}
if (!string.Equals(rule.Namespace, @event.Namespace, StringComparison.Ordinal))
{
return false;
}
if (!string.Equals(rule.Repository, @event.Repository, StringComparison.Ordinal))
{
return false;
}
if (!string.Equals(rule.Digest, @event.Digest, StringComparison.Ordinal))
{
return false;
}
return true;
}
private static int ComputeEventHash(Guid eventId)
{
var bytes = eventId.ToByteArray();
var value = BitConverter.ToInt32(bytes, 0);
return value & int.MaxValue;
}
private static string GenerateDigest(Random random, int groupIndex)
{
var buffer = new byte[16];
random.NextBytes(buffer);
var hex = Convert.ToHexString(buffer).ToLowerInvariant();
return $"sha256:{hex}{groupIndex:D3}";
}
private static Guid GenerateEventId(Random random, int tenantIndex, int eventIndex)
{
Span<byte> buffer = stackalloc byte[16];
random.NextBytes(buffer);
buffer[^1] = (byte)(tenantIndex & 0xFF);
buffer[^2] = (byte)(eventIndex & 0xFF);
return new Guid(buffer);
}
private static RuleDescriptor[][] BuildRulesByTenant(int tenantCount, List<RuleDescriptor> rules)
{
var result = new RuleDescriptor[tenantCount][];
for (var tenantIndex = 0; tenantIndex < tenantCount; tenantIndex++)
{
result[tenantIndex] = rules
.Where(rule => rule.TenantIndex == tenantIndex)
.ToArray();
}
return result;
}
private static string[] BuildTenants(int tenantCount)
{
var tenants = new string[tenantCount];
for (var index = 0; index < tenantCount; index++)
{
tenants[index] = $"tenant-{index:D2}";
}
return tenants;
}
private static string[] BuildChannels(int channelCount)
{
var channels = new string[channelCount];
for (var index = 0; index < channelCount; index++)
{
var kind = (index % 4) switch
{
0 => "slack",
1 => "teams",
2 => "email",
_ => "webhook"
};
channels[index] = $"{kind}:channel-{index:D2}";
}
return channels;
}
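// FNV-1a-style string hash masked to a non-negative value; deterministic across processes, unlike string.GetHashCode.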
private static int StableHash(string value)
{
unchecked
{
const int offset = unchecked((int)2166136261);
const int prime = 16777619;
var hash = offset;
foreach (var ch in value.AsSpan())
{
hash ^= ch;
hash *= prime;
}
return hash & int.MaxValue;
}
}
private readonly record struct RuleDescriptor(
string RuleId,
int RuleHash,
int TenantIndex,
string Namespace,
string Repository,
string Digest,
ActionDescriptor[] Actions);
private readonly record struct ActionDescriptor(
string ActionId,
string Channel,
int Hash);
private readonly record struct RuleGroup(
int TenantIndex,
string Namespace,
string Repository,
string Digest,
RuleDescriptor[] Rules);
private readonly record struct EventDescriptor(
int TenantIndex,
string Kind,
string Namespace,
string Repository,
string Digest,
int EventHash);
}

View File

@@ -0,0 +1,364 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Globalization;
using StellaOps.Bench.Notify.Baseline;
using StellaOps.Bench.Notify.Reporting;
namespace StellaOps.Bench.Notify;
internal static class Program
{
public static async Task<int> Main(string[] args)
{
try
{
var options = ProgramOptions.Parse(args);
var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);
var results = new List<ScenarioResult>();
var reports = new List<BenchmarkScenarioReport>();
var failures = new List<string>();
foreach (var scenario in config.Scenarios)
{
var iterations = options.Iterations
?? scenario.Iterations
?? config.Iterations
?? 5;
var runner = new NotifyScenarioRunner(scenario);
var execution = runner.Execute(iterations, CancellationToken.None);
var durationStats = DurationStatistics.From(execution.Durations);
var throughputStats = ThroughputStatistics.From(execution.Throughputs);
var allocationStats = AllocationStatistics.From(execution.AllocatedMb);
var scenarioThreshold = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs;
var scenarioThroughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
var scenarioAllocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb;
var result = new ScenarioResult(
scenario.ScenarioId,
scenario.DisplayLabel,
iterations,
execution.TotalEvents,
execution.TotalRules,
execution.ActionsPerRule,
execution.AverageMatchesPerEvent,
execution.MinMatchesPerEvent,
execution.MaxMatchesPerEvent,
execution.AverageDeliveriesPerEvent,
execution.TotalDeliveries,
durationStats.MeanMs,
durationStats.P95Ms,
durationStats.MaxMs,
throughputStats.MeanPerSecond,
throughputStats.MinPerSecond,
allocationStats.MaxAllocatedMb,
scenarioThreshold,
scenarioThroughputFloor,
scenarioAllocationLimit);
results.Add(result);
if (scenarioThreshold is { } threshold && result.MaxMs > threshold)
{
failures.Add($"{result.Id} exceeded latency threshold: {result.MaxMs:F2} ms > {threshold:F2} ms");
}
if (scenarioThroughputFloor is { } floor && result.MinThroughputPerSecond < floor)
{
failures.Add($"{result.Id} fell below throughput floor: {result.MinThroughputPerSecond:N0} deliveries/s < {floor:N0} deliveries/s");
}
if (scenarioAllocationLimit is { } limit && result.MaxAllocatedMb > limit)
{
failures.Add($"{result.Id} exceeded allocation budget: {result.MaxAllocatedMb:F2} MB > {limit:F2} MB");
}
baseline.TryGetValue(result.Id, out var baselineEntry);
var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit);
reports.Add(report);
failures.AddRange(report.BuildRegressionFailureMessages());
}
TablePrinter.Print(results);
if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
{
CsvWriter.Write(options.CsvOutPath!, results);
}
if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
{
var metadata = new BenchmarkJsonMetadata(
SchemaVersion: "notify-dispatch-bench/1.0",
CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(),
Commit: options.Commit,
Environment: options.Environment);
await BenchmarkJsonWriter.WriteAsync(
options.JsonOutPath!,
metadata,
reports,
CancellationToken.None).ConfigureAwait(false);
}
if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
{
PrometheusWriter.Write(options.PrometheusOutPath!, reports);
}
if (failures.Count > 0)
{
Console.Error.WriteLine();
Console.Error.WriteLine("Benchmark failures detected:");
foreach (var failure in failures.Distinct())
{
Console.Error.WriteLine($" - {failure}");
}
return 1;
}
return 0;
}
catch (Exception ex)
{
Console.Error.WriteLine($"notify-bench error: {ex.Message}");
return 1;
}
}
private sealed record ProgramOptions(
string ConfigPath,
int? Iterations,
double? ThresholdMs,
double? MinThroughputPerSecond,
double? MaxAllocatedMb,
string? CsvOutPath,
string? JsonOutPath,
string? PrometheusOutPath,
string BaselinePath,
DateTimeOffset? CapturedAtUtc,
string? Commit,
string? Environment,
double? RegressionLimit)
{
public static ProgramOptions Parse(string[] args)
{
var configPath = DefaultConfigPath();
var baselinePath = DefaultBaselinePath();
int? iterations = null;
double? thresholdMs = null;
double? minThroughput = null;
double? maxAllocated = null;
string? csvOut = null;
string? jsonOut = null;
string? promOut = null;
DateTimeOffset? capturedAt = null;
string? commit = null;
string? environment = null;
double? regressionLimit = null;
for (var index = 0; index < args.Length; index++)
{
var current = args[index];
switch (current)
{
case "--config":
EnsureNext(args, index);
configPath = Path.GetFullPath(args[++index]);
break;
case "--iterations":
EnsureNext(args, index);
iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--threshold-ms":
EnsureNext(args, index);
thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--min-throughput":
EnsureNext(args, index);
minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--max-allocated-mb":
EnsureNext(args, index);
maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--csv":
EnsureNext(args, index);
csvOut = args[++index];
break;
case "--json":
EnsureNext(args, index);
jsonOut = args[++index];
break;
case "--prometheus":
EnsureNext(args, index);
promOut = args[++index];
break;
case "--baseline":
EnsureNext(args, index);
baselinePath = Path.GetFullPath(args[++index]);
break;
case "--captured-at":
EnsureNext(args, index);
capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
break;
case "--commit":
EnsureNext(args, index);
commit = args[++index];
break;
case "--environment":
EnsureNext(args, index);
environment = args[++index];
break;
case "--regression-limit":
EnsureNext(args, index);
regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
break;
case "--help":
case "-h":
PrintUsage();
System.Environment.Exit(0);
break;
default:
throw new ArgumentException($"Unknown argument '{current}'.");
}
}
return new ProgramOptions(
configPath,
iterations,
thresholdMs,
minThroughput,
maxAllocated,
csvOut,
jsonOut,
promOut,
baselinePath,
capturedAt,
commit,
environment,
regressionLimit);
}
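// AppContext.BaseDirectory resolves to bin/<configuration>/<tfm>; walk three levels up to the project
// directory and one more to the bench root that holds config.json and baseline.csv.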
private static string DefaultConfigPath()
{
var binaryDir = AppContext.BaseDirectory;
var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
return Path.Combine(benchRoot, "config.json");
}
private static string DefaultBaselinePath()
{
var binaryDir = AppContext.BaseDirectory;
var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
return Path.Combine(benchRoot, "baseline.csv");
}
private static void EnsureNext(string[] args, int index)
{
if (index + 1 >= args.Length)
{
throw new ArgumentException("Missing value for argument.");
}
}
private static void PrintUsage()
{
Console.WriteLine("Usage: notify-bench [options]");
Console.WriteLine();
Console.WriteLine("Options:");
Console.WriteLine(" --config <path> Path to benchmark configuration JSON.");
Console.WriteLine(" --iterations <count> Override iteration count.");
Console.WriteLine(" --threshold-ms <value> Global latency threshold in milliseconds.");
Console.WriteLine(" --min-throughput <value> Global throughput floor (deliveries/second).");
Console.WriteLine(" --max-allocated-mb <value> Global allocation ceiling (MB).");
Console.WriteLine(" --csv <path> Write CSV results to path.");
Console.WriteLine(" --json <path> Write JSON results to path.");
Console.WriteLine(" --prometheus <path> Write Prometheus exposition metrics to path.");
Console.WriteLine(" --baseline <path> Baseline CSV path.");
Console.WriteLine(" --captured-at <iso8601> Timestamp to embed in JSON metadata.");
Console.WriteLine(" --commit <sha> Commit identifier for metadata.");
Console.WriteLine(" --environment <name> Environment label for metadata.");
Console.WriteLine(" --regression-limit <value> Regression multiplier (default 1.15).");
}
}
}
internal static class TablePrinter
{
public static void Print(IEnumerable<ScenarioResult> results)
{
Console.WriteLine("Scenario | Events | Rules | Match/Evt | Deliver/Evt | Mean(ms) | P95(ms) | Max(ms) | Min k/s | Alloc(MB)");
Console.WriteLine("---------------------------- | ------------| -------- | --------- | ----------- | ---------- | ---------- | ---------- | -------- | --------");
foreach (var row in results)
{
Console.WriteLine(string.Join(" | ", new[]
{
row.IdColumn,
row.EventsColumn,
row.RulesColumn,
row.MatchesColumn,
row.DeliveriesColumn,
row.MeanColumn,
row.P95Column,
row.MaxColumn,
row.MinThroughputColumn,
row.AllocatedColumn
}));
}
}
}
internal static class CsvWriter
{
public static void Write(string path, IEnumerable<ScenarioResult> results)
{
var resolvedPath = Path.GetFullPath(path);
var directory = Path.GetDirectoryName(resolvedPath);
if (!string.IsNullOrEmpty(directory))
{
Directory.CreateDirectory(directory);
}
using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None);
using var writer = new StreamWriter(stream);
writer.WriteLine("scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb");
foreach (var row in results)
{
writer.Write(row.Id);
writer.Write(',');
writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.TotalEvents.ToString(CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.TotalDeliveries.ToString(CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.MeanThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.MinThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture));
writer.Write(',');
writer.Write(row.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture));
writer.WriteLine();
}
}
}

View File

@@ -0,0 +1,3 @@
using System.Runtime.CompilerServices;
[assembly: InternalsVisibleTo("StellaOps.Bench.Notify.Tests")]

View File

@@ -0,0 +1,147 @@
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.Notify.Baseline;
namespace StellaOps.Bench.Notify.Reporting;
internal static class BenchmarkJsonWriter
{
private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
{
WriteIndented = true,
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
};
public static async Task WriteAsync(
string path,
BenchmarkJsonMetadata metadata,
IReadOnlyList<BenchmarkScenarioReport> reports,
CancellationToken cancellationToken)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
ArgumentNullException.ThrowIfNull(metadata);
ArgumentNullException.ThrowIfNull(reports);
var resolved = Path.GetFullPath(path);
var directory = Path.GetDirectoryName(resolved);
if (!string.IsNullOrEmpty(directory))
{
Directory.CreateDirectory(directory);
}
var document = new BenchmarkJsonDocument(
metadata.SchemaVersion,
metadata.CapturedAtUtc,
metadata.Commit,
metadata.Environment,
reports.Select(CreateScenario).ToArray());
await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
}
private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
{
var baseline = report.Baseline;
return new BenchmarkJsonScenario(
report.Result.Id,
report.Result.Label,
report.Result.Iterations,
report.Result.TotalEvents,
report.Result.TotalRules,
report.Result.ActionsPerRule,
report.Result.AverageMatchesPerEvent,
report.Result.MinMatchesPerEvent,
report.Result.MaxMatchesPerEvent,
report.Result.AverageDeliveriesPerEvent,
report.Result.TotalDeliveries,
report.Result.MeanMs,
report.Result.P95Ms,
report.Result.MaxMs,
report.Result.MeanThroughputPerSecond,
report.Result.MinThroughputPerSecond,
report.Result.MaxAllocatedMb,
report.Result.ThresholdMs,
report.Result.MinThroughputThresholdPerSecond,
report.Result.MaxAllocatedThresholdMb,
baseline is null
? null
: new BenchmarkJsonScenarioBaseline(
baseline.Iterations,
baseline.EventCount,
baseline.DeliveryCount,
baseline.MeanMs,
baseline.P95Ms,
baseline.MaxMs,
baseline.MeanThroughputPerSecond,
baseline.MinThroughputPerSecond,
baseline.MaxAllocatedMb),
new BenchmarkJsonScenarioRegression(
report.DurationRegressionRatio,
report.ThroughputRegressionRatio,
report.RegressionLimit,
report.RegressionBreached));
}
private sealed record BenchmarkJsonDocument(
string SchemaVersion,
DateTimeOffset CapturedAt,
string? Commit,
string? Environment,
IReadOnlyList<BenchmarkJsonScenario> Scenarios);
private sealed record BenchmarkJsonScenario(
string Id,
string Label,
int Iterations,
int TotalEvents,
int TotalRules,
int ActionsPerRule,
double AverageMatchesPerEvent,
int MinMatchesPerEvent,
int MaxMatchesPerEvent,
double AverageDeliveriesPerEvent,
int TotalDeliveries,
double MeanMs,
double P95Ms,
double MaxMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MaxAllocatedMb,
double? ThresholdMs,
double? MinThroughputThresholdPerSecond,
double? MaxAllocatedThresholdMb,
BenchmarkJsonScenarioBaseline? Baseline,
BenchmarkJsonScenarioRegression Regression);
private sealed record BenchmarkJsonScenarioBaseline(
int Iterations,
int EventCount,
int DeliveryCount,
double MeanMs,
double P95Ms,
double MaxMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MaxAllocatedMb);
private sealed record BenchmarkJsonScenarioRegression(
double? DurationRatio,
double? ThroughputRatio,
double Limit,
bool Breached);
}
internal sealed record BenchmarkJsonMetadata(
string SchemaVersion,
DateTimeOffset CapturedAtUtc,
string? Commit,
string? Environment);

View File

@@ -0,0 +1,84 @@
using System;
using System.Collections.Generic;
using StellaOps.Bench.Notify.Baseline;
namespace StellaOps.Bench.Notify.Reporting;
internal sealed class BenchmarkScenarioReport
{
private const double DefaultRegressionLimit = 1.15d;
public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
{
Result = result ?? throw new ArgumentNullException(nameof(result));
Baseline = baseline;
RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
DurationRegressionRatio = CalculateDurationRatio(result.MaxMs, baseline?.MaxMs);
ThroughputRegressionRatio = CalculateThroughputRatio(result.MinThroughputPerSecond, baseline?.MinThroughputPerSecond);
}
public ScenarioResult Result { get; }
public BaselineEntry? Baseline { get; }
public double RegressionLimit { get; }
public double? DurationRegressionRatio { get; }
public double? ThroughputRegressionRatio { get; }
public bool DurationRegressionBreached =>
DurationRegressionRatio is { } ratio &&
ratio >= RegressionLimit;
public bool ThroughputRegressionBreached =>
ThroughputRegressionRatio is { } ratio &&
ratio >= RegressionLimit;
public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached;
public IEnumerable<string> BuildRegressionFailureMessages()
{
if (Baseline is null)
{
yield break;
}
if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
{
var delta = (durationRatio - 1d) * 100d;
yield return $"{Result.Id} exceeded max duration budget: {Result.MaxMs:F2} ms vs baseline {Baseline.MaxMs:F2} ms (+{delta:F1}%).";
}
if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio)
{
var delta = (throughputRatio - 1d) * 100d;
yield return $"{Result.Id} throughput regressed: min {Result.MinThroughputPerSecond:N0} /s vs baseline {Baseline.MinThroughputPerSecond:N0} /s (-{delta:F1}%).";
}
}
private static double? CalculateDurationRatio(double current, double? baseline)
{
if (!baseline.HasValue || baseline.Value <= 0d)
{
return null;
}
return current / baseline.Value;
}
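// Throughput ratio is inverted (baseline / current) so that, like the duration ratio, a value at or above the
// regression limit signals a regression.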
private static double? CalculateThroughputRatio(double current, double? baseline)
{
if (!baseline.HasValue || baseline.Value <= 0d)
{
return null;
}
if (current <= 0d)
{
return double.PositiveInfinity;
}
return baseline.Value / current;
}
}

View File

@@ -0,0 +1,86 @@
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Text;
namespace StellaOps.Bench.Notify.Reporting;
internal static class PrometheusWriter
{
public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
{
ArgumentException.ThrowIfNullOrWhiteSpace(path);
ArgumentNullException.ThrowIfNull(reports);
var resolved = Path.GetFullPath(path);
var directory = Path.GetDirectoryName(resolved);
if (!string.IsNullOrEmpty(directory))
{
Directory.CreateDirectory(directory);
}
var builder = new StringBuilder();
builder.AppendLine("# HELP notify_dispatch_bench_duration_ms Notify dispatch benchmark duration metrics (milliseconds).");
builder.AppendLine("# TYPE notify_dispatch_bench_duration_ms gauge");
builder.AppendLine("# HELP notify_dispatch_bench_throughput_per_sec Notify dispatch benchmark throughput metrics (deliveries per second).");
builder.AppendLine("# TYPE notify_dispatch_bench_throughput_per_sec gauge");
builder.AppendLine("# HELP notify_dispatch_bench_allocation_mb Notify dispatch benchmark allocation metrics (megabytes).");
builder.AppendLine("# TYPE notify_dispatch_bench_allocation_mb gauge");
foreach (var report in reports)
{
var scenarioLabel = Escape(report.Result.Id);
AppendMetric(builder, "notify_dispatch_bench_mean_ms", scenarioLabel, report.Result.MeanMs);
AppendMetric(builder, "notify_dispatch_bench_p95_ms", scenarioLabel, report.Result.P95Ms);
AppendMetric(builder, "notify_dispatch_bench_max_ms", scenarioLabel, report.Result.MaxMs);
AppendMetric(builder, "notify_dispatch_bench_threshold_ms", scenarioLabel, report.Result.ThresholdMs);
AppendMetric(builder, "notify_dispatch_bench_mean_throughput_per_sec", scenarioLabel, report.Result.MeanThroughputPerSecond);
AppendMetric(builder, "notify_dispatch_bench_min_throughput_per_sec", scenarioLabel, report.Result.MinThroughputPerSecond);
AppendMetric(builder, "notify_dispatch_bench_min_throughput_threshold_per_sec", scenarioLabel, report.Result.MinThroughputThresholdPerSecond);
AppendMetric(builder, "notify_dispatch_bench_max_allocated_mb", scenarioLabel, report.Result.MaxAllocatedMb);
AppendMetric(builder, "notify_dispatch_bench_max_allocated_threshold_mb", scenarioLabel, report.Result.MaxAllocatedThresholdMb);
if (report.Baseline is { } baseline)
{
AppendMetric(builder, "notify_dispatch_bench_baseline_max_ms", scenarioLabel, baseline.MaxMs);
AppendMetric(builder, "notify_dispatch_bench_baseline_mean_ms", scenarioLabel, baseline.MeanMs);
AppendMetric(builder, "notify_dispatch_bench_baseline_min_throughput_per_sec", scenarioLabel, baseline.MinThroughputPerSecond);
}
if (report.DurationRegressionRatio is { } durationRatio)
{
AppendMetric(builder, "notify_dispatch_bench_duration_regression_ratio", scenarioLabel, durationRatio);
}
if (report.ThroughputRegressionRatio is { } throughputRatio)
{
AppendMetric(builder, "notify_dispatch_bench_throughput_regression_ratio", scenarioLabel, throughputRatio);
}
AppendMetric(builder, "notify_dispatch_bench_regression_limit", scenarioLabel, report.RegressionLimit);
AppendMetric(builder, "notify_dispatch_bench_regression_breached", scenarioLabel, report.RegressionBreached ? 1 : 0);
}
File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
}
private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
{
if (!value.HasValue)
{
return;
}
builder.Append(metric);
builder.Append("{scenario=\"");
builder.Append(scenario);
builder.Append("\"} ");
builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture));
}
private static string Escape(string value) =>
value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}

View File

@@ -0,0 +1,17 @@
using System.Collections.Generic;
namespace StellaOps.Bench.Notify;
internal sealed record ScenarioExecutionResult(
IReadOnlyList<double> Durations,
IReadOnlyList<double> Throughputs,
IReadOnlyList<double> AllocatedMb,
int TotalEvents,
int TotalRules,
int ActionsPerRule,
double AverageMatchesPerEvent,
int MinMatchesPerEvent,
int MaxMatchesPerEvent,
double AverageDeliveriesPerEvent,
int TotalMatches,
int TotalDeliveries);

View File

@@ -0,0 +1,46 @@
using System.Globalization;
namespace StellaOps.Bench.Notify;
internal sealed record ScenarioResult(
string Id,
string Label,
int Iterations,
int TotalEvents,
int TotalRules,
int ActionsPerRule,
double AverageMatchesPerEvent,
int MinMatchesPerEvent,
int MaxMatchesPerEvent,
double AverageDeliveriesPerEvent,
int TotalDeliveries,
double MeanMs,
double P95Ms,
double MaxMs,
double MeanThroughputPerSecond,
double MinThroughputPerSecond,
double MaxAllocatedMb,
double? ThresholdMs,
double? MinThroughputThresholdPerSecond,
double? MaxAllocatedThresholdMb)
{
public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];
public string EventsColumn => TotalEvents.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);
public string RulesColumn => TotalRules.ToString("N0", CultureInfo.InvariantCulture).PadLeft(9);
public string MatchesColumn => AverageMatchesPerEvent.ToString("F1", CultureInfo.InvariantCulture).PadLeft(8);
public string DeliveriesColumn => AverageDeliveriesPerEvent.ToString("F1", CultureInfo.InvariantCulture).PadLeft(10);
public string MeanColumn => MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
public string P95Column => P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
public string MaxColumn => MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
public string MinThroughputColumn => (MinThroughputPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
public string AllocatedColumn => MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}

View File

@@ -0,0 +1,87 @@
using System;
using System.Collections.Generic;
using System.Linq;
namespace StellaOps.Bench.Notify;
internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
public static DurationStatistics From(IReadOnlyList<double> durations)
{
if (durations.Count == 0)
{
return new DurationStatistics(0, 0, 0);
}
var sorted = durations.ToArray();
Array.Sort(sorted);
var total = 0d;
foreach (var value in durations)
{
total += value;
}
var mean = total / durations.Count;
var p95 = Percentile(sorted, 95);
var max = sorted[^1];
return new DurationStatistics(mean, p95, max);
}
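// Percentile via linear interpolation between the two nearest ranks of the sorted samples.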
private static double Percentile(IReadOnlyList<double> sorted, double percentile)
{
if (sorted.Count == 0)
{
return 0;
}
var rank = (percentile / 100d) * (sorted.Count - 1);
var lower = (int)Math.Floor(rank);
var upper = (int)Math.Ceiling(rank);
var weight = rank - lower;
if (upper >= sorted.Count)
{
return sorted[lower];
}
return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
}
}
internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
public static ThroughputStatistics From(IReadOnlyList<double> values)
{
if (values.Count == 0)
{
return new ThroughputStatistics(0, 0);
}
var total = 0d;
var min = double.MaxValue;
foreach (var value in values)
{
total += value;
min = Math.Min(min, value);
}
var mean = total / values.Count;
return new ThroughputStatistics(mean, min);
}
}
internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
public static AllocationStatistics From(IReadOnlyList<double> values)
{
var max = 0d;
foreach (var value in values)
{
max = Math.Max(max, value);
}
return new AllocationStatistics(max);
}
}

View File

@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>net10.0</TargetFramework>
<Nullable>enable</Nullable>
<ImplicitUsings>enable</ImplicitUsings>
<LangVersion>preview</LangVersion>
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="../../../../Notify/__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" />
</ItemGroup>
</Project>

View File

@@ -0,0 +1,4 @@
scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb
notify_dispatch_density_05,5,5000,20000,3.4150,4.1722,4.3039,6053938.5172,4646948.1168,0.0000
notify_dispatch_density_20,5,7500,675000,24.2274,25.8517,26.0526,27923335.5855,25909122.3141,0.0000
notify_dispatch_density_40,5,10000,4000080,138.7387,147.7174,149.1124,28916602.9214,26825938.0172,0.0000

View File

@@ -0,0 +1,47 @@
{
"thresholdMs": 1200,
"minThroughputPerSecond": 10000,
"maxAllocatedMb": 512,
"iterations": 5,
"scenarios": [
{
"id": "notify_dispatch_density_05",
"label": "50 rules / 5% fanout",
"eventCount": 5000,
"ruleCount": 50,
"actionsPerRule": 2,
"matchRate": 0.05,
"tenantCount": 4,
"channelCount": 12,
"thresholdMs": 400,
"minThroughputPerSecond": 15000,
"maxAllocatedMb": 128
},
{
"id": "notify_dispatch_density_20",
"label": "150 rules / 20% fanout",
"eventCount": 7500,
"ruleCount": 150,
"actionsPerRule": 3,
"matchRate": 0.2,
"tenantCount": 6,
"channelCount": 24,
"thresholdMs": 650,
"minThroughputPerSecond": 30000,
"maxAllocatedMb": 192
},
{
"id": "notify_dispatch_density_40",
"label": "300 rules / 40% fanout",
"eventCount": 10000,
"ruleCount": 300,
"actionsPerRule": 4,
"matchRate": 0.4,
"tenantCount": 8,
"channelCount": 32,
"thresholdMs": 900,
"minThroughputPerSecond": 45000,
"maxAllocatedMb": 256
}
]
}