Add Policy DSL Validator, Schema Exporter, and Simulation Smoke tools
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
- Implemented PolicyDslValidator with command-line options for strict mode and JSON output.
- Created PolicySchemaExporter to generate JSON schemas for policy-related models.
- Developed PolicySimulationSmoke tool to validate policy simulations against expected outcomes.
- Added project files and necessary dependencies for each tool.
- Ensured proper error handling and usage instructions across tools.
@@ -0,0 +1,12 @@
namespace StellaOps.Bench.PolicyEngine.Baseline;

internal sealed record BaselineEntry(
    string ScenarioId,
    int Iterations,
    int FindingCount,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MaxAllocatedMb);
@@ -0,0 +1,86 @@
using System.Globalization;

namespace StellaOps.Bench.PolicyEngine.Baseline;

internal static class BaselineLoader
{
    public static async Task<IReadOnlyDictionary<string, BaselineEntry>> LoadAsync(string path, CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            return new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);
        }

        var result = new Dictionary<string, BaselineEntry>(StringComparer.OrdinalIgnoreCase);

        await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read);
        using var reader = new StreamReader(stream);

        var lineNumber = 0;
        while (true)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var line = await reader.ReadLineAsync().ConfigureAwait(false);
            if (line is null)
            {
                break;
            }

            lineNumber++;
            if (lineNumber == 1)
            {
                continue; // header
            }

            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            var parts = line.Split(',', StringSplitOptions.TrimEntries);
            if (parts.Length < 9)
            {
                throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 9 columns, found {parts.Length}).");
            }

            var entry = new BaselineEntry(
                ScenarioId: parts[0],
                Iterations: ParseInt(parts[1], resolved, lineNumber),
                FindingCount: ParseInt(parts[2], resolved, lineNumber),
                MeanMs: ParseDouble(parts[3], resolved, lineNumber),
                P95Ms: ParseDouble(parts[4], resolved, lineNumber),
                MaxMs: ParseDouble(parts[5], resolved, lineNumber),
                MeanThroughputPerSecond: ParseDouble(parts[6], resolved, lineNumber),
                MinThroughputPerSecond: ParseDouble(parts[7], resolved, lineNumber),
                MaxAllocatedMb: ParseDouble(parts[8], resolved, lineNumber));

            result[entry.ScenarioId] = entry;
        }

        return result;
    }

    private static int ParseInt(string value, string file, int line)
    {
        if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var result))
        {
            return result;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'.");
    }

    private static double ParseDouble(string value, string file, int line)
    {
        if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result))
        {
            return result;
        }

        throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'.");
    }
}
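For reference, a hypothetical baseline CSV as consumed by LoadAsync: a header row (line 1, skipped) followed by one row per scenario, with columns in the same order as the BaselineEntry fields and matching the header that CsvWriter emits further down in this commit. All figures are illustrative only.

    scenario,iterations,findings,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb
    policy_eval,5,1000000,812.5000,968.2000,1012.4000,1231500.0000,987600.0000,356.2500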
@@ -0,0 +1,155 @@
using System.Text.Json;
using System.Text.Json.Serialization;

namespace StellaOps.Bench.PolicyEngine;

internal sealed record BenchmarkConfig(
    double? ThresholdMs,
    double? MinThroughputPerSecond,
    double? MaxAllocatedMb,
    int? Iterations,
    IReadOnlyList<PolicyScenarioConfig> Scenarios)
{
    public static async Task<BenchmarkConfig> LoadAsync(string path)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);

        var resolved = Path.GetFullPath(path);
        if (!File.Exists(resolved))
        {
            throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved);
        }

        await using var stream = File.OpenRead(resolved);
        var model = await JsonSerializer.DeserializeAsync<BenchmarkConfigModel>(
            stream,
            new JsonSerializerOptions(JsonSerializerDefaults.Web)
            {
                PropertyNameCaseInsensitive = true,
                ReadCommentHandling = JsonCommentHandling.Skip,
                AllowTrailingCommas = true
            }).ConfigureAwait(false);

        if (model is null)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed.");
        }

        if (model.Scenarios.Count == 0)
        {
            throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios.");
        }

        foreach (var scenario in model.Scenarios)
        {
            scenario.Validate();
        }

        return new BenchmarkConfig(
            model.ThresholdMs,
            model.MinThroughputPerSecond,
            model.MaxAllocatedMb,
            model.Iterations,
            model.Scenarios);
    }

    private sealed class BenchmarkConfigModel
    {
        [JsonPropertyName("thresholdMs")]
        public double? ThresholdMs { get; init; }

        [JsonPropertyName("minThroughputPerSecond")]
        public double? MinThroughputPerSecond { get; init; }

        [JsonPropertyName("maxAllocatedMb")]
        public double? MaxAllocatedMb { get; init; }

        [JsonPropertyName("iterations")]
        public int? Iterations { get; init; }

        [JsonPropertyName("scenarios")]
        public List<PolicyScenarioConfig> Scenarios { get; init; } = new();
    }
}

internal sealed class PolicyScenarioConfig
{
    private const int DefaultComponentCount = 100_000;
    private const int DefaultAdvisoriesPerComponent = 10;

    [JsonPropertyName("id")]
    public string? Id { get; init; }

    [JsonPropertyName("label")]
    public string? Label { get; init; }

    [JsonPropertyName("policyPath")]
    public string PolicyPath { get; init; } = "docs/examples/policies/baseline.yaml";

    [JsonPropertyName("scoringConfig")]
    public string? ScoringConfigPath { get; init; }

    [JsonPropertyName("componentCount")]
    public int ComponentCount { get; init; } = DefaultComponentCount;

    [JsonPropertyName("advisoriesPerComponent")]
    public int AdvisoriesPerComponent { get; init; } = DefaultAdvisoriesPerComponent;

    [JsonPropertyName("totalFindings")]
    public int? TotalFindings { get; init; }

    [JsonPropertyName("seed")]
    public int? Seed { get; init; }

    [JsonPropertyName("thresholdMs")]
    public double? ThresholdMs { get; init; }

    [JsonPropertyName("minThroughputPerSecond")]
    public double? MinThroughputPerSecond { get; init; }

    [JsonPropertyName("maxAllocatedMb")]
    public double? MaxAllocatedMb { get; init; }

    public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "policy_eval" : Id.Trim();

    public int ResolveFindingCount()
    {
        if (TotalFindings is { } findings)
        {
            if (findings <= 0)
            {
                throw new InvalidOperationException($"Scenario '{ScenarioId}' requires totalFindings > 0.");
            }

            return findings;
        }

        if (ComponentCount <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires componentCount > 0.");
        }

        if (AdvisoriesPerComponent <= 0)
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires advisoriesPerComponent > 0.");
        }

        checked
        {
            var total = ComponentCount * AdvisoriesPerComponent;
            return total;
        }
    }

    public int ResolveSeed() => Seed ?? 2025_10_26;

    public void Validate()
    {
        if (string.IsNullOrWhiteSpace(PolicyPath))
        {
            throw new InvalidOperationException($"Scenario '{ScenarioId}' requires a policyPath.");
        }

        ResolveFindingCount();
    }
}
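A minimal config.json sketch using the property names bound above; apart from policyPath (the shipped default), every value is illustrative, not a recommended setting.

    {
      "iterations": 5,
      "thresholdMs": 1500,
      "minThroughputPerSecond": 500000,
      "maxAllocatedMb": 512,
      "scenarios": [
        {
          "id": "policy_eval_baseline",
          "label": "Baseline policy, 100k components x 10 advisories",
          "policyPath": "docs/examples/policies/baseline.yaml",
          "componentCount": 100000,
          "advisoriesPerComponent": 10,
          "seed": 20251026
        }
      ]
    }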
@@ -0,0 +1,15 @@
namespace StellaOps.Bench.PolicyEngine;

internal static class PathUtilities
{
    public static bool IsWithinRoot(string root, string candidate)
    {
        var relative = Path.GetRelativePath(root, candidate);
        if (string.IsNullOrEmpty(relative) || relative == ".")
        {
            return true;
        }

        return !relative.StartsWith("..", StringComparison.Ordinal) && !Path.IsPathRooted(relative);
    }
}
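A quick sketch of the expected behaviour on a Unix-style layout (the paths are assumptions for illustration only):

    // Inside the root: GetRelativePath yields "docs/policy.yaml", so the check passes.
    var inside = PathUtilities.IsWithinRoot("/repo", "/repo/docs/policy.yaml");   // true
    // Outside the root: the relative path starts with "..", so the check fails.
    var outside = PathUtilities.IsWithinRoot("/repo", "/etc/passwd");             // false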
@@ -0,0 +1,249 @@
using System.Collections.Immutable;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using StellaOps.Policy;

namespace StellaOps.Bench.PolicyEngine;

internal sealed class PolicyScenarioRunner
{
    private readonly PolicyScenarioConfig _config;
    private readonly PolicyDocument _document;
    private readonly PolicyScoringConfig _scoringConfig;
    private readonly PolicyFinding[] _findings;

    public PolicyScenarioRunner(PolicyScenarioConfig config, string repoRoot)
    {
        _config = config ?? throw new ArgumentNullException(nameof(config));
        ArgumentException.ThrowIfNullOrWhiteSpace(repoRoot);

        var policyPath = ResolvePathWithinRoot(repoRoot, config.PolicyPath);
        var policyContent = File.ReadAllText(policyPath);
        var policyFormat = PolicySchema.DetectFormat(policyPath);
        var binding = PolicyBinder.Bind(policyContent, policyFormat);
        if (!binding.Success)
        {
            var issues = string.Join(", ", binding.Issues.Select(issue => issue.Code));
            throw new InvalidOperationException($"Policy '{config.PolicyPath}' failed validation: {issues}.");
        }

        _document = binding.Document;

        _scoringConfig = LoadScoringConfig(repoRoot, config.ScoringConfigPath);
        _findings = SyntheticFindingGenerator.Create(config, repoRoot);
    }

    public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken)
    {
        if (iterations <= 0)
        {
            throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive.");
        }

        var durations = new double[iterations];
        var throughputs = new double[iterations];
        var allocations = new double[iterations];
        var hashingAccumulator = new EvaluationAccumulator();

        for (var index = 0; index < iterations; index++)
        {
            cancellationToken.ThrowIfCancellationRequested();

            var beforeAllocated = GC.GetTotalAllocatedBytes();
            var stopwatch = Stopwatch.StartNew();

            hashingAccumulator.Reset();
            foreach (var finding in _findings)
            {
                var verdict = PolicyEvaluation.EvaluateFinding(_document, _scoringConfig, finding);
                hashingAccumulator.Add(verdict);
            }

            stopwatch.Stop();

            var afterAllocated = GC.GetTotalAllocatedBytes();
            var elapsedMs = stopwatch.Elapsed.TotalMilliseconds;
            if (elapsedMs <= 0)
            {
                elapsedMs = 0.0001;
            }

            durations[index] = elapsedMs;
            throughputs[index] = _findings.Length / stopwatch.Elapsed.TotalSeconds;
            allocations[index] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d);

            hashingAccumulator.AssertConsumed();
        }

        return new ScenarioExecutionResult(
            durations,
            throughputs,
            allocations,
            _findings.Length);
    }

    private static PolicyScoringConfig LoadScoringConfig(string repoRoot, string? scoringPath)
    {
        if (string.IsNullOrWhiteSpace(scoringPath))
        {
            return PolicyScoringConfig.Default;
        }

        var resolved = ResolvePathWithinRoot(repoRoot, scoringPath);
        var format = PolicySchema.DetectFormat(resolved);
        var content = File.ReadAllText(resolved);
        var binding = PolicyScoringConfigBinder.Bind(content, format);
        if (!binding.Success || binding.Config is null)
        {
            var issues = binding.Issues.Length == 0
                ? "unknown"
                : string.Join(", ", binding.Issues.Select(issue => issue.Code));
            throw new InvalidOperationException($"Scoring configuration '{scoringPath}' failed validation: {issues}.");
        }

        return binding.Config;
    }

    private static string ResolvePathWithinRoot(string repoRoot, string relativePath)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(repoRoot);
        ArgumentException.ThrowIfNullOrWhiteSpace(relativePath);

        var combined = Path.GetFullPath(Path.Combine(repoRoot, relativePath));
        if (!PathUtilities.IsWithinRoot(repoRoot, combined))
        {
            throw new InvalidOperationException($"Path '{relativePath}' escapes repository root '{repoRoot}'.");
        }

        if (!File.Exists(combined))
        {
            throw new FileNotFoundException($"Path '{relativePath}' resolved to '{combined}' but does not exist.", combined);
        }

        return combined;
    }
}

internal sealed record ScenarioExecutionResult(
    IReadOnlyList<double> Durations,
    IReadOnlyList<double> Throughputs,
    IReadOnlyList<double> AllocatedMb,
    int FindingCount);

internal static class SyntheticFindingGenerator
{
    private static readonly ImmutableArray<string> Environments = ImmutableArray.Create("prod", "staging", "dev");
    private static readonly ImmutableArray<string> Sources = ImmutableArray.Create("concelier", "excitor", "sbom");
    private static readonly ImmutableArray<string> Vendors = ImmutableArray.Create("acme", "contoso", "globex", "initech", "umbrella");
    private static readonly ImmutableArray<string> Licenses = ImmutableArray.Create("MIT", "Apache-2.0", "GPL-3.0", "BSD-3-Clause", "Proprietary");
    private static readonly ImmutableArray<string> Repositories = ImmutableArray.Create("acme/service-api", "acme/web", "acme/worker", "acme/mobile", "acme/cli");
    private static readonly ImmutableArray<string> Images = ImmutableArray.Create("registry.local/worker:2025.10", "registry.local/api:2025.10", "registry.local/cli:2025.10");
    private static readonly ImmutableArray<string> TagPool = ImmutableArray.Create("kev", "runtime", "reachable", "public", "third-party", "critical-path");
    private static readonly ImmutableArray<ImmutableArray<string>> TagSets = BuildTagSets();
    private static readonly PolicySeverity[] SeverityPool =
    {
        PolicySeverity.Critical,
        PolicySeverity.High,
        PolicySeverity.Medium,
        PolicySeverity.Low,
        PolicySeverity.Informational
    };

    public static PolicyFinding[] Create(PolicyScenarioConfig config, string repoRoot)
    {
        var totalFindings = config.ResolveFindingCount();
        if (totalFindings <= 0)
        {
            return Array.Empty<PolicyFinding>();
        }

        var seed = config.ResolveSeed();
        var random = new Random(seed);
        var findings = new PolicyFinding[totalFindings];
        var tagsBuffer = new List<string>(3);

        var componentCount = Math.Max(1, config.ComponentCount);

        for (var index = 0; index < totalFindings; index++)
        {
            var componentIndex = index % componentCount;
            var findingId = $"F-{componentIndex:D5}-{index:D6}";
            var severity = SeverityPool[random.Next(SeverityPool.Length)];
            var environment = Environments[componentIndex % Environments.Length];
            var source = Sources[random.Next(Sources.Length)];
            var vendor = Vendors[random.Next(Vendors.Length)];
            var license = Licenses[random.Next(Licenses.Length)];
            var repository = Repositories[componentIndex % Repositories.Length];
            var image = Images[(componentIndex + index) % Images.Length];
            var packageName = $"pkg{componentIndex % 1000}";
            var purl = $"pkg:generic/{packageName}@{1 + (index % 20)}.{1 + (componentIndex % 10)}.{index % 5}";
            var cve = index % 7 == 0 ? $"CVE-2025-{1000 + index % 9000:D4}" : null;
            var layerDigest = $"sha256:{Convert.ToHexString(Guid.NewGuid().ToByteArray())[..32].ToLowerInvariant()}";

            var tags = TagSets[random.Next(TagSets.Length)];

            findings[index] = PolicyFinding.Create(
                findingId,
                severity,
                environment: environment,
                source: source,
                vendor: vendor,
                license: license,
                image: image,
                repository: repository,
                package: packageName,
                purl: purl,
                cve: cve,
                path: $"/app/{packageName}/{index % 50}.so",
                layerDigest: layerDigest,
                tags: tags);
        }

        return findings;
    }

    private static ImmutableArray<ImmutableArray<string>> BuildTagSets()
    {
        var builder = ImmutableArray.CreateBuilder<ImmutableArray<string>>();
        builder.Add(ImmutableArray<string>.Empty);
        builder.Add(ImmutableArray.Create("kev"));
        builder.Add(ImmutableArray.Create("runtime"));
        builder.Add(ImmutableArray.Create("reachable"));
        builder.Add(ImmutableArray.Create("third-party"));
        builder.Add(ImmutableArray.Create("kev", "runtime"));
        builder.Add(ImmutableArray.Create("kev", "third-party"));
        builder.Add(ImmutableArray.Create("runtime", "public"));
        builder.Add(ImmutableArray.Create("reachable", "critical-path"));
        return builder.ToImmutable();
    }
}

internal sealed class EvaluationAccumulator
{
    private double _scoreAccumulator;
    private int _quietCount;

    public void Reset()
    {
        _scoreAccumulator = 0;
        _quietCount = 0;
    }

    public void Add(PolicyVerdict verdict)
    {
        _scoreAccumulator += verdict.Score;
        if (verdict.Quiet)
        {
            _quietCount++;
        }
    }

    public void AssertConsumed()
    {
        if (_scoreAccumulator == 0 && _quietCount == 0)
        {
            throw new InvalidOperationException("Evaluation accumulator detected zero work; dataset may be empty.");
        }
    }
}
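A hypothetical standalone use of the runner, assuming a PolicyScenarioConfig bound from config.json and an already resolved repository root; error handling is omitted for brevity.

    // Sketch only: the scenario and repoRoot values are assumptions.
    var runner = new PolicyScenarioRunner(scenario, repoRoot);
    var execution = runner.Execute(iterations: 3, CancellationToken.None);
    // Durations, Throughputs and AllocatedMb each hold one sample per iteration.
    Console.WriteLine($"{execution.FindingCount} findings evaluated; slowest pass {execution.Durations.Max():F2} ms");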
@@ -0,0 +1,373 @@
using System.Globalization;
using System.Linq;
using StellaOps.Bench.PolicyEngine.Baseline;
using StellaOps.Bench.PolicyEngine.Reporting;

namespace StellaOps.Bench.PolicyEngine;

internal static class Program
{
    public static async Task<int> Main(string[] args)
    {
        try
        {
            var options = ProgramOptions.Parse(args);
            var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false);
            var iterations = options.Iterations ?? config.Iterations ?? 3;
            var repoRoot = ResolveRepoRoot(options.RepoRoot, options.ConfigPath);
            var thresholdMs = options.ThresholdMs ?? config.ThresholdMs;
            var throughputFloor = options.MinThroughputPerSecond ?? config.MinThroughputPerSecond;
            var allocationLimit = options.MaxAllocatedMb ?? config.MaxAllocatedMb;
            var regressionLimit = options.RegressionLimit;
            var capturedAt = (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime();

            var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false);

            var results = new List<ScenarioResult>();
            var reports = new List<BenchmarkScenarioReport>();
            var failures = new List<string>();

            foreach (var scenario in config.Scenarios)
            {
                var runner = new PolicyScenarioRunner(scenario, repoRoot);
                var execution = runner.Execute(iterations, CancellationToken.None);

                var durationStats = DurationStatistics.From(execution.Durations);
                var throughputStats = ThroughputStatistics.From(execution.Throughputs);
                var allocationStats = AllocationStatistics.From(execution.AllocatedMb);

                var scenarioThreshold = scenario.ThresholdMs ?? thresholdMs;
                var scenarioThroughputFloor = scenario.MinThroughputPerSecond ?? throughputFloor;
                var scenarioAllocationLimit = scenario.MaxAllocatedMb ?? allocationLimit;

                var result = new ScenarioResult(
                    scenario.ScenarioId,
                    scenario.Label ?? scenario.ScenarioId,
                    iterations,
                    execution.FindingCount,
                    durationStats.MeanMs,
                    durationStats.P95Ms,
                    durationStats.MaxMs,
                    throughputStats.MeanPerSecond,
                    throughputStats.MinPerSecond,
                    allocationStats.MaxAllocatedMb,
                    scenarioThreshold,
                    scenarioThroughputFloor,
                    scenarioAllocationLimit);

                results.Add(result);

                if (scenarioThreshold is { } threshold && result.MaxMs > threshold)
                {
                    failures.Add($"{result.Id} exceeded latency threshold: {result.MaxMs:F2} ms > {threshold:F2} ms");
                }

                if (scenarioThroughputFloor is { } floor && result.MinThroughputPerSecond < floor)
                {
                    failures.Add($"{result.Id} fell below throughput floor: {result.MinThroughputPerSecond:N0} findings/s < {floor:N0} findings/s");
                }

                if (scenarioAllocationLimit is { } limit && result.MaxAllocatedMb > limit)
                {
                    failures.Add($"{result.Id} exceeded allocation budget: {result.MaxAllocatedMb:F2} MB > {limit:F2} MB");
                }

                baseline.TryGetValue(result.Id, out var baselineEntry);
                var report = new BenchmarkScenarioReport(result, baselineEntry, regressionLimit);
                reports.Add(report);
                failures.AddRange(report.BuildRegressionFailureMessages());
            }

            TablePrinter.Print(results);

            if (!string.IsNullOrWhiteSpace(options.CsvOutPath))
            {
                CsvWriter.Write(options.CsvOutPath!, results);
            }

            if (!string.IsNullOrWhiteSpace(options.JsonOutPath))
            {
                var metadata = new BenchmarkJsonMetadata(
                    SchemaVersion: "policy-bench/1.0",
                    CapturedAtUtc: capturedAt,
                    Commit: options.Commit,
                    Environment: options.Environment);

                await BenchmarkJsonWriter.WriteAsync(
                    options.JsonOutPath!,
                    metadata,
                    reports,
                    CancellationToken.None).ConfigureAwait(false);
            }

            if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath))
            {
                PrometheusWriter.Write(options.PrometheusOutPath!, reports);
            }

            if (failures.Count > 0)
            {
                Console.Error.WriteLine();
                Console.Error.WriteLine("Benchmark failures detected:");
                foreach (var failure in failures.Distinct())
                {
                    Console.Error.WriteLine($" - {failure}");
                }

                return 1;
            }

            return 0;
        }
        catch (Exception ex)
        {
            Console.Error.WriteLine($"policy-bench error: {ex.Message}");
            return 1;
        }
    }

    private static string ResolveRepoRoot(string? overridePath, string configPath)
    {
        if (!string.IsNullOrWhiteSpace(overridePath))
        {
            return Path.GetFullPath(overridePath);
        }

        var configDirectory = Path.GetDirectoryName(configPath);
        if (string.IsNullOrWhiteSpace(configDirectory))
        {
            return Directory.GetCurrentDirectory();
        }

        return Path.GetFullPath(Path.Combine(configDirectory, "..", "..", ".."));
    }

    private sealed record ProgramOptions(
        string ConfigPath,
        int? Iterations,
        double? ThresholdMs,
        double? MinThroughputPerSecond,
        double? MaxAllocatedMb,
        string? CsvOutPath,
        string? JsonOutPath,
        string? PrometheusOutPath,
        string? RepoRoot,
        string BaselinePath,
        DateTimeOffset? CapturedAtUtc,
        string? Commit,
        string? Environment,
        double? RegressionLimit)
    {
        public static ProgramOptions Parse(string[] args)
        {
            var configPath = DefaultConfigPath();
            var baselinePath = DefaultBaselinePath();

            int? iterations = null;
            double? thresholdMs = null;
            double? minThroughput = null;
            double? maxAllocated = null;
            string? csvOut = null;
            string? jsonOut = null;
            string? promOut = null;
            string? repoRoot = null;
            DateTimeOffset? capturedAt = null;
            string? commit = null;
            string? environment = null;
            double? regressionLimit = null;

            for (var index = 0; index < args.Length; index++)
            {
                var current = args[index];
                switch (current)
                {
                    case "--config":
                        EnsureNext(args, index);
                        configPath = Path.GetFullPath(args[++index]);
                        break;
                    case "--iterations":
                        EnsureNext(args, index);
                        iterations = int.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--threshold-ms":
                        EnsureNext(args, index);
                        thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--min-throughput":
                        EnsureNext(args, index);
                        minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--max-allocated-mb":
                        EnsureNext(args, index);
                        maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--csv":
                        EnsureNext(args, index);
                        csvOut = args[++index];
                        break;
                    case "--json":
                        EnsureNext(args, index);
                        jsonOut = args[++index];
                        break;
                    case "--prometheus":
                        EnsureNext(args, index);
                        promOut = args[++index];
                        break;
                    case "--repo-root":
                        EnsureNext(args, index);
                        repoRoot = args[++index];
                        break;
                    case "--baseline":
                        EnsureNext(args, index);
                        baselinePath = Path.GetFullPath(args[++index]);
                        break;
                    case "--captured-at":
                        EnsureNext(args, index);
                        capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal);
                        break;
                    case "--commit":
                        EnsureNext(args, index);
                        commit = args[++index];
                        break;
                    case "--environment":
                        EnsureNext(args, index);
                        environment = args[++index];
                        break;
                    case "--regression-limit":
                        EnsureNext(args, index);
                        regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture);
                        break;
                    case "--help":
                    case "-h":
                        PrintUsage();
                        System.Environment.Exit(0);
                        break;
                    default:
                        throw new ArgumentException($"Unknown argument '{current}'.");
                }
            }

            return new ProgramOptions(
                configPath,
                iterations,
                thresholdMs,
                minThroughput,
                maxAllocated,
                csvOut,
                jsonOut,
                promOut,
                repoRoot,
                baselinePath,
                capturedAt,
                commit,
                environment,
                regressionLimit);
        }

        private static string DefaultConfigPath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "config.json");
        }

        private static string DefaultBaselinePath()
        {
            var binaryDir = AppContext.BaseDirectory;
            var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", ".."));
            var benchRoot = Path.GetFullPath(Path.Combine(projectDir, ".."));
            return Path.Combine(benchRoot, "baseline.csv");
        }

        private static void EnsureNext(string[] args, int index)
        {
            if (index + 1 >= args.Length)
            {
                throw new ArgumentException("Missing value for argument.");
            }
        }

        private static void PrintUsage()
        {
            Console.WriteLine("Usage: policy-bench [options]");
            Console.WriteLine();
            Console.WriteLine("Options:");
            Console.WriteLine(" --config <path> Path to benchmark configuration JSON.");
            Console.WriteLine(" --iterations <count> Override iteration count.");
            Console.WriteLine(" --threshold-ms <value> Global latency threshold in milliseconds.");
            Console.WriteLine(" --min-throughput <value> Global throughput floor (findings/second).");
            Console.WriteLine(" --max-allocated-mb <value> Global allocation ceiling (MB).");
            Console.WriteLine(" --csv <path> Write CSV results to path.");
            Console.WriteLine(" --json <path> Write JSON results to path.");
            Console.WriteLine(" --prometheus <path> Write Prometheus exposition metrics to path.");
            Console.WriteLine(" --repo-root <path> Repository root override.");
            Console.WriteLine(" --baseline <path> Baseline CSV path.");
            Console.WriteLine(" --captured-at <iso8601> Timestamp to embed in JSON metadata.");
            Console.WriteLine(" --commit <sha> Commit identifier for metadata.");
            Console.WriteLine(" --environment <name> Environment label for metadata.");
            Console.WriteLine(" --regression-limit <value> Regression multiplier (default 1.15).");
        }
    }
}

internal static class TablePrinter
{
    public static void Print(IEnumerable<ScenarioResult> results)
    {
        Console.WriteLine("Scenario | Findings | Mean(ms) | P95(ms) | Max(ms) | Min k/s | Alloc(MB)");
        Console.WriteLine("---------------------------- | ----------- | ---------- | ---------- | ---------- | -------- | --------");
        foreach (var row in results)
        {
            Console.WriteLine(string.Join(" | ", new[]
            {
                row.IdColumn,
                row.FindingsColumn,
                row.MeanColumn,
                row.P95Column,
                row.MaxColumn,
                row.MinThroughputColumn,
                row.AllocatedColumn
            }));
        }
    }
}

internal static class CsvWriter
{
    public static void Write(string path, IEnumerable<ScenarioResult> results)
    {
        var resolvedPath = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolvedPath);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None);
        using var writer = new StreamWriter(stream);
        writer.WriteLine("scenario,iterations,findings,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb");

        foreach (var row in results)
        {
            writer.Write(row.Id);
            writer.Write(',');
            writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.FindingCount.ToString(CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MeanThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MinThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture));
            writer.Write(',');
            writer.Write(row.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture));
            writer.WriteLine();
        }
    }
}
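An illustrative invocation using the flags documented in PrintUsage; the file paths are placeholders, not part of the commit.

    policy-bench --config bench/config.json --iterations 5 \
      --baseline bench/baseline.csv --csv out/policy-bench.csv \
      --json out/policy-bench.json --prometheus out/policy-bench.prom \
      --regression-limit 1.15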
@@ -0,0 +1,125 @@
using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using StellaOps.Bench.PolicyEngine.Baseline;

namespace StellaOps.Bench.PolicyEngine.Reporting;

internal static class BenchmarkJsonWriter
{
    private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web)
    {
        WriteIndented = true,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    public static async Task WriteAsync(
        string path,
        BenchmarkJsonMetadata metadata,
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(metadata);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var document = new BenchmarkJsonDocument(
            metadata.SchemaVersion,
            metadata.CapturedAtUtc,
            metadata.Commit,
            metadata.Environment,
            reports.Select(CreateScenario).ToArray());

        await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
        await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false);
        await stream.FlushAsync(cancellationToken).ConfigureAwait(false);
    }

    private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report)
    {
        var baseline = report.Baseline;
        return new BenchmarkJsonScenario(
            report.Result.Id,
            report.Result.Label,
            report.Result.Iterations,
            report.Result.FindingCount,
            report.Result.MeanMs,
            report.Result.P95Ms,
            report.Result.MaxMs,
            report.Result.MeanThroughputPerSecond,
            report.Result.MinThroughputPerSecond,
            report.Result.MaxAllocatedMb,
            report.Result.ThresholdMs,
            report.Result.MinThroughputThresholdPerSecond,
            report.Result.MaxAllocatedThresholdMb,
            baseline is null
                ? null
                : new BenchmarkJsonScenarioBaseline(
                    baseline.Iterations,
                    baseline.FindingCount,
                    baseline.MeanMs,
                    baseline.P95Ms,
                    baseline.MaxMs,
                    baseline.MeanThroughputPerSecond,
                    baseline.MinThroughputPerSecond,
                    baseline.MaxAllocatedMb),
            new BenchmarkJsonScenarioRegression(
                report.DurationRegressionRatio,
                report.ThroughputRegressionRatio,
                report.RegressionLimit,
                report.RegressionBreached));
    }

    private sealed record BenchmarkJsonDocument(
        string SchemaVersion,
        DateTimeOffset CapturedAt,
        string? Commit,
        string? Environment,
        IReadOnlyList<BenchmarkJsonScenario> Scenarios);

    private sealed record BenchmarkJsonScenario(
        string Id,
        string Label,
        int Iterations,
        int FindingCount,
        double MeanMs,
        double P95Ms,
        double MaxMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MaxAllocatedMb,
        double? ThresholdMs,
        double? MinThroughputThresholdPerSecond,
        double? MaxAllocatedThresholdMb,
        BenchmarkJsonScenarioBaseline? Baseline,
        BenchmarkJsonScenarioRegression Regression);

    private sealed record BenchmarkJsonScenarioBaseline(
        int Iterations,
        int FindingCount,
        double MeanMs,
        double P95Ms,
        double MaxMs,
        double MeanThroughputPerSecond,
        double MinThroughputPerSecond,
        double MaxAllocatedMb);

    private sealed record BenchmarkJsonScenarioRegression(
        double? DurationRatio,
        double? ThroughputRatio,
        double Limit,
        bool Breached);
}

internal sealed record BenchmarkJsonMetadata(
    string SchemaVersion,
    DateTimeOffset CapturedAtUtc,
    string? Commit,
    string? Environment);
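With JsonSerializerDefaults.Web and WriteIndented, the emitted document is camelCased and null members (commit, environment, baseline) are dropped. An abbreviated, illustrative shape showing only a subset of the scenario fields:

    {
      "schemaVersion": "policy-bench/1.0",
      "capturedAt": "2025-10-26T00:00:00+00:00",
      "scenarios": [
        {
          "id": "policy_eval",
          "iterations": 5,
          "findingCount": 1000000,
          "meanMs": 812.5,
          "maxMs": 1012.4,
          "regression": { "limit": 1.15, "breached": false }
        }
      ]
    }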
@@ -0,0 +1,82 @@
using StellaOps.Bench.PolicyEngine.Baseline;

namespace StellaOps.Bench.PolicyEngine.Reporting;

internal sealed class BenchmarkScenarioReport
{
    private const double DefaultRegressionLimit = 1.15d;

    public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
    {
        Result = result ?? throw new ArgumentNullException(nameof(result));
        Baseline = baseline;
        RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
        DurationRegressionRatio = CalculateDurationRatio(result.MaxMs, baseline?.MaxMs);
        ThroughputRegressionRatio = CalculateThroughputRatio(result.MinThroughputPerSecond, baseline?.MinThroughputPerSecond);
    }

    public ScenarioResult Result { get; }

    public BaselineEntry? Baseline { get; }

    public double RegressionLimit { get; }

    public double? DurationRegressionRatio { get; }

    public double? ThroughputRegressionRatio { get; }

    public bool DurationRegressionBreached =>
        DurationRegressionRatio is { } ratio &&
        ratio >= RegressionLimit;

    public bool ThroughputRegressionBreached =>
        ThroughputRegressionRatio is { } ratio &&
        ratio >= RegressionLimit;

    public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached;

    public IEnumerable<string> BuildRegressionFailureMessages()
    {
        if (Baseline is null)
        {
            yield break;
        }

        if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio)
        {
            var delta = (durationRatio - 1d) * 100d;
            yield return $"{Result.Id} exceeded max duration budget: {Result.MaxMs:F2} ms vs baseline {Baseline.MaxMs:F2} ms (+{delta:F1}%).";
        }

        if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio)
        {
            var delta = (throughputRatio - 1d) * 100d;
            yield return $"{Result.Id} throughput regressed: min {Result.MinThroughputPerSecond:N0} /s vs baseline {Baseline.MinThroughputPerSecond:N0} /s (-{delta:F1}%).";
        }
    }

    private static double? CalculateDurationRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        return current / baseline.Value;
    }

    private static double? CalculateThroughputRatio(double current, double? baseline)
    {
        if (!baseline.HasValue || baseline.Value <= 0d)
        {
            return null;
        }

        if (current <= 0d)
        {
            return double.PositiveInfinity;
        }

        return baseline.Value / current;
    }
}
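Worked example of the regression check with illustrative numbers: under the default limit of 1.15, a scenario whose max duration rises from a baseline of 80.00 ms to 95.00 ms has a duration ratio of 95 / 80 = 1.19, which breaches the limit and is reported as roughly +18.8%. A throughput drop from a baseline minimum of 1,000,000 findings/s to 900,000 findings/s gives a ratio of 1,000,000 / 900,000 = 1.11 and stays within budget.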
@@ -0,0 +1,83 @@
using System.Globalization;
using System.Text;

namespace StellaOps.Bench.PolicyEngine.Reporting;

internal static class PrometheusWriter
{
    public static void Write(string path, IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(path);
        ArgumentNullException.ThrowIfNull(reports);

        var resolved = Path.GetFullPath(path);
        var directory = Path.GetDirectoryName(resolved);
        if (!string.IsNullOrEmpty(directory))
        {
            Directory.CreateDirectory(directory);
        }

        var builder = new StringBuilder();
        builder.AppendLine("# HELP policy_engine_bench_duration_ms Policy Engine benchmark duration metrics (milliseconds).");
        builder.AppendLine("# TYPE policy_engine_bench_duration_ms gauge");
        builder.AppendLine("# HELP policy_engine_bench_throughput_per_sec Policy Engine benchmark throughput metrics (findings per second).");
        builder.AppendLine("# TYPE policy_engine_bench_throughput_per_sec gauge");
        builder.AppendLine("# HELP policy_engine_bench_allocation_mb Policy Engine benchmark allocation metrics (megabytes).");
        builder.AppendLine("# TYPE policy_engine_bench_allocation_mb gauge");

        foreach (var report in reports)
        {
            var scenarioLabel = Escape(report.Result.Id);
            AppendMetric(builder, "policy_engine_bench_mean_ms", scenarioLabel, report.Result.MeanMs);
            AppendMetric(builder, "policy_engine_bench_p95_ms", scenarioLabel, report.Result.P95Ms);
            AppendMetric(builder, "policy_engine_bench_max_ms", scenarioLabel, report.Result.MaxMs);
            AppendMetric(builder, "policy_engine_bench_threshold_ms", scenarioLabel, report.Result.ThresholdMs);

            AppendMetric(builder, "policy_engine_bench_mean_throughput_per_sec", scenarioLabel, report.Result.MeanThroughputPerSecond);
            AppendMetric(builder, "policy_engine_bench_min_throughput_per_sec", scenarioLabel, report.Result.MinThroughputPerSecond);
            AppendMetric(builder, "policy_engine_bench_min_throughput_threshold_per_sec", scenarioLabel, report.Result.MinThroughputThresholdPerSecond);

            AppendMetric(builder, "policy_engine_bench_max_allocated_mb", scenarioLabel, report.Result.MaxAllocatedMb);
            AppendMetric(builder, "policy_engine_bench_max_allocated_threshold_mb", scenarioLabel, report.Result.MaxAllocatedThresholdMb);

            if (report.Baseline is { } baseline)
            {
                AppendMetric(builder, "policy_engine_bench_baseline_max_ms", scenarioLabel, baseline.MaxMs);
                AppendMetric(builder, "policy_engine_bench_baseline_mean_ms", scenarioLabel, baseline.MeanMs);
                AppendMetric(builder, "policy_engine_bench_baseline_min_throughput_per_sec", scenarioLabel, baseline.MinThroughputPerSecond);
            }

            if (report.DurationRegressionRatio is { } durationRatio)
            {
                AppendMetric(builder, "policy_engine_bench_duration_regression_ratio", scenarioLabel, durationRatio);
            }

            if (report.ThroughputRegressionRatio is { } throughputRatio)
            {
                AppendMetric(builder, "policy_engine_bench_throughput_regression_ratio", scenarioLabel, throughputRatio);
            }

            AppendMetric(builder, "policy_engine_bench_regression_limit", scenarioLabel, report.RegressionLimit);
            AppendMetric(builder, "policy_engine_bench_regression_breached", scenarioLabel, report.RegressionBreached ? 1 : 0);
        }

        File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8);
    }

    private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value)
    {
        if (!value.HasValue)
        {
            return;
        }

        builder.Append(metric);
        builder.Append("{scenario=\"");
        builder.Append(scenario);
        builder.Append("\"} ");
        builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture));
    }

    private static string Escape(string value) =>
        value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal);
}
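The exposition file contains one gauge sample per metric and scenario; an illustrative excerpt (values made up, scenario id taken from the default ScenarioId):

    policy_engine_bench_mean_ms{scenario="policy_eval"} 812.5
    policy_engine_bench_min_throughput_per_sec{scenario="policy_eval"} 987600
    policy_engine_bench_regression_breached{scenario="policy_eval"} 0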
@@ -0,0 +1,110 @@
using System.Globalization;

namespace StellaOps.Bench.PolicyEngine;

internal sealed record ScenarioResult(
    string Id,
    string Label,
    int Iterations,
    int FindingCount,
    double MeanMs,
    double P95Ms,
    double MaxMs,
    double MeanThroughputPerSecond,
    double MinThroughputPerSecond,
    double MaxAllocatedMb,
    double? ThresholdMs,
    double? MinThroughputThresholdPerSecond,
    double? MaxAllocatedThresholdMb)
{
    public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28];
    public string FindingsColumn => FindingCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12);
    public string MeanColumn => MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    public string P95Column => P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    public string MaxColumn => MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10);
    public string MinThroughputColumn => (MinThroughputPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11);
    public string AllocatedColumn => MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9);
}

internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs)
{
    public static DurationStatistics From(IReadOnlyList<double> durations)
    {
        if (durations.Count == 0)
        {
            return new DurationStatistics(0, 0, 0);
        }

        var sorted = durations.ToArray();
        Array.Sort(sorted);

        var total = 0d;
        foreach (var value in durations)
        {
            total += value;
        }

        var mean = total / durations.Count;
        var p95 = Percentile(sorted, 95);
        var max = sorted[^1];

        return new DurationStatistics(mean, p95, max);
    }

    private static double Percentile(IReadOnlyList<double> sorted, double percentile)
    {
        if (sorted.Count == 0)
        {
            return 0;
        }

        var rank = (percentile / 100d) * (sorted.Count - 1);
        var lower = (int)Math.Floor(rank);
        var upper = (int)Math.Ceiling(rank);
        var weight = rank - lower;

        if (upper >= sorted.Count)
        {
            return sorted[lower];
        }

        return sorted[lower] + weight * (sorted[upper] - sorted[lower]);
    }
}

internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond)
{
    public static ThroughputStatistics From(IReadOnlyList<double> values)
    {
        if (values.Count == 0)
        {
            return new ThroughputStatistics(0, 0);
        }

        var total = 0d;
        var min = double.MaxValue;

        foreach (var value in values)
        {
            total += value;
            min = Math.Min(min, value);
        }

        var mean = total / values.Count;
        return new ThroughputStatistics(mean, min);
    }
}

internal readonly record struct AllocationStatistics(double MaxAllocatedMb)
{
    public static AllocationStatistics From(IReadOnlyList<double> values)
    {
        var max = 0d;
        foreach (var value in values)
        {
            max = Math.Max(max, value);
        }

        return new AllocationStatistics(max);
    }
}
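The percentile uses linear interpolation between closest ranks. Worked example with illustrative samples: for five sorted durations [10, 12, 15, 20, 30], the p95 rank is 0.95 * (5 - 1) = 3.8, so the result is 20 + 0.8 * (30 - 20) = 28 ms.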
@@ -0,0 +1,14 @@
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <LangVersion>preview</LangVersion>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
  </PropertyGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\..\StellaOps.Policy\StellaOps.Policy.csproj" />
  </ItemGroup>
</Project>