feat(rate-limiting): Implement core rate limiting functionality with configuration, decision-making, metrics, middleware, and service registration

- Add RateLimitConfig for configuration management with YAML binding support.
- Introduce RateLimitDecision to encapsulate the result of rate limit checks.
- Implement RateLimitMetrics for OpenTelemetry metrics tracking.
- Create RateLimitMiddleware for enforcing rate limits on incoming requests.
- Develop RateLimitService to orchestrate instance and environment rate limit checks.
- Add RateLimitServiceCollectionExtensions for dependency injection registration.
This commit is contained in:
master
2025-12-17 18:02:37 +02:00
parent 394b57f6bf
commit 8bbfe4d2d2
211 changed files with 47179 additions and 1590 deletions

View File

@@ -0,0 +1,222 @@
// -----------------------------------------------------------------------------
// BenchmarkResultWriter.cs
// Sprint: SPRINT_3500_0003_0001_ground_truth_corpus_ci_gates
// Task: CORPUS-006 - Implement BenchmarkResultWriter with metrics calculation
// Description: Writes benchmark results to JSON and computes metrics
// -----------------------------------------------------------------------------
using System.Text.Json;
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Benchmarks;
/// <summary>
/// Writes benchmark results to files and computes metrics.
/// </summary>
public interface IBenchmarkResultWriter
{
    /// <summary>
    /// Write benchmark result to the results directory.
    /// </summary>
    /// <param name="result">Result to serialize as JSON.</param>
    /// <param name="outputPath">Destination file path; parent directories are created as needed.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task WriteResultAsync(BenchmarkResult result, string outputPath, CancellationToken cancellationToken = default);

    /// <summary>
    /// Read the current baseline.
    /// </summary>
    /// <param name="baselinePath">Path to the baseline JSON file.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>The deserialized baseline, or null when no baseline file exists yet.</returns>
    Task<BenchmarkBaseline?> ReadBaselineAsync(string baselinePath, CancellationToken cancellationToken = default);

    /// <summary>
    /// Update the baseline from a benchmark result.
    /// </summary>
    /// <param name="result">Run whose metrics become the new baseline.</param>
    /// <param name="baselinePath">Destination path for the baseline JSON file.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    Task UpdateBaselineAsync(BenchmarkResult result, string baselinePath, CancellationToken cancellationToken = default);

    /// <summary>
    /// Generate a markdown report from benchmark result.
    /// </summary>
    /// <param name="result">Run to report on.</param>
    /// <param name="baseline">Optional baseline; when supplied, delta columns and a regression section are included.</param>
    /// <returns>The full markdown report text.</returns>
    string GenerateMarkdownReport(BenchmarkResult result, BenchmarkBaseline? baseline = null);
}
/// <summary>
/// Default implementation of <see cref="IBenchmarkResultWriter"/>.
/// Serializes results and baselines as indented camelCase JSON and renders
/// markdown reports with culture-invariant number formatting so CI artifacts
/// are byte-stable regardless of the agent's locale.
/// </summary>
public sealed class BenchmarkResultWriter : IBenchmarkResultWriter
{
    // Shared serializer settings: camelCase names, indented output, nulls omitted.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
    };

    /// <inheritdoc />
    public async Task WriteResultAsync(BenchmarkResult result, string outputPath, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(result);
        ArgumentException.ThrowIfNullOrEmpty(outputPath);

        EnsureParentDirectory(outputPath);

        var json = JsonSerializer.Serialize(result, JsonOptions);
        await File.WriteAllTextAsync(outputPath, json, cancellationToken);
    }

    /// <inheritdoc />
    public async Task<BenchmarkBaseline?> ReadBaselineAsync(string baselinePath, CancellationToken cancellationToken = default)
    {
        // A missing baseline is a normal condition (first run); signal it with null.
        if (!File.Exists(baselinePath))
            return null;

        var json = await File.ReadAllTextAsync(baselinePath, cancellationToken);
        return JsonSerializer.Deserialize<BenchmarkBaseline>(json, JsonOptions);
    }

    /// <inheritdoc />
    public async Task UpdateBaselineAsync(BenchmarkResult result, string baselinePath, CancellationToken cancellationToken = default)
    {
        ArgumentNullException.ThrowIfNull(result);
        // Was missing: validate the path up front, consistent with WriteResultAsync.
        ArgumentException.ThrowIfNullOrEmpty(baselinePath);

        var baseline = new BenchmarkBaseline(
            Version: result.CorpusVersion,
            Timestamp: result.Timestamp,
            Precision: result.Metrics.Precision,
            Recall: result.Metrics.Recall,
            F1: result.Metrics.F1,
            TtfrpP95Ms: result.Metrics.TtfrpP95Ms);

        EnsureParentDirectory(baselinePath);

        var json = JsonSerializer.Serialize(baseline, JsonOptions);
        await File.WriteAllTextAsync(baselinePath, json, cancellationToken);
    }

    /// <inheritdoc />
    public string GenerateMarkdownReport(BenchmarkResult result, BenchmarkBaseline? baseline = null)
    {
        // Was missing: fail fast instead of throwing NullReferenceException mid-report.
        ArgumentNullException.ThrowIfNull(result);

        var sb = new System.Text.StringBuilder();

        // --- Header -----------------------------------------------------------
        sb.AppendLine("# Reachability Benchmark Report");
        sb.AppendLine();
        sb.AppendLine($"**Run ID:** `{result.RunId}`");
        // Invariant culture keeps the Gregorian calendar and stable digits on all locales.
        sb.AppendLine($"**Timestamp:** {result.Timestamp.ToString("yyyy-MM-dd HH:mm:ss", System.Globalization.CultureInfo.InvariantCulture)} UTC");
        sb.AppendLine($"**Corpus Version:** {result.CorpusVersion}");
        sb.AppendLine($"**Scanner Version:** {result.ScannerVersion}");
        sb.AppendLine($"**Duration:** {result.DurationMs}ms");
        sb.AppendLine();

        // --- Metrics summary table -------------------------------------------
        sb.AppendLine("## Metrics Summary");
        sb.AppendLine();
        sb.AppendLine("| Metric | Value | Baseline | Delta |");
        sb.AppendLine("|--------|-------|----------|-------|");
        var m = result.Metrics;
        var b = baseline;
        AppendMetricRow(sb, "Precision", m.Precision, b?.Precision);
        AppendMetricRow(sb, "Recall", m.Recall, b?.Recall);
        AppendMetricRow(sb, "F1 Score", m.F1, b?.F1);
        AppendMetricRow(sb, "TTFRP p50 (ms)", m.TtfrpP50Ms, null);
        AppendMetricRow(sb, "TTFRP p95 (ms)", m.TtfrpP95Ms, b?.TtfrpP95Ms);
        AppendMetricRow(sb, "Determinism", m.DeterministicReplay, null);
        sb.AppendLine();

        // --- Regression check (only when a baseline is available) ------------
        if (baseline != null)
        {
            var check = result.CheckRegression(baseline);
            sb.AppendLine("## Regression Check");
            sb.AppendLine();
            sb.AppendLine(check.Passed ? "✅ **PASSED**" : "❌ **FAILED**");
            sb.AppendLine();
            if (check.Issues.Count > 0)
            {
                sb.AppendLine("### Issues");
                sb.AppendLine();
                foreach (var issue in check.Issues)
                {
                    var icon = issue.Severity == RegressionSeverity.Error ? "🔴" : "🟡";
                    sb.AppendLine($"- {icon} **{issue.Metric}**: {issue.Message}");
                }
                sb.AppendLine();
            }
        }

        // --- Per-sample breakdown --------------------------------------------
        sb.AppendLine("## Sample Results");
        sb.AppendLine();
        sb.AppendLine("| Sample | Category | Sinks | Correct | Latency | Deterministic |");
        sb.AppendLine("|--------|----------|-------|---------|---------|---------------|");
        foreach (var sample in result.SampleResults)
        {
            var correct = sample.SinkResults.Count(s => s.Correct);
            var total = sample.SinkResults.Count;
            var status = correct == total ? "✅" : "❌";
            var detIcon = sample.Deterministic ? "✅" : "❌";
            sb.AppendLine($"| {sample.SampleId} | {sample.Category} | {correct}/{total} {status} | {sample.LatencyMs}ms | {detIcon} |");
        }

        // --- Failed sinks detail (only when there are failures) --------------
        var failedSinks = result.SampleResults
            .SelectMany(s => s.SinkResults.Where(sink => !sink.Correct)
                .Select(sink => (s.SampleId, sink)))
            .ToList();
        if (failedSinks.Count > 0)
        {
            sb.AppendLine();
            sb.AppendLine("## Failed Sinks");
            sb.AppendLine();
            sb.AppendLine("| Sample | Sink | Expected | Actual |");
            sb.AppendLine("|--------|------|----------|--------|");
            foreach (var (sampleId, sink) in failedSinks)
            {
                sb.AppendLine($"| {sampleId} | {sink.SinkId} | {sink.Expected} | {sink.Actual} |");
            }
        }

        return sb.ToString();
    }

    /// <summary>Creates the parent directory of <paramref name="path"/> if one is specified.</summary>
    private static void EnsureParentDirectory(string path)
    {
        var dir = Path.GetDirectoryName(path);
        if (!string.IsNullOrEmpty(dir))
            Directory.CreateDirectory(dir);
    }

    /// <summary>
    /// Appends one "| name | value | baseline | delta |" table row.
    /// Metrics whose name mentions "ms" are latencies and render as whole
    /// numbers; everything else renders as a percentage. Formatting is
    /// culture-invariant so reports do not vary between CI agents.
    /// </summary>
    private static void AppendMetricRow(System.Text.StringBuilder sb, string name, double value, double? baseline)
    {
        var isLatency = name.Contains("ms", StringComparison.Ordinal);

        var formatted = Format(value);
        var baselineStr = baseline.HasValue ? Format(baseline.Value) : "-";

        string delta = "-";
        if (baseline.HasValue)
        {
            var diff = value - baseline.Value;
            // Negative diffs already carry "-" from the formatter; only add "+".
            var sign = diff >= 0 ? "+" : "";
            delta = sign + Format(diff);
        }

        sb.AppendLine($"| {name} | {formatted} | {baselineStr} | {delta} |");

        // Local function keeps the latency-vs-ratio formatting choice in one place.
        string Format(double v) => isLatency
            ? FormattableString.Invariant($"{v:N0}")
            : FormattableString.Invariant($"{v:P1}");
    }

    /// <summary>
    /// Integer overload of <see cref="AppendMetricRow(System.Text.StringBuilder, string, double, double?)"/>;
    /// always renders whole numbers, culture-invariantly.
    /// </summary>
    private static void AppendMetricRow(System.Text.StringBuilder sb, string name, int value, int? baseline)
    {
        var baselineStr = baseline.HasValue
            ? FormattableString.Invariant($"{baseline.Value:N0}")
            : "-";

        string delta = "-";
        if (baseline.HasValue)
        {
            var diff = value - baseline.Value;
            var sign = diff >= 0 ? "+" : "";
            delta = FormattableString.Invariant($"{sign}{diff:N0}");
        }

        sb.AppendLine(FormattableString.Invariant($"| {name} | {value:N0} | {baselineStr} | {delta} |"));
    }
}

View File

@@ -0,0 +1,232 @@
// -----------------------------------------------------------------------------
// ICorpusRunner.cs
// Sprint: SPRINT_3500_0003_0001_ground_truth_corpus_ci_gates
// Task: CORPUS-005 - Implement ICorpusRunner interface for benchmark execution
// Description: Interface and models for running ground-truth corpus benchmarks
// -----------------------------------------------------------------------------
using System.Text.Json.Serialization;
namespace StellaOps.Scanner.Benchmarks;
/// <summary>
/// Interface for running ground-truth corpus benchmarks.
/// </summary>
/// <remarks>
/// NOTE(review): implementations are presumably expected to honor the filters
/// and determinism settings in <see cref="CorpusRunOptions"/> — confirm against
/// the concrete runner.
/// </remarks>
public interface ICorpusRunner
{
    /// <summary>
    /// Run the full corpus and compute metrics.
    /// </summary>
    /// <param name="corpusPath">Path to corpus.json index file.</param>
    /// <param name="options">Run options.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Benchmark results with metrics.</returns>
    Task<BenchmarkResult> RunAsync(string corpusPath, CorpusRunOptions options, CancellationToken cancellationToken = default);

    /// <summary>
    /// Run a single sample from the corpus.
    /// </summary>
    /// <param name="samplePath">Path to sample.manifest.json.</param>
    /// <param name="cancellationToken">Cancellation token.</param>
    /// <returns>Sample result.</returns>
    Task<SampleResult> RunSampleAsync(string samplePath, CancellationToken cancellationToken = default);
}
/// <summary>
/// Options for corpus runs.
/// </summary>
public sealed record CorpusRunOptions
{
    /// <summary>Filter to specific categories; null presumably disables the filter — confirm in runner.</summary>
    public string[]? Categories { get; init; }

    /// <summary>Filter to specific sample IDs; null presumably disables the filter — confirm in runner.</summary>
    public string[]? SampleIds { get; init; }

    /// <summary>Number of parallel workers. Defaults to 1 (sequential).</summary>
    public int Parallelism { get; init; } = 1;

    /// <summary>Timeout per sample in milliseconds. Defaults to 30000 (30 seconds).</summary>
    public int TimeoutMs { get; init; } = 30000;

    /// <summary>Whether to run determinism checks. Defaults to true.</summary>
    public bool CheckDeterminism { get; init; } = true;

    /// <summary>Number of runs for determinism check. Defaults to 3.</summary>
    public int DeterminismRuns { get; init; } = 3;
}
/// <summary>
/// Result of a full benchmark run.
/// </summary>
/// <param name="RunId">Unique identifier for this benchmark run.</param>
/// <param name="Timestamp">Timestamp of the run (rendered as UTC in reports).</param>
/// <param name="CorpusVersion">Version of the ground-truth corpus used.</param>
/// <param name="ScannerVersion">Version of the scanner under test.</param>
/// <param name="Metrics">Aggregated metrics for the run.</param>
/// <param name="SampleResults">Per-sample results.</param>
/// <param name="DurationMs">Total run duration in milliseconds.</param>
public sealed record BenchmarkResult(
    [property: JsonPropertyName("runId")] string RunId,
    [property: JsonPropertyName("timestamp")] DateTimeOffset Timestamp,
    [property: JsonPropertyName("corpusVersion")] string CorpusVersion,
    [property: JsonPropertyName("scannerVersion")] string ScannerVersion,
    [property: JsonPropertyName("metrics")] BenchmarkMetrics Metrics,
    [property: JsonPropertyName("sampleResults")] IReadOnlyList<SampleResult> SampleResults,
    [property: JsonPropertyName("durationMs")] long DurationMs)
{
    /// <summary>
    /// Compare this run's metrics against a stored baseline and collect regressions.
    /// Precision/recall drops of more than one percentage point and any non-perfect
    /// determinism are errors; a TTFRP p95 increase above 20% is a warning only.
    /// </summary>
    /// <param name="baseline">Baseline to compare against.</param>
    /// <returns>Check result whose Passed flag is false only when an error-severity issue exists.</returns>
    public RegressionCheckResult CheckRegression(BenchmarkBaseline baseline)
    {
        ArgumentNullException.ThrowIfNull(baseline);

        var issues = new List<RegressionIssue>();

        // Precision check: error when precision dropped by more than 1 percentage point.
        var precisionDrop = baseline.Precision - Metrics.Precision;
        if (precisionDrop > 0.01)
        {
            issues.Add(new RegressionIssue(
                "precision",
                $"Precision dropped from {baseline.Precision:P1} to {Metrics.Precision:P1} ({precisionDrop:P1})",
                RegressionSeverity.Error));
        }

        // Recall check: same 1pp tolerance as precision.
        var recallDrop = baseline.Recall - Metrics.Recall;
        if (recallDrop > 0.01)
        {
            issues.Add(new RegressionIssue(
                "recall",
                $"Recall dropped from {baseline.Recall:P1} to {Metrics.Recall:P1} ({recallDrop:P1})",
                RegressionSeverity.Error));
        }

        // Determinism check: any sample failing deterministic replay is an error.
        if (Metrics.DeterministicReplay < 1.0)
        {
            issues.Add(new RegressionIssue(
                "determinism",
                $"Deterministic replay is {Metrics.DeterministicReplay:P0} (expected 100%)",
                RegressionSeverity.Error));
        }

        // TTFRP p95 check (warning only). Guard against a zero baseline, which
        // previously produced Infinity/NaN via integer-to-double division and
        // could emit a spurious warning.
        if (baseline.TtfrpP95Ms > 0)
        {
            var ttfrpIncrease = (Metrics.TtfrpP95Ms - baseline.TtfrpP95Ms) / (double)baseline.TtfrpP95Ms;
            if (ttfrpIncrease > 0.20)
            {
                issues.Add(new RegressionIssue(
                    "ttfrp_p95",
                    $"TTFRP p95 increased from {baseline.TtfrpP95Ms}ms to {Metrics.TtfrpP95Ms}ms ({ttfrpIncrease:P0})",
                    RegressionSeverity.Warning));
            }
        }

        return new RegressionCheckResult(
            Passed: !issues.Any(i => i.Severity == RegressionSeverity.Error),
            Issues: issues);
    }
}
/// <summary>
/// Metrics from a benchmark run: sink-level precision/recall/F1, latency
/// percentiles, and the fraction of samples that replayed deterministically.
/// </summary>
public sealed record BenchmarkMetrics(
    [property: JsonPropertyName("precision")] double Precision,
    [property: JsonPropertyName("recall")] double Recall,
    [property: JsonPropertyName("f1")] double F1,
    [property: JsonPropertyName("ttfrp_p50_ms")] int TtfrpP50Ms,
    [property: JsonPropertyName("ttfrp_p95_ms")] int TtfrpP95Ms,
    [property: JsonPropertyName("deterministicReplay")] double DeterministicReplay)
{
    /// <summary>
    /// Aggregates sample results into a single metrics record.
    /// An empty result set yields zeroed metrics with determinism reported as 1.0.
    /// </summary>
    public static BenchmarkMetrics Compute(IReadOnlyList<SampleResult> results)
    {
        if (results.Count == 0)
            return new(0, 0, 0, 0, 0, 1.0);

        // Confusion-matrix tallies across every sink of every sample.
        int truePos = 0, falsePos = 0, trueNeg = 0, falseNeg = 0;
        var latencies = new List<int>();
        var deterministicSamples = 0;

        foreach (var sample in results)
        {
            foreach (var sink in sample.SinkResults)
            {
                switch (sink.Expected, sink.Actual)
                {
                    case ("reachable", "reachable"):
                        truePos++;
                        break;
                    case ("reachable", "unreachable"):
                        falseNeg++;
                        break;
                    case ("unreachable", "unreachable"):
                        trueNeg++;
                        break;
                    case ("unreachable", "reachable"):
                        falsePos++;
                        break;
                }
            }

            latencies.Add((int)sample.LatencyMs);
            if (sample.Deterministic)
                deterministicSamples++;
        }

        // With no positive predictions/labels, precision/recall default to 1.0.
        var precision = truePos + falsePos > 0 ? (double)truePos / (truePos + falsePos) : 1.0;
        var recall = truePos + falseNeg > 0 ? (double)truePos / (truePos + falseNeg) : 1.0;
        var f1 = precision + recall > 0 ? 2 * precision * recall / (precision + recall) : 0;

        // Naive nearest-index percentiles over the sorted latency list.
        latencies.Sort();
        var p50 = latencies.Count > 0 ? latencies[latencies.Count / 2] : 0;
        var p95 = latencies.Count > 0 ? latencies[(int)(latencies.Count * 0.95)] : 0;

        var determinism = results.Count > 0
            ? (double)deterministicSamples / results.Count
            : 1.0;

        return new(
            Math.Round(precision, 4),
            Math.Round(recall, 4),
            Math.Round(f1, 4),
            p50,
            p95,
            determinism);
    }
}
/// <summary>
/// Result of a single sample run.
/// </summary>
/// <param name="SampleId">Identifier of the corpus sample.</param>
/// <param name="Name">Human-readable sample name.</param>
/// <param name="Category">Sample category, used for filtering and report grouping.</param>
/// <param name="SinkResults">Per-sink reachability verdicts for this sample.</param>
/// <param name="LatencyMs">Sample latency in milliseconds.</param>
/// <param name="Deterministic">True when the sample replayed deterministically; presumably set by the determinism re-runs — confirm in runner.</param>
/// <param name="Error">Error message when the sample failed to run; null on success.</param>
public sealed record SampleResult(
    [property: JsonPropertyName("sampleId")] string SampleId,
    [property: JsonPropertyName("name")] string Name,
    [property: JsonPropertyName("category")] string Category,
    [property: JsonPropertyName("sinkResults")] IReadOnlyList<SinkResult> SinkResults,
    [property: JsonPropertyName("latencyMs")] long LatencyMs,
    [property: JsonPropertyName("deterministic")] bool Deterministic,
    [property: JsonPropertyName("error")] string? Error = null);
/// <summary>
/// Result for a single sink within a sample.
/// </summary>
/// <param name="SinkId">Identifier of the sink under test.</param>
/// <param name="Expected">Expected verdict; metrics code recognizes "reachable" and "unreachable".</param>
/// <param name="Actual">Verdict produced by the scanner.</param>
/// <param name="Correct">Whether the verdict matched; presumably equals Expected == Actual — set by the runner.</param>
/// <param name="PathsFound">Optional call paths discovered for this sink; null when none were recorded.</param>
public sealed record SinkResult(
    [property: JsonPropertyName("sinkId")] string SinkId,
    [property: JsonPropertyName("expected")] string Expected,
    [property: JsonPropertyName("actual")] string Actual,
    [property: JsonPropertyName("correct")] bool Correct,
    [property: JsonPropertyName("pathsFound")] IReadOnlyList<string[]>? PathsFound = null);
/// <summary>
/// Baseline for regression checks. Captures the subset of metrics compared by
/// <see cref="BenchmarkResult.CheckRegression"/>.
/// </summary>
/// <param name="Version">Corpus version the baseline was computed from.</param>
/// <param name="Timestamp">When the baseline run happened.</param>
/// <param name="Precision">Baseline precision (0..1).</param>
/// <param name="Recall">Baseline recall (0..1).</param>
/// <param name="F1">Baseline F1 score (0..1).</param>
/// <param name="TtfrpP95Ms">Baseline TTFRP p95 latency in milliseconds.</param>
public sealed record BenchmarkBaseline(
    [property: JsonPropertyName("version")] string Version,
    [property: JsonPropertyName("timestamp")] DateTimeOffset Timestamp,
    [property: JsonPropertyName("precision")] double Precision,
    [property: JsonPropertyName("recall")] double Recall,
    [property: JsonPropertyName("f1")] double F1,
    [property: JsonPropertyName("ttfrp_p95_ms")] int TtfrpP95Ms);
/// <summary>
/// Result of regression check.
/// </summary>
/// <param name="Passed">True when no error-severity issue was found (warnings do not fail the check).</param>
/// <param name="Issues">All issues found, including warnings; empty when the run is clean.</param>
public sealed record RegressionCheckResult(
    bool Passed,
    IReadOnlyList<RegressionIssue> Issues);
/// <summary>
/// A regression issue found during check.
/// </summary>
/// <param name="Metric">Machine-readable metric key (e.g. "precision", "recall", "determinism", "ttfrp_p95").</param>
/// <param name="Message">Human-readable description of the regression.</param>
/// <param name="Severity">Whether the issue fails the check or is advisory only.</param>
public sealed record RegressionIssue(
    string Metric,
    string Message,
    RegressionSeverity Severity);
/// <summary>
/// Severity of a regression issue.
/// </summary>
public enum RegressionSeverity
{
    /// <summary>Advisory only; does not fail the regression check.</summary>
    Warning,

    /// <summary>Fails the regression check (forces Passed = false).</summary>
    Error
}

View File

@@ -0,0 +1,17 @@
<?xml version='1.0' encoding='utf-8'?>
<!--
  StellaOps.Scanner.Benchmarks: ground-truth corpus benchmarking infrastructure.
  NOTE(review): targets net10.0 with LangVersion=preview and a preview
  System.Text.Json package pin - confirm these previews are intentional and
  updated before any stable release.
-->
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <!-- NOTE(review): warnings are not treated as errors here; consider enabling for CI parity. -->
    <TreatWarningsAsErrors>false</TreatWarningsAsErrors>
    <Description>Ground-truth corpus benchmarking infrastructure for reachability analysis</Description>
  </PropertyGroup>
  <ItemGroup>
    <PackageReference Include="System.Text.Json" Version="10.0.0-preview.1.25105.2" />
  </ItemGroup>
  <ItemGroup>
    <ProjectReference Include="../StellaOps.Scanner.Reachability/StellaOps.Scanner.Reachability.csproj" />
  </ItemGroup>
</Project>