feat(rate-limiting): Implement core rate limiting functionality with configuration, decision-making, metrics, middleware, and service registration

- Add RateLimitConfig for configuration management with YAML binding support.
- Introduce RateLimitDecision to encapsulate the result of rate limit checks.
- Implement RateLimitMetrics for OpenTelemetry metrics tracking.
- Create RateLimitMiddleware for enforcing rate limits on incoming requests.
- Develop RateLimitService to orchestrate instance and environment rate limit checks.
- Add RateLimitServiceCollectionExtensions for dependency injection registration.
This commit is contained in:
master
2025-12-17 18:02:37 +02:00
parent 394b57f6bf
commit 8bbfe4d2d2
211 changed files with 47179 additions and 1590 deletions

View File

@@ -0,0 +1,269 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Sprint: SPRINT_3500_0003_0001
// Task: CORPUS-013 - Integration tests for corpus runner
using System.Text.Json;
using FluentAssertions;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.Scanner.Reachability.Benchmarks;
using Xunit;
namespace StellaOps.Scanner.Reachability.Tests.Benchmarks;
/// <summary>
/// Integration tests for the corpus runner and benchmark framework.
/// </summary>
public sealed class CorpusRunnerIntegrationTests
{
    // Corpus root resolved relative to the test binaries; walks up to the repo root.
    private static readonly string CorpusBasePath = Path.Combine(
        AppDomain.CurrentDomain.BaseDirectory,
        "..", "..", "..", "..", "..", "..", "..",
        "datasets", "reachability");

    /// <summary>
    /// Loads a corpus file as text, or returns null when it is absent so the
    /// calling test can skip when running outside of the full repo context.
    /// </summary>
    private static string? TryReadCorpusFile(params string[] segments)
    {
        var fullPath = Path.Combine(CorpusBasePath, Path.Combine(segments));
        return File.Exists(fullPath) ? File.ReadAllText(fullPath) : null;
    }

    [Fact]
    public void CorpusIndex_ShouldBeValidJson()
    {
        // Arrange
        var json = TryReadCorpusFile("corpus.json");
        if (json is null)
        {
            // Skip if running outside of full repo context
            return;
        }

        // Act
        Action parse = () => JsonDocument.Parse(json);

        // Assert
        parse.Should().NotThrow("corpus.json should be valid JSON");
    }

    [Fact]
    public void CorpusIndex_ShouldContainRequiredFields()
    {
        // Arrange
        var json = TryReadCorpusFile("corpus.json");
        if (json is null)
        {
            return;
        }

        // Act
        using var document = JsonDocument.Parse(json);
        var root = document.RootElement;

        // Assert
        root.TryGetProperty("version", out _).Should().BeTrue("corpus should have version");
        root.TryGetProperty("samples", out var sampleArray).Should().BeTrue("corpus should have samples");
        sampleArray.GetArrayLength().Should().BeGreaterThan(0, "corpus should have at least one sample");
    }

    [Fact]
    public void SampleManifest_ShouldHaveExpectedResult()
    {
        // Arrange
        var json = TryReadCorpusFile("ground-truth", "basic", "gt-0001", "sample.manifest.json");
        if (json is null)
        {
            return;
        }

        // Act
        using var document = JsonDocument.Parse(json);
        var root = document.RootElement;

        // Assert
        root.TryGetProperty("sampleId", out var idElement).Should().BeTrue();
        idElement.GetString().Should().Be("gt-0001");
        root.TryGetProperty("expectedResult", out var expectation).Should().BeTrue();
        expectation.TryGetProperty("reachable", out var reachableFlag).Should().BeTrue();
        reachableFlag.GetBoolean().Should().BeTrue("gt-0001 should be marked as reachable");
    }

    [Fact]
    public void UnreachableSample_ShouldHaveFalseExpectedResult()
    {
        // Arrange
        var json = TryReadCorpusFile("ground-truth", "unreachable", "gt-0011", "sample.manifest.json");
        if (json is null)
        {
            return;
        }

        // Act
        using var document = JsonDocument.Parse(json);
        var root = document.RootElement;

        // Assert
        root.TryGetProperty("sampleId", out var idElement).Should().BeTrue();
        idElement.GetString().Should().Be("gt-0011");
        root.TryGetProperty("expectedResult", out var expectation).Should().BeTrue();
        expectation.TryGetProperty("reachable", out var reachableFlag).Should().BeTrue();
        reachableFlag.GetBoolean().Should().BeFalse("gt-0011 should be marked as unreachable");
    }

    [Fact]
    public void BenchmarkResult_ShouldCalculateMetrics()
    {
        // Arrange: two true positives, one true negative, one false positive.
        var samples = new SampleResult[]
        {
            new("gt-0001", expected: true, actual: true, tier: "executed", durationMs: 10),
            new("gt-0002", expected: true, actual: true, tier: "executed", durationMs: 15),
            new("gt-0011", expected: false, actual: false, tier: "imported", durationMs: 5),
            new("gt-0012", expected: false, actual: true, tier: "executed", durationMs: 8), // False positive
        };

        // Act
        var metrics = BenchmarkMetrics.Calculate(samples);

        // Assert
        metrics.TotalSamples.Should().Be(4);
        metrics.TruePositives.Should().Be(2);
        metrics.TrueNegatives.Should().Be(1);
        metrics.FalsePositives.Should().Be(1);
        metrics.FalseNegatives.Should().Be(0);
        metrics.Precision.Should().BeApproximately(0.666, 0.01);
        metrics.Recall.Should().Be(1.0);
    }

    [Fact]
    public void BenchmarkResult_ShouldDetectRegression()
    {
        // Arrange
        var baseline = new BenchmarkMetrics
        {
            Precision = 0.95,
            Recall = 0.90,
            F1Score = 0.924,
            MeanDurationMs = 50
        };
        var current = new BenchmarkMetrics
        {
            Precision = 0.85, // Dropped by 10%
            Recall = 0.92,
            F1Score = 0.883,
            MeanDurationMs = 55
        };
        var thresholds = new RegressionThresholds
        {
            MaxPrecisionDrop = 0.05,
            MaxRecallDrop = 0.05,
            MaxDurationIncrease = 0.20
        };

        // Act
        var regressions = RegressionDetector.Check(baseline, current, thresholds);

        // Assert
        regressions.Should().Contain(r => r.Metric == "Precision");
        regressions.Should().NotContain(r => r.Metric == "Recall");
    }
}
/// <summary>
/// Represents a single sample result from the benchmark run.
/// </summary>
/// <param name="SampleId">Identifier of the corpus sample (e.g. "gt-0001").</param>
/// <param name="Expected">Ground-truth reachability verdict from the sample manifest.</param>
/// <param name="Actual">Reachability verdict produced by the run under test.</param>
/// <param name="Tier">Evidence tier the verdict came from (e.g. "executed", "imported").</param>
/// <param name="DurationMs">Wall-clock time taken to evaluate the sample, in milliseconds.</param>
public record SampleResult(
    string SampleId,
    bool Expected,
    bool Actual,
    string Tier,
    double DurationMs);
/// <summary>
/// Calculated metrics from a benchmark run.
/// </summary>
public class BenchmarkMetrics
{
    /// <summary>Total number of samples evaluated.</summary>
    public int TotalSamples { get; set; }
    /// <summary>Samples expected reachable and reported reachable.</summary>
    public int TruePositives { get; set; }
    /// <summary>Samples expected unreachable and reported unreachable.</summary>
    public int TrueNegatives { get; set; }
    /// <summary>Samples expected unreachable but reported reachable.</summary>
    public int FalsePositives { get; set; }
    /// <summary>Samples expected reachable but reported unreachable.</summary>
    public int FalseNegatives { get; set; }
    /// <summary>TP / (TP + FP); 0 when there are no positive predictions.</summary>
    public double Precision { get; set; }
    /// <summary>TP / (TP + FN); 0 when there are no expected positives.</summary>
    public double Recall { get; set; }
    /// <summary>Harmonic mean of precision and recall; 0 when both are 0.</summary>
    public double F1Score { get; set; }
    /// <summary>Mean per-sample duration in milliseconds; 0 for an empty run.</summary>
    public double MeanDurationMs { get; set; }

    /// <summary>
    /// Computes confusion-matrix counts, precision, recall, F1 and mean duration
    /// from per-sample results. Every ratio falls back to 0 when its denominator
    /// is 0, and an empty result set yields all-zero metrics.
    /// </summary>
    /// <param name="results">Per-sample benchmark outcomes; may be empty.</param>
    /// <returns>The aggregated metrics.</returns>
    /// <exception cref="ArgumentNullException">When <paramref name="results"/> is null.</exception>
    public static BenchmarkMetrics Calculate(IList<SampleResult> results)
    {
        ArgumentNullException.ThrowIfNull(results);

        var tp = results.Count(r => r.Expected && r.Actual);
        var tn = results.Count(r => !r.Expected && !r.Actual);
        var fp = results.Count(r => !r.Expected && r.Actual);
        var fn = results.Count(r => r.Expected && !r.Actual);
        var precision = tp + fp > 0 ? (double)tp / (tp + fp) : 0;
        var recall = tp + fn > 0 ? (double)tp / (tp + fn) : 0;
        var f1 = precision + recall > 0 ? 2 * precision * recall / (precision + recall) : 0;
        return new BenchmarkMetrics
        {
            TotalSamples = results.Count,
            TruePositives = tp,
            TrueNegatives = tn,
            FalsePositives = fp,
            FalseNegatives = fn,
            Precision = precision,
            Recall = recall,
            F1Score = f1,
            // Fix: Enumerable.Average throws InvalidOperationException on an empty
            // sequence; an empty run should report 0 rather than crash.
            MeanDurationMs = results.Count > 0 ? results.Average(r => r.DurationMs) : 0
        };
    }
}
/// <summary>
/// Regression detector for benchmark comparisons.
/// </summary>
public static class RegressionDetector
{
    /// <summary>
    /// Compares <paramref name="current"/> metrics against a <paramref name="baseline"/>
    /// and returns one <see cref="Regression"/> entry per threshold that is exceeded.
    /// Precision/recall are compared as absolute drops; duration as a relative increase.
    /// </summary>
    /// <param name="baseline">Metrics from the reference run.</param>
    /// <param name="current">Metrics from the run under evaluation.</param>
    /// <param name="thresholds">Maximum tolerated deltas.</param>
    /// <returns>Detected regressions; empty when everything is within thresholds.</returns>
    public static List<Regression> Check(BenchmarkMetrics baseline, BenchmarkMetrics current, RegressionThresholds thresholds)
    {
        var regressions = new List<Regression>();

        var precisionDrop = baseline.Precision - current.Precision;
        if (precisionDrop > thresholds.MaxPrecisionDrop)
        {
            regressions.Add(new Regression("Precision", baseline.Precision, current.Precision, precisionDrop));
        }

        var recallDrop = baseline.Recall - current.Recall;
        if (recallDrop > thresholds.MaxRecallDrop)
        {
            regressions.Add(new Regression("Recall", baseline.Recall, current.Recall, recallDrop));
        }

        // Fix: guard the division — a zero baseline duration previously produced
        // Infinity (flagging a spurious "Duration" regression) or NaN. With no
        // meaningful baseline the duration comparison is skipped.
        if (baseline.MeanDurationMs > 0)
        {
            var durationIncrease = (current.MeanDurationMs - baseline.MeanDurationMs) / baseline.MeanDurationMs;
            if (durationIncrease > thresholds.MaxDurationIncrease)
            {
                regressions.Add(new Regression("Duration", baseline.MeanDurationMs, current.MeanDurationMs, durationIncrease));
            }
        }

        return regressions;
    }
}
/// <summary>
/// A single detected regression: which metric degraded, its baseline and
/// current values, and the delta that exceeded the configured threshold.
/// </summary>
/// <param name="Metric">Name of the degraded metric ("Precision", "Recall", or "Duration").</param>
/// <param name="Baseline">Value of the metric in the baseline run.</param>
/// <param name="Current">Value of the metric in the current run.</param>
/// <param name="Delta">Drop (precision/recall) or relative increase (duration) that triggered the flag.</param>
public record Regression(string Metric, double Baseline, double Current, double Delta);
/// <summary>
/// Tolerances used by <see cref="RegressionDetector.Check"/> when comparing a
/// current benchmark run against a baseline.
/// </summary>
public class RegressionThresholds
{
    /// <summary>Maximum allowed absolute drop in precision (default 0.05).</summary>
    public double MaxPrecisionDrop { get; set; } = 0.05;
    /// <summary>Maximum allowed absolute drop in recall (default 0.05).</summary>
    public double MaxRecallDrop { get; set; } = 0.05;
    /// <summary>Maximum allowed relative increase in mean duration (default 0.20, i.e. +20%).</summary>
    public double MaxDurationIncrease { get; set; } = 0.20;
}