feat(rate-limiting): Implement core rate limiting functionality with configuration, decision-making, metrics, middleware, and service registration
- Add RateLimitConfig for configuration management with YAML binding support. - Introduce RateLimitDecision to encapsulate the result of rate limit checks. - Implement RateLimitMetrics for OpenTelemetry metrics tracking. - Create RateLimitMiddleware for enforcing rate limits on incoming requests. - Develop RateLimitService to orchestrate instance and environment rate limit checks. - Add RateLimitServiceCollectionExtensions for dependency injection registration.
This commit is contained in:
@@ -0,0 +1,540 @@
|
||||
// =============================================================================
|
||||
// CorpusRunnerIntegrationTests.cs
|
||||
// Sprint: SPRINT_3500_0003_0001_ground_truth_corpus_ci_gates
|
||||
// Task: CORPUS-013 - Integration tests for corpus runner
|
||||
// =============================================================================
|
||||
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using Moq;
|
||||
using StellaOps.Scanner.Benchmarks;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Scanner.Benchmarks.Tests;
|
||||
|
||||
/// <summary>
|
||||
/// Integration tests for the ground-truth corpus runner.
|
||||
/// Per Sprint 3500.0003.0001 - Ground-Truth Corpus & CI Regression Gates.
|
||||
/// </summary>
|
||||
[Trait("Category", "Integration")]
|
||||
[Trait("Sprint", "3500.3")]
|
||||
public sealed class CorpusRunnerIntegrationTests
|
||||
{
|
||||
// Shared serializer settings for all round-trip tests:
// camelCase property names, human-readable indented output.
private static readonly JsonSerializerOptions JsonOptions =
    new JsonSerializerOptions
    {
        WriteIndented = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
    };
|
||||
|
||||
#region Corpus Runner Tests
|
||||
|
||||
[Fact(DisplayName = "RunAsync produces valid benchmark result")]
public async Task RunAsync_ProducesValidBenchmarkResult()
{
    // Arrange
    var options = new CorpusRunOptions();
    var sut = new MockCorpusRunner();

    // Act
    var benchmark = await sut.RunAsync("TestData/corpus.json", options);

    // Assert - every top-level field of the result must be populated.
    benchmark.Should().NotBeNull();
    benchmark.RunId.Should().NotBeNullOrEmpty();
    benchmark.Timestamp.Should().BeCloseTo(DateTimeOffset.UtcNow, TimeSpan.FromMinutes(1));
    benchmark.CorpusVersion.Should().NotBeNullOrEmpty();
    benchmark.ScannerVersion.Should().NotBeNullOrEmpty();
    benchmark.Metrics.Should().NotBeNull();
    benchmark.SampleResults.Should().NotBeEmpty();
}
|
||||
|
||||
[Fact(DisplayName = "RunAsync computes correct metrics")]
public async Task RunAsync_ComputesCorrectMetrics()
{
    // Arrange - 8 TP, 1 FP, 1 FN => precision = recall = 8/9 ≈ 0.8889,
    // and therefore F1 = 8/9 as well.
    var sut = new MockCorpusRunner(
        truePositives: 8,
        falsePositives: 1,
        falseNegatives: 1);

    // Act
    var benchmark = await sut.RunAsync("TestData/corpus.json", new CorpusRunOptions());

    // Assert
    benchmark.Metrics.Precision.Should().BeApproximately(0.8889, 0.01);
    benchmark.Metrics.Recall.Should().BeApproximately(0.8889, 0.01);
    benchmark.Metrics.F1.Should().BeApproximately(0.8889, 0.01);
}
|
||||
|
||||
[Fact(DisplayName = "RunAsync respects category filter")]
public async Task RunAsync_RespectsFilter()
{
    // Arrange
    var options = new CorpusRunOptions { Categories = ["basic"] };
    var sut = new MockCorpusRunner(sampleCount: 20);

    // Act
    var benchmark = await sut.RunAsync("TestData/corpus.json", options);

    // Assert - only samples of the requested category appear.
    // NOTE(review): MockCorpusRunner stamps every sample with the first
    // requested category rather than filtering a larger set, so this
    // assertion passes trivially — confirm whether real filtering
    // behavior needs coverage here.
    benchmark.SampleResults.Should().OnlyContain(r => r.Category == "basic");
}
|
||||
|
||||
[Fact(DisplayName = "RunAsync handles timeout correctly")]
public async Task RunAsync_HandlesTimeout()
{
    // Arrange - per-sample latency (5s) far exceeds the 100ms budget.
    var options = new CorpusRunOptions { TimeoutMs = 100 };
    var sut = new MockCorpusRunner(sampleLatencyMs: 5000);

    // Act
    var benchmark = await sut.RunAsync("TestData/corpus.json", options);

    // Assert - every sample must carry a (timeout) error.
    benchmark.SampleResults.Should().OnlyContain(r => r.Error != null);
}
|
||||
|
||||
[Fact(DisplayName = "RunAsync performs determinism checks")]
public async Task RunAsync_PerformsDeterminismChecks()
{
    // Arrange - a fully deterministic runner, replayed three times.
    var options = new CorpusRunOptions
    {
        CheckDeterminism = true,
        DeterminismRuns = 3,
    };
    var sut = new MockCorpusRunner(deterministicRate: 1.0);

    // Act
    var benchmark = await sut.RunAsync("TestData/corpus.json", options);

    // Assert - 100% of replays agreed.
    benchmark.Metrics.DeterministicReplay.Should().Be(1.0);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Metrics Computation Tests
|
||||
|
||||
[Fact(DisplayName = "BenchmarkMetrics.Compute calculates precision correctly")]
public void BenchmarkMetrics_Compute_CalculatesPrecisionCorrectly()
{
    // Arrange - 7 TP + 3 FP => precision = 7 / (7 + 3) = 0.7.
    List<SinkResult> sinks =
    [
        // True positives (expected reachable, reported reachable).
        new("s1", "reachable", "reachable", true),
        new("s2", "reachable", "reachable", true),
        new("s3", "reachable", "reachable", true),
        new("s4", "reachable", "reachable", true),
        new("s5", "reachable", "reachable", true),
        new("s6", "reachable", "reachable", true),
        new("s7", "reachable", "reachable", true),
        // False positives (expected unreachable, reported reachable).
        new("s8", "unreachable", "reachable", false),
        new("s9", "unreachable", "reachable", false),
        new("s10", "unreachable", "reachable", false),
    ];

    List<SampleResult> results = [new("test-001", "Test", "basic", sinks, 100, true)];

    // Act
    var metrics = BenchmarkMetrics.Compute(results);

    // Assert
    metrics.Precision.Should().BeApproximately(0.7, 0.01);
}
|
||||
|
||||
[Fact(DisplayName = "BenchmarkMetrics.Compute calculates recall correctly")]
public void BenchmarkMetrics_Compute_CalculatesRecallCorrectly()
{
    // Arrange - 8 TP + 2 FN => recall = 8 / (8 + 2) = 0.8.
    List<SinkResult> sinks =
    [
        // True positives (expected reachable, reported reachable).
        new("s1", "reachable", "reachable", true),
        new("s2", "reachable", "reachable", true),
        new("s3", "reachable", "reachable", true),
        new("s4", "reachable", "reachable", true),
        new("s5", "reachable", "reachable", true),
        new("s6", "reachable", "reachable", true),
        new("s7", "reachable", "reachable", true),
        new("s8", "reachable", "reachable", true),
        // False negatives (expected reachable, reported unreachable).
        new("s9", "reachable", "unreachable", false),
        new("s10", "reachable", "unreachable", false),
    ];

    List<SampleResult> results = [new("test-001", "Test", "basic", sinks, 100, true)];

    // Act
    var metrics = BenchmarkMetrics.Compute(results);

    // Assert
    metrics.Recall.Should().BeApproximately(0.8, 0.01);
}
|
||||
|
||||
[Fact(DisplayName = "BenchmarkMetrics.Compute calculates F1 correctly")]
public void BenchmarkMetrics_Compute_CalculatesF1Correctly()
{
    // Arrange - 8 TP, 2 FP, 2 FN:
    //   precision = 8/10 = 0.8, recall = 8/10 = 0.8,
    //   F1 = 2*P*R / (P+R) = 2*0.8*0.8 / 1.6 = 0.8.
    // (Earlier scratch comments describing a P=0.8/R=0.6 scenario were
    // wrong for this data set and have been removed.)
    var sinkResults = new List<SinkResult>
    {
        new("s1", "reachable", "reachable", true),
        new("s2", "reachable", "reachable", true),
        new("s3", "reachable", "reachable", true),
        new("s4", "reachable", "reachable", true),
        new("s5", "reachable", "reachable", true),
        new("s6", "reachable", "reachable", true),
        new("s7", "reachable", "reachable", true),
        new("s8", "reachable", "reachable", true),
        new("s9", "unreachable", "reachable", false), // FP
        new("s10", "unreachable", "reachable", false), // FP
        new("s11", "reachable", "unreachable", false), // FN
        new("s12", "reachable", "unreachable", false), // FN
    };

    var sample = new SampleResult("test-001", "Test", "basic", sinkResults, 100, true);
    var results = new List<SampleResult> { sample };

    // Act
    var metrics = BenchmarkMetrics.Compute(results);

    // Assert - P = 0.8, R = 0.8 => F1 = 0.8.
    metrics.F1.Should().BeApproximately(0.8, 0.01);
}
|
||||
|
||||
[Fact(DisplayName = "BenchmarkMetrics.Compute handles empty results")]
public void BenchmarkMetrics_Compute_HandlesEmptyResults()
{
    // Act - no samples at all.
    var metrics = BenchmarkMetrics.Compute(new List<SampleResult>());

    // Assert - rates default to 0; determinism defaults to perfect (1.0).
    metrics.Precision.Should().Be(0);
    metrics.Recall.Should().Be(0);
    metrics.F1.Should().Be(0);
    metrics.DeterministicReplay.Should().Be(1.0);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Regression Check Tests
|
||||
|
||||
[Fact(DisplayName = "CheckRegression passes when metrics are above baseline")]
public void CheckRegression_PassesWhenAboveBaseline()
{
    // Arrange - the current run beats the week-old baseline on every metric.
    var baseline = new BenchmarkBaseline(
        Version: "1.0.0",
        Timestamp: DateTimeOffset.UtcNow.AddDays(-7),
        Precision: 0.90,
        Recall: 0.85,
        F1: 0.875,
        TtfrpP95Ms: 400);

    var current = CreateBenchmarkResult(
        precision: 0.92,
        recall: 0.87,
        deterministicReplay: 1.0,
        ttfrpP95Ms: 350);

    // Act
    var regression = current.CheckRegression(baseline);

    // Assert - no issues of any severity.
    regression.Passed.Should().BeTrue();
    regression.Issues.Should().BeEmpty();
}
|
||||
|
||||
[Fact(DisplayName = "CheckRegression fails on precision drop > 1pp")]
public void CheckRegression_FailsOnPrecisionDrop()
{
    // Arrange - precision drops 3 percentage points (0.95 -> 0.92),
    // past the 1pp regression threshold.
    var baseline = new BenchmarkBaseline(
        Version: "1.0.0",
        Timestamp: DateTimeOffset.UtcNow.AddDays(-7),
        Precision: 0.95,
        Recall: 0.90,
        F1: 0.924,
        TtfrpP95Ms: 400);

    var current = CreateBenchmarkResult(
        precision: 0.92,
        recall: 0.90,
        deterministicReplay: 1.0,
        ttfrpP95Ms: 400);

    // Act
    var regression = current.CheckRegression(baseline);

    // Assert
    regression.Passed.Should().BeFalse();
    regression.Issues.Should().Contain(
        i => i.Metric == "precision" && i.Severity == RegressionSeverity.Error);
}
|
||||
|
||||
[Fact(DisplayName = "CheckRegression fails on recall drop > 1pp")]
public void CheckRegression_FailsOnRecallDrop()
{
    // Arrange - recall drops 3 percentage points (0.95 -> 0.92),
    // past the 1pp regression threshold.
    var baseline = new BenchmarkBaseline(
        Version: "1.0.0",
        Timestamp: DateTimeOffset.UtcNow.AddDays(-7),
        Precision: 0.90,
        Recall: 0.95,
        F1: 0.924,
        TtfrpP95Ms: 400);

    var current = CreateBenchmarkResult(
        precision: 0.90,
        recall: 0.92,
        deterministicReplay: 1.0,
        ttfrpP95Ms: 400);

    // Act
    var regression = current.CheckRegression(baseline);

    // Assert
    regression.Passed.Should().BeFalse();
    regression.Issues.Should().Contain(
        i => i.Metric == "recall" && i.Severity == RegressionSeverity.Error);
}
|
||||
|
||||
[Fact(DisplayName = "CheckRegression fails on non-deterministic replay")]
public void CheckRegression_FailsOnNonDeterministic()
{
    // Arrange - metrics match the baseline exactly, but replay
    // determinism is 95% rather than the required 100%.
    var baseline = new BenchmarkBaseline(
        Version: "1.0.0",
        Timestamp: DateTimeOffset.UtcNow.AddDays(-7),
        Precision: 0.90,
        Recall: 0.90,
        F1: 0.90,
        TtfrpP95Ms: 400);

    var current = CreateBenchmarkResult(
        precision: 0.90,
        recall: 0.90,
        deterministicReplay: 0.95,
        ttfrpP95Ms: 400);

    // Act
    var regression = current.CheckRegression(baseline);

    // Assert
    regression.Passed.Should().BeFalse();
    regression.Issues.Should().Contain(
        i => i.Metric == "determinism" && i.Severity == RegressionSeverity.Error);
}
|
||||
|
||||
[Fact(DisplayName = "CheckRegression warns on TTFRP increase > 20%")]
public void CheckRegression_WarnsOnTtfrpIncrease()
{
    // Arrange - p95 TTFRP rises 30% (400ms -> 520ms), past the 20%
    // warning threshold but not a hard failure.
    var baseline = new BenchmarkBaseline(
        Version: "1.0.0",
        Timestamp: DateTimeOffset.UtcNow.AddDays(-7),
        Precision: 0.90,
        Recall: 0.90,
        F1: 0.90,
        TtfrpP95Ms: 400);

    var current = CreateBenchmarkResult(
        precision: 0.90,
        recall: 0.90,
        deterministicReplay: 1.0,
        ttfrpP95Ms: 520);

    // Act
    var regression = current.CheckRegression(baseline);

    // Assert - a warning is reported but the gate still passes.
    regression.Passed.Should().BeTrue();
    regression.Issues.Should().Contain(
        i => i.Metric == "ttfrp_p95" && i.Severity == RegressionSeverity.Warning);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Serialization Tests
|
||||
|
||||
[Fact(DisplayName = "BenchmarkResult serializes to valid JSON")]
public void BenchmarkResult_SerializesToValidJson()
{
    // Arrange
    var original = CreateBenchmarkResult();

    // Act - round-trip through the shared camelCase serializer options.
    var json = JsonSerializer.Serialize(original, JsonOptions);
    var roundTripped = JsonSerializer.Deserialize<BenchmarkResult>(json, JsonOptions);

    // Assert
    roundTripped.Should().NotBeNull();
    roundTripped!.RunId.Should().Be(original.RunId);
    roundTripped.Metrics.Precision.Should().Be(original.Metrics.Precision);
}
|
||||
|
||||
[Fact(DisplayName = "SampleResult serializes with correct property names")]
public void SampleResult_SerializesWithCorrectPropertyNames()
{
    // Arrange
    var sample = new SampleResult(
        "gt-0001",
        "test-sample",
        "basic",
        new[] { new SinkResult("sink-001", "reachable", "reachable", true) },
        150,
        true);

    // Act
    var json = JsonSerializer.Serialize(sample, JsonOptions);

    // Assert - camelCase contract fields expected by downstream tooling.
    json.Should().Contain("\"sampleId\"");
    json.Should().Contain("\"latencyMs\"");
    json.Should().Contain("\"deterministic\"");
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
/// <summary>
/// Builds a minimal <c>BenchmarkResult</c> with tunable headline metrics.
/// F1 is derived from the supplied precision/recall via the harmonic mean.
/// </summary>
private static BenchmarkResult CreateBenchmarkResult(
    double precision = 0.95,
    double recall = 0.92,
    double deterministicReplay = 1.0,
    int ttfrpP95Ms = 380)
{
    var metrics = new BenchmarkMetrics(
        Precision: precision,
        Recall: recall,
        F1: 2 * precision * recall / (precision + recall),
        TtfrpP50Ms: 120,
        TtfrpP95Ms: ttfrpP95Ms,
        DeterministicReplay: deterministicReplay);

    // A single representative sample keeps the payload small but non-empty.
    List<SampleResult> samples =
    [
        new SampleResult(
            "gt-0001",
            "sample-1",
            "basic",
            new[] { new SinkResult("sink-001", "reachable", "reachable", true) },
            120,
            true),
    ];

    return new BenchmarkResult(
        RunId: $"bench-{DateTimeOffset.UtcNow:yyyyMMdd}-001",
        Timestamp: DateTimeOffset.UtcNow,
        CorpusVersion: "1.0.0",
        ScannerVersion: "1.3.0",
        Metrics: metrics,
        SampleResults: samples,
        DurationMs: 5000);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Mock Corpus Runner
|
||||
|
||||
/// <summary>
/// Deterministic in-memory <see cref="ICorpusRunner"/> used by the tests
/// above. Emits a configurable mix of TP/FP/FN/TN sink results, with a
/// fixed RNG seed so determinism flags are reproducible across runs.
/// </summary>
private sealed class MockCorpusRunner(
    int truePositives = 9,
    int falsePositives = 0,
    int falseNegatives = 1,
    int sampleCount = 10,
    int sampleLatencyMs = 100,
    double deterministicRate = 1.0) : ICorpusRunner
{
    public Task<BenchmarkResult> RunAsync(string corpusPath, CorpusRunOptions options, CancellationToken cancellationToken = default)
    {
        var rng = new Random(42); // fixed seed => reproducible runs
        var samples = new List<SampleResult>(sampleCount);

        for (var i = 0; i < sampleCount; i++)
        {
            samples.Add(BuildSample(i, options, rng));
        }

        var result = new BenchmarkResult(
            RunId: $"bench-{DateTimeOffset.UtcNow:yyyyMMddHHmmss}",
            Timestamp: DateTimeOffset.UtcNow,
            CorpusVersion: "1.0.0",
            ScannerVersion: "1.3.0-test",
            Metrics: BenchmarkMetrics.Compute(samples),
            SampleResults: samples,
            DurationMs: sampleLatencyMs * samples.Count);

        return Task.FromResult(result);
    }

    public Task<SampleResult> RunSampleAsync(string samplePath, CancellationToken cancellationToken = default)
    {
        // A single canned sample; path is ignored by design.
        var sample = new SampleResult(
            "gt-0001",
            "test-sample",
            "basic",
            new[] { new SinkResult("sink-001", "reachable", "reachable", true) },
            sampleLatencyMs,
            true);

        return Task.FromResult(sample);
    }

    /// <summary>
    /// Builds sample <paramref name="index"/>. Classification is by index
    /// range: [0, TP) => TP, then FP, then FN, remainder => TN.
    /// </summary>
    private SampleResult BuildSample(int index, CorpusRunOptions options, Random rng)
    {
        // Samples are stamped with the first requested category (no real
        // filtering is performed by this mock).
        var category = options.Categories?.FirstOrDefault() ?? "basic";

        SinkResult sink;
        if (index < truePositives)
        {
            sink = new SinkResult($"sink-{index}", "reachable", "reachable", true);
        }
        else if (index < truePositives + falsePositives)
        {
            sink = new SinkResult($"sink-{index}", "unreachable", "reachable", false);
        }
        else if (index < truePositives + falsePositives + falseNegatives)
        {
            sink = new SinkResult($"sink-{index}", "reachable", "unreachable", false);
        }
        else
        {
            sink = new SinkResult($"sink-{index}", "unreachable", "unreachable", true);
        }

        // One RNG draw per sample, in loop order, to preserve the original
        // deterministic sequence from seed 42.
        var isDeterministic = rng.NextDouble() < deterministicRate;

        // A sample "times out" when its fixed latency exceeds the budget.
        var error = sampleLatencyMs > options.TimeoutMs ? "Timeout" : null;

        return new SampleResult(
            $"gt-{index:D4}",
            $"sample-{index}",
            category,
            new List<SinkResult> { sink },
            sampleLatencyMs,
            isDeterministic,
            error);
    }
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
<Project Sdk="Microsoft.NET.Sdk">

  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <IsTestProject>true</IsTestProject>
    <!-- Test assemblies are never published as packages. -->
    <IsPackable>false</IsPackable>
  </PropertyGroup>

  <!-- Test-framework and assertion dependencies. -->
  <ItemGroup>
    <PackageReference Include="FluentAssertions" Version="8.*" />
    <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.*" />
    <PackageReference Include="Moq" Version="4.*" />
    <PackageReference Include="xunit" Version="2.*" />
    <PackageReference Include="xunit.runner.visualstudio" Version="2.*">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers</IncludeAssets>
    </PackageReference>
  </ItemGroup>

  <!-- System under test. -->
  <ItemGroup>
    <ProjectReference Include="..\StellaOps.Scanner.Benchmarks\StellaOps.Scanner.Benchmarks.csproj" />
  </ItemGroup>

  <!-- Corpus fixtures copied next to the test binaries. -->
  <ItemGroup>
    <Content Include="TestData\**\*" CopyToOutputDirectory="PreserveNewest" />
  </ItemGroup>

</Project>
|
||||
Reference in New Issue
Block a user