save development progress
@@ -0,0 +1,336 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright © 2025 StellaOps
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-044 - Benchmark: policy evaluation < 50ms per finding

using System.Diagnostics;
using FluentAssertions;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Policy.Engine.Scoring.EvidenceWeightedScore;
using StellaOps.Signals.EvidenceWeightedScore;
using StellaOps.Signals.EvidenceWeightedScore.Normalizers;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.Benchmarks;

/// <summary>
/// Benchmark tests verifying that EWS calculation meets performance requirements.
/// Target: policy evaluation under 50ms per finding.
/// </summary>
[Trait("Category", "Benchmark")]
[Trait("Category", "Performance")]
[Trait("Sprint", "8200.0012.0003")]
[Trait("Task", "PINT-8200-044")]
public sealed class EwsCalculationBenchmarkTests
{
    private const int TargetMaxMs = 50;
    private const int WarmupIterations = 100;
    private const int BenchmarkIterations = 1000;
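
    // Warmup runs execute each measured path before any timing starts so that
    // tiered JIT compilation and first-use allocations settle; only
    // steady-state performance is asserted below.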

    private static ServiceCollection CreateServicesWithConfiguration()
    {
        var services = new ServiceCollection();
        var configuration = new ConfigurationBuilder()
            .AddInMemoryCollection()
            .Build();
        services.AddSingleton<IConfiguration>(configuration);
        return services;
    }

    #region Calculator Performance Tests

    [Fact(DisplayName = "Single EWS calculation completes under 50ms")]
    public void SingleCalculation_CompletesUnder50ms()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("perf-single");

        // Warmup
        for (var i = 0; i < WarmupIterations; i++)
        {
            calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
        }

        // Act
        var sw = Stopwatch.StartNew();
        var result = calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
        sw.Stop();

        // Assert
        result.Should().NotBeNull();
        sw.ElapsedMilliseconds.Should().BeLessThan(TargetMaxMs,
            $"single EWS calculation should complete in under {TargetMaxMs}ms (actual: {sw.ElapsedMilliseconds}ms)");
    }

    [Fact(DisplayName = "Average calculation time over 1000 iterations is under 1ms")]
    public void AverageCalculationTime_IsUnder1ms()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("perf-avg");

        // Warmup
        for (var i = 0; i < WarmupIterations; i++)
        {
            calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
        }

        // Act
        var sw = Stopwatch.StartNew();
        for (var i = 0; i < BenchmarkIterations; i++)
        {
            calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
        }
        sw.Stop();

        var avgMs = (double)sw.ElapsedMilliseconds / BenchmarkIterations;

        // Assert - average should be well under 1ms
        avgMs.Should().BeLessThan(1.0,
            $"average EWS calculation should be under 1ms (actual: {avgMs:F3}ms per calculation)");
    }

    [Fact(DisplayName = "P99 calculation time is under 10ms")]
    public void P99CalculationTime_IsUnder10ms()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("perf-p99");
        var timings = new long[BenchmarkIterations];

        // Warmup
        for (var i = 0; i < WarmupIterations; i++)
        {
            calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
        }

        // Act - Collect timing for each iteration
        var sw = new Stopwatch();
        for (var i = 0; i < BenchmarkIterations; i++)
        {
            sw.Restart();
            calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
            sw.Stop();
            timings[i] = sw.ElapsedTicks;
        }

        // Calculate P99
        Array.Sort(timings);
        var p99Index = (int)(BenchmarkIterations * 0.99);
        var p99Ticks = timings[p99Index];
        var p99Ms = (double)p99Ticks / Stopwatch.Frequency * 1000;
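        // Note: index 990 of the 1000 sorted samples is the 991st-fastest
        // timing, a close upper approximation of the true 99th percentile.
        // ElapsedTicks are converted to milliseconds via Stopwatch.Frequency
        // (ticks per second).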

        // Assert
        p99Ms.Should().BeLessThan(10.0,
            $"P99 EWS calculation time should be under 10ms (actual: {p99Ms:F3}ms)");
    }

    #endregion

    #region Enricher Pipeline Performance Tests

    [Fact(DisplayName = "Single enrichment completes under 50ms")]
    public void SingleEnrichment_CompletesUnder50ms()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = false; // Measure actual calculation
        });
        var provider = services.BuildServiceProvider();
        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();
        var evidence = CreateTestEvidence("perf-enricher");

        // Warmup
        for (var i = 0; i < WarmupIterations; i++)
        {
            enricher.Enrich(evidence);
        }

        // Act
        var sw = Stopwatch.StartNew();
        var result = enricher.Enrich(evidence);
        sw.Stop();

        // Assert
        result.Should().NotBeNull();
        sw.ElapsedMilliseconds.Should().BeLessThan(TargetMaxMs,
            $"single enrichment should complete in under {TargetMaxMs}ms (actual: {sw.ElapsedMilliseconds}ms)");
    }

    [Fact(DisplayName = "Enricher with caching improves performance")]
    public void EnricherWithCaching_ImprovesPerformance()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = true;
        });
        var provider = services.BuildServiceProvider();
        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();
        var evidence = CreateTestEvidence("perf-cache");

        // Warmup and populate cache
        enricher.Enrich(evidence);

        // Act - Measure cached access
        var sw = Stopwatch.StartNew();
        for (var i = 0; i < 100; i++)
        {
            enricher.Enrich(evidence);
        }
        sw.Stop();

        var avgCachedMs = (double)sw.ElapsedMilliseconds / 100;

        // Assert - cached access should be very fast
        avgCachedMs.Should().BeLessThan(0.5,
            $"cached enrichment should be under 0.5ms (actual: {avgCachedMs:F3}ms)");
    }

    #endregion

    #region Batch Performance Tests

    [Fact(DisplayName = "Batch of 100 findings processes under 500ms")]
    public void BatchOf100Findings_ProcessesUnder500ms()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var inputs = Enumerable.Range(0, 100)
            .Select(i => CreateTestInput($"batch-{i}"))
            .ToList();

        // Warmup
        foreach (var input in inputs.Take(10))
        {
            calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
        }

        // Act
        var sw = Stopwatch.StartNew();
        foreach (var input in inputs)
        {
            calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
        }
        sw.Stop();

        // Assert - 100 findings * 50ms max = 5000ms, but should be much faster
        sw.ElapsedMilliseconds.Should().BeLessThan(500,
            $"batch of 100 findings should process in under 500ms (actual: {sw.ElapsedMilliseconds}ms)");
    }

    [Fact(DisplayName = "Throughput exceeds 1000 evaluations per second")]
    public void Throughput_Exceeds1000PerSecond()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("throughput-test");

        // Warmup
        for (var i = 0; i < WarmupIterations; i++)
        {
            calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
        }

        // Act - Run for 1 second and count operations
        var count = 0;
        var sw = Stopwatch.StartNew();
        while (sw.ElapsedMilliseconds < 1000)
        {
            calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
            count++;
        }
        sw.Stop();

        var opsPerSecond = count * 1000.0 / sw.ElapsedMilliseconds;
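        // Dividing by the measured elapsed time (rather than assuming exactly
        // 1000ms) keeps the rate accurate when the final iteration overshoots
        // the one-second window.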

        // Assert - should exceed 1000 ops/sec (actual should be 10000+)
        opsPerSecond.Should().BeGreaterThan(1000,
            $"throughput should exceed 1000 evaluations/sec (actual: {opsPerSecond:F0} ops/sec)");
    }

    #endregion

    #region Normalizer Pipeline Performance Tests

    [Fact(DisplayName = "Normalizer aggregation completes under 5ms")]
    public void NormalizerAggregation_CompletesUnder5ms()
    {
        // Arrange
        var aggregator = new NormalizerAggregator();
        var evidence = CreateTestEvidence("norm-perf");

        // Warmup
        for (var i = 0; i < WarmupIterations; i++)
        {
            aggregator.Aggregate(evidence);
        }

        // Act
        var sw = Stopwatch.StartNew();
        var result = aggregator.Aggregate(evidence);
        sw.Stop();

        // Assert
        result.Should().NotBeNull();
        sw.ElapsedMilliseconds.Should().BeLessThan(5,
            $"normalizer aggregation should complete in under 5ms (actual: {sw.ElapsedMilliseconds}ms)");
    }

    #endregion

    #region Test Helpers

    private static EvidenceWeightedScoreInput CreateTestInput(string findingId)
    {
        return new EvidenceWeightedScoreInput
        {
            FindingId = findingId,
            Rch = 0.75,
            Rts = 0.60,
            Bkp = 0.40,
            Xpl = 0.55,
            Src = 0.65,
            Mit = 0.20
        };
    }

    private static FindingEvidence CreateTestEvidence(string findingId)
    {
        return new FindingEvidence
        {
            FindingId = findingId,
            Reachability = new ReachabilityInput
            {
                State = global::StellaOps.Signals.EvidenceWeightedScore.ReachabilityState.DynamicReachable,
                Confidence = 0.85
            },
            Runtime = new RuntimeInput
            {
                Posture = global::StellaOps.Signals.EvidenceWeightedScore.RuntimePosture.ActiveTracing,
                ObservationCount = 3,
                RecencyFactor = 0.75
            },
            Exploit = new ExploitInput
            {
                EpssScore = 0.45,
                EpssPercentile = 75,
                KevStatus = KevStatus.NotInKev,
                PublicExploitAvailable = false
            }
        };
    }

    #endregion
}
@@ -0,0 +1,472 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright © 2025 StellaOps
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-043 - Attestation reproducibility test: verify EWS proofs validate

using FluentAssertions;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Policy.Engine.Scoring.EvidenceWeightedScore;
using StellaOps.Signals.EvidenceWeightedScore;
using StellaOps.Signals.EvidenceWeightedScore.Normalizers;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.Integration;

/// <summary>
/// Attestation reproducibility tests verifying that EWS proofs validate correctly.
/// Tests that scoring decisions can be reproduced and verified for audit purposes.
/// </summary>
[Trait("Category", "Attestation")]
[Trait("Category", "Integration")]
[Trait("Sprint", "8200.0012.0003")]
[Trait("Task", "PINT-8200-043")]
public sealed class EwsAttestationReproducibilityTests
{
    private static ServiceCollection CreateServicesWithConfiguration()
    {
        var services = new ServiceCollection();
        var configuration = new ConfigurationBuilder()
            .AddInMemoryCollection()
            .Build();
        services.AddSingleton<IConfiguration>(configuration);
        return services;
    }

    #region Policy Digest Reproducibility Tests

    [Fact(DisplayName = "Policy digest is reproducible for same policy")]
    public void PolicyDigest_IsReproducible_ForSamePolicy()
    {
        // Arrange
        var policy1 = EvidenceWeightPolicy.DefaultProduction;
        var policy2 = EvidenceWeightPolicy.DefaultProduction;

        // Act
        var digest1 = policy1.ComputeDigest();
        var digest2 = policy2.ComputeDigest();

        // Assert
        digest1.Should().Be(digest2, "same policy should produce same digest");
        digest1.Should().HaveLength(64, "SHA256 hex digest should be 64 chars");
    }

    [Fact(DisplayName = "Policy digest changes when weights change")]
    public void PolicyDigest_Changes_WhenWeightsChange()
    {
        // Arrange
        var policy1 = EvidenceWeightPolicy.DefaultProduction;
        var policy2 = new EvidenceWeightPolicy
        {
            Version = "ews.v1",
            Profile = "modified",
            Weights = new EvidenceWeights
            {
                Rch = 0.35, // Changed
                Rts = 0.25,
                Bkp = 0.15,
                Xpl = 0.15,
                Src = 0.10,
                Mit = 0.10
            }
        };

        // Act
        var digest1 = policy1.ComputeDigest();
        var digest2 = policy2.ComputeDigest();

        // Assert
        digest1.Should().NotBe(digest2, "different policies should produce different digests");
    }

    [Fact(DisplayName = "Policy canonical JSON is deterministic")]
    public void PolicyCanonicalJson_IsDeterministic()
    {
        // Arrange
        var policy = EvidenceWeightPolicy.DefaultProduction;

        // Act - Get canonical JSON multiple times
        var json1 = policy.GetCanonicalJson();
        var json2 = policy.GetCanonicalJson();
        var json3 = policy.GetCanonicalJson();

        // Assert
        json1.Should().Be(json2, "canonical JSON should be deterministic");
        json2.Should().Be(json3, "canonical JSON should be deterministic");
    }

    #endregion

    #region Score Calculation Reproducibility Tests

    [Fact(DisplayName = "Score calculation is reproducible with same inputs and policy")]
    public void ScoreCalculation_IsReproducible_WithSameInputsAndPolicy()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("reproducible-score-test");
        var policy = EvidenceWeightPolicy.DefaultProduction;

        // Act
        var result1 = calculator.Calculate(input, policy);
        var result2 = calculator.Calculate(input, policy);

        // Assert - Everything should match exactly
        result1.Score.Should().Be(result2.Score);
        result1.Bucket.Should().Be(result2.Bucket);
        result1.PolicyDigest.Should().Be(result2.PolicyDigest);
        result1.Flags.Should().BeEquivalentTo(result2.Flags);
    }

    [Fact(DisplayName = "Score result contains valid policy digest")]
    public void ScoreResult_ContainsValidPolicyDigest()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("policy-digest-in-result");
        var policy = EvidenceWeightPolicy.DefaultProduction;

        // Act
        var result = calculator.Calculate(input, policy);

        // Assert
        result.PolicyDigest.Should().NotBeNullOrEmpty("result should contain policy digest");
        result.PolicyDigest.Should().Be(policy.ComputeDigest(),
            "result's policy digest should match the policy used");
    }

    [Fact(DisplayName = "Score can be verified by recalculating with same inputs")]
    public void Score_CanBeVerified_ByRecalculating()
    {
        // Arrange - Original calculation
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("verification-test");
        var policy = EvidenceWeightPolicy.DefaultProduction;
        var original = calculator.Calculate(input, policy);

        // Create a "proof" structure that could be stored/transmitted
        var proof = new
        {
            FindingId = original.FindingId,
            Score = original.Score,
            Bucket = original.Bucket,
            PolicyDigest = original.PolicyDigest,
            Inputs = original.Inputs
        };
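        // Note: the anonymous type is a test stand-in for a real attestation
        // payload; a production proof would presumably be a canonicalized,
        // signed document rather than an in-memory projection.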

        // Act - Verification: recalculate with same inputs and verify
        var recreatedInput = new EvidenceWeightedScoreInput
        {
            FindingId = proof.FindingId,
            Rch = proof.Inputs.Rch,
            Rts = proof.Inputs.Rts,
            Bkp = proof.Inputs.Bkp,
            Xpl = proof.Inputs.Xpl,
            Src = proof.Inputs.Src,
            Mit = proof.Inputs.Mit
        };

        var verification = calculator.Calculate(recreatedInput, policy);

        // Assert - Verification should produce identical results
        verification.Score.Should().Be(proof.Score, "verified score should match original");
        verification.Bucket.Should().Be(proof.Bucket, "verified bucket should match original");
        verification.PolicyDigest.Should().Be(proof.PolicyDigest, "policy digest should match");
    }

    #endregion

    #region Enrichment Chain Reproducibility Tests

    [Fact(DisplayName = "Enrichment result contains reproducibility information")]
    public void EnrichmentResult_ContainsReproducibilityInfo()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts => opts.Enabled = true);
        var provider = services.BuildServiceProvider();

        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();
        var evidence = CreateTestEvidence("reproducibility-info-test");

        // Act
        var result = enricher.Enrich(evidence);

        // Assert
        result.IsSuccess.Should().BeTrue();
        result.Score.Should().NotBeNull();
        result.Score!.PolicyDigest.Should().NotBeNullOrEmpty("score should include policy digest for verification");
        result.Score!.Inputs.Should().NotBeNull("score should include inputs for reproducibility");
    }

    [Fact(DisplayName = "Enrichment is reproducible for same evidence")]
    public void Enrichment_IsReproducible_ForSameEvidence()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = false; // Disable caching to test actual reproducibility
        });
        var provider = services.BuildServiceProvider();

        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();
        var evidence = CreateTestEvidence("enrichment-reproducibility");

        // Act - Multiple enrichments
        var results = Enumerable.Range(0, 10)
            .Select(_ => enricher.Enrich(evidence))
            .ToList();

        // Assert - All should be identical
        var first = results[0];
        results.Should().AllSatisfy(r =>
        {
            r.Score!.Score.Should().Be(first.Score!.Score);
            r.Score!.PolicyDigest.Should().Be(first.Score!.PolicyDigest);
            r.Score!.Bucket.Should().Be(first.Score!.Bucket);
        });
    }

    #endregion

    #region Attestation Proof Structure Tests

    [Fact(DisplayName = "Score proof contains all required verification fields")]
    public void ScoreProof_ContainsAllRequiredFields()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("proof-fields-test");
        var policy = EvidenceWeightPolicy.DefaultProduction;

        // Act
        var result = calculator.Calculate(input, policy);

        // Assert - All fields needed for verification are present
        result.FindingId.Should().NotBeNullOrEmpty("finding ID required for correlation");
        result.Score.Should().BeInRange(0, 100, "score required for verdict");
        result.Bucket.Should().BeDefined("bucket required for triage");
        result.PolicyDigest.Should().NotBeNullOrEmpty("policy digest required for version tracking");
        result.Inputs.Should().NotBeNull("inputs required for reproducibility");
        result.Weights.Should().NotBeNull("weights required for audit");
        result.CalculatedAt.Should().NotBe(default, "timestamp required for audit trail");
    }

    [Fact(DisplayName = "Score proof is JSON serializable for attestation")]
    public void ScoreProof_IsJsonSerializable()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("json-serialization-test");
        var result = calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);

        // Act
        var json = JsonSerializer.Serialize(result);
        var deserialized = JsonSerializer.Deserialize<EvidenceWeightedScoreResult>(json);

        // Assert
        json.Should().NotBeNullOrEmpty();
        deserialized.Should().NotBeNull();
        deserialized!.Score.Should().Be(result.Score);
        deserialized.PolicyDigest.Should().Be(result.PolicyDigest);
        deserialized.FindingId.Should().Be(result.FindingId);
    }

    [Fact(DisplayName = "Score proof hash is reproducible")]
    public void ScoreProofHash_IsReproducible()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("proof-hash-test");
        var policy = EvidenceWeightPolicy.DefaultProduction;

        // Act - Calculate twice and compute hash of each
        var result1 = calculator.Calculate(input, policy);
        var result2 = calculator.Calculate(input, policy);

        var hash1 = ComputeProofHash(result1);
        var hash2 = ComputeProofHash(result2);

        // Assert
        hash1.Should().Be(hash2, "proof hash should be reproducible");
    }

    #endregion

    #region Cross-Instance Reproducibility Tests

    [Fact(DisplayName = "Different calculator instances produce same results")]
    public void DifferentCalculatorInstances_ProduceSameResults()
    {
        // Arrange
        var calculator1 = new EvidenceWeightedScoreCalculator();
        var calculator2 = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("cross-instance-test");
        var policy = EvidenceWeightPolicy.DefaultProduction;

        // Act
        var result1 = calculator1.Calculate(input, policy);
        var result2 = calculator2.Calculate(input, policy);

        // Assert
        result1.Score.Should().Be(result2.Score);
        result1.Bucket.Should().Be(result2.Bucket);
        result1.PolicyDigest.Should().Be(result2.PolicyDigest);
    }

    [Fact(DisplayName = "Different service provider instances produce same results")]
    public void DifferentServiceProviderInstances_ProduceSameResults()
    {
        // Arrange - Two independent service providers
        var services1 = CreateServicesWithConfiguration();
        services1.AddEvidenceWeightedScoring();
        services1.AddEvidenceNormalizers();
        services1.AddEvidenceWeightedScore(opts => opts.Enabled = true);
        var provider1 = services1.BuildServiceProvider();

        var services2 = CreateServicesWithConfiguration();
        services2.AddEvidenceWeightedScoring();
        services2.AddEvidenceNormalizers();
        services2.AddEvidenceWeightedScore(opts => opts.Enabled = true);
        var provider2 = services2.BuildServiceProvider();

        var enricher1 = provider1.GetRequiredService<IFindingScoreEnricher>();
        var enricher2 = provider2.GetRequiredService<IFindingScoreEnricher>();

        var evidence = CreateTestEvidence("cross-provider-test");

        // Act
        var result1 = enricher1.Enrich(evidence);
        var result2 = enricher2.Enrich(evidence);

        // Assert
        result1.Score!.Score.Should().Be(result2.Score!.Score);
        result1.Score!.PolicyDigest.Should().Be(result2.Score!.PolicyDigest);
    }

    #endregion

    #region Timestamp and Audit Trail Tests

    [Fact(DisplayName = "Calculation timestamp is captured")]
    public void CalculationTimestamp_IsCaptured()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("timestamp-test");
        var before = DateTimeOffset.UtcNow;

        // Act
        var result = calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
        var after = DateTimeOffset.UtcNow;

        // Assert
        result.CalculatedAt.Should().BeOnOrAfter(before);
        result.CalculatedAt.Should().BeOnOrBefore(after);
    }

    [Fact(DisplayName = "Breakdown provides audit trail for score components")]
    public void Breakdown_ProvidesAuditTrail()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("breakdown-audit");

        // Act
        var result = calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);

        // Assert
        result.Breakdown.Should().NotBeEmpty("breakdown should explain score composition");
        // Each dimension should be accounted for
        result.Breakdown.Should().HaveCountGreaterOrEqualTo(1);
    }

    [Fact(DisplayName = "Explanations provide human-readable audit information")]
    public void Explanations_ProvideHumanReadableAudit()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = new EvidenceWeightedScoreInput
        {
            FindingId = "explanation-test",
            Rch = 0.9, // High reachability
            Rts = 0.8, // High runtime
            Bkp = 0.2, // Low backport
            Xpl = 0.7, // High exploit
            Src = 0.6,
            Mit = 0.1 // Low mitigation
        };

        // Act
        var result = calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);

        // Assert - Should have explanations for high-risk findings
        result.Explanations.Should().NotBeEmpty("high-risk input should generate explanations");
    }

    #endregion

    #region Test Helpers

    private static EvidenceWeightedScoreInput CreateTestInput(string findingId)
    {
        return new EvidenceWeightedScoreInput
        {
            FindingId = findingId,
            Rch = 0.70,
            Rts = 0.55,
            Bkp = 0.35,
            Xpl = 0.50,
            Src = 0.60,
            Mit = 0.20
        };
    }

    private static FindingEvidence CreateTestEvidence(string findingId)
    {
        return new FindingEvidence
        {
            FindingId = findingId,
            Reachability = new ReachabilityInput
            {
                State = StellaOps.Signals.EvidenceWeightedScore.ReachabilityState.DynamicReachable,
                Confidence = 0.80
            },
            Runtime = new RuntimeInput
            {
                Posture = StellaOps.Signals.EvidenceWeightedScore.RuntimePosture.ActiveTracing,
                ObservationCount = 3,
                RecencyFactor = 0.70
            },
            Exploit = new ExploitInput
            {
                EpssScore = 0.40,
                EpssPercentile = 70,
                KevStatus = KevStatus.NotInKev,
                PublicExploitAvailable = false
            }
        };
    }

    private static string ComputeProofHash(EvidenceWeightedScoreResult result)
    {
        // Hash the critical reproducibility fields
        var proofData = $"{result.FindingId}:{result.Score}:{result.Bucket}:{result.PolicyDigest}:" +
                        $"{result.Inputs.Rch}:{result.Inputs.Rts}:{result.Inputs.Bkp}:" +
                        $"{result.Inputs.Xpl}:{result.Inputs.Src}:{result.Inputs.Mit}";

        var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(proofData));
        return Convert.ToHexStringLower(bytes);
    }

    #endregion
}
@@ -0,0 +1,489 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright © 2025 StellaOps
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-042 - Concurrent evaluation test: thread-safe EWS in policy pipeline

using FluentAssertions;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Policy.Engine.Scoring.EvidenceWeightedScore;
using StellaOps.Signals.EvidenceWeightedScore;
using StellaOps.Signals.EvidenceWeightedScore.Normalizers;
using System.Collections.Concurrent;
using Xunit;

namespace StellaOps.Policy.Engine.Tests.Integration;

/// <summary>
/// Concurrent evaluation tests verifying that EWS calculation is thread-safe
/// in the policy pipeline. These tests stress-test the system under concurrent load.
/// </summary>
[Trait("Category", "Concurrency")]
[Trait("Category", "Integration")]
[Trait("Sprint", "8200.0012.0003")]
[Trait("Task", "PINT-8200-042")]
public sealed class EwsConcurrentEvaluationTests
{
    private static ServiceCollection CreateServicesWithConfiguration()
    {
        var services = new ServiceCollection();
        var configuration = new ConfigurationBuilder()
            .AddInMemoryCollection()
            .Build();
        services.AddSingleton<IConfiguration>(configuration);
        return services;
    }

    #region Calculator Thread Safety Tests

    [Fact(DisplayName = "Calculator is thread-safe for concurrent same-input calculations")]
    public async Task Calculator_IsThreadSafe_ForSameInputCalculations()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("concurrent-same-input");
        var results = new ConcurrentBag<EvidenceWeightedScoreResult>();

        // Act - Concurrent calculations with same input
        var tasks = Enumerable.Range(0, 100)
            .Select(_ => Task.Run(() =>
            {
                var result = calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
                results.Add(result);
            }))
            .ToArray();

        await Task.WhenAll(tasks);

        // Assert - All should produce identical results
        var resultList = results.ToList();
        var first = resultList[0];
        resultList.Should().AllSatisfy(r =>
        {
            r.Score.Should().Be(first.Score, "concurrent calculations must produce same score");
            r.Bucket.Should().Be(first.Bucket, "concurrent calculations must produce same bucket");
        });
    }

    [Fact(DisplayName = "Calculator is thread-safe for concurrent different-input calculations")]
    public async Task Calculator_IsThreadSafe_ForDifferentInputCalculations()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var results = new ConcurrentDictionary<string, EvidenceWeightedScoreResult>();

        var inputs = Enumerable.Range(0, 50)
            .Select(i => CreateTestInput($"concurrent-different-{i}", i / 50.0))
            .ToList();

        // Act - Concurrent calculations with different inputs
        var tasks = inputs.Select(input => Task.Run(() =>
        {
            var result = calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
            results[input.FindingId] = result;
        })).ToArray();

        await Task.WhenAll(tasks);

        // Assert - Each should produce valid result
        results.Should().HaveCount(50);
        foreach (var kvp in results)
        {
            kvp.Value.FindingId.Should().Be(kvp.Key);
            kvp.Value.Score.Should().BeInRange(0, 100);
        }
    }

    [Fact(DisplayName = "Calculator handles high concurrency without contention issues")]
    public async Task Calculator_HandlesHighConcurrency_WithoutContention()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var errors = new ConcurrentBag<Exception>();
        var results = new ConcurrentBag<EvidenceWeightedScoreResult>();

        // Act - Very high concurrency (500 parallel tasks)
        var tasks = Enumerable.Range(0, 500)
            .Select(i => Task.Run(() =>
            {
                try
                {
                    var input = CreateTestInput($"stress-test-{i}", (i % 100) / 100.0);
                    var result = calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
                    results.Add(result);
                }
                catch (Exception ex)
                {
                    errors.Add(ex);
                }
            }))
            .ToArray();

        await Task.WhenAll(tasks);

        // Assert - No errors, all results valid
        errors.Should().BeEmpty("no exceptions should occur under high concurrency");
        results.Should().HaveCount(500);
        results.Should().AllSatisfy(r => r.Score.Should().BeInRange(0, 100));
    }

    #endregion

    #region Enricher Thread Safety Tests

    [Fact(DisplayName = "Enricher is thread-safe for concurrent enrichments")]
    public async Task Enricher_IsThreadSafe_ForConcurrentEnrichments()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = false; // Test without caching
        });
        var provider = services.BuildServiceProvider();
        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();

        var evidence = CreateTestEvidence("concurrent-enricher-test");
        var results = new ConcurrentBag<ScoreEnrichmentResult>();

        // Act
        var tasks = Enumerable.Range(0, 100)
            .Select(_ => Task.Run(() =>
            {
                var result = enricher.Enrich(evidence);
                results.Add(result);
            }))
            .ToArray();

        await Task.WhenAll(tasks);

        // Assert
        var resultList = results.ToList();
        resultList.Should().HaveCount(100);
        var first = resultList[0];
        resultList.Should().AllSatisfy(r =>
        {
            r.Score!.Score.Should().Be(first.Score!.Score);
            r.Score!.Bucket.Should().Be(first.Score!.Bucket);
        });
    }

    [Fact(DisplayName = "Enricher with caching handles concurrent requests correctly")]
    public async Task Enricher_WithCaching_HandlesConcurrentRequests()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = true; // Enable caching
        });
        var provider = services.BuildServiceProvider();
        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();

        var evidence = CreateTestEvidence("cached-concurrent-test");
        var results = new ConcurrentBag<ScoreEnrichmentResult>();

        // Act - First warm up the cache
        enricher.Enrich(evidence);

        // Then concurrent reads
        var tasks = Enumerable.Range(0, 100)
            .Select(_ => Task.Run(() =>
            {
                var result = enricher.Enrich(evidence);
                results.Add(result);
            }))
            .ToArray();

        await Task.WhenAll(tasks);

        // Assert - All should be from cache and identical
        var resultList = results.ToList();
        resultList.Should().HaveCount(100);
        resultList.Should().AllSatisfy(r => r.FromCache.Should().BeTrue("all should hit cache"));

        var first = resultList[0];
        resultList.Should().AllSatisfy(r =>
        {
            r.Score!.Score.Should().Be(first.Score!.Score);
        });
    }

    [Fact(DisplayName = "Multiple findings enriched concurrently produce correct results")]
    public async Task MultipleFindingsEnrichedConcurrently_ProduceCorrectResults()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = true;
        });
        var provider = services.BuildServiceProvider();
        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();

        var evidences = Enumerable.Range(0, 20)
            .Select(i => CreateTestEvidence($"multi-finding-{i}", i / 20.0))
            .ToList();

        var results = new ConcurrentDictionary<string, ScoreEnrichmentResult>();

        // Act - Enrich multiple different findings concurrently
        var tasks = evidences.Select(evidence => Task.Run(() =>
        {
            var result = enricher.Enrich(evidence);
            results[evidence.FindingId] = result;
        })).ToArray();

        await Task.WhenAll(tasks);

        // Assert
        results.Should().HaveCount(20);
        foreach (var kvp in results)
        {
            kvp.Value.FindingId.Should().Be(kvp.Key);
            kvp.Value.Score.Should().NotBeNull();
            kvp.Value.Score!.Score.Should().BeInRange(0, 100);
        }
    }

    #endregion

    #region Cache Thread Safety Tests

    [Fact(DisplayName = "Cache handles concurrent reads and writes safely")]
    public async Task Cache_HandlesConcurrentReadsAndWrites()
    {
        // Arrange
        var cache = new InMemoryScoreEnrichmentCache();
        var readSuccesses = new ConcurrentBag<bool>();
        var writes = new ConcurrentBag<bool>();

        var testResult = new EvidenceWeightedScoreResult
        {
            FindingId = "cache-test",
            Score = 75,
            Bucket = ScoreBucket.ScheduleNext,
            Inputs = new EvidenceInputValues(Rch: 0.8, Rts: 0.7, Bkp: 0.3, Xpl: 0.6, Src: 0.5, Mit: 0.2),
            Weights = EvidenceWeights.Default,
            Breakdown = [],
            Flags = [],
            Explanations = [],
            Caps = new AppliedGuardrails(),
            PolicyDigest = "test-digest",
            CalculatedAt = DateTimeOffset.UtcNow
        };

        // Act - Mixed concurrent reads and writes
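        // Roughly one third of the 200 tasks write and the rest read, all
        // contending over the same ten cache keys (i % 10) to maximize
        // overlap between readers and writers.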
        var tasks = Enumerable.Range(0, 200)
            .Select(i => Task.Run(() =>
            {
                if (i % 3 == 0)
                {
                    // Write
                    cache.Set($"finding-{i % 10}", testResult);
                    writes.Add(true);
                }
                else
                {
                    // Read
                    var found = cache.TryGet($"finding-{i % 10}", out _);
                    readSuccesses.Add(found);
                }
            }))
            .ToArray();

        await Task.WhenAll(tasks);

        // Assert - No exceptions means thread-safe
        writes.Should().NotBeEmpty();
        readSuccesses.Should().NotBeEmpty();
    }

    [Fact(DisplayName = "Cache maintains consistency under concurrent access")]
    public async Task Cache_MaintainsConsistency_UnderConcurrentAccess()
    {
        // Arrange
        var cache = new InMemoryScoreEnrichmentCache();
        var findingId = "consistency-test";

        var testResult = new EvidenceWeightedScoreResult
        {
            FindingId = findingId,
            Score = 80,
            Bucket = ScoreBucket.ScheduleNext,
            Inputs = new EvidenceInputValues(Rch: 0.8, Rts: 0.7, Bkp: 0.3, Xpl: 0.6, Src: 0.5, Mit: 0.2),
            Weights = EvidenceWeights.Default,
            Breakdown = [],
            Flags = [],
            Explanations = [],
            Caps = new AppliedGuardrails(),
            PolicyDigest = "test-digest",
            CalculatedAt = DateTimeOffset.UtcNow
        };

        // Set initial value
        cache.Set(findingId, testResult);

        var readResults = new ConcurrentBag<int>();

        // Act - Many concurrent reads
        var tasks = Enumerable.Range(0, 500)
            .Select(_ => Task.Run(() =>
            {
                if (cache.TryGet(findingId, out var result) && result is not null)
                    readResults.Add(result.Score);
            }))
            .ToArray();

        await Task.WhenAll(tasks);

        // Assert - All reads should get the same value
        readResults.Should().OnlyContain(score => score == 80, "all reads should return consistent value");
    }

    #endregion

    #region Race Condition Tests

    [Fact(DisplayName = "No race conditions in calculator under parallel execution")]
    public async Task NoRaceConditions_InCalculator_UnderParallelExecution()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var inputs = Enumerable.Range(0, 100)
            .Select(i => CreateTestInput($"race-test-{i}", (i % 10) / 10.0))
            .ToList();

        var results = new ConcurrentDictionary<string, List<int>>();

        // Initialize result lists
        foreach (var input in inputs)
            results[input.FindingId] = new List<int>();

        // Act - Each input calculated multiple times concurrently
        var tasks = inputs.SelectMany(input =>
            Enumerable.Range(0, 5).Select(_ => Task.Run(() =>
            {
                var result = calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
                lock (results[input.FindingId])
                {
                    results[input.FindingId].Add(result.Score);
                }
            })))
            .ToArray();

        await Task.WhenAll(tasks);

        // Assert - For each input, all calculations should produce same score
        foreach (var kvp in results)
        {
            var scores = kvp.Value;
            scores.Should().HaveCount(5);
            scores.Distinct().Should().HaveCount(1,
                $"all calculations for {kvp.Key} should produce same score, but got: {string.Join(", ", scores)}");
        }
    }

    [Fact(DisplayName = "Policy changes between calculations don't cause race conditions")]
    public async Task PolicyChanges_DontCauseRaceConditions()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("policy-race-test");

        var policy1 = EvidenceWeightPolicy.DefaultProduction;
        var policy2 = new EvidenceWeightPolicy
        {
            Version = "ews.v1",
            Profile = "alternate",
            Weights = new EvidenceWeights
            {
                Rch = 0.40,
                Rts = 0.20,
                Bkp = 0.10,
                Xpl = 0.15,
                Src = 0.10,
                Mit = 0.05
            }
        };
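        // The alternate weights still sum to 1.00, so any score differences
        // between the two policies come from the weight distribution rather
        // than the overall scale.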

        var results1 = new ConcurrentBag<int>();
        var results2 = new ConcurrentBag<int>();

        // Act - Concurrent calculations with different policies
        var tasks1 = Enumerable.Range(0, 50)
            .Select(_ => Task.Run(() =>
            {
                var result = calculator.Calculate(input, policy1);
                results1.Add(result.Score);
            }));

        var tasks2 = Enumerable.Range(0, 50)
            .Select(_ => Task.Run(() =>
            {
                var result = calculator.Calculate(input, policy2);
                results2.Add(result.Score);
            }));

        await Task.WhenAll(tasks1.Concat(tasks2));

        // Assert - Each policy should produce consistent results
        results1.Distinct().Should().HaveCount(1, "all policy1 calculations should produce same score");
        results2.Distinct().Should().HaveCount(1, "all policy2 calculations should produce same score");
    }

    #endregion

    #region Test Helpers
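
    // The optional factor (0.0-1.0) skews each evidence dimension upward
    // while keeping every value inside its valid range, so distinct findings
    // get distinct but always-legal inputs.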

    private static EvidenceWeightedScoreInput CreateTestInput(string findingId, double factor = 0.5)
    {
        return new EvidenceWeightedScoreInput
        {
            FindingId = findingId,
            Rch = 0.50 + factor * 0.3,
            Rts = 0.40 + factor * 0.3,
            Bkp = 0.30 + factor * 0.2,
            Xpl = 0.45 + factor * 0.3,
            Src = 0.55 + factor * 0.2,
            Mit = 0.15 + factor * 0.1
        };
    }

    private static FindingEvidence CreateTestEvidence(string findingId, double factor = 0.5)
    {
        return new FindingEvidence
        {
            FindingId = findingId,
            Reachability = new ReachabilityInput
            {
                State = StellaOps.Signals.EvidenceWeightedScore.ReachabilityState.DynamicReachable,
                Confidence = 0.70 + factor * 0.2
            },
            Runtime = new RuntimeInput
            {
                Posture = StellaOps.Signals.EvidenceWeightedScore.RuntimePosture.ActiveTracing,
                ObservationCount = (int)(3 + factor * 5),
                RecencyFactor = 0.65 + factor * 0.2
            },
            Exploit = new ExploitInput
            {
                EpssScore = 0.35 + factor * 0.3,
                EpssPercentile = (int)(60 + factor * 30),
                KevStatus = factor > 0.5 ? KevStatus.InKev : KevStatus.NotInKev,
                PublicExploitAvailable = factor > 0.7
            }
        };
    }

    #endregion
}
@@ -0,0 +1,442 @@
// SPDX-License-Identifier: AGPL-3.0-or-later
// Copyright © 2025 StellaOps
// Sprint: SPRINT_8200_0012_0003_policy_engine_integration
// Task: PINT-8200-044 - Add benchmark: policy evaluation with EWS < 50ms per finding

using System.Diagnostics;
using FluentAssertions;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Policy.Engine.Scoring.EvidenceWeightedScore;
using StellaOps.Signals.EvidenceWeightedScore;
using StellaOps.Signals.EvidenceWeightedScore.Normalizers;
using Xunit;
using Xunit.Abstractions;

namespace StellaOps.Policy.Engine.Tests.Integration;

/// <summary>
/// Performance tests for EWS calculation in the policy evaluation pipeline.
/// Ensures that EWS calculation completes within acceptable time limits.
/// </summary>
[Trait("Category", "Performance")]
[Trait("Sprint", "8200.0012.0003")]
[Trait("Task", "PINT-8200-044")]
public sealed class EwsPipelinePerformanceTests
{
    private const int MaxAllowedMsPerFinding = 50;
    private const int WarmupIterations = 10;
    private const int MeasurementIterations = 100;

    private readonly ITestOutputHelper _output;

    public EwsPipelinePerformanceTests(ITestOutputHelper output)
    {
        _output = output;
    }

    private static ServiceCollection CreateServicesWithConfiguration()
    {
        var services = new ServiceCollection();
        var configuration = new ConfigurationBuilder()
            .AddInMemoryCollection()
            .Build();
        services.AddSingleton<IConfiguration>(configuration);
        return services;
    }

    #region Calculator Performance Tests

    [Fact(DisplayName = "EWS calculator runs under 50ms per calculation")]
    public void EwsCalculator_CompletesUnder50ms()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var input = CreateTestInput("perf-test-001");

        // Warmup
        for (int i = 0; i < WarmupIterations; i++)
        {
            calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
        }

        // Measure
        var stopwatch = Stopwatch.StartNew();
        for (int i = 0; i < MeasurementIterations; i++)
        {
            calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
        }
        stopwatch.Stop();

        var averageMs = stopwatch.Elapsed.TotalMilliseconds / MeasurementIterations;

        // Report
        _output.WriteLine($"EWS Calculator Performance:");
        _output.WriteLine($"  Total iterations: {MeasurementIterations}");
        _output.WriteLine($"  Total time: {stopwatch.Elapsed.TotalMilliseconds:F2}ms");
        _output.WriteLine($"  Average per calculation: {averageMs:F4}ms");
        _output.WriteLine($"  Throughput: {MeasurementIterations / stopwatch.Elapsed.TotalSeconds:F0} calculations/sec");

        // Assert
        averageMs.Should().BeLessThan(MaxAllowedMsPerFinding,
            $"average EWS calculation time ({averageMs:F2}ms) should be under {MaxAllowedMsPerFinding}ms");
    }

    [Fact(DisplayName = "EWS calculator handles 1000 findings in under 5 seconds")]
    public void EwsCalculator_Handles1000Findings_UnderFiveSeconds()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var inputs = Enumerable.Range(0, 1000)
            .Select(i => CreateTestInput($"batch-test-{i:D4}"))
            .ToList();

        // Warmup
        for (int i = 0; i < WarmupIterations; i++)
        {
            calculator.Calculate(inputs[i % inputs.Count], EvidenceWeightPolicy.DefaultProduction);
        }

        // Measure
        var stopwatch = Stopwatch.StartNew();
        foreach (var input in inputs)
        {
            calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
        }
        stopwatch.Stop();

        var averageMs = stopwatch.Elapsed.TotalMilliseconds / inputs.Count;

        // Report
        _output.WriteLine($"EWS Batch Performance (1000 findings):");
        _output.WriteLine($"  Total time: {stopwatch.Elapsed.TotalMilliseconds:F2}ms");
        _output.WriteLine($"  Average per finding: {averageMs:F4}ms");
        _output.WriteLine($"  Throughput: {inputs.Count / stopwatch.Elapsed.TotalSeconds:F0} findings/sec");

        // Assert - 1000 findings in under 5 seconds (5ms average)
        stopwatch.Elapsed.TotalSeconds.Should().BeLessThan(5,
            "1000 EWS calculations should complete in under 5 seconds");
    }

    #endregion

    #region Enricher Performance Tests

    [Fact(DisplayName = "Enricher pipeline runs under 50ms per finding")]
    public void EnricherPipeline_CompletesUnder50ms()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = false; // Test actual calculation, not cache
        });
        var provider = services.BuildServiceProvider();

        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();
        var evidence = CreateTestEvidence("enricher-perf-test");

        // Warmup
        for (int i = 0; i < WarmupIterations; i++)
        {
            enricher.Enrich(evidence);
        }

        // Measure
        var stopwatch = Stopwatch.StartNew();
        for (int i = 0; i < MeasurementIterations; i++)
        {
            enricher.Enrich(evidence);
        }
        stopwatch.Stop();

        var averageMs = stopwatch.Elapsed.TotalMilliseconds / MeasurementIterations;

        // Report
        _output.WriteLine($"Enricher Pipeline Performance:");
        _output.WriteLine($"  Total iterations: {MeasurementIterations}");
        _output.WriteLine($"  Total time: {stopwatch.Elapsed.TotalMilliseconds:F2}ms");
        _output.WriteLine($"  Average per finding: {averageMs:F4}ms");
        _output.WriteLine($"  Throughput: {MeasurementIterations / stopwatch.Elapsed.TotalSeconds:F0} findings/sec");

        // Assert
        averageMs.Should().BeLessThan(MaxAllowedMsPerFinding,
            $"average enricher time ({averageMs:F2}ms) should be under {MaxAllowedMsPerFinding}ms");
    }

    [Fact(DisplayName = "Cached enricher is faster than uncached")]
    public void CachedEnricher_IsFasterThanUncached()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = true;
        });
        var provider = services.BuildServiceProvider();

        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();
        var evidence = CreateTestEvidence("cache-perf-test");

        // First call (cache miss)
        var coldWatch = Stopwatch.StartNew();
        enricher.Enrich(evidence);
        coldWatch.Stop();

        // Warmup cache
        enricher.Enrich(evidence);

        // Measure cached calls
        var hotWatch = Stopwatch.StartNew();
        for (int i = 0; i < MeasurementIterations; i++)
        {
            enricher.Enrich(evidence);
        }
        hotWatch.Stop();

        var coldMs = coldWatch.Elapsed.TotalMilliseconds;
        var hotAverageMs = hotWatch.Elapsed.TotalMilliseconds / MeasurementIterations;

        // Report
        _output.WriteLine($"Cache Performance Comparison:");
        _output.WriteLine($"  Cold call (cache miss): {coldMs:F4}ms");
        _output.WriteLine($"  Hot average (cache hit): {hotAverageMs:F4}ms");
        _output.WriteLine($"  Speedup factor: {coldMs / hotAverageMs:F1}x");

        // Assert - Cached should be faster (or at least not slower)
        hotAverageMs.Should().BeLessThanOrEqualTo(coldMs,
            "cached enricher calls should be faster than or equal to uncached");
    }

    #endregion

    #region Batch Processing Performance Tests

    [Fact(DisplayName = "Enricher handles diverse evidence types efficiently")]
    public void Enricher_HandlesDiverseEvidence_Efficiently()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = false;
        });
        var provider = services.BuildServiceProvider();

        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();

        // Create diverse evidence samples
        var evidenceSet = CreateDiverseEvidenceSet(100);

        // Warmup
        foreach (var evidence in evidenceSet.Take(10))
        {
            enricher.Enrich(evidence);
        }

        // Measure
        var stopwatch = Stopwatch.StartNew();
        foreach (var evidence in evidenceSet)
        {
            enricher.Enrich(evidence);
        }
        stopwatch.Stop();

        var averageMs = stopwatch.Elapsed.TotalMilliseconds / evidenceSet.Count;

        // Report
        _output.WriteLine($"Diverse Evidence Performance:");
        _output.WriteLine($"  Evidence samples: {evidenceSet.Count}");
        _output.WriteLine($"  Total time: {stopwatch.Elapsed.TotalMilliseconds:F2}ms");
        _output.WriteLine($"  Average per finding: {averageMs:F4}ms");
        _output.WriteLine($"  Throughput: {evidenceSet.Count / stopwatch.Elapsed.TotalSeconds:F0} findings/sec");

        // Assert
        averageMs.Should().BeLessThan(MaxAllowedMsPerFinding,
            $"average time for diverse evidence ({averageMs:F2}ms) should be under {MaxAllowedMsPerFinding}ms");
    }

    [Fact(DisplayName = "Concurrent enrichment scales efficiently")]
    public async Task ConcurrentEnrichment_ScalesEfficiently()
    {
        // Arrange
        var services = CreateServicesWithConfiguration();
        services.AddEvidenceWeightedScoring();
        services.AddEvidenceNormalizers();
        services.AddEvidenceWeightedScore(opts =>
        {
            opts.Enabled = true;
            opts.EnableCaching = false;
        });
        var provider = services.BuildServiceProvider();

        var enricher = provider.GetRequiredService<IFindingScoreEnricher>();
        var evidenceSet = CreateDiverseEvidenceSet(100);

        // Measure sequential
        var seqWatch = Stopwatch.StartNew();
        foreach (var evidence in evidenceSet)
        {
            enricher.Enrich(evidence);
        }
        seqWatch.Stop();

        // Measure parallel
        var parWatch = Stopwatch.StartNew();
        var tasks = evidenceSet.Select(e => Task.Run(() => enricher.Enrich(e))).ToArray();
        await Task.WhenAll(tasks);
        parWatch.Stop();

        // Report
        _output.WriteLine($"Concurrency Scaling:");
        _output.WriteLine($"  Sequential time: {seqWatch.Elapsed.TotalMilliseconds:F2}ms");
        _output.WriteLine($"  Parallel time: {parWatch.Elapsed.TotalMilliseconds:F2}ms");
        _output.WriteLine($"  Speedup factor: {seqWatch.Elapsed.TotalMilliseconds / parWatch.Elapsed.TotalMilliseconds:F2}x");
        _output.WriteLine($"  Parallel efficiency: {(seqWatch.Elapsed.TotalMilliseconds / parWatch.Elapsed.TotalMilliseconds / Environment.ProcessorCount * 100):F1}%");
|
||||
|
||||
// Assert - Parallel should complete in reasonable time
|
||||
var parallelAverage = parWatch.Elapsed.TotalMilliseconds / evidenceSet.Count;
|
||||
parallelAverage.Should().BeLessThan(MaxAllowedMsPerFinding,
|
||||
$"parallel average ({parallelAverage:F2}ms) should be under {MaxAllowedMsPerFinding}ms");
|
||||
}
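
    // Parallel efficiency above is speedup normalized by logical core count:
    //   speedup    = sequentialMs / parallelMs
    //   efficiency = speedup / Environment.ProcessorCount
    // For example, a 6.0x speedup on 8 logical processors is 75% efficiency.
    // Values well under 100% are expected here because Task.Run scheduling
    // overhead is large relative to sub-millisecond work items.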

    #endregion

    #region Memory Performance Tests

    [Fact(DisplayName = "EWS calculation has stable memory usage")]
    public void EwsCalculation_HasStableMemoryUsage()
    {
        // Arrange
        var calculator = new EvidenceWeightedScoreCalculator();
        var inputs = Enumerable.Range(0, 1000)
            .Select(i => CreateTestInput($"memory-test-{i:D4}"))
            .ToList();

        // Force GC before measurement
        GC.Collect();
        GC.WaitForPendingFinalizers();
        GC.Collect();

        var memoryBefore = GC.GetTotalMemory(true);

        // Perform calculations
        foreach (var input in inputs)
        {
            calculator.Calculate(input, EvidenceWeightPolicy.DefaultProduction);
        }

        // Force GC after measurement
        GC.Collect();
        GC.WaitForPendingFinalizers();
        GC.Collect();

        var memoryAfter = GC.GetTotalMemory(true);
        var memoryDeltaKb = (memoryAfter - memoryBefore) / 1024.0;
        var memoryPerFindingBytes = (memoryAfter - memoryBefore) / (double)inputs.Count;

        // Report
        _output.WriteLine($"Memory Usage:");
        _output.WriteLine($"  Before: {memoryBefore / 1024:N0} KB");
        _output.WriteLine($"  After: {memoryAfter / 1024:N0} KB");
        _output.WriteLine($"  Delta: {memoryDeltaKb:N0} KB");
        _output.WriteLine($"  Per finding: {memoryPerFindingBytes:N0} bytes");

        // Assert - Memory growth should be reasonable (less than 1KB per finding on average)
        memoryPerFindingBytes.Should().BeLessThan(1024,
            "memory growth per finding should be under 1KB");
    }
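
    // GC.GetTotalMemory(true) forces a collection before sampling, so the delta
    // above approximates retained-object growth rather than raw allocation rate.
    // For allocations-per-call, GC.GetAllocatedBytesForCurrentThread() sampled
    // before and after the loop would be the sharper probe (not asserted here).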

    #endregion

    #region Helper Methods

    private static EvidenceWeightedScoreInput CreateTestInput(string findingId)
    {
        return new EvidenceWeightedScoreInput
        {
            FindingId = findingId,
            Rch = 0.75,
            Rts = 0.60,
            Bkp = 0.40,
            Xpl = 0.55,
            Src = 0.65,
            Mit = 0.20
        };
    }
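
    // The fixed mid-range values above keep the benchmark on a representative
    // scoring path. Judging by the abbreviations, the six inputs appear to be
    // normalized [0,1] evidence signals (reachability, runtime, backport,
    // exploit, source, mitigation); this is an assumption, not confirmed here.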

    private static FindingEvidence CreateTestEvidence(string findingId)
    {
        return new FindingEvidence
        {
            FindingId = findingId,
            Reachability = new ReachabilityInput
            {
                State = ReachabilityState.DynamicReachable,
                Confidence = 0.85
            },
            Runtime = new RuntimeInput
            {
                Posture = RuntimePosture.ActiveTracing,
                ObservationCount = 3,
                RecencyFactor = 0.75
            },
            Exploit = new ExploitInput
            {
                EpssScore = 0.45,
                EpssPercentile = 75,
                KevStatus = KevStatus.NotInKev,
                PublicExploitAvailable = false
            }
        };
    }

    private static List<FindingEvidence> CreateDiverseEvidenceSet(int count)
    {
        var random = new Random(42); // Fixed seed for reproducibility
        var result = new List<FindingEvidence>(count);

        var reachabilityStates = Enum.GetValues<ReachabilityState>();
        var runtimePostures = Enum.GetValues<RuntimePosture>();
        var kevStatuses = Enum.GetValues<KevStatus>();

        for (int i = 0; i < count; i++)
        {
            result.Add(new FindingEvidence
            {
                FindingId = $"diverse-{i:D4}",
                Reachability = new ReachabilityInput
                {
                    State = reachabilityStates[random.Next(reachabilityStates.Length)],
                    Confidence = random.NextDouble()
                },
                Runtime = new RuntimeInput
                {
                    Posture = runtimePostures[random.Next(runtimePostures.Length)],
                    ObservationCount = random.Next(0, 20),
                    RecencyFactor = random.NextDouble()
                },
                Exploit = new ExploitInput
                {
                    EpssScore = random.NextDouble(),
                    EpssPercentile = random.Next(0, 100),
                    KevStatus = kevStatuses[random.Next(kevStatuses.Length)],
                    PublicExploitAvailable = random.Next(2) == 1
                }
            });
        }

        return result;
    }
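
    // Seeding Random with a constant (42) makes the "diverse" set identical on
    // every run, so timing regressions cannot hide behind input variance:
    //   new Random(42).Next(100) == new Random(42).Next(100) // always true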

    #endregion
}
@@ -0,0 +1,433 @@
using System.Collections.Immutable;
using FluentAssertions;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging.Abstractions;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Primitives;
using Microsoft.Extensions.Time.Testing;
using Moq;
using StellaOps.Policy.Confidence.Models;
using StellaOps.Policy.Engine.Caching;
using StellaOps.Policy.Engine.Options;
using StellaOps.Provcache;
using Xunit;

namespace StellaOps.Policy.Engine.Tests;

/// <summary>
/// Integration tests for ProvcachePolicyEvaluationCache.
/// </summary>
public sealed class ProvcachePolicyEvaluationCacheTests
{
    private readonly Mock<IProvcacheService> _mockProvcacheService;
    private readonly Mock<ICacheBypassAccessor> _mockBypassAccessor;
    private readonly FakeTimeProvider _timeProvider;
    private readonly ProvcachePolicyEvaluationCache _cache;

    public ProvcachePolicyEvaluationCacheTests()
    {
        _mockProvcacheService = new Mock<IProvcacheService>();
        _mockBypassAccessor = new Mock<ICacheBypassAccessor>();
        _mockBypassAccessor.Setup(x => x.ShouldBypassCache).Returns(false);
        _timeProvider = new FakeTimeProvider(DateTimeOffset.UtcNow);

        var provcacheOptions = Microsoft.Extensions.Options.Options.Create(new ProvcacheOptions
        {
            DefaultTtl = TimeSpan.FromHours(24),
            AllowCacheBypass = true
        });

        // PolicyEngineOptions has read-only properties, so use the default instance
        var policyOptions = Microsoft.Extensions.Options.Options.Create(new PolicyEngineOptions());

        _cache = new ProvcachePolicyEvaluationCache(
            _mockProvcacheService.Object,
            _mockBypassAccessor.Object,
            _timeProvider,
            provcacheOptions,
            policyOptions,
            NullLogger<ProvcachePolicyEvaluationCache>.Instance);
    }
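
    // FakeTimeProvider pins "now", so the EvaluatedAt/ExpiresAt values produced
    // by the helpers below are stable within a test and TTL arithmetic stays
    // deterministic. Expiry behaviour could be exercised by advancing the clock
    // explicitly (illustrative, not used in these tests):
    //   _timeProvider.Advance(TimeSpan.FromHours(2)); // entry is now past ExpiresAt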

    [Fact]
    public async Task GetAsync_CacheHit_ReturnsEntry()
    {
        // Arrange
        var key = CreateCacheKey("policy-1", "subject-1", "context-1");
        var provcacheEntry = CreateProvcacheEntry("sha256:verikey1");

        _mockProvcacheService
            .Setup(x => x.GetAsync(It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(ProvcacheServiceResult.Hit(provcacheEntry, "valkey", 1.0));

        // Act
        var result = await _cache.GetAsync(key);

        // Assert
        result.CacheHit.Should().BeTrue();
        result.Entry.Should().NotBeNull();
        result.Source.Should().Be(CacheSource.Redis);
    }

    [Fact]
    public async Task GetAsync_CacheMiss_ReturnsNull()
    {
        // Arrange
        var key = CreateCacheKey("policy-2", "subject-2", "context-2");

        _mockProvcacheService
            .Setup(x => x.GetAsync(It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(ProvcacheServiceResult.Miss(0.5));

        // Act
        var result = await _cache.GetAsync(key);

        // Assert
        result.CacheHit.Should().BeFalse();
        result.Entry.Should().BeNull();
        result.Source.Should().Be(CacheSource.None);
    }

    [Fact]
    public async Task GetAsync_BypassHeader_SkipsCache()
    {
        // Arrange
        var key = CreateCacheKey("policy-3", "subject-3", "context-3");
        _mockBypassAccessor.Setup(x => x.ShouldBypassCache).Returns(true);

        // Act
        var result = await _cache.GetAsync(key);

        // Assert
        result.CacheHit.Should().BeFalse();
        result.Entry.Should().BeNull();
        _mockProvcacheService.Verify(
            x => x.GetAsync(It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()),
            Times.Never);
    }

    [Fact]
    public async Task SetAsync_StoresEntryInProvcache()
    {
        // Arrange
        var key = CreateCacheKey("policy-4", "subject-4", "context-4");
        var entry = CreateCacheEntry();

        _mockProvcacheService
            .Setup(x => x.SetAsync(It.IsAny<ProvcacheEntry>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);

        // Act
        await _cache.SetAsync(key, entry);

        // Assert
        _mockProvcacheService.Verify(
            x => x.SetAsync(It.Is<ProvcacheEntry>(e =>
                e.PolicyHash == key.PolicyDigest), It.IsAny<CancellationToken>()),
            Times.Once);
    }

    [Fact]
    public async Task SetAsync_FailureDoesNotThrow()
    {
        // Arrange
        var key = CreateCacheKey("policy-5", "subject-5", "context-5");
        var entry = CreateCacheEntry();

        _mockProvcacheService
            .Setup(x => x.SetAsync(It.IsAny<ProvcacheEntry>(), It.IsAny<CancellationToken>()))
            .ThrowsAsync(new InvalidOperationException("Test exception"));

        // Act & Assert - should not throw
        await _cache.SetAsync(key, entry);
    }
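
    // Design note: swallowing SetAsync failures makes the cache fail-open; a
    // broken Valkey/Redis backend degrades to recomputing evaluations instead of
    // failing requests. The matching read-side behaviour is asserted below in
    // GetAsync_WhenProvcacheThrows_TreatsAsMiss.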

    [Fact]
    public async Task InvalidateAsync_CallsProvcache()
    {
        // Arrange
        var key = CreateCacheKey("policy-6", "subject-6", "context-6");

        _mockProvcacheService
            .Setup(x => x.InvalidateAsync(It.IsAny<string>(), It.IsAny<string?>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(true);

        // Act
        await _cache.InvalidateAsync(key);

        // Assert
        _mockProvcacheService.Verify(
            x => x.InvalidateAsync(It.IsAny<string>(), It.IsAny<string?>(), It.IsAny<CancellationToken>()),
            Times.Once);
    }

    [Fact]
    public async Task InvalidateByPolicyDigestAsync_InvalidatesAllMatchingEntries()
    {
        // Arrange
        var policyDigest = "sha256:policy123";

        _mockProvcacheService
            .Setup(x => x.InvalidateByAsync(It.IsAny<InvalidationRequest>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(new InvalidationResult
            {
                EntriesAffected = 5,
                Request = new InvalidationRequest
                {
                    Type = InvalidationType.PolicyHash,
                    Value = policyDigest,
                    Reason = "policy-update"
                },
                Timestamp = DateTimeOffset.UtcNow
            });

        // Act
        await _cache.InvalidateByPolicyDigestAsync(policyDigest);

        // Assert
        _mockProvcacheService.Verify(
            x => x.InvalidateByAsync(
                It.Is<InvalidationRequest>(r =>
                    r.Type == InvalidationType.PolicyHash &&
                    r.Value == policyDigest),
                It.IsAny<CancellationToken>()),
            Times.Once);
    }

    [Fact]
    public async Task GetBatchAsync_ProcessesAllKeys()
    {
        // Arrange
        var keys = new[]
        {
            CreateCacheKey("policy-a", "subject-a", "context-a"),
            CreateCacheKey("policy-b", "subject-b", "context-b"),
            CreateCacheKey("policy-c", "subject-c", "context-c")
        };

        var provcacheEntry = CreateProvcacheEntry("sha256:batch");

        // First key is a hit, others are misses
        _mockProvcacheService
            .SetupSequence(x => x.GetAsync(It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
            .ReturnsAsync(ProvcacheServiceResult.Hit(provcacheEntry, "valkey", 1.0))
            .ReturnsAsync(ProvcacheServiceResult.Miss(0.5))
            .ReturnsAsync(ProvcacheServiceResult.Miss(0.5));

        // Act
        var result = await _cache.GetBatchAsync(keys);

        // Assert
        result.CacheHits.Should().Be(1);
        result.CacheMisses.Should().Be(2);
        result.Found.Should().HaveCount(1);
        result.NotFound.Should().HaveCount(2);
    }

    [Fact]
    public void GetStats_ReturnsAccumulatedStatistics()
    {
        // Act
        var stats = _cache.GetStats();

        // Assert
        stats.Should().NotBeNull();
        stats.TotalRequests.Should().BeGreaterThanOrEqualTo(0);
    }

    [Fact]
    public async Task GetAsync_WhenProvcacheThrows_TreatsAsMiss()
    {
        // Arrange
        var key = CreateCacheKey("policy-err", "subject-err", "context-err");

        _mockProvcacheService
            .Setup(x => x.GetAsync(It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
            .ThrowsAsync(new Exception("Connection failed"));

        // Act
        var result = await _cache.GetAsync(key);

        // Assert
        result.CacheHit.Should().BeFalse();
        result.Entry.Should().BeNull();
    }

    [Fact]
    public async Task VeriKey_Construction_IsDeterministic()
    {
        // Arrange
        var key = CreateCacheKey("sha256:policy", "sha256:subject", "sha256:context");
        var capturedVeriKeys = new List<string>();

        _mockProvcacheService
            .Setup(x => x.GetAsync(It.IsAny<string>(), It.IsAny<bool>(), It.IsAny<CancellationToken>()))
            .Callback<string, bool, CancellationToken>((vk, _, _) => capturedVeriKeys.Add(vk))
            .ReturnsAsync(ProvcacheServiceResult.Miss(0.5));

        // Act
        await _cache.GetAsync(key);
        await _cache.GetAsync(key);

        // Assert
        capturedVeriKeys.Should().HaveCount(2);
        capturedVeriKeys[0].Should().Be(capturedVeriKeys[1]);
    }
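
    // Determinism matters because the VeriKey is the cache identity: if the same
    // (policy, subject, context) digests ever mapped to different keys, every
    // lookup would miss. One plausible construction (hypothetical; the real
    // derivation lives in ProvcachePolicyEvaluationCache) would hash the
    // concatenated digests:
    //   SHA256($"{key.PolicyDigest}|{key.SubjectDigest}|{key.ContextDigest}")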

    #region Helper Methods

    private static PolicyEvaluationCacheKey CreateCacheKey(string policy, string subject, string context)
    {
        return PolicyEvaluationCacheKey.Create(policy, subject, context);
    }

    private PolicyEvaluationCacheEntry CreateCacheEntry()
    {
        return new PolicyEvaluationCacheEntry(
            Status: "evaluated",
            Severity: "low",
            RuleName: "test-rule",
            Priority: 1,
            Annotations: ImmutableDictionary<string, string>.Empty,
            Warnings: [],
            ExceptionId: null,
            CorrelationId: "corr-123",
            EvaluatedAt: _timeProvider.GetUtcNow(),
            ExpiresAt: _timeProvider.GetUtcNow().AddHours(1),
            Confidence: new ConfidenceScore
            {
                Value = 0.85m,
                Factors = [],
                Explanation = "High confidence"
            });
    }

    private ProvcacheEntry CreateProvcacheEntry(string veriKey)
    {
        return new ProvcacheEntry
        {
            VeriKey = veriKey,
            Decision = new DecisionDigest
            {
                DigestVersion = "v1",
                VeriKey = veriKey,
                VerdictHash = "sha256:verdict123",
                ProofRoot = "sha256:proof123",
                ReplaySeed = new ReplaySeed { FeedIds = [], RuleIds = [] },
                CreatedAt = _timeProvider.GetUtcNow(),
                ExpiresAt = _timeProvider.GetUtcNow().AddHours(1),
                TrustScore = 85
            },
            PolicyHash = "sha256:policy",
            SignerSetHash = "sha256:signer",
            FeedEpoch = "2025-W01",
            CreatedAt = _timeProvider.GetUtcNow(),
            ExpiresAt = _timeProvider.GetUtcNow().AddHours(1)
        };
    }

    #endregion
}

/// <summary>
/// Tests for CacheBypassAccessor.
/// </summary>
public sealed class CacheBypassAccessorTests
{
    [Fact]
    public void HttpCacheBypassAccessor_NoHeader_ReturnsFalse()
    {
        // Arrange
        var httpContext = new DefaultHttpContext();
        var accessor = CreateAccessor(httpContext, allowBypass: true);

        // Act
        var result = accessor.ShouldBypassCache;

        // Assert
        result.Should().BeFalse();
    }

    [Fact]
    public void HttpCacheBypassAccessor_BypassHeaderTrue_ReturnsTrue()
    {
        // Arrange
        var httpContext = new DefaultHttpContext();
        httpContext.Request.Headers[CacheBypassHeaders.CacheBypass] = "true";
        var accessor = CreateAccessor(httpContext, allowBypass: true);

        // Act
        var result = accessor.ShouldBypassCache;

        // Assert
        result.Should().BeTrue();
    }

    [Fact]
    public void HttpCacheBypassAccessor_BypassHeaderFalse_ReturnsFalse()
    {
        // Arrange
        var httpContext = new DefaultHttpContext();
        httpContext.Request.Headers[CacheBypassHeaders.CacheBypass] = "false";
        var accessor = CreateAccessor(httpContext, allowBypass: true);

        // Act
        var result = accessor.ShouldBypassCache;

        // Assert
        result.Should().BeFalse();
    }

    [Fact]
    public void HttpCacheBypassAccessor_RefreshHeaderTrue_ReturnsTrue()
    {
        // Arrange
        var httpContext = new DefaultHttpContext();
        httpContext.Request.Headers[CacheBypassHeaders.CacheRefresh] = "true";
        var accessor = CreateAccessor(httpContext, allowBypass: true);

        // Act
        var result = accessor.ShouldBypassCache;

        // Assert
        result.Should().BeTrue();
    }

    [Fact]
    public void HttpCacheBypassAccessor_BypassDisabledInOptions_AlwaysReturnsFalse()
    {
        // Arrange
        var httpContext = new DefaultHttpContext();
        httpContext.Request.Headers[CacheBypassHeaders.CacheBypass] = "true";
        var accessor = CreateAccessor(httpContext, allowBypass: false);

        // Act
        var result = accessor.ShouldBypassCache;

        // Assert
        result.Should().BeFalse();
    }

    [Fact]
    public void NullCacheBypassAccessor_AlwaysReturnsFalse()
    {
        // Arrange
        var accessor = NullCacheBypassAccessor.Instance;

        // Act
        var result = accessor.ShouldBypassCache;

        // Assert
        result.Should().BeFalse();
    }

    private static HttpCacheBypassAccessor CreateAccessor(HttpContext httpContext, bool allowBypass)
    {
        var httpContextAccessor = new Mock<IHttpContextAccessor>();
        httpContextAccessor.Setup(x => x.HttpContext).Returns(httpContext);

        var options = Microsoft.Extensions.Options.Options.Create(new ProvcacheOptions { AllowCacheBypass = allowBypass });

        return new HttpCacheBypassAccessor(httpContextAccessor.Object, options);
    }
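
    // CreateAccessor mirrors how ASP.NET Core would supply the ambient request
    // via IHttpContextAccessor. A minimal production wiring sketch (assumed, not
    // part of this commit):
    //   services.AddHttpContextAccessor();
    //   services.AddSingleton<ICacheBypassAccessor, HttpCacheBypassAccessor>();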
}
@@ -32,6 +32,7 @@
  <ItemGroup>
    <ProjectReference Include="../../StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.DeltaVerdict/StellaOps.DeltaVerdict.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.Provcache/StellaOps.Provcache.csproj" />
    <ProjectReference Include="../../../Excititor/__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" />
    <ProjectReference Include="../../__Libraries/StellaOps.Policy.Unknowns/StellaOps.Policy.Unknowns.csproj" />
    <ProjectReference Include="../../../__Libraries/StellaOps.TestKit/StellaOps.TestKit.csproj" />