feat(metrics): Implement scan metrics repository and PostgreSQL integration
- Added IScanMetricsRepository interface for scan metrics persistence and retrieval.
- Implemented PostgresScanMetricsRepository for PostgreSQL database interactions, including methods for saving and retrieving scan metrics and execution phases.
- Introduced methods for obtaining TTE statistics and recent scans for tenants.
- Implemented deletion of old metrics for retention purposes.

test(tests): Add SCA Failure Catalogue tests for FC6-FC10

- Created ScaCatalogueDeterminismTests to validate determinism properties of SCA Failure Catalogue fixtures.
- Developed ScaFailureCatalogueTests to ensure correct handling of specific failure modes in the scanner.
- Included tests for manifest validation, file existence, and expected findings across multiple failure cases.

feat(telemetry): Integrate scan completion metrics into the pipeline

- Introduced IScanCompletionMetricsIntegration interface and ScanCompletionMetricsIntegration class to record metrics upon scan completion.
- Implemented proof coverage and TTE metrics recording, with logging of scan completion summaries.
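For orientation, a minimal sketch of the surfaces this commit describes. The method names, parameter types, and the ScanMetrics / ScanPhaseMetrics / TteStatistics shapes are assumptions; the interface bodies do not appear in this diff, only the test file below does.

public interface IScanMetricsRepository
{
    // Hypothetical signatures inferred from the commit description.
    Task SaveAsync(ScanMetrics metrics, CancellationToken ct = default);
    Task<ScanMetrics?> GetAsync(Guid scanId, CancellationToken ct = default);
    Task<IReadOnlyList<ScanPhaseMetrics>> GetExecutionPhasesAsync(Guid scanId, CancellationToken ct = default);
    Task<TteStatistics> GetTteStatisticsAsync(string tenantId, CancellationToken ct = default);
    Task<IReadOnlyList<ScanMetrics>> GetRecentScansAsync(string tenantId, int limit, CancellationToken ct = default);
    Task<int> DeleteOlderThanAsync(DateTimeOffset cutoff, CancellationToken ct = default); // retention
}

public interface IScanCompletionMetricsIntegration
{
    // Hypothetical: records proof coverage and TTE metrics when a scan finishes.
    Task RecordScanCompletionAsync(ScanMetrics metrics, CancellationToken ct = default);
}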
@@ -0,0 +1,172 @@
// -----------------------------------------------------------------------------
// DeterminismScoringIntegrationTests.cs
// Sprint: SPRINT_3401_0001_0001_determinism_scoring_foundations
// Task: DET-3401-013
// Description: Integration tests for freshness + proof coverage + explain in full scan
// -----------------------------------------------------------------------------

using StellaOps.Policy.Scoring;
using Xunit;

namespace StellaOps.Policy.Scoring.Tests;

public class DeterminismScoringIntegrationTests
{
    private readonly IFreshnessAwareScoringService _freshnessService;

    public DeterminismScoringIntegrationTests()
    {
        _freshnessService = new FreshnessAwareScoringService();
    }

    #region Freshness Integration Tests

    [Fact]
    public void FreshnessAdjustment_WithExplanation_ProducesConsistentResults()
    {
        // Arrange
        var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
        var evidenceTime = evaluationTime.AddDays(-15); // 15 days old = recent_30d bucket
        var baseScore = 100;

        // Act
        var result1 = _freshnessService.AdjustForFreshness(baseScore, evidenceTime, evaluationTime);
        var result2 = _freshnessService.AdjustForFreshness(baseScore, evidenceTime, evaluationTime);

        // Assert
        Assert.Equal(result1.AdjustedScore, result2.AdjustedScore);
        Assert.Equal(result1.MultiplierBps, result2.MultiplierBps);
        Assert.Equal("recent_30d", result1.BucketName);
        Assert.Equal(9000, result1.MultiplierBps); // 30d bucket = 9000bps
        Assert.Equal(90, result1.AdjustedScore);   // 100 * 9000 / 10000 = 90
    }

    [Theory]
    [InlineData(5, "fresh_7d", 10000, 100)]    // 5 days old
    [InlineData(15, "recent_30d", 9000, 90)]   // 15 days old
    [InlineData(60, "moderate_90d", 7500, 75)] // 60 days old
    [InlineData(120, "aging_180d", 6000, 60)]  // 120 days old
    [InlineData(300, "stale_365d", 4000, 40)]  // 300 days old
    [InlineData(500, "ancient", 2000, 20)]     // 500 days old
    public void FreshnessAdjustment_AllBuckets_ApplyCorrectMultiplier(
        int ageDays,
        string expectedBucket,
        int expectedMultiplierBps,
        int expectedScore)
    {
        // Arrange
        var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
        var evidenceTime = evaluationTime.AddDays(-ageDays);
        var baseScore = 100;

        // Act
        var result = _freshnessService.AdjustForFreshness(baseScore, evidenceTime, evaluationTime);

        // Assert
        Assert.Equal(expectedBucket, result.BucketName);
        Assert.Equal(expectedMultiplierBps, result.MultiplierBps);
        Assert.Equal(expectedScore, result.AdjustedScore);
    }
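
    // The cases above pin down a monotone bucket table. A plausible shape for the
    // lookup inside FreshnessAwareScoringService (an assumption -- the real
    // implementation is not part of this diff) is an ordered threshold scan:
    //
    //   private static readonly (int MaxAgeDays, string Name, int Bps)[] Buckets =
    //   {
    //       (7,   "fresh_7d",     10000),
    //       (30,  "recent_30d",    9000),
    //       (90,  "moderate_90d",  7500),
    //       (180, "aging_180d",    6000),
    //       (365, "stale_365d",    4000),
    //   };
    //   // Anything older than 365 days falls through to ("ancient", 2000).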

    [Fact]
    public void FreshnessAdjustment_FutureEvidence_GetsFreshBucket()
    {
        // Arrange
        var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
        var evidenceTime = evaluationTime.AddDays(1); // Future evidence

        // Act
        var result = _freshnessService.AdjustForFreshness(100, evidenceTime, evaluationTime);

        // Assert
        Assert.Equal("fresh_7d", result.BucketName);
        Assert.Equal(10000, result.MultiplierBps);
        Assert.Equal(0, result.EvidenceAgeDays);
    }
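
    // The zero EvidenceAgeDays above suggests ages are clamped at zero before the
    // bucket lookup -- e.g. Math.Max(0, (evaluationTime - evidenceTime).Days) --
    // though that is an inference from the assertion, not from the implementation.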

    #endregion

    #region Bucket Lookup Tests

    [Fact]
    public void GetFreshnessBucket_ReturnsCorrectPercentage()
    {
        // Arrange
        var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
        var evidenceTime = evaluationTime.AddDays(-60); // 60 days old

        // Act
        var result = _freshnessService.GetFreshnessBucket(evidenceTime, evaluationTime);

        // Assert
        Assert.Equal(60, result.AgeDays);
        Assert.Equal("moderate_90d", result.BucketName);
        Assert.Equal(7500, result.MultiplierBps);
        Assert.Equal(75m, result.MultiplierPercent);
    }

    #endregion

    #region Determinism Tests

    [Fact]
    public void FreshnessAdjustment_SameInputs_AlwaysProducesSameOutput()
    {
        // Test determinism across multiple invocations
        var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
        var evidenceTime = evaluationTime.AddDays(-45);

        var results = new List<FreshnessAdjustedScore>();
        for (int i = 0; i < 100; i++)
        {
            results.Add(_freshnessService.AdjustForFreshness(85, evidenceTime, evaluationTime));
        }

        Assert.True(results.All(r => r.AdjustedScore == results[0].AdjustedScore));
        Assert.True(results.All(r => r.MultiplierBps == results[0].MultiplierBps));
        Assert.True(results.All(r => r.BucketName == results[0].BucketName));
    }

    [Fact]
    public void FreshnessAdjustment_BasisPointMath_AvoidFloatingPointErrors()
    {
        // Verify integer math produces predictable results
        var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
        var evidenceTime = evaluationTime.AddDays(-45);

        // Score that could produce floating point issues if using decimals
        var result = _freshnessService.AdjustForFreshness(33, evidenceTime, evaluationTime);

        // 33 * 7500 / 10000 = 24.75 -> rounds to 24 with integer division
        Assert.Equal(24, result.AdjustedScore);
    }
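
    // Taken together with the bucket tests, the asserted values imply the core
    // adjustment is plain integer math -- presumably something like
    //   adjustedScore = baseScore * multiplierBps / 10000;
    // where C# integer division truncates (33 * 7500 / 10000 == 24). This is an
    // inference from the assertions; the actual implementation is not in this diff.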

    #endregion

    #region Edge Cases

    [Fact]
    public void FreshnessAdjustment_ZeroScore_ReturnsZero()
    {
        var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
        var evidenceTime = evaluationTime.AddDays(-30);

        var result = _freshnessService.AdjustForFreshness(0, evidenceTime, evaluationTime);

        Assert.Equal(0, result.AdjustedScore);
    }

    [Fact]
    public void FreshnessAdjustment_VeryOldEvidence_StillGetsMinMultiplier()
    {
        var evaluationTime = new DateTimeOffset(2025, 12, 16, 12, 0, 0, TimeSpan.Zero);
        var evidenceTime = evaluationTime.AddDays(-3650); // 10 years old

        var result = _freshnessService.AdjustForFreshness(100, evidenceTime, evaluationTime);

        Assert.Equal("ancient", result.BucketName);
        Assert.Equal(2000, result.MultiplierBps); // Minimum multiplier
        Assert.Equal(20, result.AdjustedScore);
    }

    #endregion
}