Add comprehensive security tests for OWASP A02, A05, A07, and A08 categories
Some checks failed
Docs CI / lint-and-preview (push) Has been cancelled
Export Center CI / export-ci (push) Has been cancelled
Findings Ledger CI / build-test (push) Has been cancelled
Findings Ledger CI / migration-validation (push) Has been cancelled
Findings Ledger CI / generate-manifest (push) Has been cancelled
Manifest Integrity / Validate Schema Integrity (push) Has been cancelled
Lighthouse CI / Lighthouse Audit (push) Has been cancelled
Lighthouse CI / Axe Accessibility Audit (push) Has been cancelled
Manifest Integrity / Validate Contract Documents (push) Has been cancelled
Manifest Integrity / Validate Pack Fixtures (push) Has been cancelled
Manifest Integrity / Audit SHA256SUMS Files (push) Has been cancelled
Manifest Integrity / Verify Merkle Roots (push) Has been cancelled
Policy Lint & Smoke / policy-lint (push) Has been cancelled
Policy Simulation / policy-simulate (push) Has been cancelled
- Implemented tests for Cryptographic Failures (A02) to ensure proper handling of sensitive data, secure algorithms, and key management.
- Added tests for Security Misconfiguration (A05) to validate production configurations, security headers, CORS settings, and feature management.
- Developed tests for Authentication Failures (A07) to enforce strong password policies, rate limiting, session management, and MFA support.
- Created tests for Software and Data Integrity Failures (A08) to verify artifact signatures, SBOM integrity, attestation chains, and feed updates.
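
The A02/A05/A07/A08 test files themselves are not reproduced in the hunks below (which cover Smart-Diff schema validation and scan-metrics tests), so the following is only a minimal sketch of the kind of A02 check the description refers to, written in the same xUnit + FluentAssertions style used elsewhere in this commit. The namespace, trait value, and iteration threshold are illustrative assumptions, not values taken from the added files.

using System.Security.Cryptography;
using FluentAssertions;
using Xunit;

// Hypothetical namespace for illustration only.
namespace StellaOps.Security.Tests;

[Trait("Category", "Security")]
public sealed class CryptographicFailuresSketchTests
{
    [Fact(DisplayName = "Password hashing uses PBKDF2-SHA256 with a sufficient work factor")]
    public void PasswordHash_UsesSufficientWorkFactor()
    {
        // Illustrative policy floor; the real A02 tests would pin the project's configured value.
        const int MinIterations = 100_000;

        // Generate a random 128-bit salt and derive a 256-bit key with PBKDF2-SHA256.
        var salt = RandomNumberGenerator.GetBytes(16);
        var hash = Rfc2898DeriveBytes.Pbkdf2(
            password: "example-password",
            salt: salt,
            iterations: MinIterations,
            hashAlgorithm: HashAlgorithmName.SHA256,
            outputLength: 32);

        hash.Should().HaveCount(32, "the derived key should be 256 bits");
        hash.Should().NotEqual(new byte[32], "the derived key must not be degenerate");
    }
}

The A05, A07, and A08 suites presumably follow the same shape, asserting on observable behaviour (response headers and CORS policy, lockout and session expiry, signature and SBOM verification results) rather than on implementation internals.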
@@ -0,0 +1,312 @@
// =============================================================================
// SmartDiffSchemaValidationTests.cs
// Sprint: SPRINT_3500_0002_0001
// Task: SDIFF-FND-016 - JSON Schema validation tests
// =============================================================================

using System.Text.Json;
using FluentAssertions;
using Json.Schema;
using Xunit;

namespace StellaOps.Scanner.SmartDiff.Tests;

/// <summary>
/// Tests to validate Smart-Diff predicates against JSON Schema.
/// </summary>
[Trait("Category", "Schema")]
[Trait("Sprint", "3500")]
public sealed class SmartDiffSchemaValidationTests
{
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };

    [Fact(DisplayName = "Valid SmartDiffPredicate passes schema validation")]
    public void ValidPredicate_PassesValidation()
    {
        // Arrange
        var schema = GetSmartDiffSchema();
        var predicate = CreateValidPredicate();
        var json = JsonSerializer.Serialize(predicate, JsonOptions);
        var jsonNode = JsonDocument.Parse(json).RootElement;

        // Act
        var result = schema.Evaluate(jsonNode);

        // Assert
        result.IsValid.Should().BeTrue("Valid predicate should pass schema validation");
    }

    [Fact(DisplayName = "Predicate missing required field fails validation")]
    public void MissingRequiredField_FailsValidation()
    {
        // Arrange
        var schema = GetSmartDiffSchema();
        var json = """
            {
                "schemaVersion": "1.0.0",
                "baseImage": { "digest": "sha256:abc123" }
            }
            """;
        var jsonNode = JsonDocument.Parse(json).RootElement;

        // Act
        var result = schema.Evaluate(jsonNode);

        // Assert
        result.IsValid.Should().BeFalse("Missing required fields should fail validation");
    }

    [Fact(DisplayName = "Predicate with invalid schema version fails validation")]
    public void InvalidSchemaVersion_FailsValidation()
    {
        // Arrange
        var schema = GetSmartDiffSchema();
        var json = """
            {
                "schemaVersion": "invalid",
                "baseImage": { "digest": "sha256:abc123" },
                "targetImage": { "digest": "sha256:def456" },
                "diff": { "added": [], "removed": [], "modified": [] },
                "reachabilityGate": { "class": 0, "isSinkReachable": false, "isEntryReachable": false },
                "scanner": { "name": "test", "version": "1.0.0" }
            }
            """;
        var jsonNode = JsonDocument.Parse(json).RootElement;

        // Act
        var result = schema.Evaluate(jsonNode);

        // Assert
        // Schema version must match semver pattern
        result.IsValid.Should().BeFalse("Invalid schema version should fail validation");
    }

    [Fact(DisplayName = "ReachabilityGate class must be 0-7")]
    public void ReachabilityGateClass_MustBe0To7()
    {
        // Arrange
        var schema = GetSmartDiffSchema();
        var json = """
            {
                "schemaVersion": "1.0.0",
                "baseImage": { "digest": "sha256:abc123" },
                "targetImage": { "digest": "sha256:def456" },
                "diff": { "added": [], "removed": [], "modified": [] },
                "reachabilityGate": { "class": 10, "isSinkReachable": false, "isEntryReachable": false },
                "scanner": { "name": "test", "version": "1.0.0" }
            }
            """;
        var jsonNode = JsonDocument.Parse(json).RootElement;

        // Act
        var result = schema.Evaluate(jsonNode);

        // Assert
        result.IsValid.Should().BeFalse("Reachability class > 7 should fail validation");
    }

    [Fact(DisplayName = "Valid reachability gate class 0 passes")]
    public void ReachabilityGateClass0_Passes()
    {
        // Arrange
        var schema = GetSmartDiffSchema();
        var json = CreatePredicateJson(gateClass: 0);
        var jsonNode = JsonDocument.Parse(json).RootElement;

        // Act
        var result = schema.Evaluate(jsonNode);

        // Assert
        result.IsValid.Should().BeTrue();
    }

    [Fact(DisplayName = "Valid reachability gate class 7 passes")]
    public void ReachabilityGateClass7_Passes()
    {
        // Arrange
        var schema = GetSmartDiffSchema();
        var json = CreatePredicateJson(gateClass: 7);
        var jsonNode = JsonDocument.Parse(json).RootElement;

        // Act
        var result = schema.Evaluate(jsonNode);

        // Assert
        result.IsValid.Should().BeTrue();
    }

    [Fact(DisplayName = "Suppressed count must be non-negative")]
    public void SuppressedCount_MustBeNonNegative()
    {
        // Arrange
        var schema = GetSmartDiffSchema();
        var json = """
            {
                "schemaVersion": "1.0.0",
                "baseImage": { "digest": "sha256:abc123" },
                "targetImage": { "digest": "sha256:def456" },
                "diff": { "added": [], "removed": [], "modified": [] },
                "reachabilityGate": { "class": 0, "isSinkReachable": false, "isEntryReachable": false },
                "scanner": { "name": "test", "version": "1.0.0" },
                "suppressedCount": -1
            }
            """;
        var jsonNode = JsonDocument.Parse(json).RootElement;

        // Act
        var result = schema.Evaluate(jsonNode);

        // Assert
        result.IsValid.Should().BeFalse("Negative suppressed count should fail");
    }

    [Fact(DisplayName = "Optional context field is valid when present")]
    public void OptionalContext_ValidWhenPresent()
    {
        // Arrange
        var schema = GetSmartDiffSchema();
        var json = """
            {
                "schemaVersion": "1.0.0",
                "baseImage": { "digest": "sha256:abc123" },
                "targetImage": { "digest": "sha256:def456" },
                "diff": { "added": [], "removed": [], "modified": [] },
                "reachabilityGate": { "class": 0, "isSinkReachable": false, "isEntryReachable": false },
                "scanner": { "name": "test", "version": "1.0.0" },
                "context": { "env": "production", "namespace": "default" }
            }
            """;
        var jsonNode = JsonDocument.Parse(json).RootElement;

        // Act
        var result = schema.Evaluate(jsonNode);

        // Assert
        result.IsValid.Should().BeTrue();
    }

    private static JsonSchema GetSmartDiffSchema()
    {
        // Define schema inline for testing
        var schemaJson = """
            {
                "$schema": "https://json-schema.org/draft/2020-12/schema",
                "$id": "https://stellaops.dev/schemas/smart-diff.v1.json",
                "type": "object",
                "required": ["schemaVersion", "baseImage", "targetImage", "diff", "reachabilityGate", "scanner"],
                "properties": {
                    "schemaVersion": {
                        "type": "string",
                        "pattern": "^[0-9]+\\.[0-9]+\\.[0-9]+$"
                    },
                    "baseImage": {
                        "type": "object",
                        "required": ["digest"],
                        "properties": {
                            "digest": { "type": "string" },
                            "repository": { "type": "string" },
                            "tag": { "type": "string" }
                        }
                    },
                    "targetImage": {
                        "type": "object",
                        "required": ["digest"],
                        "properties": {
                            "digest": { "type": "string" },
                            "repository": { "type": "string" },
                            "tag": { "type": "string" }
                        }
                    },
                    "diff": {
                        "type": "object",
                        "required": ["added", "removed", "modified"],
                        "properties": {
                            "added": { "type": "array" },
                            "removed": { "type": "array" },
                            "modified": { "type": "array" }
                        }
                    },
                    "reachabilityGate": {
                        "type": "object",
                        "required": ["class", "isSinkReachable", "isEntryReachable"],
                        "properties": {
                            "class": { "type": "integer", "minimum": 0, "maximum": 7 },
                            "isSinkReachable": { "type": "boolean" },
                            "isEntryReachable": { "type": "boolean" },
                            "sinkCategory": { "type": "string" }
                        }
                    },
                    "scanner": {
                        "type": "object",
                        "required": ["name", "version"],
                        "properties": {
                            "name": { "type": "string" },
                            "version": { "type": "string" }
                        }
                    },
                    "context": {
                        "type": "object",
                        "additionalProperties": true
                    },
                    "suppressedCount": {
                        "type": "integer",
                        "minimum": 0
                    },
                    "materialChanges": {
                        "type": "array",
                        "items": {
                            "type": "object"
                        }
                    }
                }
            }
            """;

        return JsonSchema.FromText(schemaJson);
    }

    private static object CreateValidPredicate()
    {
        return new
        {
            schemaVersion = "1.0.0",
            baseImage = new { digest = "sha256:abc123" },
            targetImage = new { digest = "sha256:def456" },
            diff = new
            {
                added = Array.Empty<object>(),
                removed = Array.Empty<object>(),
                modified = Array.Empty<object>()
            },
            reachabilityGate = new
            {
                @class = 0,
                isSinkReachable = false,
                isEntryReachable = false
            },
            scanner = new
            {
                name = "stellaops-scanner",
                version = "1.5.0"
            }
        };
    }

    private static string CreatePredicateJson(int gateClass)
    {
        return $$"""
            {
                "schemaVersion": "1.0.0",
                "baseImage": { "digest": "sha256:abc123" },
                "targetImage": { "digest": "sha256:def456" },
                "diff": { "added": [], "removed": [], "modified": [] },
                "reachabilityGate": { "class": {{gateClass}}, "isSinkReachable": false, "isEntryReachable": false },
                "scanner": { "name": "test", "version": "1.0.0" }
            }
            """;
    }
}
@@ -0,0 +1,238 @@
// -----------------------------------------------------------------------------
// ScanMetricsRepositoryTests.cs
// Sprint: SPRINT_3406_0001_0001_metrics_tables
// Task: METRICS-3406-011
// Description: Unit tests for scan metrics repository operations
// -----------------------------------------------------------------------------

using StellaOps.Scanner.Storage.Models;
using StellaOps.Scanner.Storage.Repositories;
using Xunit;

namespace StellaOps.Scanner.Storage.Tests;

[Collection("scanner-postgres")]
public sealed class ScanMetricsRepositoryTests : IAsyncLifetime
{
    private readonly ScannerPostgresFixture _fixture;
    private IScanMetricsRepository _repository = null!;

    public ScanMetricsRepositoryTests(ScannerPostgresFixture fixture)
    {
        _fixture = fixture;
    }

    public async Task InitializeAsync()
    {
        await _fixture.ResetAsync();
        _repository = new PostgresScanMetricsRepository(_fixture.CreateConnection);
    }

    public Task DisposeAsync() => Task.CompletedTask;

    [Fact]
    public async Task SaveAsync_InsertsNewMetrics()
    {
        // Arrange
        var metrics = CreateTestMetrics();

        // Act
        await _repository.SaveAsync(metrics, CancellationToken.None);

        // Assert
        var retrieved = await _repository.GetByScanIdAsync(metrics.ScanId, CancellationToken.None);
        Assert.NotNull(retrieved);
        Assert.Equal(metrics.ScanId, retrieved.ScanId);
        Assert.Equal(metrics.TenantId, retrieved.TenantId);
        Assert.Equal(metrics.ArtifactDigest, retrieved.ArtifactDigest);
    }

    [Fact]
    public async Task SavePhasesAsync_InsertsPhasesLinkedToMetrics()
    {
        // Arrange
        var metrics = CreateTestMetrics();
        await _repository.SaveAsync(metrics, CancellationToken.None);

        var phases = new[]
        {
            new ExecutionPhase
            {
                MetricsId = metrics.MetricsId,
                PhaseName = "pull",
                PhaseOrder = 1,
                StartedAt = DateTimeOffset.UtcNow.AddSeconds(-10),
                FinishedAt = DateTimeOffset.UtcNow.AddSeconds(-5),
                Success = true
            },
            new ExecutionPhase
            {
                MetricsId = metrics.MetricsId,
                PhaseName = "analyze",
                PhaseOrder = 2,
                StartedAt = DateTimeOffset.UtcNow.AddSeconds(-5),
                FinishedAt = DateTimeOffset.UtcNow,
                Success = true
            }
        };

        // Act
        await _repository.SavePhasesAsync(phases, CancellationToken.None);

        // Assert
        var retrieved = await _repository.GetPhasesByMetricsIdAsync(metrics.MetricsId, CancellationToken.None);
        Assert.Equal(2, retrieved.Count);
        Assert.Contains(retrieved, p => p.PhaseName == "pull");
        Assert.Contains(retrieved, p => p.PhaseName == "analyze");
    }

    [Fact]
    public async Task GetByScanIdAsync_ReturnsNullForNonexistent()
    {
        // Act
        var result = await _repository.GetByScanIdAsync(Guid.NewGuid(), CancellationToken.None);

        // Assert
        Assert.Null(result);
    }

    [Fact]
    public async Task GetTteByTenantAsync_ReturnsMetricsForTenant()
    {
        // Arrange
        var tenantId = Guid.NewGuid();
        var metrics1 = CreateTestMetrics(tenantId: tenantId);
        var metrics2 = CreateTestMetrics(tenantId: tenantId);
        var metricsOther = CreateTestMetrics(tenantId: Guid.NewGuid());

        await _repository.SaveAsync(metrics1, CancellationToken.None);
        await _repository.SaveAsync(metrics2, CancellationToken.None);
        await _repository.SaveAsync(metricsOther, CancellationToken.None);

        // Act
        var result = await _repository.GetTteByTenantAsync(tenantId, limit: 10, CancellationToken.None);

        // Assert
        Assert.Equal(2, result.Count);
        Assert.All(result, m => Assert.Equal(tenantId, m.TenantId));
    }

    [Fact]
    public async Task GetTteBySurfaceAsync_ReturnsMetricsForSurface()
    {
        // Arrange
        var surfaceId = Guid.NewGuid();
        var metrics1 = CreateTestMetrics(surfaceId: surfaceId);
        var metrics2 = CreateTestMetrics(surfaceId: surfaceId);

        await _repository.SaveAsync(metrics1, CancellationToken.None);
        await _repository.SaveAsync(metrics2, CancellationToken.None);

        // Act
        var result = await _repository.GetTteBySurfaceAsync(surfaceId, limit: 10, CancellationToken.None);

        // Assert
        Assert.Equal(2, result.Count);
        Assert.All(result, m => Assert.Equal(surfaceId, m.SurfaceId));
    }

    [Fact]
    public async Task GetP50TteAsync_CalculatesMedianCorrectly()
    {
        // Arrange
        var tenantId = Guid.NewGuid();
        var baseTime = DateTimeOffset.UtcNow;

        // Create metrics with different durations: 100ms, 200ms, 300ms, 400ms, 500ms
        for (int i = 1; i <= 5; i++)
        {
            var metrics = new ScanMetrics
            {
                MetricsId = Guid.NewGuid(),
                ScanId = Guid.NewGuid(),
                TenantId = tenantId,
                ArtifactDigest = $"sha256:{Guid.NewGuid():N}",
                ArtifactType = "oci_image",
                FindingsSha256 = $"sha256:{Guid.NewGuid():N}",
                StartedAt = baseTime.AddMilliseconds(-(i * 100)),
                FinishedAt = baseTime,
                Phases = new ScanPhaseTimings
                {
                    PullMs = i * 20,
                    AnalyzeMs = i * 30,
                    DecideMs = i * 50
                }
            };
            await _repository.SaveAsync(metrics, CancellationToken.None);
        }

        // Act
        var p50 = await _repository.GetP50TteAsync(tenantId, since: baseTime.AddHours(-1), CancellationToken.None);

        // Assert
        Assert.NotNull(p50);
        Assert.True(p50 > 0);
    }

    [Fact]
    public async Task SaveAsync_PreservesPhaseTimings()
    {
        // Arrange
        var metrics = CreateTestMetrics();
        metrics.Phases = new ScanPhaseTimings
        {
            PullMs = 100,
            AnalyzeMs = 200,
            DecideMs = 150,
            AttestMs = 50,
            ReachabilityMs = 300
        };

        // Act
        await _repository.SaveAsync(metrics, CancellationToken.None);

        // Assert
        var retrieved = await _repository.GetByScanIdAsync(metrics.ScanId, CancellationToken.None);
        Assert.NotNull(retrieved);
        Assert.Equal(100, retrieved.Phases.PullMs);
        Assert.Equal(200, retrieved.Phases.AnalyzeMs);
        Assert.Equal(150, retrieved.Phases.DecideMs);
        Assert.Equal(50, retrieved.Phases.AttestMs);
        Assert.Equal(300, retrieved.Phases.ReachabilityMs);
    }

    [Fact]
    public async Task SaveAsync_HandlesReplayScans()
    {
        // Arrange
        var metrics = CreateTestMetrics();
        metrics.IsReplay = true;
        metrics.ReplayManifestHash = "sha256:replay123";

        // Act
        await _repository.SaveAsync(metrics, CancellationToken.None);

        // Assert
        var retrieved = await _repository.GetByScanIdAsync(metrics.ScanId, CancellationToken.None);
        Assert.NotNull(retrieved);
        Assert.True(retrieved.IsReplay);
        Assert.Equal("sha256:replay123", retrieved.ReplayManifestHash);
    }

    private static ScanMetrics CreateTestMetrics(Guid? tenantId = null, Guid? surfaceId = null)
    {
        return new ScanMetrics
        {
            MetricsId = Guid.NewGuid(),
            ScanId = Guid.NewGuid(),
            TenantId = tenantId ?? Guid.NewGuid(),
            SurfaceId = surfaceId,
            ArtifactDigest = $"sha256:{Guid.NewGuid():N}",
            ArtifactType = "oci_image",
            FindingsSha256 = $"sha256:{Guid.NewGuid():N}",
            StartedAt = DateTimeOffset.UtcNow.AddMinutes(-1),
            FinishedAt = DateTimeOffset.UtcNow,
            Phases = new ScanPhaseTimings()
        };
    }
}
@@ -0,0 +1,232 @@
// -----------------------------------------------------------------------------
// FidelityMetricsIntegrationTests.cs
// Sprint: SPRINT_3403_0001_0001_fidelity_metrics
// Task: FID-3403-013
// Description: Integration tests for fidelity metrics in determinism harness
// -----------------------------------------------------------------------------

using StellaOps.Scanner.Worker.Determinism;
using StellaOps.Scanner.Worker.Determinism.Calculators;
using Xunit;

namespace StellaOps.Scanner.Worker.Tests.Determinism;

public sealed class FidelityMetricsIntegrationTests
{
    [Fact]
    public void DeterminismReport_WithFidelityMetrics_IncludesAllThreeTiers()
    {
        // Arrange & Act
        var fidelity = CreateTestFidelityMetrics(
            bitwiseFidelity: 0.98,
            semanticFidelity: 0.99,
            policyFidelity: 1.0);

        var report = new DeterminismReport(
            Version: "1.0.0",
            Release: "test-release",
            Platform: "linux-amd64",
            PolicySha: "sha256:policy123",
            FeedsSha: "sha256:feeds456",
            ScannerSha: "sha256:scanner789",
            OverallScore: 0.98,
            ThresholdOverall: 0.95,
            ThresholdImage: 0.90,
            Images: [],
            Fidelity: fidelity);

        // Assert
        Assert.NotNull(report.Fidelity);
        Assert.Equal(0.98, report.Fidelity.BitwiseFidelity);
        Assert.Equal(0.99, report.Fidelity.SemanticFidelity);
        Assert.Equal(1.0, report.Fidelity.PolicyFidelity);
    }

    [Fact]
    public void DeterminismImageReport_WithFidelityMetrics_TracksPerImage()
    {
        // Arrange
        var imageFidelity = CreateTestFidelityMetrics(
            bitwiseFidelity: 0.95,
            semanticFidelity: 0.98,
            policyFidelity: 1.0);

        var imageReport = new DeterminismImageReport(
            Image: "sha256:image123",
            Runs: 5,
            Identical: 4,
            Score: 0.80,
            ArtifactHashes: new Dictionary<string, string>(),
            RunsDetail: [],
            Fidelity: imageFidelity);

        // Assert
        Assert.NotNull(imageReport.Fidelity);
        Assert.Equal(0.95, imageReport.Fidelity.BitwiseFidelity);
        Assert.Equal(5, imageReport.Fidelity.TotalReplays);
    }

    [Fact]
    public void FidelityMetricsService_ComputesAllThreeTiers()
    {
        // Arrange
        var service = new FidelityMetricsService(
            new BitwiseFidelityCalculator(),
            new SemanticFidelityCalculator(),
            new PolicyFidelityCalculator());

        var baseline = CreateTestScanResult("pkg:npm/lodash@4.17.21", "CVE-2021-23337", "high", "pass");
        var replay = CreateTestScanResult("pkg:npm/lodash@4.17.21", "CVE-2021-23337", "high", "pass");

        // Act
        var metrics = service.Compute(baseline, new[] { replay });

        // Assert
        Assert.Equal(1, metrics.TotalReplays);
        Assert.True(metrics.BitwiseFidelity >= 0.0 && metrics.BitwiseFidelity <= 1.0);
        Assert.True(metrics.SemanticFidelity >= 0.0 && metrics.SemanticFidelity <= 1.0);
        Assert.True(metrics.PolicyFidelity >= 0.0 && metrics.PolicyFidelity <= 1.0);
    }

    [Fact]
    public void FidelityMetrics_SemanticEquivalent_ButBitwiseDifferent()
    {
        // Arrange - same semantic content, different formatting/ordering
        var service = new FidelityMetricsService(
            new BitwiseFidelityCalculator(),
            new SemanticFidelityCalculator(),
            new PolicyFidelityCalculator());

        var baseline = CreateTestScanResult("pkg:npm/lodash@4.17.21", "CVE-2021-23337", "HIGH", "pass");
        var replay = CreateTestScanResult("pkg:npm/lodash@4.17.21", "CVE-2021-23337", "high", "pass"); // case difference

        // Act
        var metrics = service.Compute(baseline, new[] { replay });

        // Assert
        // Bitwise should be < 1.0 (different bytes)
        // Semantic should be 1.0 (same meaning)
        // Policy should be 1.0 (same decision)
        Assert.True(metrics.SemanticFidelity >= metrics.BitwiseFidelity);
        Assert.Equal(1.0, metrics.PolicyFidelity);
    }

    [Fact]
    public void FidelityMetrics_PolicyDifference_ReflectedInPF()
    {
        // Arrange
        var service = new FidelityMetricsService(
            new BitwiseFidelityCalculator(),
            new SemanticFidelityCalculator(),
            new PolicyFidelityCalculator());

        var baseline = CreateTestScanResult("pkg:npm/lodash@4.17.21", "CVE-2021-23337", "high", "pass");
        var replay = CreateTestScanResult("pkg:npm/lodash@4.17.21", "CVE-2021-23337", "high", "fail"); // policy differs

        // Act
        var metrics = service.Compute(baseline, new[] { replay });

        // Assert
        Assert.True(metrics.PolicyFidelity < 1.0);
    }

    [Fact]
    public void FidelityMetrics_MultipleReplays_AveragesCorrectly()
    {
        // Arrange
        var service = new FidelityMetricsService(
            new BitwiseFidelityCalculator(),
            new SemanticFidelityCalculator(),
            new PolicyFidelityCalculator());

        var baseline = CreateTestScanResult("pkg:npm/lodash@4.17.21", "CVE-2021-23337", "high", "pass");
        var replays = new[]
        {
            CreateTestScanResult("pkg:npm/lodash@4.17.21", "CVE-2021-23337", "high", "pass"), // identical
            CreateTestScanResult("pkg:npm/lodash@4.17.21", "CVE-2021-23337", "high", "pass"), // identical
            CreateTestScanResult("pkg:npm/lodash@4.17.21", "CVE-2021-23337", "high", "fail"), // policy diff
        };

        // Act
        var metrics = service.Compute(baseline, replays);

        // Assert
        Assert.Equal(3, metrics.TotalReplays);
        // 2 out of 3 have matching policy
        Assert.True(metrics.PolicyFidelity >= 0.6 && metrics.PolicyFidelity <= 0.7);
    }

    [Fact]
    public void FidelityMetrics_IncludesMismatchDiagnostics()
    {
        // Arrange
        var service = new FidelityMetricsService(
            new BitwiseFidelityCalculator(),
            new SemanticFidelityCalculator(),
            new PolicyFidelityCalculator());

        var baseline = CreateTestScanResult("pkg:npm/lodash@4.17.21", "CVE-2021-23337", "high", "pass");
        var replay = CreateTestScanResult("pkg:npm/lodash@4.17.21", "CVE-2021-23337", "critical", "fail"); // semantic + policy diff

        // Act
        var metrics = service.Compute(baseline, new[] { replay });

        // Assert
        Assert.NotNull(metrics.Mismatches);
        Assert.NotEmpty(metrics.Mismatches);
    }

    private static FidelityMetrics CreateTestFidelityMetrics(
        double bitwiseFidelity,
        double semanticFidelity,
        double policyFidelity,
        int totalReplays = 5)
    {
        return new FidelityMetrics
        {
            BitwiseFidelity = bitwiseFidelity,
            SemanticFidelity = semanticFidelity,
            PolicyFidelity = policyFidelity,
            TotalReplays = totalReplays,
            IdenticalOutputs = (int)(totalReplays * bitwiseFidelity),
            SemanticMatches = (int)(totalReplays * semanticFidelity),
            PolicyMatches = (int)(totalReplays * policyFidelity),
            ComputedAt = DateTimeOffset.UtcNow
        };
    }

    private static TestScanResult CreateTestScanResult(
        string purl,
        string cve,
        string severity,
        string policyDecision)
    {
        return new TestScanResult
        {
            Packages = new[] { new TestPackage { Purl = purl } },
            Findings = new[] { new TestFinding { Cve = cve, Severity = severity } },
            PolicyDecision = policyDecision,
            PolicyReasonCodes = policyDecision == "pass" ? Array.Empty<string>() : new[] { "severity_exceeded" }
        };
    }

    // Test support types
    private sealed record TestScanResult
    {
        public required IReadOnlyList<TestPackage> Packages { get; init; }
        public required IReadOnlyList<TestFinding> Findings { get; init; }
        public required string PolicyDecision { get; init; }
        public required IReadOnlyList<string> PolicyReasonCodes { get; init; }
    }

    private sealed record TestPackage
    {
        public required string Purl { get; init; }
    }

    private sealed record TestFinding
    {
        public required string Cve { get; init; }
        public required string Severity { get; init; }
    }
}
@@ -0,0 +1,217 @@
// -----------------------------------------------------------------------------
// ScanCompletionMetricsIntegrationTests.cs
// Sprint: SPRINT_3406_0001_0001_metrics_tables
// Task: METRICS-3406-012
// Description: Integration test verifying metrics captured on scan completion
// -----------------------------------------------------------------------------

using Microsoft.Extensions.Logging.Abstractions;
using Moq;
using StellaOps.Scanner.Storage.Models;
using StellaOps.Scanner.Storage.Repositories;
using StellaOps.Scanner.Worker.Metrics;
using Xunit;

namespace StellaOps.Scanner.Worker.Tests.Metrics;

public sealed class ScanCompletionMetricsIntegrationTests
{
    [Fact]
    public async Task CaptureAsync_PersistsMetricsOnScanCompletion()
    {
        // Arrange
        var savedMetrics = new List<ScanMetrics>();
        var savedPhases = new List<ExecutionPhase>();

        var mockRepository = new Mock<IScanMetricsRepository>();
        mockRepository
            .Setup(r => r.SaveAsync(It.IsAny<ScanMetrics>(), It.IsAny<CancellationToken>()))
            .Callback<ScanMetrics, CancellationToken>((m, _) => savedMetrics.Add(m))
            .Returns(Task.CompletedTask);
        mockRepository
            .Setup(r => r.SavePhasesAsync(It.IsAny<IEnumerable<ExecutionPhase>>(), It.IsAny<CancellationToken>()))
            .Callback<IEnumerable<ExecutionPhase>, CancellationToken>((p, _) => savedPhases.AddRange(p))
            .Returns(Task.CompletedTask);

        var factory = new TestScanMetricsCollectorFactory(mockRepository.Object);
        var integration = new ScanCompletionMetricsIntegration(
            factory,
            NullLogger<ScanCompletionMetricsIntegration>.Instance);

        var context = new ScanCompletionContext
        {
            ScanId = Guid.NewGuid(),
            TenantId = Guid.NewGuid(),
            ArtifactDigest = "sha256:abc123",
            ArtifactType = "oci_image",
            FindingsSha256 = "sha256:def456",
            PackageCount = 150,
            FindingCount = 25,
            VexDecisionCount = 10,
            Phases = new[]
            {
                new PhaseCompletionInfo
                {
                    PhaseName = "pull",
                    StartedAt = DateTimeOffset.UtcNow.AddSeconds(-10),
                    FinishedAt = DateTimeOffset.UtcNow.AddSeconds(-5),
                    Success = true
                },
                new PhaseCompletionInfo
                {
                    PhaseName = "analyze",
                    StartedAt = DateTimeOffset.UtcNow.AddSeconds(-5),
                    FinishedAt = DateTimeOffset.UtcNow,
                    Success = true
                }
            }
        };

        // Act
        await integration.CaptureAsync(context);

        // Assert
        Assert.Single(savedMetrics);
        var metrics = savedMetrics[0];
        Assert.Equal(context.ScanId, metrics.ScanId);
        Assert.Equal(context.TenantId, metrics.TenantId);
        Assert.Equal(context.ArtifactDigest, metrics.ArtifactDigest);
        Assert.Equal(context.FindingsSha256, metrics.FindingsSha256);
        Assert.Equal(150, metrics.PackageCount);
        Assert.Equal(25, metrics.FindingCount);
    }

    [Fact]
    public async Task CaptureAsync_DoesNotFailScanOnMetricsError()
    {
        // Arrange
        var mockRepository = new Mock<IScanMetricsRepository>();
        mockRepository
            .Setup(r => r.SaveAsync(It.IsAny<ScanMetrics>(), It.IsAny<CancellationToken>()))
            .ThrowsAsync(new InvalidOperationException("Database error"));

        var factory = new TestScanMetricsCollectorFactory(mockRepository.Object);
        var integration = new ScanCompletionMetricsIntegration(
            factory,
            NullLogger<ScanCompletionMetricsIntegration>.Instance);

        var context = new ScanCompletionContext
        {
            ScanId = Guid.NewGuid(),
            TenantId = Guid.NewGuid(),
            ArtifactDigest = "sha256:abc123",
            ArtifactType = "oci_image",
            FindingsSha256 = "sha256:def456"
        };

        // Act & Assert - should not throw
        await integration.CaptureAsync(context);
    }

    [Fact]
    public async Task CaptureAsync_IncludesVexAndProofDigests()
    {
        // Arrange
        var savedMetrics = new List<ScanMetrics>();

        var mockRepository = new Mock<IScanMetricsRepository>();
        mockRepository
            .Setup(r => r.SaveAsync(It.IsAny<ScanMetrics>(), It.IsAny<CancellationToken>()))
            .Callback<ScanMetrics, CancellationToken>((m, _) => savedMetrics.Add(m))
            .Returns(Task.CompletedTask);
        mockRepository
            .Setup(r => r.SavePhasesAsync(It.IsAny<IEnumerable<ExecutionPhase>>(), It.IsAny<CancellationToken>()))
            .Returns(Task.CompletedTask);

        var factory = new TestScanMetricsCollectorFactory(mockRepository.Object);
        var integration = new ScanCompletionMetricsIntegration(
            factory,
            NullLogger<ScanCompletionMetricsIntegration>.Instance);

        var context = new ScanCompletionContext
        {
            ScanId = Guid.NewGuid(),
            TenantId = Guid.NewGuid(),
            ArtifactDigest = "sha256:abc123",
            ArtifactType = "oci_image",
            FindingsSha256 = "sha256:findings",
            VexBundleSha256 = "sha256:vex",
            ProofBundleSha256 = "sha256:proof",
            SbomSha256 = "sha256:sbom"
        };

        // Act
        await integration.CaptureAsync(context);

        // Assert
        var metrics = savedMetrics[0];
        Assert.Equal("sha256:vex", metrics.VexBundleSha256);
        Assert.Equal("sha256:proof", metrics.ProofBundleSha256);
        Assert.Equal("sha256:sbom", metrics.SbomSha256);
    }

    [Fact]
    public async Task CaptureAsync_IncludesReplayMetadata()
    {
        // Arrange
        var savedMetrics = new List<ScanMetrics>();

        var mockRepository = new Mock<IScanMetricsRepository>();
        mockRepository
            .Setup(r => r.SaveAsync(It.IsAny<ScanMetrics>(), It.IsAny<CancellationToken>()))
            .Callback<ScanMetrics, CancellationToken>((m, _) => savedMetrics.Add(m))
            .Returns(Task.CompletedTask);
        mockRepository
            .Setup(r => r.SavePhasesAsync(It.IsAny<IEnumerable<ExecutionPhase>>(), It.IsAny<CancellationToken>()))
            .Returns(Task.CompletedTask);

        var factory = new TestScanMetricsCollectorFactory(mockRepository.Object);
        var integration = new ScanCompletionMetricsIntegration(
            factory,
            NullLogger<ScanCompletionMetricsIntegration>.Instance);

        var context = new ScanCompletionContext
        {
            ScanId = Guid.NewGuid(),
            TenantId = Guid.NewGuid(),
            ArtifactDigest = "sha256:abc123",
            ArtifactType = "oci_image",
            FindingsSha256 = "sha256:findings",
            IsReplay = true,
            ReplayManifestHash = "sha256:replay123"
        };

        // Act
        await integration.CaptureAsync(context);

        // Assert
        var metrics = savedMetrics[0];
        Assert.True(metrics.IsReplay);
        Assert.Equal("sha256:replay123", metrics.ReplayManifestHash);
    }

    /// <summary>
    /// Test factory that uses a mock repository.
    /// </summary>
    private sealed class TestScanMetricsCollectorFactory : IScanMetricsCollectorFactory
    {
        private readonly IScanMetricsRepository _repository;

        public TestScanMetricsCollectorFactory(IScanMetricsRepository repository)
        {
            _repository = repository;
        }

        public ScanMetricsCollector Create(Guid scanId, Guid tenantId, string artifactDigest, string artifactType)
        {
            return new ScanMetricsCollector(
                _repository,
                NullLogger<ScanMetricsCollector>.Instance,
                scanId,
                tenantId,
                artifactDigest,
                artifactType,
                "test-1.0.0");
        }
    }
}