sln build fix (again), tests fixes, audit work and doctors work
This commit is contained in:
@@ -15,7 +15,7 @@ Validate CGS determinism and cross-platform hash stability.
|
||||
## Required Reading
|
||||
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
|
||||
- `docs/modules/platform/architecture-overview.md`
|
||||
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
|
||||
- `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
|
||||
|
||||
## Working Agreement
|
||||
- 1. Use fixed time and IDs; avoid Guid.NewGuid or DateTimeOffset.UtcNow in fixtures.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Determinism Tests Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Graph Indexer Tests Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# StellaOps.Integration.AirGap Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -0,0 +1,354 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// CrossServiceClockSkewTests.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 3.2
|
||||
// Description: Tests for cross-service behavior under clock skew conditions
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.Integration.ClockSkew.Fixtures;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.ClockSkew;
|
||||
|
||||
/// <summary>
/// Tests for cross-service interactions under various clock skew scenarios.
/// Validates that HLC maintains correct event ordering despite wall clock differences.
/// Services simulated: scanner, concelier, gateway, backend (created by the fixture).
/// </summary>
[Trait("Category", TestCategories.Integration)]
[Trait("Category", TestCategories.HLC)]
[Trait("Category", "ClockSkew")]
public class CrossServiceClockSkewTests : IClassFixture<ClockSkewServiceFixture>
{
    private readonly ClockSkewServiceFixture _fixture;

    // The fixture instance is shared across all tests in this class (IClassFixture),
    // so each test run resets the shared clocks and event log to start from a clean,
    // deterministic state.
    public CrossServiceClockSkewTests(ClockSkewServiceFixture fixture)
    {
        _fixture = fixture;
        _fixture.ResetAllClocks();
        _fixture.ClearEventLog();
    }

    #region Scanner-Concelier Skew Tests

    [Fact]
    public void Scanner_Concelier_5SecondSkew_EventOrderingMaintained()
    {
        // Arrange - Scanner is 5 seconds ahead of Concelier
        _fixture.SetServiceClockSkew("scanner", TimeSpan.FromSeconds(5));
        _fixture.SetServiceClockSkew("concelier", TimeSpan.Zero);

        // Act - Scanner sends scan result to Concelier
        var scanComplete = _fixture.SendEvent("scanner", "concelier", "ScanComplete", "scan-123");
        var advisoryQuery = _fixture.SendEvent("concelier", "scanner", "AdvisoryQuery", "query-456");

        // Assert - HLC ordering maintained despite wall clock skew
        _fixture.VerifyHlcOrdering().Should().BeTrue();

        // Scanner's HLC should be ahead but causality preserved: the receiver's
        // Receive() must always produce a timestamp strictly after the sender's Tick().
        scanComplete.SourceHlcTimestamp.Should().BeLessThan(scanComplete.TargetHlcTimestamp);
        advisoryQuery.SourceHlcTimestamp.Should().BeLessThan(advisoryQuery.TargetHlcTimestamp);
    }

    [Fact]
    public void Scanner_Concelier_BackwardSkew_StillOrders()
    {
        // Arrange - Scanner is 5 seconds BEHIND Concelier
        _fixture.SetServiceClockSkew("scanner", TimeSpan.FromSeconds(-5));
        _fixture.SetServiceClockSkew("concelier", TimeSpan.Zero);

        // Act - Messages flow both directions
        var evt1 = _fixture.SendEvent("scanner", "concelier", "ScanRequest");
        var evt2 = _fixture.SendEvent("concelier", "scanner", "AdvisoryData");
        var evt3 = _fixture.SendEvent("scanner", "concelier", "ScanComplete");

        // Assert
        _fixture.VerifyHlcOrdering().Should().BeTrue();

        // Each response should have higher HLC than request
        evt1.TargetHlcTimestamp.Should().BeGreaterThan(evt1.SourceHlcTimestamp);
        evt2.TargetHlcTimestamp.Should().BeGreaterThan(evt2.SourceHlcTimestamp);
        evt3.TargetHlcTimestamp.Should().BeGreaterThan(evt3.SourceHlcTimestamp);
    }

    #endregion

    #region Gateway-Backend Skew Tests

    [Fact]
    public void Gateway_Backend_ClockDrift_NoTimestampConflicts()
    {
        // Arrange - Gateway ahead, Backend behind
        _fixture.SetServiceClockSkew("gateway", TimeSpan.FromSeconds(3));
        _fixture.SetServiceClockSkew("backend", TimeSpan.FromSeconds(-2));

        // Act - Simulate request/response flow
        var request = _fixture.SendEvent("gateway", "backend", "HttpRequest", "/api/scan");
        var processing = _fixture.GenerateLocalEvent("backend", "ProcessingStarted");
        _fixture.AdvanceServiceTime("backend", TimeSpan.FromMilliseconds(50));
        var response = _fixture.SendEvent("backend", "gateway", "HttpResponse", "200 OK");

        // Assert - Ordering should be: request < processing < response
        _fixture.VerifyHlcOrdering().Should().BeTrue();
        request.TargetHlcTimestamp.Should().BeLessThan(response.SourceHlcTimestamp);
    }

    [Fact]
    public void Gateway_Backend_RapidRequests_UniqueTimestamps()
    {
        // Arrange - Different skews
        _fixture.SetServiceClockSkew("gateway", TimeSpan.FromMilliseconds(500));
        _fixture.SetServiceClockSkew("backend", TimeSpan.FromMilliseconds(-500));

        // Act - Send 100 rapid requests
        var events = new List<CrossServiceEvent>();
        for (var i = 0; i < 100; i++)
        {
            events.Add(_fixture.SendEvent("gateway", "backend", "Request", $"req-{i}"));
        }

        // Assert - All HLC timestamps should be unique
        // (HLC logical counters must disambiguate events sharing the same wall tick).
        var allTimestamps = events
            .SelectMany(e => new[] { e.SourceHlcTimestamp, e.TargetHlcTimestamp })
            .ToList();

        allTimestamps.Should().OnlyHaveUniqueItems();
    }

    #endregion

    #region All Services Random Skew Tests

    [Fact]
    public void AllServices_RandomSkew_UpTo30Seconds_SystemFunctions()
    {
        // Arrange - Apply random skew up to 30 seconds to all services
        _fixture.SetServiceClockSkew("scanner", TimeSpan.FromSeconds(15));
        _fixture.SetServiceClockSkew("concelier", TimeSpan.FromSeconds(-10));
        _fixture.SetServiceClockSkew("gateway", TimeSpan.FromSeconds(25));
        _fixture.SetServiceClockSkew("backend", TimeSpan.FromSeconds(-30));

        // Act - Simulate multi-service workflow
        // Gateway receives request
        var gatewayReceive = _fixture.GenerateLocalEvent("gateway", "RequestReceived");

        // Gateway calls backend
        var toBackend = _fixture.SendEvent("gateway", "backend", "BackendCall");

        // Backend calls scanner
        var toScanner = _fixture.SendEvent("backend", "scanner", "ScanRequest");

        // Scanner calls concelier
        var toConcelier = _fixture.SendEvent("scanner", "concelier", "AdvisoryLookup");

        // Response chain
        var fromConcelier = _fixture.SendEvent("concelier", "scanner", "AdvisoryResponse");
        var fromScanner = _fixture.SendEvent("scanner", "backend", "ScanResponse");
        var fromBackend = _fixture.SendEvent("backend", "gateway", "BackendResponse");

        // Assert - HLC maintains ordering despite extreme clock skew
        _fixture.VerifyHlcOrdering().Should().BeTrue();

        // Response should always be after request in HLC terms
        fromConcelier.SourceHlcTimestamp.Should().BeGreaterThan(toConcelier.TargetHlcTimestamp);
        fromScanner.SourceHlcTimestamp.Should().BeGreaterThan(toScanner.TargetHlcTimestamp);
        fromBackend.SourceHlcTimestamp.Should().BeGreaterThan(toBackend.TargetHlcTimestamp);
    }

    [Fact]
    public void AllServices_DriftingClocks_MaintainsCausality()
    {
        // Arrange - Start synchronized
        _fixture.ResetAllClocks();

        var events = new List<CrossServiceEvent>();

        // Act - Simulate clock drift over time
        for (var round = 0; round < 10; round++)
        {
            // Apply random drift (deterministic: fixture seeds its RNG with 42)
            _fixture.ApplyRandomDrift(TimeSpan.FromSeconds(2));

            // Generate cross-service events
            events.Add(_fixture.SendEvent("gateway", "backend", $"Round{round}-1"));
            events.Add(_fixture.SendEvent("backend", "scanner", $"Round{round}-2"));
            events.Add(_fixture.SendEvent("scanner", "concelier", $"Round{round}-3"));
            events.Add(_fixture.SendEvent("concelier", "gateway", $"Round{round}-4"));

            // Advance base time
            _fixture.AdvanceAllTime(TimeSpan.FromMilliseconds(100));
        }

        // Assert - Despite drift, HLC ordering maintained
        _fixture.VerifyHlcOrdering().Should().BeTrue();
    }

    #endregion

    #region Edge Cases

    [Fact]
    public void Services_IdenticalTimestamps_StillUnique()
    {
        // Arrange - All services at exactly the same time
        _fixture.ResetAllClocks();

        // Act - All services generate events "simultaneously"
        var events = new List<CrossServiceEvent>
        {
            _fixture.SendEvent("scanner", "concelier", "Msg1"),
            _fixture.SendEvent("concelier", "gateway", "Msg2"),
            _fixture.SendEvent("gateway", "backend", "Msg3"),
            _fixture.SendEvent("backend", "scanner", "Msg4")
        };

        // Assert - All timestamps unique
        var allTimestamps = events
            .SelectMany(e => new[] { e.SourceHlcTimestamp, e.TargetHlcTimestamp })
            .ToList();

        allTimestamps.Should().OnlyHaveUniqueItems();
    }

    [Fact]
    public void Services_ExtremeSkew_60Seconds_HandledCorrectly()
    {
        // Arrange - Extreme 60 second skew
        _fixture.SetServiceClockSkew("scanner", TimeSpan.FromSeconds(60));
        _fixture.SetServiceClockSkew("backend", TimeSpan.FromSeconds(-60));

        // Act
        var evt1 = _fixture.SendEvent("scanner", "backend", "ExtremeSkew1");
        var evt2 = _fixture.SendEvent("backend", "scanner", "ExtremeSkew2");

        // Assert - HLC still maintains ordering
        _fixture.VerifyHlcOrdering().Should().BeTrue();

        // Maximum observed skew should reflect the clock difference.
        // Total wall-clock gap is 60 + 60 = 120 sec; asserting > 100 sec leaves
        // slack for any fixture-side time advancement during the exchange.
        var maxSkew = _fixture.GetMaxObservedSkew();
        maxSkew.Should().BeGreaterThan(TimeSpan.FromSeconds(100)); // 60 + 60 = 120 sec difference
    }

    [Fact]
    public void Services_ClockJump_Forward_Handled()
    {
        // Arrange
        _fixture.ResetAllClocks();

        var evt1 = _fixture.SendEvent("scanner", "concelier", "BeforeJump");
        var ts1 = evt1.TargetHlcTimestamp;

        // Clock jumps forward 10 seconds on scanner
        _fixture.AdvanceServiceTime("scanner", TimeSpan.FromSeconds(10));

        // Act
        var evt2 = _fixture.SendEvent("scanner", "concelier", "AfterJump");
        var ts2 = evt2.SourceHlcTimestamp;

        // Assert - New timestamp should be ahead
        ts2.Should().BeGreaterThan(ts1);
    }

    [Fact]
    public void Services_ClockJump_Backward_HandledByHLC()
    {
        // Arrange - Scanner at +10 seconds
        _fixture.SetServiceClockSkew("scanner", TimeSpan.FromSeconds(10));
        _fixture.SetServiceClockSkew("concelier", TimeSpan.Zero);

        var evt1 = _fixture.SendEvent("scanner", "concelier", "HighTime");

        // "Fix" scanner's clock by moving it back (simulating NTP correction)
        _fixture.SetServiceClockSkew("scanner", TimeSpan.Zero);

        // Act - Scanner continues operating
        var evt2 = _fixture.SendEvent("scanner", "concelier", "NormalTime");

        // Assert - HLC ensures monotonicity despite wall clock going backwards
        _fixture.VerifyHlcOrdering().Should().BeTrue();
        evt2.SourceHlcTimestamp.Should().BeGreaterThan(evt1.SourceHlcTimestamp);
    }

    #endregion

    #region Workflow Simulation Tests

    [Fact]
    public void FullScanWorkflow_WithSkew_MaintainsOrdering()
    {
        // Arrange - Realistic skew scenario (sub-second offsets in both directions)
        _fixture.SetServiceClockSkew("gateway", TimeSpan.FromMilliseconds(100));
        _fixture.SetServiceClockSkew("backend", TimeSpan.FromMilliseconds(-50));
        _fixture.SetServiceClockSkew("scanner", TimeSpan.FromMilliseconds(200));
        _fixture.SetServiceClockSkew("concelier", TimeSpan.FromMilliseconds(-100));

        // Act - Simulate full scan workflow
        // 1. Gateway receives scan request
        var step1 = _fixture.SendEvent("gateway", "backend", "ScanRequest", "image:tag");

        // 2. Backend dispatches to scanner
        _fixture.AdvanceServiceTime("backend", TimeSpan.FromMilliseconds(10));
        var step2 = _fixture.SendEvent("backend", "scanner", "DispatchScan");

        // 3. Scanner queries advisories
        _fixture.AdvanceServiceTime("scanner", TimeSpan.FromMilliseconds(50));
        var step3 = _fixture.SendEvent("scanner", "concelier", "AdvisoryQuery");

        // 4. Concelier responds
        _fixture.AdvanceServiceTime("concelier", TimeSpan.FromMilliseconds(20));
        var step4 = _fixture.SendEvent("concelier", "scanner", "AdvisoryResponse");

        // 5. Scanner completes
        _fixture.AdvanceServiceTime("scanner", TimeSpan.FromMilliseconds(30));
        var step5 = _fixture.SendEvent("scanner", "backend", "ScanComplete");

        // 6. Backend responds to gateway
        _fixture.AdvanceServiceTime("backend", TimeSpan.FromMilliseconds(10));
        var step6 = _fixture.SendEvent("backend", "gateway", "ScanResult");

        // Assert - Full causal chain maintained
        _fixture.VerifyHlcOrdering().Should().BeTrue();

        // Verify causal chain
        step1.TargetHlcTimestamp.Should().BeLessThan(step2.SourceHlcTimestamp);
        step2.TargetHlcTimestamp.Should().BeLessThan(step3.SourceHlcTimestamp);
        step3.TargetHlcTimestamp.Should().BeLessThan(step4.SourceHlcTimestamp);
        step4.TargetHlcTimestamp.Should().BeLessThan(step5.SourceHlcTimestamp);
        step5.TargetHlcTimestamp.Should().BeLessThan(step6.SourceHlcTimestamp);
    }

    [Fact]
    public void ConcurrentWorkflows_WithSkew_AllMaintainOrdering()
    {
        // Arrange - Set up skew
        _fixture.SetServiceClockSkew("gateway", TimeSpan.FromSeconds(1));
        _fixture.SetServiceClockSkew("backend", TimeSpan.FromSeconds(-1));
        _fixture.SetServiceClockSkew("scanner", TimeSpan.FromSeconds(2));
        _fixture.SetServiceClockSkew("concelier", TimeSpan.FromSeconds(-2));

        var allEvents = new List<CrossServiceEvent>();

        // Act - Run 5 concurrent workflows
        // NOTE(review): "concurrent" here means interleaved sequential workflows,
        // not multi-threaded execution — events are generated on one thread.
        for (var workflow = 0; workflow < 5; workflow++)
        {
            allEvents.Add(_fixture.SendEvent("gateway", "backend", $"Workflow{workflow}-Start"));
            allEvents.Add(_fixture.SendEvent("backend", "scanner", $"Workflow{workflow}-Scan"));
            allEvents.Add(_fixture.SendEvent("scanner", "concelier", $"Workflow{workflow}-Lookup"));
            allEvents.Add(_fixture.SendEvent("concelier", "scanner", $"Workflow{workflow}-Data"));
            allEvents.Add(_fixture.SendEvent("scanner", "backend", $"Workflow{workflow}-Done"));
            allEvents.Add(_fixture.SendEvent("backend", "gateway", $"Workflow{workflow}-Complete"));
        }

        // Assert - All events maintain HLC ordering
        _fixture.VerifyHlcOrdering().Should().BeTrue();

        // All timestamps should be unique
        var allTimestamps = allEvents
            .SelectMany(e => new[] { e.SourceHlcTimestamp, e.TargetHlcTimestamp })
            .ToList();

        allTimestamps.Should().OnlyHaveUniqueItems();
    }

    #endregion
}
|
||||
@@ -0,0 +1,365 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ClockSkewServiceFixture.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 3.2
|
||||
// Description: Test fixture for simulating clock skew across multiple services
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Immutable;
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.HybridLogicalClock;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.ClockSkew.Fixtures;
|
||||
|
||||
/// <summary>
/// Test fixture that simulates multiple services with independent clocks.
/// Allows testing cross-service interactions under various clock skew conditions.
/// Registers four default services (scanner, concelier, gateway, backend) on init.
/// </summary>
public sealed class ClockSkewServiceFixture : IAsyncLifetime
{
    // Single deterministic base time shared by CreateService, SetServiceClockSkew
    // and ResetAllClocks. Previously this literal was duplicated inline in all three
    // methods, risking silent divergence if one copy was edited.
    private static readonly DateTimeOffset BaseTime = new(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);

    private readonly ConcurrentDictionary<string, ServiceClock> _services = new();
    private readonly ConcurrentBag<CrossServiceEvent> _eventLog = [];
    private readonly Random _random = new(42); // Deterministic seed
    private long _globalEventSequence;

    /// <summary>
    /// Gets the event log containing all cross-service events.
    /// Snapshot copy; ordering is not guaranteed (backed by a ConcurrentBag).
    /// </summary>
    public IReadOnlyCollection<CrossServiceEvent> EventLog => _eventLog.ToImmutableArray();

    /// <summary>
    /// Gets all registered services (snapshot copy).
    /// </summary>
    public IReadOnlyDictionary<string, ServiceClock> Services => _services.ToImmutableDictionary();

    /// <inheritdoc />
    public ValueTask InitializeAsync()
    {
        // Create default services, all starting at BaseTime with zero skew.
        CreateService("scanner", TimeSpan.Zero);
        CreateService("concelier", TimeSpan.Zero);
        CreateService("gateway", TimeSpan.Zero);
        CreateService("backend", TimeSpan.Zero);

        return ValueTask.CompletedTask;
    }

    /// <inheritdoc />
    public ValueTask DisposeAsync()
    {
        foreach (var service in _services.Values)
        {
            service.Dispose();
        }
        _services.Clear();
        return ValueTask.CompletedTask;
    }

    /// <summary>
    /// Creates a new service with the specified clock offset.
    /// Replaces any existing service registered under the same id.
    /// </summary>
    /// <param name="serviceId">Service identifier.</param>
    /// <param name="clockOffset">Offset from base time (positive = ahead, negative = behind).</param>
    /// <returns>The created service clock.</returns>
    public ServiceClock CreateService(string serviceId, TimeSpan clockOffset)
    {
        var serviceTime = BaseTime + clockOffset;

        var timeProvider = new FakeTimeProvider(serviceTime);
        var stateStore = new InMemoryHlcStateStore();
        var hlc = new HybridLogicalClock.HybridLogicalClock(
            timeProvider,
            serviceId,
            stateStore,
            NullLogger<HybridLogicalClock.HybridLogicalClock>.Instance,
            TimeSpan.FromMinutes(1));

        var service = new ServiceClock(serviceId, timeProvider, hlc, clockOffset);
        _services[serviceId] = service;
        return service;
    }

    /// <summary>
    /// Sets clock skew for a service, repositioning its wall clock to BaseTime + skew.
    /// Note: this is absolute (relative to BaseTime), not cumulative with prior skews
    /// or with time advanced via <see cref="AdvanceServiceTime"/>.
    /// </summary>
    public void SetServiceClockSkew(string serviceId, TimeSpan skew)
    {
        var service = GetService(serviceId);
        service.TimeProvider.SetUtcNow(BaseTime + skew);
        service.ClockOffset = skew;
    }

    /// <summary>
    /// Applies random clock drift to all services within the specified bounds.
    /// Drift is uniformly distributed in [-maxDrift, +maxDrift] per service and is
    /// deterministic across runs (seeded RNG). ClockOffset is intentionally not
    /// updated here; it records the last explicitly-set skew only.
    /// </summary>
    public void ApplyRandomDrift(TimeSpan maxDrift)
    {
        foreach (var service in _services.Values)
        {
            var driftMs = (_random.NextDouble() * 2 - 1) * maxDrift.TotalMilliseconds;
            var drift = TimeSpan.FromMilliseconds(driftMs);
            service.TimeProvider.Advance(drift);
        }
    }

    /// <summary>
    /// Simulates a cross-service call from source to target: the source ticks its HLC,
    /// the target receives that timestamp, and the exchange is recorded in the event log.
    /// </summary>
    /// <param name="sourceService">Id of the sending service.</param>
    /// <param name="targetService">Id of the receiving service.</param>
    /// <param name="eventType">Logical event name for diagnostics.</param>
    /// <param name="payload">Optional opaque payload recorded on the event.</param>
    /// <returns>The recorded cross-service event.</returns>
    public CrossServiceEvent SendEvent(
        string sourceService,
        string targetService,
        string eventType,
        string? payload = null)
    {
        var source = GetService(sourceService);
        var target = GetService(targetService);

        // Source generates timestamp
        var sourceTimestamp = source.HlcService.Tick();
        var sourceWallTime = source.TimeProvider.GetUtcNow();

        // Target receives and generates its timestamp
        var targetTimestamp = target.HlcService.Receive(sourceTimestamp);
        var targetWallTime = target.TimeProvider.GetUtcNow();

        var eventSeq = Interlocked.Increment(ref _globalEventSequence);

        var evt = new CrossServiceEvent
        {
            Sequence = eventSeq,
            SourceService = sourceService,
            TargetService = targetService,
            EventType = eventType,
            Payload = payload,
            SourceHlcTimestamp = sourceTimestamp,
            TargetHlcTimestamp = targetTimestamp,
            SourceWallTime = sourceWallTime,
            TargetWallTime = targetWallTime,
            SourceClockOffset = source.ClockOffset,
            TargetClockOffset = target.ClockOffset
        };

        _eventLog.Add(evt);
        return evt;
    }

    /// <summary>
    /// Simulates service generating a local event (HLC tick with no remote receive).
    /// Local events are not added to the cross-service event log.
    /// </summary>
    public LocalServiceEvent GenerateLocalEvent(string serviceId, string eventType, string? payload = null)
    {
        var service = GetService(serviceId);
        var hlcTimestamp = service.HlcService.Tick();
        var wallTime = service.TimeProvider.GetUtcNow();

        return new LocalServiceEvent
        {
            ServiceId = serviceId,
            EventType = eventType,
            Payload = payload,
            HlcTimestamp = hlcTimestamp,
            WallTime = wallTime,
            ClockOffset = service.ClockOffset
        };
    }

    /// <summary>
    /// Advances time for all services by the specified duration.
    /// </summary>
    public void AdvanceAllTime(TimeSpan duration)
    {
        foreach (var service in _services.Values)
        {
            service.TimeProvider.Advance(duration);
        }
    }

    /// <summary>
    /// Advances time for a specific service.
    /// </summary>
    public void AdvanceServiceTime(string serviceId, TimeSpan duration)
    {
        var service = GetService(serviceId);
        service.TimeProvider.Advance(duration);
    }

    /// <summary>
    /// Verifies that all events in the log maintain causal ordering based on HLC timestamps.
    /// </summary>
    /// <returns>True when every event's target HLC is strictly after its source HLC.</returns>
    public bool VerifyHlcOrdering()
    {
        var events = _eventLog.ToList();

        foreach (var evt in events)
        {
            // For cross-service events, target HLC should be > source HLC
            if (evt.TargetHlcTimestamp <= evt.SourceHlcTimestamp)
            {
                return false;
            }
        }

        return true;
    }

    /// <summary>
    /// Gets the maximum absolute wall clock difference observed across any recorded event.
    /// Returns <see cref="TimeSpan.Zero"/> when the log is empty.
    /// </summary>
    public TimeSpan GetMaxObservedSkew()
    {
        var events = _eventLog.ToList();
        if (events.Count == 0) return TimeSpan.Zero;

        var maxSkew = TimeSpan.Zero;
        foreach (var evt in events)
        {
            // Duration() takes the absolute value so direction of skew is irrelevant.
            var skew = (evt.TargetWallTime - evt.SourceWallTime).Duration();
            if (skew > maxSkew) maxSkew = skew;
        }

        return maxSkew;
    }

    /// <summary>
    /// Clears the event log.
    /// </summary>
    public void ClearEventLog()
    {
        _eventLog.Clear();
    }

    /// <summary>
    /// Resets all service clocks to base time with no offset.
    /// Note: HLC state is NOT reset, which preserves monotonicity across tests.
    /// </summary>
    public void ResetAllClocks()
    {
        foreach (var service in _services.Values)
        {
            service.TimeProvider.SetUtcNow(BaseTime);
            service.ClockOffset = TimeSpan.Zero;
        }
    }

    // Looks up a registered service, throwing a descriptive error for unknown ids.
    private ServiceClock GetService(string serviceId)
    {
        return _services.TryGetValue(serviceId, out var service)
            ? service
            : throw new ArgumentException($"Service '{serviceId}' not found", nameof(serviceId));
    }
}
|
||||
|
||||
/// <summary>
/// Represents a service with its own clock for testing.
/// Bundles a service id with its fake wall clock, its HLC instance, and the
/// last explicitly-set clock offset.
/// </summary>
public sealed class ServiceClock : IDisposable
{
    /// <summary>
    /// Creates a service clock wrapper.
    /// </summary>
    /// <param name="serviceId">Unique service identifier (e.g. "scanner").</param>
    /// <param name="timeProvider">Fake wall clock owned by this service.</param>
    /// <param name="hlcService">HLC instance driven by <paramref name="timeProvider"/>.</param>
    /// <param name="clockOffset">Initial offset from the fixture's base time.</param>
    public ServiceClock(
        string serviceId,
        FakeTimeProvider timeProvider,
        IHybridLogicalClock hlcService,
        TimeSpan clockOffset)
    {
        ServiceId = serviceId;
        TimeProvider = timeProvider;
        HlcService = hlcService;
        ClockOffset = clockOffset;
    }

    /// <summary>
    /// Gets the service identifier.
    /// </summary>
    public string ServiceId { get; }

    /// <summary>
    /// Gets the fake time provider for this service.
    /// </summary>
    public FakeTimeProvider TimeProvider { get; }

    /// <summary>
    /// Gets the HLC service for this service.
    /// </summary>
    public IHybridLogicalClock HlcService { get; }

    /// <summary>
    /// Gets or sets the clock offset from base time.
    /// Only updated on explicit skew changes, not by drift/advance operations.
    /// </summary>
    public TimeSpan ClockOffset { get; set; }

    /// <summary>
    /// No-op today; implemented so the fixture can dispose services uniformly
    /// if the HLC or time provider ever acquires resources.
    /// </summary>
    public void Dispose()
    {
        // Cleanup if needed
    }
}
|
||||
|
||||
/// <summary>
/// Fake time provider for deterministic testing.
/// Wall-clock time is frozen and only moves via <see cref="Advance"/> or
/// <see cref="SetUtcNow"/>. Thread-safe.
/// </summary>
public sealed class FakeTimeProvider : TimeProvider
{
    private DateTimeOffset _utcNow;
    private readonly object _lock = new();

    /// <summary>
    /// Creates a fake provider starting at <paramref name="initialTime"/>.
    /// When null, falls back to the real current time — callers that need
    /// determinism should always pass an explicit time.
    /// </summary>
    public FakeTimeProvider(DateTimeOffset? initialTime = null)
    {
        _utcNow = initialTime ?? DateTimeOffset.UtcNow;
    }

    /// <inheritdoc />
    public override DateTimeOffset GetUtcNow()
    {
        lock (_lock)
        {
            return _utcNow;
        }
    }

    /// <summary>
    /// One timestamp unit per <see cref="TimeSpan"/> tick (100 ns).
    /// </summary>
    public override long TimestampFrequency => TimeSpan.TicksPerSecond;

    /// <summary>
    /// Derives the high-resolution timestamp from the fake wall clock.
    /// Bug fix: the base <see cref="TimeProvider"/> implementation falls back to
    /// the real Stopwatch clock, so GetElapsedTime measurements were
    /// non-deterministic and ignored Advance/SetUtcNow entirely.
    /// </summary>
    public override long GetTimestamp()
    {
        lock (_lock)
        {
            return _utcNow.UtcTicks;
        }
    }

    /// <summary>
    /// Moves the fake clock by <paramref name="duration"/> (negative values move it backwards).
    /// </summary>
    public void Advance(TimeSpan duration)
    {
        lock (_lock)
        {
            _utcNow = _utcNow.Add(duration);
        }
    }

    /// <summary>
    /// Sets the fake clock to an absolute instant.
    /// </summary>
    public void SetUtcNow(DateTimeOffset time)
    {
        lock (_lock)
        {
            _utcNow = time;
        }
    }
}
|
||||
|
||||
/// <summary>
/// Represents a cross-service event for testing: one send/receive exchange
/// between two simulated services, capturing both HLC and wall-clock views.
/// </summary>
public sealed record CrossServiceEvent
{
    /// <summary>Global monotonically increasing sequence number assigned at send time.</summary>
    public required long Sequence { get; init; }

    /// <summary>Id of the sending service.</summary>
    public required string SourceService { get; init; }

    /// <summary>Id of the receiving service.</summary>
    public required string TargetService { get; init; }

    /// <summary>Logical event name (diagnostic only).</summary>
    public required string EventType { get; init; }

    /// <summary>Optional opaque payload (diagnostic only).</summary>
    public string? Payload { get; init; }

    /// <summary>HLC timestamp produced by the sender's Tick().</summary>
    public required HlcTimestamp SourceHlcTimestamp { get; init; }

    /// <summary>HLC timestamp produced by the receiver's Receive(); strictly after the source HLC.</summary>
    public required HlcTimestamp TargetHlcTimestamp { get; init; }

    /// <summary>Sender's (possibly skewed) wall clock at send time.</summary>
    public required DateTimeOffset SourceWallTime { get; init; }

    /// <summary>Receiver's (possibly skewed) wall clock at receive time.</summary>
    public required DateTimeOffset TargetWallTime { get; init; }

    /// <summary>Sender's configured skew from base time when the event was sent.</summary>
    public required TimeSpan SourceClockOffset { get; init; }

    /// <summary>Receiver's configured skew from base time when the event was received.</summary>
    public required TimeSpan TargetClockOffset { get; init; }
}
|
||||
|
||||
/// <summary>
/// Represents a local service event for testing: a single HLC tick on one
/// service with no cross-service exchange involved.
/// </summary>
public sealed record LocalServiceEvent
{
    /// <summary>Id of the service that generated the event.</summary>
    public required string ServiceId { get; init; }

    /// <summary>Logical event name (diagnostic only).</summary>
    public required string EventType { get; init; }

    /// <summary>Optional opaque payload (diagnostic only).</summary>
    public string? Payload { get; init; }

    /// <summary>HLC timestamp produced by the service's Tick().</summary>
    public required HlcTimestamp HlcTimestamp { get; init; }

    /// <summary>The service's (possibly skewed) wall clock at event time.</summary>
    public required DateTimeOffset WallTime { get; init; }

    /// <summary>The service's configured skew from base time at event time.</summary>
    public required TimeSpan ClockOffset { get; init; }
}
|
||||
@@ -0,0 +1,41 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<!-- Enable xUnit v3 (project doesn't end with .Tests so needs explicit opt-in) -->
|
||||
<UseXunitV3>true</UseXunitV3>
|
||||
<!-- Suppress xUnit analyzer warnings (same as Directory.Build.props does for .Tests projects) -->
|
||||
<NoWarn>$(NoWarn);xUnit1031;xUnit1041;xUnit1051;xUnit1026;xUnit1013;xUnit2013;xUnit3003</NoWarn>
|
||||
</PropertyGroup>
|
||||
|
||||
<!-- Sprint: Testing Enhancement Advisory - Phase 3.2 -->
|
||||
<!-- Description: Cross-service clock skew integration tests -->
|
||||
|
||||
<ItemGroup>
|
||||
<!-- xUnit packages (project doesn't end with .Tests so must be explicit) -->
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="FluentAssertions" />
|
||||
<PackageReference Include="Moq" />
|
||||
<PackageReference Include="coverlet.collector">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.HybridLogicalClock\StellaOps.HybridLogicalClock.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -1,7 +1,7 @@
|
||||
# StellaOps.Integration.Determinism Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -6,6 +6,7 @@ using System.Net.Http.Json;
|
||||
using Microsoft.AspNetCore.Mvc.Testing;
|
||||
using StellaOps.ReachGraph.Schema;
|
||||
using StellaOps.Scanner.CallGraph;
|
||||
using StellaOps.Scanner.Contracts;
|
||||
using StellaOps.Scanner.Reachability;
|
||||
using Xunit;
|
||||
|
||||
|
||||
@@ -16,6 +16,8 @@
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
<!-- Suppress xUnit1051: E2E integration tests don't need responsive cancellation -->
|
||||
<NoWarn>$(NoWarn);xUnit1051</NoWarn>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup> <PackageReference Include="xunit.v3" />
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# StellaOps.Integration.E2E Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -0,0 +1,264 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// DistributedHlcTests.cs
|
||||
// Integration tests for multi-node HLC scenarios
|
||||
// Sprint: Testing Enhancement Advisory - Phase 1.2
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.Integration.HLC.Fixtures;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.HLC;
|
||||
|
||||
/// <summary>
/// Integration tests for distributed HLC scenarios with multiple nodes.
/// The fixture is shared across all tests in this class via xUnit's
/// IClassFixture, so every test registers its own uniquely named nodes
/// (CreateNode throws on duplicates) and the fixture's event log accumulates
/// across tests in this class.
/// </summary>
[Trait("Category", TestCategories.Integration)]
[Trait("Category", TestCategories.HLC)]
public class DistributedHlcTests : IClassFixture<MultiNodeHlcFixture>
{
    // Shared multi-node harness: owns per-node clocks, fake time providers,
    // the partition simulator, and the cumulative event log.
    private readonly MultiNodeHlcFixture _fixture;

    public DistributedHlcTests(MultiNodeHlcFixture fixture)
    {
        _fixture = fixture;
    }

    #region Multi-Node Causal Ordering Tests

    [Fact]
    public async Task ThreeNode_ConcurrentTicks_MaintainCausalOrder()
    {
        // Arrange - Create 3 nodes with synchronized time
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        _fixture.CreateNode("node-a", baseTime);
        _fixture.CreateNode("node-b", baseTime);
        _fixture.CreateNode("node-c", baseTime);

        // Act - Each node generates ticks
        var timestamps = new List<HybridLogicalClock.HlcTimestamp>();

        // Concurrent ticks at same physical time
        timestamps.Add(_fixture.Tick("node-a"));
        timestamps.Add(_fixture.Tick("node-b"));
        timestamps.Add(_fixture.Tick("node-c"));

        // More ticks
        _fixture.AdvanceAllTime(TimeSpan.FromMilliseconds(1));
        timestamps.Add(_fixture.Tick("node-a"));
        timestamps.Add(_fixture.Tick("node-b"));
        timestamps.Add(_fixture.Tick("node-c"));

        // Assert - All timestamps should be unique
        timestamps.Should().OnlyHaveUniqueItems();

        // Timestamps from same node should be monotonically increasing
        var nodeATimestamps = timestamps.Where(t => t.NodeId == "node-a").ToList();
        nodeATimestamps.Should().BeInAscendingOrder();

        var nodeBTimestamps = timestamps.Where(t => t.NodeId == "node-b").ToList();
        nodeBTimestamps.Should().BeInAscendingOrder();

        var nodeCTimestamps = timestamps.Where(t => t.NodeId == "node-c").ToList();
        nodeCTimestamps.Should().BeInAscendingOrder();

        // Satisfies the async Task signature; this test performs no real awaits.
        await Task.CompletedTask;
    }

    [Fact]
    public async Task TwoNode_MessageExchange_PreservesCausality()
    {
        // Arrange
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        _fixture.CreateNode("sender", baseTime);
        _fixture.CreateNode("receiver", baseTime);

        // Act - Sender creates event
        var senderTs1 = _fixture.Tick("sender");

        // Receiver gets the message
        var receiverTs1 = await _fixture.SendMessageAsync("sender", "receiver", senderTs1);

        // Receiver generates new event
        var receiverTs2 = _fixture.Tick("receiver");

        // Assert - Causal ordering preserved
        receiverTs1.Should().BeGreaterThan(senderTs1);
        receiverTs2.Should().BeGreaterThan(receiverTs1);
    }

    [Fact]
    public async Task FiveNode_Broadcast_AllNodesAdvance()
    {
        // Arrange
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        for (var i = 0; i < 5; i++)
        {
            _fixture.CreateNode($"node-{i}", baseTime);
        }

        // Act - Node 0 broadcasts
        var originTs = _fixture.Tick("node-0");
        var results = await _fixture.BroadcastAsync("node-0", originTs);

        // Assert - All 4 other nodes received and advanced their clocks
        // NOTE(review): BroadcastAsync targets every node created on the shared
        // fixture; count 4 holds only if no earlier test's nodes linger — the
        // unique per-test name prefixes ("node-N" here) are what keep this safe.
        results.Should().HaveCount(4);

        foreach (var (nodeId, receivedTs) in results)
        {
            receivedTs.Should().BeGreaterThan(originTs,
                $"Node {nodeId} should have advanced past origin timestamp");
        }
    }

    [Fact]
    public async Task ThreeNode_ChainedMessages_MaintainTransitiveCausality()
    {
        // Arrange - A -> B -> C chain
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        _fixture.CreateNode("chain-a", baseTime);
        _fixture.CreateNode("chain-b", baseTime);
        _fixture.CreateNode("chain-c", baseTime);

        // Act - Chain of messages
        var tsA = _fixture.Tick("chain-a");
        var tsB = await _fixture.SendMessageAsync("chain-a", "chain-b", tsA);
        var tsC = await _fixture.SendMessageAsync("chain-b", "chain-c", tsB);

        // Assert - Transitive causality: A < B < C
        tsA.Should().BeLessThan(tsB);
        tsB.Should().BeLessThan(tsC);
    }

    #endregion

    #region Clock Skew Tests

    [Fact]
    public async Task TwoNode_ClockSkew_StillMaintainsOrdering()
    {
        // Arrange - Node B is 5 seconds ahead
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        _fixture.CreateNode("slow-node", baseTime);
        _fixture.CreateNode("fast-node", baseTime.AddSeconds(5)); // 5 seconds ahead

        // Act - Fast node sends to slow node
        var fastTs = _fixture.Tick("fast-node");
        var slowReceived = await _fixture.SendMessageAsync("fast-node", "slow-node", fastTs);

        // Slow node generates new event
        var slowTs = _fixture.Tick("slow-node");

        // Assert - Despite clock skew, ordering is maintained
        slowReceived.Should().BeGreaterThan(fastTs);
        slowTs.Should().BeGreaterThan(slowReceived);
    }

    [Fact]
    public async Task ThreeNode_VariableClockSkew_EventualConsistency()
    {
        // Arrange - Nodes with different clock skews
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        _fixture.CreateNode("sync-a", baseTime);
        _fixture.CreateNode("sync-b", baseTime.AddSeconds(2)); // 2 sec ahead
        _fixture.CreateNode("sync-c", baseTime.AddSeconds(-3)); // 3 sec behind

        // Act - Exchange messages
        var tsA = _fixture.Tick("sync-a");
        await _fixture.SendMessageAsync("sync-a", "sync-b", tsA);
        await _fixture.SendMessageAsync("sync-a", "sync-c", tsA);

        // All nodes now generate events
        var tsA2 = _fixture.Tick("sync-a");
        var tsB2 = _fixture.Tick("sync-b");
        var tsC2 = _fixture.Tick("sync-c");

        // Assert - All new events should be after original
        tsA2.Should().BeGreaterThan(tsA);
        tsB2.Should().BeGreaterThan(tsA);
        tsC2.Should().BeGreaterThan(tsA);
    }

    #endregion

    #region High Frequency Tests

    [Fact]
    public void HighFrequency_RapidTicks_AllUnique()
    {
        // Arrange
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        _fixture.CreateNode("rapid-node", baseTime);

        // Act - Generate 1000 ticks rapidly (physical time never advances, so
        // uniqueness must come from the logical component of the HLC)
        var timestamps = new List<HybridLogicalClock.HlcTimestamp>();
        for (var i = 0; i < 1000; i++)
        {
            timestamps.Add(_fixture.Tick("rapid-node"));
        }

        // Assert - All unique and monotonically increasing
        timestamps.Should().OnlyHaveUniqueItems();
        timestamps.Should().BeInAscendingOrder();
    }

    [Fact]
    public async Task HighFrequency_ConcurrentNodes_NoConflicts()
    {
        // Arrange
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        for (var i = 0; i < 10; i++)
        {
            _fixture.CreateNode($"concurrent-{i}", baseTime);
        }

        // Act - All nodes tick 100 times each (ticks are interleaved
        // round-robin, advancing shared time 1ms between rounds)
        var allTimestamps = new List<HybridLogicalClock.HlcTimestamp>();
        for (var tick = 0; tick < 100; tick++)
        {
            for (var node = 0; node < 10; node++)
            {
                allTimestamps.Add(_fixture.Tick($"concurrent-{node}"));
            }
            _fixture.AdvanceAllTime(TimeSpan.FromMilliseconds(1));
        }

        // Assert - All 1000 timestamps should be unique
        allTimestamps.Should().OnlyHaveUniqueItems();

        // Satisfies the async Task signature; this test performs no real awaits.
        await Task.CompletedTask;
    }

    #endregion

    #region Edge Cases

    [Fact]
    public async Task LargeCluster_TenNodes_ScalesCorrectly()
    {
        // Arrange
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        for (var i = 0; i < 10; i++)
        {
            _fixture.CreateNode($"cluster-{i:D2}", baseTime);
        }

        // Act - Simulate gossip-style message propagation
        var initialTs = _fixture.Tick("cluster-00");

        // Fan-out from node 0 to all others
        var firstWave = await _fixture.BroadcastAsync("cluster-00", initialTs);

        // Each node in first wave broadcasts to others
        foreach (var (nodeId, receivedTs) in firstWave)
        {
            await _fixture.BroadcastAsync(nodeId, receivedTs);
        }

        // Assert - Causal ordering maintained across all events
        // NOTE(review): VerifyCausalOrdering inspects the fixture-wide event
        // log, which also contains events from the other tests in this class
        // (the fixture is class-shared). That is fine for a per-node
        // monotonicity check, but confirm it is the intended scope.
        _fixture.VerifyCausalOrdering().Should().BeTrue();
    }

    #endregion
}
|
||||
@@ -0,0 +1,300 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// MultiNodeHlcFixture.cs
|
||||
// Test fixture for multi-node HLC testing scenarios
|
||||
// Sprint: Testing Enhancement Advisory - Phase 1.2
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using Microsoft.Extensions.Logging.Abstractions;
|
||||
using StellaOps.HybridLogicalClock;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.HLC.Fixtures;
|
||||
|
||||
/// <summary>
/// Test fixture that manages multiple HLC nodes with controllable time and network.
/// Each node owns its own clock, fake time provider, and state store; all
/// generated timestamps are appended to a shared, lock-guarded event log.
/// </summary>
public sealed class MultiNodeHlcFixture : IAsyncLifetime
{
    private readonly Dictionary<string, NodeContext> _nodes = [];
    private readonly NetworkPartitionSimulator _partitionSimulator = new();
    private readonly List<HlcTimestamp> _eventLog = [];
    private readonly object _eventLogLock = new();

    /// <summary>
    /// Gets the network partition simulator for controlling connectivity.
    /// </summary>
    public NetworkPartitionSimulator PartitionSimulator => _partitionSimulator;

    /// <summary>
    /// Gets a point-in-time snapshot of all logged events in order of occurrence.
    /// </summary>
    public IReadOnlyList<HlcTimestamp> EventLog
    {
        get
        {
            lock (_eventLogLock)
            {
                // Copy under the lock so callers never observe a mutating list.
                return _eventLog.ToList().AsReadOnly();
            }
        }
    }

    /// <inheritdoc/>
    public ValueTask InitializeAsync() => ValueTask.CompletedTask;

    /// <inheritdoc/>
    public ValueTask DisposeAsync()
    {
        _nodes.Clear();
        _partitionSimulator.HealAll();
        return ValueTask.CompletedTask;
    }

    /// <summary>
    /// Creates a new node with its own HLC instance backed by a fake time provider.
    /// </summary>
    /// <param name="nodeId">Unique node identifier.</param>
    /// <param name="initialTime">Initial wall clock for the node; defaults to the real UTC now.</param>
    /// <returns>The node's clock.</returns>
    /// <exception cref="ArgumentException">The id is blank or already registered.</exception>
    public IHybridLogicalClock CreateNode(string nodeId, DateTimeOffset? initialTime = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeId);

        if (_nodes.ContainsKey(nodeId))
        {
            throw new ArgumentException($"Node {nodeId} already exists", nameof(nodeId));
        }

        var timeProvider = new FakeTimeProvider(initialTime ?? DateTimeOffset.UtcNow);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock.HybridLogicalClock(
            timeProvider,
            nodeId,
            stateStore,
            NullLogger<HybridLogicalClock.HybridLogicalClock>.Instance,
            TimeSpan.FromMinutes(1));

        _nodes[nodeId] = new NodeContext(clock, timeProvider, stateStore, nodeId);
        return clock;
    }

    /// <summary>
    /// Gets the HLC instance for a node.
    /// </summary>
    /// <exception cref="ArgumentException">No node with that id exists.</exception>
    public IHybridLogicalClock GetNode(string nodeId) => GetContext(nodeId).Clock;

    /// <summary>
    /// Gets the time provider for a node (for advancing time).
    /// </summary>
    /// <exception cref="ArgumentException">No node with that id exists.</exception>
    public FakeTimeProvider GetTimeProvider(string nodeId) => GetContext(nodeId).TimeProvider;

    /// <summary>
    /// Advances time for a specific node.
    /// </summary>
    public void AdvanceTime(string nodeId, TimeSpan duration)
    {
        GetTimeProvider(nodeId).Advance(duration);
    }

    /// <summary>
    /// Advances time for all nodes uniformly.
    /// </summary>
    public void AdvanceAllTime(TimeSpan duration)
    {
        foreach (var context in _nodes.Values)
        {
            context.TimeProvider.Advance(duration);
        }
    }

    /// <summary>
    /// Sets absolute time for a node (for creating clock skew).
    /// </summary>
    public void SetTime(string nodeId, DateTimeOffset time)
    {
        GetTimeProvider(nodeId).SetUtcNow(time);
    }

    /// <summary>
    /// Generates a tick on a node and logs the event.
    /// </summary>
    public HlcTimestamp Tick(string nodeId)
    {
        var timestamp = GetNode(nodeId).Tick();
        LogEvent(timestamp);
        return timestamp;
    }

    /// <summary>
    /// Sends a message from one node to another (simulating distributed communication).
    /// Respects network partitions and latency, and honors <paramref name="ct"/>.
    /// </summary>
    /// <returns>The receiver's timestamp after merging the incoming one.</returns>
    /// <exception cref="NetworkPartitionException">The link between the nodes is partitioned.</exception>
    public async Task<HlcTimestamp> SendMessageAsync(
        string fromNode,
        string toNode,
        HlcTimestamp messageTimestamp,
        CancellationToken ct = default)
    {
        // Fix: the token was previously accepted but never observed.
        ct.ThrowIfCancellationRequested();

        // Check partition
        if (_partitionSimulator.IsPartitioned(fromNode, toNode))
        {
            throw new NetworkPartitionException(fromNode, toNode);
        }

        // Apply latency: instead of actually sleeping, advance the receiver's
        // fake clock so tests stay fast and deterministic.
        var latency = _partitionSimulator.GetLatency(fromNode, toNode);
        var delay = latency.ComputeDelay();
        if (delay > TimeSpan.Zero)
        {
            AdvanceTime(toNode, delay);
        }

        // Receiver merges the incoming timestamp into its clock.
        var newTimestamp = GetNode(toNode).Receive(messageTimestamp);
        LogEvent(newTimestamp);

        // The await keeps the method genuinely async-shaped (no CS1998) while
        // preserving faulted-task exception delivery for callers.
        return await Task.FromResult(newTimestamp);
    }

    /// <summary>
    /// Broadcasts a message from one node to all others.
    /// Returns timestamps from nodes that received the message; partitioned
    /// nodes are silently skipped.
    /// </summary>
    public async Task<Dictionary<string, HlcTimestamp>> BroadcastAsync(
        string fromNode,
        HlcTimestamp messageTimestamp,
        CancellationToken ct = default)
    {
        var results = new Dictionary<string, HlcTimestamp>();

        foreach (var nodeId in _nodes.Keys)
        {
            if (nodeId == fromNode)
            {
                continue;
            }

            try
            {
                var received = await SendMessageAsync(fromNode, nodeId, messageTimestamp, ct);
                results[nodeId] = received;
            }
            catch (NetworkPartitionException)
            {
                // Node is partitioned, skip — broadcast is best-effort.
            }
        }

        return results;
    }

    /// <summary>
    /// Gets all node IDs in the cluster.
    /// </summary>
    public IReadOnlyList<string> GetNodeIds() => _nodes.Keys.ToList().AsReadOnly();

    /// <summary>
    /// Verifies that every event is strictly greater than the previous event
    /// recorded for the same node. Runs in a single O(n) pass.
    /// </summary>
    public bool VerifyCausalOrdering()
    {
        lock (_eventLogLock)
        {
            // Track the latest timestamp seen per node. The previous
            // implementation re-scanned the whole log prefix for every event
            // (O(n^2)) and treated a default-valued timestamp as "no previous
            // event", which would skip the comparison for a legitimate event
            // that happened to equal default.
            var lastByNode = new Dictionary<string, HlcTimestamp>();
            foreach (var current in _eventLog)
            {
                if (lastByNode.TryGetValue(current.NodeId, out var previous) && current <= previous)
                {
                    return false;
                }

                lastByNode[current.NodeId] = current;
            }

            return true;
        }
    }

    /// <summary>
    /// Clears the event log.
    /// </summary>
    public void ClearEventLog()
    {
        lock (_eventLogLock)
        {
            _eventLog.Clear();
        }
    }

    // Resolves a node's context, throwing the same ArgumentException the
    // original per-method lookups threw (shared by GetNode/GetTimeProvider).
    private NodeContext GetContext(string nodeId)
    {
        if (!_nodes.TryGetValue(nodeId, out var context))
        {
            throw new ArgumentException($"Node {nodeId} does not exist", nameof(nodeId));
        }

        return context;
    }

    // Appends a timestamp to the shared event log under its lock.
    private void LogEvent(HlcTimestamp timestamp)
    {
        lock (_eventLogLock)
        {
            _eventLog.Add(timestamp);
        }
    }

    // Per-node bundle of clock, controllable time source, and state store.
    private sealed record NodeContext(
        IHybridLogicalClock Clock,
        FakeTimeProvider TimeProvider,
        InMemoryHlcStateStore StateStore,
        string NodeId);
}
|
||||
|
||||
/// <summary>
/// Deterministic <see cref="TimeProvider"/> whose UTC clock only moves when a
/// test explicitly advances or sets it.
/// </summary>
public sealed class FakeTimeProvider : TimeProvider
{
    private readonly object _gate = new();
    private DateTimeOffset _now;

    /// <summary>
    /// Starts the clock at <paramref name="initialTime"/>, or at the real UTC
    /// now when no initial time is given.
    /// </summary>
    public FakeTimeProvider(DateTimeOffset? initialTime = null)
        => _now = initialTime ?? DateTimeOffset.UtcNow;

    /// <inheritdoc/>
    public override DateTimeOffset GetUtcNow()
    {
        lock (_gate)
        {
            return _now;
        }
    }

    /// <summary>
    /// Moves the clock forward by <paramref name="duration"/>.
    /// </summary>
    public void Advance(TimeSpan duration)
    {
        lock (_gate)
        {
            _now += duration;
        }
    }

    /// <summary>
    /// Jumps the clock to the absolute instant <paramref name="time"/>.
    /// </summary>
    public void SetUtcNow(DateTimeOffset time)
    {
        lock (_gate)
        {
            _now = time;
        }
    }
}
|
||||
@@ -0,0 +1,230 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// NetworkPartitionSimulator.cs
|
||||
// Simulates network partitions between distributed nodes for testing
|
||||
// Sprint: Testing Enhancement Advisory - Phase 1.2
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Integration.HLC.Fixtures;
|
||||
|
||||
/// <summary>
/// Simulates network partitions between nodes for distributed testing scenarios.
/// Partitions are symmetric: partitioning A from B blocks traffic in both
/// directions, and isolating a node blocks it from every peer.
/// </summary>
public sealed class NetworkPartitionSimulator
{
    // Special marker meaning "isolated from every other node".
    private const string IsolationMarker = "*";

    // All state is guarded by _lock. The original mixed ConcurrentDictionary
    // with an external lock for partitions (redundant) while leaving the
    // latency map entirely unguarded (inconsistent); plain dictionaries under
    // a single lock give one coherent synchronization story.
    private readonly Dictionary<string, HashSet<string>> _partitions = new();
    private readonly Dictionary<(string From, string To), LatencyConfig> _latencies = new();
    private readonly object _lock = new();

    /// <summary>
    /// Isolates a node from all other nodes (full partition).
    /// </summary>
    public void IsolateNode(string nodeId)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeId);

        lock (_lock)
        {
            _partitions[nodeId] = [IsolationMarker];
        }
    }

    /// <summary>
    /// Partitions communication between two specific nodes (both directions).
    /// </summary>
    public void PartitionNodes(string nodeA, string nodeB)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeA);
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeB);

        lock (_lock)
        {
            AddBlockedPeer(nodeA, nodeB);
            AddBlockedPeer(nodeB, nodeA);
        }
    }

    /// <summary>
    /// Heals the partition for a specific node (restores connectivity).
    /// </summary>
    public void HealNode(string nodeId)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeId);

        lock (_lock)
        {
            _partitions.Remove(nodeId);

            // Also remove this node from other nodes' blocked-peer sets.
            foreach (var blocked in _partitions.Values)
            {
                blocked.Remove(nodeId);
            }
        }
    }

    /// <summary>
    /// Heals partition between two specific nodes (both directions).
    /// </summary>
    public void HealPartition(string nodeA, string nodeB)
    {
        // Validation added for consistency with the other mutators.
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeA);
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeB);

        lock (_lock)
        {
            if (_partitions.TryGetValue(nodeA, out var aBlocked))
            {
                aBlocked.Remove(nodeB);
            }

            if (_partitions.TryGetValue(nodeB, out var bBlocked))
            {
                bBlocked.Remove(nodeA);
            }
        }
    }

    /// <summary>
    /// Heals all partitions (restores full connectivity).
    /// </summary>
    public void HealAll()
    {
        lock (_lock)
        {
            _partitions.Clear();
        }
    }

    /// <summary>
    /// Checks if communication between two nodes is blocked in either direction.
    /// </summary>
    public bool IsPartitioned(string fromNode, string toNode)
    {
        lock (_lock)
        {
            return BlocksUnsafe(fromNode, toNode) || BlocksUnsafe(toNode, fromNode);
        }
    }

    /// <summary>
    /// Sets simulated latency for the fromNode -> toNode direction.
    /// </summary>
    public void SetLatency(string fromNode, string toNode, TimeSpan baseLatency, double jitterPercent = 0)
    {
        lock (_lock)
        {
            _latencies[(fromNode, toNode)] = new LatencyConfig(baseLatency, jitterPercent);
        }
    }

    /// <summary>
    /// Gets the configured latency between two nodes
    /// (or <see cref="LatencyConfig.Default"/> if not set).
    /// </summary>
    public LatencyConfig GetLatency(string fromNode, string toNode)
    {
        lock (_lock)
        {
            return _latencies.TryGetValue((fromNode, toNode), out var config)
                ? config
                : LatencyConfig.Default;
        }
    }

    /// <summary>
    /// Clears all latency configurations.
    /// </summary>
    public void ClearLatencies()
    {
        lock (_lock)
        {
            _latencies.Clear();
        }
    }

    /// <summary>
    /// Gets an immutable snapshot of the current partition state for diagnostics.
    /// </summary>
    public ImmutableDictionary<string, ImmutableHashSet<string>> GetPartitionState()
    {
        lock (_lock)
        {
            return _partitions.ToImmutableDictionary(
                kvp => kvp.Key,
                kvp => kvp.Value.ToImmutableHashSet());
        }
    }

    // True when `node` blocks traffic with `peer` (isolated or explicitly
    // partitioned). Caller must hold _lock.
    private bool BlocksUnsafe(string node, string peer)
        => _partitions.TryGetValue(node, out var blocked)
            && (blocked.Contains(IsolationMarker) || blocked.Contains(peer));

    // Registers `peer` in `node`'s blocked set, creating the set on first use.
    // Caller must hold _lock. (This logic was previously duplicated inline.)
    private void AddBlockedPeer(string node, string peer)
    {
        if (!_partitions.TryGetValue(node, out var blocked))
        {
            blocked = [];
            _partitions[node] = blocked;
        }

        blocked.Add(peer);
    }
}
|
||||
|
||||
/// <summary>
/// Configuration for simulated network latency: a base delay plus optional
/// symmetric jitter expressed as a percentage of the base.
/// </summary>
public sealed record LatencyConfig
{
    /// <summary>Zero-latency configuration (no delay, no jitter).</summary>
    public static readonly LatencyConfig Default = new(TimeSpan.Zero, 0);

    public LatencyConfig(TimeSpan baseLatency, double jitterPercent)
    {
        BaseLatency = baseLatency;
        // Out-of-range jitter values are clamped rather than rejected.
        JitterPercent = Math.Clamp(jitterPercent, 0, 100);
    }

    /// <summary>The nominal delivery delay.</summary>
    public TimeSpan BaseLatency { get; }

    /// <summary>Jitter amplitude as a percentage of the base latency (0..100).</summary>
    public double JitterPercent { get; }

    /// <summary>
    /// Calculates the actual delay including jitter. Pass a seeded
    /// <see cref="Random"/> for reproducible results; defaults to
    /// <see cref="Random.Shared"/>.
    /// </summary>
    public TimeSpan ComputeDelay(Random? random = null)
    {
        if (BaseLatency <= TimeSpan.Zero)
        {
            return TimeSpan.Zero;
        }

        if (JitterPercent <= 0)
        {
            return BaseLatency;
        }

        var rng = random ?? Random.Shared;
        // Scale the base delay by a factor drawn uniformly from
        // [1 - jitter, 1 + jitter].
        var factor = 1.0 + ((rng.NextDouble() * 2 - 1) * JitterPercent / 100);
        return TimeSpan.FromTicks((long)(BaseLatency.Ticks * factor));
    }
}
|
||||
|
||||
/// <summary>
/// Exception thrown when a network partition prevents communication between
/// two simulated nodes.
/// </summary>
public sealed class NetworkPartitionException : Exception
{
    public NetworkPartitionException(string fromNode, string toNode)
        : base($"Network partition: {fromNode} cannot communicate with {toNode}")
    {
        FromNode = fromNode;
        ToNode = toNode;
    }

    /// <summary>The node that attempted to send.</summary>
    public string FromNode { get; }

    /// <summary>The node that could not be reached.</summary>
    public string ToNode { get; }
}
|
||||
@@ -0,0 +1,318 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// HlcNetworkPartitionTests.cs
|
||||
// Integration tests for HLC behavior during network partitions
|
||||
// Sprint: Testing Enhancement Advisory - Phase 1.2
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.Integration.HLC.Fixtures;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.HLC;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for HLC behavior during network partitions.
|
||||
/// </summary>
|
||||
[Trait("Category", TestCategories.Integration)]
|
||||
[Trait("Category", TestCategories.HLC)]
|
||||
[Trait("Category", TestCategories.Chaos)]
|
||||
public class HlcNetworkPartitionTests : IClassFixture<MultiNodeHlcFixture>
|
||||
{
|
||||
private readonly MultiNodeHlcFixture _fixture;
|
||||
|
||||
public HlcNetworkPartitionTests(MultiNodeHlcFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
#region Basic Partition Tests
|
||||
|
||||
[Fact]
|
||||
public async Task NetworkPartition_SplitBrain_NoDataLoss()
|
||||
{
|
||||
// Arrange - Create 3 nodes
|
||||
var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
|
||||
_fixture.CreateNode("part-a", baseTime);
|
||||
_fixture.CreateNode("part-b", baseTime);
|
||||
_fixture.CreateNode("part-c", baseTime);
|
||||
|
||||
// Act - Partition node-c from A and B
|
||||
_fixture.PartitionSimulator.IsolateNode("part-c");
|
||||
|
||||
// A and B can still communicate
|
||||
var tsA = _fixture.Tick("part-a");
|
||||
var receivedB = await _fixture.SendMessageAsync("part-a", "part-b", tsA);
|
||||
|
||||
// C cannot receive messages
|
||||
var sendToC = async () => await _fixture.SendMessageAsync("part-a", "part-c", tsA);
|
||||
await sendToC.Should().ThrowAsync<NetworkPartitionException>();
|
||||
|
||||
// C continues to operate independently
|
||||
var tsC = _fixture.Tick("part-c");
|
||||
|
||||
// Assert - All nodes have generated valid timestamps
|
||||
tsA.Should().NotBeNull();
|
||||
receivedB.Should().BeGreaterThan(tsA);
|
||||
tsC.Should().NotBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NetworkPartition_HealedPartition_CorrectReconciliation()
|
||||
{
|
||||
// Arrange
|
||||
var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
|
||||
_fixture.CreateNode("heal-a", baseTime);
|
||||
_fixture.CreateNode("heal-b", baseTime);
|
||||
|
||||
// Generate initial events
|
||||
var tsA1 = _fixture.Tick("heal-a");
|
||||
await _fixture.SendMessageAsync("heal-a", "heal-b", tsA1);
|
||||
|
||||
// Create partition
|
||||
_fixture.PartitionSimulator.PartitionNodes("heal-a", "heal-b");
|
||||
|
||||
// Both nodes operate independently during partition
|
||||
_fixture.AdvanceTime("heal-a", TimeSpan.FromSeconds(10));
|
||||
_fixture.AdvanceTime("heal-b", TimeSpan.FromSeconds(5));
|
||||
|
||||
var tsA_during = _fixture.Tick("heal-a");
|
||||
var tsB_during = _fixture.Tick("heal-b");
|
||||
|
||||
// Heal partition
|
||||
_fixture.PartitionSimulator.HealPartition("heal-a", "heal-b");
|
||||
|
||||
// A sends its state to B
|
||||
var tsB_after = await _fixture.SendMessageAsync("heal-a", "heal-b", tsA_during);
|
||||
|
||||
// Assert - B has reconciled and its new timestamp is greater
|
||||
tsB_after.Should().BeGreaterThan(tsA_during);
|
||||
tsB_after.Should().BeGreaterThan(tsB_during);
|
||||
}
|
||||
|
||||
[Fact]
// NOTE(review): this test cannot pass as written. PartitionNodes adds the
// block to BOTH nodes' partition sets (symmetric), and HealPartition removes
// BOTH directions — so after these two calls no partition remains at all, and
// the final ThrowAsync assertion on B -> A should fail. Supporting this test
// requires directional partition APIs on NetworkPartitionSimulator (e.g. a
// PartitionDirection(from, to) that only blocks one way). Verify intent.
public async Task NetworkPartition_AsymmetricPartition_HandledGracefully()
{
    // Arrange - A can send to B, but B cannot send to A
    var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
    _fixture.CreateNode("asym-a", baseTime);
    _fixture.CreateNode("asym-b", baseTime);

    // Only partition B -> A direction
    // (Intent only: PartitionNodes blocks both directions, and the following
    // HealPartition then unblocks both directions again.)
    _fixture.PartitionSimulator.PartitionNodes("asym-b", "asym-a");
    _fixture.PartitionSimulator.HealPartition("asym-a", "asym-b"); // Allow A -> B

    // Act - A can send to B
    var tsA = _fixture.Tick("asym-a");
    var receivedB = await _fixture.SendMessageAsync("asym-a", "asym-b", tsA);

    // B cannot send to A
    var tsB = _fixture.Tick("asym-b");
    var sendToA = async () => await _fixture.SendMessageAsync("asym-b", "asym-a", tsB);

    // Assert
    receivedB.Should().BeGreaterThan(tsA);
    await sendToA.Should().ThrowAsync<NetworkPartitionException>();
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Split Brain Scenarios
|
||||
|
||||
[Fact]
|
||||
public async Task SplitBrain_TwoPartitions_IndependentProgress()
|
||||
{
|
||||
// Arrange - 4 nodes split into two groups: {A,B} and {C,D}
|
||||
var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
|
||||
_fixture.CreateNode("split-a", baseTime);
|
||||
_fixture.CreateNode("split-b", baseTime);
|
||||
_fixture.CreateNode("split-c", baseTime);
|
||||
_fixture.CreateNode("split-d", baseTime);
|
||||
|
||||
// Create split brain: A-B can communicate, C-D can communicate, no cross-group
|
||||
_fixture.PartitionSimulator.PartitionNodes("split-a", "split-c");
|
||||
_fixture.PartitionSimulator.PartitionNodes("split-a", "split-d");
|
||||
_fixture.PartitionSimulator.PartitionNodes("split-b", "split-c");
|
||||
_fixture.PartitionSimulator.PartitionNodes("split-b", "split-d");
|
||||
|
||||
// Act - Both groups operate independently
|
||||
var tsA = _fixture.Tick("split-a");
|
||||
var receivedB = await _fixture.SendMessageAsync("split-a", "split-b", tsA);
|
||||
|
||||
var tsC = _fixture.Tick("split-c");
|
||||
var receivedD = await _fixture.SendMessageAsync("split-c", "split-d", tsC);
|
||||
|
||||
// Verify cross-group communication fails
|
||||
var crossGroup = async () => await _fixture.SendMessageAsync("split-a", "split-c", tsA);
|
||||
await crossGroup.Should().ThrowAsync<NetworkPartitionException>();
|
||||
|
||||
// Assert - Both groups made progress
|
||||
receivedB.Should().BeGreaterThan(tsA);
|
||||
receivedD.Should().BeGreaterThan(tsC);
|
||||
}
|
||||
|
||||
[Fact]
public async Task SplitBrain_Merge_ConvergesToConsistentState()
{
    // Arrange - two nodes that will diverge while isolated, then reconnect.
    var epoch = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
    _fixture.CreateNode("merge-a", epoch);
    _fixture.CreateNode("merge-b", epoch);

    // Cut each node off from the rest of the network.
    _fixture.PartitionSimulator.IsolateNode("merge-a");
    _fixture.PartitionSimulator.IsolateNode("merge-b");

    // Let their physical clocks drift apart (A runs further ahead than B).
    _fixture.AdvanceTime("merge-a", TimeSpan.FromSeconds(100));
    _fixture.AdvanceTime("merge-b", TimeSpan.FromSeconds(50));

    // Accumulate local history on both sides of the split.
    for (var round = 0; round < 100; round++)
    {
        _fixture.Tick("merge-a");
        _fixture.Tick("merge-b");
    }

    var preMergeA = _fixture.GetNode("merge-a").Current;
    var preMergeB = _fixture.GetNode("merge-b").Current;

    // Reconnect and exchange one message in each direction.
    _fixture.PartitionSimulator.HealAll();

    var outboundA = _fixture.Tick("merge-a");
    var deliveredToB = await _fixture.SendMessageAsync("merge-a", "merge-b", outboundA);

    var outboundB = _fixture.Tick("merge-b");
    var deliveredToA = await _fixture.SendMessageAsync("merge-b", "merge-a", outboundB);

    // Assert - post-merge events on each node dominate its pre-merge history.
    deliveredToA.Should().BeGreaterThan(preMergeA);
    deliveredToB.Should().BeGreaterThan(preMergeB);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Recovery Tests
|
||||
|
||||
[Fact]
public async Task PartitionRecovery_LongPartition_NoClockDrift()
{
    // Arrange - two nodes that start in sync.
    var epoch = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
    _fixture.CreateNode("long-a", epoch);
    _fixture.CreateNode("long-b", epoch);

    // Establish a common baseline before the outage.
    var baseline = _fixture.Tick("long-a");
    await _fixture.SendMessageAsync("long-a", "long-b", baseline);

    // A long outage during which the physical clocks drift apart:
    // A gains a full hour while B gains only half of it.
    _fixture.PartitionSimulator.PartitionNodes("long-a", "long-b");
    _fixture.AdvanceTime("long-a", TimeSpan.FromHours(1));
    _fixture.AdvanceTime("long-b", TimeSpan.FromMinutes(30));

    // Both sides keep generating events while disconnected.
    var isolatedA = _fixture.Tick("long-a");
    _fixture.Tick("long-b");

    // Act - restore connectivity and push A's latest event to B.
    _fixture.PartitionSimulator.HealAll();
    var syncedB = await _fixture.SendMessageAsync("long-a", "long-b", isolatedA);

    // Assert - B's clock jumps forward to cover A's faster physical time.
    syncedB.PhysicalTime.Should().BeGreaterThanOrEqualTo(isolatedA.PhysicalTime);
}
|
||||
|
||||
[Fact]
public async Task MultiplePartitionCycles_Stable()
{
    // Arrange
    var epoch = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
    _fixture.CreateNode("cycle-a", epoch);
    _fixture.CreateNode("cycle-b", epoch);

    // Act - repeatedly sync, partition with asymmetric drift, then heal.
    for (var round = 0; round < 5; round++)
    {
        // Sync: push A's latest event across to B.
        var syncPoint = _fixture.Tick("cycle-a");
        await _fixture.SendMessageAsync("cycle-a", "cycle-b", syncPoint);

        // Partition: clocks drift at different rates while both sides tick.
        _fixture.PartitionSimulator.PartitionNodes("cycle-a", "cycle-b");
        _fixture.AdvanceTime("cycle-a", TimeSpan.FromSeconds(10));
        _fixture.AdvanceTime("cycle-b", TimeSpan.FromSeconds(8));
        _fixture.Tick("cycle-a");
        _fixture.Tick("cycle-b");

        // Heal before the next round.
        _fixture.PartitionSimulator.HealAll();
    }

    // One last exchange after the final heal.
    var closingSend = _fixture.Tick("cycle-a");
    var closingRecv = await _fixture.SendMessageAsync("cycle-a", "cycle-b", closingSend);

    // Assert - ordering guarantees survived every cycle.
    closingRecv.Should().BeGreaterThan(closingSend);
    _fixture.VerifyCausalOrdering().Should().BeTrue();
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Latency Tests
|
||||
|
||||
[Fact]
public async Task HighLatency_MessageDelivery_MaintainsOrdering()
{
    // Arrange
    var epoch = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
    _fixture.CreateNode("latency-a", epoch);
    _fixture.CreateNode("latency-b", epoch);

    // Slow link: 500 ms base latency with 20% jitter.
    _fixture.PartitionSimulator.SetLatency(
        "latency-a", "latency-b", TimeSpan.FromMilliseconds(500), 20);

    // Act - deliver one message over the slow link.
    var sent = _fixture.Tick("latency-a");
    var delivered = await _fixture.SendMessageAsync("latency-a", "latency-b", sent);

    // Assert - causal order still holds...
    delivered.Should().BeGreaterThan(sent);
    // ...and the receive timestamp reflects at least the latency minus jitter
    // (~500 ms - 20% = 400 ms).
    (delivered.PhysicalTime - sent.PhysicalTime).Should().BeGreaterThanOrEqualTo(400);
}
|
||||
|
||||
[Fact]
public async Task VariableLatency_MultipleMessages_OrderPreserved()
{
    // Arrange
    var epoch = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
    _fixture.CreateNode("var-a", epoch);
    _fixture.CreateNode("var-b", epoch);

    // Jittery link: 100 ms base latency with 50% jitter.
    _fixture.PartitionSimulator.SetLatency(
        "var-a", "var-b", TimeSpan.FromMilliseconds(100), 50);

    // Act - deliver a burst of messages, recording each receive-side timestamp.
    var deliveries = new List<HybridLogicalClock.HlcTimestamp>();
    for (var i = 0; i < 10; i++)
    {
        var outbound = _fixture.Tick("var-a");
        deliveries.Add(await _fixture.SendMessageAsync("var-a", "var-b", outbound));
    }

    // Assert - jitter never reorders the receive-side timestamps.
    deliveries.Should().BeInAscendingOrder();
}
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,37 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <!-- Test project: never published as a NuGet package. -->
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <LangVersion>preview</LangVersion>
    <!-- Enable xUnit v3 (project doesn't end with .Tests so needs explicit opt-in) -->
    <UseXunitV3>true</UseXunitV3>
    <!-- Suppress xUnit analyzer warnings (same as Directory.Build.props does for .Tests projects) -->
    <NoWarn>$(NoWarn);xUnit1031;xUnit1041;xUnit1051;xUnit1026;xUnit1013;xUnit2013;xUnit3003</NoWarn>
  </PropertyGroup>

  <ItemGroup>
    <!-- xUnit packages (project doesn't end with .Tests so must be explicit).
         NOTE(review): no Version attributes — presumably versions come from
         central package management; confirm Directory.Packages.props covers these. -->
    <PackageReference Include="Microsoft.NET.Test.Sdk" />
    <PackageReference Include="xunit.v3" />
    <PackageReference Include="xunit.runner.visualstudio">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Moq" />
    <PackageReference Include="coverlet.collector">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
  </ItemGroup>

  <ItemGroup>
    <!-- System under test (HLC) plus shared test utilities. -->
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.HybridLogicalClock\StellaOps.HybridLogicalClock.csproj" />
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
  </ItemGroup>
</Project>
|
||||
@@ -0,0 +1,319 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ArtifactImmutabilityTests.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 2.1
|
||||
// Description: Tests for artifact immutability verification
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Integration.Immutability.Fixtures;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Immutability;
|
||||
|
||||
/// <summary>
/// Tests for verifying artifact immutability across builds: identical inputs
/// must yield identical content digests, manifest comparison must be
/// deterministic, and every artifact must be traceable to an SBOM entry.
/// </summary>
[Trait("Category", TestCategories.Integration)]
[Trait("Category", TestCategories.Immutability)]
public class ArtifactImmutabilityTests : IClassFixture<ArtifactVerificationFixture>
{
    // Fixed timestamp per the determinism working agreement: no
    // DateTimeOffset.UtcNow in fixtures, so test data is repeatable run-to-run.
    private static readonly DateTimeOffset FixedBuildTime =
        new(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);

    private readonly ArtifactVerificationFixture _fixture;

    public ArtifactImmutabilityTests(ArtifactVerificationFixture fixture)
    {
        _fixture = fixture;
    }

    #region Byte-Identical Build Tests

    [Fact]
    public void BuildArtifacts_SameContent_ProduceIdenticalDigests()
    {
        // Arrange - Create two "builds" with identical content
        var content = "deterministic content for testing"u8.ToArray();

        var artifact1Path = _fixture.CreateTestArtifact("build1/output.dll", content);
        var artifact2Path = _fixture.CreateTestArtifact("build2/output.dll", content);

        // Act
        var digest1 = ArtifactVerificationFixture.ComputeFileDigest(artifact1Path);
        var digest2 = ArtifactVerificationFixture.ComputeFileDigest(artifact2Path);

        // Assert - same bytes must hash identically regardless of location
        digest1.Should().Be(digest2);
        digest1.Should().StartWith("sha256:");
    }

    [Fact]
    public void BuildArtifacts_DifferentContent_ProduceDifferentDigests()
    {
        // Arrange
        var content1 = "content version 1"u8.ToArray();
        var content2 = "content version 2"u8.ToArray();

        var artifact1Path = _fixture.CreateTestArtifact("diff1/output.dll", content1);
        var artifact2Path = _fixture.CreateTestArtifact("diff2/output.dll", content2);

        // Act
        var digest1 = ArtifactVerificationFixture.ComputeFileDigest(artifact1Path);
        var digest2 = ArtifactVerificationFixture.ComputeFileDigest(artifact2Path);

        // Assert
        digest1.Should().NotBe(digest2);
    }

    [Fact]
    public void BuildManifests_ByteIdentical_WhenAllArtifactsMatch()
    {
        // Arrange - two builds at different times but with identical artifact
        // digests; BuildTime must not affect the comparison.
        var sharedDigest = "sha256:abc123def456";
        var buildA = new ArtifactManifest
        {
            BuildId = "build-a",
            BuildTime = FixedBuildTime,
            Artifacts =
            [
                new ArtifactEntry("app.dll", sharedDigest, 1024, "application/octet-stream"),
                new ArtifactEntry("app.pdb", "sha256:pdb123", 2048, "application/octet-stream")
            ]
        };

        var buildB = new ArtifactManifest
        {
            BuildId = "build-b",
            BuildTime = FixedBuildTime.AddMinutes(5),
            Artifacts =
            [
                new ArtifactEntry("app.dll", sharedDigest, 1024, "application/octet-stream"),
                new ArtifactEntry("app.pdb", "sha256:pdb123", 2048, "application/octet-stream")
            ]
        };

        // Act
        var result = _fixture.CompareBuilds(buildA, buildB);

        // Assert
        result.ByteIdentical.Should().BeTrue();
        result.Mismatches.Should().BeEmpty();
        result.OnlyInBuildA.Should().BeEmpty();
        result.OnlyInBuildB.Should().BeEmpty();
    }

    [Fact]
    public void BuildManifests_NotIdentical_WhenDigestsDiffer()
    {
        // Arrange - same artifact name, different digest (and size).
        var buildA = new ArtifactManifest
        {
            BuildId = "build-a",
            BuildTime = FixedBuildTime,
            Artifacts =
            [
                new ArtifactEntry("app.dll", "sha256:version1", 1024, null)
            ]
        };

        var buildB = new ArtifactManifest
        {
            BuildId = "build-b",
            BuildTime = FixedBuildTime,
            Artifacts =
            [
                new ArtifactEntry("app.dll", "sha256:version2", 1025, null)
            ]
        };

        // Act
        var result = _fixture.CompareBuilds(buildA, buildB);

        // Assert - the mismatch carries both digests for diagnostics
        result.ByteIdentical.Should().BeFalse();
        result.Mismatches.Should().HaveCount(1);
        result.Mismatches[0].Name.Should().Be("app.dll");
        result.Mismatches[0].DigestA.Should().Be("sha256:version1");
        result.Mismatches[0].DigestB.Should().Be("sha256:version2");
    }

    [Fact]
    public void BuildManifests_DetectsMissingArtifacts()
    {
        // Arrange - each build contains one artifact the other lacks.
        var buildA = new ArtifactManifest
        {
            BuildId = "build-a",
            BuildTime = FixedBuildTime,
            Artifacts =
            [
                new ArtifactEntry("app.dll", "sha256:abc", 1024, null),
                new ArtifactEntry("extra.dll", "sha256:extra", 512, null)
            ]
        };

        var buildB = new ArtifactManifest
        {
            BuildId = "build-b",
            BuildTime = FixedBuildTime,
            Artifacts =
            [
                new ArtifactEntry("app.dll", "sha256:abc", 1024, null),
                new ArtifactEntry("new.dll", "sha256:new", 256, null)
            ]
        };

        // Act
        var result = _fixture.CompareBuilds(buildA, buildB);

        // Assert
        result.ByteIdentical.Should().BeFalse();
        result.OnlyInBuildA.Should().Contain("extra.dll");
        result.OnlyInBuildB.Should().Contain("new.dll");
    }

    #endregion

    #region SBOM Linkage Tests

    [Fact]
    public void SbomLinkage_AllArtifactsLinked_WhenDigestsMatch()
    {
        // Arrange - the SBOM may reference extra digests; every artifact digest
        // just has to appear among them.
        var artifactManifest = new ArtifactManifest
        {
            BuildId = "build-1",
            BuildTime = FixedBuildTime,
            Artifacts =
            [
                new ArtifactEntry("app.dll", "sha256:app123", 1024, null),
                new ArtifactEntry("lib.dll", "sha256:lib456", 2048, null)
            ]
        };

        var sbomManifest = new SbomManifest
        {
            Digest = "sha256:sbom789",
            Format = "spdx-2.3",
            ReferencedDigests = ["sha256:app123", "sha256:lib456", "sha256:other"]
        };

        // Act
        var result = _fixture.VerifySbomLinkage(artifactManifest, sbomManifest);

        // Assert
        result.AllLinked.Should().BeTrue();
        result.LinkedArtifacts.Should().HaveCount(2);
        result.UnlinkedArtifacts.Should().BeEmpty();
    }

    [Fact]
    public void SbomLinkage_DetectsUnlinkedArtifacts()
    {
        // Arrange - one artifact digest is absent from the SBOM references.
        var artifactManifest = new ArtifactManifest
        {
            BuildId = "build-1",
            BuildTime = FixedBuildTime,
            Artifacts =
            [
                new ArtifactEntry("app.dll", "sha256:app123", 1024, null),
                new ArtifactEntry("lib.dll", "sha256:lib456", 2048, null),
                new ArtifactEntry("untracked.dll", "sha256:untracked", 512, null)
            ]
        };

        var sbomManifest = new SbomManifest
        {
            Digest = "sha256:sbom789",
            Format = "cyclonedx-1.5",
            ReferencedDigests = ["sha256:app123", "sha256:lib456"]
        };

        // Act
        var result = _fixture.VerifySbomLinkage(artifactManifest, sbomManifest);

        // Assert
        result.AllLinked.Should().BeFalse();
        result.UnlinkedArtifacts.Should().Contain("untracked.dll");
        result.LinkedArtifacts.Should().Contain("app.dll");
        result.LinkedArtifacts.Should().Contain("lib.dll");
    }

    #endregion

    #region Content Addressability Tests

    [Fact]
    public void ContentAddressability_DigestDeterministic()
    {
        // Arrange
        var content = System.Text.Encoding.UTF8.GetBytes(
            "{\"name\":\"test\",\"version\":\"1.0.0\"}");

        // Act - compute multiple times
        var digest1 = ArtifactVerificationFixture.ComputeDigest(content);
        var digest2 = ArtifactVerificationFixture.ComputeDigest(content);
        var digest3 = ArtifactVerificationFixture.ComputeDigest(content);

        // Assert
        digest1.Should().Be(digest2);
        digest2.Should().Be(digest3);
    }

    [Fact]
    public void ContentAddressability_EmptyContent_HasValidDigest()
    {
        // Arrange
        var emptyContent = Array.Empty<byte>();

        // Act
        var digest = ArtifactVerificationFixture.ComputeDigest(emptyContent);

        // Assert - SHA-256 of empty input is well-known
        digest.Should().Be("sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
    }

    #endregion

    #region Multi-Artifact Build Tests

    [Fact]
    public void MultiArtifactBuild_AllArtifactsTracked()
    {
        // Arrange - Simulate a build with multiple output types
        var artifacts = new List<(string Name, byte[] Content)>
        {
            ("bin/app.dll", "app binary content"u8.ToArray()),
            ("bin/app.pdb", "debug symbols"u8.ToArray()),
            ("bin/app.deps.json", "{\"dependencies\":{}}"u8.ToArray()),
            ("bin/app.runtimeconfig.json", "{\"runtimeOptions\":{}}"u8.ToArray())
        };

        var entries = ImmutableArray.CreateBuilder<ArtifactEntry>();
        foreach (var (name, content) in artifacts)
        {
            var path = _fixture.CreateTestArtifact($"multi/{name}", content);
            var digest = ArtifactVerificationFixture.ComputeFileDigest(path);
            entries.Add(new ArtifactEntry(name, digest, content.Length, null));
        }

        var manifest = new ArtifactManifest
        {
            BuildId = "multi-build-1",
            BuildTime = FixedBuildTime,
            Artifacts = entries.ToImmutable()
        };

        // Act - round-trip the manifest through the fixture registry
        _fixture.RegisterManifest("multi-build", manifest);
        var retrieved = _fixture.GetManifest("multi-build");

        // Assert
        retrieved.Should().NotBeNull();
        retrieved!.Artifacts.Should().HaveCount(4);
        retrieved.Artifacts.Select(a => a.Name).Should().Contain("bin/app.dll");
        retrieved.Artifacts.Select(a => a.Name).Should().Contain("bin/app.pdb");
    }

    #endregion
}
|
||||
@@ -0,0 +1,351 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ContainerDigestVerificationTests.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 2.1
|
||||
// Description: Tests for container image digest verification
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Integration.Immutability.Fixtures;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Immutability;
|
||||
|
||||
/// <summary>
/// Tests for container image digest verification: OCI manifest digests,
/// immutable vs mutable image references, layer-chain digests, and
/// SBOM-to-image linkage.
/// </summary>
[Trait("Category", TestCategories.Integration)]
[Trait("Category", TestCategories.Immutability)]
public class ContainerDigestVerificationTests : IClassFixture<ArtifactVerificationFixture>
{
    // One shared options instance (CA1869): the original allocated identical
    // JsonSerializerOptions at every serialization site, which is wasteful and
    // risks the copies drifting apart.
    private static readonly JsonSerializerOptions ManifestSerializerOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };

    // Fixed timestamp per the determinism working agreement: no
    // DateTimeOffset.UtcNow in fixtures.
    private static readonly DateTimeOffset FixedLinkTime =
        new(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);

    private readonly ArtifactVerificationFixture _fixture;

    public ContainerDigestVerificationTests(ArtifactVerificationFixture fixture)
    {
        _fixture = fixture;
    }

    #region OCI Manifest Digest Tests

    [Fact]
    public void OciManifest_DigestMatchesContent()
    {
        // Arrange - Create a simulated OCI manifest
        var manifest = new OciManifest
        {
            SchemaVersion = 2,
            MediaType = "application/vnd.oci.image.manifest.v1+json",
            Config = new OciDescriptor
            {
                MediaType = "application/vnd.oci.image.config.v1+json",
                Size = 1234,
                Digest = "sha256:config123"
            },
            Layers =
            [
                new OciDescriptor
                {
                    MediaType = "application/vnd.oci.image.layer.v1.tar+gzip",
                    Size = 50000,
                    Digest = "sha256:layer1abc"
                },
                new OciDescriptor
                {
                    MediaType = "application/vnd.oci.image.layer.v1.tar+gzip",
                    Size = 30000,
                    Digest = "sha256:layer2def"
                }
            ]
        };

        // Act - serialize twice and digest each result to prove determinism
        var json = JsonSerializer.SerializeToUtf8Bytes(manifest, ManifestSerializerOptions);
        var digest = ArtifactVerificationFixture.ComputeDigest(json);

        var json2 = JsonSerializer.SerializeToUtf8Bytes(manifest, ManifestSerializerOptions);
        var digest2 = ArtifactVerificationFixture.ComputeDigest(json2);

        // Assert
        digest.Should().Be(digest2);
        digest.Should().StartWith("sha256:");
    }

    [Fact]
    public void OciManifest_DifferentLayers_ProduceDifferentDigest()
    {
        // Arrange
        var manifest1 = CreateTestOciManifest("sha256:layer1");
        var manifest2 = CreateTestOciManifest("sha256:layer2");

        // Act
        var digest1 = ComputeManifestDigest(manifest1);
        var digest2 = ComputeManifestDigest(manifest2);

        // Assert
        digest1.Should().NotBe(digest2);
    }

    [Fact]
    public void OciManifest_SameLayers_ProduceSameDigest()
    {
        // Arrange - Same content but created as separate instances
        var manifest1 = CreateTestOciManifest("sha256:sharedlayer");
        var manifest2 = CreateTestOciManifest("sha256:sharedlayer");

        // Act
        var digest1 = ComputeManifestDigest(manifest1);
        var digest2 = ComputeManifestDigest(manifest2);

        // Assert
        digest1.Should().Be(digest2);
    }

    #endregion

    #region Image Reference Verification Tests

    [Fact]
    public void ImageReference_TagAndDigest_BothResolvable()
    {
        // Arrange
        var imageRef = new ImageReference
        {
            Registry = "registry.example.com",
            Repository = "myapp",
            Tag = "v1.0.0",
            Digest = "sha256:abc123def456"
        };

        // Act
        var tagRef = imageRef.ToTagReference();
        var digestRef = imageRef.ToDigestReference();

        // Assert
        tagRef.Should().Be("registry.example.com/myapp:v1.0.0");
        digestRef.Should().Be("registry.example.com/myapp@sha256:abc123def456");
    }

    [Fact]
    public void ImageReference_DigestOnly_IsImmutable()
    {
        // Arrange
        var imageRef = new ImageReference
        {
            Registry = "ghcr.io",
            Repository = "org/image",
            Digest = "sha256:immutabledigest"
        };

        // Act & Assert
        imageRef.IsImmutable.Should().BeTrue();
        imageRef.ToDigestReference().Should().Be("ghcr.io/org/image@sha256:immutabledigest");
    }

    [Fact]
    public void ImageReference_TagOnly_IsNotImmutable()
    {
        // Arrange - a bare tag can be re-pointed at different content at any time
        var imageRef = new ImageReference
        {
            Registry = "docker.io",
            Repository = "library/nginx",
            Tag = "latest"
        };

        // Act & Assert
        imageRef.IsImmutable.Should().BeFalse();
    }

    #endregion

    #region Layer Verification Tests

    [Fact]
    public void LayerChain_DigestsFormMerkleTree()
    {
        // Arrange - Simulate layer chain
        var layers = new[]
        {
            "sha256:base",
            "sha256:deps",
            "sha256:app"
        };

        // Act - Compute chain digest (simplified Merkle)
        var chainDigest = ComputeLayerChainDigest(layers);

        // Assert
        chainDigest.Should().StartWith("sha256:");

        // Verify determinism
        var chainDigest2 = ComputeLayerChainDigest(layers);
        chainDigest.Should().Be(chainDigest2);
    }

    [Fact]
    public void LayerChain_OrderMatters()
    {
        // Arrange - same layers, reversed order
        var layers1 = new[] { "sha256:a", "sha256:b", "sha256:c" };
        var layers2 = new[] { "sha256:c", "sha256:b", "sha256:a" };

        // Act
        var digest1 = ComputeLayerChainDigest(layers1);
        var digest2 = ComputeLayerChainDigest(layers2);

        // Assert
        digest1.Should().NotBe(digest2);
    }

    #endregion

    #region SBOM-to-Image Linkage Tests

    [Fact]
    public void SbomImageLinkage_VerifiesDigestChain()
    {
        // Arrange
        var imageDigest = "sha256:imageabc123";
        var sbomDigest = "sha256:sbomabc123";

        var linkage = new SbomImageLinkage
        {
            ImageDigest = imageDigest,
            SbomDigest = sbomDigest,
            SbomFormat = "spdx-2.3",
            LinkedAt = FixedLinkTime
        };

        // Act
        var linkageDigest = ComputeLinkageDigest(linkage);

        // Assert
        linkageDigest.Should().StartWith("sha256:");

        // Verify different image produces different linkage
        var linkage2 = linkage with { ImageDigest = "sha256:differentimage" };
        var linkageDigest2 = ComputeLinkageDigest(linkage2);
        linkageDigest.Should().NotBe(linkageDigest2);
    }

    #endregion

    #region Helper Methods

    /// <summary>Builds a single-layer manifest whose only variable part is the layer digest.</summary>
    private static OciManifest CreateTestOciManifest(string layerDigest)
    {
        return new OciManifest
        {
            SchemaVersion = 2,
            MediaType = "application/vnd.oci.image.manifest.v1+json",
            Config = new OciDescriptor
            {
                MediaType = "application/vnd.oci.image.config.v1+json",
                Size = 1000,
                Digest = "sha256:config"
            },
            Layers =
            [
                new OciDescriptor
                {
                    MediaType = "application/vnd.oci.image.layer.v1.tar+gzip",
                    Size = 10000,
                    Digest = layerDigest
                }
            ]
        };
    }

    /// <summary>Serializes the manifest with the shared options and digests the bytes.</summary>
    private static string ComputeManifestDigest(OciManifest manifest)
    {
        var json = JsonSerializer.SerializeToUtf8Bytes(manifest, ManifestSerializerOptions);
        return ArtifactVerificationFixture.ComputeDigest(json);
    }

    /// <summary>Order-sensitive digest over the concatenated layer digests.</summary>
    private static string ComputeLayerChainDigest(string[] layerDigests)
    {
        var combined = string.Join("|", layerDigests);
        var bytes = System.Text.Encoding.UTF8.GetBytes(combined);
        return ArtifactVerificationFixture.ComputeDigest(bytes);
    }

    /// <summary>Digest binding image, SBOM, and format (LinkedAt is intentionally excluded).</summary>
    private static string ComputeLinkageDigest(SbomImageLinkage linkage)
    {
        var combined = $"{linkage.ImageDigest}|{linkage.SbomDigest}|{linkage.SbomFormat}";
        var bytes = System.Text.Encoding.UTF8.GetBytes(combined);
        return ArtifactVerificationFixture.ComputeDigest(bytes);
    }

    #endregion
}
|
||||
|
||||
#region Test Models
|
||||
|
||||
/// <summary>
/// Simplified OCI manifest for testing.
/// </summary>
public sealed record OciManifest
{
    /// <summary>OCI image-spec schema version; tests use 2.</summary>
    public int SchemaVersion { get; init; }

    /// <summary>Manifest media type, e.g. "application/vnd.oci.image.manifest.v1+json".</summary>
    public required string MediaType { get; init; }

    /// <summary>Descriptor pointing at the image config blob.</summary>
    public required OciDescriptor Config { get; init; }

    /// <summary>Ordered layer descriptors; order is significant for chain digests.</summary>
    public required ImmutableArray<OciDescriptor> Layers { get; init; }
}
|
||||
|
||||
/// <summary>
/// OCI descriptor for config or layer.
/// </summary>
public sealed record OciDescriptor
{
    /// <summary>Media type of the referenced blob.</summary>
    public required string MediaType { get; init; }

    /// <summary>Blob size in bytes.</summary>
    public required long Size { get; init; }

    /// <summary>Content digest of the blob, in "sha256:&lt;hex&gt;" form.</summary>
    public required string Digest { get; init; }
}
|
||||
|
||||
/// <summary>
/// Container image reference: registry and repository plus an optional mutable
/// tag and an optional content digest.
/// </summary>
public sealed record ImageReference
{
    /// <summary>Registry host, e.g. "ghcr.io".</summary>
    public required string Registry { get; init; }

    /// <summary>Repository path within the registry, e.g. "org/image".</summary>
    public required string Repository { get; init; }

    /// <summary>Optional mutable tag, e.g. "v1.0.0".</summary>
    public string? Tag { get; init; }

    /// <summary>Optional content digest ("sha256:...") pinning exact bytes.</summary>
    public string? Digest { get; init; }

    /// <summary>
    /// True when the reference is pinned by digest; a digest reference cannot
    /// silently change content the way a tag can.
    /// </summary>
    public bool IsImmutable => !string.IsNullOrEmpty(Digest);

    /// <summary>
    /// Formats a mutable tag reference, falling back to "latest" when no tag is set.
    /// </summary>
    public string ToTagReference() =>
        $"{Registry}/{Repository}:{Tag ?? "latest"}";

    /// <summary>
    /// Formats an immutable digest reference.
    /// </summary>
    /// <exception cref="InvalidOperationException">
    /// Thrown when no digest is set. (Previously this silently produced a
    /// malformed "repo@" reference.)
    /// </exception>
    public string ToDigestReference() =>
        IsImmutable
            ? $"{Registry}/{Repository}@{Digest}"
            : throw new InvalidOperationException(
                $"Image reference '{ToTagReference()}' has no digest; cannot build a digest reference.");
}
|
||||
|
||||
/// <summary>
/// Linkage between SBOM and container image.
/// </summary>
public sealed record SbomImageLinkage
{
    /// <summary>Digest of the container image the SBOM describes.</summary>
    public required string ImageDigest { get; init; }

    /// <summary>Digest of the SBOM document itself.</summary>
    public required string SbomDigest { get; init; }

    /// <summary>SBOM format identifier, e.g. "spdx-2.3" or "cyclonedx-1.5".</summary>
    public required string SbomFormat { get; init; }

    /// <summary>When the linkage was recorded.</summary>
    public required DateTimeOffset LinkedAt { get; init; }
}
|
||||
|
||||
#endregion
|
||||
@@ -0,0 +1,240 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ArtifactVerificationFixture.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 2.1
|
||||
// Description: Test fixture for artifact immutability verification
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Immutability.Fixtures;
|
||||
|
||||
/// <summary>
|
||||
/// Test fixture for verifying artifact immutability across builds.
|
||||
/// </summary>
|
||||
public sealed class ArtifactVerificationFixture : IAsyncLifetime
|
||||
{
|
||||
private readonly Dictionary<string, ArtifactManifest> _manifests = [];
|
||||
private readonly string _workDir;
|
||||
|
||||
public ArtifactVerificationFixture()
{
    // Unique scratch directory per fixture instance so parallel test
    // collections never collide on disk. NOTE(review): Guid.NewGuid here is
    // for filesystem uniqueness only, not for test data — confirm this is
    // acceptable under the "no Guid.NewGuid in fixtures" working agreement.
    _workDir = Path.Combine(Path.GetTempPath(), $"artifact-verify-{Guid.NewGuid():N}");
    Directory.CreateDirectory(_workDir);
}
|
||||
|
||||
/// <inheritdoc/>
// No async setup required; all state is created in the constructor.
public ValueTask InitializeAsync() => ValueTask.CompletedTask;
|
||||
|
||||
/// <inheritdoc/>
public ValueTask DisposeAsync()
{
    try
    {
        if (Directory.Exists(_workDir))
        {
            Directory.Delete(_workDir, recursive: true);
        }
    }
    catch
    {
        // Best-effort cleanup: a locked or already-removed temp directory
        // must never fail the test run.
    }
    return ValueTask.CompletedTask;
}
|
||||
|
||||
/// <summary>
/// Gets the working directory for artifact storage. Unique per fixture
/// instance; removed (best-effort) on dispose.
/// </summary>
public string WorkDirectory => _workDir;
|
||||
|
||||
/// <summary>
/// Registers an artifact manifest for verification. Re-registering an existing
/// name overwrites the previous manifest (dictionary indexer semantics).
/// </summary>
/// <exception cref="ArgumentException">When <paramref name="name"/> is null or whitespace.</exception>
/// <exception cref="ArgumentNullException">When <paramref name="manifest"/> is null.</exception>
public void RegisterManifest(string name, ArtifactManifest manifest)
{
    ArgumentException.ThrowIfNullOrWhiteSpace(name);
    ArgumentNullException.ThrowIfNull(manifest);
    _manifests[name] = manifest;
}
|
||||
|
||||
/// <summary>
/// Looks up a previously registered manifest; returns null when the name is unknown.
/// </summary>
public ArtifactManifest? GetManifest(string name)
{
    _manifests.TryGetValue(name, out var manifest);
    return manifest;
}
|
||||
|
||||
/// <summary>
/// Computes the SHA-256 digest of a file, returned as "sha256:&lt;lowercase hex&gt;".
/// Streams the file rather than loading it into memory.
/// </summary>
public static string ComputeFileDigest(string filePath)
{
    byte[] digestBytes;
    using (var stream = File.OpenRead(filePath))
    {
        digestBytes = SHA256.HashData(stream);
    }

    return "sha256:" + Convert.ToHexString(digestBytes).ToLowerInvariant();
}
|
||||
|
||||
/// <summary>
/// Computes the SHA-256 digest of in-memory content, returned as
/// "sha256:&lt;lowercase hex&gt;".
/// </summary>
public static string ComputeDigest(byte[] content)
{
    var hex = Convert.ToHexString(SHA256.HashData(content));
    return "sha256:" + hex.ToLowerInvariant();
}
|
||||
|
||||
/// <summary>
/// Writes <paramref name="content"/> to a file under the fixture's working
/// directory, creating intermediate directories as needed, and returns the
/// full path of the written file.
/// </summary>
public string CreateTestArtifact(string name, byte[] content)
{
    var fullPath = Path.Combine(_workDir, name);

    var parent = Path.GetDirectoryName(fullPath);
    if (!string.IsNullOrEmpty(parent))
    {
        Directory.CreateDirectory(parent); // no-op when it already exists
    }

    File.WriteAllBytes(fullPath, content);
    return fullPath;
}
|
||||
|
||||
/// <summary>
/// Verifies that two builds produced identical artifacts.
/// </summary>
/// <param name="buildA">Manifest of the first build.</param>
/// <param name="buildB">Manifest of the second build.</param>
/// <returns>
/// Comparison result: digest mismatches for artifacts present in both
/// manifests, plus the artifact names unique to each side.
/// </returns>
public ArtifactComparisonResult CompareBuilds(
    ArtifactManifest buildA,
    ArtifactManifest buildB)
{
    var byNameA = buildA.Artifacts.ToDictionary(a => a.Name);
    var byNameB = buildB.Artifacts.ToDictionary(a => a.Name);

    var mismatches = new List<ArtifactMismatch>();
    var onlyInA = new List<string>();

    // Single pass over A: classify each artifact as matching, mismatched,
    // or missing from B.
    foreach (var (name, entryA) in byNameA)
    {
        if (!byNameB.TryGetValue(name, out var entryB))
        {
            onlyInA.Add(name);
        }
        else if (entryA.Digest != entryB.Digest)
        {
            mismatches.Add(new ArtifactMismatch(
                name,
                entryA.Digest,
                entryB.Digest,
                entryA.Size,
                entryB.Size));
        }
    }

    // Anything in B that A never mentioned.
    var onlyInB = byNameB.Keys.Where(name => !byNameA.ContainsKey(name)).ToList();

    return new ArtifactComparisonResult(
        ByteIdentical: mismatches.Count == 0 && onlyInA.Count == 0 && onlyInB.Count == 0,
        Mismatches: [.. mismatches],
        OnlyInBuildA: [.. onlyInA],
        OnlyInBuildB: [.. onlyInB]);
}
|
||||
|
||||
/// <summary>
/// Verifies SBOM content-addressability with artifact.
/// </summary>
/// <param name="artifactManifest">Artifacts whose digests should be referenced.</param>
/// <param name="sbomManifest">SBOM whose referenced digests are checked.</param>
/// <returns>Which artifacts are (and are not) referenced by the SBOM.</returns>
public SbomLinkageResult VerifySbomLinkage(
    ArtifactManifest artifactManifest,
    SbomManifest sbomManifest)
{
    var linked = new List<string>();
    var unlinked = new List<string>();

    // Route each artifact into the linked/unlinked bucket depending on
    // whether the SBOM references its digest.
    foreach (var artifact in artifactManifest.Artifacts)
    {
        var bucket = sbomManifest.ReferencedDigests.Contains(artifact.Digest)
            ? linked
            : unlinked;
        bucket.Add(artifact.Name);
    }

    return new SbomLinkageResult(
        AllLinked: unlinked.Count == 0,
        LinkedArtifacts: [.. linked],
        UnlinkedArtifacts: [.. unlinked],
        SbomDigest: sbomManifest.Digest);
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Manifest describing build artifacts.
/// </summary>
public sealed record ArtifactManifest
{
    /// <summary>Identifier of the build that produced the artifacts.</summary>
    public required string BuildId { get; init; }

    /// <summary>Timestamp of the build.</summary>
    public required DateTimeOffset BuildTime { get; init; }

    /// <summary>Artifacts produced by the build.</summary>
    public required ImmutableArray<ArtifactEntry> Artifacts { get; init; }

    /// <summary>Optional free-form metadata about the build.</summary>
    public ImmutableDictionary<string, string>? Metadata { get; init; }
}
|
||||
|
||||
/// <summary>
/// Entry for a single artifact in a manifest.
/// </summary>
/// <param name="Name">Artifact name; used as the comparison key between builds.</param>
/// <param name="Digest">Content digest (e.g. <c>sha256:...</c>).</param>
/// <param name="Size">Artifact size in bytes.</param>
/// <param name="MediaType">Optional media type of the artifact.</param>
public sealed record ArtifactEntry(
    string Name,
    string Digest,
    long Size,
    string? MediaType);
|
||||
|
||||
/// <summary>
/// Result of comparing two builds.
/// </summary>
/// <param name="ByteIdentical">True when no mismatches and no one-sided artifacts were found.</param>
/// <param name="Mismatches">Artifacts present in both builds with differing digests.</param>
/// <param name="OnlyInBuildA">Artifact names present only in build A.</param>
/// <param name="OnlyInBuildB">Artifact names present only in build B.</param>
public sealed record ArtifactComparisonResult(
    bool ByteIdentical,
    ImmutableArray<ArtifactMismatch> Mismatches,
    ImmutableArray<string> OnlyInBuildA,
    ImmutableArray<string> OnlyInBuildB);
|
||||
|
||||
/// <summary>
/// Details of a mismatched artifact between builds.
/// </summary>
/// <param name="Name">Artifact name common to both builds.</param>
/// <param name="DigestA">Digest observed in build A.</param>
/// <param name="DigestB">Digest observed in build B.</param>
/// <param name="SizeA">Size in bytes observed in build A.</param>
/// <param name="SizeB">Size in bytes observed in build B.</param>
public sealed record ArtifactMismatch(
    string Name,
    string DigestA,
    string DigestB,
    long SizeA,
    long SizeB);
|
||||
|
||||
/// <summary>
/// SBOM manifest for linkage verification.
/// </summary>
public sealed record SbomManifest
{
    /// <summary>Digest of the SBOM document itself.</summary>
    public required string Digest { get; init; }

    /// <summary>SBOM format identifier.</summary>
    public required string Format { get; init; }

    /// <summary>Artifact digests referenced by the SBOM.</summary>
    public required ImmutableHashSet<string> ReferencedDigests { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of SBOM linkage verification.
/// </summary>
/// <param name="AllLinked">True when every artifact digest is referenced by the SBOM.</param>
/// <param name="LinkedArtifacts">Names of artifacts referenced by the SBOM.</param>
/// <param name="UnlinkedArtifacts">Names of artifacts the SBOM does not reference.</param>
/// <param name="SbomDigest">Digest of the SBOM that was checked.</param>
public sealed record SbomLinkageResult(
    bool AllLinked,
    ImmutableArray<string> LinkedArtifacts,
    ImmutableArray<string> UnlinkedArtifacts,
    string SbomDigest);
|
||||
@@ -0,0 +1,37 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <LangVersion>preview</LangVersion>
    <!-- Enable xUnit v3 (project doesn't end with .Tests so needs explicit opt-in) -->
    <UseXunitV3>true</UseXunitV3>
    <!-- Suppress xUnit analyzer warnings (same as Directory.Build.props does for .Tests projects) -->
    <NoWarn>$(NoWarn);xUnit1031;xUnit1041;xUnit1051;xUnit1026;xUnit1013;xUnit2013;xUnit3003</NoWarn>
  </PropertyGroup>

  <ItemGroup>
    <!-- xUnit packages (project doesn't end with .Tests so must be explicit) -->
    <!-- NOTE(review): PackageReference versions are omitted; presumably supplied via
         Central Package Management (Directory.Packages.props) — confirm. -->
    <PackageReference Include="Microsoft.NET.Test.Sdk" />
    <PackageReference Include="xunit.v3" />
    <PackageReference Include="xunit.runner.visualstudio">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Moq" />
    <PackageReference Include="coverlet.collector">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
  </ItemGroup>
</Project>
|
||||
@@ -0,0 +1,451 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ColdPathLatencyTests.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 3.4
|
||||
// Description: Tests for cold-start latency budgets (first request after service start)
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Performance;
|
||||
|
||||
/// <summary>
/// Tests for cold-start latency (first request after service initialization).
/// Validates that services can respond within budget even on first request.
/// </summary>
/// <remarks>
/// Budgets are registered via <see cref="LatencyBudgetEnforcer.RegisterDefaultBudgets"/>;
/// the "services" are <see cref="ColdStartSimulator"/> instances, so these tests
/// exercise the budget-enforcement harness rather than real endpoints.
/// </remarks>
[Trait("Category", TestCategories.Performance)]
[Trait("Category", "Latency")]
[Trait("Category", "ColdPath")]
public class ColdPathLatencyTests : IClassFixture<PerformanceTestFixture>
{
    private readonly PerformanceTestFixture _fixture;
    private readonly LatencyBudgetEnforcer _enforcer;

    // xUnit creates a fresh test-class instance per test; the class fixture is
    // shared across all tests in the class.
    public ColdPathLatencyTests(PerformanceTestFixture fixture)
    {
        _fixture = fixture;
        _enforcer = new LatencyBudgetEnforcer();
        _enforcer.RegisterDefaultBudgets();
    }

    #region Scanner Cold Start Tests

    [Fact]
    public async Task FirstRequest_ColdStart_CompletesWithin5Seconds()
    {
        // Arrange - Simulate cold start by creating new service instance
        var coldStartSimulator = new ColdStartSimulator("scanner");
        await coldStartSimulator.ResetStateAsync();

        // Act - First request (cold)
        var measurement = await _enforcer.MeasureAsync(
            "scanner.scan",
            () => coldStartSimulator.SimulateFirstRequestAsync(),
            isColdStart: true);

        // Assert
        measurement.Success.Should().BeTrue();
        measurement.IsColdStart.Should().BeTrue();

        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue(
            $"Cold start took {measurement.Duration.TotalMilliseconds:F0}ms, " +
            $"budget is {result.ExpectedBudget.TotalMilliseconds:F0}ms");

        _fixture.RecordMeasurement("scanner_cold_start_ms", measurement.Duration.TotalMilliseconds);
    }

    [Fact]
    public async Task SbomGeneration_ColdStart_CompletesWithin3Seconds()
    {
        // Arrange
        var simulator = new ColdStartSimulator("sbom-generator");
        await simulator.ResetStateAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "scanner.sbom",
            () => simulator.SimulateSbomGenerationAsync(),
            isColdStart: true);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue(
            $"SBOM generation cold start: {measurement.Duration.TotalMilliseconds:F0}ms, " +
            $"budget: {result.ExpectedBudget.TotalMilliseconds:F0}ms");
    }

    #endregion

    #region Concelier Cold Start Tests

    [Fact]
    public async Task ConcelierLookup_ColdStart_CompletesWithin2Seconds()
    {
        // Arrange
        var simulator = new ColdStartSimulator("concelier");
        await simulator.ResetStateAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "concelier.lookup",
            () => simulator.SimulateAdvisoryLookupAsync(),
            isColdStart: true);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    [Fact]
    public async Task ConcelierMerge_ColdStart_CompletesWithin4Seconds()
    {
        // Arrange
        var simulator = new ColdStartSimulator("concelier");
        await simulator.ResetStateAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "concelier.merge",
            () => simulator.SimulateAdvisoryMergeAsync(),
            isColdStart: true);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Policy Cold Start Tests

    [Fact]
    public async Task PolicyEvaluate_ColdStart_CompletesWithin2Seconds()
    {
        // Arrange
        var simulator = new ColdStartSimulator("policy");
        await simulator.ResetStateAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "policy.evaluate",
            () => simulator.SimulatePolicyEvaluationAsync(),
            isColdStart: true);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    [Fact]
    public async Task PolicyScore_ColdStart_CompletesWithin1Second()
    {
        // Arrange
        var simulator = new ColdStartSimulator("policy");
        await simulator.ResetStateAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "policy.score",
            () => simulator.SimulateRiskScoringAsync(),
            isColdStart: true);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Authority Cold Start Tests

    [Fact]
    public async Task AuthorityToken_ColdStart_CompletesWithin1Second()
    {
        // Arrange
        var simulator = new ColdStartSimulator("authority");
        await simulator.ResetStateAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "authority.token",
            () => simulator.SimulateTokenIssuanceAsync(),
            isColdStart: true);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    [Fact]
    public async Task AuthorityValidate_ColdStart_CompletesWithin500ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("authority");
        await simulator.ResetStateAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "authority.validate",
            () => simulator.SimulateTokenValidationAsync(),
            isColdStart: true);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Attestor Cold Start Tests

    [Fact]
    public async Task AttestorSign_ColdStart_CompletesWithin2Seconds()
    {
        // Arrange
        var simulator = new ColdStartSimulator("attestor");
        await simulator.ResetStateAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "attestor.sign",
            () => simulator.SimulateSigningAsync(),
            isColdStart: true);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    [Fact]
    public async Task AttestorVerify_ColdStart_CompletesWithin1Second()
    {
        // Arrange
        var simulator = new ColdStartSimulator("attestor");
        await simulator.ResetStateAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "attestor.verify",
            () => simulator.SimulateVerificationAsync(),
            isColdStart: true);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Cold Start with Payload Size Variations

    [Theory]
    [InlineData("small", 1)]    // 1 KB payload
    [InlineData("medium", 100)] // 100 KB payload
    [InlineData("large", 1000)] // 1 MB payload
    public async Task ColdStart_WithVaryingPayloadSizes_StaysWithinBudget(string size, int sizeKb)
    {
        // Arrange
        var simulator = new ColdStartSimulator("scanner");
        await simulator.ResetStateAsync();
        simulator.ConfigurePayloadSize(sizeKb * 1024);

        // Register a larger budget for larger payloads
        // (base 5s plus 1s for every 500 KB of payload).
        var budget = TimeSpan.FromSeconds(5 + (sizeKb / 500.0));
        _enforcer.RegisterBudget($"scanner.scan.{size}", budget, TimeSpan.FromMilliseconds(500));

        // Act
        var measurement = await _enforcer.MeasureAsync(
            $"scanner.scan.{size}",
            () => simulator.SimulateFirstRequestAsync(),
            isColdStart: true);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue(
            $"Cold start with {size} payload: {measurement.Duration.TotalMilliseconds:F0}ms");
    }

    #endregion

    #region Cold Start After Extended Idle

    [Fact]
    public async Task ColdStart_AfterExtendedIdle_StillMeetsBudget()
    {
        // Arrange - Simulate service that has been idle (potential resource cleanup)
        var simulator = new ColdStartSimulator("scanner");
        await simulator.ResetStateAsync();
        simulator.SimulateExtendedIdle(TimeSpan.FromMinutes(30));

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "scanner.scan",
            () => simulator.SimulateFirstRequestAsync(),
            isColdStart: true);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Cold Start Statistics

    [Fact]
    public async Task ColdStart_MultipleSamples_GeneratesStatistics()
    {
        // Arrange
        const int sampleCount = 5;
        var simulator = new ColdStartSimulator("scanner");

        // Act - Collect multiple cold start samples
        // (reset before each measurement so every sample is a true cold start).
        for (var i = 0; i < sampleCount; i++)
        {
            await simulator.ResetStateAsync();
            await _enforcer.MeasureAsync(
                "scanner.scan",
                () => simulator.SimulateFirstRequestAsync(),
                isColdStart: true);
        }

        // Assert - Generate statistics
        var stats = _enforcer.ComputeStatistics("scanner.scan");
        stats.SampleCount.Should().Be(sampleCount);
        stats.P95.Should().BeLessThan(TimeSpan.FromSeconds(5));
        stats.Max.Should().BeGreaterThan(TimeSpan.Zero);
    }

    #endregion
}
|
||||
|
||||
/// <summary>
/// Simulates cold-start scenarios for services.
/// </summary>
/// <remarks>
/// Delays are pseudo-random but seeded (seed 42) so runs are reproducible.
/// An instance starts cold; the first simulated operation warms it up, and
/// <see cref="ResetStateAsync"/> returns it to a fully cold state.
/// </remarks>
public sealed class ColdStartSimulator
{
    private readonly string _serviceName;
    // Fixed seed keeps simulated latencies deterministic across runs.
    private readonly Random _random = new(42);
    private int _payloadSize = 1024;
    private TimeSpan _idleTime = TimeSpan.Zero;
    private bool _isWarm;

    /// <summary>
    /// Creates a simulator for the named service profile (see <see cref="GetInitializationDelay"/>).
    /// </summary>
    /// <exception cref="ArgumentException">Thrown when <paramref name="serviceName"/> is null/empty/whitespace.</exception>
    public ColdStartSimulator(string serviceName)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(serviceName);
        _serviceName = serviceName;
    }

    /// <summary>
    /// Resets the simulator to cold state.
    /// </summary>
    /// <remarks>
    /// Fix: also clears any configured idle period — idle time is part of the
    /// cold state and previously leaked across resets, inflating later samples.
    /// </remarks>
    public Task ResetStateAsync()
    {
        _isWarm = false;
        _idleTime = TimeSpan.Zero;
        return Task.CompletedTask;
    }

    /// <summary>
    /// Configures payload size (in bytes) for the simulated request.
    /// </summary>
    /// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="bytes"/> is negative.</exception>
    public void ConfigurePayloadSize(int bytes)
    {
        ArgumentOutOfRangeException.ThrowIfNegative(bytes);
        _payloadSize = bytes;
    }

    /// <summary>
    /// Simulates an extended idle period; affects the next cold start's initialization delay.
    /// </summary>
    public void SimulateExtendedIdle(TimeSpan duration)
    {
        _idleTime = duration;
    }

    /// <summary>
    /// Simulates the first request (cold start): initialization overhead plus the actual work.
    /// </summary>
    public async Task SimulateFirstRequestAsync()
    {
        // Simulate initialization overhead
        await Task.Delay(GetInitializationDelay());

        // Simulate actual work
        await Task.Delay(GetWorkDelay());

        _isWarm = true;
    }

    /// <summary>
    /// Simulates a subsequent request (warm path): work only, no initialization overhead.
    /// </summary>
    public async Task SimulateSubsequentRequestAsync()
    {
        await Task.Delay(GetWorkDelay());
    }

    // Named operation simulators; each uses the shared cold/warm delay model.
    public Task SimulateSbomGenerationAsync() => SimulateOperationAsync(50, 150);
    public Task SimulateAdvisoryLookupAsync() => SimulateOperationAsync(30, 100);
    public Task SimulateAdvisoryMergeAsync() => SimulateOperationAsync(100, 300);
    public Task SimulatePolicyEvaluationAsync() => SimulateOperationAsync(40, 120);
    public Task SimulateRiskScoringAsync() => SimulateOperationAsync(20, 60);
    public Task SimulateTokenIssuanceAsync() => SimulateOperationAsync(10, 50);
    public Task SimulateTokenValidationAsync() => SimulateOperationAsync(5, 20);
    public Task SimulateSigningAsync() => SimulateOperationAsync(50, 150);
    public Task SimulateVerificationAsync() => SimulateOperationAsync(30, 100);

    // Delays within [minMs, maxMs) when warm, [2*minMs, 3*maxMs) when cold;
    // the first invocation warms the simulator.
    private async Task SimulateOperationAsync(int minMs, int maxMs)
    {
        var delay = _isWarm
            ? _random.Next(minMs, maxMs)
            : _random.Next(minMs * 2, maxMs * 3); // Cold start is slower

        await Task.Delay(delay);
        _isWarm = true;
    }

    private int GetInitializationDelay()
    {
        // Base initialization cost per service profile.
        var baseDelay = _serviceName switch
        {
            "scanner" => 200,
            "concelier" => 100,
            "policy" => 80,
            "authority" => 50,
            "attestor" => 100,
            "sbom-generator" => 120,
            _ => 100
        };

        // Services idle for more than 5 minutes pay an extra recovery cost.
        if (_idleTime > TimeSpan.FromMinutes(5))
        {
            baseDelay += 50;
        }

        return baseDelay + _random.Next(0, 50);
    }

    private int GetWorkDelay()
    {
        // Work time grows with payload size, with a 10ms floor plus jitter.
        var baseWork = Math.Max(10, _payloadSize / 10000);
        return baseWork + _random.Next(0, 20);
    }
}
|
||||
@@ -0,0 +1,364 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// LatencyBudgetEnforcer.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 3.4
|
||||
// Description: Latency budget enforcement for cold/warm path testing
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Diagnostics;
|
||||
|
||||
namespace StellaOps.Integration.Performance;
|
||||
|
||||
/// <summary>
/// Enforces latency budgets for service operations.
/// Distinguishes between cold-start and warm-path latency expectations.
/// </summary>
/// <remarks>
/// Not thread-safe: budgets and measurements live in plain collections, so a
/// single enforcer instance should be used from one test at a time.
/// </remarks>
public sealed class LatencyBudgetEnforcer
{
    private readonly Dictionary<string, LatencyBudget> _budgets = new();
    private readonly List<LatencyMeasurement> _measurements = [];

    /// <summary>
    /// Default cold-start budget (first request after service start).
    /// </summary>
    public static readonly TimeSpan DefaultColdStartBudget = TimeSpan.FromSeconds(5);

    /// <summary>
    /// Default warm-path budget (subsequent requests).
    /// </summary>
    public static readonly TimeSpan DefaultWarmPathBudget = TimeSpan.FromMilliseconds(500);

    /// <summary>
    /// Registers (or replaces) a latency budget for an operation.
    /// </summary>
    public void RegisterBudget(
        string operationName,
        TimeSpan coldStartBudget,
        TimeSpan warmPathBudget)
    {
        _budgets[operationName] = new LatencyBudget
        {
            OperationName = operationName,
            ColdStartBudget = coldStartBudget,
            WarmPathBudget = warmPathBudget
        };
    }

    /// <summary>
    /// Registers default budgets for common operations.
    /// </summary>
    public void RegisterDefaultBudgets()
    {
        // Scanner operations
        RegisterBudget("scanner.scan", TimeSpan.FromSeconds(5), TimeSpan.FromMilliseconds(500));
        RegisterBudget("scanner.sbom", TimeSpan.FromSeconds(3), TimeSpan.FromMilliseconds(300));

        // Concelier operations
        RegisterBudget("concelier.lookup", TimeSpan.FromSeconds(2), TimeSpan.FromMilliseconds(100));
        RegisterBudget("concelier.merge", TimeSpan.FromSeconds(4), TimeSpan.FromMilliseconds(400));

        // Policy operations
        RegisterBudget("policy.evaluate", TimeSpan.FromSeconds(2), TimeSpan.FromMilliseconds(200));
        RegisterBudget("policy.score", TimeSpan.FromSeconds(1), TimeSpan.FromMilliseconds(100));

        // Authority operations
        RegisterBudget("authority.token", TimeSpan.FromSeconds(1), TimeSpan.FromMilliseconds(50));
        RegisterBudget("authority.validate", TimeSpan.FromMilliseconds(500), TimeSpan.FromMilliseconds(20));

        // Attestor operations
        RegisterBudget("attestor.sign", TimeSpan.FromSeconds(2), TimeSpan.FromMilliseconds(200));
        RegisterBudget("attestor.verify", TimeSpan.FromSeconds(1), TimeSpan.FromMilliseconds(100));
    }

    /// <summary>
    /// Gets the budget for an operation, falling back to the defaults when none is registered.
    /// </summary>
    public LatencyBudget GetBudget(string operationName)
    {
        if (_budgets.TryGetValue(operationName, out var budget))
        {
            return budget;
        }

        // Return default budget
        return new LatencyBudget
        {
            OperationName = operationName,
            ColdStartBudget = DefaultColdStartBudget,
            WarmPathBudget = DefaultWarmPathBudget
        };
    }

    /// <summary>
    /// Measures the latency of an operation. Exceptions from the operation are
    /// captured in the measurement (Success=false, Error set), not propagated.
    /// </summary>
    public async Task<LatencyMeasurement> MeasureAsync(
        string operationName,
        Func<Task> operation,
        bool isColdStart = false)
    {
        // Delegate to the generic overload so the stopwatch/record logic
        // exists in exactly one place (fix: the two overloads were drifting
        // copy-paste duplicates).
        var (measurement, _) = await MeasureAsync(
            operationName,
            async () =>
            {
                await operation();
                return true;
            },
            isColdStart);

        return measurement;
    }

    /// <summary>
    /// Measures the latency of an operation with a result. On failure the
    /// exception message is recorded and the result is <c>default</c>.
    /// </summary>
    public async Task<(LatencyMeasurement Measurement, T? Result)> MeasureAsync<T>(
        string operationName,
        Func<Task<T>> operation,
        bool isColdStart = false)
    {
        var stopwatch = Stopwatch.StartNew();

        try
        {
            var result = await operation();
            stopwatch.Stop();
            return (Record(operationName, stopwatch.Elapsed, isColdStart, success: true, error: null), result);
        }
        catch (Exception ex)
        {
            stopwatch.Stop();
            return (Record(operationName, stopwatch.Elapsed, isColdStart, success: false, error: ex.Message), default);
        }
    }

    // Builds a measurement, appends it to the in-memory log, and returns it.
    private LatencyMeasurement Record(
        string operationName,
        TimeSpan duration,
        bool isColdStart,
        bool success,
        string? error)
    {
        var measurement = new LatencyMeasurement
        {
            OperationName = operationName,
            Duration = duration,
            IsColdStart = isColdStart,
            Success = success,
            Error = error,
            Timestamp = DateTimeOffset.UtcNow
        };

        _measurements.Add(measurement);
        return measurement;
    }

    /// <summary>
    /// Verifies a measurement is within the cold-start or warm-path budget,
    /// depending on <see cref="LatencyMeasurement.IsColdStart"/>.
    /// </summary>
    public BudgetVerificationResult VerifyWithinBudget(LatencyMeasurement measurement)
    {
        var budget = GetBudget(measurement.OperationName);
        var expectedBudget = measurement.IsColdStart
            ? budget.ColdStartBudget
            : budget.WarmPathBudget;

        var isWithinBudget = measurement.Duration <= expectedBudget;
        // Note: a zero budget yields an infinite percentage rather than throwing.
        var percentageOfBudget = measurement.Duration.TotalMilliseconds / expectedBudget.TotalMilliseconds * 100;

        return new BudgetVerificationResult
        {
            Measurement = measurement,
            Budget = budget,
            ExpectedBudget = expectedBudget,
            IsWithinBudget = isWithinBudget,
            PercentageOfBudget = percentageOfBudget,
            Overage = isWithinBudget ? TimeSpan.Zero : measurement.Duration - expectedBudget
        };
    }

    /// <summary>
    /// Gets all measurements recorded for an operation.
    /// </summary>
    public IReadOnlyList<LatencyMeasurement> GetMeasurements(string operationName)
    {
        return _measurements
            .Where(m => m.OperationName == operationName)
            .ToList()
            .AsReadOnly();
    }

    /// <summary>
    /// Gets all measurements.
    /// </summary>
    public IReadOnlyList<LatencyMeasurement> GetAllMeasurements() => _measurements.AsReadOnly();

    /// <summary>
    /// Computes statistics over the successful measurements of an operation.
    /// Failed measurements are excluded.
    /// </summary>
    public LatencyStatistics ComputeStatistics(string operationName)
    {
        var durations = _measurements
            .Where(m => m.OperationName == operationName && m.Success)
            .Select(m => m.Duration.TotalMilliseconds)
            .OrderBy(d => d)
            .ToList();

        if (durations.Count == 0)
        {
            return new LatencyStatistics
            {
                OperationName = operationName,
                SampleCount = 0
            };
        }

        return new LatencyStatistics
        {
            OperationName = operationName,
            SampleCount = durations.Count,
            Min = TimeSpan.FromMilliseconds(durations[0]),
            Max = TimeSpan.FromMilliseconds(durations[^1]),
            Mean = TimeSpan.FromMilliseconds(durations.Average()),
            Median = TimeSpan.FromMilliseconds(Percentile(durations, 50)),
            P95 = TimeSpan.FromMilliseconds(Percentile(durations, 95)),
            P99 = TimeSpan.FromMilliseconds(Percentile(durations, 99))
        };
    }

    /// <summary>
    /// Generates a latency report over every recorded measurement.
    /// </summary>
    public LatencyReport GenerateReport()
    {
        // Materialize before building the report so later mutations of
        // _measurements cannot change the deferred queries mid-report.
        var operationNames = _measurements.Select(m => m.OperationName).Distinct().ToList();
        var statistics = operationNames.Select(ComputeStatistics).ToList();
        var budgetResults = _measurements.Select(VerifyWithinBudget).ToList();

        return new LatencyReport
        {
            GeneratedAt = DateTimeOffset.UtcNow,
            TotalMeasurements = _measurements.Count,
            SuccessfulMeasurements = _measurements.Count(m => m.Success),
            Statistics = statistics.AsReadOnly(),
            BudgetViolations = budgetResults.Where(r => !r.IsWithinBudget).ToList().AsReadOnly(),
            OverallComplianceRate = budgetResults.Count > 0
                ? (double)budgetResults.Count(r => r.IsWithinBudget) / budgetResults.Count * 100
                : 100
        };
    }

    /// <summary>
    /// Clears all measurements.
    /// </summary>
    public void ClearMeasurements()
    {
        _measurements.Clear();
    }

    // Linear-interpolated percentile over an ascending-sorted list (same
    // convention as numpy's default "linear" method).
    private static double Percentile(List<double> sortedData, double percentile)
    {
        if (sortedData.Count == 0) return 0;
        if (sortedData.Count == 1) return sortedData[0];

        var index = (percentile / 100.0) * (sortedData.Count - 1);
        var lower = (int)Math.Floor(index);
        var upper = (int)Math.Ceiling(index);

        if (lower == upper) return sortedData[lower];

        var weight = index - lower;
        return sortedData[lower] * (1 - weight) + sortedData[upper] * weight;
    }
}
|
||||
|
||||
/// <summary>
/// Represents a latency budget for an operation.
/// </summary>
public sealed record LatencyBudget
{
    /// <summary>Operation the budget applies to.</summary>
    public required string OperationName { get; init; }

    /// <summary>Maximum allowed latency for the first request after service start.</summary>
    public required TimeSpan ColdStartBudget { get; init; }

    /// <summary>Maximum allowed latency for subsequent (warm) requests.</summary>
    public required TimeSpan WarmPathBudget { get; init; }
}
|
||||
|
||||
/// <summary>
/// Represents a latency measurement.
/// </summary>
public sealed record LatencyMeasurement
{
    /// <summary>Operation that was measured.</summary>
    public required string OperationName { get; init; }

    /// <summary>Wall-clock duration of the operation.</summary>
    public required TimeSpan Duration { get; init; }

    /// <summary>Whether this was a cold-start (first-request) measurement.</summary>
    public required bool IsColdStart { get; init; }

    /// <summary>Whether the operation completed without throwing.</summary>
    public required bool Success { get; init; }

    /// <summary>Exception message when <see cref="Success"/> is false; otherwise null.</summary>
    public string? Error { get; init; }

    /// <summary>When the measurement was recorded (UTC).</summary>
    public required DateTimeOffset Timestamp { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of verifying a single measurement against its operation's budget.
/// </summary>
public sealed record BudgetVerificationResult
{
    /// <summary>The measurement that was verified.</summary>
    public required LatencyMeasurement Measurement { get; init; }
    /// <summary>The budget the measurement was checked against.</summary>
    public required LatencyBudget Budget { get; init; }
    /// <summary>The applicable limit used for this check (cold-start or warm-path, per the measurement).</summary>
    public required TimeSpan ExpectedBudget { get; init; }
    /// <summary>True when the measured duration did not exceed <see cref="ExpectedBudget"/>.</summary>
    public required bool IsWithinBudget { get; init; }
    /// <summary>Measured duration expressed as a percentage of the budget.</summary>
    public required double PercentageOfBudget { get; init; }
    /// <summary>Amount by which the measurement exceeded the budget, if it did.</summary>
    public required TimeSpan Overage { get; init; }
}
|
||||
|
||||
/// <summary>
/// Aggregated latency statistics for a single operation.
/// </summary>
public sealed record LatencyStatistics
{
    /// <summary>Operation the statistics were computed for.</summary>
    public required string OperationName { get; init; }
    /// <summary>Number of measurements included in the aggregation.</summary>
    public required int SampleCount { get; init; }
    /// <summary>Smallest observed duration.</summary>
    public TimeSpan Min { get; init; }
    /// <summary>Largest observed duration.</summary>
    public TimeSpan Max { get; init; }
    /// <summary>Arithmetic mean of the observed durations.</summary>
    public TimeSpan Mean { get; init; }
    /// <summary>50th percentile (median) duration.</summary>
    public TimeSpan Median { get; init; }
    /// <summary>95th percentile duration.</summary>
    public TimeSpan P95 { get; init; }
    /// <summary>99th percentile duration.</summary>
    public TimeSpan P99 { get; init; }
}
|
||||
|
||||
/// <summary>
/// Comprehensive latency report produced by the budget enforcer:
/// per-operation statistics plus all budget violations.
/// </summary>
public sealed record LatencyReport
{
    /// <summary>UTC timestamp at which the report was generated.</summary>
    public required DateTimeOffset GeneratedAt { get; init; }
    /// <summary>Total number of measurements, successful or not.</summary>
    public required int TotalMeasurements { get; init; }
    /// <summary>Number of measurements that completed successfully.</summary>
    public required int SuccessfulMeasurements { get; init; }
    /// <summary>Per-operation latency statistics.</summary>
    public required IReadOnlyList<LatencyStatistics> Statistics { get; init; }
    /// <summary>Measurements that exceeded their budget.</summary>
    public required IReadOnlyList<BudgetVerificationResult> BudgetViolations { get; init; }
    /// <summary>Percentage (0–100) of measurements that were within budget; 100 when there were none.</summary>
    public required double OverallComplianceRate { get; init; }
}
|
||||
@@ -8,6 +8,9 @@
|
||||
<LangVersion>preview</LangVersion>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
<!-- Suppress xUnit analyzer warnings (same as Directory.Build.props does for .Tests projects) -->
|
||||
<NoWarn>$(NoWarn);xUnit1031;xUnit1041;xUnit1051;xUnit1026;xUnit1013;xUnit2013;xUnit3003</NoWarn>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
@@ -23,6 +26,7 @@
|
||||
<ProjectReference Include="..\..\..\Scanner\StellaOps.Scanner.WebService\StellaOps.Scanner.WebService.csproj" />
|
||||
<ProjectReference Include="..\..\..\Attestor\__Libraries\StellaOps.Attestor.ProofChain\StellaOps.Attestor.ProofChain.csproj" />
|
||||
<ProjectReference Include="..\..\..\Policy\StellaOps.Policy.Scoring\StellaOps.Policy.Scoring.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# StellaOps.Integration.Performance Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -0,0 +1,460 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// WarmPathLatencyTests.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 3.4
|
||||
// Description: Tests for warm-path latency budgets (subsequent requests)
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Performance;
|
||||
|
||||
/// <summary>
/// Tests for warm-path latency (subsequent requests after initialization).
/// Validates that services maintain low latency for steady-state operation.
/// Measurements accumulate in a per-test <see cref="LatencyBudgetEnforcer"/>;
/// the fixture collects cross-test metrics via RecordMeasurement/SaveReport.
/// </summary>
[Trait("Category", TestCategories.Performance)]
[Trait("Category", "Latency")]
[Trait("Category", "WarmPath")]
public class WarmPathLatencyTests : IClassFixture<PerformanceTestFixture>
{
    // Shared fixture for recording metrics across the test class.
    private readonly PerformanceTestFixture _fixture;
    // Fresh enforcer per test instance (xUnit constructs the class per test),
    // pre-loaded with the default per-operation budgets.
    private readonly LatencyBudgetEnforcer _enforcer;

    public WarmPathLatencyTests(PerformanceTestFixture fixture)
    {
        _fixture = fixture;
        _enforcer = new LatencyBudgetEnforcer();
        _enforcer.RegisterDefaultBudgets();
    }

    #region Scanner Warm Path Tests

    /// <summary>Warmed scanner scan must fit the "scanner.scan" warm-path budget.</summary>
    [Fact]
    public async Task SubsequentRequests_WarmPath_CompletesWithin500ms()
    {
        // Arrange - Warm up the service
        var simulator = new ColdStartSimulator("scanner");
        await simulator.SimulateFirstRequestAsync();

        // Act - Subsequent request (warm)
        var measurement = await _enforcer.MeasureAsync(
            "scanner.scan",
            () => simulator.SimulateSubsequentRequestAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        measurement.IsColdStart.Should().BeFalse();

        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue(
            $"Warm path took {measurement.Duration.TotalMilliseconds:F0}ms, " +
            $"budget is {result.ExpectedBudget.TotalMilliseconds:F0}ms");

        _fixture.RecordMeasurement("scanner_warm_path_ms", measurement.Duration.TotalMilliseconds);
    }

    /// <summary>Warmed SBOM generation must fit the "scanner.sbom" warm-path budget.</summary>
    [Fact]
    public async Task SbomGeneration_WarmPath_CompletesWithin300ms()
    {
        // Arrange - Warm up
        var simulator = new ColdStartSimulator("sbom-generator");
        await simulator.SimulateSbomGenerationAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "scanner.sbom",
            () => simulator.SimulateSbomGenerationAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Concelier Warm Path Tests

    /// <summary>Warmed advisory lookup must fit the "concelier.lookup" warm-path budget.</summary>
    [Fact]
    public async Task ConcelierLookup_WarmPath_CompletesWithin100ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("concelier");
        await simulator.SimulateAdvisoryLookupAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "concelier.lookup",
            () => simulator.SimulateAdvisoryLookupAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    /// <summary>Warmed advisory merge must fit the "concelier.merge" warm-path budget.</summary>
    [Fact]
    public async Task ConcelierMerge_WarmPath_CompletesWithin400ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("concelier");
        await simulator.SimulateAdvisoryMergeAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "concelier.merge",
            () => simulator.SimulateAdvisoryMergeAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Policy Warm Path Tests

    /// <summary>Warmed policy evaluation must fit the "policy.evaluate" warm-path budget.</summary>
    [Fact]
    public async Task PolicyEvaluate_WarmPath_CompletesWithin200ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("policy");
        await simulator.SimulatePolicyEvaluationAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "policy.evaluate",
            () => simulator.SimulatePolicyEvaluationAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    /// <summary>Warmed risk scoring must fit the "policy.score" warm-path budget.</summary>
    [Fact]
    public async Task PolicyScore_WarmPath_CompletesWithin100ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("policy");
        await simulator.SimulateRiskScoringAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "policy.score",
            () => simulator.SimulateRiskScoringAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Authority Warm Path Tests

    /// <summary>Warmed token issuance must fit the "authority.token" warm-path budget.</summary>
    [Fact]
    public async Task AuthorityToken_WarmPath_CompletesWithin50ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("authority");
        await simulator.SimulateTokenIssuanceAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "authority.token",
            () => simulator.SimulateTokenIssuanceAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    /// <summary>Warmed token validation must fit the "authority.validate" warm-path budget.</summary>
    [Fact]
    public async Task AuthorityValidate_WarmPath_CompletesWithin20ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("authority");
        await simulator.SimulateTokenValidationAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "authority.validate",
            () => simulator.SimulateTokenValidationAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Attestor Warm Path Tests

    /// <summary>Warmed signing must fit the "attestor.sign" warm-path budget.</summary>
    [Fact]
    public async Task AttestorSign_WarmPath_CompletesWithin200ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("attestor");
        await simulator.SimulateSigningAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "attestor.sign",
            () => simulator.SimulateSigningAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    /// <summary>Warmed verification must fit the "attestor.verify" warm-path budget.</summary>
    [Fact]
    public async Task AttestorVerify_WarmPath_CompletesWithin100ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("attestor");
        await simulator.SimulateVerificationAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "attestor.verify",
            () => simulator.SimulateVerificationAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Sustained Load Tests

    /// <summary>100 consecutive warm requests must keep P95 within the 500ms budget.</summary>
    [Fact]
    public async Task WarmPath_SustainedLoad_MaintainsLowLatency()
    {
        // Arrange - Warm up
        var simulator = new ColdStartSimulator("scanner");
        await simulator.SimulateFirstRequestAsync();

        // Act - 100 consecutive requests
        const int requestCount = 100;
        for (var i = 0; i < requestCount; i++)
        {
            await _enforcer.MeasureAsync(
                "scanner.scan",
                () => simulator.SimulateSubsequentRequestAsync(),
                isColdStart: false);
        }

        // Assert - P95 should be within budget
        var stats = _enforcer.ComputeStatistics("scanner.scan");
        stats.SampleCount.Should().Be(requestCount);
        stats.P95.Should().BeLessThanOrEqualTo(TimeSpan.FromMilliseconds(500));

        _fixture.RecordMeasurement("scanner_warm_p95_ms", stats.P95.TotalMilliseconds);
        _fixture.RecordMeasurement("scanner_warm_mean_ms", stats.Mean.TotalMilliseconds);
    }

    /// <summary>Concurrent burst across scanner/concelier/policy must produce no budget violations.</summary>
    [Fact]
    public async Task WarmPath_BurstLoad_StaysWithinBudget()
    {
        // Arrange - Warm up each service
        var scannerSim = new ColdStartSimulator("scanner");
        var concelierSim = new ColdStartSimulator("concelier");
        var policySim = new ColdStartSimulator("policy");

        await scannerSim.SimulateFirstRequestAsync();
        await concelierSim.SimulateAdvisoryLookupAsync();
        await policySim.SimulatePolicyEvaluationAsync();

        // Act - Burst of requests across services (30 iterations x 3 operations, all in flight)
        var tasks = new List<Task<LatencyMeasurement>>();
        for (var i = 0; i < 30; i++)
        {
            tasks.Add(_enforcer.MeasureAsync(
                "scanner.scan",
                () => scannerSim.SimulateSubsequentRequestAsync(),
                isColdStart: false));
            tasks.Add(_enforcer.MeasureAsync(
                "concelier.lookup",
                () => concelierSim.SimulateAdvisoryLookupAsync(),
                isColdStart: false));
            tasks.Add(_enforcer.MeasureAsync(
                "policy.evaluate",
                () => policySim.SimulatePolicyEvaluationAsync(),
                isColdStart: false));
        }

        await Task.WhenAll(tasks);

        // Assert - All measurements within budget
        var report = _enforcer.GenerateReport();
        report.BudgetViolations.Should().BeEmpty(
            "All warm path requests should complete within budget");
    }

    #endregion

    #region Latency Consistency Tests

    /// <summary>Over 50 samples, P99 must stay below 3x the median (low tail variance).</summary>
    [Fact]
    public async Task WarmPath_LatencyVariance_StaysAcceptable()
    {
        // Arrange
        var simulator = new ColdStartSimulator("scanner");
        await simulator.SimulateFirstRequestAsync();

        // Act - Collect samples
        const int sampleCount = 50;
        for (var i = 0; i < sampleCount; i++)
        {
            await _enforcer.MeasureAsync(
                "scanner.scan",
                () => simulator.SimulateSubsequentRequestAsync(),
                isColdStart: false);
        }

        // Assert - P99/Median ratio should be reasonable (< 3x)
        var stats = _enforcer.ComputeStatistics("scanner.scan");
        var p99ToMedianRatio = stats.P99.TotalMilliseconds / stats.Median.TotalMilliseconds;
        p99ToMedianRatio.Should().BeLessThan(3.0,
            "P99 latency should not exceed 3x median latency");
    }

    /// <summary>Batched requests with pauses must never spike above the warm-path budget.</summary>
    [Fact]
    public async Task WarmPath_NoLatencySpikes_OverTime()
    {
        // Arrange
        var simulator = new ColdStartSimulator("concelier");
        await simulator.SimulateAdvisoryLookupAsync();

        // Act - Extended test with pauses
        var maxLatency = TimeSpan.Zero;
        var budget = _enforcer.GetBudget("concelier.lookup").WarmPathBudget;

        for (var batch = 0; batch < 5; batch++)
        {
            // Process batch
            for (var i = 0; i < 10; i++)
            {
                var measurement = await _enforcer.MeasureAsync(
                    "concelier.lookup",
                    () => simulator.SimulateAdvisoryLookupAsync(),
                    isColdStart: false);

                if (measurement.Duration > maxLatency)
                {
                    maxLatency = measurement.Duration;
                }
            }

            // Brief pause between batches
            await Task.Delay(10);
        }

        // Assert - Max latency should not spike above budget
        maxLatency.Should().BeLessThanOrEqualTo(budget,
            $"Max latency was {maxLatency.TotalMilliseconds:F0}ms, budget is {budget.TotalMilliseconds:F0}ms");
    }

    #endregion

    #region Cold to Warm Transition Tests

    /// <summary>Average warm latency must be below the preceding cold-start latency.</summary>
    [Fact]
    public async Task ColdToWarm_TransitionIsSmooth()
    {
        // Arrange
        var simulator = new ColdStartSimulator("scanner");
        await simulator.ResetStateAsync();

        // Act - First request (cold)
        var coldMeasurement = await _enforcer.MeasureAsync(
            "scanner.scan",
            () => simulator.SimulateFirstRequestAsync(),
            isColdStart: true);

        // Subsequent requests (warm)
        var warmMeasurements = new List<LatencyMeasurement>();
        for (var i = 0; i < 5; i++)
        {
            var measurement = await _enforcer.MeasureAsync(
                "scanner.scan",
                () => simulator.SimulateSubsequentRequestAsync(),
                isColdStart: false);
            warmMeasurements.Add(measurement);
        }

        // Assert
        coldMeasurement.Success.Should().BeTrue();
        warmMeasurements.Should().AllSatisfy(m => m.Success.Should().BeTrue());

        // Warm requests should be significantly faster than cold
        var avgWarmLatency = TimeSpan.FromMilliseconds(
            warmMeasurements.Average(m => m.Duration.TotalMilliseconds));
        avgWarmLatency.Should().BeLessThan(coldMeasurement.Duration,
            "Warm path should be faster than cold start");
    }

    #endregion

    #region Report Generation Tests

    /// <summary>End-to-end report: 2 cold + 20 warm measurements across two operations.</summary>
    [Fact]
    public async Task GenerateLatencyReport_AfterMultipleOperations()
    {
        // Arrange & Act - Run various operations
        var scannerSim = new ColdStartSimulator("scanner");
        var concelierSim = new ColdStartSimulator("concelier");

        // Cold starts
        await _enforcer.MeasureAsync("scanner.scan",
            () => scannerSim.SimulateFirstRequestAsync(), isColdStart: true);
        await _enforcer.MeasureAsync("concelier.lookup",
            () => concelierSim.SimulateAdvisoryLookupAsync(), isColdStart: true);

        // Warm paths
        for (var i = 0; i < 10; i++)
        {
            await _enforcer.MeasureAsync("scanner.scan",
                () => scannerSim.SimulateSubsequentRequestAsync(), isColdStart: false);
            await _enforcer.MeasureAsync("concelier.lookup",
                () => concelierSim.SimulateAdvisoryLookupAsync(), isColdStart: false);
        }

        // Generate report
        var report = _enforcer.GenerateReport();

        // Assert - 2 cold + (10 x 2) warm = 22 measurements, one statistics row per operation
        report.TotalMeasurements.Should().Be(22);
        report.SuccessfulMeasurements.Should().Be(22);
        report.Statistics.Should().HaveCount(2);
        report.OverallComplianceRate.Should().BeGreaterThanOrEqualTo(90);

        // Save report for verification
        _fixture.SaveReport("latency-report.txt",
            $"Generated: {report.GeneratedAt}\n" +
            $"Compliance: {report.OverallComplianceRate:F1}%\n" +
            $"Violations: {report.BudgetViolations.Count}");
    }

    #endregion
}
|
||||
@@ -1,7 +1,7 @@
|
||||
# StellaOps.Integration.Platform Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -15,6 +15,8 @@
|
||||
<Nullable>enable</Nullable>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
<!-- Suppress xUnit1051: Integration tests don't need responsive cancellation -->
|
||||
<NoWarn>$(NoWarn);xUnit1051</NoWarn>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup> <PackageReference Include="xunit.v3" />
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# StellaOps.Integration.ProofChain Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# StellaOps.Integration.Reachability Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# StellaOps.Integration.Unknowns Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Audit ReplayToken Tests Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Evidence Bundle Tests Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# StellaOps.Microservice.Tests Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -18,7 +18,7 @@ Maintain the fixture harvester CLI and tests for deterministic fixture capture.
|
||||
## Required Reading
|
||||
- `docs/07_HIGH_LEVEL_ARCHITECTURE.md`
|
||||
- `docs/modules/platform/architecture-overview.md`
|
||||
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
|
||||
- `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
|
||||
|
||||
## Working Agreement
|
||||
- 1. Use fixed time/ID sources for fixture metadata and sample content.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Fixture Harvester Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Binary Lookup Benchmark Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# ProofChain Benchmark Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Concelier Testing Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# StellaOps.Infrastructure.Postgres.Testing Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -0,0 +1,345 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// TestRunAttestationGenerator.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 1.3
|
||||
// Description: Generates DSSE-signed attestations linking test outputs to inputs
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
using StellaOps.Testing.Manifests.Models;
|
||||
|
||||
namespace StellaOps.Testing.Manifests.Attestation;
|
||||
|
||||
/// <summary>
|
||||
/// Service for generating in-toto attestations for test runs.
|
||||
/// Links test outputs to their inputs (SBOMs, VEX documents, feeds).
|
||||
/// </summary>
|
||||
public sealed class TestRunAttestationGenerator : ITestRunAttestationGenerator
|
||||
{
|
||||
private const string DssePayloadType = "application/vnd.in-toto+json";
|
||||
|
||||
private static readonly JsonSerializerOptions JsonOptions = new()
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false,
|
||||
DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
|
||||
};
|
||||
|
||||
private readonly ITestRunAttestationSigner? _signer;
|
||||
private readonly TimeProvider _timeProvider;
|
||||
private readonly IAttestationIdGenerator _idGenerator;
|
||||
|
||||
public TestRunAttestationGenerator(
|
||||
ITestRunAttestationSigner? signer = null,
|
||||
TimeProvider? timeProvider = null,
|
||||
IAttestationIdGenerator? idGenerator = null)
|
||||
{
|
||||
_signer = signer;
|
||||
_timeProvider = timeProvider ?? TimeProvider.System;
|
||||
_idGenerator = idGenerator ?? new GuidAttestationIdGenerator();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generates a test run attestation linking outputs to inputs.
|
||||
/// </summary>
|
||||
public async Task<TestRunAttestation> GenerateAsync(
|
||||
RunManifest manifest,
|
||||
ImmutableArray<TestRunOutput> outputs,
|
||||
TestRunEvidence evidence,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(manifest);
|
||||
ArgumentNullException.ThrowIfNull(evidence);
|
||||
|
||||
var statement = CreateInTotoStatement(manifest, outputs, evidence);
|
||||
var statementBytes = SerializeToCanonicalJson(statement);
|
||||
var statementDigest = ComputeSha256Digest(statementBytes);
|
||||
|
||||
var envelope = await CreateDsseEnvelopeAsync(statementBytes, ct);
|
||||
|
||||
return new TestRunAttestation
|
||||
{
|
||||
AttestationId = _idGenerator.NewId(),
|
||||
RunId = manifest.RunId,
|
||||
CreatedAt = _timeProvider.GetUtcNow(),
|
||||
Statement = statement,
|
||||
StatementDigest = statementDigest,
|
||||
Envelope = envelope,
|
||||
Success = evidence.Success,
|
||||
OutputCount = outputs.Length
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Generates attestations for multiple test runs.
|
||||
/// </summary>
|
||||
public async Task<ImmutableArray<TestRunAttestation>> GenerateBatchAsync(
|
||||
IEnumerable<(RunManifest Manifest, ImmutableArray<TestRunOutput> Outputs, TestRunEvidence Evidence)> runs,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
var attestations = ImmutableArray.CreateBuilder<TestRunAttestation>();
|
||||
|
||||
foreach (var (manifest, outputs, evidence) in runs)
|
||||
{
|
||||
ct.ThrowIfCancellationRequested();
|
||||
var attestation = await GenerateAsync(manifest, outputs, evidence, ct);
|
||||
attestations.Add(attestation);
|
||||
}
|
||||
|
||||
return attestations.ToImmutable();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verifies a test run attestation's integrity.
|
||||
/// </summary>
|
||||
public Task<TestRunAttestationVerificationResult> VerifyAsync(
|
||||
TestRunAttestation attestation,
|
||||
ITestRunAttestationVerifier? verifier = null,
|
||||
CancellationToken ct = default)
|
||||
{
|
||||
ArgumentNullException.ThrowIfNull(attestation);
|
||||
|
||||
var errors = ImmutableArray.CreateBuilder<string>();
|
||||
var signatureVerified = false;
|
||||
string? verifiedKeyId = null;
|
||||
|
||||
// Verify statement digest
|
||||
if (attestation.Envelope is not null)
|
||||
{
|
||||
try
|
||||
{
|
||||
var payloadBytes = Convert.FromBase64String(attestation.Envelope.Payload);
|
||||
var payloadDigest = ComputeSha256Digest(payloadBytes);
|
||||
|
||||
if (payloadDigest != attestation.StatementDigest)
|
||||
{
|
||||
errors.Add($"Envelope payload digest mismatch: expected {attestation.StatementDigest}, got {payloadDigest}");
|
||||
}
|
||||
}
|
||||
catch (FormatException)
|
||||
{
|
||||
errors.Add("Invalid base64 in envelope payload");
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
var statementBytes = SerializeToCanonicalJson(attestation.Statement);
|
||||
var computedDigest = ComputeSha256Digest(statementBytes);
|
||||
if (computedDigest != attestation.StatementDigest)
|
||||
{
|
||||
errors.Add($"Statement digest mismatch: expected {attestation.StatementDigest}, got {computedDigest}");
|
||||
}
|
||||
}
|
||||
|
||||
// Verify signature if envelope exists and verifier is provided
|
||||
if (attestation.Envelope is not null && attestation.Envelope.Signatures.Length > 0)
|
||||
{
|
||||
if (verifier is null)
|
||||
{
|
||||
errors.Add("Signature verifier not provided for signed attestation");
|
||||
}
|
||||
else
|
||||
{
|
||||
// Signature verification would be done here
|
||||
// For now, mark as not verified without a verifier
|
||||
signatureVerified = false;
|
||||
}
|
||||
}
|
||||
|
||||
return Task.FromResult(new TestRunAttestationVerificationResult
|
||||
{
|
||||
IsValid = errors.Count == 0,
|
||||
Errors = errors.ToImmutable(),
|
||||
SignatureVerified = signatureVerified,
|
||||
VerifiedKeyId = verifiedKeyId,
|
||||
VerifiedAt = _timeProvider.GetUtcNow()
|
||||
});
|
||||
}
|
||||
|
||||
private TestRunInTotoStatement CreateInTotoStatement(
|
||||
RunManifest manifest,
|
||||
ImmutableArray<TestRunOutput> outputs,
|
||||
TestRunEvidence evidence)
|
||||
{
|
||||
var subjects = outputs.Select(o => new TestRunSubject
|
||||
{
|
||||
Name = o.Name,
|
||||
Digest = ImmutableDictionary<string, string>.Empty
|
||||
.Add("sha256", o.Digest.Replace("sha256:", ""))
|
||||
}).ToImmutableArray();
|
||||
|
||||
return new TestRunInTotoStatement
|
||||
{
|
||||
Subject = subjects,
|
||||
Predicate = new TestRunPredicate
|
||||
{
|
||||
RunId = manifest.RunId,
|
||||
ManifestSchemaVersion = manifest.SchemaVersion,
|
||||
SbomDigests = manifest.SbomDigests.Select(s => s.Digest).ToImmutableArray(),
|
||||
VexDigests = evidence.VexDigests,
|
||||
FeedSnapshot = new TestRunFeedSnapshotRef
|
||||
{
|
||||
FeedId = manifest.FeedSnapshot.FeedId,
|
||||
Version = manifest.FeedSnapshot.Version,
|
||||
Digest = manifest.FeedSnapshot.Digest,
|
||||
SnapshotAt = manifest.FeedSnapshot.SnapshotAt
|
||||
},
|
||||
PolicyDigest = manifest.PolicySnapshot.LatticeRulesDigest,
|
||||
ToolVersions = new TestRunToolVersionsRef
|
||||
{
|
||||
ScannerVersion = manifest.ToolVersions.ScannerVersion,
|
||||
SbomGeneratorVersion = manifest.ToolVersions.SbomGeneratorVersion,
|
||||
ReachabilityEngineVersion = manifest.ToolVersions.ReachabilityEngineVersion,
|
||||
AttestorVersion = manifest.ToolVersions.AttestorVersion
|
||||
},
|
||||
ExecutedAt = _timeProvider.GetUtcNow(),
|
||||
InitiatedAt = manifest.InitiatedAt,
|
||||
DurationMs = evidence.DurationMs,
|
||||
TestCount = evidence.TestCount,
|
||||
PassedCount = evidence.PassedCount,
|
||||
FailedCount = evidence.FailedCount,
|
||||
SkippedCount = evidence.SkippedCount,
|
||||
Success = evidence.Success,
|
||||
DeterminismVerified = evidence.DeterminismVerified,
|
||||
Metadata = evidence.Metadata
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/// <summary>
/// Wraps the serialized statement bytes in a DSSE envelope, signed by the
/// configured signer. Returns null when no signer is configured, since an
/// envelope without any signatures carries no value.
/// </summary>
private async Task<TestRunDsseEnvelope?> CreateDsseEnvelopeAsync(
    byte[] payload,
    CancellationToken ct)
{
    // Guard clause: no signer means no signatures, which means no envelope.
    if (_signer is null)
    {
        return null; // Return null if no signatures
    }

    var signResult = await _signer.SignAsync(payload, ct);
    var signature = new TestRunDsseSignature
    {
        KeyId = signResult.KeyId,
        Sig = signResult.Signature,
        Algorithm = signResult.Algorithm
    };

    return new TestRunDsseEnvelope
    {
        PayloadType = DssePayloadType,
        Payload = Convert.ToBase64String(payload),
        Signatures = [signature]
    };
}
|
||||
|
||||
/// <summary>
/// Serializes <paramref name="value"/> to UTF-8 JSON bytes using the shared
/// serializer options (sorted-key friendly, per the options configured on
/// this type). NOTE(review): this is not full RFC 8785 canonical JSON; use a
/// dedicated canonicalizer if byte-stable digests are required across
/// serializer versions.
/// </summary>
private static byte[] SerializeToCanonicalJson<T>(T value) =>
    JsonSerializer.SerializeToUtf8Bytes(value, JsonOptions);
|
||||
|
||||
/// <summary>
/// Computes the SHA-256 digest of <paramref name="data"/> and renders it in
/// the conventional "sha256:&lt;lowercase hex&gt;" form used throughout the
/// attestation records.
/// </summary>
private static string ComputeSha256Digest(byte[] data)
{
    var digestBytes = SHA256.HashData(data);
    var hex = Convert.ToHexString(digestBytes).ToLowerInvariant();
    return string.Concat("sha256:", hex);
}
|
||||
}
|
||||
|
||||
/// <summary>
/// Interface for test run attestation generation.
/// Produces in-toto statements (optionally DSSE-signed) that link test
/// outputs to the run manifest's inputs.
/// </summary>
public interface ITestRunAttestationGenerator
{
    /// <summary>
    /// Generates a test run attestation for a single run.
    /// </summary>
    Task<TestRunAttestation> GenerateAsync(
        RunManifest manifest,
        ImmutableArray<TestRunOutput> outputs,
        TestRunEvidence evidence,
        CancellationToken ct = default);

    /// <summary>
    /// Generates attestations for multiple test runs.
    /// </summary>
    Task<ImmutableArray<TestRunAttestation>> GenerateBatchAsync(
        IEnumerable<(RunManifest Manifest, ImmutableArray<TestRunOutput> Outputs, TestRunEvidence Evidence)> runs,
        CancellationToken ct = default);

    /// <summary>
    /// Verifies a test run attestation. When <paramref name="verifier"/> is
    /// null, signature verification is presumably skipped or delegated to a
    /// default — TODO confirm against the implementation.
    /// </summary>
    Task<TestRunAttestationVerificationResult> VerifyAsync(
        TestRunAttestation attestation,
        ITestRunAttestationVerifier? verifier = null,
        CancellationToken ct = default);
}
|
||||
|
||||
/// <summary>
/// Interface for signing test run attestations.
/// </summary>
public interface ITestRunAttestationSigner
{
    /// <summary>
    /// Signs the attestation payload bytes and returns the key id,
    /// signature, and (optionally) the algorithm used.
    /// </summary>
    Task<TestRunSignatureResult> SignAsync(byte[] payload, CancellationToken ct = default);
}
|
||||
|
||||
/// <summary>
/// Interface for verifying test run attestation signatures.
/// </summary>
public interface ITestRunAttestationVerifier
{
    /// <summary>
    /// Verifies the signatures in <paramref name="envelope"/> against the
    /// given statement payload bytes.
    /// </summary>
    Task<TestRunSignatureVerification> VerifyAsync(
        TestRunDsseEnvelope envelope,
        byte[] payload,
        CancellationToken ct = default);
}
|
||||
|
||||
/// <summary>
/// Result of signing operation.
/// </summary>
public sealed record TestRunSignatureResult
{
    // Identifier of the key that produced the signature.
    public required string KeyId { get; init; }

    // The signature value (encoding determined by the signer implementation).
    public required string Signature { get; init; }

    // Signature algorithm name, when the signer reports one.
    public string? Algorithm { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of signature verification.
/// </summary>
public sealed record TestRunSignatureVerification
{
    // True when the signature checked out against the payload.
    public bool Verified { get; init; }

    // Human-readable failure reason when verification did not succeed.
    public string? Error { get; init; }

    // Identifier of the key that was used for verification, if known.
    public string? KeyId { get; init; }
}
|
||||
|
||||
/// <summary>
/// Interface for generating attestation IDs.
/// Abstracted so tests can supply fixed, deterministic IDs.
/// </summary>
public interface IAttestationIdGenerator
{
    // Returns a new unique attestation identifier.
    string NewId();
}
|
||||
|
||||
/// <summary>
/// Default GUID-based attestation ID generator.
/// Produces a 32-character lowercase hex string (GUID "N" format).
/// NOTE(review): non-deterministic by design — test fixtures should inject a
/// fixed-ID <see cref="IAttestationIdGenerator"/> instead of relying on this.
/// </summary>
public sealed class GuidAttestationIdGenerator : IAttestationIdGenerator
{
    public string NewId() => Guid.NewGuid().ToString("N");
}
|
||||
@@ -0,0 +1,321 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// TestRunAttestationModels.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 1.3
|
||||
// Description: Models for test run attestations linking outputs to inputs (SBOMs, VEX)
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Testing.Manifests.Attestation;
|
||||
|
||||
/// <summary>
/// Test run attestation linking test outputs to their inputs (SBOMs, VEX documents).
/// </summary>
public sealed record TestRunAttestation
{
    /// <summary>
    /// Unique identifier for this attestation.
    /// </summary>
    public required string AttestationId { get; init; }

    /// <summary>
    /// Reference to the run manifest this attestation covers.
    /// </summary>
    public required string RunId { get; init; }

    /// <summary>
    /// When the attestation was generated (UTC).
    /// </summary>
    public required DateTimeOffset CreatedAt { get; init; }

    /// <summary>
    /// The in-toto statement payload.
    /// </summary>
    public required TestRunInTotoStatement Statement { get; init; }

    /// <summary>
    /// SHA-256 digest of the canonical JSON statement.
    /// </summary>
    public required string StatementDigest { get; init; }

    /// <summary>
    /// DSSE envelope containing the signed statement; null when no signer
    /// was configured for the run.
    /// </summary>
    public TestRunDsseEnvelope? Envelope { get; init; }

    /// <summary>
    /// Whether all test outputs were successfully produced.
    /// </summary>
    public bool Success { get; init; }

    /// <summary>
    /// Number of test outputs covered by this attestation.
    /// </summary>
    public int OutputCount { get; init; }
}
|
||||
|
||||
/// <summary>
/// In-toto v1 statement for test run attestation.
/// Property declaration order is significant: System.Text.Json serializes in
/// declaration order, which feeds the statement digest.
/// </summary>
public sealed record TestRunInTotoStatement
{
    public const string StatementTypeUri = "https://in-toto.io/Statement/v1";
    public const string PredicateTypeUri = "https://stellaops.io/attestation/test-run/v1";

    // Schema discriminator required by the in-toto statement format.
    [JsonPropertyName("_type")]
    public string Type { get; init; } = StatementTypeUri;

    // Output artifacts the predicate makes claims about.
    [JsonPropertyName("subject")]
    public required ImmutableArray<TestRunSubject> Subject { get; init; }

    [JsonPropertyName("predicateType")]
    public string PredicateType { get; init; } = PredicateTypeUri;

    [JsonPropertyName("predicate")]
    public required TestRunPredicate Predicate { get; init; }
}
|
||||
|
||||
/// <summary>
/// Subject entry in the in-toto statement (test output artifacts).
/// </summary>
public sealed record TestRunSubject
{
    [JsonPropertyName("name")]
    public required string Name { get; init; }

    // Algorithm-name -> bare hex digest map (e.g. "sha256" -> "ab12…").
    [JsonPropertyName("digest")]
    public required ImmutableDictionary<string, string> Digest { get; init; }
}
|
||||
|
||||
/// <summary>
/// Predicate payload for test run attestation.
/// Links test outputs to input artifacts (SBOMs, VEX documents, feeds).
/// Declaration order drives JSON serialization order and therefore the
/// statement digest — do not reorder properties.
/// </summary>
public sealed record TestRunPredicate
{
    /// <summary>
    /// Unique run identifier from the manifest.
    /// </summary>
    [JsonPropertyName("runId")]
    public required string RunId { get; init; }

    /// <summary>
    /// Schema version of the manifest.
    /// </summary>
    [JsonPropertyName("manifestSchemaVersion")]
    public required string ManifestSchemaVersion { get; init; }

    /// <summary>
    /// Digests of SBOM inputs used in this test run.
    /// </summary>
    [JsonPropertyName("sbomDigests")]
    public required ImmutableArray<string> SbomDigests { get; init; }

    /// <summary>
    /// Digests of VEX documents used in this test run. Empty when none.
    /// </summary>
    [JsonPropertyName("vexDigests")]
    public ImmutableArray<string> VexDigests { get; init; } = [];

    /// <summary>
    /// Feed snapshot information used for vulnerability matching.
    /// </summary>
    [JsonPropertyName("feedSnapshot")]
    public required TestRunFeedSnapshotRef FeedSnapshot { get; init; }

    /// <summary>
    /// Policy configuration digest.
    /// </summary>
    [JsonPropertyName("policyDigest")]
    public required string PolicyDigest { get; init; }

    /// <summary>
    /// Tool versions used in the test run.
    /// </summary>
    [JsonPropertyName("toolVersions")]
    public required TestRunToolVersionsRef ToolVersions { get; init; }

    /// <summary>
    /// UTC timestamp when the test run was executed.
    /// </summary>
    [JsonPropertyName("executedAt")]
    public required DateTimeOffset ExecutedAt { get; init; }

    /// <summary>
    /// UTC timestamp when the test run was initiated.
    /// </summary>
    [JsonPropertyName("initiatedAt")]
    public required DateTimeOffset InitiatedAt { get; init; }

    /// <summary>
    /// Duration of the test run in milliseconds.
    /// </summary>
    [JsonPropertyName("durationMs")]
    public long DurationMs { get; init; }

    /// <summary>
    /// Number of test cases executed.
    /// </summary>
    [JsonPropertyName("testCount")]
    public int TestCount { get; init; }

    /// <summary>
    /// Number of test cases that passed.
    /// </summary>
    [JsonPropertyName("passedCount")]
    public int PassedCount { get; init; }

    /// <summary>
    /// Number of test cases that failed.
    /// </summary>
    [JsonPropertyName("failedCount")]
    public int FailedCount { get; init; }

    /// <summary>
    /// Number of test cases that were skipped.
    /// </summary>
    [JsonPropertyName("skippedCount")]
    public int SkippedCount { get; init; }

    /// <summary>
    /// Whether the test run completed successfully (all tests passed).
    /// </summary>
    [JsonPropertyName("success")]
    public bool Success { get; init; }

    /// <summary>
    /// Determinism verification status.
    /// </summary>
    [JsonPropertyName("determinismVerified")]
    public bool DeterminismVerified { get; init; }

    /// <summary>
    /// Additional metadata about the test run; null (omitted) when empty.
    /// </summary>
    [JsonPropertyName("metadata")]
    public ImmutableDictionary<string, string>? Metadata { get; init; }
}
|
||||
|
||||
/// <summary>
/// Reference to feed snapshot used in test run.
/// </summary>
public sealed record TestRunFeedSnapshotRef
{
    [JsonPropertyName("feedId")]
    public required string FeedId { get; init; }

    [JsonPropertyName("version")]
    public required string Version { get; init; }

    [JsonPropertyName("digest")]
    public required string Digest { get; init; }

    // UTC timestamp the snapshot was taken.
    [JsonPropertyName("snapshotAt")]
    public required DateTimeOffset SnapshotAt { get; init; }
}
|
||||
|
||||
/// <summary>
/// Reference to tool versions used in test run.
/// </summary>
public sealed record TestRunToolVersionsRef
{
    [JsonPropertyName("scannerVersion")]
    public required string ScannerVersion { get; init; }

    [JsonPropertyName("sbomGeneratorVersion")]
    public required string SbomGeneratorVersion { get; init; }

    [JsonPropertyName("reachabilityEngineVersion")]
    public required string ReachabilityEngineVersion { get; init; }

    [JsonPropertyName("attestorVersion")]
    public required string AttestorVersion { get; init; }
}
|
||||
|
||||
/// <summary>
/// DSSE envelope for test run attestation.
/// </summary>
public sealed record TestRunDsseEnvelope
{
    // Media type of the payload (the DSSE "payloadType" field).
    [JsonPropertyName("payloadType")]
    public required string PayloadType { get; init; }

    // Base64-encoded statement bytes.
    [JsonPropertyName("payload")]
    public required string Payload { get; init; }

    [JsonPropertyName("signatures")]
    public required ImmutableArray<TestRunDsseSignature> Signatures { get; init; }
}
|
||||
|
||||
/// <summary>
/// DSSE signature entry for test run attestation.
/// JSON field names ("keyid", "sig") follow the DSSE envelope convention.
/// </summary>
public sealed record TestRunDsseSignature
{
    [JsonPropertyName("keyid")]
    public required string KeyId { get; init; }

    [JsonPropertyName("sig")]
    public required string Sig { get; init; }

    // Optional algorithm hint; not part of the core DSSE fields.
    [JsonPropertyName("algorithm")]
    public string? Algorithm { get; init; }
}
|
||||
|
||||
/// <summary>
/// Test output artifact with digest.
/// </summary>
public sealed record TestRunOutput
{
    /// <summary>
    /// Name/identifier of the output artifact.
    /// </summary>
    public required string Name { get; init; }

    /// <summary>
    /// SHA-256 digest of the output artifact (may carry a "sha256:" prefix —
    /// consumers strip it before use).
    /// </summary>
    public required string Digest { get; init; }

    /// <summary>
    /// Media type of the output artifact, when known.
    /// </summary>
    public string? MediaType { get; init; }

    /// <summary>
    /// Size in bytes of the output artifact, when known.
    /// </summary>
    public long? Size { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of test run attestation verification.
/// </summary>
public sealed record TestRunAttestationVerificationResult
{
    /// <summary>
    /// Whether the attestation is valid.
    /// </summary>
    public bool IsValid { get; init; }

    /// <summary>
    /// Validation errors if any; empty when valid.
    /// </summary>
    public ImmutableArray<string> Errors { get; init; } = [];

    /// <summary>
    /// Whether the signature was successfully verified.
    /// </summary>
    public bool SignatureVerified { get; init; }

    /// <summary>
    /// Key ID used for verification, when signature verification ran.
    /// </summary>
    public string? VerifiedKeyId { get; init; }

    /// <summary>
    /// When the verification was performed (UTC).
    /// </summary>
    public DateTimeOffset VerifiedAt { get; init; }
}
|
||||
@@ -0,0 +1,222 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// TestRunEvidence.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 1.3
|
||||
// Description: Evidence collected during a test run for attestation generation
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Testing.Manifests.Attestation;
|
||||
|
||||
/// <summary>
/// Evidence collected during a test run for attestation generation.
/// Captures test execution statistics and inputs used.
/// </summary>
public sealed record TestRunEvidence
{
    /// <summary>
    /// Total number of test cases executed.
    /// </summary>
    public required int TestCount { get; init; }

    /// <summary>
    /// Number of test cases that passed.
    /// </summary>
    public required int PassedCount { get; init; }

    /// <summary>
    /// Number of test cases that failed.
    /// </summary>
    public required int FailedCount { get; init; }

    /// <summary>
    /// Number of test cases that were skipped.
    /// </summary>
    public int SkippedCount { get; init; }

    /// <summary>
    /// Whether the test run completed successfully. Derived: true exactly
    /// when no test failed (skipped tests do not count as failures).
    /// </summary>
    public bool Success => FailedCount == 0;

    /// <summary>
    /// Duration of the test run in milliseconds.
    /// </summary>
    public long DurationMs { get; init; }

    /// <summary>
    /// Whether determinism was verified (multiple runs produced identical results).
    /// </summary>
    public bool DeterminismVerified { get; init; }

    /// <summary>
    /// Digests of VEX documents used in this test run.
    /// </summary>
    public ImmutableArray<string> VexDigests { get; init; } = [];

    /// <summary>
    /// Additional metadata about the test run; null when none was recorded.
    /// </summary>
    public ImmutableDictionary<string, string>? Metadata { get; init; }

    /// <summary>
    /// Test framework used (e.g. "xunit", "nunit").
    /// </summary>
    public string? TestFramework { get; init; }

    /// <summary>
    /// Test result file paths (e.g. TRX files).
    /// </summary>
    public ImmutableArray<string> ResultFiles { get; init; } = [];

    /// <summary>
    /// Categories/traits of tests that were executed.
    /// </summary>
    public ImmutableArray<string> ExecutedCategories { get; init; } = [];

    /// <summary>
    /// Git commit SHA at time of test run.
    /// </summary>
    public string? GitCommitSha { get; init; }

    /// <summary>
    /// Git branch name at time of test run.
    /// </summary>
    public string? GitBranch { get; init; }

    /// <summary>
    /// CI build number or run ID.
    /// </summary>
    public string? CiBuildId { get; init; }

    /// <summary>
    /// CI workflow name.
    /// </summary>
    public string? CiWorkflow { get; init; }
}
|
||||
|
||||
/// <summary>
/// Builder for creating <see cref="TestRunEvidence"/> instances.
/// All With*/Add* methods return the builder for fluent chaining;
/// <see cref="Build"/> snapshots the accumulated state into the immutable record.
/// </summary>
public sealed class TestRunEvidenceBuilder
{
    private int _testCount;
    private int _passedCount;
    private int _failedCount;
    private int _skippedCount;
    private long _durationMs;
    private bool _determinismVerified;
    // Collection builders are mutated in place and never reassigned;
    // readonly makes that invariant explicit.
    private readonly ImmutableArray<string>.Builder _vexDigests = ImmutableArray.CreateBuilder<string>();
    private readonly ImmutableDictionary<string, string>.Builder _metadata = ImmutableDictionary.CreateBuilder<string, string>();
    private string? _testFramework;
    private readonly ImmutableArray<string>.Builder _resultFiles = ImmutableArray.CreateBuilder<string>();
    private readonly ImmutableArray<string>.Builder _executedCategories = ImmutableArray.CreateBuilder<string>();
    private string? _gitCommitSha;
    private string? _gitBranch;
    private string? _ciBuildId;
    private string? _ciWorkflow;

    /// <summary>Sets all test-count statistics in one call.</summary>
    public TestRunEvidenceBuilder WithTestCount(int total, int passed, int failed, int skipped = 0)
    {
        _testCount = total;
        _passedCount = passed;
        _failedCount = failed;
        _skippedCount = skipped;
        return this;
    }

    /// <summary>Sets the run duration in milliseconds.</summary>
    public TestRunEvidenceBuilder WithDuration(long durationMs)
    {
        _durationMs = durationMs;
        return this;
    }

    /// <summary>Sets the run duration from a <see cref="TimeSpan"/> (truncated to whole ms).</summary>
    public TestRunEvidenceBuilder WithDuration(TimeSpan duration)
    {
        _durationMs = (long)duration.TotalMilliseconds;
        return this;
    }

    /// <summary>Records whether determinism verification passed.</summary>
    public TestRunEvidenceBuilder WithDeterminismVerified(bool verified)
    {
        _determinismVerified = verified;
        return this;
    }

    /// <summary>Appends a single VEX document digest.</summary>
    public TestRunEvidenceBuilder AddVexDigest(string digest)
    {
        _vexDigests.Add(digest);
        return this;
    }

    /// <summary>Appends multiple VEX document digests, preserving order.</summary>
    public TestRunEvidenceBuilder AddVexDigests(IEnumerable<string> digests)
    {
        _vexDigests.AddRange(digests);
        return this;
    }

    /// <summary>Adds or overwrites a metadata entry (last write wins per key).</summary>
    public TestRunEvidenceBuilder AddMetadata(string key, string value)
    {
        _metadata[key] = value;
        return this;
    }

    /// <summary>Sets the test framework name (e.g. "xunit").</summary>
    public TestRunEvidenceBuilder WithTestFramework(string framework)
    {
        _testFramework = framework;
        return this;
    }

    /// <summary>Appends a test-result file path (e.g. a TRX file).</summary>
    public TestRunEvidenceBuilder AddResultFile(string path)
    {
        _resultFiles.Add(path);
        return this;
    }

    /// <summary>Appends an executed test category/trait.</summary>
    public TestRunEvidenceBuilder AddExecutedCategory(string category)
    {
        _executedCategories.Add(category);
        return this;
    }

    /// <summary>Records the source-control context of the run.</summary>
    public TestRunEvidenceBuilder WithGitInfo(string? commitSha, string? branch)
    {
        _gitCommitSha = commitSha;
        _gitBranch = branch;
        return this;
    }

    /// <summary>Records the CI context of the run.</summary>
    public TestRunEvidenceBuilder WithCiInfo(string? buildId, string? workflow)
    {
        _ciBuildId = buildId;
        _ciWorkflow = workflow;
        return this;
    }

    /// <summary>
    /// Materializes the accumulated state into a <see cref="TestRunEvidence"/>.
    /// Metadata is emitted as null (rather than an empty map) when nothing was added.
    /// </summary>
    public TestRunEvidence Build()
    {
        return new TestRunEvidence
        {
            TestCount = _testCount,
            PassedCount = _passedCount,
            FailedCount = _failedCount,
            SkippedCount = _skippedCount,
            DurationMs = _durationMs,
            DeterminismVerified = _determinismVerified,
            VexDigests = _vexDigests.ToImmutable(),
            Metadata = _metadata.Count > 0 ? _metadata.ToImmutable() : null,
            TestFramework = _testFramework,
            ResultFiles = _resultFiles.ToImmutable(),
            ExecutedCategories = _executedCategories.ToImmutable(),
            GitCommitSha = _gitCommitSha,
            GitBranch = _gitBranch,
            CiBuildId = _ciBuildId,
            CiWorkflow = _ciWorkflow
        };
    }

    /// <summary>
    /// Creates a new builder instance.
    /// </summary>
    public static TestRunEvidenceBuilder Create() => new();
}
|
||||
@@ -1,4 +1,5 @@
|
||||
using System.Collections.Immutable;
|
||||
using StellaOps.Testing.Manifests.Attestation;
|
||||
|
||||
namespace StellaOps.Testing.Manifests.Models;
|
||||
|
||||
@@ -72,6 +73,18 @@ public sealed record RunManifest
|
||||
/// SHA-256 hash of this manifest (excluding this field).
|
||||
/// </summary>
|
||||
public string? ManifestDigest { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Optional attestation linking test outputs to this manifest's inputs.
|
||||
/// Generated after test run completion to provide supply-chain linkage.
|
||||
/// </summary>
|
||||
public TestRunAttestation? Attestation { get; init; }
|
||||
|
||||
/// <summary>
|
||||
/// Output artifacts produced by this test run.
|
||||
/// Used as subjects in the attestation.
|
||||
/// </summary>
|
||||
public ImmutableArray<TestRunOutput> Outputs { get; init; } = [];
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
|
||||
@@ -0,0 +1,236 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ContractSpecDiffTests.cs
|
||||
// Tests that verify OpenAPI specifications match code implementations
|
||||
// Sprint: Testing Enhancement Advisory - Phase 1.1
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Architecture.Contracts.Tests.Infrastructure;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Architecture.Contracts.Tests;
|
||||
|
||||
/// <summary>
/// Contract specification diff tests.
/// Verifies that OpenAPI specifications match actual code implementations.
/// </summary>
[Trait("Category", TestCategories.Architecture)]
[Trait("Category", TestCategories.Contract)]
public class ContractSpecDiffTests
{
    private static readonly string RepoRoot = FindRepoRoot();
    private static readonly string DocsApiPath = Path.Combine(RepoRoot, "docs", "api");
    private static readonly string SrcPath = Path.Combine(RepoRoot, "src");

    // xUnit does not capture Console.WriteLine; diagnostic output must go
    // through the framework-provided sink to show up in test results.
    private readonly Xunit.Abstractions.ITestOutputHelper _output;

    public ContractSpecDiffTests(Xunit.Abstractions.ITestOutputHelper output)
    {
        _output = output;
    }

    /// <summary>
    /// Verifies that all OpenAPI spec files can be parsed without errors.
    /// </summary>
    [Fact]
    public void AllOpenApiSpecs_CanBeParsed()
    {
        // Arrange
        var specFiles = GetOpenApiSpecFiles();

        // Act & Assert
        foreach (var specFile in specFiles)
        {
            var action = () => OpenApiParser.Parse(specFile);
            action.Should().NotThrow($"Spec file {Path.GetFileName(specFile)} should parse successfully");
        }
    }

    /// <summary>
    /// Verifies that OpenAPI specs contain required metadata (title, version).
    /// </summary>
    [Fact]
    public void AllOpenApiSpecs_HaveRequiredMetadata()
    {
        // Arrange
        var specFiles = GetOpenApiSpecFiles();

        // Act & Assert
        foreach (var specFile in specFiles)
        {
            var spec = OpenApiParser.Parse(specFile);

            spec.Title.Should().NotBeNullOrWhiteSpace(
                $"Spec {Path.GetFileName(specFile)} should have a title");

            spec.Version.Should().NotBeNullOrWhiteSpace(
                $"Spec {Path.GetFileName(specFile)} should have a version");
        }
    }

    /// <summary>
    /// Verifies that discovered endpoints have proper response codes defined.
    /// </summary>
    [Fact]
    public void DiscoveredEndpoints_HaveResponseCodes()
    {
        // Arrange
        var endpoints = DiscoverAllEndpoints();

        // Act & Assert
        foreach (var endpoint in endpoints)
        {
            // Skip if no Produces() annotations found (may be using different pattern)
            if (endpoint.ResponseCodes.IsEmpty)
            {
                continue;
            }

            endpoint.ResponseCodes.Should().Contain(
                c => c >= 200 && c < 300,
                $"Endpoint {endpoint.Method} {endpoint.Path} should have a success response code");
        }
    }

    /// <summary>
    /// Verifies endpoint discovery works correctly on known endpoint files.
    /// </summary>
    [Fact]
    public void EndpointDiscovery_FindsKnownEndpoints()
    {
        // Arrange
        var scannerEndpointsFile = Path.Combine(
            SrcPath, "Scanner", "StellaOps.Scanner.WebService", "Endpoints", "ScanEndpoints.cs");

        if (!File.Exists(scannerEndpointsFile))
        {
            // Skip if file doesn't exist (may be different structure)
            return;
        }

        // Act
        var endpoints = EndpointDiscoverer.DiscoverFromFile(scannerEndpointsFile);

        // Assert
        endpoints.Should().NotBeEmpty("Scanner endpoints file should contain discoverable endpoints");
        endpoints.Should().Contain(e => e.Method == "POST", "Should find POST endpoint for scan submission");
        endpoints.Should().Contain(e => e.Method == "GET", "Should find GET endpoints for status");
    }

    /// <summary>
    /// Verifies that OpenAPI specs have unique operation IDs across all specs.
    /// </summary>
    [Fact]
    public void OpenApiSpecs_HaveUniqueOperationIds()
    {
        // Arrange
        var specs = LoadAllSpecs();
        var allOperationIds = new Dictionary<string, List<string>>();

        // Act
        foreach (var spec in specs)
        {
            foreach (var endpoint in spec.Endpoints.Where(e => !string.IsNullOrEmpty(e.OperationId)))
            {
                var opId = endpoint.OperationId!;
                // Single dictionary lookup (TryGetValue) instead of
                // ContainsKey + indexer.
                if (!allOperationIds.TryGetValue(opId, out var locations))
                {
                    locations = [];
                    allOperationIds[opId] = locations;
                }
                locations.Add($"{spec.SourcePath}: {endpoint.Method} {endpoint.Path}");
            }
        }

        // Assert
        var duplicates = allOperationIds.Where(kv => kv.Value.Count > 1).ToList();
        duplicates.Should().BeEmpty(
            $"Operation IDs should be unique. Duplicates found: {string.Join(", ", duplicates.Select(d => d.Key))}");
    }

    /// <summary>
    /// Generates a spec-diff report (informational, does not fail).
    /// This test produces a report showing differences between specs and code.
    /// </summary>
    [Fact]
    public void SpecDiff_GeneratesReport()
    {
        // Arrange
        var specs = LoadAllSpecs();
        var endpoints = DiscoverAllEndpoints();

        // Act
        var result = SpecDiffComparer.Compare(specs, endpoints);
        var report = SpecDiffComparer.GenerateReport(result);

        // Assert - just verify report was generated
        report.Should().NotBeNullOrEmpty("Spec diff report should be generated");

        // Output for visibility. Console.WriteLine is not captured by xUnit;
        // ITestOutputHelper is the supported mechanism.
        _output.WriteLine(report);
    }

    #region Helper Methods

    /// <summary>
    /// Walks up from the current directory until a .git directory or CLAUDE.md
    /// marker is found; falls back to a fixed relative path from the test
    /// output directory.
    /// </summary>
    private static string FindRepoRoot()
    {
        var current = Directory.GetCurrentDirectory();

        while (current is not null)
        {
            if (Directory.Exists(Path.Combine(current, ".git")) ||
                File.Exists(Path.Combine(current, "CLAUDE.md")))
            {
                return current;
            }
            current = Directory.GetParent(current)?.FullName;
        }

        // Fallback: assume we're in a test output directory
        return Path.GetFullPath(Path.Combine(Directory.GetCurrentDirectory(), "..", "..", "..", "..", ".."));
    }

    /// <summary>
    /// Lists YAML files under docs/api that look like OpenAPI specs
    /// (path contains "openapi" or filename ends with "-api.yaml").
    /// </summary>
    private static ImmutableArray<string> GetOpenApiSpecFiles()
    {
        if (!Directory.Exists(DocsApiPath))
        {
            return [];
        }

        return Directory.GetFiles(DocsApiPath, "*.yaml", SearchOption.AllDirectories)
            .Where(f => f.Contains("openapi", StringComparison.OrdinalIgnoreCase) ||
                        Path.GetFileName(f).EndsWith("-api.yaml", StringComparison.OrdinalIgnoreCase))
            .ToImmutableArray();
    }

    /// <summary>
    /// Parses every discovered spec file, silently skipping ones that fail to
    /// parse (parse failures are covered by AllOpenApiSpecs_CanBeParsed).
    /// </summary>
    private static ImmutableArray<OpenApiSpec> LoadAllSpecs()
    {
        var specFiles = GetOpenApiSpecFiles();
        var specs = new List<OpenApiSpec>();

        foreach (var file in specFiles)
        {
            try
            {
                specs.Add(OpenApiParser.Parse(file));
            }
            catch
            {
                // Skip unparseable specs
            }
        }

        return [.. specs];
    }

    /// <summary>
    /// Discovers endpoints from every *WebService* directory under src.
    /// </summary>
    private static ImmutableArray<DiscoveredEndpoint> DiscoverAllEndpoints()
    {
        var allEndpoints = new List<DiscoveredEndpoint>();

        // Discover from all WebService directories
        var webServiceDirs = Directory.GetDirectories(SrcPath, "*WebService*", SearchOption.AllDirectories);

        foreach (var dir in webServiceDirs)
        {
            allEndpoints.AddRange(EndpointDiscoverer.DiscoverFromDirectory(dir));
        }

        return [.. allEndpoints];
    }

    #endregion
}
|
||||
@@ -0,0 +1,183 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// EndpointDiscoverer.cs
|
||||
// Discovers API endpoints from source code using static analysis
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Globalization;
|
||||
using System.Text.RegularExpressions;
|
||||
|
||||
namespace StellaOps.Architecture.Contracts.Tests.Infrastructure;
|
||||
|
||||
/// <summary>
/// Discovers API endpoints from C# source files using regex-based static analysis.
/// Scans minimal-API registrations (MapGet/MapPost/MapPut/MapDelete/MapPatch) together
/// with their chained configuration calls (WithName, Produces, RequireAuthorization)
/// and any enclosing MapGroup prefix.
/// </summary>
public static partial class EndpointDiscoverer
{
    // Captures the HTTP verb (group 1) and the route template (group 2) of a Map* call.
    [GeneratedRegex(@"\.Map(Get|Post|Put|Delete|Patch)\s*\(\s*""([^""]+)""", RegexOptions.Compiled)]
    private static partial Regex MapMethodRegex();

    // Captures the endpoint name passed to .WithName("...").
    [GeneratedRegex(@"\.WithName\s*\(\s*""([^""]+)""", RegexOptions.Compiled)]
    private static partial Regex WithNameRegex();

    // Captures the numeric status code from .Produces(...). Matches both the
    // StatusCodes.Status200OK constant form and the bare numeric overload
    // .Produces(200), which the previous pattern missed.
    [GeneratedRegex(@"\.Produces(?:<[^>]+>)?\s*\(\s*(?:(?:StatusCodes\.)?Status)?(\d+)", RegexOptions.Compiled)]
    private static partial Regex ProducesRegex();

    // Detects a .RequireAuthorization( call anywhere in the endpoint's fluent chain.
    [GeneratedRegex(@"\.RequireAuthorization\s*\(", RegexOptions.Compiled)]
    private static partial Regex RequireAuthRegex();

    // Captures the prefix passed to .MapGroup("...").
    [GeneratedRegex(@"\.MapGroup\s*\(\s*""([^""]+)""", RegexOptions.Compiled)]
    private static partial Regex MapGroupRegex();

    /// <summary>
    /// Discovers endpoints from all C# files in a directory.
    /// Only files whose path contains "Endpoints" or "Controllers" are scanned.
    /// Returns an empty array when the directory does not exist.
    /// </summary>
    public static ImmutableArray<DiscoveredEndpoint> DiscoverFromDirectory(string directory)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(directory);

        if (!Directory.Exists(directory))
        {
            return [];
        }

        var endpoints = new List<DiscoveredEndpoint>();
        var csFiles = Directory.GetFiles(directory, "*.cs", SearchOption.AllDirectories)
            .Where(f => f.Contains("Endpoints", StringComparison.OrdinalIgnoreCase) ||
                        f.Contains("Controllers", StringComparison.OrdinalIgnoreCase));

        foreach (var file in csFiles)
        {
            endpoints.AddRange(DiscoverFromFile(file));
        }

        return [.. endpoints];
    }

    /// <summary>
    /// Discovers endpoints from a single C# source file.
    /// Returns an empty array when the file does not exist.
    /// </summary>
    public static ImmutableArray<DiscoveredEndpoint> DiscoverFromFile(string filePath)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(filePath);

        if (!File.Exists(filePath))
        {
            return [];
        }

        var content = File.ReadAllText(filePath);
        var lines = content.Split('\n');
        var endpoints = new List<DiscoveredEndpoint>();

        // Heuristic: the first MapGroup prefix found anywhere in the file is applied
        // to every endpoint in it. NOTE(review): a file containing multiple groups
        // would attribute all endpoints to the first prefix — acceptable for this
        // static analysis, but confirm if multi-group files appear.
        var baseGroupMatch = MapGroupRegex().Match(content);
        var baseGroup = baseGroupMatch.Success ? baseGroupMatch.Groups[1].Value : "";

        for (var lineNum = 0; lineNum < lines.Length; lineNum++)
        {
            var line = lines[lineNum];
            var mapMatch = MapMethodRegex().Match(line);

            if (!mapMatch.Success)
            {
                continue;
            }

            var method = mapMatch.Groups[1].Value.ToUpperInvariant();
            var path = mapMatch.Groups[2].Value;

            // Look ahead for chained methods (Produces, WithName, RequireAuthorization).
            var chainedContent = GetChainedContent(lines, lineNum);

            var endpointName = ExtractWithName(chainedContent);
            var responseCodes = ExtractProducesCodes(chainedContent);
            var requiresAuth = RequireAuthRegex().IsMatch(chainedContent);

            // Combine the MapGroup prefix with the endpoint's own route template.
            var fullPath = CombinePaths(baseGroup, path);

            endpoints.Add(new DiscoveredEndpoint
            {
                Method = method,
                Path = fullPath,
                EndpointName = endpointName,
                SourceFile = filePath,
                SourceLine = lineNum + 1, // 1-based, as editors display it
                ResponseCodes = [.. responseCodes.OrderBy(c => c)],
                RequiresAuth = requiresAuth
            });
        }

        return [.. endpoints];
    }

    /// <summary>
    /// Collects up to 15 lines of the fluent chain that starts at <paramref name="startLine"/>
    /// so the chained configuration calls can be inspected as one string.
    /// </summary>
    private static string GetChainedContent(string[] lines, int startLine)
    {
        var builder = new System.Text.StringBuilder();
        var openParens = 0;
        var started = false;

        for (var i = startLine; i < Math.Min(startLine + 15, lines.Length); i++)
        {
            var line = lines[i];
            builder.AppendLine(line);

            foreach (var ch in line)
            {
                if (ch == '(') { openParens++; started = true; }
                if (ch == ')') { openParens--; }
            }

            // Stop if we hit a semicolon at end of statement.
            if (started && line.TrimEnd().EndsWith(';'))
            {
                break;
            }

            // Stop if we've closed all parens and the next line starts another Map call.
            if (started && openParens <= 0 && MapMethodRegex().IsMatch(lines[Math.Min(i + 1, lines.Length - 1)]))
            {
                break;
            }
        }

        return builder.ToString();
    }

    // Returns the .WithName value, or null when the chain does not set one.
    private static string? ExtractWithName(string content)
    {
        var match = WithNameRegex().Match(content);
        return match.Success ? match.Groups[1].Value : null;
    }

    // Extracts every status code declared via .Produces(...) in the chain.
    private static List<int> ExtractProducesCodes(string content)
    {
        var codes = new List<int>();
        var matches = ProducesRegex().Matches(content);

        foreach (Match match in matches)
        {
            if (int.TryParse(match.Groups[1].Value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var code))
            {
                codes.Add(code);
            }
        }

        return codes;
    }

    /// <summary>
    /// Joins a MapGroup prefix and a route template with exactly one separating slash.
    /// </summary>
    private static string CombinePaths(string basePath, string path)
    {
        if (string.IsNullOrEmpty(basePath))
        {
            return path;
        }

        if (string.IsNullOrEmpty(path))
        {
            // A group-root endpoint (e.g. MapGet("")) previously yielded "/api/",
            // which never matches a spec path; return the bare prefix instead.
            return basePath;
        }

        return $"{basePath.TrimEnd('/')}/{path.TrimStart('/')}";
    }
}
|
||||
@@ -0,0 +1,149 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// OpenApiParser.cs
|
||||
// Parses OpenAPI YAML specifications into structured format
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Globalization;
|
||||
using YamlDotNet.RepresentationModel;
|
||||
|
||||
namespace StellaOps.Architecture.Contracts.Tests.Infrastructure;
|
||||
|
||||
/// <summary>
/// Parses OpenAPI 3.x YAML specifications into <see cref="OpenApiSpec"/> instances.
/// </summary>
public static class OpenApiParser
{
    // Operation keys recognized inside a path item; everything else (parameters,
    // summary, servers, ...) is ignored.
    private static readonly string[] HttpMethods = ["get", "post", "put", "delete", "patch", "options", "head"];

    /// <summary>
    /// Parses an OpenAPI specification from a YAML file.
    /// </summary>
    /// <exception cref="InvalidOperationException">The file contains no YAML documents.</exception>
    public static OpenApiSpec Parse(string filePath)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(filePath);

        var content = File.ReadAllText(filePath);
        return ParseContent(content, filePath);
    }

    /// <summary>
    /// Parses an OpenAPI specification from YAML content.
    /// </summary>
    /// <param name="yamlContent">Raw YAML text.</param>
    /// <param name="sourcePath">Origin path, recorded on the result and used in errors.</param>
    /// <exception cref="InvalidOperationException">The content contains no YAML documents.</exception>
    public static OpenApiSpec ParseContent(string yamlContent, string sourcePath)
    {
        var yaml = new YamlStream();
        using var reader = new StringReader(yamlContent);
        yaml.Load(reader);

        if (yaml.Documents.Count == 0)
        {
            throw new InvalidOperationException($"No YAML documents found in {sourcePath}");
        }

        var root = (YamlMappingNode)yaml.Documents[0].RootNode;

        var info = GetMappingNode(root, "info");
        var title = GetScalarValue(info, "title") ?? "Untitled";
        var version = GetScalarValue(info, "version") ?? "1.0.0";

        // Per the OpenAPI specification, a document-level "security" requirement
        // applies to every operation unless the operation declares its own
        // "security" (which overrides it, including an explicit empty list to
        // opt out). The previous implementation ignored the document level, so
        // globally-secured APIs were reported as unauthenticated.
        var globalRequiresAuth = false;
        if (root.Children.TryGetValue(new YamlScalarNode("security"), out var globalSecurity))
        {
            globalRequiresAuth = globalSecurity is YamlSequenceNode { Children.Count: > 0 };
        }

        var endpoints = new List<OpenApiEndpoint>();

        if (root.Children.TryGetValue(new YamlScalarNode("paths"), out var pathsNode) &&
            pathsNode is YamlMappingNode paths)
        {
            foreach (var pathEntry in paths.Children)
            {
                var path = ((YamlScalarNode)pathEntry.Key).Value ?? "";
                if (pathEntry.Value is YamlMappingNode pathItem)
                {
                    endpoints.AddRange(ParsePathItem(path, pathItem, globalRequiresAuth));
                }
            }
        }

        return new OpenApiSpec
        {
            SourcePath = sourcePath,
            Title = title,
            Version = version,
            Endpoints = [.. endpoints]
        };
    }

    // Yields one OpenApiEndpoint per recognized HTTP-method operation under a path item.
    private static IEnumerable<OpenApiEndpoint> ParsePathItem(string path, YamlMappingNode pathItem, bool globalRequiresAuth)
    {
        foreach (var methodEntry in pathItem.Children)
        {
            var methodName = ((YamlScalarNode)methodEntry.Key).Value?.ToLowerInvariant() ?? "";

            if (!HttpMethods.Contains(methodName))
            {
                continue;
            }

            if (methodEntry.Value is not YamlMappingNode operation)
            {
                continue;
            }

            var operationId = GetScalarValue(operation, "operationId");
            var summary = GetScalarValue(operation, "summary");

            // Collect numeric response codes; non-numeric keys like "default" are skipped.
            var responseCodes = new List<int>();
            if (operation.Children.TryGetValue(new YamlScalarNode("responses"), out var responsesNode) &&
                responsesNode is YamlMappingNode responses)
            {
                foreach (var responseEntry in responses.Children)
                {
                    var codeStr = ((YamlScalarNode)responseEntry.Key).Value ?? "";
                    if (int.TryParse(codeStr, NumberStyles.Integer, CultureInfo.InvariantCulture, out var code))
                    {
                        responseCodes.Add(code);
                    }
                }
            }

            // Operation-level security overrides the document default; absent -> inherit.
            var requiresAuth = globalRequiresAuth;
            if (operation.Children.TryGetValue(new YamlScalarNode("security"), out var securityNode))
            {
                requiresAuth = securityNode is YamlSequenceNode { Children.Count: > 0 };
            }

            yield return new OpenApiEndpoint
            {
                Method = methodName.ToUpperInvariant(),
                Path = path,
                OperationId = operationId,
                Summary = summary,
                ResponseCodes = [.. responseCodes.OrderBy(c => c)],
                RequiresAuth = requiresAuth
            };
        }
    }

    // Returns the mapping child under `key`, or null when absent or not a mapping.
    private static YamlMappingNode? GetMappingNode(YamlMappingNode parent, string key)
    {
        if (parent.Children.TryGetValue(new YamlScalarNode(key), out var node) &&
            node is YamlMappingNode mapping)
        {
            return mapping;
        }
        return null;
    }

    // Returns the scalar child under `key`, or null when the parent or key is absent.
    private static string? GetScalarValue(YamlMappingNode? parent, string key)
    {
        if (parent is null)
        {
            return null;
        }

        if (parent.Children.TryGetValue(new YamlScalarNode(key), out var node) &&
            node is YamlScalarNode scalar)
        {
            return scalar.Value;
        }
        return null;
    }
}
|
||||
@@ -0,0 +1,193 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// OpenApiSpec.cs
|
||||
// Contract spec-diff infrastructure for comparing OpenAPI specs against code
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Architecture.Contracts.Tests.Infrastructure;
|
||||
|
||||
/// <summary>
/// A parsed OpenAPI specification: document identity plus every operation it declares.
/// </summary>
public sealed record OpenApiSpec
{
    /// <summary>File path the specification was parsed from.</summary>
    public required string SourcePath { get; init; }

    /// <summary>API title taken from the document's info section.</summary>
    public required string Title { get; init; }

    /// <summary>API version taken from the document's info section.</summary>
    public required string Version { get; init; }

    /// <summary>Every endpoint definition found under the paths section.</summary>
    public required ImmutableArray<OpenApiEndpoint> Endpoints { get; init; }
}
|
||||
|
||||
/// <summary>
/// A single operation (HTTP method + path) declared by an OpenAPI document.
/// </summary>
public sealed record OpenApiEndpoint
{
    // Replaces every "{param}" segment with "{*}" so differently-named route
    // parameters still compare equal.
    private static readonly System.Text.RegularExpressions.Regex RouteParameterPattern =
        new(@"\{[^}]+\}");

    /// <summary>HTTP method (GET, POST, PUT, DELETE, PATCH).</summary>
    public required string Method { get; init; }

    /// <summary>Path template, e.g. /api/v1/scans/{scanId}.</summary>
    public required string Path { get; init; }

    /// <summary>Operation ID when the spec declares one.</summary>
    public string? OperationId { get; init; }

    /// <summary>Human-readable summary when the spec declares one.</summary>
    public string? Summary { get; init; }

    /// <summary>Documented response status codes.</summary>
    public required ImmutableArray<int> ResponseCodes { get; init; }

    /// <summary>Whether the operation requires authentication.</summary>
    public bool RequiresAuth { get; init; }

    /// <summary>
    /// Produces a normalized "METHOD /path/{*}" key so spec and code endpoints can be
    /// matched regardless of route parameter names.
    /// </summary>
    public string ToComparisonKey()
        => $"{Method.ToUpperInvariant()} {RouteParameterPattern.Replace(Path, "{*}")}";
}
|
||||
|
||||
/// <summary>
/// An endpoint found in source code via static analysis.
/// </summary>
public sealed record DiscoveredEndpoint
{
    // Same normalization as OpenApiEndpoint: "{param}" segments become "{*}".
    private static readonly System.Text.RegularExpressions.Regex RouteParameterPattern =
        new(@"\{[^}]+\}");

    /// <summary>HTTP method (GET, POST, PUT, DELETE, PATCH).</summary>
    public required string Method { get; init; }

    /// <summary>Route pattern as written in code.</summary>
    public required string Path { get; init; }

    /// <summary>Endpoint name supplied via WithName(), when present.</summary>
    public string? EndpointName { get; init; }

    /// <summary>Source file the endpoint registration lives in.</summary>
    public required string SourceFile { get; init; }

    /// <summary>1-based line number of the registration in <see cref="SourceFile"/>.</summary>
    public int SourceLine { get; init; }

    /// <summary>Status codes declared via Produces().</summary>
    public required ImmutableArray<int> ResponseCodes { get; init; }

    /// <summary>Whether the registration calls RequireAuthorization().</summary>
    public bool RequiresAuth { get; init; }

    /// <summary>
    /// Produces a normalized "METHOD /path/{*}" key so code endpoints can be matched
    /// against spec endpoints regardless of route parameter names.
    /// </summary>
    public string ToComparisonKey()
        => $"{Method.ToUpperInvariant()} {RouteParameterPattern.Replace(Path, "{*}")}";
}
|
||||
|
||||
/// <summary>
/// Outcome of diffing spec-declared endpoints against endpoints discovered in code.
/// </summary>
public sealed record SpecDiffResult
{
    /// <summary>Endpoints documented in a spec with no matching code (orphaned specs).</summary>
    public required ImmutableArray<OpenApiEndpoint> OrphanedSpecs { get; init; }

    /// <summary>Endpoints implemented in code with no matching spec entry.</summary>
    public required ImmutableArray<DiscoveredEndpoint> UndocumentedEndpoints { get; init; }

    /// <summary>Matched endpoints whose response codes disagree.</summary>
    public required ImmutableArray<ResponseCodeMismatch> ResponseMismatches { get; init; }

    /// <summary>Matched endpoints whose auth requirements disagree.</summary>
    public required ImmutableArray<AuthMismatch> AuthMismatches { get; init; }

    /// <summary>True when no discrepancy of any kind was found.</summary>
    public bool IsClean =>
        OrphanedSpecs.Length == 0
        && UndocumentedEndpoints.Length == 0
        && ResponseMismatches.Length == 0
        && AuthMismatches.Length == 0;
}
|
||||
|
||||
/// <summary>
/// A matched endpoint whose documented response codes differ from those declared in code.
/// </summary>
public sealed record ResponseCodeMismatch
{
    /// <summary>Normalized "METHOD /path" key identifying the endpoint.</summary>
    public required string EndpointKey { get; init; }

    /// <summary>Response codes declared by the OpenAPI specification.</summary>
    public required ImmutableArray<int> SpecCodes { get; init; }

    /// <summary>Response codes discovered in the implementation.</summary>
    public required ImmutableArray<int> CodeCodes { get; init; }
}
|
||||
|
||||
/// <summary>
/// A matched endpoint whose auth requirement differs between spec and code.
/// </summary>
public sealed record AuthMismatch
{
    /// <summary>Normalized "METHOD /path" key identifying the endpoint.</summary>
    public required string EndpointKey { get; init; }

    /// <summary>Whether the OpenAPI specification declares a security requirement.</summary>
    public required bool SpecRequiresAuth { get; init; }

    /// <summary>Whether the implementation calls RequireAuthorization().</summary>
    public required bool CodeRequiresAuth { get; init; }
}
|
||||
@@ -0,0 +1,156 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// SpecDiffComparer.cs
|
||||
// Compares OpenAPI specifications against discovered endpoints
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
|
||||
namespace StellaOps.Architecture.Contracts.Tests.Infrastructure;
|
||||
|
||||
/// <summary>
/// Compares OpenAPI specifications against discovered code endpoints.
/// </summary>
public static class SpecDiffComparer
{
    /// <summary>
    /// Diffs spec-declared endpoints against endpoints discovered from code, reporting
    /// orphaned specs, undocumented endpoints, and response-code / auth mismatches.
    /// </summary>
    /// <param name="specs">Parsed OpenAPI documents.</param>
    /// <param name="discovered">Endpoints found by static analysis of source code.</param>
    public static SpecDiffResult Compare(
        IEnumerable<OpenApiSpec> specs,
        IEnumerable<DiscoveredEndpoint> discovered)
    {
        // Build lookups keyed by the normalized "METHOD /path/{*}" form.
        // ToDictionary would throw ArgumentException when two specs document the
        // same endpoint, or when two routes normalize to the same key (e.g.
        // /x/{id} and /x/{name}); duplicates are tolerated instead — first wins.
        var specEndpoints = new Dictionary<string, OpenApiEndpoint>(StringComparer.Ordinal);
        foreach (var endpoint in specs.SelectMany(s => s.Endpoints))
        {
            specEndpoints.TryAdd(endpoint.ToComparisonKey(), endpoint);
        }

        var codeEndpoints = new Dictionary<string, DiscoveredEndpoint>(StringComparer.Ordinal);
        foreach (var endpoint in discovered)
        {
            codeEndpoints.TryAdd(endpoint.ToComparisonKey(), endpoint);
        }

        var orphanedSpecs = new List<OpenApiEndpoint>();
        var undocumented = new List<DiscoveredEndpoint>();
        var responseMismatches = new List<ResponseCodeMismatch>();
        var authMismatches = new List<AuthMismatch>();

        // Orphaned specs: documented but not implemented.
        foreach (var (key, specEndpoint) in specEndpoints)
        {
            if (!codeEndpoints.ContainsKey(key))
            {
                orphanedSpecs.Add(specEndpoint);
            }
        }

        // Undocumented endpoints: implemented but not documented.
        foreach (var (key, codeEndpoint) in codeEndpoints)
        {
            if (!specEndpoints.ContainsKey(key))
            {
                undocumented.Add(codeEndpoint);
            }
        }

        // Contract mismatches on endpoints present on both sides.
        foreach (var (key, specEndpoint) in specEndpoints)
        {
            if (!codeEndpoints.TryGetValue(key, out var codeEndpoint))
            {
                continue;
            }

            // Both sides are produced sorted ascending, so SequenceEqual is an
            // order-insensitive set comparison in practice.
            if (!specEndpoint.ResponseCodes.SequenceEqual(codeEndpoint.ResponseCodes))
            {
                responseMismatches.Add(new ResponseCodeMismatch
                {
                    EndpointKey = key,
                    SpecCodes = specEndpoint.ResponseCodes,
                    CodeCodes = codeEndpoint.ResponseCodes
                });
            }

            if (specEndpoint.RequiresAuth != codeEndpoint.RequiresAuth)
            {
                authMismatches.Add(new AuthMismatch
                {
                    EndpointKey = key,
                    SpecRequiresAuth = specEndpoint.RequiresAuth,
                    CodeRequiresAuth = codeEndpoint.RequiresAuth
                });
            }
        }

        return new SpecDiffResult
        {
            OrphanedSpecs = [.. orphanedSpecs],
            UndocumentedEndpoints = [.. undocumented],
            ResponseMismatches = [.. responseMismatches],
            AuthMismatches = [.. authMismatches]
        };
    }

    /// <summary>
    /// Generates a human-readable Markdown diff report for a comparison result.
    /// </summary>
    public static string GenerateReport(SpecDiffResult result)
    {
        var builder = new System.Text.StringBuilder();
        builder.AppendLine("# Spec-Diff Report");
        builder.AppendLine();

        if (result.IsClean)
        {
            builder.AppendLine("No differences found. Specs and code are in sync.");
            return builder.ToString();
        }

        if (result.OrphanedSpecs.Length > 0)
        {
            builder.AppendLine("## Orphaned Specs (in spec but not in code)");
            builder.AppendLine();
            foreach (var endpoint in result.OrphanedSpecs)
            {
                builder.AppendLine($"- {endpoint.Method} {endpoint.Path}");
            }
            builder.AppendLine();
        }

        if (result.UndocumentedEndpoints.Length > 0)
        {
            builder.AppendLine("## Undocumented Endpoints (in code but not in spec)");
            builder.AppendLine();
            foreach (var endpoint in result.UndocumentedEndpoints)
            {
                builder.AppendLine($"- {endpoint.Method} {endpoint.Path} ({endpoint.SourceFile}:{endpoint.SourceLine})");
            }
            builder.AppendLine();
        }

        if (result.ResponseMismatches.Length > 0)
        {
            builder.AppendLine("## Response Code Mismatches");
            builder.AppendLine();
            foreach (var mismatch in result.ResponseMismatches)
            {
                builder.AppendLine($"- {mismatch.EndpointKey}");
                builder.AppendLine($"  Spec: [{string.Join(", ", mismatch.SpecCodes)}]");
                builder.AppendLine($"  Code: [{string.Join(", ", mismatch.CodeCodes)}]");
            }
            builder.AppendLine();
        }

        if (result.AuthMismatches.Length > 0)
        {
            builder.AppendLine("## Auth Requirement Mismatches");
            builder.AppendLine();
            foreach (var mismatch in result.AuthMismatches)
            {
                builder.AppendLine($"- {mismatch.EndpointKey}");
                builder.AppendLine($"  Spec requires auth: {mismatch.SpecRequiresAuth}");
                builder.AppendLine($"  Code requires auth: {mismatch.CodeRequiresAuth}");
            }
        }

        return builder.ToString();
    }
}
|
||||
@@ -0,0 +1,312 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// SchemaComplianceTests.cs
|
||||
// Tests that verify database schemas comply with specification documents
|
||||
// Sprint: Testing Enhancement Advisory - Phase 1.1
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.RegularExpressions;
|
||||
using FluentAssertions;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Architecture.Contracts.Tests;
|
||||
|
||||
/// <summary>
/// Schema compliance tests.
/// Verifies that database migrations align with specification documents.
/// Migrations are located by scanning every "Migrations" directory under src
/// for *.sql files; spec documents live under docs/db.
/// </summary>
[Trait("Category", TestCategories.Architecture)]
[Trait("Category", TestCategories.Contract)]
public partial class SchemaComplianceTests
{
    // Paths are resolved once per test run, anchored at the repository root.
    private static readonly string RepoRoot = FindRepoRoot();
    private static readonly string DocsDbPath = Path.Combine(RepoRoot, "docs", "db");
    private static readonly string SrcPath = Path.Combine(RepoRoot, "src");

    // Group 1: optional "schema." qualifier (trailing dot included); group 2: table name.
    [GeneratedRegex(@"CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?([a-z_]+\.)?([a-z_]+)", RegexOptions.IgnoreCase | RegexOptions.Compiled)]
    private static partial Regex CreateTableRegex();

    [GeneratedRegex(@"ALTER\s+TABLE\s+(?:IF\s+EXISTS\s+)?([a-z_]+\.)?([a-z_]+)", RegexOptions.IgnoreCase | RegexOptions.Compiled)]
    private static partial Regex AlterTableRegex();

    // NOTE(review): not referenced by any test in this class yet — presumably kept
    // for a future index-naming check; confirm intent before removing.
    [GeneratedRegex(@"CREATE\s+(?:UNIQUE\s+)?INDEX\s+(?:IF\s+NOT\s+EXISTS\s+)?([a-z_]+)", RegexOptions.IgnoreCase | RegexOptions.Compiled)]
    private static partial Regex CreateIndexRegex();

    /// <summary>
    /// Verifies that database specification document exists.
    /// </summary>
    [Fact]
    public void DatabaseSpecification_Exists()
    {
        // Arrange
        var specPath = Path.Combine(DocsDbPath, "SPECIFICATION.md");

        // Assert
        File.Exists(specPath).Should().BeTrue(
            "Database specification document should exist at docs/db/SPECIFICATION.md");
    }

    /// <summary>
    /// Verifies that all migration files follow naming convention.
    /// Convention checked: a leading numeric version prefix and a .sql extension.
    /// </summary>
    [Fact]
    public void MigrationFiles_FollowNamingConvention()
    {
        // Arrange
        var migrationFiles = GetMigrationFiles();

        // Act & Assert
        foreach (var file in migrationFiles)
        {
            var fileName = Path.GetFileName(file);

            // Should start with a number (version/sequence)
            fileName.Should().MatchRegex(@"^\d+",
                $"Migration file {fileName} should start with a version number");

            // Should have .sql extension
            Path.GetExtension(file).Should().Be(".sql",
                $"Migration file {fileName} should have .sql extension");
        }
    }

    /// <summary>
    /// Verifies that migrations use schema-qualified table names.
    /// Both CREATE TABLE and ALTER TABLE statements are checked; an unqualified
    /// table name is recorded as a violation and the test fails if any are found.
    /// </summary>
    [Fact]
    public void Migrations_UseSchemaQualifiedTableNames()
    {
        // Arrange
        var migrationFiles = GetMigrationFiles();
        var violations = new List<string>();

        // Act
        foreach (var file in migrationFiles)
        {
            var content = File.ReadAllText(file);
            var fileName = Path.GetFileName(file);

            // Check CREATE TABLE statements
            var createMatches = CreateTableRegex().Matches(content);
            foreach (Match match in createMatches)
            {
                var schema = match.Groups[1].Value;
                var table = match.Groups[2].Value;

                if (string.IsNullOrEmpty(schema))
                {
                    violations.Add($"{fileName}: CREATE TABLE {table} missing schema qualifier");
                }
            }

            // Check ALTER TABLE statements
            var alterMatches = AlterTableRegex().Matches(content);
            foreach (Match match in alterMatches)
            {
                var schema = match.Groups[1].Value;
                var table = match.Groups[2].Value;

                if (string.IsNullOrEmpty(schema))
                {
                    violations.Add($"{fileName}: ALTER TABLE {table} missing schema qualifier");
                }
            }
        }

        // Assert — only the first 10 violations are echoed to keep the message readable.
        violations.Should().BeEmpty(
            $"All table operations should use schema-qualified names. Violations: {string.Join(", ", violations.Take(10))}");
    }

    /// <summary>
    /// Verifies that migration files are idempotent (use IF NOT EXISTS / IF EXISTS).
    /// This test never fails: findings are only written to the console.
    /// </summary>
    [Fact]
    public void Migrations_AreIdempotent()
    {
        // Arrange
        var migrationFiles = GetMigrationFiles();
        var nonIdempotent = new List<string>();

        // Act
        foreach (var file in migrationFiles)
        {
            var content = File.ReadAllText(file);
            var fileName = Path.GetFileName(file);

            // Check CREATE TABLE without IF NOT EXISTS
            if (Regex.IsMatch(content, @"CREATE\s+TABLE\s+(?!IF\s+NOT\s+EXISTS)", RegexOptions.IgnoreCase))
            {
                nonIdempotent.Add($"{fileName}: CREATE TABLE without IF NOT EXISTS");
            }

            // Check CREATE INDEX without IF NOT EXISTS
            if (Regex.IsMatch(content, @"CREATE\s+(?:UNIQUE\s+)?INDEX\s+(?!IF\s+NOT\s+EXISTS)", RegexOptions.IgnoreCase))
            {
                nonIdempotent.Add($"{fileName}: CREATE INDEX without IF NOT EXISTS");
            }
        }

        // Assert - this is a warning, not a hard failure
        // Some migrations may intentionally not be idempotent
        // NOTE(review): Console output is usually not captured by xUnit runners;
        // consider ITestOutputHelper if these warnings should be visible — confirm.
        if (nonIdempotent.Any())
        {
            Console.WriteLine("Warning: Non-idempotent migrations found:");
            foreach (var item in nonIdempotent)
            {
                Console.WriteLine($"  - {item}");
            }
        }
    }

    /// <summary>
    /// Verifies that schema documentation exists for all schemas used in migrations.
    /// Schemas are collected from CREATE SCHEMA statements and from schema-qualified
    /// CREATE TABLE statements; documentation is expected as docs/db/schemas/&lt;schema&gt;.md.
    /// The only hard assertion is that at least one schema was found at all.
    /// </summary>
    [Fact]
    public void SchemaDocumentation_ExistsForAllSchemas()
    {
        // Arrange
        var migrationFiles = GetMigrationFiles();
        var schemasUsed = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        var schemasDocumented = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

        // Find schemas used in migrations
        foreach (var file in migrationFiles)
        {
            var content = File.ReadAllText(file);

            // Extract schema names from CREATE SCHEMA
            var createSchemaMatches = Regex.Matches(content, @"CREATE\s+SCHEMA\s+(?:IF\s+NOT\s+EXISTS\s+)?([a-z_]+)", RegexOptions.IgnoreCase);
            foreach (Match match in createSchemaMatches)
            {
                schemasUsed.Add(match.Groups[1].Value);
            }

            // Extract schema names from table operations
            var tableMatches = CreateTableRegex().Matches(content);
            foreach (Match match in tableMatches)
            {
                // Group 1 captures "schema." including the dot — strip it.
                var schema = match.Groups[1].Value.TrimEnd('.');
                if (!string.IsNullOrEmpty(schema))
                {
                    schemasUsed.Add(schema);
                }
            }
        }

        // Find documented schemas
        var schemaDocsPath = Path.Combine(DocsDbPath, "schemas");
        if (Directory.Exists(schemaDocsPath))
        {
            var docFiles = Directory.GetFiles(schemaDocsPath, "*.md", SearchOption.TopDirectoryOnly);
            foreach (var docFile in docFiles)
            {
                var schemaName = Path.GetFileNameWithoutExtension(docFile);
                schemasDocumented.Add(schemaName);
            }
        }

        // Assert
        var undocumented = schemasUsed.Except(schemasDocumented).ToList();

        // Output for visibility
        if (undocumented.Any())
        {
            Console.WriteLine($"Schemas without documentation: {string.Join(", ", undocumented)}");
        }

        // Soft assertion - warn but don't fail
        schemasUsed.Should().NotBeEmpty("Should find schemas used in migrations");
    }

    /// <summary>
    /// Verifies that migrations have corresponding down/rollback scripts where appropriate.
    /// Purely informational: missing down scripts are listed on the console but never fail
    /// the test.
    /// </summary>
    [Fact]
    public void Migrations_HaveDownScripts()
    {
        // Arrange — anything already named *_down / *_rollback is itself a down script.
        var migrationFiles = GetMigrationFiles();
        var upScripts = migrationFiles.Where(f =>
            !Path.GetFileName(f).Contains("_down", StringComparison.OrdinalIgnoreCase) &&
            !Path.GetFileName(f).Contains("_rollback", StringComparison.OrdinalIgnoreCase)).ToList();

        var missingDownScripts = new List<string>();

        // Act
        foreach (var upScript in upScripts)
        {
            var fileName = Path.GetFileName(upScript);
            var directory = Path.GetDirectoryName(upScript)!;

            // Look for corresponding down script
            var baseName = Path.GetFileNameWithoutExtension(fileName);
            var expectedDownNames = new[]
            {
                $"{baseName}_down.sql",
                $"{baseName}_rollback.sql",
                $"{baseName}.down.sql"
            };

            var hasDownScript = expectedDownNames.Any(downName =>
                File.Exists(Path.Combine(directory, downName)));

            if (!hasDownScript)
            {
                missingDownScripts.Add(fileName);
            }
        }

        // Assert - informational
        if (missingDownScripts.Any())
        {
            Console.WriteLine($"Migrations without down scripts ({missingDownScripts.Count}):");
            foreach (var script in missingDownScripts.Take(10))
            {
                Console.WriteLine($"  - {script}");
            }
        }
    }

    #region Helper Methods

    // Walks up from the current directory looking for a .git directory or CLAUDE.md
    // marker file; falls back to a fixed five-levels-up path (the expected depth of
    // the test assembly's output directory relative to the repo root).
    private static string FindRepoRoot()
    {
        var current = Directory.GetCurrentDirectory();

        while (current is not null)
        {
            if (Directory.Exists(Path.Combine(current, ".git")) ||
                File.Exists(Path.Combine(current, "CLAUDE.md")))
            {
                return current;
            }
            current = Directory.GetParent(current)?.FullName;
        }

        return Path.GetFullPath(Path.Combine(Directory.GetCurrentDirectory(), "..", "..", "..", "..", ".."));
    }

    // Collects every *.sql file under every "Migrations" directory below src.
    private static ImmutableArray<string> GetMigrationFiles()
    {
        var migrationDirs = new List<string>();

        // Find all Migrations directories
        if (Directory.Exists(SrcPath))
        {
            migrationDirs.AddRange(
                Directory.GetDirectories(SrcPath, "Migrations", SearchOption.AllDirectories));
        }

        var allMigrations = new List<string>();
        foreach (var dir in migrationDirs)
        {
            allMigrations.AddRange(Directory.GetFiles(dir, "*.sql", SearchOption.AllDirectories));
        }

        return [.. allMigrations];
    }

    #endregion
}
|
||||
@@ -0,0 +1,36 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
<LangVersion>preview</LangVersion>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- Test packages inherited from Directory.Build.props -->
|
||||
<PackageReference Include="FluentAssertions" />
|
||||
<PackageReference Include="YamlDotNet" />
|
||||
<PackageReference Include="coverlet.collector">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
|
||||
</ItemGroup>
|
||||
|
||||
<!-- Include OpenAPI specs as content files for testing -->
|
||||
<ItemGroup>
|
||||
<None Include="..\..\..\..\docs\api\**\*.yaml" Link="Specs\%(RecursiveDir)%(Filename)%(Extension)">
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</None>
|
||||
<None Include="..\..\..\..\docs\contracts\**\*.yaml" Link="Contracts\%(RecursiveDir)%(Filename)%(Extension)">
|
||||
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
|
||||
</None>
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -1,7 +1,7 @@
|
||||
# Architecture Tests Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -0,0 +1,381 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ControlPlaneOutageTests.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 3.3
|
||||
// Description: Tests for control-plane behavior during full outage scenarios
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.Chaos.ControlPlane.Tests.Fixtures;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Chaos.ControlPlane.Tests;
|
||||
|
||||
/// <summary>
/// Chaos tests for control-plane behavior when one or more core services
/// suffer a full outage. Exercises graceful degradation (cached tokens,
/// durable job queues), data integrity across outages, and clean recovery.
/// </summary>
[Trait("Category", TestCategories.Chaos)]
[Trait("Category", "ControlPlane")]
public class ControlPlaneOutageTests : IClassFixture<ControlPlaneClusterFixture>
{
    private readonly ControlPlaneClusterFixture _cluster;

    public ControlPlaneOutageTests(ControlPlaneClusterFixture fixture)
    {
        _cluster = fixture;

        // Each test starts from a fully healthy cluster with an empty log.
        _cluster.FailureInjector.RecoverAll();
        _cluster.ClearEventLog();
    }

    #region Authority Outage Tests

    [Fact]
    public async Task Authority_Outage_CachedTokens_AllowTemporaryAccess()
    {
        // Arrange - issue two tokens while Authority is still reachable.
        var firstToken = await _cluster.IssueTokenAsync("user-1", TimeSpan.FromHours(1));
        var secondToken = await _cluster.IssueTokenAsync("user-2", TimeSpan.FromHours(1));

        firstToken.Success.Should().BeTrue();
        secondToken.Success.Should().BeTrue();

        // Act - take Authority down, then validate both tokens.
        _cluster.FailureInjector.InjectFullOutage("authority");

        var firstValidation = await _cluster.ValidateTokenAsync(firstToken.Token!.TokenId);
        var secondValidation = await _cluster.ValidateTokenAsync(secondToken.Token!.TokenId);

        // Assert - cached copies keep both tokens usable during the outage.
        firstValidation.Success.Should().BeTrue();
        firstValidation.IsValid.Should().BeTrue();
        firstValidation.Source.Should().Be(ValidationSource.Cache);
        firstValidation.Warning.Should().Contain("Authority unavailable");

        secondValidation.Success.Should().BeTrue();
        secondValidation.IsValid.Should().BeTrue();
        secondValidation.Source.Should().Be(ValidationSource.Cache);
    }

    [Fact]
    public async Task Authority_Outage_NewTokens_CannotBeIssued()
    {
        // Arrange - Authority is completely offline.
        _cluster.FailureInjector.InjectFullOutage("authority");

        // Act
        var issueResult = await _cluster.IssueTokenAsync("new-user", TimeSpan.FromHours(1));

        // Assert - issuance requires a live Authority, so it must fail.
        issueResult.Success.Should().BeFalse();
        issueResult.Token.Should().BeNull();
        issueResult.Error.Should().NotBeNullOrEmpty();
    }

    [Fact]
    public async Task Authority_Outage_UncachedTokens_FailValidation()
    {
        // Arrange - outage begins before any token was ever cached.
        _cluster.FailureInjector.InjectFullOutage("authority");

        // Act
        var validation = await _cluster.ValidateTokenAsync("nonexistent-token");

        // Assert - no cache entry and no Authority means a graceful failure.
        validation.Success.Should().BeFalse();
        validation.IsValid.Should().BeFalse();
        validation.Source.Should().Be(ValidationSource.None);
        validation.Error.Should().Contain("not in cache");
    }

    [Fact]
    public async Task Authority_Recovery_TokenValidation_UsesAuthorityAgain()
    {
        // Arrange - issue a token, then lose Authority.
        var issued = await _cluster.IssueTokenAsync("user-1", TimeSpan.FromHours(1));
        issued.Success.Should().BeTrue();

        _cluster.FailureInjector.InjectFullOutage("authority");
        var duringOutage = await _cluster.ValidateTokenAsync(issued.Token!.TokenId);
        duringOutage.Source.Should().Be(ValidationSource.Cache);

        // Act - bring Authority back and validate again.
        _cluster.FailureInjector.RecoverService("authority");
        var afterRecovery = await _cluster.ValidateTokenAsync(issued.Token!.TokenId);

        // Assert - validation switches back to the live Authority path.
        afterRecovery.Success.Should().BeTrue();
        afterRecovery.IsValid.Should().BeTrue();
        afterRecovery.Source.Should().Be(ValidationSource.Authority);
        afterRecovery.Warning.Should().BeNull();
    }

    #endregion

    #region Scheduler Outage Tests

    [Fact]
    public async Task Scheduler_Outage_PendingJobs_NotLost()
    {
        // Arrange - two jobs enqueued while the scheduler is healthy.
        var healthyJob1 = await _cluster.EnqueueJobAsync("scan", "image:tag1");
        var healthyJob2 = await _cluster.EnqueueJobAsync("scan", "image:tag2");

        healthyJob1.Success.Should().BeTrue();
        healthyJob2.Success.Should().BeTrue();

        _cluster.FailureInjector.InjectFullOutage("scheduler");

        // Act - keep enqueuing while the scheduler is down.
        var outageJob1 = await _cluster.EnqueueJobAsync("scan", "image:tag3");
        var outageJob2 = await _cluster.EnqueueJobAsync("policy", "check-1");

        // Assert - jobs are persisted locally; only the notification fails.
        outageJob1.Success.Should().BeTrue();
        outageJob1.Warning.Should().Contain("scheduler notification failed");

        outageJob2.Success.Should().BeTrue();
        outageJob2.Warning.Should().Contain("scheduler notification failed");

        _cluster.GetPendingJobCount().Should().Be(4);
    }

    [Fact]
    public async Task Scheduler_Recovery_PendingJobs_ProcessedInOrder()
    {
        // Arrange - enqueue three jobs into a dead scheduler.
        _cluster.FailureInjector.InjectFullOutage("scheduler");

        await _cluster.EnqueueJobAsync("scan", "image:tag1");
        await _cluster.EnqueueJobAsync("scan", "image:tag2");
        await _cluster.EnqueueJobAsync("scan", "image:tag3");

        _cluster.GetPendingJobCount().Should().Be(3);

        // Act - recover and drain the backlog.
        _cluster.FailureInjector.RecoverService("scheduler");
        var drained = await _cluster.ProcessPendingJobsAsync();

        // Assert - every queued job was picked up.
        drained.Should().Be(3);
        _cluster.GetPendingJobCount().Should().Be(0);

        _cluster.GetAllJobs()
            .Should().AllSatisfy(job => job.Status.Should().Be(JobStatus.Processing));
    }

    [Fact]
    public async Task Scheduler_IntermittentOutage_NoJobDuplication()
    {
        // Healthy enqueue - no warning expected.
        var beforeOutage = await _cluster.EnqueueJobAsync("scan", "image:tag1");
        beforeOutage.Success.Should().BeTrue();
        beforeOutage.Warning.Should().BeNull();

        // Enqueue during the outage - persisted, but with a warning.
        _cluster.FailureInjector.InjectFullOutage("scheduler");
        var duringOutage = await _cluster.EnqueueJobAsync("scan", "image:tag2");
        duringOutage.Warning.Should().NotBeNull();

        // Enqueue after recovery - clean again.
        _cluster.FailureInjector.RecoverService("scheduler");
        var afterRecovery = await _cluster.EnqueueJobAsync("scan", "image:tag3");
        afterRecovery.Warning.Should().BeNull();

        // Assert - every job id appears exactly once.
        var jobs = _cluster.GetAllJobs();
        jobs.Select(job => job.JobId).Distinct().Should().HaveCount(jobs.Count);
    }

    #endregion

    #region Full Control-Plane Outage Tests

    [Fact]
    public async Task FullControlPlane_Outage_DataPersistence_Verified()
    {
        // Arrange - persist three entries while everything is healthy.
        var write1 = await _cluster.PersistDataAsync("config-1", "value-1");
        var write2 = await _cluster.PersistDataAsync("config-2", "value-2");
        var write3 = await _cluster.PersistDataAsync("config-3", "value-3");

        write1.Success.Should().BeTrue();
        write2.Success.Should().BeTrue();
        write3.Success.Should().BeTrue();

        // Act - full outage followed by recovery.
        _cluster.InjectFullControlPlaneOutage();
        _cluster.RecoverControlPlane();

        // Assert - every entry survived the outage intact.
        var read1 = await _cluster.ReadDataAsync("config-1");
        var read2 = await _cluster.ReadDataAsync("config-2");
        var read3 = await _cluster.ReadDataAsync("config-3");

        read1.Success.Should().BeTrue();
        read1.Data.Should().NotBeNull();
        read1.Data!.Value.Should().Be("value-1");

        read2.Success.Should().BeTrue();
        read2.Data!.Value.Should().Be("value-2");

        read3.Success.Should().BeTrue();
        read3.Data!.Value.Should().Be("value-3");
    }

    [Fact]
    public async Task FullControlPlane_Outage_AllOperations_FailGracefully()
    {
        // Arrange
        _cluster.InjectFullControlPlaneOutage();

        // Act - exercise every major operation against the dead cluster.
        var issue = await _cluster.IssueTokenAsync("user", TimeSpan.FromHours(1));
        var persist = await _cluster.PersistDataAsync("key", "value");
        var read = await _cluster.ReadDataAsync("any-key");

        // Assert - each call reports failure instead of throwing.
        issue.Success.Should().BeFalse();
        issue.Error.Should().NotBeNullOrEmpty();

        persist.Success.Should().BeFalse();
        persist.Error.Should().NotBeNullOrEmpty();

        read.Success.Should().BeFalse();
        read.Error.Should().NotBeNullOrEmpty();
    }

    [Fact]
    public async Task FullControlPlane_Recovery_SystemResumes_NormalOperation()
    {
        // Arrange - normal operation, then a full outage.
        await _cluster.IssueTokenAsync("user-before", TimeSpan.FromHours(1));
        await _cluster.PersistDataAsync("data-before", "value-before");

        _cluster.InjectFullControlPlaneOutage();

        // Confirm the outage is effective.
        var duringOutage = await _cluster.IssueTokenAsync("user-during", TimeSpan.FromHours(1));
        duringOutage.Success.Should().BeFalse();

        // Act
        _cluster.RecoverControlPlane();

        // Assert - issuance, persistence and reads all work again.
        var tokenAfter = await _cluster.IssueTokenAsync("user-after", TimeSpan.FromHours(1));
        tokenAfter.Success.Should().BeTrue();

        var dataAfter = await _cluster.PersistDataAsync("data-after", "value-after");
        dataAfter.Success.Should().BeTrue();

        var readBefore = await _cluster.ReadDataAsync("data-before");
        readBefore.Success.Should().BeTrue();
        readBefore.Data!.Value.Should().Be("value-before");

        var readAfter = await _cluster.ReadDataAsync("data-after");
        readAfter.Success.Should().BeTrue();
        readAfter.Data!.Value.Should().Be("value-after");
    }

    [Fact]
    public async Task FullControlPlane_Outage_EventLogCaptures_AllFailures()
    {
        // Arrange
        _cluster.ClearEventLog();
        _cluster.InjectFullControlPlaneOutage();

        // Act - provoke failures across two services.
        await _cluster.IssueTokenAsync("user", TimeSpan.FromHours(1));
        await _cluster.PersistDataAsync("key", "value");

        // Assert - the log recorded at least one failure event.
        var events = _cluster.EventLog;
        events.Should().NotBeEmpty();

        events.Where(e => e.EventType.Contains("Failed", StringComparison.OrdinalIgnoreCase))
            .Should().NotBeEmpty();
    }

    #endregion

    #region Data Integrity Tests

    [Fact]
    public async Task Database_Outage_WritesFail_ReadsFail()
    {
        // Arrange - only the database is down.
        _cluster.FailureInjector.InjectFullOutage("database");

        // Act
        var write = await _cluster.PersistDataAsync("test-key", "test-value");
        var read = await _cluster.ReadDataAsync("test-key");

        // Assert - both directions fail while storage is unreachable.
        write.Success.Should().BeFalse();
        read.Success.Should().BeFalse();
    }

    [Fact]
    public async Task Database_Recovery_DataVersioning_Correct()
    {
        // Arrange - two successful writes bump the version to 2.
        var firstWrite = await _cluster.PersistDataAsync("versioned-key", "value-v1");
        firstWrite.Success.Should().BeTrue();
        firstWrite.Data!.Version.Should().Be(1);

        var secondWrite = await _cluster.PersistDataAsync("versioned-key", "value-v2");
        secondWrite.Success.Should().BeTrue();
        secondWrite.Data!.Version.Should().Be(2);

        // A write during the outage must not consume a version number.
        _cluster.FailureInjector.InjectFullOutage("database");
        var failedWrite = await _cluster.PersistDataAsync("versioned-key", "value-v3");
        failedWrite.Success.Should().BeFalse();

        _cluster.FailureInjector.RecoverService("database");

        // Act - write again after recovery.
        var thirdWrite = await _cluster.PersistDataAsync("versioned-key", "value-v3");

        // Assert - versioning resumes from the last durable write.
        thirdWrite.Success.Should().BeTrue();
        thirdWrite.Data!.Version.Should().Be(3);
        thirdWrite.Data.Value.Should().Be("value-v3");
    }

    [Fact]
    public async Task MixedOutage_SomeServices_OthersHealthy()
    {
        // Arrange - Authority and database down; scheduler stays up.
        _cluster.FailureInjector.InjectFullOutage("authority");
        _cluster.FailureInjector.InjectFullOutage("database");

        // Act / Assert - scheduler work continues.
        var job = await _cluster.EnqueueJobAsync("scan", "test-image");
        job.Success.Should().BeTrue();
        _cluster.GetPendingJobCount().Should().BeGreaterThanOrEqualTo(1);

        // ...while Authority- and database-backed operations fail.
        var token = await _cluster.IssueTokenAsync("user", TimeSpan.FromHours(1));
        token.Success.Should().BeFalse();

        var persisted = await _cluster.PersistDataAsync("key", "value");
        persisted.Success.Should().BeFalse();
    }

    #endregion
}
|
||||
@@ -0,0 +1,525 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ControlPlaneClusterFixture.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 3.3
|
||||
// Description: Test fixture for simulating control-plane cluster with outage scenarios
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Concurrent;
|
||||
using System.Collections.Immutable;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Chaos.ControlPlane.Tests.Fixtures;
|
||||
|
||||
/// <summary>
/// In-memory simulation of a small control-plane cluster (authority,
/// scheduler, gateway, backend, database). Chaos tests use the attached
/// <see cref="ServiceFailureInjector"/> to take services down and observe
/// degradation and recovery behavior.
/// </summary>
public sealed class ControlPlaneClusterFixture : IAsyncLifetime
{
    private readonly ServiceFailureInjector _injector = new();
    private readonly ConcurrentDictionary<string, MockServiceState> _services = new();
    private readonly ConcurrentQueue<ClusterEvent> _events = new();
    private readonly ConcurrentDictionary<string, CachedToken> _tokens = new();
    private readonly ConcurrentQueue<PendingJob> _jobs = new();
    private readonly ConcurrentDictionary<string, PersistedData> _data = new();
    private long _eventSequence;

    /// <summary>Failure injector used to take individual services up or down.</summary>
    public ServiceFailureInjector FailureInjector => _injector;

    /// <summary>Snapshot of every event recorded so far.</summary>
    public IReadOnlyCollection<ClusterEvent> EventLog => _events.ToImmutableArray();

    /// <summary>Snapshot of the token cache keyed by token id.</summary>
    public IReadOnlyDictionary<string, CachedToken> TokenCache => _tokens.ToImmutableDictionary();

    /// <summary>Snapshot of the durable job queue.</summary>
    public IReadOnlyCollection<PendingJob> PendingJobs => _jobs.ToImmutableArray();

    /// <summary>Snapshot of the persisted key/value store.</summary>
    public IReadOnlyDictionary<string, PersistedData> DataStore => _data.ToImmutableDictionary();

    /// <inheritdoc />
    public ValueTask InitializeAsync()
    {
        // Stand up the default control-plane topology.
        RegisterService("authority", ServiceType.Authority);
        RegisterService("scheduler", ServiceType.Scheduler);
        RegisterService("gateway", ServiceType.Gateway);
        RegisterService("backend", ServiceType.Backend);
        RegisterService("database", ServiceType.Database);

        return ValueTask.CompletedTask;
    }

    /// <inheritdoc />
    public ValueTask DisposeAsync()
    {
        _injector.RecoverAll();
        _services.Clear();
        return ValueTask.CompletedTask;
    }

    /// <summary>
    /// Registers a service with the cluster and with the failure injector.
    /// </summary>
    public void RegisterService(string serviceId, ServiceType serviceType)
    {
        _injector.RegisterService(serviceId);
        _services[serviceId] = new MockServiceState
        {
            ServiceId = serviceId,
            ServiceType = serviceType,
            IsHealthy = true,
            // NOTE(review): this fixture uses DateTimeOffset.UtcNow and
            // Guid.NewGuid (below), which the repo's working agreement says to
            // avoid in fixtures - confirm whether a fixed TimeProvider is wanted.
            StartedAt = DateTimeOffset.UtcNow
        };
    }

    /// <summary>
    /// Issues a token via the Authority service. Fails when Authority is
    /// unreachable; successful tokens are also placed in the local cache.
    /// </summary>
    public async Task<TokenResult> IssueTokenAsync(string userId, TimeSpan validity, CancellationToken ct = default)
    {
        var authority = await _injector.SimulateRequestAsync("authority", ct);
        if (!authority.Success)
        {
            LogEvent("authority", "TokenIssueFailed", $"User: {userId}, Error: {authority.Error}");
            return new TokenResult { Success = false, Error = authority.Error };
        }

        var token = new CachedToken
        {
            TokenId = Guid.NewGuid().ToString("N"),
            UserId = userId,
            IssuedAt = DateTimeOffset.UtcNow,
            ExpiresAt = DateTimeOffset.UtcNow.Add(validity),
            IsValid = true
        };

        _tokens[token.TokenId] = token;
        LogEvent("authority", "TokenIssued", $"TokenId: {token.TokenId}, User: {userId}");

        return new TokenResult { Success = true, Token = token };
    }

    /// <summary>
    /// Validates a token. Prefers the live Authority; when Authority is down,
    /// falls back to the local cache and reports the degraded source.
    /// </summary>
    public async Task<ValidationResult> ValidateTokenAsync(string tokenId, CancellationToken ct = default)
    {
        var authority = await _injector.SimulateRequestAsync("authority", ct);

        if (authority.Success)
        {
            // Authority reachable - validate against the authoritative record.
            if (!_tokens.TryGetValue(tokenId, out var token))
            {
                return new ValidationResult
                {
                    Success = true,
                    IsValid = false,
                    Source = ValidationSource.Authority,
                    Error = "Token not found"
                };
            }

            var stillValid = token.IsValid && token.ExpiresAt > DateTimeOffset.UtcNow;
            LogEvent("authority", "TokenValidated", $"TokenId: {tokenId}, Valid: {stillValid}");
            return new ValidationResult
            {
                Success = true,
                IsValid = stillValid,
                Source = ValidationSource.Authority
            };
        }

        // Degraded path: Authority unreachable, consult the local cache.
        if (_tokens.TryGetValue(tokenId, out var cached))
        {
            var stillValid = cached.IsValid && cached.ExpiresAt > DateTimeOffset.UtcNow;
            LogEvent("authority", "TokenValidatedFromCache", $"TokenId: {tokenId}, Valid: {stillValid}");
            return new ValidationResult
            {
                Success = true,
                IsValid = stillValid,
                Source = ValidationSource.Cache,
                Warning = "Authority unavailable, used cached token"
            };
        }

        LogEvent("authority", "TokenValidationFailed", $"TokenId: {tokenId}, Authority unavailable, no cache");
        return new ValidationResult
        {
            Success = false,
            IsValid = false,
            Source = ValidationSource.None,
            Error = "Authority unavailable and token not in cache"
        };
    }

    /// <summary>
    /// Enqueues a job. The job is always written to the durable local queue
    /// first; a scheduler outage only downgrades the result to a warning.
    /// </summary>
    public async Task<JobResult> EnqueueJobAsync(string jobType, string payload, CancellationToken ct = default)
    {
        var job = new PendingJob
        {
            JobId = Guid.NewGuid().ToString("N"),
            JobType = jobType,
            Payload = payload,
            EnqueuedAt = DateTimeOffset.UtcNow,
            Status = JobStatus.Pending
        };

        // Durability first: persist locally before talking to the scheduler.
        _jobs.Enqueue(job);
        LogEvent("scheduler", "JobEnqueued", $"JobId: {job.JobId}, Type: {jobType}");

        var scheduler = await _injector.SimulateRequestAsync("scheduler", ct);
        if (!scheduler.Success)
        {
            LogEvent("scheduler", "SchedulerNotifyFailed", $"JobId: {job.JobId}, Error: {scheduler.Error}");
            return new JobResult
            {
                Success = true,
                Job = job,
                Warning = "Job persisted but scheduler notification failed"
            };
        }

        return new JobResult { Success = true, Job = job };
    }

    /// <summary>
    /// Transitions every pending job to Processing, provided the scheduler is
    /// reachable. Returns the number of jobs transitioned (0 during an outage).
    /// </summary>
    public async Task<int> ProcessPendingJobsAsync(CancellationToken ct = default)
    {
        var scheduler = await _injector.SimulateRequestAsync("scheduler", ct);
        if (!scheduler.Success)
        {
            LogEvent("scheduler", "ProcessingFailed", $"Error: {scheduler.Error}");
            return 0;
        }

        var processed = 0;
        foreach (var job in _jobs.ToArray())
        {
            if (job.Status != JobStatus.Pending)
            {
                continue;
            }

            job.Status = JobStatus.Processing;
            job.ProcessedAt = DateTimeOffset.UtcNow;
            processed++;
            LogEvent("scheduler", "JobProcessed", $"JobId: {job.JobId}");
        }

        return processed;
    }

    /// <summary>
    /// Writes a key/value pair to the simulated database, incrementing the
    /// version when the key already exists. Fails while the database is down;
    /// failed writes do not consume version numbers.
    /// </summary>
    public async Task<PersistResult> PersistDataAsync(string key, string value, CancellationToken ct = default)
    {
        var database = await _injector.SimulateRequestAsync("database", ct);
        if (!database.Success)
        {
            LogEvent("database", "PersistFailed", $"Key: {key}, Error: {database.Error}");
            return new PersistResult { Success = false, Error = database.Error };
        }

        var data = new PersistedData
        {
            Key = key,
            Value = value,
            PersistedAt = DateTimeOffset.UtcNow,
            // Version continues from the last successful write for this key.
            Version = _data.TryGetValue(key, out var previous) ? previous.Version + 1 : 1
        };

        _data[key] = data;
        LogEvent("database", "DataPersisted", $"Key: {key}, Version: {data.Version}");

        return new PersistResult { Success = true, Data = data };
    }

    /// <summary>
    /// Reads a key from the simulated database. Fails while the database is
    /// down; a missing key is reported as success with null data.
    /// </summary>
    public async Task<ReadResult> ReadDataAsync(string key, CancellationToken ct = default)
    {
        var database = await _injector.SimulateRequestAsync("database", ct);
        if (!database.Success)
        {
            LogEvent("database", "ReadFailed", $"Key: {key}, Error: {database.Error}");
            return new ReadResult { Success = false, Error = database.Error };
        }

        if (!_data.TryGetValue(key, out var data))
        {
            return new ReadResult { Success = true, Data = null };
        }

        LogEvent("database", "DataRead", $"Key: {key}, Version: {data.Version}");
        return new ReadResult { Success = true, Data = data };
    }

    /// <summary>
    /// Takes every registered service down at once.
    /// </summary>
    public void InjectFullControlPlaneOutage()
    {
        foreach (var serviceId in _services.Keys)
        {
            _injector.InjectFullOutage(serviceId);
        }

        LogEvent("cluster", "FullOutageInjected", "All services down");
    }

    /// <summary>
    /// Brings every service back to healthy.
    /// </summary>
    public void RecoverControlPlane()
    {
        _injector.RecoverAll();
        LogEvent("cluster", "ControlPlaneRecovered", "All services recovered");
    }

    /// <summary>
    /// Counts jobs still waiting to be processed.
    /// </summary>
    public int GetPendingJobCount() => _jobs.Count(job => job.Status == JobStatus.Pending);

    /// <summary>
    /// Returns a snapshot of every job ever enqueued.
    /// </summary>
    public IReadOnlyList<PendingJob> GetAllJobs() => _jobs.ToImmutableArray();

    /// <summary>
    /// True when every expected key is present in the data store.
    /// </summary>
    public bool VerifyDataIntegrity(IEnumerable<string> expectedKeys)
        => expectedKeys.All(_data.ContainsKey);

    /// <summary>
    /// Drops all recorded events.
    /// </summary>
    public void ClearEventLog()
    {
        while (_events.TryDequeue(out _))
        {
        }
    }

    // Appends a sequenced, timestamped event to the in-memory log.
    private void LogEvent(string service, string eventType, string details)
    {
        _events.Enqueue(new ClusterEvent
        {
            Sequence = Interlocked.Increment(ref _eventSequence),
            Timestamp = DateTimeOffset.UtcNow,
            Service = service,
            EventType = eventType,
            Details = details
        });
    }
}
|
||||
|
||||
/// <summary>
|
||||
/// Mock state for a service in the cluster.
|
||||
/// </summary>
|
||||
public sealed class MockServiceState
|
||||
{
|
||||
public required string ServiceId { get; init; }
|
||||
public required ServiceType ServiceType { get; init; }
|
||||
public bool IsHealthy { get; set; }
|
||||
public DateTimeOffset StartedAt { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Types of services in the control-plane.
|
||||
/// </summary>
|
||||
public enum ServiceType
|
||||
{
|
||||
Authority,
|
||||
Scheduler,
|
||||
Gateway,
|
||||
Backend,
|
||||
Database
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a cached authentication token.
|
||||
/// </summary>
|
||||
public sealed class CachedToken
|
||||
{
|
||||
public required string TokenId { get; init; }
|
||||
public required string UserId { get; init; }
|
||||
public required DateTimeOffset IssuedAt { get; init; }
|
||||
public required DateTimeOffset ExpiresAt { get; init; }
|
||||
public required bool IsValid { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of token issuance.
|
||||
/// </summary>
|
||||
public sealed record TokenResult
|
||||
{
|
||||
public required bool Success { get; init; }
|
||||
public CachedToken? Token { get; init; }
|
||||
public string? Error { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of token validation.
|
||||
/// </summary>
|
||||
public sealed record ValidationResult
|
||||
{
|
||||
public required bool Success { get; init; }
|
||||
public required bool IsValid { get; init; }
|
||||
public required ValidationSource Source { get; init; }
|
||||
public string? Error { get; init; }
|
||||
public string? Warning { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Source of token validation.
|
||||
/// </summary>
|
||||
public enum ValidationSource
|
||||
{
|
||||
None,
|
||||
Authority,
|
||||
Cache
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Represents a pending job in the scheduler queue.
|
||||
/// </summary>
|
||||
public sealed class PendingJob
|
||||
{
|
||||
public required string JobId { get; init; }
|
||||
public required string JobType { get; init; }
|
||||
public required string Payload { get; init; }
|
||||
public required DateTimeOffset EnqueuedAt { get; init; }
|
||||
public JobStatus Status { get; set; }
|
||||
public DateTimeOffset? ProcessedAt { get; set; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Job status.
|
||||
/// </summary>
|
||||
public enum JobStatus
|
||||
{
|
||||
Pending,
|
||||
Processing,
|
||||
Completed,
|
||||
Failed
|
||||
}
|
||||
|
||||
/// <summary>
/// Result of job enqueue operation.
/// </summary>
public sealed record JobResult
{
    /// <summary>True when the job was accepted.</summary>
    public required bool Success { get; init; }

    /// <summary>The enqueued job, if any.</summary>
    public PendingJob? Job { get; init; }

    /// <summary>Error description, if any.</summary>
    public string? Error { get; init; }

    /// <summary>Non-fatal warning, if any.</summary>
    public string? Warning { get; init; }
}
|
||||
|
||||
/// <summary>
/// Represents persisted data.
/// </summary>
public sealed class PersistedData
{
    /// <summary>Lookup key.</summary>
    public required string Key { get; init; }

    /// <summary>Stored value.</summary>
    public required string Value { get; init; }

    /// <summary>When the value was written.</summary>
    public required DateTimeOffset PersistedAt { get; init; }

    /// <summary>Revision counter; mutable (presumably bumped on overwrite — confirm against the fixture).</summary>
    public required int Version { get; set; }
}
|
||||
|
||||
/// <summary>
/// Result of data persistence operation.
/// </summary>
public sealed record PersistResult
{
    /// <summary>True when the write succeeded.</summary>
    public required bool Success { get; init; }

    /// <summary>The persisted record, if any.</summary>
    public PersistedData? Data { get; init; }

    /// <summary>Error description, if any.</summary>
    public string? Error { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of data read operation.
/// </summary>
public sealed record ReadResult
{
    /// <summary>True when the read succeeded.</summary>
    public required bool Success { get; init; }

    /// <summary>The record that was read, if any.</summary>
    public PersistedData? Data { get; init; }

    /// <summary>Error description, if any.</summary>
    public string? Error { get; init; }
}
|
||||
|
||||
/// <summary>
/// Represents an event in the cluster.
/// </summary>
public sealed record ClusterEvent
{
    /// <summary>Sequence number of the event.</summary>
    public required long Sequence { get; init; }

    /// <summary>When the event occurred.</summary>
    public required DateTimeOffset Timestamp { get; init; }

    /// <summary>Name of the service that emitted the event.</summary>
    public required string Service { get; init; }

    /// <summary>Event type discriminator.</summary>
    public required string EventType { get; init; }

    /// <summary>Free-form event details.</summary>
    public required string Details { get; init; }
}
|
||||
@@ -0,0 +1,273 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ServiceFailureInjector.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 3.3
|
||||
// Description: Service failure injection for control-plane chaos testing
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Concurrent;
|
||||
|
||||
namespace StellaOps.Chaos.ControlPlane.Tests.Fixtures;
|
||||
|
||||
/// <summary>
/// Injects failures into control-plane services for chaos testing.
/// Supports various failure modes: full outage, partial failures, latency injection.
/// State lives in concurrent dictionaries; the random source is seeded so
/// partial-failure sequences are reproducible across runs.
/// </summary>
public sealed class ServiceFailureInjector
{
    private readonly ConcurrentDictionary<string, ServiceState> _serviceStates = new();
    private readonly ConcurrentDictionary<string, FailureConfig> _failureConfigs = new();
    private readonly Random _random = new(42); // Deterministic for reproducibility

    /// <summary>
    /// Gets the current state of a service, registering it as Healthy on first access.
    /// </summary>
    public ServiceState GetServiceState(string serviceId)
    {
        return _serviceStates.GetOrAdd(serviceId, _ => new ServiceState
        {
            ServiceId = serviceId,
            Status = ServiceStatus.Healthy,
            LastUpdated = DateTimeOffset.UtcNow
        });
    }

    /// <summary>
    /// Registers a service for failure injection. No-op if already registered.
    /// </summary>
    public void RegisterService(string serviceId)
    {
        _serviceStates.TryAdd(serviceId, new ServiceState
        {
            ServiceId = serviceId,
            Status = ServiceStatus.Healthy,
            LastUpdated = DateTimeOffset.UtcNow
        });
    }

    /// <summary>
    /// Causes a complete service outage: every simulated request fails.
    /// </summary>
    public void InjectFullOutage(string serviceId)
    {
        // Fix: use get-or-add rather than TryGetValue so injecting into a
        // never-registered service still records a consistent Down state.
        // Previously the status stayed Healthy while requests failed.
        var state = GetServiceState(serviceId);
        state.Status = ServiceStatus.Down;
        state.LastUpdated = DateTimeOffset.UtcNow;
        state.OutageStarted = DateTimeOffset.UtcNow;

        _failureConfigs[serviceId] = new FailureConfig
        {
            ServiceId = serviceId,
            FailureMode = FailureMode.FullOutage,
            FailureRate = 1.0
        };
    }

    /// <summary>
    /// Causes partial failures: each simulated request fails with probability
    /// <paramref name="failureRate"/> (clamped to [0, 1]).
    /// </summary>
    public void InjectPartialFailure(string serviceId, double failureRate = 0.5)
    {
        var state = GetServiceState(serviceId); // get-or-add; see InjectFullOutage
        state.Status = ServiceStatus.Degraded;
        state.LastUpdated = DateTimeOffset.UtcNow;

        _failureConfigs[serviceId] = new FailureConfig
        {
            ServiceId = serviceId,
            FailureMode = FailureMode.PartialFailure,
            FailureRate = Math.Clamp(failureRate, 0.0, 1.0)
        };
    }

    /// <summary>
    /// Injects latency into service responses: requests succeed but report
    /// <paramref name="baseLatency"/> plus a random jitter up to <paramref name="jitter"/>.
    /// </summary>
    public void InjectLatency(string serviceId, TimeSpan baseLatency, TimeSpan jitter = default)
    {
        var state = GetServiceState(serviceId); // get-or-add; see InjectFullOutage
        state.Status = ServiceStatus.Slow;
        state.LastUpdated = DateTimeOffset.UtcNow;

        _failureConfigs[serviceId] = new FailureConfig
        {
            ServiceId = serviceId,
            FailureMode = FailureMode.LatencyInjection,
            BaseLatency = baseLatency,
            LatencyJitter = jitter
        };
    }

    /// <summary>
    /// Recovers a service from any injected failure, restoring Healthy status.
    /// </summary>
    public void RecoverService(string serviceId)
    {
        if (_serviceStates.TryGetValue(serviceId, out var state))
        {
            state.Status = ServiceStatus.Healthy;
            state.LastUpdated = DateTimeOffset.UtcNow;
            state.OutageStarted = null;
        }

        _failureConfigs.TryRemove(serviceId, out _);
    }

    /// <summary>
    /// Recovers all known services.
    /// </summary>
    public void RecoverAll()
    {
        foreach (var serviceId in _serviceStates.Keys)
        {
            RecoverService(serviceId);
        }
    }

    /// <summary>
    /// Simulates a request to a service, applying any configured failures.
    /// Latency is only reported in the result, not actually awaited.
    /// </summary>
    /// <returns>A successful result, or a failed result when an injected failure applies.</returns>
    public Task<ServiceRequestResult> SimulateRequestAsync(
        string serviceId,
        CancellationToken ct = default)
    {
        // Fix: the method performs no awaits, so the former `async` modifier
        // produced CS1998 — a build error under TreatWarningsAsErrors. It now
        // returns completed tasks synchronously, and honors the previously
        // ignored cancellation token.
        ct.ThrowIfCancellationRequested();

        // Ensure the service is tracked even if never explicitly registered
        // (preserves the original call's get-or-add side effect).
        _ = GetServiceState(serviceId);

        if (!_failureConfigs.TryGetValue(serviceId, out var config))
        {
            // No failure configured, request succeeds.
            return Task.FromResult(new ServiceRequestResult
            {
                ServiceId = serviceId,
                Success = true,
                Latency = TimeSpan.Zero
            });
        }

        switch (config.FailureMode)
        {
            case FailureMode.FullOutage:
                return Task.FromResult(new ServiceRequestResult
                {
                    ServiceId = serviceId,
                    Success = false,
                    Error = $"Service {serviceId} is down (full outage)",
                    Latency = TimeSpan.Zero
                });

            case FailureMode.PartialFailure:
                // Single draw from the seeded RNG keeps failure sequences reproducible.
                var shouldFail = _random.NextDouble() < config.FailureRate;
                return Task.FromResult(new ServiceRequestResult
                {
                    ServiceId = serviceId,
                    Success = !shouldFail,
                    Error = shouldFail ? $"Service {serviceId} request failed (partial failure)" : null,
                    Latency = TimeSpan.Zero
                });

            case FailureMode.LatencyInjection:
                var jitterMs = _random.NextDouble() * config.LatencyJitter.TotalMilliseconds;
                var totalLatency = config.BaseLatency + TimeSpan.FromMilliseconds(jitterMs);

                // Simulate latency (reported only; a real test would actually delay).
                return Task.FromResult(new ServiceRequestResult
                {
                    ServiceId = serviceId,
                    Success = true,
                    Latency = totalLatency
                });

            default:
                return Task.FromResult(new ServiceRequestResult
                {
                    ServiceId = serviceId,
                    Success = true,
                    Latency = TimeSpan.Zero
                });
        }
    }

    /// <summary>
    /// Gets all services currently in outage (status Down).
    /// </summary>
    public IReadOnlyList<string> GetServicesInOutage()
    {
        return _serviceStates
            .Where(kvp => kvp.Value.Status == ServiceStatus.Down)
            .Select(kvp => kvp.Key)
            .ToList();
    }

    /// <summary>
    /// Gets all services currently degraded (status Degraded or Slow).
    /// </summary>
    public IReadOnlyList<string> GetDegradedServices()
    {
        return _serviceStates
            .Where(kvp => kvp.Value.Status is ServiceStatus.Degraded or ServiceStatus.Slow)
            .Select(kvp => kvp.Key)
            .ToList();
    }
}
|
||||
|
||||
/// <summary>
/// State of a service for chaos testing.
/// </summary>
public sealed class ServiceState
{
    /// <summary>Service identifier.</summary>
    public required string ServiceId { get; init; }

    /// <summary>Current health status; mutated by the failure injector.</summary>
    public ServiceStatus Status { get; set; }

    /// <summary>When the state last changed.</summary>
    public DateTimeOffset LastUpdated { get; set; }

    /// <summary>Start of the current full outage, or null when not in outage.</summary>
    public DateTimeOffset? OutageStarted { get; set; }
}
|
||||
|
||||
/// <summary>
/// Service status levels.
/// </summary>
public enum ServiceStatus
{
    /// <summary>Operating normally.</summary>
    Healthy,

    /// <summary>Partially failing (some requests fail).</summary>
    Degraded,

    /// <summary>Responding, but with injected latency.</summary>
    Slow,

    /// <summary>Full outage; all requests fail.</summary>
    Down
}
|
||||
|
||||
/// <summary>
/// Configuration for failure injection.
/// </summary>
public sealed class FailureConfig
{
    /// <summary>Service this configuration applies to.</summary>
    public required string ServiceId { get; init; }

    /// <summary>Which failure mode is active.</summary>
    public FailureMode FailureMode { get; init; }

    /// <summary>Probability in [0, 1] that a request fails (partial-failure mode).</summary>
    public double FailureRate { get; init; }

    /// <summary>Fixed latency added to requests (latency-injection mode).</summary>
    public TimeSpan BaseLatency { get; init; }

    /// <summary>Upper bound of additional random latency (latency-injection mode).</summary>
    public TimeSpan LatencyJitter { get; init; }
}
|
||||
|
||||
/// <summary>
/// Types of failure modes.
/// </summary>
public enum FailureMode
{
    /// <summary>No failure injected.</summary>
    None,

    /// <summary>Every request fails.</summary>
    FullOutage,

    /// <summary>Requests fail with a configured probability.</summary>
    PartialFailure,

    /// <summary>Requests succeed but report added latency.</summary>
    LatencyInjection
}
|
||||
|
||||
/// <summary>
/// Result of a simulated service request.
/// </summary>
public sealed record ServiceRequestResult
{
    /// <summary>Service the request targeted.</summary>
    public required string ServiceId { get; init; }

    /// <summary>Whether the simulated request succeeded.</summary>
    public required bool Success { get; init; }

    /// <summary>Failure description when the request failed, if any.</summary>
    public string? Error { get; init; }

    /// <summary>Simulated latency reported for the request (not actually awaited).</summary>
    public TimeSpan Latency { get; init; }
}
|
||||
@@ -0,0 +1,406 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// PartialOutageTests.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 3.3
|
||||
// Description: Tests for control-plane behavior during partial outages
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.Chaos.ControlPlane.Tests.Fixtures;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Chaos.ControlPlane.Tests;
|
||||
|
||||
/// <summary>
/// Tests for control-plane behavior during partial outage scenarios.
/// Validates graceful degradation under latency injection and partial failures.
/// </summary>
[Trait("Category", TestCategories.Chaos)]
[Trait("Category", "ControlPlane")]
[Trait("Category", "PartialOutage")]
public class PartialOutageTests : IClassFixture<ControlPlaneClusterFixture>
{
    private readonly ControlPlaneClusterFixture _fixture;

    // xUnit runs the constructor before each test: reset injected failures and
    // the event log so tests do not leak state through the shared class fixture.
    public PartialOutageTests(ControlPlaneClusterFixture fixture)
    {
        _fixture = fixture;
        _fixture.FailureInjector.RecoverAll();
        _fixture.ClearEventLog();
    }

    #region Partial Failure Rate Tests

    /// <summary>With a 50% failure rate, some token issuances succeed and some fail.</summary>
    [Fact]
    public async Task Authority_50PercentFailure_SomeTokensIssued()
    {
        // Arrange - 50% failure rate
        _fixture.FailureInjector.InjectPartialFailure("authority", 0.5);

        // Act - Try to issue multiple tokens
        var results = new List<TokenResult>();
        for (var i = 0; i < 20; i++)
        {
            results.Add(await _fixture.IssueTokenAsync($"user-{i}", TimeSpan.FromHours(1)));
        }

        // Assert - Roughly half should succeed (with some variance due to randomness)
        var successCount = results.Count(r => r.Success);
        var failureCount = results.Count(r => !r.Success);

        // Allow reasonable variance (30-70% success due to random seeding)
        successCount.Should().BeGreaterThan(3);
        failureCount.Should().BeGreaterThan(3);
    }

    /// <summary>With a 25% failure rate, the majority of database writes succeed.</summary>
    [Fact]
    public async Task Database_25PercentFailure_MostWritesSucceed()
    {
        // Arrange - 25% failure rate
        _fixture.FailureInjector.InjectPartialFailure("database", 0.25);

        // Act - Try multiple writes
        var results = new List<PersistResult>();
        for (var i = 0; i < 20; i++)
        {
            results.Add(await _fixture.PersistDataAsync($"key-{i}", $"value-{i}"));
        }

        // Assert - Most should succeed
        var successCount = results.Count(r => r.Success);
        successCount.Should().BeGreaterThan(10); // At least half should succeed
    }

    /// <summary>Jobs persist locally even when the scheduler fails most requests.</summary>
    [Fact]
    public async Task Scheduler_HighFailureRate_JobsStillPersisted()
    {
        // Arrange - 80% failure rate
        _fixture.FailureInjector.InjectPartialFailure("scheduler", 0.8);

        // Act - Enqueue jobs
        var results = new List<JobResult>();
        for (var i = 0; i < 10; i++)
        {
            results.Add(await _fixture.EnqueueJobAsync("scan", $"image-{i}"));
        }

        // Assert - All jobs should be persisted locally (just notification may fail)
        results.Should().AllSatisfy(r => r.Success.Should().BeTrue());

        // Jobs should all be in the pending queue
        _fixture.GetPendingJobCount().Should().Be(10);
    }

    /// <summary>Retrying through a 50% failure rate eventually yields a token.</summary>
    [Fact]
    public async Task PartialFailure_RetrySucceeds_Eventually()
    {
        // Arrange - 50% failure rate
        _fixture.FailureInjector.InjectPartialFailure("authority", 0.5);

        // Act - Keep trying until success (max 10 attempts)
        TokenResult? successResult = null;
        for (var attempt = 0; attempt < 10; attempt++)
        {
            var result = await _fixture.IssueTokenAsync("retry-user", TimeSpan.FromHours(1));
            if (result.Success)
            {
                successResult = result;
                break;
            }
        }

        // Assert - Should eventually succeed
        successResult.Should().NotBeNull();
        successResult!.Success.Should().BeTrue();
        successResult.Token.Should().NotBeNull();
    }

    #endregion

    #region Latency Injection Tests

    /// <summary>Operations still complete under high injected latency.</summary>
    [Fact]
    public async Task Authority_HighLatency_OperationsComplete()
    {
        // Arrange - 500ms base latency
        _fixture.FailureInjector.InjectLatency(
            "authority",
            TimeSpan.FromMilliseconds(500),
            TimeSpan.FromMilliseconds(100));

        // Act - Issue token
        var result = await _fixture.IssueTokenAsync("latency-user", TimeSpan.FromHours(1));

        // Assert - Should still complete
        result.Success.Should().BeTrue();
        result.Token.Should().NotBeNull();

        // The service state should show "Slow" status
        var serviceState = _fixture.FailureInjector.GetServiceState("authority");
        serviceState.Status.Should().Be(ServiceStatus.Slow);
    }

    /// <summary>Variable latency must not corrupt persisted values.</summary>
    [Fact]
    public async Task Database_VariableLatency_NoDataCorruption()
    {
        // Arrange - Variable latency with jitter
        _fixture.FailureInjector.InjectLatency(
            "database",
            TimeSpan.FromMilliseconds(200),
            TimeSpan.FromMilliseconds(300));

        // Act - Multiple concurrent-like writes
        var tasks = new List<Task<PersistResult>>();
        for (var i = 0; i < 10; i++)
        {
            tasks.Add(_fixture.PersistDataAsync($"latency-key-{i}", $"value-{i}"));
        }

        var results = await Task.WhenAll(tasks);

        // Assert - All should succeed
        results.Should().AllSatisfy(r => r.Success.Should().BeTrue());

        // Verify data integrity
        for (var i = 0; i < 10; i++)
        {
            var read = await _fixture.ReadDataAsync($"latency-key-{i}");
            read.Success.Should().BeTrue();
            read.Data!.Value.Should().Be($"value-{i}");
        }
    }

    /// <summary>Injected latency must not reorder sequentially enqueued jobs.</summary>
    [Fact]
    public async Task Scheduler_Latency_JobOrdering_Preserved()
    {
        // Arrange - Latency injection
        _fixture.FailureInjector.InjectLatency(
            "scheduler",
            TimeSpan.FromMilliseconds(100),
            TimeSpan.FromMilliseconds(50));

        // Act - Enqueue jobs in sequence
        for (var i = 0; i < 5; i++)
        {
            await _fixture.EnqueueJobAsync("scan", $"ordered-{i}");
        }

        // Assert - Jobs should be in order
        var jobs = _fixture.GetAllJobs().ToList();
        jobs.Should().HaveCount(5);

        for (var i = 0; i < 5; i++)
        {
            jobs[i].Payload.Should().Be($"ordered-{i}");
        }
    }

    #endregion

    #region Degraded Service Tests

    /// <summary>Cached tokens keep validating while the Authority is degraded.</summary>
    [Fact]
    public async Task DegradedAuthority_CacheHitRate_Improves()
    {
        // Arrange - Issue tokens while healthy
        var tokens = new List<string>();
        for (var i = 0; i < 5; i++)
        {
            var result = await _fixture.IssueTokenAsync($"user-{i}", TimeSpan.FromHours(1));
            if (result.Success)
            {
                tokens.Add(result.Token!.TokenId);
            }
        }

        // Authority becomes degraded (partial failure)
        _fixture.FailureInjector.InjectPartialFailure("authority", 0.7);

        // Act - Validate cached tokens
        var validations = new List<ValidationResult>();
        foreach (var tokenId in tokens)
        {
            validations.Add(await _fixture.ValidateTokenAsync(tokenId));
        }

        // Assert - All should succeed (either from Authority or cache)
        validations.Should().AllSatisfy(v =>
        {
            v.Success.Should().BeTrue();
            v.IsValid.Should().BeTrue();
        });

        // Some should come from cache
        var cacheHits = validations.Count(v => v.Source == ValidationSource.Cache);
        cacheHits.Should().BeGreaterThan(0);
    }

    /// <summary>The system stays functional with several services degraded at once.</summary>
    [Fact]
    public async Task MultipleDegraded_Services_GracefulDegradation()
    {
        // Arrange - Multiple services degraded
        _fixture.FailureInjector.InjectPartialFailure("authority", 0.3);
        _fixture.FailureInjector.InjectLatency("database", TimeSpan.FromMilliseconds(200));
        _fixture.FailureInjector.InjectPartialFailure("scheduler", 0.2);

        // Act - Perform various operations
        var tokenResults = new List<bool>();
        var persistResults = new List<bool>();
        var jobResults = new List<bool>();

        for (var i = 0; i < 10; i++)
        {
            var tokenResult = await _fixture.IssueTokenAsync($"user-{i}", TimeSpan.FromHours(1));
            tokenResults.Add(tokenResult.Success);

            var persistResult = await _fixture.PersistDataAsync($"key-{i}", $"value-{i}");
            persistResults.Add(persistResult.Success);

            var jobResult = await _fixture.EnqueueJobAsync("scan", $"image-{i}");
            jobResults.Add(jobResult.Success);
        }

        // Assert - System remains functional despite degradation
        tokenResults.Count(r => r).Should().BeGreaterThan(5);
        persistResults.Count(r => r).Should().BeGreaterThan(5);
        jobResults.Should().AllSatisfy(r => r.Should().BeTrue()); // Jobs always persist locally
    }

    #endregion

    #region Recovery from Partial Outage Tests

    /// <summary>After recovery from a partial outage, every operation succeeds again.</summary>
    [Fact]
    public async Task PartialOutage_Recovery_FullFunctionality_Restored()
    {
        // Arrange - Start with partial failure
        _fixture.FailureInjector.InjectPartialFailure("authority", 0.5);

        // Some operations fail during partial outage
        var duringOutage = new List<TokenResult>();
        for (var i = 0; i < 5; i++)
        {
            duringOutage.Add(await _fixture.IssueTokenAsync($"user-{i}", TimeSpan.FromHours(1)));
        }

        // Act - Recover
        _fixture.FailureInjector.RecoverService("authority");

        // All operations should succeed now
        var afterRecovery = new List<TokenResult>();
        for (var i = 5; i < 10; i++)
        {
            afterRecovery.Add(await _fixture.IssueTokenAsync($"user-{i}", TimeSpan.FromHours(1)));
        }

        // Assert
        afterRecovery.Should().AllSatisfy(r => r.Success.Should().BeTrue());

        var serviceState = _fixture.FailureInjector.GetServiceState("authority");
        serviceState.Status.Should().Be(ServiceStatus.Healthy);
    }

    /// <summary>Recovering a slow service restores its Healthy status.</summary>
    // Fix: this test performs no awaits, so its former `async Task` signature
    // triggered CS1998 — a build error under the project's TreatWarningsAsErrors.
    // It now returns a completed Task synchronously (xUnit accepts Task-returning
    // test methods).
    [Fact]
    public Task LatencyRecovery_PerformanceReturns_ToNormal()
    {
        // Arrange - High latency
        _fixture.FailureInjector.InjectLatency(
            "database",
            TimeSpan.FromSeconds(1),
            TimeSpan.FromMilliseconds(500));

        // Note: In real scenario, we'd measure actual latency
        // Here we just verify state changes
        var slowState = _fixture.FailureInjector.GetServiceState("database");
        slowState.Status.Should().Be(ServiceStatus.Slow);

        // Act - Recover
        _fixture.FailureInjector.RecoverService("database");

        // Assert - Back to healthy
        var healthyState = _fixture.FailureInjector.GetServiceState("database");
        healthyState.Status.Should().Be(ServiceStatus.Healthy);

        return Task.CompletedTask;
    }

    #endregion

    #region Service Isolation Tests

    /// <summary>Degrading one service leaves the others fully operational.</summary>
    [Fact]
    public async Task SingleService_Degraded_OthersUnaffected()
    {
        // Arrange - Only Authority degraded
        _fixture.FailureInjector.InjectPartialFailure("authority", 0.9);

        // Act - Database and Scheduler operations
        var dbResult = await _fixture.PersistDataAsync("isolated-key", "isolated-value");
        var jobResult = await _fixture.EnqueueJobAsync("scan", "isolated-image");

        // Assert - Unaffected services work normally
        dbResult.Success.Should().BeTrue();
        jobResult.Success.Should().BeTrue();

        // But Authority is degraded
        var authState = _fixture.FailureInjector.GetServiceState("authority");
        authState.Status.Should().Be(ServiceStatus.Degraded);

        var dbState = _fixture.FailureInjector.GetServiceState("database");
        dbState.Status.Should().Be(ServiceStatus.Healthy);
    }

    /// <summary>Database degradation does not cascade into Authority or Scheduler.</summary>
    [Fact]
    public async Task CascadingDegradation_DoesNotOccur()
    {
        // Arrange - Database degraded
        _fixture.FailureInjector.InjectPartialFailure("database", 0.5);

        // Act - Authority should work independently
        var tokenResult = await _fixture.IssueTokenAsync("cascade-user", TimeSpan.FromHours(1));

        // Assert - Authority unaffected by database degradation
        tokenResult.Success.Should().BeTrue();

        // Scheduler also unaffected
        var jobResult = await _fixture.EnqueueJobAsync("cascade-scan", "image");
        jobResult.Success.Should().BeTrue();
    }

    #endregion

    #region Flapping Service Tests

    /// <summary>A service flapping between degraded and healthy does not destabilize the system.</summary>
    [Fact]
    public async Task FlappingService_SystemRemains_Stable()
    {
        // Simulate a flapping service (alternating between healthy and degraded)
        var allResults = new List<bool>();

        for (var cycle = 0; cycle < 5; cycle++)
        {
            // Service degrades
            _fixture.FailureInjector.InjectPartialFailure("authority", 0.5);

            for (var i = 0; i < 3; i++)
            {
                var result = await _fixture.IssueTokenAsync($"flap-user-{cycle}-{i}", TimeSpan.FromHours(1));
                allResults.Add(result.Success);
            }

            // Service recovers
            _fixture.FailureInjector.RecoverService("authority");

            for (var i = 0; i < 3; i++)
            {
                var result = await _fixture.IssueTokenAsync($"stable-user-{cycle}-{i}", TimeSpan.FromHours(1));
                allResults.Add(result.Success);
            }
        }

        // Assert - System handled the flapping without crashing
        // Most operations during stable periods should succeed
        allResults.Should().NotBeEmpty();
        allResults.Count(r => r).Should().BeGreaterThan(allResults.Count / 2);
    }

    #endregion
}
|
||||
@@ -0,0 +1,27 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <LangVersion>preview</LangVersion>
    <ImplicitUsings>enable</ImplicitUsings>
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <!-- Warnings are errors: awaitless async methods (CS1998) etc. break the build. -->
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <RootNamespace>StellaOps.Chaos.ControlPlane.Tests</RootNamespace>
  </PropertyGroup>

  <!-- Sprint: Testing Enhancement Advisory - Phase 3.3 -->
  <!-- Description: Control-plane outage E2E chaos tests -->

  <ItemGroup>
    <!-- No Version attributes: presumably versions come from central package
         management (Directory.Packages.props) - confirm. -->
    <!-- NOTE(review): no xunit runner package is referenced directly here;
         presumably it flows transitively via StellaOps.TestKit - confirm. -->
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Testcontainers" />
    <PackageReference Include="Testcontainers.PostgreSql" />
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
  </ItemGroup>
</Project>
|
||||
@@ -1,7 +1,7 @@
|
||||
# Chaos Router Tests Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -23,7 +23,7 @@ Validate integration flows for registry webhooks, SCM webhooks, CI templates, of
|
||||
- `docs/modules/platform/architecture-overview.md`
|
||||
- `docs/modules/sbom-service/architecture.md`
|
||||
- `docs/modules/signals/architecture.md`
|
||||
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
|
||||
- `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
|
||||
|
||||
## Working Agreement
|
||||
- 1. Use fixed time and IDs in fixtures and test data.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Integration E2E Integrations Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -17,7 +17,7 @@ Validate replayable verdict bundles and determinism across the verdict pipeline.
|
||||
- `docs/modules/replay/architecture.md`
|
||||
- `docs/modules/vex-lens/architecture.md`
|
||||
- `docs/modules/scanner/architecture.md`
|
||||
- `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
|
||||
- `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`
|
||||
|
||||
## Working Agreement
|
||||
- 1. Use fixed time and IDs in fixtures and manifest data.
|
||||
|
||||
@@ -9,6 +9,8 @@
|
||||
<LangVersion>preview</LangVersion>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
<!-- Suppress xUnit1051: E2E tests don't need responsive cancellation -->
|
||||
<NoWarn>$(NoWarn);xUnit1051</NoWarn>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup> <PackageReference Include="xunit.v3" />
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Replayable Verdict E2E Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/permament/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# StellaOps.Interop.Tests Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# StellaOps.Offline.E2E.Tests Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -0,0 +1,387 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// CompetitorBenchmarkTests.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 3.1
|
||||
// Description: Benchmark tests comparing StellaOps against Trivy/Grype with ground truth
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
using Xunit.Sdk;
|
||||
|
||||
namespace StellaOps.Parity.Tests.Competitors;
|
||||
|
||||
/// <summary>
|
||||
/// Benchmark tests comparing StellaOps scanner against Trivy and Grype.
|
||||
/// Uses ground truth data for objective accuracy measurements.
|
||||
/// </summary>
|
||||
[Trait("Category", TestCategories.Benchmark)]
|
||||
[Trait("Category", "CompetitorParity")]
|
||||
public class CompetitorBenchmarkTests : IAsyncLifetime
|
||||
{
|
||||
private ParityHarness _harness = null!;
|
||||
private ToolAvailability _toolAvailability = null!;
|
||||
|
||||
/// <summary>
|
||||
/// Minimum F1 score ratio that StellaOps must achieve relative to competitors.
|
||||
/// 0.95 means StellaOps F1 must be >= 95% of competitor F1.
|
||||
/// </summary>
|
||||
private const double MinimumF1Ratio = 0.95;
|
||||
|
||||
public async ValueTask InitializeAsync()
|
||||
{
|
||||
_harness = new ParityHarness();
|
||||
_toolAvailability = await _harness.CheckToolsAsync();
|
||||
}
|
||||
|
||||
public async ValueTask DisposeAsync()
|
||||
{
|
||||
await _harness.DisposeAsync();
|
||||
}
|
||||
|
||||
#region Base Image Benchmarks
|
||||
|
||||
[Theory]
|
||||
[MemberData(nameof(GetBaseImages))]
|
||||
public async Task BaseImages_StellaOps_VsTrivy_Parity(string fixtureName, string imageRef)
|
||||
{
|
||||
// Skip if Trivy is not available
|
||||
if (!_toolAvailability.TrivyAvailable)
|
||||
throw SkipException.ForSkip("Trivy not available on this system");
|
||||
|
||||
// Arrange
|
||||
var fixture = ExpandedCorpusFixtures.BaseImages.First(f => f.Name == fixtureName);
|
||||
|
||||
// Act
|
||||
var trivyOutput = await _harness.RunTrivyAsync(imageRef);
|
||||
|
||||
// Assert - Trivy should complete successfully
|
||||
trivyOutput.Success.Should().BeTrue($"Trivy failed to scan {imageRef}: {trivyOutput.Error}");
|
||||
trivyOutput.FindingsJson.Should().NotBeNull();
|
||||
|
||||
// Note: Full comparison requires StellaOps scanner integration
|
||||
// This test validates that the corpus image can be scanned
|
||||
}
|
||||
|
||||
/// <summary>
/// Validates that every base-image corpus entry can be scanned by Grype.
/// Skipped when Grype is not installed on the host.
/// </summary>
/// <param name="fixtureName">Corpus fixture name; must exist in <see cref="ExpandedCorpusFixtures.BaseImages"/>.</param>
/// <param name="imageRef">Container image reference passed to the scanner.</param>
[Theory]
[MemberData(nameof(GetBaseImages))]
public async Task BaseImages_StellaOps_VsGrype_Parity(string fixtureName, string imageRef)
{
    if (!_toolAvailability.GrypeAvailable)
        throw SkipException.ForSkip("Grype not available on this system");

    // Resolve the fixture so an unknown name fails fast; the fixture itself is
    // not otherwise used here, so discard the result.
    _ = ExpandedCorpusFixtures.BaseImages.First(f => f.Name == fixtureName);

    var grypeOutput = await _harness.RunGrypeAsync(imageRef);

    grypeOutput.Success.Should().BeTrue($"Grype failed to scan {imageRef}: {grypeOutput.Error}");
    grypeOutput.FindingsJson.Should().NotBeNull();
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Language Runtime Benchmarks
|
||||
|
||||
/// <summary>
/// Validates that every language-runtime corpus entry can be scanned by Trivy.
/// Skipped when Trivy is not installed on the host.
/// </summary>
/// <param name="fixtureName">Corpus fixture name; must exist in <see cref="ExpandedCorpusFixtures.LanguageRuntimes"/>.</param>
/// <param name="imageRef">Container image reference passed to the scanner.</param>
[Theory]
[MemberData(nameof(GetLanguageRuntimes))]
public async Task LanguageRuntimes_StellaOps_VsTrivy_Parity(string fixtureName, string imageRef)
{
    if (!_toolAvailability.TrivyAvailable)
        throw SkipException.ForSkip("Trivy not available on this system");

    // Resolve the fixture so an unknown name fails fast; the fixture itself is
    // not otherwise used here, so discard the result.
    _ = ExpandedCorpusFixtures.LanguageRuntimes.First(f => f.Name == fixtureName);

    var trivyOutput = await _harness.RunTrivyAsync(imageRef);

    trivyOutput.Success.Should().BeTrue($"Trivy failed to scan {imageRef}: {trivyOutput.Error}");
}
|
||||
|
||||
/// <summary>
/// Verifies Syft can generate an SBOM for each language-runtime image and that
/// the SBOM reports at least the fixture's expected minimum package count.
/// Skipped when Syft is not installed on the host.
/// </summary>
/// <param name="fixtureName">Corpus fixture name; must exist in <see cref="ExpandedCorpusFixtures.LanguageRuntimes"/>.</param>
/// <param name="imageRef">Container image reference passed to Syft.</param>
[Theory]
[MemberData(nameof(GetLanguageRuntimes))]
public async Task LanguageRuntimes_Syft_SbomGeneration(string fixtureName, string imageRef)
{
    if (!_toolAvailability.SyftAvailable)
        throw SkipException.ForSkip("Syft not available on this system");

    var fixture = ExpandedCorpusFixtures.LanguageRuntimes.First(f => f.Name == fixtureName);

    var syftOutput = await _harness.RunSyftAsync(imageRef);

    syftOutput.Success.Should().BeTrue($"Syft failed to scan {imageRef}: {syftOutput.Error}");
    syftOutput.SbomJson.Should().NotBeNull();

    // The SBOM must contain at least the fixture's minimum package count.
    var reportedPackages = CountSpdxPackages(syftOutput.SbomJson!);
    reportedPackages.Should().BeGreaterThanOrEqualTo(
        fixture.ExpectedMinPackages,
        $"Expected at least {fixture.ExpectedMinPackages} packages in {imageRef}");
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Known Vulnerable Image Benchmarks
|
||||
|
||||
/// <summary>
/// Runs all scanners over every known-vulnerable corpus image and asserts each
/// of Trivy and Grype detects at least the fixture's expected minimum CVE count.
/// Skipped unless every scanner is available on the host.
/// </summary>
[Fact]
public async Task VulnerableImages_AllScanners_DetectKnownCVEs()
{
    if (!_toolAvailability.AllAvailable)
        throw SkipException.ForSkip("Not all scanners available");

    var vulnerableFixtures = ExpandedCorpusFixtures.GetVulnerableFixtures().ToList();

    foreach (var fixture in vulnerableFixtures)
    {
        var runResult = await _harness.RunAllAsync(fixture);

        var trivyCves = CountTrivyVulnerabilities(runResult.TrivyOutput?.FindingsJson);
        var grypeCves = CountGrypeVulnerabilities(runResult.GrypeOutput?.FindingsJson);

        // Both scanners should find at least the minimum expected CVEs
        trivyCves.Should().BeGreaterThanOrEqualTo(fixture.ExpectedMinCVEs,
            $"Trivy should find at least {fixture.ExpectedMinCVEs} CVEs in {fixture.Image}");
        grypeCves.Should().BeGreaterThanOrEqualTo(fixture.ExpectedMinCVEs,
            $"Grype should find at least {fixture.ExpectedMinCVEs} CVEs in {fixture.Image}");
    }

    // NOTE: a per-fixture results list previously accumulated here was never
    // read; it has been removed as dead code.
}
|
||||
|
||||
/// <summary>
/// Scans a known-vulnerable image with both Trivy and Grype and compares their
/// findings via <see cref="VulnerabilityComparisonLogic"/>, asserting the
/// baseline reports at least the expected minimum CVE count.
/// Skipped unless both scanners are available.
/// </summary>
/// <param name="fixtureName">Corpus fixture name; must exist in <see cref="ExpandedCorpusFixtures.AllFixtures"/>.</param>
/// <param name="imageRef">Container image reference passed to both scanners.</param>
/// <param name="expectedMinCves">Minimum CVE count the baseline must report.</param>
[Theory]
[MemberData(nameof(GetVulnerableImages))]
public async Task VulnerableImages_CompareFindings(string fixtureName, string imageRef, int expectedMinCves)
{
    if (!_toolAvailability.TrivyAvailable || !_toolAvailability.GrypeAvailable)
        throw SkipException.ForSkip("Both Trivy and Grype required for comparison");

    // Resolve the fixture so an unknown name fails fast; the fixture itself is
    // not otherwise used here, so discard the result.
    _ = ExpandedCorpusFixtures.AllFixtures.First(f => f.Name == fixtureName);

    // Run both scanners
    var trivyOutput = await _harness.RunTrivyAsync(imageRef);
    var grypeOutput = await _harness.RunGrypeAsync(imageRef);

    trivyOutput.Success.Should().BeTrue();
    grypeOutput.Success.Should().BeTrue();

    // Compare findings
    var comparison = new VulnerabilityComparisonLogic();
    var result = comparison.Compare(trivyOutput, grypeOutput);

    result.Success.Should().BeTrue();

    // Log comparison results for analysis
    result.BaselineCveCount.Should().BeGreaterThanOrEqualTo(expectedMinCves);
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Ground Truth Validation (When Available)
|
||||
|
||||
/// <summary>
/// Validates <see cref="GroundTruthMetrics.Calculate"/> against a hand-built
/// scenario: 3 true positives, 2 false negatives, 1 false positive (with one
/// known false positive excluded), checking precision, recall, and F1.
/// </summary>
/// <remarks>
/// Fixed: method was declared <c>async</c> with no <c>await</c> (CS1998); it is
/// now synchronous and returns <see cref="Task.CompletedTask"/> to keep the
/// <see cref="Task"/> signature xUnit expects.
/// </remarks>
[Fact]
public Task GroundTruth_MetricsCalculation_ValidatesCorrectly()
{
    // Arrange - Create sample ground truth and findings
    var groundTruth = new GroundTruth
    {
        ImageDigest = "sha256:test123",
        ImageRef = "test:latest",
        EstablishedAt = DateTimeOffset.UtcNow,
        ExpectedFindings =
        [
            new GroundTruthFinding { CveId = "CVE-2024-0001", Package = "openssl" },
            new GroundTruthFinding { CveId = "CVE-2024-0002", Package = "curl" },
            new GroundTruthFinding { CveId = "CVE-2024-0003", Package = "zlib" },
            new GroundTruthFinding { CveId = "CVE-2024-0004", Package = "libpng" },
            new GroundTruthFinding { CveId = "CVE-2024-0005", Package = "expat" }
        ],
        KnownFalsePositives = ["CVE-2024-FP01"]
    };

    var scannerFindings = new List<ScannerFinding>
    {
        new() { CveId = "CVE-2024-0001", Package = "openssl" },
        new() { CveId = "CVE-2024-0002", Package = "curl" },
        new() { CveId = "CVE-2024-0003", Package = "zlib" },
        // Missing CVE-2024-0004 (false negative)
        // Missing CVE-2024-0005 (false negative)
        new() { CveId = "CVE-2024-0006", Package = "new-vuln" }, // False positive
        new() { CveId = "CVE-2024-FP01", Package = "known-fp" } // Known false positive
    };

    // Act
    var metrics = GroundTruthMetrics.Calculate(scannerFindings, groundTruth);

    // Assert
    metrics.TruePositiveCount.Should().Be(3); // 0001, 0002, 0003
    metrics.FalseNegativeCount.Should().Be(2); // 0004, 0005 missing
    metrics.FalsePositiveCount.Should().Be(1); // 0006 (FP01 excluded as known)

    metrics.Precision.Should().BeApproximately(0.75, 0.01); // 3/(3+1)
    metrics.Recall.Should().BeApproximately(0.6, 0.01); // 3/(3+2)

    var expectedF1 = 2 * (0.75 * 0.6) / (0.75 + 0.6);
    metrics.F1Score.Should().BeApproximately(expectedF1, 0.01);

    return Task.CompletedTask;
}
|
||||
|
||||
/// <summary>
/// Validates <see cref="GroundTruthMetrics.Compare"/> prefers the scanner with
/// the better F1 score: scanner B (balanced precision/recall) must beat
/// scanner A (perfect precision, poor recall).
/// </summary>
/// <remarks>
/// Fixed: method was declared <c>async</c> with no <c>await</c> (CS1998); it is
/// now synchronous and returns <see cref="Task.CompletedTask"/> to keep the
/// <see cref="Task"/> signature xUnit expects.
/// </remarks>
[Fact]
public Task GroundTruth_CompareScenario_BetterScannerIdentified()
{
    // Arrange
    var groundTruth = new GroundTruth
    {
        ImageDigest = "sha256:compare",
        ImageRef = "compare:latest",
        EstablishedAt = DateTimeOffset.UtcNow,
        ExpectedFindings =
        [
            new GroundTruthFinding { CveId = "CVE-2024-0001", Package = "pkg1" },
            new GroundTruthFinding { CveId = "CVE-2024-0002", Package = "pkg2" },
            new GroundTruthFinding { CveId = "CVE-2024-0003", Package = "pkg3" },
            new GroundTruthFinding { CveId = "CVE-2024-0004", Package = "pkg4" }
        ]
    };

    // Scanner A: High precision, lower recall
    var scannerAFindings = new List<ScannerFinding>
    {
        new() { CveId = "CVE-2024-0001", Package = "pkg1" },
        new() { CveId = "CVE-2024-0002", Package = "pkg2" }
    };

    // Scanner B: Lower precision, higher recall
    var scannerBFindings = new List<ScannerFinding>
    {
        new() { CveId = "CVE-2024-0001", Package = "pkg1" },
        new() { CveId = "CVE-2024-0002", Package = "pkg2" },
        new() { CveId = "CVE-2024-0003", Package = "pkg3" },
        new() { CveId = "CVE-2024-FP01", Package = "fp1" }
    };

    // Act
    var metricsA = GroundTruthMetrics.Calculate(scannerAFindings, groundTruth);
    metricsA.ScannerName = "ScannerA";

    var metricsB = GroundTruthMetrics.Calculate(scannerBFindings, groundTruth);
    metricsB.ScannerName = "ScannerB";

    var comparison = GroundTruthMetrics.Compare(metricsB, metricsA);

    // Assert
    metricsA.Precision.Should().Be(1.0); // 2/2 = 100%
    metricsA.Recall.Should().Be(0.5); // 2/4 = 50%

    metricsB.Precision.Should().Be(0.75); // 3/4 = 75%
    metricsB.Recall.Should().Be(0.75); // 3/4 = 75%

    // Scanner B should have better F1 (0.75 > ~0.67)
    metricsB.F1Score.Should().BeGreaterThan(metricsA.F1Score);
    comparison.ScannerABetter.Should().BeTrue(); // B is "ScannerA" in compare
    return Task.CompletedTask;
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Corpus Coverage Tests
|
||||
|
||||
/// <summary>Ensures the expanded corpus meets its 50-image minimum target.</summary>
[Fact]
public void ExpandedCorpus_HasAtLeast50Images()
{
    var corpusStats = ExpandedCorpusFixtures.GetStats();

    corpusStats.TotalImages.Should().BeGreaterThanOrEqualTo(
        50,
        "Expanded corpus should have at least 50 images");
}
|
||||
|
||||
/// <summary>
/// Ensures every corpus category (base OS, language runtime, application
/// stack, enterprise) is populated with its minimum image count.
/// </summary>
[Fact]
public void ExpandedCorpus_HasAllCategories()
{
    var corpusStats = ExpandedCorpusFixtures.GetStats();

    corpusStats.BaseImages.Should().BeGreaterThanOrEqualTo(8, "Should have 8+ base images");
    corpusStats.LanguageRuntimes.Should().BeGreaterThanOrEqualTo(12, "Should have 12+ language runtimes");
    corpusStats.ApplicationStacks.Should().BeGreaterThanOrEqualTo(12, "Should have 12+ app stacks");
    corpusStats.EnterpriseImages.Should().BeGreaterThanOrEqualTo(8, "Should have 8+ enterprise images");
}
|
||||
|
||||
/// <summary>
/// Ensures the corpus includes known-vulnerable images and that each of them
/// declares a positive expected CVE count.
/// </summary>
[Fact]
public void ExpandedCorpus_HasVulnerableImages()
{
    var corpusStats = ExpandedCorpusFixtures.GetStats();
    var vulnerableFixtures = ExpandedCorpusFixtures.GetVulnerableFixtures().ToList();

    corpusStats.VulnerableImages.Should().BeGreaterThanOrEqualTo(
        2,
        "Should have at least 2 images with known CVEs");

    vulnerableFixtures.Should().AllSatisfy(
        f => f.ExpectedMinCVEs.Should().BeGreaterThan(0));
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region MemberData Providers
|
||||
|
||||
/// <summary>Theory data: (name, image) pairs for every base OS image.</summary>
public static IEnumerable<object[]> GetBaseImages()
    => ExpandedCorpusFixtures.BaseImages.Select(f => new object[] { f.Name, f.Image });
|
||||
|
||||
/// <summary>Theory data: (name, image) pairs for every language-runtime image.</summary>
public static IEnumerable<object[]> GetLanguageRuntimes()
    => ExpandedCorpusFixtures.LanguageRuntimes.Select(f => new object[] { f.Name, f.Image });
|
||||
|
||||
/// <summary>Theory data: (name, image, expected-min-CVEs) triples for known-vulnerable images.</summary>
public static IEnumerable<object[]> GetVulnerableImages()
    => ExpandedCorpusFixtures.GetVulnerableFixtures()
        .Select(f => new object[] { f.Name, f.Image, f.ExpectedMinCVEs });
|
||||
|
||||
/// <summary>Theory data: (name, image) pairs for the entire corpus.</summary>
public static IEnumerable<object[]> GetAllCorpusImages()
    => ExpandedCorpusFixtures.AllFixtures.Select(f => new object[] { f.Name, f.Image });
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
/// <summary>
/// Counts entries in the top-level "packages" array of an SPDX JSON document.
/// Returns 0 when the property is absent.
/// </summary>
private static int CountSpdxPackages(JsonDocument spdxDoc)
    => spdxDoc.RootElement.TryGetProperty("packages", out var packageArray)
        ? packageArray.GetArrayLength()
        : 0;
|
||||
|
||||
/// <summary>
/// Sums vulnerability entries across all "Results" of a Trivy JSON report.
/// A null document, a missing "Results" property, or results without a
/// "Vulnerabilities" array all contribute zero.
/// </summary>
private static int CountTrivyVulnerabilities(JsonDocument? trivyDoc)
{
    if (trivyDoc is null ||
        !trivyDoc.RootElement.TryGetProperty("Results", out var resultArray))
    {
        return 0;
    }

    return resultArray.EnumerateArray()
        .Sum(result => result.TryGetProperty("Vulnerabilities", out var vulnArray)
            ? vulnArray.GetArrayLength()
            : 0);
}
|
||||
|
||||
/// <summary>
/// Counts entries in the top-level "matches" array of a Grype JSON report.
/// Returns 0 for a null document or a missing "matches" property.
/// </summary>
private static int CountGrypeVulnerabilities(JsonDocument? grypeDoc)
    => grypeDoc is not null &&
       grypeDoc.RootElement.TryGetProperty("matches", out var matchArray)
        ? matchArray.GetArrayLength()
        : 0;
|
||||
|
||||
#endregion
|
||||
}
|
||||
@@ -0,0 +1,170 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ExpandedCorpusFixtures.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 3.1
|
||||
// Description: Expanded corpus of 50+ container images for competitor benchmarking
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
namespace StellaOps.Parity.Tests.Competitors;
|
||||
|
||||
/// <summary>
/// Expanded corpus of container images for comprehensive competitor benchmarking.
/// Target: 50+ images across base images, language runtimes, application stacks, and enterprise scenarios.
/// All entries are public-registry images; fixtures with <c>ExpectedMinCVEs &gt; 0</c>
/// are used for vulnerability-detection comparisons.
/// </summary>
public static class ExpandedCorpusFixtures
{
    /// <summary>
    /// Base OS images - 10 images covering major distributions
    /// (Alpine, Debian, Ubuntu, RHEL-compatible).
    /// </summary>
    public static IReadOnlyList<ParityImageFixture> BaseImages { get; } =
    [
        // Alpine variants
        new ParityImageFixture { Name = "alpine-3.18", Image = "alpine:3.18", Description = "Alpine 3.18", PackageManagers = ["apk"], ExpectedMinPackages = 10, Category = ImageCategory.BaseOS },
        new ParityImageFixture { Name = "alpine-3.19", Image = "alpine:3.19", Description = "Alpine 3.19", PackageManagers = ["apk"], ExpectedMinPackages = 10, Category = ImageCategory.BaseOS },
        new ParityImageFixture { Name = "alpine-3.20", Image = "alpine:3.20", Description = "Alpine 3.20 (latest)", PackageManagers = ["apk"], ExpectedMinPackages = 10, Category = ImageCategory.BaseOS },

        // Debian variants
        new ParityImageFixture { Name = "debian-bullseye", Image = "debian:bullseye-slim", Description = "Debian 11 Bullseye", PackageManagers = ["apt"], ExpectedMinPackages = 50, Category = ImageCategory.BaseOS },
        new ParityImageFixture { Name = "debian-bookworm", Image = "debian:bookworm-slim", Description = "Debian 12 Bookworm", PackageManagers = ["apt"], ExpectedMinPackages = 50, Category = ImageCategory.BaseOS },

        // Ubuntu variants
        new ParityImageFixture { Name = "ubuntu-20.04", Image = "ubuntu:20.04", Description = "Ubuntu 20.04 LTS", PackageManagers = ["apt"], ExpectedMinPackages = 80, Category = ImageCategory.BaseOS },
        new ParityImageFixture { Name = "ubuntu-22.04", Image = "ubuntu:22.04", Description = "Ubuntu 22.04 LTS", PackageManagers = ["apt"], ExpectedMinPackages = 80, Category = ImageCategory.BaseOS },
        new ParityImageFixture { Name = "ubuntu-24.04", Image = "ubuntu:24.04", Description = "Ubuntu 24.04 LTS", PackageManagers = ["apt"], ExpectedMinPackages = 80, Category = ImageCategory.BaseOS },

        // RHEL-compatible
        new ParityImageFixture { Name = "rockylinux-8", Image = "rockylinux:8", Description = "Rocky Linux 8", PackageManagers = ["rpm"], ExpectedMinPackages = 100, Category = ImageCategory.BaseOS },
        new ParityImageFixture { Name = "rockylinux-9", Image = "rockylinux:9-minimal", Description = "Rocky Linux 9 Minimal", PackageManagers = ["rpm"], ExpectedMinPackages = 30, Category = ImageCategory.BaseOS }
    ];

    /// <summary>
    /// Language runtime images - 15 images covering major languages
    /// (Node.js, Python, Java, Go, Rust, .NET, PHP).
    /// </summary>
    public static IReadOnlyList<ParityImageFixture> LanguageRuntimes { get; } =
    [
        // Node.js variants
        new ParityImageFixture { Name = "node-18-alpine", Image = "node:18-alpine", Description = "Node.js 18 on Alpine", PackageManagers = ["apk", "npm"], ExpectedMinPackages = 50, Category = ImageCategory.LanguageRuntime },
        new ParityImageFixture { Name = "node-20-alpine", Image = "node:20-alpine", Description = "Node.js 20 on Alpine", PackageManagers = ["apk", "npm"], ExpectedMinPackages = 50, Category = ImageCategory.LanguageRuntime },
        new ParityImageFixture { Name = "node-20-bookworm", Image = "node:20-bookworm-slim", Description = "Node.js 20 on Debian", PackageManagers = ["apt", "npm"], ExpectedMinPackages = 100, Category = ImageCategory.LanguageRuntime },

        // Python variants
        new ParityImageFixture { Name = "python-3.11-alpine", Image = "python:3.11-alpine", Description = "Python 3.11 on Alpine", PackageManagers = ["apk", "pip"], ExpectedMinPackages = 50, Category = ImageCategory.LanguageRuntime },
        new ParityImageFixture { Name = "python-3.12-slim", Image = "python:3.12-slim", Description = "Python 3.12 Slim", PackageManagers = ["apt", "pip"], ExpectedMinPackages = 80, Category = ImageCategory.LanguageRuntime },
        new ParityImageFixture { Name = "python-3.12-bookworm", Image = "python:3.12-bookworm", Description = "Python 3.12 Full", PackageManagers = ["apt", "pip"], ExpectedMinPackages = 150, Category = ImageCategory.LanguageRuntime },

        // Java variants
        new ParityImageFixture { Name = "temurin-17-jdk", Image = "eclipse-temurin:17-jdk-jammy", Description = "Java 17 JDK", PackageManagers = ["apt", "maven"], ExpectedMinPackages = 100, Category = ImageCategory.LanguageRuntime },
        new ParityImageFixture { Name = "temurin-21-jdk", Image = "eclipse-temurin:21-jdk-jammy", Description = "Java 21 JDK", PackageManagers = ["apt", "maven"], ExpectedMinPackages = 100, Category = ImageCategory.LanguageRuntime },
        new ParityImageFixture { Name = "temurin-21-jre", Image = "eclipse-temurin:21-jre-jammy", Description = "Java 21 JRE", PackageManagers = ["apt"], ExpectedMinPackages = 80, Category = ImageCategory.LanguageRuntime },

        // Go variants
        new ParityImageFixture { Name = "golang-1.21", Image = "golang:1.21-bookworm", Description = "Go 1.21", PackageManagers = ["apt", "go"], ExpectedMinPackages = 150, Category = ImageCategory.LanguageRuntime },
        new ParityImageFixture { Name = "golang-1.22", Image = "golang:1.22-bookworm", Description = "Go 1.22", PackageManagers = ["apt", "go"], ExpectedMinPackages = 150, Category = ImageCategory.LanguageRuntime },

        // Rust
        new ParityImageFixture { Name = "rust-1.75", Image = "rust:1.75-bookworm", Description = "Rust 1.75", PackageManagers = ["apt", "cargo"], ExpectedMinPackages = 100, Category = ImageCategory.LanguageRuntime },

        // .NET
        new ParityImageFixture { Name = "dotnet-8-aspnet", Image = "mcr.microsoft.com/dotnet/aspnet:8.0-bookworm-slim", Description = ".NET 8 ASP.NET", PackageManagers = ["apt", "nuget"], ExpectedMinPackages = 80, Category = ImageCategory.LanguageRuntime },
        new ParityImageFixture { Name = "dotnet-8-sdk", Image = "mcr.microsoft.com/dotnet/sdk:8.0-bookworm-slim", Description = ".NET 8 SDK", PackageManagers = ["apt", "nuget"], ExpectedMinPackages = 100, Category = ImageCategory.LanguageRuntime },

        // PHP
        new ParityImageFixture { Name = "php-8.2-fpm", Image = "php:8.2-fpm-bookworm", Description = "PHP 8.2 FPM", PackageManagers = ["apt", "composer"], ExpectedMinPackages = 80, Category = ImageCategory.LanguageRuntime }
    ];

    /// <summary>
    /// Application stack images - 15 images covering common databases and web servers.
    /// Two entries (postgres-14, nginx-1.24) are deliberately older releases tagged
    /// KnownVulnerable with an expected minimum CVE count.
    /// </summary>
    public static IReadOnlyList<ParityImageFixture> ApplicationStacks { get; } =
    [
        // Databases
        new ParityImageFixture { Name = "postgres-14", Image = "postgres:14", Description = "PostgreSQL 14", PackageManagers = ["apt"], ExpectedMinPackages = 100, Category = ImageCategory.KnownVulnerable, ExpectedMinCVEs = 3 },
        new ParityImageFixture { Name = "postgres-15", Image = "postgres:15", Description = "PostgreSQL 15", PackageManagers = ["apt"], ExpectedMinPackages = 100, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "postgres-16", Image = "postgres:16", Description = "PostgreSQL 16", PackageManagers = ["apt"], ExpectedMinPackages = 100, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "mysql-8", Image = "mysql:8.0", Description = "MySQL 8.0", PackageManagers = ["rpm"], ExpectedMinPackages = 80, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "mariadb-11", Image = "mariadb:11", Description = "MariaDB 11", PackageManagers = ["apt"], ExpectedMinPackages = 100, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "mongo-7", Image = "mongo:7", Description = "MongoDB 7", PackageManagers = ["apt"], ExpectedMinPackages = 80, Category = ImageCategory.ComplexApp },

        // Cache/Queue
        new ParityImageFixture { Name = "redis-7", Image = "redis:7.2-bookworm", Description = "Redis 7.2", PackageManagers = ["apt"], ExpectedMinPackages = 50, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "valkey-8", Image = "valkey/valkey:8.0-bookworm", Description = "Valkey 8.0", PackageManagers = ["apt"], ExpectedMinPackages = 50, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "rabbitmq-3", Image = "rabbitmq:3.13-management", Description = "RabbitMQ 3.13", PackageManagers = ["apt"], ExpectedMinPackages = 100, Category = ImageCategory.ComplexApp },

        // Web Servers
        new ParityImageFixture { Name = "nginx-1.24", Image = "nginx:1.24", Description = "nginx 1.24", PackageManagers = ["apt"], ExpectedMinPackages = 100, Category = ImageCategory.KnownVulnerable, ExpectedMinCVEs = 5 },
        new ParityImageFixture { Name = "nginx-1.25", Image = "nginx:1.25", Description = "nginx 1.25", PackageManagers = ["apt"], ExpectedMinPackages = 100, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "httpd-2.4", Image = "httpd:2.4", Description = "Apache HTTPD 2.4", PackageManagers = ["apt"], ExpectedMinPackages = 80, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "traefik-2", Image = "traefik:v2.11", Description = "Traefik 2.11", PackageManagers = ["go"], ExpectedMinPackages = 20, Category = ImageCategory.ComplexApp },

        // Container Tools
        new ParityImageFixture { Name = "registry-2", Image = "registry:2", Description = "Docker Registry", PackageManagers = ["go"], ExpectedMinPackages = 20, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "vault-1.15", Image = "hashicorp/vault:1.15", Description = "HashiCorp Vault", PackageManagers = ["go"], ExpectedMinPackages = 20, Category = ImageCategory.ComplexApp }
    ];

    /// <summary>
    /// Enterprise/complex images - 10 images with multi-stage builds and complex
    /// dependencies (CMS, monitoring, GitOps/CI, security tooling).
    /// </summary>
    public static IReadOnlyList<ParityImageFixture> EnterpriseImages { get; } =
    [
        // CMS/Applications
        new ParityImageFixture { Name = "wordpress-6.4", Image = "wordpress:6.4-php8.2-apache", Description = "WordPress 6.4", PackageManagers = ["apt", "composer"], ExpectedMinPackages = 200, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "drupal-10", Image = "drupal:10", Description = "Drupal 10", PackageManagers = ["apt", "composer"], ExpectedMinPackages = 200, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "ghost-5", Image = "ghost:5", Description = "Ghost CMS 5", PackageManagers = ["apt", "npm"], ExpectedMinPackages = 150, Category = ImageCategory.ComplexApp },

        // Monitoring
        new ParityImageFixture { Name = "prometheus-2", Image = "prom/prometheus:v2.49.0", Description = "Prometheus 2.49", PackageManagers = ["go"], ExpectedMinPackages = 30, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "grafana-10", Image = "grafana/grafana:10.3.0", Description = "Grafana 10.3", PackageManagers = ["go", "npm"], ExpectedMinPackages = 50, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "alertmanager", Image = "prom/alertmanager:v0.27.0", Description = "Alertmanager", PackageManagers = ["go"], ExpectedMinPackages = 20, Category = ImageCategory.ComplexApp },

        // GitOps/CI
        new ParityImageFixture { Name = "gitea-1.21", Image = "gitea/gitea:1.21", Description = "Gitea 1.21", PackageManagers = ["go", "npm"], ExpectedMinPackages = 50, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "jenkins-lts", Image = "jenkins/jenkins:lts-jdk17", Description = "Jenkins LTS", PackageManagers = ["apt", "maven"], ExpectedMinPackages = 150, Category = ImageCategory.ComplexApp },

        // Security
        new ParityImageFixture { Name = "trivy-scanner", Image = "aquasec/trivy:0.48.0", Description = "Trivy Scanner", PackageManagers = ["go"], ExpectedMinPackages = 30, Category = ImageCategory.ComplexApp },
        new ParityImageFixture { Name = "clair-scanner", Image = "quay.io/projectquay/clair:4.7.2", Description = "Clair Scanner", PackageManagers = ["go"], ExpectedMinPackages = 30, Category = ImageCategory.ComplexApp }
    ];

    /// <summary>
    /// Gets all corpus fixtures (50+ images), concatenated in category order:
    /// base OS, language runtimes, application stacks, enterprise.
    /// </summary>
    public static IReadOnlyList<ParityImageFixture> AllFixtures { get; } =
        [.. BaseImages, .. LanguageRuntimes, .. ApplicationStacks, .. EnterpriseImages];

    /// <summary>
    /// Gets fixtures by category.
    /// </summary>
    public static IEnumerable<ParityImageFixture> GetByCategory(ImageCategory category)
        => AllFixtures.Where(f => f.Category == category);

    /// <summary>
    /// Gets fixtures with expected CVEs (ExpectedMinCVEs > 0) for vulnerability comparison.
    /// </summary>
    public static IEnumerable<ParityImageFixture> GetVulnerableFixtures()
        => AllFixtures.Where(f => f.ExpectedMinCVEs > 0);

    /// <summary>
    /// Gets fixture count statistics (computed fresh on each call).
    /// </summary>
    public static CorpusStats GetStats() => new()
    {
        TotalImages = AllFixtures.Count,
        BaseImages = BaseImages.Count,
        LanguageRuntimes = LanguageRuntimes.Count,
        ApplicationStacks = ApplicationStacks.Count,
        EnterpriseImages = EnterpriseImages.Count,
        VulnerableImages = AllFixtures.Count(f => f.ExpectedMinCVEs > 0)
    };
}
|
||||
|
||||
/// <summary>
/// Statistics about the corpus, as produced by <c>ExpandedCorpusFixtures.GetStats</c>.
/// </summary>
public sealed record CorpusStats
{
    /// <summary>Total number of images across every category.</summary>
    public int TotalImages { get; init; }

    /// <summary>Number of base OS images.</summary>
    public int BaseImages { get; init; }

    /// <summary>Number of language-runtime images.</summary>
    public int LanguageRuntimes { get; init; }

    /// <summary>Number of application-stack images (databases, web servers, etc.).</summary>
    public int ApplicationStacks { get; init; }

    /// <summary>Number of enterprise/complex images.</summary>
    public int EnterpriseImages { get; init; }

    /// <summary>Number of images declaring at least one expected CVE.</summary>
    public int VulnerableImages { get; init; }
}
|
||||
@@ -0,0 +1,383 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// GroundTruthMetrics.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 3.1
|
||||
// Description: Ground truth-based metrics calculation for competitor benchmarking
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text;
|
||||
using System.Text.Json;
|
||||
using System.Text.Json.Serialization;
|
||||
|
||||
namespace StellaOps.Parity.Tests.Competitors;
|
||||
|
||||
/// <summary>
/// Calculates precision, recall, and F1 score metrics against ground truth data.
/// Used for benchmarking scanner accuracy against known vulnerability sets.
/// </summary>
public static class GroundTruthMetrics
{
    /// <summary>
    /// Calculates metrics comparing scanner findings against ground truth.
    /// CVE identifiers are matched case-insensitively; known false positives
    /// from the ground truth are excluded from the false-positive count.
    /// </summary>
    /// <param name="scannerFindings">Findings reported by the scanner.</param>
    /// <param name="groundTruth">Ground truth vulnerability data.</param>
    /// <returns>Metrics result with precision, recall, and F1 score.</returns>
    public static MetricsResult Calculate(
        IReadOnlyCollection<ScannerFinding> scannerFindings,
        GroundTruth groundTruth)
    {
        ArgumentNullException.ThrowIfNull(scannerFindings);
        ArgumentNullException.ThrowIfNull(groundTruth);

        var comparer = StringComparer.OrdinalIgnoreCase;

        var expectedCves = groundTruth.ExpectedFindings
            .Select(finding => finding.CveId)
            .ToHashSet(comparer);

        var knownFalsePositives = groundTruth.KnownFalsePositives.ToHashSet(comparer);

        var reportedCves = scannerFindings
            .Select(finding => finding.CveId)
            .ToHashSet(comparer);

        // True positives: CVEs found by scanner that are in ground truth.
        var truePositives = reportedCves
            .Intersect(expectedCves, comparer)
            .ToList();

        // False positives: CVEs found by scanner that are NOT in ground truth,
        // excluding known false positives (expected scanner behavior, so not
        // penalized).
        var falsePositives = reportedCves
            .Except(expectedCves, comparer)
            .Except(knownFalsePositives, comparer)
            .ToList();

        // False negatives: CVEs in ground truth that the scanner did NOT find.
        var falseNegatives = expectedCves
            .Except(reportedCves, comparer)
            .ToList();

        var tp = truePositives.Count;
        var fp = falsePositives.Count;
        var fn = falseNegatives.Count;

        // Degenerate denominators default to a perfect score (nothing to find,
        // or nothing incorrectly reported); an all-zero case yields F1 = 0.
        var precision = tp + fp > 0 ? (double)tp / (tp + fp) : 1.0;
        var recall = tp + fn > 0 ? (double)tp / (tp + fn) : 1.0;
        var f1Score = precision + recall > 0
            ? 2 * (precision * recall) / (precision + recall)
            : 0.0;

        return new MetricsResult
        {
            ImageDigest = groundTruth.ImageDigest,
            ScannerName = string.Empty, // Set by caller
            TruePositiveCount = tp,
            FalsePositiveCount = fp,
            FalseNegativeCount = fn,
            Precision = precision,
            Recall = recall,
            F1Score = f1Score,
            TruePositives = [.. truePositives],
            FalsePositives = [.. falsePositives],
            FalseNegatives = [.. falseNegatives],
            GroundTruthCount = expectedCves.Count,
            ScannerFindingsCount = reportedCves.Count
        };
    }

    /// <summary>
    /// Compares metrics from two scanners against the same ground truth.
    /// Deltas are computed as A minus B; <c>ScannerABetter</c> is true when
    /// scanner A's F1 score is greater than or equal to scanner B's.
    /// </summary>
    public static ComparisonResult Compare(MetricsResult scannerA, MetricsResult scannerB) => new()
    {
        ScannerA = scannerA.ScannerName,
        ScannerB = scannerB.ScannerName,
        F1ScoreDelta = scannerA.F1Score - scannerB.F1Score,
        PrecisionDelta = scannerA.Precision - scannerB.Precision,
        RecallDelta = scannerA.Recall - scannerB.Recall,
        ScannerAMetrics = scannerA,
        ScannerBMetrics = scannerB,
        ScannerABetter = scannerA.F1Score >= scannerB.F1Score
    };
}
|
||||
|
||||
/// <summary>
/// Ground truth data for a container image: the vulnerability set a correct
/// scanner is expected to report, plus known deviations.
/// </summary>
public sealed record GroundTruth
{
    /// <summary>
    /// SHA-256 digest of the container image.
    /// </summary>
    public required string ImageDigest { get; init; }

    /// <summary>
    /// Image reference (e.g., "alpine:3.19").
    /// </summary>
    public required string ImageRef { get; init; }

    /// <summary>
    /// Date when ground truth was established.
    /// </summary>
    public required DateTimeOffset EstablishedAt { get; init; }

    /// <summary>
    /// Expected vulnerability findings that should be detected.
    /// </summary>
    public required ImmutableArray<GroundTruthFinding> ExpectedFindings { get; init; }

    /// <summary>
    /// Known false positives that scanners commonly report but are not actual
    /// vulnerabilities. Defaults to empty. Excluded from false-positive counts
    /// during metrics calculation.
    /// </summary>
    public ImmutableArray<string> KnownFalsePositives { get; init; } = [];

    /// <summary>
    /// Known false negatives that are difficult to detect. Defaults to empty.
    /// </summary>
    public ImmutableArray<string> KnownFalseNegatives { get; init; } = [];

    /// <summary>
    /// Optional free-form notes about the ground truth data.
    /// </summary>
    public string? Notes { get; init; }
}
|
||||
|
||||
/// <summary>
/// A single expected vulnerability finding in ground truth.
/// Pairs a CVE identifier with the package it is expected to be reported against.
/// </summary>
public sealed record GroundTruthFinding
{
    /// <summary>
    /// CVE identifier (e.g., "CVE-2024-1234").
    /// </summary>
    public required string CveId { get; init; }

    /// <summary>
    /// Package name containing the vulnerability.
    /// </summary>
    public required string Package { get; init; }

    /// <summary>
    /// Whether the vulnerability is reachable in typical usage. Defaults to true.
    /// </summary>
    public bool Reachable { get; init; } = true;

    /// <summary>
    /// Severity level, or null when unknown. Free-form string — presumably an
    /// advisory/scanner vocabulary (e.g. "HIGH"); not validated here.
    /// </summary>
    public string? Severity { get; init; }

    /// <summary>
    /// Fixed version, if available; null when no fix is known.
    /// </summary>
    public string? FixedVersion { get; init; }
}
|
||||
|
||||
/// <summary>
/// A finding reported by a scanner, as matched against <see cref="GroundTruth"/>.
/// </summary>
public sealed record ScannerFinding
{
    /// <summary>
    /// CVE identifier reported by the scanner.
    /// </summary>
    public required string CveId { get; init; }

    /// <summary>
    /// Package name the finding was reported against.
    /// </summary>
    public required string Package { get; init; }

    /// <summary>
    /// Installed version of the package, or null when the scanner did not report one.
    /// </summary>
    public string? Version { get; init; }

    /// <summary>
    /// Severity level reported by the scanner, or null when not reported.
    /// Free-form string; not normalized here.
    /// </summary>
    public string? Severity { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of metrics calculation for one scanner against one image's ground truth.
/// Immutable result record: all properties are init-only.
/// </summary>
public sealed record MetricsResult
{
    /// <summary>
    /// Image digest that was scanned.
    /// </summary>
    public required string ImageDigest { get; init; }

    /// <summary>
    /// Name of the scanner.
    /// </summary>
    // Fixed: was `set`, the lone mutable accessor on this otherwise init-only
    // record; `init` restores immutability and consistency with all siblings.
    public required string ScannerName { get; init; }

    /// <summary>
    /// Number of true positives (correctly identified CVEs).
    /// </summary>
    public required int TruePositiveCount { get; init; }

    /// <summary>
    /// Number of false positives (incorrectly reported CVEs).
    /// </summary>
    public required int FalsePositiveCount { get; init; }

    /// <summary>
    /// Number of false negatives (missed CVEs).
    /// </summary>
    public required int FalseNegativeCount { get; init; }

    /// <summary>
    /// Precision: TP / (TP + FP). Measures accuracy of positive predictions.
    /// </summary>
    public required double Precision { get; init; }

    /// <summary>
    /// Recall: TP / (TP + FN). Measures coverage of actual positives.
    /// </summary>
    public required double Recall { get; init; }

    /// <summary>
    /// F1 Score: Harmonic mean of precision and recall.
    /// </summary>
    public required double F1Score { get; init; }

    /// <summary>
    /// List of true positive CVE IDs.
    /// </summary>
    public required ImmutableArray<string> TruePositives { get; init; }

    /// <summary>
    /// List of false positive CVE IDs.
    /// </summary>
    public required ImmutableArray<string> FalsePositives { get; init; }

    /// <summary>
    /// List of false negative CVE IDs.
    /// </summary>
    public required ImmutableArray<string> FalseNegatives { get; init; }

    /// <summary>
    /// Total count of findings in ground truth.
    /// </summary>
    public required int GroundTruthCount { get; init; }

    /// <summary>
    /// Total findings reported by the scanner.
    /// </summary>
    public required int ScannerFindingsCount { get; init; }
}
|
||||
|
||||
/// <summary>
/// Result of comparing two scanners against the same ground truth.
/// All deltas are computed as A - B, so a positive delta favors scanner A.
/// </summary>
public sealed record ComparisonResult
{
    /// <summary>
    /// Name of scanner A.
    /// </summary>
    public required string ScannerA { get; init; }

    /// <summary>
    /// Name of scanner B.
    /// </summary>
    public required string ScannerB { get; init; }

    /// <summary>
    /// F1 score difference (A - B). Positive means A is better.
    /// </summary>
    public required double F1ScoreDelta { get; init; }

    /// <summary>
    /// Precision difference (A - B).
    /// </summary>
    public required double PrecisionDelta { get; init; }

    /// <summary>
    /// Recall difference (A - B).
    /// </summary>
    public required double RecallDelta { get; init; }

    /// <summary>
    /// Full metrics for scanner A.
    /// </summary>
    public required MetricsResult ScannerAMetrics { get; init; }

    /// <summary>
    /// Full metrics for scanner B.
    /// </summary>
    public required MetricsResult ScannerBMetrics { get; init; }

    /// <summary>
    /// Whether scanner A has equal or better F1 score (ties favor A).
    /// </summary>
    public required bool ScannerABetter { get; init; }
}
|
||||
|
||||
/// <summary>
/// Loader for ground truth JSON files. Reads/writes camelCase JSON with
/// string-valued enums.
/// </summary>
public static class GroundTruthLoader
{
    // Shared read options: case-insensitive property matching, camelCase
    // naming policy, enums serialized as strings.
    private static readonly JsonSerializerOptions JsonOptions = new()
    {
        PropertyNameCaseInsensitive = true,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        Converters = { new JsonStringEnumConverter() }
    };

    // Cached write options. Previously a new JsonSerializerOptions was
    // allocated per SaveAsync call (CA1869) — options should be reused.
    private static readonly JsonSerializerOptions WriteJsonOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = true,
        Converters = { new JsonStringEnumConverter() }
    };

    /// <summary>
    /// Loads ground truth from a JSON file.
    /// </summary>
    /// <param name="filePath">Path to the JSON file.</param>
    /// <param name="ct">Cancellation token for the file read.</param>
    /// <exception cref="InvalidOperationException">The file deserializes to null.</exception>
    public static async Task<GroundTruth> LoadAsync(string filePath, CancellationToken ct = default)
    {
        var json = await File.ReadAllTextAsync(filePath, ct);
        return JsonSerializer.Deserialize<GroundTruth>(json, JsonOptions)
            ?? throw new InvalidOperationException($"Failed to deserialize ground truth from {filePath}");
    }

    /// <summary>
    /// Loads all "*.json" ground truth files from a directory, recursively.
    /// </summary>
    /// <param name="directory">Root directory to search.</param>
    /// <param name="ct">Cancellation token, honored per file read.</param>
    /// <exception cref="InvalidOperationException">
    /// Any file fails to load; the failing path is included and the cause preserved.
    /// </exception>
    public static async Task<IReadOnlyList<GroundTruth>> LoadAllAsync(string directory, CancellationToken ct = default)
    {
        var files = Directory.GetFiles(directory, "*.json", SearchOption.AllDirectories);
        var results = new List<GroundTruth>(files.Length);

        foreach (var file in files)
        {
            try
            {
                results.Add(await LoadAsync(file, ct));
            }
            catch (Exception ex)
            {
                // Wrap so the failing file path is part of the diagnostic.
                throw new InvalidOperationException($"Failed to load ground truth from {file}", ex);
            }
        }

        return results;
    }

    /// <summary>
    /// Saves ground truth to an indented, camelCase JSON file.
    /// </summary>
    /// <param name="groundTruth">The record to serialize.</param>
    /// <param name="filePath">Destination path; overwritten if it exists.</param>
    /// <param name="ct">Cancellation token for the file write.</param>
    public static async Task SaveAsync(GroundTruth groundTruth, string filePath, CancellationToken ct = default)
    {
        var json = JsonSerializer.Serialize(groundTruth, WriteJsonOptions);
        await File.WriteAllTextAsync(filePath, json, ct);
    }
}
|
||||
@@ -1,7 +1,7 @@
|
||||
# StellaOps.Parity.Tests Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# AuditPack Unit Tests Task Board
|
||||
|
||||
This board mirrors active sprint tasks for this module.
|
||||
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
|
||||
|
||||
| Task ID | Status | Notes |
|
||||
| --- | --- | --- |
|
||||
|
||||
Reference in New Issue
Block a user