sln build fix (again), tests fixes, audit work and doctors work
@@ -1,7 +1,7 @@
# StellaOps.Integration.AirGap Task Board

This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.

| Task ID | Status | Notes |
| --- | --- | --- |

@@ -0,0 +1,354 @@
// -----------------------------------------------------------------------------
// CrossServiceClockSkewTests.cs
// Sprint: Testing Enhancement Advisory - Phase 3.2
// Description: Tests for cross-service behavior under clock skew conditions
// -----------------------------------------------------------------------------

using FluentAssertions;
using StellaOps.Integration.ClockSkew.Fixtures;
using StellaOps.TestKit;
using Xunit;

namespace StellaOps.Integration.ClockSkew;

/// <summary>
/// Tests for cross-service interactions under various clock skew scenarios.
/// Validates that HLC maintains correct event ordering despite wall clock differences.
/// </summary>
[Trait("Category", TestCategories.Integration)]
[Trait("Category", TestCategories.HLC)]
[Trait("Category", "ClockSkew")]
public class CrossServiceClockSkewTests : IClassFixture<ClockSkewServiceFixture>
{
    private readonly ClockSkewServiceFixture _fixture;

    public CrossServiceClockSkewTests(ClockSkewServiceFixture fixture)
    {
        _fixture = fixture;
        _fixture.ResetAllClocks();
        _fixture.ClearEventLog();
    }

    #region Scanner-Concelier Skew Tests

    [Fact]
    public void Scanner_Concelier_5SecondSkew_EventOrderingMaintained()
    {
        // Arrange - Scanner is 5 seconds ahead of Concelier
        _fixture.SetServiceClockSkew("scanner", TimeSpan.FromSeconds(5));
        _fixture.SetServiceClockSkew("concelier", TimeSpan.Zero);

        // Act - Scanner sends scan result to Concelier
        var scanComplete = _fixture.SendEvent("scanner", "concelier", "ScanComplete", "scan-123");
        var advisoryQuery = _fixture.SendEvent("concelier", "scanner", "AdvisoryQuery", "query-456");

        // Assert - HLC ordering maintained despite wall clock skew
        _fixture.VerifyHlcOrdering().Should().BeTrue();

        // Scanner's HLC should be ahead but causality preserved
        scanComplete.SourceHlcTimestamp.Should().BeLessThan(scanComplete.TargetHlcTimestamp);
        advisoryQuery.SourceHlcTimestamp.Should().BeLessThan(advisoryQuery.TargetHlcTimestamp);
    }

    [Fact]
    public void Scanner_Concelier_BackwardSkew_StillOrders()
    {
        // Arrange - Scanner is 5 seconds BEHIND Concelier
        _fixture.SetServiceClockSkew("scanner", TimeSpan.FromSeconds(-5));
        _fixture.SetServiceClockSkew("concelier", TimeSpan.Zero);

        // Act - Messages flow both directions
        var evt1 = _fixture.SendEvent("scanner", "concelier", "ScanRequest");
        var evt2 = _fixture.SendEvent("concelier", "scanner", "AdvisoryData");
        var evt3 = _fixture.SendEvent("scanner", "concelier", "ScanComplete");

        // Assert
        _fixture.VerifyHlcOrdering().Should().BeTrue();

        // Each response should have higher HLC than request
        evt1.TargetHlcTimestamp.Should().BeGreaterThan(evt1.SourceHlcTimestamp);
        evt2.TargetHlcTimestamp.Should().BeGreaterThan(evt2.SourceHlcTimestamp);
        evt3.TargetHlcTimestamp.Should().BeGreaterThan(evt3.SourceHlcTimestamp);
    }

    #endregion

    #region Gateway-Backend Skew Tests

    [Fact]
    public void Gateway_Backend_ClockDrift_NoTimestampConflicts()
    {
        // Arrange - Gateway ahead, Backend behind
        _fixture.SetServiceClockSkew("gateway", TimeSpan.FromSeconds(3));
        _fixture.SetServiceClockSkew("backend", TimeSpan.FromSeconds(-2));

        // Act - Simulate request/response flow
        var request = _fixture.SendEvent("gateway", "backend", "HttpRequest", "/api/scan");
        var processing = _fixture.GenerateLocalEvent("backend", "ProcessingStarted");
        _fixture.AdvanceServiceTime("backend", TimeSpan.FromMilliseconds(50));
        var response = _fixture.SendEvent("backend", "gateway", "HttpResponse", "200 OK");

        // Assert - Ordering should be: request < processing < response
        _fixture.VerifyHlcOrdering().Should().BeTrue();
        request.TargetHlcTimestamp.Should().BeLessThan(response.SourceHlcTimestamp);
    }

    [Fact]
    public void Gateway_Backend_RapidRequests_UniqueTimestamps()
    {
        // Arrange - Different skews
        _fixture.SetServiceClockSkew("gateway", TimeSpan.FromMilliseconds(500));
        _fixture.SetServiceClockSkew("backend", TimeSpan.FromMilliseconds(-500));

        // Act - Send 100 rapid requests
        var events = new List<CrossServiceEvent>();
        for (var i = 0; i < 100; i++)
        {
            events.Add(_fixture.SendEvent("gateway", "backend", "Request", $"req-{i}"));
        }

        // Assert - All HLC timestamps should be unique
        var allTimestamps = events
            .SelectMany(e => new[] { e.SourceHlcTimestamp, e.TargetHlcTimestamp })
            .ToList();

        allTimestamps.Should().OnlyHaveUniqueItems();
    }

    #endregion

    #region All Services Random Skew Tests

    [Fact]
    public void AllServices_RandomSkew_UpTo30Seconds_SystemFunctions()
    {
        // Arrange - Apply random skew up to 30 seconds to all services
        _fixture.SetServiceClockSkew("scanner", TimeSpan.FromSeconds(15));
        _fixture.SetServiceClockSkew("concelier", TimeSpan.FromSeconds(-10));
        _fixture.SetServiceClockSkew("gateway", TimeSpan.FromSeconds(25));
        _fixture.SetServiceClockSkew("backend", TimeSpan.FromSeconds(-30));

        // Act - Simulate multi-service workflow
        // Gateway receives request
        var gatewayReceive = _fixture.GenerateLocalEvent("gateway", "RequestReceived");

        // Gateway calls backend
        var toBackend = _fixture.SendEvent("gateway", "backend", "BackendCall");

        // Backend calls scanner
        var toScanner = _fixture.SendEvent("backend", "scanner", "ScanRequest");

        // Scanner calls concelier
        var toConcelier = _fixture.SendEvent("scanner", "concelier", "AdvisoryLookup");

        // Response chain
        var fromConcelier = _fixture.SendEvent("concelier", "scanner", "AdvisoryResponse");
        var fromScanner = _fixture.SendEvent("scanner", "backend", "ScanResponse");
        var fromBackend = _fixture.SendEvent("backend", "gateway", "BackendResponse");

        // Assert - HLC maintains ordering despite extreme clock skew
        _fixture.VerifyHlcOrdering().Should().BeTrue();

        // Response should always be after request in HLC terms
        fromConcelier.SourceHlcTimestamp.Should().BeGreaterThan(toConcelier.TargetHlcTimestamp);
        fromScanner.SourceHlcTimestamp.Should().BeGreaterThan(toScanner.TargetHlcTimestamp);
        fromBackend.SourceHlcTimestamp.Should().BeGreaterThan(toBackend.TargetHlcTimestamp);
    }

    [Fact]
    public void AllServices_DriftingClocks_MaintainsCausality()
    {
        // Arrange - Start synchronized
        _fixture.ResetAllClocks();

        var events = new List<CrossServiceEvent>();

        // Act - Simulate clock drift over time
        for (var round = 0; round < 10; round++)
        {
            // Apply random drift
            _fixture.ApplyRandomDrift(TimeSpan.FromSeconds(2));

            // Generate cross-service events
            events.Add(_fixture.SendEvent("gateway", "backend", $"Round{round}-1"));
            events.Add(_fixture.SendEvent("backend", "scanner", $"Round{round}-2"));
            events.Add(_fixture.SendEvent("scanner", "concelier", $"Round{round}-3"));
            events.Add(_fixture.SendEvent("concelier", "gateway", $"Round{round}-4"));

            // Advance base time
            _fixture.AdvanceAllTime(TimeSpan.FromMilliseconds(100));
        }

        // Assert - Despite drift, HLC ordering maintained
        _fixture.VerifyHlcOrdering().Should().BeTrue();
    }

    #endregion

    #region Edge Cases

    [Fact]
    public void Services_IdenticalTimestamps_StillUnique()
    {
        // Arrange - All services at exactly the same time
        _fixture.ResetAllClocks();

        // Act - All services generate events "simultaneously"
        var events = new List<CrossServiceEvent>
        {
            _fixture.SendEvent("scanner", "concelier", "Msg1"),
            _fixture.SendEvent("concelier", "gateway", "Msg2"),
            _fixture.SendEvent("gateway", "backend", "Msg3"),
            _fixture.SendEvent("backend", "scanner", "Msg4")
        };

        // Assert - All timestamps unique
        var allTimestamps = events
            .SelectMany(e => new[] { e.SourceHlcTimestamp, e.TargetHlcTimestamp })
            .ToList();

        allTimestamps.Should().OnlyHaveUniqueItems();
    }

    [Fact]
    public void Services_ExtremeSkew_60Seconds_HandledCorrectly()
    {
        // Arrange - Extreme 60 second skew
        _fixture.SetServiceClockSkew("scanner", TimeSpan.FromSeconds(60));
        _fixture.SetServiceClockSkew("backend", TimeSpan.FromSeconds(-60));

        // Act
        var evt1 = _fixture.SendEvent("scanner", "backend", "ExtremeSkew1");
        var evt2 = _fixture.SendEvent("backend", "scanner", "ExtremeSkew2");

        // Assert - HLC still maintains ordering
        _fixture.VerifyHlcOrdering().Should().BeTrue();

        // Maximum observed skew should reflect the clock difference
        var maxSkew = _fixture.GetMaxObservedSkew();
        maxSkew.Should().BeGreaterThan(TimeSpan.FromSeconds(100)); // 60 + 60 = 120 sec difference
    }

    [Fact]
    public void Services_ClockJump_Forward_Handled()
    {
        // Arrange
        _fixture.ResetAllClocks();

        var evt1 = _fixture.SendEvent("scanner", "concelier", "BeforeJump");
        var ts1 = evt1.TargetHlcTimestamp;

        // Clock jumps forward 10 seconds on scanner
        _fixture.AdvanceServiceTime("scanner", TimeSpan.FromSeconds(10));

        // Act
        var evt2 = _fixture.SendEvent("scanner", "concelier", "AfterJump");
        var ts2 = evt2.SourceHlcTimestamp;

        // Assert - New timestamp should be ahead
        ts2.Should().BeGreaterThan(ts1);
    }

    [Fact]
    public void Services_ClockJump_Backward_HandledByHLC()
    {
        // Arrange - Scanner at +10 seconds
        _fixture.SetServiceClockSkew("scanner", TimeSpan.FromSeconds(10));
        _fixture.SetServiceClockSkew("concelier", TimeSpan.Zero);

        var evt1 = _fixture.SendEvent("scanner", "concelier", "HighTime");

        // "Fix" scanner's clock by moving it back (simulating NTP correction)
        _fixture.SetServiceClockSkew("scanner", TimeSpan.Zero);

        // Act - Scanner continues operating
        var evt2 = _fixture.SendEvent("scanner", "concelier", "NormalTime");

        // Assert - HLC ensures monotonicity despite wall clock going backwards
        _fixture.VerifyHlcOrdering().Should().BeTrue();
        evt2.SourceHlcTimestamp.Should().BeGreaterThan(evt1.SourceHlcTimestamp);
    }

    #endregion

    #region Workflow Simulation Tests

    [Fact]
    public void FullScanWorkflow_WithSkew_MaintainsOrdering()
    {
        // Arrange - Realistic skew scenario
        _fixture.SetServiceClockSkew("gateway", TimeSpan.FromMilliseconds(100));
        _fixture.SetServiceClockSkew("backend", TimeSpan.FromMilliseconds(-50));
        _fixture.SetServiceClockSkew("scanner", TimeSpan.FromMilliseconds(200));
        _fixture.SetServiceClockSkew("concelier", TimeSpan.FromMilliseconds(-100));

        // Act - Simulate full scan workflow
        // 1. Gateway receives scan request
        var step1 = _fixture.SendEvent("gateway", "backend", "ScanRequest", "image:tag");

        // 2. Backend dispatches to scanner
        _fixture.AdvanceServiceTime("backend", TimeSpan.FromMilliseconds(10));
        var step2 = _fixture.SendEvent("backend", "scanner", "DispatchScan");

        // 3. Scanner queries advisories
        _fixture.AdvanceServiceTime("scanner", TimeSpan.FromMilliseconds(50));
        var step3 = _fixture.SendEvent("scanner", "concelier", "AdvisoryQuery");

        // 4. Concelier responds
        _fixture.AdvanceServiceTime("concelier", TimeSpan.FromMilliseconds(20));
        var step4 = _fixture.SendEvent("concelier", "scanner", "AdvisoryResponse");

        // 5. Scanner completes
        _fixture.AdvanceServiceTime("scanner", TimeSpan.FromMilliseconds(30));
        var step5 = _fixture.SendEvent("scanner", "backend", "ScanComplete");

        // 6. Backend responds to gateway
        _fixture.AdvanceServiceTime("backend", TimeSpan.FromMilliseconds(10));
        var step6 = _fixture.SendEvent("backend", "gateway", "ScanResult");

        // Assert - Full causal chain maintained
        _fixture.VerifyHlcOrdering().Should().BeTrue();

        // Verify causal chain
        step1.TargetHlcTimestamp.Should().BeLessThan(step2.SourceHlcTimestamp);
        step2.TargetHlcTimestamp.Should().BeLessThan(step3.SourceHlcTimestamp);
        step3.TargetHlcTimestamp.Should().BeLessThan(step4.SourceHlcTimestamp);
        step4.TargetHlcTimestamp.Should().BeLessThan(step5.SourceHlcTimestamp);
        step5.TargetHlcTimestamp.Should().BeLessThan(step6.SourceHlcTimestamp);
    }

    [Fact]
    public void ConcurrentWorkflows_WithSkew_AllMaintainOrdering()
    {
        // Arrange - Set up skew
        _fixture.SetServiceClockSkew("gateway", TimeSpan.FromSeconds(1));
        _fixture.SetServiceClockSkew("backend", TimeSpan.FromSeconds(-1));
        _fixture.SetServiceClockSkew("scanner", TimeSpan.FromSeconds(2));
        _fixture.SetServiceClockSkew("concelier", TimeSpan.FromSeconds(-2));

        var allEvents = new List<CrossServiceEvent>();

        // Act - Run 5 concurrent workflows
        for (var workflow = 0; workflow < 5; workflow++)
        {
            allEvents.Add(_fixture.SendEvent("gateway", "backend", $"Workflow{workflow}-Start"));
            allEvents.Add(_fixture.SendEvent("backend", "scanner", $"Workflow{workflow}-Scan"));
            allEvents.Add(_fixture.SendEvent("scanner", "concelier", $"Workflow{workflow}-Lookup"));
            allEvents.Add(_fixture.SendEvent("concelier", "scanner", $"Workflow{workflow}-Data"));
            allEvents.Add(_fixture.SendEvent("scanner", "backend", $"Workflow{workflow}-Done"));
            allEvents.Add(_fixture.SendEvent("backend", "gateway", $"Workflow{workflow}-Complete"));
        }

        // Assert - All events maintain HLC ordering
        _fixture.VerifyHlcOrdering().Should().BeTrue();

        // All timestamps should be unique
        var allTimestamps = allEvents
            .SelectMany(e => new[] { e.SourceHlcTimestamp, e.TargetHlcTimestamp })
            .ToList();

        allTimestamps.Should().OnlyHaveUniqueItems();
    }

    #endregion
}

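The assertions above rely on the standard hybrid logical clock update rules: a local tick never goes backwards even if the wall clock does, and receiving a remote timestamp pushes the receiver's clock past it. The sketch below illustrates those textbook rules only; it is not the StellaOps.HybridLogicalClock implementation, and the type and member names are hypothetical.

// Illustrative HLC update rules (assumed semantics, not the production code).
public readonly record struct SketchHlc(long WallMs, int Logical)
{
    // Local event or send: never let the wall component move backwards.
    public static SketchHlc Tick(SketchHlc current, long physicalMs)
    {
        var wall = Math.Max(current.WallMs, physicalMs);
        var logical = wall == current.WallMs ? current.Logical + 1 : 0;
        return new SketchHlc(wall, logical);
    }

    // Receive: merge local physical time, the local HLC, and the sender's HLC,
    // so the result is strictly greater than both sides' previous timestamps.
    public static SketchHlc Receive(SketchHlc current, SketchHlc remote, long physicalMs)
    {
        var wall = Math.Max(physicalMs, Math.Max(current.WallMs, remote.WallMs));
        int logical;
        if (wall == current.WallMs && wall == remote.WallMs)
        {
            logical = Math.Max(current.Logical, remote.Logical) + 1;
        }
        else if (wall == current.WallMs)
        {
            logical = current.Logical + 1;
        }
        else if (wall == remote.WallMs)
        {
            logical = remote.Logical + 1;
        }
        else
        {
            logical = 0;
        }
        return new SketchHlc(wall, logical);
    }
}
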
@@ -0,0 +1,365 @@
// -----------------------------------------------------------------------------
// ClockSkewServiceFixture.cs
// Sprint: Testing Enhancement Advisory - Phase 3.2
// Description: Test fixture for simulating clock skew across multiple services
// -----------------------------------------------------------------------------

using System.Collections.Concurrent;
using System.Collections.Immutable;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.HybridLogicalClock;
using Xunit;

namespace StellaOps.Integration.ClockSkew.Fixtures;

/// <summary>
/// Test fixture that simulates multiple services with independent clocks.
/// Allows testing cross-service interactions under various clock skew conditions.
/// </summary>
public sealed class ClockSkewServiceFixture : IAsyncLifetime
{
    private readonly ConcurrentDictionary<string, ServiceClock> _services = new();
    private readonly ConcurrentBag<CrossServiceEvent> _eventLog = [];
    private readonly Random _random = new(42); // Deterministic seed
    private long _globalEventSequence;

    /// <summary>
    /// Gets the event log containing all cross-service events.
    /// </summary>
    public IReadOnlyCollection<CrossServiceEvent> EventLog => _eventLog.ToImmutableArray();

    /// <summary>
    /// Gets all registered services.
    /// </summary>
    public IReadOnlyDictionary<string, ServiceClock> Services => _services.ToImmutableDictionary();

    /// <inheritdoc />
    public ValueTask InitializeAsync()
    {
        // Create default services
        CreateService("scanner", TimeSpan.Zero);
        CreateService("concelier", TimeSpan.Zero);
        CreateService("gateway", TimeSpan.Zero);
        CreateService("backend", TimeSpan.Zero);

        return ValueTask.CompletedTask;
    }

    /// <inheritdoc />
    public ValueTask DisposeAsync()
    {
        foreach (var service in _services.Values)
        {
            service.Dispose();
        }
        _services.Clear();
        return ValueTask.CompletedTask;
    }

    /// <summary>
    /// Creates a new service with the specified clock offset.
    /// </summary>
    /// <param name="serviceId">Service identifier.</param>
    /// <param name="clockOffset">Offset from base time (positive = ahead, negative = behind).</param>
    /// <returns>The created service clock.</returns>
    public ServiceClock CreateService(string serviceId, TimeSpan clockOffset)
    {
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        var serviceTime = baseTime + clockOffset;

        var timeProvider = new FakeTimeProvider(serviceTime);
        var stateStore = new InMemoryHlcStateStore();
        var hlc = new HybridLogicalClock.HybridLogicalClock(
            timeProvider,
            serviceId,
            stateStore,
            NullLogger<HybridLogicalClock.HybridLogicalClock>.Instance,
            TimeSpan.FromMinutes(1));

        var service = new ServiceClock(serviceId, timeProvider, hlc, clockOffset);
        _services[serviceId] = service;
        return service;
    }

    /// <summary>
    /// Sets clock skew for a service.
    /// </summary>
    public void SetServiceClockSkew(string serviceId, TimeSpan skew)
    {
        var service = GetService(serviceId);
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        service.TimeProvider.SetUtcNow(baseTime + skew);
        service.ClockOffset = skew;
    }

    /// <summary>
    /// Applies random clock drift to all services within the specified bounds.
    /// </summary>
    public void ApplyRandomDrift(TimeSpan maxDrift)
    {
        foreach (var service in _services.Values)
        {
            var driftMs = (_random.NextDouble() * 2 - 1) * maxDrift.TotalMilliseconds;
            var drift = TimeSpan.FromMilliseconds(driftMs);
            service.TimeProvider.Advance(drift);
        }
    }

    /// <summary>
    /// Simulates a cross-service call from source to target.
    /// </summary>
    public CrossServiceEvent SendEvent(
        string sourceService,
        string targetService,
        string eventType,
        string? payload = null)
    {
        var source = GetService(sourceService);
        var target = GetService(targetService);

        // Source generates timestamp
        var sourceTimestamp = source.HlcService.Tick();
        var sourceWallTime = source.TimeProvider.GetUtcNow();

        // Target receives and generates its timestamp
        var targetTimestamp = target.HlcService.Receive(sourceTimestamp);
        var targetWallTime = target.TimeProvider.GetUtcNow();

        var eventSeq = Interlocked.Increment(ref _globalEventSequence);

        var evt = new CrossServiceEvent
        {
            Sequence = eventSeq,
            SourceService = sourceService,
            TargetService = targetService,
            EventType = eventType,
            Payload = payload,
            SourceHlcTimestamp = sourceTimestamp,
            TargetHlcTimestamp = targetTimestamp,
            SourceWallTime = sourceWallTime,
            TargetWallTime = targetWallTime,
            SourceClockOffset = source.ClockOffset,
            TargetClockOffset = target.ClockOffset
        };

        _eventLog.Add(evt);
        return evt;
    }

    /// <summary>
    /// Simulates service generating a local event.
    /// </summary>
    public LocalServiceEvent GenerateLocalEvent(string serviceId, string eventType, string? payload = null)
    {
        var service = GetService(serviceId);
        var hlcTimestamp = service.HlcService.Tick();
        var wallTime = service.TimeProvider.GetUtcNow();

        return new LocalServiceEvent
        {
            ServiceId = serviceId,
            EventType = eventType,
            Payload = payload,
            HlcTimestamp = hlcTimestamp,
            WallTime = wallTime,
            ClockOffset = service.ClockOffset
        };
    }

    /// <summary>
    /// Advances time for all services by the specified duration.
    /// </summary>
    public void AdvanceAllTime(TimeSpan duration)
    {
        foreach (var service in _services.Values)
        {
            service.TimeProvider.Advance(duration);
        }
    }

    /// <summary>
    /// Advances time for a specific service.
    /// </summary>
    public void AdvanceServiceTime(string serviceId, TimeSpan duration)
    {
        var service = GetService(serviceId);
        service.TimeProvider.Advance(duration);
    }

    /// <summary>
    /// Verifies that all events in the log maintain causal ordering based on HLC timestamps.
    /// </summary>
    public bool VerifyHlcOrdering()
    {
        var events = _eventLog.ToList();

        foreach (var evt in events)
        {
            // For cross-service events, target HLC should be > source HLC
            if (evt.TargetHlcTimestamp <= evt.SourceHlcTimestamp)
            {
                return false;
            }
        }

        return true;
    }

    /// <summary>
    /// Gets the maximum wall clock difference observed across any event pair.
    /// </summary>
    public TimeSpan GetMaxObservedSkew()
    {
        var events = _eventLog.ToList();
        if (events.Count == 0) return TimeSpan.Zero;

        var maxSkew = TimeSpan.Zero;
        foreach (var evt in events)
        {
            var skew = (evt.TargetWallTime - evt.SourceWallTime).Duration();
            if (skew > maxSkew) maxSkew = skew;
        }

        return maxSkew;
    }

    /// <summary>
    /// Clears the event log.
    /// </summary>
    public void ClearEventLog()
    {
        _eventLog.Clear();
    }

    /// <summary>
    /// Resets all service clocks to base time with no offset.
    /// </summary>
    public void ResetAllClocks()
    {
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        foreach (var service in _services.Values)
        {
            service.TimeProvider.SetUtcNow(baseTime);
            service.ClockOffset = TimeSpan.Zero;
        }
    }

    private ServiceClock GetService(string serviceId)
    {
        return _services.TryGetValue(serviceId, out var service)
            ? service
            : throw new ArgumentException($"Service '{serviceId}' not found", nameof(serviceId));
    }
}

/// <summary>
/// Represents a service with its own clock for testing.
/// </summary>
public sealed class ServiceClock : IDisposable
{
    public ServiceClock(
        string serviceId,
        FakeTimeProvider timeProvider,
        IHybridLogicalClock hlcService,
        TimeSpan clockOffset)
    {
        ServiceId = serviceId;
        TimeProvider = timeProvider;
        HlcService = hlcService;
        ClockOffset = clockOffset;
    }

    /// <summary>
    /// Gets the service identifier.
    /// </summary>
    public string ServiceId { get; }

    /// <summary>
    /// Gets the fake time provider for this service.
    /// </summary>
    public FakeTimeProvider TimeProvider { get; }

    /// <summary>
    /// Gets the HLC service for this service.
    /// </summary>
    public IHybridLogicalClock HlcService { get; }

    /// <summary>
    /// Gets or sets the clock offset from base time.
    /// </summary>
    public TimeSpan ClockOffset { get; set; }

    public void Dispose()
    {
        // Cleanup if needed
    }
}

/// <summary>
/// Fake time provider for deterministic testing.
/// </summary>
public sealed class FakeTimeProvider : TimeProvider
{
    private DateTimeOffset _utcNow;
    private readonly object _lock = new();

    public FakeTimeProvider(DateTimeOffset? initialTime = null)
    {
        _utcNow = initialTime ?? DateTimeOffset.UtcNow;
    }

    public override DateTimeOffset GetUtcNow()
    {
        lock (_lock)
        {
            return _utcNow;
        }
    }

    public void Advance(TimeSpan duration)
    {
        lock (_lock)
        {
            _utcNow = _utcNow.Add(duration);
        }
    }

    public void SetUtcNow(DateTimeOffset time)
    {
        lock (_lock)
        {
            _utcNow = time;
        }
    }
}

/// <summary>
/// Represents a cross-service event for testing.
/// </summary>
public sealed record CrossServiceEvent
{
    public required long Sequence { get; init; }
    public required string SourceService { get; init; }
    public required string TargetService { get; init; }
    public required string EventType { get; init; }
    public string? Payload { get; init; }
    public required HlcTimestamp SourceHlcTimestamp { get; init; }
    public required HlcTimestamp TargetHlcTimestamp { get; init; }
    public required DateTimeOffset SourceWallTime { get; init; }
    public required DateTimeOffset TargetWallTime { get; init; }
    public required TimeSpan SourceClockOffset { get; init; }
    public required TimeSpan TargetClockOffset { get; init; }
}

/// <summary>
/// Represents a local service event for testing.
/// </summary>
public sealed record LocalServiceEvent
{
    public required string ServiceId { get; init; }
    public required string EventType { get; init; }
    public string? Payload { get; init; }
    public required HlcTimestamp HlcTimestamp { get; init; }
    public required DateTimeOffset WallTime { get; init; }
    public required TimeSpan ClockOffset { get; init; }
}

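For orientation, a hypothetical usage sketch of the fixture above (not part of the diff; it only calls members shown in the file): with scanner at +60 s and backend at -60 s the recorded wall-clock delta per event is about 120 s, which is what GetMaxObservedSkew reports and what the extreme-skew test asserts against.

// Hypothetical consumption of ClockSkewServiceFixture, for illustration only.
var fixture = new ClockSkewServiceFixture();
await fixture.InitializeAsync();                       // creates scanner/concelier/gateway/backend

fixture.SetServiceClockSkew("scanner", TimeSpan.FromSeconds(60));
fixture.SetServiceClockSkew("backend", TimeSpan.FromSeconds(-60));

var probe = fixture.SendEvent("scanner", "backend", "Probe");
// Wall clocks differ by |+60 - (-60)| = 120 s, yet HLC ordering still holds:
// probe.TargetHlcTimestamp > probe.SourceHlcTimestamp, and
// fixture.GetMaxObservedSkew() is roughly TimeSpan.FromSeconds(120).
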
@@ -0,0 +1,41 @@
<?xml version="1.0" encoding="utf-8"?>
<Project Sdk="Microsoft.NET.Sdk">
  <PropertyGroup>
    <TargetFramework>net10.0</TargetFramework>
    <Nullable>enable</Nullable>
    <ImplicitUsings>enable</ImplicitUsings>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <LangVersion>preview</LangVersion>
    <!-- Enable xUnit v3 (project doesn't end with .Tests so needs explicit opt-in) -->
    <UseXunitV3>true</UseXunitV3>
    <!-- Suppress xUnit analyzer warnings (same as Directory.Build.props does for .Tests projects) -->
    <NoWarn>$(NoWarn);xUnit1031;xUnit1041;xUnit1051;xUnit1026;xUnit1013;xUnit2013;xUnit3003</NoWarn>
  </PropertyGroup>

  <!-- Sprint: Testing Enhancement Advisory - Phase 3.2 -->
  <!-- Description: Cross-service clock skew integration tests -->

  <ItemGroup>
    <!-- xUnit packages (project doesn't end with .Tests so must be explicit) -->
    <PackageReference Include="Microsoft.NET.Test.Sdk" />
    <PackageReference Include="xunit.v3" />
    <PackageReference Include="xunit.runner.visualstudio">
      <PrivateAssets>all</PrivateAssets>
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
    </PackageReference>
    <PackageReference Include="FluentAssertions" />
    <PackageReference Include="Moq" />
    <PackageReference Include="coverlet.collector">
      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
      <PrivateAssets>all</PrivateAssets>
    </PackageReference>
    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" />
  </ItemGroup>

  <ItemGroup>
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.HybridLogicalClock\StellaOps.HybridLogicalClock.csproj" />
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
  </ItemGroup>
</Project>

@@ -1,7 +1,7 @@
# StellaOps.Integration.Determinism Task Board

This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.

| Task ID | Status | Notes |
| --- | --- | --- |

@@ -6,6 +6,7 @@ using System.Net.Http.Json;
using Microsoft.AspNetCore.Mvc.Testing;
using StellaOps.ReachGraph.Schema;
using StellaOps.Scanner.CallGraph;
using StellaOps.Scanner.Contracts;
using StellaOps.Scanner.Reachability;
using Xunit;

@@ -16,6 +16,8 @@
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <!-- Suppress xUnit1051: E2E integration tests don't need responsive cancellation -->
    <NoWarn>$(NoWarn);xUnit1051</NoWarn>
  </PropertyGroup>

  <ItemGroup> <PackageReference Include="xunit.v3" />

@@ -1,7 +1,7 @@
# StellaOps.Integration.E2E Task Board

This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.

| Task ID | Status | Notes |
| --- | --- | --- |

@@ -0,0 +1,264 @@
// -----------------------------------------------------------------------------
// DistributedHlcTests.cs
// Integration tests for multi-node HLC scenarios
// Sprint: Testing Enhancement Advisory - Phase 1.2
// -----------------------------------------------------------------------------

using FluentAssertions;
using StellaOps.Integration.HLC.Fixtures;
using StellaOps.TestKit;
using Xunit;

namespace StellaOps.Integration.HLC;

/// <summary>
/// Integration tests for distributed HLC scenarios with multiple nodes.
/// </summary>
[Trait("Category", TestCategories.Integration)]
[Trait("Category", TestCategories.HLC)]
public class DistributedHlcTests : IClassFixture<MultiNodeHlcFixture>
{
    private readonly MultiNodeHlcFixture _fixture;

    public DistributedHlcTests(MultiNodeHlcFixture fixture)
    {
        _fixture = fixture;
    }

    #region Multi-Node Causal Ordering Tests

    [Fact]
    public async Task ThreeNode_ConcurrentTicks_MaintainCausalOrder()
    {
        // Arrange - Create 3 nodes with synchronized time
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        _fixture.CreateNode("node-a", baseTime);
        _fixture.CreateNode("node-b", baseTime);
        _fixture.CreateNode("node-c", baseTime);

        // Act - Each node generates ticks
        var timestamps = new List<HybridLogicalClock.HlcTimestamp>();

        // Concurrent ticks at same physical time
        timestamps.Add(_fixture.Tick("node-a"));
        timestamps.Add(_fixture.Tick("node-b"));
        timestamps.Add(_fixture.Tick("node-c"));

        // More ticks
        _fixture.AdvanceAllTime(TimeSpan.FromMilliseconds(1));
        timestamps.Add(_fixture.Tick("node-a"));
        timestamps.Add(_fixture.Tick("node-b"));
        timestamps.Add(_fixture.Tick("node-c"));

        // Assert - All timestamps should be unique
        timestamps.Should().OnlyHaveUniqueItems();

        // Timestamps from same node should be monotonically increasing
        var nodeATimestamps = timestamps.Where(t => t.NodeId == "node-a").ToList();
        nodeATimestamps.Should().BeInAscendingOrder();

        var nodeBTimestamps = timestamps.Where(t => t.NodeId == "node-b").ToList();
        nodeBTimestamps.Should().BeInAscendingOrder();

        var nodeCTimestamps = timestamps.Where(t => t.NodeId == "node-c").ToList();
        nodeCTimestamps.Should().BeInAscendingOrder();

        await Task.CompletedTask;
    }

    [Fact]
    public async Task TwoNode_MessageExchange_PreservesCausality()
    {
        // Arrange
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        _fixture.CreateNode("sender", baseTime);
        _fixture.CreateNode("receiver", baseTime);

        // Act - Sender creates event
        var senderTs1 = _fixture.Tick("sender");

        // Receiver gets the message
        var receiverTs1 = await _fixture.SendMessageAsync("sender", "receiver", senderTs1);

        // Receiver generates new event
        var receiverTs2 = _fixture.Tick("receiver");

        // Assert - Causal ordering preserved
        receiverTs1.Should().BeGreaterThan(senderTs1);
        receiverTs2.Should().BeGreaterThan(receiverTs1);
    }

    [Fact]
    public async Task FiveNode_Broadcast_AllNodesAdvance()
    {
        // Arrange
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        for (var i = 0; i < 5; i++)
        {
            _fixture.CreateNode($"node-{i}", baseTime);
        }

        // Act - Node 0 broadcasts
        var originTs = _fixture.Tick("node-0");
        var results = await _fixture.BroadcastAsync("node-0", originTs);

        // Assert - All 4 other nodes received and advanced their clocks
        results.Should().HaveCount(4);

        foreach (var (nodeId, receivedTs) in results)
        {
            receivedTs.Should().BeGreaterThan(originTs,
                $"Node {nodeId} should have advanced past origin timestamp");
        }
    }

    [Fact]
    public async Task ThreeNode_ChainedMessages_MaintainTransitiveCausality()
    {
        // Arrange - A -> B -> C chain
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        _fixture.CreateNode("chain-a", baseTime);
        _fixture.CreateNode("chain-b", baseTime);
        _fixture.CreateNode("chain-c", baseTime);

        // Act - Chain of messages
        var tsA = _fixture.Tick("chain-a");
        var tsB = await _fixture.SendMessageAsync("chain-a", "chain-b", tsA);
        var tsC = await _fixture.SendMessageAsync("chain-b", "chain-c", tsB);

        // Assert - Transitive causality: A < B < C
        tsA.Should().BeLessThan(tsB);
        tsB.Should().BeLessThan(tsC);
    }

    #endregion

    #region Clock Skew Tests

    [Fact]
    public async Task TwoNode_ClockSkew_StillMaintainsOrdering()
    {
        // Arrange - Node B is 5 seconds ahead
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        _fixture.CreateNode("slow-node", baseTime);
        _fixture.CreateNode("fast-node", baseTime.AddSeconds(5)); // 5 seconds ahead

        // Act - Fast node sends to slow node
        var fastTs = _fixture.Tick("fast-node");
        var slowReceived = await _fixture.SendMessageAsync("fast-node", "slow-node", fastTs);

        // Slow node generates new event
        var slowTs = _fixture.Tick("slow-node");

        // Assert - Despite clock skew, ordering is maintained
        slowReceived.Should().BeGreaterThan(fastTs);
        slowTs.Should().BeGreaterThan(slowReceived);
    }

    [Fact]
    public async Task ThreeNode_VariableClockSkew_EventualConsistency()
    {
        // Arrange - Nodes with different clock skews
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        _fixture.CreateNode("sync-a", baseTime);
        _fixture.CreateNode("sync-b", baseTime.AddSeconds(2)); // 2 sec ahead
        _fixture.CreateNode("sync-c", baseTime.AddSeconds(-3)); // 3 sec behind

        // Act - Exchange messages
        var tsA = _fixture.Tick("sync-a");
        await _fixture.SendMessageAsync("sync-a", "sync-b", tsA);
        await _fixture.SendMessageAsync("sync-a", "sync-c", tsA);

        // All nodes now generate events
        var tsA2 = _fixture.Tick("sync-a");
        var tsB2 = _fixture.Tick("sync-b");
        var tsC2 = _fixture.Tick("sync-c");

        // Assert - All new events should be after original
        tsA2.Should().BeGreaterThan(tsA);
        tsB2.Should().BeGreaterThan(tsA);
        tsC2.Should().BeGreaterThan(tsA);
    }

    #endregion

    #region High Frequency Tests

    [Fact]
    public void HighFrequency_RapidTicks_AllUnique()
    {
        // Arrange
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        _fixture.CreateNode("rapid-node", baseTime);

        // Act - Generate 1000 ticks rapidly
        var timestamps = new List<HybridLogicalClock.HlcTimestamp>();
        for (var i = 0; i < 1000; i++)
        {
            timestamps.Add(_fixture.Tick("rapid-node"));
        }

        // Assert - All unique and monotonically increasing
        timestamps.Should().OnlyHaveUniqueItems();
        timestamps.Should().BeInAscendingOrder();
    }

    [Fact]
    public async Task HighFrequency_ConcurrentNodes_NoConflicts()
    {
        // Arrange
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        for (var i = 0; i < 10; i++)
        {
            _fixture.CreateNode($"concurrent-{i}", baseTime);
        }

        // Act - All nodes tick 100 times each
        var allTimestamps = new List<HybridLogicalClock.HlcTimestamp>();
        for (var tick = 0; tick < 100; tick++)
        {
            for (var node = 0; node < 10; node++)
            {
                allTimestamps.Add(_fixture.Tick($"concurrent-{node}"));
            }
            _fixture.AdvanceAllTime(TimeSpan.FromMilliseconds(1));
        }

        // Assert - All 1000 timestamps should be unique
        allTimestamps.Should().OnlyHaveUniqueItems();

        await Task.CompletedTask;
    }

    #endregion

    #region Edge Cases

    [Fact]
    public async Task LargeCluster_TenNodes_ScalesCorrectly()
    {
        // Arrange
        var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
        for (var i = 0; i < 10; i++)
        {
            _fixture.CreateNode($"cluster-{i:D2}", baseTime);
        }

        // Act - Simulate gossip-style message propagation
        var initialTs = _fixture.Tick("cluster-00");

        // Fan-out from node 0 to all others
        var firstWave = await _fixture.BroadcastAsync("cluster-00", initialTs);

        // Each node in first wave broadcasts to others
        foreach (var (nodeId, receivedTs) in firstWave)
        {
            await _fixture.BroadcastAsync(nodeId, receivedTs);
        }

        // Assert - Causal ordering maintained across all events
        _fixture.VerifyCausalOrdering().Should().BeTrue();
    }

    #endregion
}

@@ -0,0 +1,300 @@
// -----------------------------------------------------------------------------
// MultiNodeHlcFixture.cs
// Test fixture for multi-node HLC testing scenarios
// Sprint: Testing Enhancement Advisory - Phase 1.2
// -----------------------------------------------------------------------------

using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.HybridLogicalClock;
using Xunit;

namespace StellaOps.Integration.HLC.Fixtures;

/// <summary>
/// Test fixture that manages multiple HLC nodes with controllable time and network.
/// </summary>
public sealed class MultiNodeHlcFixture : IAsyncLifetime
{
    private readonly Dictionary<string, NodeContext> _nodes = [];
    private readonly NetworkPartitionSimulator _partitionSimulator = new();
    private readonly List<HlcTimestamp> _eventLog = [];
    private readonly object _eventLogLock = new();

    /// <summary>
    /// Gets the network partition simulator for controlling connectivity.
    /// </summary>
    public NetworkPartitionSimulator PartitionSimulator => _partitionSimulator;

    /// <summary>
    /// Gets all logged events in order of occurrence.
    /// </summary>
    public IReadOnlyList<HlcTimestamp> EventLog
    {
        get
        {
            lock (_eventLogLock)
            {
                return _eventLog.ToList().AsReadOnly();
            }
        }
    }

    /// <inheritdoc/>
    public ValueTask InitializeAsync() => ValueTask.CompletedTask;

    /// <inheritdoc/>
    public ValueTask DisposeAsync()
    {
        _nodes.Clear();
        _partitionSimulator.HealAll();
        return ValueTask.CompletedTask;
    }

    /// <summary>
    /// Creates a new node with its own HLC instance.
    /// </summary>
    public IHybridLogicalClock CreateNode(string nodeId, DateTimeOffset? initialTime = null)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeId);

        if (_nodes.ContainsKey(nodeId))
        {
            throw new ArgumentException($"Node {nodeId} already exists", nameof(nodeId));
        }

        var timeProvider = new FakeTimeProvider(initialTime ?? DateTimeOffset.UtcNow);
        var stateStore = new InMemoryHlcStateStore();
        var clock = new HybridLogicalClock.HybridLogicalClock(
            timeProvider,
            nodeId,
            stateStore,
            NullLogger<HybridLogicalClock.HybridLogicalClock>.Instance,
            TimeSpan.FromMinutes(1));

        var context = new NodeContext(clock, timeProvider, stateStore, nodeId);
        _nodes[nodeId] = context;

        return clock;
    }

    /// <summary>
    /// Gets the HLC instance for a node.
    /// </summary>
    public IHybridLogicalClock GetNode(string nodeId)
    {
        if (!_nodes.TryGetValue(nodeId, out var context))
        {
            throw new ArgumentException($"Node {nodeId} does not exist", nameof(nodeId));
        }
        return context.Clock;
    }

    /// <summary>
    /// Gets the time provider for a node (for advancing time).
    /// </summary>
    public FakeTimeProvider GetTimeProvider(string nodeId)
    {
        if (!_nodes.TryGetValue(nodeId, out var context))
        {
            throw new ArgumentException($"Node {nodeId} does not exist", nameof(nodeId));
        }
        return context.TimeProvider;
    }

    /// <summary>
    /// Advances time for a specific node.
    /// </summary>
    public void AdvanceTime(string nodeId, TimeSpan duration)
    {
        GetTimeProvider(nodeId).Advance(duration);
    }

    /// <summary>
    /// Advances time for all nodes uniformly.
    /// </summary>
    public void AdvanceAllTime(TimeSpan duration)
    {
        foreach (var context in _nodes.Values)
        {
            context.TimeProvider.Advance(duration);
        }
    }

    /// <summary>
    /// Sets absolute time for a node (for creating clock skew).
    /// </summary>
    public void SetTime(string nodeId, DateTimeOffset time)
    {
        GetTimeProvider(nodeId).SetUtcNow(time);
    }

    /// <summary>
    /// Generates a tick on a node and logs the event.
    /// </summary>
    public HlcTimestamp Tick(string nodeId)
    {
        var clock = GetNode(nodeId);
        var timestamp = clock.Tick();

        lock (_eventLogLock)
        {
            _eventLog.Add(timestamp);
        }

        return timestamp;
    }

    /// <summary>
    /// Sends a message from one node to another (simulating distributed communication).
    /// Respects network partitions and latency.
    /// </summary>
    public async Task<HlcTimestamp> SendMessageAsync(
        string fromNode,
        string toNode,
        HlcTimestamp messageTimestamp,
        CancellationToken ct = default)
    {
        // Check partition
        if (_partitionSimulator.IsPartitioned(fromNode, toNode))
        {
            throw new NetworkPartitionException(fromNode, toNode);
        }

        // Apply latency
        var latency = _partitionSimulator.GetLatency(fromNode, toNode);
        var delay = latency.ComputeDelay();
        if (delay > TimeSpan.Zero)
        {
            // For testing, we advance the receiver's time instead of waiting
            AdvanceTime(toNode, delay);
        }

        // Receiver processes the message
        var receiverClock = GetNode(toNode);
        var newTimestamp = receiverClock.Receive(messageTimestamp);

        lock (_eventLogLock)
        {
            _eventLog.Add(newTimestamp);
        }

        return await Task.FromResult(newTimestamp);
    }

    /// <summary>
    /// Broadcasts a message from one node to all others.
    /// Returns timestamps from nodes that received the message.
    /// </summary>
    public async Task<Dictionary<string, HlcTimestamp>> BroadcastAsync(
        string fromNode,
        HlcTimestamp messageTimestamp,
        CancellationToken ct = default)
    {
        var results = new Dictionary<string, HlcTimestamp>();

        foreach (var nodeId in _nodes.Keys)
        {
            if (nodeId == fromNode)
            {
                continue;
            }

            try
            {
                var received = await SendMessageAsync(fromNode, nodeId, messageTimestamp, ct);
                results[nodeId] = received;
            }
            catch (NetworkPartitionException)
            {
                // Node is partitioned, skip
            }
        }

        return results;
    }

    /// <summary>
    /// Gets all node IDs in the cluster.
    /// </summary>
    public IReadOnlyList<string> GetNodeIds() => _nodes.Keys.ToList().AsReadOnly();

    /// <summary>
    /// Verifies that all timestamps in the event log maintain causal ordering.
    /// </summary>
    public bool VerifyCausalOrdering()
    {
        lock (_eventLogLock)
        {
            for (var i = 1; i < _eventLog.Count; i++)
            {
                // Each event should be >= the previous event from the same node
                var current = _eventLog[i];
                var previous = _eventLog
                    .Take(i)
                    .Where(e => e.NodeId == current.NodeId)
                    .LastOrDefault();

                if (previous != default && current <= previous)
                {
                    return false;
                }
            }
            return true;
        }
    }

    /// <summary>
    /// Clears the event log.
    /// </summary>
    public void ClearEventLog()
    {
        lock (_eventLogLock)
        {
            _eventLog.Clear();
        }
    }

    private sealed record NodeContext(
        IHybridLogicalClock Clock,
        FakeTimeProvider TimeProvider,
        InMemoryHlcStateStore StateStore,
        string NodeId);
}

/// <summary>
/// Fake time provider for deterministic testing.
/// </summary>
public sealed class FakeTimeProvider : TimeProvider
{
    private DateTimeOffset _utcNow;
    private readonly object _lock = new();

    public FakeTimeProvider(DateTimeOffset? initialTime = null)
    {
        _utcNow = initialTime ?? DateTimeOffset.UtcNow;
    }

    public override DateTimeOffset GetUtcNow()
    {
        lock (_lock)
        {
            return _utcNow;
        }
    }

    public void Advance(TimeSpan duration)
    {
        lock (_lock)
        {
            _utcNow = _utcNow.Add(duration);
        }
    }

    public void SetUtcNow(DateTimeOffset time)
    {
        lock (_lock)
        {
            _utcNow = time;
        }
    }
}

@@ -0,0 +1,230 @@
// -----------------------------------------------------------------------------
// NetworkPartitionSimulator.cs
// Simulates network partitions between distributed nodes for testing
// Sprint: Testing Enhancement Advisory - Phase 1.2
// -----------------------------------------------------------------------------

using System.Collections.Concurrent;
using System.Collections.Immutable;

namespace StellaOps.Integration.HLC.Fixtures;

/// <summary>
/// Simulates network partitions between nodes for distributed testing scenarios.
/// </summary>
public sealed class NetworkPartitionSimulator
{
    private readonly ConcurrentDictionary<string, HashSet<string>> _partitions = new();
    private readonly ConcurrentDictionary<(string From, string To), LatencyConfig> _latencies = new();
    private readonly object _lock = new();

    /// <summary>
    /// Isolates a node from all other nodes (full partition).
    /// </summary>
    public void IsolateNode(string nodeId)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeId);

        lock (_lock)
        {
            _partitions[nodeId] = ["*"]; // Special marker for full isolation
        }
    }

    /// <summary>
    /// Partitions communication between two specific nodes.
    /// </summary>
    public void PartitionNodes(string nodeA, string nodeB)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeA);
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeB);

        lock (_lock)
        {
            if (!_partitions.TryGetValue(nodeA, out var aPartitions))
            {
                aPartitions = [];
                _partitions[nodeA] = aPartitions;
            }
            aPartitions.Add(nodeB);

            if (!_partitions.TryGetValue(nodeB, out var bPartitions))
            {
                bPartitions = [];
                _partitions[nodeB] = bPartitions;
            }
            bPartitions.Add(nodeA);
        }
    }

    /// <summary>
    /// Heals the partition for a specific node (restores connectivity).
    /// </summary>
    public void HealNode(string nodeId)
    {
        ArgumentException.ThrowIfNullOrWhiteSpace(nodeId);

        lock (_lock)
        {
            _partitions.TryRemove(nodeId, out _);

            // Also remove this node from other nodes' partition lists
            foreach (var kvp in _partitions)
            {
                kvp.Value.Remove(nodeId);
            }
        }
    }

    /// <summary>
    /// Heals partition between two specific nodes.
    /// </summary>
    public void HealPartition(string nodeA, string nodeB)
    {
        lock (_lock)
        {
            if (_partitions.TryGetValue(nodeA, out var aPartitions))
            {
                aPartitions.Remove(nodeB);
            }
            if (_partitions.TryGetValue(nodeB, out var bPartitions))
            {
                bPartitions.Remove(nodeA);
            }
        }
    }

    /// <summary>
    /// Heals all partitions (restores full connectivity).
    /// </summary>
    public void HealAll()
    {
        lock (_lock)
        {
            _partitions.Clear();
        }
    }

    /// <summary>
    /// Checks if communication between two nodes is blocked.
    /// </summary>
    public bool IsPartitioned(string fromNode, string toNode)
    {
        lock (_lock)
        {
            // Check if fromNode is fully isolated
            if (_partitions.TryGetValue(fromNode, out var fromPartitions))
            {
                if (fromPartitions.Contains("*") || fromPartitions.Contains(toNode))
                {
                    return true;
                }
            }

            // Check if toNode is fully isolated
            if (_partitions.TryGetValue(toNode, out var toPartitions))
            {
                if (toPartitions.Contains("*") || toPartitions.Contains(fromNode))
                {
                    return true;
                }
            }

            return false;
        }
    }

    /// <summary>
    /// Sets simulated latency between two nodes.
    /// </summary>
    public void SetLatency(string fromNode, string toNode, TimeSpan baseLatency, double jitterPercent = 0)
    {
        _latencies[(fromNode, toNode)] = new LatencyConfig(baseLatency, jitterPercent);
    }

    /// <summary>
    /// Gets the configured latency between two nodes (or default if not set).
    /// </summary>
    public LatencyConfig GetLatency(string fromNode, string toNode)
    {
        if (_latencies.TryGetValue((fromNode, toNode), out var config))
        {
            return config;
        }
        return LatencyConfig.Default;
    }

    /// <summary>
    /// Clears all latency configurations.
    /// </summary>
    public void ClearLatencies()
    {
        _latencies.Clear();
    }

    /// <summary>
    /// Gets the current partition state for diagnostic purposes.
    /// </summary>
    public ImmutableDictionary<string, ImmutableHashSet<string>> GetPartitionState()
    {
        lock (_lock)
        {
            return _partitions.ToImmutableDictionary(
                kvp => kvp.Key,
                kvp => kvp.Value.ToImmutableHashSet());
        }
    }
}

/// <summary>
/// Configuration for simulated network latency.
/// </summary>
public sealed record LatencyConfig
{
    public static readonly LatencyConfig Default = new(TimeSpan.Zero, 0);

    public TimeSpan BaseLatency { get; }
    public double JitterPercent { get; }

    public LatencyConfig(TimeSpan baseLatency, double jitterPercent)
    {
        BaseLatency = baseLatency;
        JitterPercent = Math.Clamp(jitterPercent, 0, 100);
    }

    /// <summary>
    /// Calculates the actual delay including jitter.
    /// </summary>
    public TimeSpan ComputeDelay(Random? random = null)
    {
        if (BaseLatency <= TimeSpan.Zero)
        {
            return TimeSpan.Zero;
        }

        if (JitterPercent <= 0)
        {
            return BaseLatency;
        }

        random ??= Random.Shared;
        var jitterFactor = 1.0 + ((random.NextDouble() * 2 - 1) * JitterPercent / 100);
        return TimeSpan.FromTicks((long)(BaseLatency.Ticks * jitterFactor));
    }
}

/// <summary>
/// Exception thrown when a network partition prevents communication.
/// </summary>
public sealed class NetworkPartitionException : Exception
{
    public string FromNode { get; }
    public string ToNode { get; }

    public NetworkPartitionException(string fromNode, string toNode)
        : base($"Network partition: {fromNode} cannot communicate with {toNode}")
    {
        FromNode = fromNode;
        ToNode = toNode;
    }
}

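A brief usage sketch of the latency API above (hypothetical values; it only calls members defined in this file): with a 20 ms base latency and 50 percent jitter, ComputeDelay returns something between 10 ms and 30 ms, and SendMessageAsync advances the receiver's fake clock by that amount instead of sleeping.

// Hypothetical use of the latency simulation, for illustration only.
var simulator = new NetworkPartitionSimulator();
simulator.SetLatency("node-a", "node-b", TimeSpan.FromMilliseconds(20), jitterPercent: 50);

var config = simulator.GetLatency("node-a", "node-b");
var delay = config.ComputeDelay(new Random(42));   // seeded Random keeps the jitter deterministic
// delay is 20 ms plus or minus up to 50 percent, i.e. between 10 ms and 30 ms.
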
@@ -0,0 +1,318 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// HlcNetworkPartitionTests.cs
|
||||
// Integration tests for HLC behavior during network partitions
|
||||
// Sprint: Testing Enhancement Advisory - Phase 1.2
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.Integration.HLC.Fixtures;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.HLC;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for HLC behavior during network partitions.
|
||||
/// </summary>
|
||||
[Trait("Category", TestCategories.Integration)]
|
||||
[Trait("Category", TestCategories.HLC)]
|
||||
[Trait("Category", TestCategories.Chaos)]
|
||||
public class HlcNetworkPartitionTests : IClassFixture<MultiNodeHlcFixture>
|
||||
{
|
||||
private readonly MultiNodeHlcFixture _fixture;
|
||||
|
||||
public HlcNetworkPartitionTests(MultiNodeHlcFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
#region Basic Partition Tests
|
||||
|
||||
[Fact]
|
||||
public async Task NetworkPartition_SplitBrain_NoDataLoss()
|
||||
{
|
||||
// Arrange - Create 3 nodes
|
||||
var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
|
||||
_fixture.CreateNode("part-a", baseTime);
|
||||
_fixture.CreateNode("part-b", baseTime);
|
||||
_fixture.CreateNode("part-c", baseTime);
|
||||
|
||||
// Act - Partition part-c away from part-a and part-b
|
||||
_fixture.PartitionSimulator.IsolateNode("part-c");
|
||||
|
||||
// A and B can still communicate
|
||||
var tsA = _fixture.Tick("part-a");
|
||||
var receivedB = await _fixture.SendMessageAsync("part-a", "part-b", tsA);
|
||||
|
||||
// C cannot receive messages
|
||||
var sendToC = async () => await _fixture.SendMessageAsync("part-a", "part-c", tsA);
|
||||
await sendToC.Should().ThrowAsync<NetworkPartitionException>();
|
||||
|
||||
// C continues to operate independently
|
||||
var tsC = _fixture.Tick("part-c");
|
||||
|
||||
// Assert - All nodes have generated valid timestamps
|
||||
tsA.Should().NotBeNull();
|
||||
receivedB.Should().BeGreaterThan(tsA);
|
||||
tsC.Should().NotBeNull();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NetworkPartition_HealedPartition_CorrectReconciliation()
|
||||
{
|
||||
// Arrange
|
||||
var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
|
||||
_fixture.CreateNode("heal-a", baseTime);
|
||||
_fixture.CreateNode("heal-b", baseTime);
|
||||
|
||||
// Generate initial events
|
||||
var tsA1 = _fixture.Tick("heal-a");
|
||||
await _fixture.SendMessageAsync("heal-a", "heal-b", tsA1);
|
||||
|
||||
// Create partition
|
||||
_fixture.PartitionSimulator.PartitionNodes("heal-a", "heal-b");
|
||||
|
||||
// Both nodes operate independently during partition
|
||||
_fixture.AdvanceTime("heal-a", TimeSpan.FromSeconds(10));
|
||||
_fixture.AdvanceTime("heal-b", TimeSpan.FromSeconds(5));
|
||||
|
||||
var tsA_during = _fixture.Tick("heal-a");
|
||||
var tsB_during = _fixture.Tick("heal-b");
|
||||
|
||||
// Heal partition
|
||||
_fixture.PartitionSimulator.HealPartition("heal-a", "heal-b");
|
||||
|
||||
// A sends its state to B
|
||||
var tsB_after = await _fixture.SendMessageAsync("heal-a", "heal-b", tsA_during);
|
||||
|
||||
// Assert - B has reconciled and its new timestamp is greater
|
||||
tsB_after.Should().BeGreaterThan(tsA_during);
|
||||
tsB_after.Should().BeGreaterThan(tsB_during);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task NetworkPartition_AsymmetricPartition_HandledGracefully()
|
||||
{
|
||||
// Arrange - A can send to B, but B cannot send to A
|
||||
var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
|
||||
_fixture.CreateNode("asym-a", baseTime);
|
||||
_fixture.CreateNode("asym-b", baseTime);
|
||||
|
||||
// Only partition B -> A direction
|
||||
_fixture.PartitionSimulator.PartitionNodes("asym-b", "asym-a");
|
||||
_fixture.PartitionSimulator.HealPartition("asym-a", "asym-b"); // Allow A -> B
|
||||
|
||||
// Act - A can send to B
|
||||
var tsA = _fixture.Tick("asym-a");
|
||||
var receivedB = await _fixture.SendMessageAsync("asym-a", "asym-b", tsA);
|
||||
|
||||
// B cannot send to A
|
||||
var tsB = _fixture.Tick("asym-b");
|
||||
var sendToA = async () => await _fixture.SendMessageAsync("asym-b", "asym-a", tsB);
|
||||
|
||||
// Assert
|
||||
receivedB.Should().BeGreaterThan(tsA);
|
||||
await sendToA.Should().ThrowAsync<NetworkPartitionException>();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Split Brain Scenarios
|
||||
|
||||
[Fact]
|
||||
public async Task SplitBrain_TwoPartitions_IndependentProgress()
|
||||
{
|
||||
// Arrange - 4 nodes split into two groups: {A,B} and {C,D}
|
||||
var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
|
||||
_fixture.CreateNode("split-a", baseTime);
|
||||
_fixture.CreateNode("split-b", baseTime);
|
||||
_fixture.CreateNode("split-c", baseTime);
|
||||
_fixture.CreateNode("split-d", baseTime);
|
||||
|
||||
// Create split brain: A-B can communicate, C-D can communicate, no cross-group
|
||||
_fixture.PartitionSimulator.PartitionNodes("split-a", "split-c");
|
||||
_fixture.PartitionSimulator.PartitionNodes("split-a", "split-d");
|
||||
_fixture.PartitionSimulator.PartitionNodes("split-b", "split-c");
|
||||
_fixture.PartitionSimulator.PartitionNodes("split-b", "split-d");
|
||||
|
||||
// Act - Both groups operate independently
|
||||
var tsA = _fixture.Tick("split-a");
|
||||
var receivedB = await _fixture.SendMessageAsync("split-a", "split-b", tsA);
|
||||
|
||||
var tsC = _fixture.Tick("split-c");
|
||||
var receivedD = await _fixture.SendMessageAsync("split-c", "split-d", tsC);
|
||||
|
||||
// Verify cross-group communication fails
|
||||
var crossGroup = async () => await _fixture.SendMessageAsync("split-a", "split-c", tsA);
|
||||
await crossGroup.Should().ThrowAsync<NetworkPartitionException>();
|
||||
|
||||
// Assert - Both groups made progress
|
||||
receivedB.Should().BeGreaterThan(tsA);
|
||||
receivedD.Should().BeGreaterThan(tsC);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SplitBrain_Merge_ConvergesToConsistentState()
|
||||
{
|
||||
// Arrange
|
||||
var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
|
||||
_fixture.CreateNode("merge-a", baseTime);
|
||||
_fixture.CreateNode("merge-b", baseTime);
|
||||
|
||||
// Both nodes work independently
|
||||
_fixture.PartitionSimulator.IsolateNode("merge-a");
|
||||
_fixture.PartitionSimulator.IsolateNode("merge-b");
|
||||
|
||||
// Advance time differently
|
||||
_fixture.AdvanceTime("merge-a", TimeSpan.FromSeconds(100));
|
||||
_fixture.AdvanceTime("merge-b", TimeSpan.FromSeconds(50));
|
||||
|
||||
// Generate many events on each
|
||||
for (var i = 0; i < 100; i++)
|
||||
{
|
||||
_fixture.Tick("merge-a");
|
||||
_fixture.Tick("merge-b");
|
||||
}
|
||||
|
||||
var tsA_before = _fixture.GetNode("merge-a").Current;
|
||||
var tsB_before = _fixture.GetNode("merge-b").Current;
|
||||
|
||||
// Heal partition
|
||||
_fixture.PartitionSimulator.HealAll();
|
||||
|
||||
// Exchange messages to synchronize
|
||||
var tsA_sent = _fixture.Tick("merge-a");
|
||||
var tsB_received = await _fixture.SendMessageAsync("merge-a", "merge-b", tsA_sent);
|
||||
|
||||
var tsB_sent = _fixture.Tick("merge-b");
|
||||
var tsA_received = await _fixture.SendMessageAsync("merge-b", "merge-a", tsB_sent);
|
||||
|
||||
// Assert - Both nodes are now synchronized (new events are greater than pre-merge)
|
||||
tsA_received.Should().BeGreaterThan(tsA_before);
|
||||
tsB_received.Should().BeGreaterThan(tsB_before);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Recovery Tests
|
||||
|
||||
[Fact]
|
||||
public async Task PartitionRecovery_LongPartition_NoClockDrift()
|
||||
{
|
||||
// Arrange
|
||||
var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
|
||||
_fixture.CreateNode("long-a", baseTime);
|
||||
_fixture.CreateNode("long-b", baseTime);
|
||||
|
||||
// Initial sync
|
||||
var initial = _fixture.Tick("long-a");
|
||||
await _fixture.SendMessageAsync("long-a", "long-b", initial);
|
||||
|
||||
// Long partition (simulated by large time advance)
|
||||
_fixture.PartitionSimulator.PartitionNodes("long-a", "long-b");
|
||||
|
||||
// A advances by 1 hour
|
||||
_fixture.AdvanceTime("long-a", TimeSpan.FromHours(1));
|
||||
// B advances by only 30 minutes (slower clock)
|
||||
_fixture.AdvanceTime("long-b", TimeSpan.FromMinutes(30));
|
||||
|
||||
// Generate events during partition
|
||||
var tsA_partition = _fixture.Tick("long-a");
|
||||
var tsB_partition = _fixture.Tick("long-b");
|
||||
|
||||
// Heal and sync
|
||||
_fixture.PartitionSimulator.HealAll();
|
||||
var tsB_synced = await _fixture.SendMessageAsync("long-a", "long-b", tsA_partition);
|
||||
|
||||
// Assert - B's clock has caught up to A's time
|
||||
tsB_synced.PhysicalTime.Should().BeGreaterThanOrEqualTo(tsA_partition.PhysicalTime);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task MultiplePartitionCycles_Stable()
|
||||
{
|
||||
// Arrange
|
||||
var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
|
||||
_fixture.CreateNode("cycle-a", baseTime);
|
||||
_fixture.CreateNode("cycle-b", baseTime);
|
||||
|
||||
// Multiple partition/heal cycles
|
||||
for (var cycle = 0; cycle < 5; cycle++)
|
||||
{
|
||||
// Sync phase
|
||||
var ts = _fixture.Tick("cycle-a");
|
||||
await _fixture.SendMessageAsync("cycle-a", "cycle-b", ts);
|
||||
|
||||
// Partition phase
|
||||
_fixture.PartitionSimulator.PartitionNodes("cycle-a", "cycle-b");
|
||||
_fixture.AdvanceTime("cycle-a", TimeSpan.FromSeconds(10));
|
||||
_fixture.AdvanceTime("cycle-b", TimeSpan.FromSeconds(8));
|
||||
_fixture.Tick("cycle-a");
|
||||
_fixture.Tick("cycle-b");
|
||||
|
||||
// Heal
|
||||
_fixture.PartitionSimulator.HealAll();
|
||||
}
|
||||
|
||||
// Final sync
|
||||
var finalA = _fixture.Tick("cycle-a");
|
||||
var finalB = await _fixture.SendMessageAsync("cycle-a", "cycle-b", finalA);
|
||||
|
||||
// Assert - System is stable after multiple cycles
|
||||
finalB.Should().BeGreaterThan(finalA);
|
||||
_fixture.VerifyCausalOrdering().Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Latency Tests
|
||||
|
||||
[Fact]
|
||||
public async Task HighLatency_MessageDelivery_MaintainsOrdering()
|
||||
{
|
||||
// Arrange
|
||||
var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
|
||||
_fixture.CreateNode("latency-a", baseTime);
|
||||
_fixture.CreateNode("latency-b", baseTime);
|
||||
|
||||
// Set high latency (500ms with 20% jitter)
|
||||
_fixture.PartitionSimulator.SetLatency("latency-a", "latency-b",
|
||||
TimeSpan.FromMilliseconds(500), 20);
|
||||
|
||||
// Act - Send message with latency
|
||||
var tsA = _fixture.Tick("latency-a");
|
||||
var tsB = await _fixture.SendMessageAsync("latency-a", "latency-b", tsA);
|
||||
|
||||
// Assert - B's timestamp accounts for latency
|
||||
tsB.Should().BeGreaterThan(tsA);
|
||||
// Physical time should have advanced by at least the latency
|
||||
(tsB.PhysicalTime - tsA.PhysicalTime).Should().BeGreaterThanOrEqualTo(400); // 500 ms base minus up to 20% jitter => at least 400 ms
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task VariableLatency_MultipleMessages_OrderPreserved()
|
||||
{
|
||||
// Arrange
|
||||
var baseTime = new DateTimeOffset(2026, 1, 12, 0, 0, 0, TimeSpan.Zero);
|
||||
_fixture.CreateNode("var-a", baseTime);
|
||||
_fixture.CreateNode("var-b", baseTime);
|
||||
|
||||
// Variable latency
|
||||
_fixture.PartitionSimulator.SetLatency("var-a", "var-b",
|
||||
TimeSpan.FromMilliseconds(100), 50); // High jitter
|
||||
|
||||
// Act - Send multiple messages
|
||||
var timestamps = new List<HybridLogicalClock.HlcTimestamp>();
|
||||
for (var i = 0; i < 10; i++)
|
||||
{
|
||||
var ts = _fixture.Tick("var-a");
|
||||
var received = await _fixture.SendMessageAsync("var-a", "var-b", ts);
|
||||
timestamps.Add(received);
|
||||
}
|
||||
|
||||
// Assert - All received timestamps are monotonically increasing
|
||||
timestamps.Should().BeInAscendingOrder();
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
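// Sketch of the classic hybrid logical clock receive rule these tests exercise
// (HlcSketch is a stand-in, not the real StellaOps.HybridLogicalClock type): the
// physical component becomes the max of the local wall clock, the local HLC, and the
// remote HLC, and the logical counter only advances on ties, which is why every
// received timestamp in the assertions above compares strictly greater than the
// timestamp that was sent.
internal readonly record struct HlcSketch(long Physical, int Logical)
{
    public HlcSketch Receive(HlcSketch remote, long wallClockMs)
    {
        var physical = Math.Max(wallClockMs, Math.Max(Physical, remote.Physical));

        int logical;
        if (physical == Physical && physical == remote.Physical)
        {
            logical = Math.Max(Logical, remote.Logical) + 1;
        }
        else if (physical == Physical)
        {
            logical = Logical + 1;
        }
        else if (physical == remote.Physical)
        {
            logical = remote.Logical + 1;
        }
        else
        {
            logical = 0;
        }

        return new HlcSketch(physical, logical);
    }
}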
@@ -0,0 +1,37 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<!-- Enable xUnit v3 (project doesn't end with .Tests so needs explicit opt-in) -->
|
||||
<UseXunitV3>true</UseXunitV3>
|
||||
<!-- Suppress xUnit analyzer warnings (same as Directory.Build.props does for .Tests projects) -->
|
||||
<NoWarn>$(NoWarn);xUnit1031;xUnit1041;xUnit1051;xUnit1026;xUnit1013;xUnit2013;xUnit3003</NoWarn>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- xUnit packages (project doesn't end with .Tests so must be explicit) -->
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="FluentAssertions" />
|
||||
<PackageReference Include="Moq" />
|
||||
<PackageReference Include="coverlet.collector">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.HybridLogicalClock\StellaOps.HybridLogicalClock.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -0,0 +1,319 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ArtifactImmutabilityTests.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 2.1
|
||||
// Description: Tests for artifact immutability verification
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Integration.Immutability.Fixtures;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Immutability;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for verifying artifact immutability across builds.
|
||||
/// </summary>
|
||||
[Trait("Category", TestCategories.Integration)]
|
||||
[Trait("Category", TestCategories.Immutability)]
|
||||
public class ArtifactImmutabilityTests : IClassFixture<ArtifactVerificationFixture>
|
||||
{
|
||||
private readonly ArtifactVerificationFixture _fixture;
|
||||
|
||||
public ArtifactImmutabilityTests(ArtifactVerificationFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
#region Byte-Identical Build Tests
|
||||
|
||||
[Fact]
|
||||
public void BuildArtifacts_SameContent_ProduceIdenticalDigests()
|
||||
{
|
||||
// Arrange - Create two "builds" with identical content
|
||||
var content = "deterministic content for testing"u8.ToArray();
|
||||
|
||||
var artifact1Path = _fixture.CreateTestArtifact("build1/output.dll", content);
|
||||
var artifact2Path = _fixture.CreateTestArtifact("build2/output.dll", content);
|
||||
|
||||
// Act
|
||||
var digest1 = ArtifactVerificationFixture.ComputeFileDigest(artifact1Path);
|
||||
var digest2 = ArtifactVerificationFixture.ComputeFileDigest(artifact2Path);
|
||||
|
||||
// Assert
|
||||
digest1.Should().Be(digest2);
|
||||
digest1.Should().StartWith("sha256:");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildArtifacts_DifferentContent_ProduceDifferentDigests()
|
||||
{
|
||||
// Arrange
|
||||
var content1 = "content version 1"u8.ToArray();
|
||||
var content2 = "content version 2"u8.ToArray();
|
||||
|
||||
var artifact1Path = _fixture.CreateTestArtifact("diff1/output.dll", content1);
|
||||
var artifact2Path = _fixture.CreateTestArtifact("diff2/output.dll", content2);
|
||||
|
||||
// Act
|
||||
var digest1 = ArtifactVerificationFixture.ComputeFileDigest(artifact1Path);
|
||||
var digest2 = ArtifactVerificationFixture.ComputeFileDigest(artifact2Path);
|
||||
|
||||
// Assert
|
||||
digest1.Should().NotBe(digest2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildManifests_ByteIdentical_WhenAllArtifactsMatch()
|
||||
{
|
||||
// Arrange
|
||||
var sharedDigest = "sha256:abc123def456";
|
||||
var buildA = new ArtifactManifest
|
||||
{
|
||||
BuildId = "build-a",
|
||||
BuildTime = DateTimeOffset.UtcNow,
|
||||
Artifacts =
|
||||
[
|
||||
new ArtifactEntry("app.dll", sharedDigest, 1024, "application/octet-stream"),
|
||||
new ArtifactEntry("app.pdb", "sha256:pdb123", 2048, "application/octet-stream")
|
||||
]
|
||||
};
|
||||
|
||||
var buildB = new ArtifactManifest
|
||||
{
|
||||
BuildId = "build-b",
|
||||
BuildTime = DateTimeOffset.UtcNow.AddMinutes(5),
|
||||
Artifacts =
|
||||
[
|
||||
new ArtifactEntry("app.dll", sharedDigest, 1024, "application/octet-stream"),
|
||||
new ArtifactEntry("app.pdb", "sha256:pdb123", 2048, "application/octet-stream")
|
||||
]
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _fixture.CompareBuilds(buildA, buildB);
|
||||
|
||||
// Assert
|
||||
result.ByteIdentical.Should().BeTrue();
|
||||
result.Mismatches.Should().BeEmpty();
|
||||
result.OnlyInBuildA.Should().BeEmpty();
|
||||
result.OnlyInBuildB.Should().BeEmpty();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildManifests_NotIdentical_WhenDigestsDiffer()
|
||||
{
|
||||
// Arrange
|
||||
var buildA = new ArtifactManifest
|
||||
{
|
||||
BuildId = "build-a",
|
||||
BuildTime = DateTimeOffset.UtcNow,
|
||||
Artifacts =
|
||||
[
|
||||
new ArtifactEntry("app.dll", "sha256:version1", 1024, null)
|
||||
]
|
||||
};
|
||||
|
||||
var buildB = new ArtifactManifest
|
||||
{
|
||||
BuildId = "build-b",
|
||||
BuildTime = DateTimeOffset.UtcNow,
|
||||
Artifacts =
|
||||
[
|
||||
new ArtifactEntry("app.dll", "sha256:version2", 1025, null)
|
||||
]
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _fixture.CompareBuilds(buildA, buildB);
|
||||
|
||||
// Assert
|
||||
result.ByteIdentical.Should().BeFalse();
|
||||
result.Mismatches.Should().HaveCount(1);
|
||||
result.Mismatches[0].Name.Should().Be("app.dll");
|
||||
result.Mismatches[0].DigestA.Should().Be("sha256:version1");
|
||||
result.Mismatches[0].DigestB.Should().Be("sha256:version2");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void BuildManifests_DetectsMissingArtifacts()
|
||||
{
|
||||
// Arrange
|
||||
var buildA = new ArtifactManifest
|
||||
{
|
||||
BuildId = "build-a",
|
||||
BuildTime = DateTimeOffset.UtcNow,
|
||||
Artifacts =
|
||||
[
|
||||
new ArtifactEntry("app.dll", "sha256:abc", 1024, null),
|
||||
new ArtifactEntry("extra.dll", "sha256:extra", 512, null)
|
||||
]
|
||||
};
|
||||
|
||||
var buildB = new ArtifactManifest
|
||||
{
|
||||
BuildId = "build-b",
|
||||
BuildTime = DateTimeOffset.UtcNow,
|
||||
Artifacts =
|
||||
[
|
||||
new ArtifactEntry("app.dll", "sha256:abc", 1024, null),
|
||||
new ArtifactEntry("new.dll", "sha256:new", 256, null)
|
||||
]
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _fixture.CompareBuilds(buildA, buildB);
|
||||
|
||||
// Assert
|
||||
result.ByteIdentical.Should().BeFalse();
|
||||
result.OnlyInBuildA.Should().Contain("extra.dll");
|
||||
result.OnlyInBuildB.Should().Contain("new.dll");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region SBOM Linkage Tests
|
||||
|
||||
[Fact]
|
||||
public void SbomLinkage_AllArtifactsLinked_WhenDigestsMatch()
|
||||
{
|
||||
// Arrange
|
||||
var artifactManifest = new ArtifactManifest
|
||||
{
|
||||
BuildId = "build-1",
|
||||
BuildTime = DateTimeOffset.UtcNow,
|
||||
Artifacts =
|
||||
[
|
||||
new ArtifactEntry("app.dll", "sha256:app123", 1024, null),
|
||||
new ArtifactEntry("lib.dll", "sha256:lib456", 2048, null)
|
||||
]
|
||||
};
|
||||
|
||||
var sbomManifest = new SbomManifest
|
||||
{
|
||||
Digest = "sha256:sbom789",
|
||||
Format = "spdx-2.3",
|
||||
ReferencedDigests = ["sha256:app123", "sha256:lib456", "sha256:other"]
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _fixture.VerifySbomLinkage(artifactManifest, sbomManifest);
|
||||
|
||||
// Assert
|
||||
result.AllLinked.Should().BeTrue();
|
||||
result.LinkedArtifacts.Should().HaveCount(2);
|
||||
result.UnlinkedArtifacts.Should().BeEmpty();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void SbomLinkage_DetectsUnlinkedArtifacts()
|
||||
{
|
||||
// Arrange
|
||||
var artifactManifest = new ArtifactManifest
|
||||
{
|
||||
BuildId = "build-1",
|
||||
BuildTime = DateTimeOffset.UtcNow,
|
||||
Artifacts =
|
||||
[
|
||||
new ArtifactEntry("app.dll", "sha256:app123", 1024, null),
|
||||
new ArtifactEntry("lib.dll", "sha256:lib456", 2048, null),
|
||||
new ArtifactEntry("untracked.dll", "sha256:untracked", 512, null)
|
||||
]
|
||||
};
|
||||
|
||||
var sbomManifest = new SbomManifest
|
||||
{
|
||||
Digest = "sha256:sbom789",
|
||||
Format = "cyclonedx-1.5",
|
||||
ReferencedDigests = ["sha256:app123", "sha256:lib456"]
|
||||
};
|
||||
|
||||
// Act
|
||||
var result = _fixture.VerifySbomLinkage(artifactManifest, sbomManifest);
|
||||
|
||||
// Assert
|
||||
result.AllLinked.Should().BeFalse();
|
||||
result.UnlinkedArtifacts.Should().Contain("untracked.dll");
|
||||
result.LinkedArtifacts.Should().Contain("app.dll");
|
||||
result.LinkedArtifacts.Should().Contain("lib.dll");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Content Addressability Tests
|
||||
|
||||
[Fact]
|
||||
public void ContentAddressability_DigestDeterministic()
|
||||
{
|
||||
// Arrange
|
||||
var content = System.Text.Encoding.UTF8.GetBytes(
|
||||
"{\"name\":\"test\",\"version\":\"1.0.0\"}");
|
||||
|
||||
// Act - compute multiple times
|
||||
var digest1 = ArtifactVerificationFixture.ComputeDigest(content);
|
||||
var digest2 = ArtifactVerificationFixture.ComputeDigest(content);
|
||||
var digest3 = ArtifactVerificationFixture.ComputeDigest(content);
|
||||
|
||||
// Assert
|
||||
digest1.Should().Be(digest2);
|
||||
digest2.Should().Be(digest3);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ContentAddressability_EmptyContent_HasValidDigest()
|
||||
{
|
||||
// Arrange
|
||||
var emptyContent = Array.Empty<byte>();
|
||||
|
||||
// Act
|
||||
var digest = ArtifactVerificationFixture.ComputeDigest(emptyContent);
|
||||
|
||||
// Assert - SHA-256 of empty input is well-known
|
||||
digest.Should().Be("sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Multi-Artifact Build Tests
|
||||
|
||||
[Fact]
|
||||
public void MultiArtifactBuild_AllArtifactsTracked()
|
||||
{
|
||||
// Arrange - Simulate a build with multiple output types
|
||||
var artifacts = new List<(string Name, byte[] Content)>
|
||||
{
|
||||
("bin/app.dll", "app binary content"u8.ToArray()),
|
||||
("bin/app.pdb", "debug symbols"u8.ToArray()),
|
||||
("bin/app.deps.json", "{\"dependencies\":{}}"u8.ToArray()),
|
||||
("bin/app.runtimeconfig.json", "{\"runtimeOptions\":{}}"u8.ToArray())
|
||||
};
|
||||
|
||||
var entries = ImmutableArray.CreateBuilder<ArtifactEntry>();
|
||||
foreach (var (name, content) in artifacts)
|
||||
{
|
||||
var path = _fixture.CreateTestArtifact($"multi/{name}", content);
|
||||
var digest = ArtifactVerificationFixture.ComputeFileDigest(path);
|
||||
entries.Add(new ArtifactEntry(name, digest, content.Length, null));
|
||||
}
|
||||
|
||||
var manifest = new ArtifactManifest
|
||||
{
|
||||
BuildId = "multi-build-1",
|
||||
BuildTime = DateTimeOffset.UtcNow,
|
||||
Artifacts = entries.ToImmutable()
|
||||
};
|
||||
|
||||
// Act
|
||||
_fixture.RegisterManifest("multi-build", manifest);
|
||||
var retrieved = _fixture.GetManifest("multi-build");
|
||||
|
||||
// Assert
|
||||
retrieved.Should().NotBeNull();
|
||||
retrieved!.Artifacts.Should().HaveCount(4);
|
||||
retrieved.Artifacts.Select(a => a.Name).Should().Contain("bin/app.dll");
|
||||
retrieved.Artifacts.Select(a => a.Name).Should().Contain("bin/app.pdb");
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
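// Standalone cross-check (EmptyDigestSketch is illustrative, not part of the test
// class above): the well-known SHA-256 of empty input asserted in
// ContentAddressability_EmptyContent_HasValidDigest can be reproduced directly.
internal static class EmptyDigestSketch
{
    public static string Compute()
    {
        var hash = System.Security.Cryptography.SHA256.HashData(Array.Empty<byte>());
        // Yields e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855.
        return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
    }
}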
@@ -0,0 +1,351 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ContainerDigestVerificationTests.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 2.1
|
||||
// Description: Tests for container image digest verification
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using System.Text.Json;
|
||||
using FluentAssertions;
|
||||
using StellaOps.Integration.Immutability.Fixtures;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Immutability;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for container image digest verification.
|
||||
/// </summary>
|
||||
[Trait("Category", TestCategories.Integration)]
|
||||
[Trait("Category", TestCategories.Immutability)]
|
||||
public class ContainerDigestVerificationTests : IClassFixture<ArtifactVerificationFixture>
|
||||
{
|
||||
private readonly ArtifactVerificationFixture _fixture;
|
||||
|
||||
public ContainerDigestVerificationTests(ArtifactVerificationFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
}
|
||||
|
||||
#region OCI Manifest Digest Tests
|
||||
|
||||
[Fact]
|
||||
public void OciManifest_DigestMatchesContent()
|
||||
{
|
||||
// Arrange - Create a simulated OCI manifest
|
||||
var manifest = new OciManifest
|
||||
{
|
||||
SchemaVersion = 2,
|
||||
MediaType = "application/vnd.oci.image.manifest.v1+json",
|
||||
Config = new OciDescriptor
|
||||
{
|
||||
MediaType = "application/vnd.oci.image.config.v1+json",
|
||||
Size = 1234,
|
||||
Digest = "sha256:config123"
|
||||
},
|
||||
Layers =
|
||||
[
|
||||
new OciDescriptor
|
||||
{
|
||||
MediaType = "application/vnd.oci.image.layer.v1.tar+gzip",
|
||||
Size = 50000,
|
||||
Digest = "sha256:layer1abc"
|
||||
},
|
||||
new OciDescriptor
|
||||
{
|
||||
MediaType = "application/vnd.oci.image.layer.v1.tar+gzip",
|
||||
Size = 30000,
|
||||
Digest = "sha256:layer2def"
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
// Act - Serialize and compute digest
|
||||
var json = JsonSerializer.SerializeToUtf8Bytes(manifest, new JsonSerializerOptions
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false
|
||||
});
|
||||
var digest = ArtifactVerificationFixture.ComputeDigest(json);
|
||||
|
||||
// Compute again to verify determinism
|
||||
var json2 = JsonSerializer.SerializeToUtf8Bytes(manifest, new JsonSerializerOptions
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false
|
||||
});
|
||||
var digest2 = ArtifactVerificationFixture.ComputeDigest(json2);
|
||||
|
||||
// Assert
|
||||
digest.Should().Be(digest2);
|
||||
digest.Should().StartWith("sha256:");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void OciManifest_DifferentLayers_ProduceDifferentDigest()
|
||||
{
|
||||
// Arrange
|
||||
var manifest1 = CreateTestOciManifest("sha256:layer1");
|
||||
var manifest2 = CreateTestOciManifest("sha256:layer2");
|
||||
|
||||
// Act
|
||||
var digest1 = ComputeManifestDigest(manifest1);
|
||||
var digest2 = ComputeManifestDigest(manifest2);
|
||||
|
||||
// Assert
|
||||
digest1.Should().NotBe(digest2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void OciManifest_SameLayers_ProduceSameDigest()
|
||||
{
|
||||
// Arrange - Same content but created at different times
|
||||
var manifest1 = CreateTestOciManifest("sha256:sharedlayer");
|
||||
var manifest2 = CreateTestOciManifest("sha256:sharedlayer");
|
||||
|
||||
// Act
|
||||
var digest1 = ComputeManifestDigest(manifest1);
|
||||
var digest2 = ComputeManifestDigest(manifest2);
|
||||
|
||||
// Assert
|
||||
digest1.Should().Be(digest2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Image Reference Verification Tests
|
||||
|
||||
[Fact]
|
||||
public void ImageReference_TagAndDigest_BothResolvable()
|
||||
{
|
||||
// Arrange
|
||||
var imageRef = new ImageReference
|
||||
{
|
||||
Registry = "registry.example.com",
|
||||
Repository = "myapp",
|
||||
Tag = "v1.0.0",
|
||||
Digest = "sha256:abc123def456"
|
||||
};
|
||||
|
||||
// Act
|
||||
var tagRef = imageRef.ToTagReference();
|
||||
var digestRef = imageRef.ToDigestReference();
|
||||
|
||||
// Assert
|
||||
tagRef.Should().Be("registry.example.com/myapp:v1.0.0");
|
||||
digestRef.Should().Be("registry.example.com/myapp@sha256:abc123def456");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ImageReference_DigestOnly_IsImmutable()
|
||||
{
|
||||
// Arrange
|
||||
var imageRef = new ImageReference
|
||||
{
|
||||
Registry = "ghcr.io",
|
||||
Repository = "org/image",
|
||||
Digest = "sha256:immutabledigest"
|
||||
};
|
||||
|
||||
// Act & Assert
|
||||
imageRef.IsImmutable.Should().BeTrue();
|
||||
imageRef.ToDigestReference().Should().Be("ghcr.io/org/image@sha256:immutabledigest");
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void ImageReference_TagOnly_IsNotImmutable()
|
||||
{
|
||||
// Arrange
|
||||
var imageRef = new ImageReference
|
||||
{
|
||||
Registry = "docker.io",
|
||||
Repository = "library/nginx",
|
||||
Tag = "latest"
|
||||
};
|
||||
|
||||
// Act & Assert
|
||||
imageRef.IsImmutable.Should().BeFalse();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Layer Verification Tests
|
||||
|
||||
[Fact]
|
||||
public void LayerChain_DigestsFormMerkleTree()
|
||||
{
|
||||
// Arrange - Simulate layer chain
|
||||
var layers = new[]
|
||||
{
|
||||
"sha256:base",
|
||||
"sha256:deps",
|
||||
"sha256:app"
|
||||
};
|
||||
|
||||
// Act - Compute chain digest (simplified Merkle)
|
||||
var chainDigest = ComputeLayerChainDigest(layers);
|
||||
|
||||
// Assert
|
||||
chainDigest.Should().StartWith("sha256:");
|
||||
|
||||
// Verify determinism
|
||||
var chainDigest2 = ComputeLayerChainDigest(layers);
|
||||
chainDigest.Should().Be(chainDigest2);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public void LayerChain_OrderMatters()
|
||||
{
|
||||
// Arrange
|
||||
var layers1 = new[] { "sha256:a", "sha256:b", "sha256:c" };
|
||||
var layers2 = new[] { "sha256:c", "sha256:b", "sha256:a" };
|
||||
|
||||
// Act
|
||||
var digest1 = ComputeLayerChainDigest(layers1);
|
||||
var digest2 = ComputeLayerChainDigest(layers2);
|
||||
|
||||
// Assert
|
||||
digest1.Should().NotBe(digest2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region SBOM-to-Image Linkage Tests
|
||||
|
||||
[Fact]
|
||||
public void SbomImageLinkage_VerifiesDigestChain()
|
||||
{
|
||||
// Arrange
|
||||
var imageDigest = "sha256:imageabc123";
|
||||
var sbomDigest = "sha256:sbomabc123";
|
||||
|
||||
var linkage = new SbomImageLinkage
|
||||
{
|
||||
ImageDigest = imageDigest,
|
||||
SbomDigest = sbomDigest,
|
||||
SbomFormat = "spdx-2.3",
|
||||
LinkedAt = DateTimeOffset.UtcNow
|
||||
};
|
||||
|
||||
// Act
|
||||
var linkageDigest = ComputeLinkageDigest(linkage);
|
||||
|
||||
// Assert
|
||||
linkageDigest.Should().StartWith("sha256:");
|
||||
|
||||
// Verify different image produces different linkage
|
||||
var linkage2 = linkage with { ImageDigest = "sha256:differentimage" };
|
||||
var linkageDigest2 = ComputeLinkageDigest(linkage2);
|
||||
linkageDigest.Should().NotBe(linkageDigest2);
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Helper Methods
|
||||
|
||||
private static OciManifest CreateTestOciManifest(string layerDigest)
|
||||
{
|
||||
return new OciManifest
|
||||
{
|
||||
SchemaVersion = 2,
|
||||
MediaType = "application/vnd.oci.image.manifest.v1+json",
|
||||
Config = new OciDescriptor
|
||||
{
|
||||
MediaType = "application/vnd.oci.image.config.v1+json",
|
||||
Size = 1000,
|
||||
Digest = "sha256:config"
|
||||
},
|
||||
Layers =
|
||||
[
|
||||
new OciDescriptor
|
||||
{
|
||||
MediaType = "application/vnd.oci.image.layer.v1.tar+gzip",
|
||||
Size = 10000,
|
||||
Digest = layerDigest
|
||||
}
|
||||
]
|
||||
};
|
||||
}
|
||||
|
||||
private static string ComputeManifestDigest(OciManifest manifest)
|
||||
{
|
||||
var json = JsonSerializer.SerializeToUtf8Bytes(manifest, new JsonSerializerOptions
|
||||
{
|
||||
PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
|
||||
WriteIndented = false
|
||||
});
|
||||
return ArtifactVerificationFixture.ComputeDigest(json);
|
||||
}
|
||||
|
||||
private static string ComputeLayerChainDigest(string[] layerDigests)
|
||||
{
|
||||
var combined = string.Join("|", layerDigests);
|
||||
var bytes = System.Text.Encoding.UTF8.GetBytes(combined);
|
||||
return ArtifactVerificationFixture.ComputeDigest(bytes);
|
||||
}
|
||||
|
||||
private static string ComputeLinkageDigest(SbomImageLinkage linkage)
|
||||
{
|
||||
var combined = $"{linkage.ImageDigest}|{linkage.SbomDigest}|{linkage.SbomFormat}";
|
||||
var bytes = System.Text.Encoding.UTF8.GetBytes(combined);
|
||||
return ArtifactVerificationFixture.ComputeDigest(bytes);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
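// Design note as a sketch (CanonicalManifestJson is illustrative, not part of the
// tests above): a manifest digest is only stable while the serializer settings stay
// fixed, so caching a single JsonSerializerOptions instance avoids accidental drift
// between the two serialization passes in OciManifest_DigestMatchesContent.
internal static class CanonicalManifestJson
{
    private static readonly JsonSerializerOptions Options = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        WriteIndented = false
    };

    public static string Digest(OciManifest manifest) =>
        ArtifactVerificationFixture.ComputeDigest(
            JsonSerializer.SerializeToUtf8Bytes(manifest, Options));
}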
|
||||
|
||||
#region Test Models
|
||||
|
||||
/// <summary>
|
||||
/// Simplified OCI manifest for testing.
|
||||
/// </summary>
|
||||
public sealed record OciManifest
|
||||
{
|
||||
public int SchemaVersion { get; init; }
|
||||
public required string MediaType { get; init; }
|
||||
public required OciDescriptor Config { get; init; }
|
||||
public required ImmutableArray<OciDescriptor> Layers { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// OCI descriptor for config or layer.
|
||||
/// </summary>
|
||||
public sealed record OciDescriptor
|
||||
{
|
||||
public required string MediaType { get; init; }
|
||||
public required long Size { get; init; }
|
||||
public required string Digest { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Container image reference.
|
||||
/// </summary>
|
||||
public sealed record ImageReference
|
||||
{
|
||||
public required string Registry { get; init; }
|
||||
public required string Repository { get; init; }
|
||||
public string? Tag { get; init; }
|
||||
public string? Digest { get; init; }
|
||||
|
||||
public bool IsImmutable => !string.IsNullOrEmpty(Digest);
|
||||
|
||||
public string ToTagReference() =>
|
||||
$"{Registry}/{Repository}:{Tag ?? "latest"}";
|
||||
|
||||
public string ToDigestReference() =>
|
||||
$"{Registry}/{Repository}@{Digest}";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Linkage between SBOM and container image.
|
||||
/// </summary>
|
||||
public sealed record SbomImageLinkage
|
||||
{
|
||||
public required string ImageDigest { get; init; }
|
||||
public required string SbomDigest { get; init; }
|
||||
public required string SbomFormat { get; init; }
|
||||
public required DateTimeOffset LinkedAt { get; init; }
|
||||
}
|
||||
|
||||
#endregion
@@ -0,0 +1,240 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ArtifactVerificationFixture.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 2.1
|
||||
// Description: Test fixture for artifact immutability verification
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using System.Collections.Immutable;
|
||||
using System.Security.Cryptography;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Immutability.Fixtures;
|
||||
|
||||
/// <summary>
|
||||
/// Test fixture for verifying artifact immutability across builds.
|
||||
/// </summary>
|
||||
public sealed class ArtifactVerificationFixture : IAsyncLifetime
|
||||
{
|
||||
private readonly Dictionary<string, ArtifactManifest> _manifests = [];
|
||||
private readonly string _workDir;
|
||||
|
||||
public ArtifactVerificationFixture()
|
||||
{
|
||||
_workDir = Path.Combine(Path.GetTempPath(), $"artifact-verify-{Guid.NewGuid():N}");
|
||||
Directory.CreateDirectory(_workDir);
|
||||
}
|
||||
|
||||
/// <inheritdoc/>
|
||||
public ValueTask InitializeAsync() => ValueTask.CompletedTask;
|
||||
|
||||
/// <inheritdoc/>
|
||||
public ValueTask DisposeAsync()
|
||||
{
|
||||
try
|
||||
{
|
||||
if (Directory.Exists(_workDir))
|
||||
{
|
||||
Directory.Delete(_workDir, recursive: true);
|
||||
}
|
||||
}
|
||||
catch
|
||||
{
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
return ValueTask.CompletedTask;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the working directory for artifact storage.
|
||||
/// </summary>
|
||||
public string WorkDirectory => _workDir;
|
||||
|
||||
/// <summary>
|
||||
/// Registers an artifact manifest for verification.
|
||||
/// </summary>
|
||||
public void RegisterManifest(string name, ArtifactManifest manifest)
|
||||
{
|
||||
ArgumentException.ThrowIfNullOrWhiteSpace(name);
|
||||
ArgumentNullException.ThrowIfNull(manifest);
|
||||
_manifests[name] = manifest;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets a registered manifest.
|
||||
/// </summary>
|
||||
public ArtifactManifest? GetManifest(string name)
|
||||
{
|
||||
return _manifests.TryGetValue(name, out var manifest) ? manifest : null;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Computes SHA-256 digest of a file.
|
||||
/// </summary>
|
||||
public static string ComputeFileDigest(string filePath)
|
||||
{
|
||||
using var stream = File.OpenRead(filePath);
|
||||
var hash = SHA256.HashData(stream);
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Computes SHA-256 digest of byte content.
|
||||
/// </summary>
|
||||
public static string ComputeDigest(byte[] content)
|
||||
{
|
||||
var hash = SHA256.HashData(content);
|
||||
return $"sha256:{Convert.ToHexString(hash).ToLowerInvariant()}";
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Creates a test artifact file containing the supplied content.
|
||||
/// </summary>
|
||||
public string CreateTestArtifact(string name, byte[] content)
|
||||
{
|
||||
var path = Path.Combine(_workDir, name);
|
||||
var dir = Path.GetDirectoryName(path);
|
||||
if (dir is not null && !Directory.Exists(dir))
|
||||
{
|
||||
Directory.CreateDirectory(dir);
|
||||
}
|
||||
File.WriteAllBytes(path, content);
|
||||
return path;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verifies that two builds produced identical artifacts.
|
||||
/// </summary>
|
||||
public ArtifactComparisonResult CompareBuilds(
|
||||
ArtifactManifest buildA,
|
||||
ArtifactManifest buildB)
|
||||
{
|
||||
var mismatches = new List<ArtifactMismatch>();
|
||||
var onlyInA = new List<string>();
|
||||
var onlyInB = new List<string>();
|
||||
|
||||
var artifactsA = buildA.Artifacts.ToDictionary(a => a.Name);
|
||||
var artifactsB = buildB.Artifacts.ToDictionary(a => a.Name);
|
||||
|
||||
foreach (var (name, artifactA) in artifactsA)
|
||||
{
|
||||
if (artifactsB.TryGetValue(name, out var artifactB))
|
||||
{
|
||||
if (artifactA.Digest != artifactB.Digest)
|
||||
{
|
||||
mismatches.Add(new ArtifactMismatch(
|
||||
name,
|
||||
artifactA.Digest,
|
||||
artifactB.Digest,
|
||||
artifactA.Size,
|
||||
artifactB.Size));
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
onlyInA.Add(name);
|
||||
}
|
||||
}
|
||||
|
||||
foreach (var name in artifactsB.Keys)
|
||||
{
|
||||
if (!artifactsA.ContainsKey(name))
|
||||
{
|
||||
onlyInB.Add(name);
|
||||
}
|
||||
}
|
||||
|
||||
return new ArtifactComparisonResult(
|
||||
ByteIdentical: mismatches.Count == 0 && onlyInA.Count == 0 && onlyInB.Count == 0,
|
||||
Mismatches: [.. mismatches],
|
||||
OnlyInBuildA: [.. onlyInA],
|
||||
OnlyInBuildB: [.. onlyInB]);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Verifies that each build artifact digest is referenced by the SBOM (content-addressable linkage).
|
||||
/// </summary>
|
||||
public SbomLinkageResult VerifySbomLinkage(
|
||||
ArtifactManifest artifactManifest,
|
||||
SbomManifest sbomManifest)
|
||||
{
|
||||
var linkedArtifacts = new List<string>();
|
||||
var unlinkedArtifacts = new List<string>();
|
||||
|
||||
foreach (var artifact in artifactManifest.Artifacts)
|
||||
{
|
||||
var isLinked = sbomManifest.ReferencedDigests.Contains(artifact.Digest);
|
||||
if (isLinked)
|
||||
{
|
||||
linkedArtifacts.Add(artifact.Name);
|
||||
}
|
||||
else
|
||||
{
|
||||
unlinkedArtifacts.Add(artifact.Name);
|
||||
}
|
||||
}
|
||||
|
||||
return new SbomLinkageResult(
|
||||
AllLinked: unlinkedArtifacts.Count == 0,
|
||||
LinkedArtifacts: [.. linkedArtifacts],
|
||||
UnlinkedArtifacts: [.. unlinkedArtifacts],
|
||||
SbomDigest: sbomManifest.Digest);
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Manifest describing build artifacts.
|
||||
/// </summary>
|
||||
public sealed record ArtifactManifest
|
||||
{
|
||||
public required string BuildId { get; init; }
|
||||
public required DateTimeOffset BuildTime { get; init; }
|
||||
public required ImmutableArray<ArtifactEntry> Artifacts { get; init; }
|
||||
public ImmutableDictionary<string, string>? Metadata { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Entry for a single artifact in a manifest.
|
||||
/// </summary>
|
||||
public sealed record ArtifactEntry(
|
||||
string Name,
|
||||
string Digest,
|
||||
long Size,
|
||||
string? MediaType);
|
||||
|
||||
/// <summary>
|
||||
/// Result of comparing two builds.
|
||||
/// </summary>
|
||||
public sealed record ArtifactComparisonResult(
|
||||
bool ByteIdentical,
|
||||
ImmutableArray<ArtifactMismatch> Mismatches,
|
||||
ImmutableArray<string> OnlyInBuildA,
|
||||
ImmutableArray<string> OnlyInBuildB);
|
||||
|
||||
/// <summary>
|
||||
/// Details of a mismatched artifact between builds.
|
||||
/// </summary>
|
||||
public sealed record ArtifactMismatch(
|
||||
string Name,
|
||||
string DigestA,
|
||||
string DigestB,
|
||||
long SizeA,
|
||||
long SizeB);
|
||||
|
||||
/// <summary>
|
||||
/// SBOM manifest for linkage verification.
|
||||
/// </summary>
|
||||
public sealed record SbomManifest
|
||||
{
|
||||
public required string Digest { get; init; }
|
||||
public required string Format { get; init; }
|
||||
public required ImmutableHashSet<string> ReferencedDigests { get; init; }
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Result of SBOM linkage verification.
|
||||
/// </summary>
|
||||
public sealed record SbomLinkageResult(
|
||||
bool AllLinked,
|
||||
ImmutableArray<string> LinkedArtifacts,
|
||||
ImmutableArray<string> UnlinkedArtifacts,
|
||||
string SbomDigest);
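// Illustrative extension (ManifestContentDigest is not part of the fixture above): a
// build manifest can itself be made content-addressable by digesting a canonical
// projection of its artifact names and digests, allowing two builds to be compared
// with a single string.
public static class ManifestContentDigest
{
    public static string Compute(ArtifactManifest manifest)
    {
        // BuildId and BuildTime are deliberately excluded so byte-identical builds
        // produced at different times still yield the same digest.
        var canonical = string.Join("\n",
            manifest.Artifacts
                .OrderBy(a => a.Name, StringComparer.Ordinal)
                .Select(a => $"{a.Name}={a.Digest}"));

        return ArtifactVerificationFixture.ComputeDigest(
            System.Text.Encoding.UTF8.GetBytes(canonical));
    }
}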
@@ -0,0 +1,37 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project Sdk="Microsoft.NET.Sdk">
|
||||
<PropertyGroup>
|
||||
<TargetFramework>net10.0</TargetFramework>
|
||||
<Nullable>enable</Nullable>
|
||||
<ImplicitUsings>enable</ImplicitUsings>
|
||||
<IsPackable>false</IsPackable>
|
||||
<IsTestProject>true</IsTestProject>
|
||||
<TreatWarningsAsErrors>true</TreatWarningsAsErrors>
|
||||
<LangVersion>preview</LangVersion>
|
||||
<!-- Enable xUnit v3 (project doesn't end with .Tests so needs explicit opt-in) -->
|
||||
<UseXunitV3>true</UseXunitV3>
|
||||
<!-- Suppress xUnit analyzer warnings (same as Directory.Build.props does for .Tests projects) -->
|
||||
<NoWarn>$(NoWarn);xUnit1031;xUnit1041;xUnit1051;xUnit1026;xUnit1013;xUnit2013;xUnit3003</NoWarn>
|
||||
</PropertyGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<!-- xUnit packages (project doesn't end with .Tests so must be explicit) -->
|
||||
<PackageReference Include="Microsoft.NET.Test.Sdk" />
|
||||
<PackageReference Include="xunit.v3" />
|
||||
<PackageReference Include="xunit.runner.visualstudio">
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
</PackageReference>
|
||||
<PackageReference Include="FluentAssertions" />
|
||||
<PackageReference Include="Moq" />
|
||||
<PackageReference Include="coverlet.collector">
|
||||
<IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
|
||||
<PrivateAssets>all</PrivateAssets>
|
||||
</PackageReference>
|
||||
</ItemGroup>
|
||||
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
|
||||
<ProjectReference Include="..\..\..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
|
||||
</ItemGroup>
|
||||
</Project>
|
||||
@@ -0,0 +1,451 @@
|
||||
// -----------------------------------------------------------------------------
|
||||
// ColdPathLatencyTests.cs
|
||||
// Sprint: Testing Enhancement Advisory - Phase 3.4
|
||||
// Description: Tests for cold-start latency budgets (first request after service start)
|
||||
// -----------------------------------------------------------------------------
|
||||
|
||||
using FluentAssertions;
|
||||
using StellaOps.TestKit;
|
||||
using Xunit;
|
||||
|
||||
namespace StellaOps.Integration.Performance;
|
||||
|
||||
/// <summary>
|
||||
/// Tests for cold-start latency (first request after service initialization).
|
||||
/// Validates that services can respond within budget even on first request.
|
||||
/// </summary>
|
||||
[Trait("Category", TestCategories.Performance)]
|
||||
[Trait("Category", "Latency")]
|
||||
[Trait("Category", "ColdPath")]
|
||||
public class ColdPathLatencyTests : IClassFixture<PerformanceTestFixture>
|
||||
{
|
||||
private readonly PerformanceTestFixture _fixture;
|
||||
private readonly LatencyBudgetEnforcer _enforcer;
|
||||
|
||||
public ColdPathLatencyTests(PerformanceTestFixture fixture)
|
||||
{
|
||||
_fixture = fixture;
|
||||
_enforcer = new LatencyBudgetEnforcer();
|
||||
_enforcer.RegisterDefaultBudgets();
|
||||
}
|
||||
|
||||
#region Scanner Cold Start Tests
|
||||
|
||||
[Fact]
|
||||
public async Task FirstRequest_ColdStart_CompletesWithin5Seconds()
|
||||
{
|
||||
// Arrange - Simulate cold start by creating new service instance
|
||||
var coldStartSimulator = new ColdStartSimulator("scanner");
|
||||
await coldStartSimulator.ResetStateAsync();
|
||||
|
||||
// Act - First request (cold)
|
||||
var measurement = await _enforcer.MeasureAsync(
|
||||
"scanner.scan",
|
||||
() => coldStartSimulator.SimulateFirstRequestAsync(),
|
||||
isColdStart: true);
|
||||
|
||||
// Assert
|
||||
measurement.Success.Should().BeTrue();
|
||||
measurement.IsColdStart.Should().BeTrue();
|
||||
|
||||
var result = _enforcer.VerifyWithinBudget(measurement);
|
||||
result.IsWithinBudget.Should().BeTrue(
|
||||
$"Cold start took {measurement.Duration.TotalMilliseconds:F0}ms, " +
|
||||
$"budget is {result.ExpectedBudget.TotalMilliseconds:F0}ms");
|
||||
|
||||
_fixture.RecordMeasurement("scanner_cold_start_ms", measurement.Duration.TotalMilliseconds);
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task SbomGeneration_ColdStart_CompletesWithin3Seconds()
|
||||
{
|
||||
// Arrange
|
||||
var simulator = new ColdStartSimulator("sbom-generator");
|
||||
await simulator.ResetStateAsync();
|
||||
|
||||
// Act
|
||||
var measurement = await _enforcer.MeasureAsync(
|
||||
"scanner.sbom",
|
||||
() => simulator.SimulateSbomGenerationAsync(),
|
||||
isColdStart: true);
|
||||
|
||||
// Assert
|
||||
measurement.Success.Should().BeTrue();
|
||||
var result = _enforcer.VerifyWithinBudget(measurement);
|
||||
result.IsWithinBudget.Should().BeTrue(
|
||||
$"SBOM generation cold start: {measurement.Duration.TotalMilliseconds:F0}ms, " +
|
||||
$"budget: {result.ExpectedBudget.TotalMilliseconds:F0}ms");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Concelier Cold Start Tests
|
||||
|
||||
[Fact]
|
||||
public async Task ConcelierLookup_ColdStart_CompletesWithin2Seconds()
|
||||
{
|
||||
// Arrange
|
||||
var simulator = new ColdStartSimulator("concelier");
|
||||
await simulator.ResetStateAsync();
|
||||
|
||||
// Act
|
||||
var measurement = await _enforcer.MeasureAsync(
|
||||
"concelier.lookup",
|
||||
() => simulator.SimulateAdvisoryLookupAsync(),
|
||||
isColdStart: true);
|
||||
|
||||
// Assert
|
||||
measurement.Success.Should().BeTrue();
|
||||
var result = _enforcer.VerifyWithinBudget(measurement);
|
||||
result.IsWithinBudget.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task ConcelierMerge_ColdStart_CompletesWithin4Seconds()
|
||||
{
|
||||
// Arrange
|
||||
var simulator = new ColdStartSimulator("concelier");
|
||||
await simulator.ResetStateAsync();
|
||||
|
||||
// Act
|
||||
var measurement = await _enforcer.MeasureAsync(
|
||||
"concelier.merge",
|
||||
() => simulator.SimulateAdvisoryMergeAsync(),
|
||||
isColdStart: true);
|
||||
|
||||
// Assert
|
||||
measurement.Success.Should().BeTrue();
|
||||
var result = _enforcer.VerifyWithinBudget(measurement);
|
||||
result.IsWithinBudget.Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Policy Cold Start Tests
|
||||
|
||||
[Fact]
|
||||
public async Task PolicyEvaluate_ColdStart_CompletesWithin2Seconds()
|
||||
{
|
||||
// Arrange
|
||||
var simulator = new ColdStartSimulator("policy");
|
||||
await simulator.ResetStateAsync();
|
||||
|
||||
// Act
|
||||
var measurement = await _enforcer.MeasureAsync(
|
||||
"policy.evaluate",
|
||||
() => simulator.SimulatePolicyEvaluationAsync(),
|
||||
isColdStart: true);
|
||||
|
||||
// Assert
|
||||
measurement.Success.Should().BeTrue();
|
||||
var result = _enforcer.VerifyWithinBudget(measurement);
|
||||
result.IsWithinBudget.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task PolicyScore_ColdStart_CompletesWithin1Second()
|
||||
{
|
||||
// Arrange
|
||||
var simulator = new ColdStartSimulator("policy");
|
||||
await simulator.ResetStateAsync();
|
||||
|
||||
// Act
|
||||
var measurement = await _enforcer.MeasureAsync(
|
||||
"policy.score",
|
||||
() => simulator.SimulateRiskScoringAsync(),
|
||||
isColdStart: true);
|
||||
|
||||
// Assert
|
||||
measurement.Success.Should().BeTrue();
|
||||
var result = _enforcer.VerifyWithinBudget(measurement);
|
||||
result.IsWithinBudget.Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Authority Cold Start Tests
|
||||
|
||||
[Fact]
|
||||
public async Task AuthorityToken_ColdStart_CompletesWithin1Second()
|
||||
{
|
||||
// Arrange
|
||||
var simulator = new ColdStartSimulator("authority");
|
||||
await simulator.ResetStateAsync();
|
||||
|
||||
// Act
|
||||
var measurement = await _enforcer.MeasureAsync(
|
||||
"authority.token",
|
||||
() => simulator.SimulateTokenIssuanceAsync(),
|
||||
isColdStart: true);
|
||||
|
||||
// Assert
|
||||
measurement.Success.Should().BeTrue();
|
||||
var result = _enforcer.VerifyWithinBudget(measurement);
|
||||
result.IsWithinBudget.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task AuthorityValidate_ColdStart_CompletesWithin500ms()
|
||||
{
|
||||
// Arrange
|
||||
var simulator = new ColdStartSimulator("authority");
|
||||
await simulator.ResetStateAsync();
|
||||
|
||||
// Act
|
||||
var measurement = await _enforcer.MeasureAsync(
|
||||
"authority.validate",
|
||||
() => simulator.SimulateTokenValidationAsync(),
|
||||
isColdStart: true);
|
||||
|
||||
// Assert
|
||||
measurement.Success.Should().BeTrue();
|
||||
var result = _enforcer.VerifyWithinBudget(measurement);
|
||||
result.IsWithinBudget.Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Attestor Cold Start Tests
|
||||
|
||||
[Fact]
|
||||
public async Task AttestorSign_ColdStart_CompletesWithin2Seconds()
|
||||
{
|
||||
// Arrange
|
||||
var simulator = new ColdStartSimulator("attestor");
|
||||
await simulator.ResetStateAsync();
|
||||
|
||||
// Act
|
||||
var measurement = await _enforcer.MeasureAsync(
|
||||
"attestor.sign",
|
||||
() => simulator.SimulateSigningAsync(),
|
||||
isColdStart: true);
|
||||
|
||||
// Assert
|
||||
measurement.Success.Should().BeTrue();
|
||||
var result = _enforcer.VerifyWithinBudget(measurement);
|
||||
result.IsWithinBudget.Should().BeTrue();
|
||||
}
|
||||
|
||||
[Fact]
|
||||
public async Task AttestorVerify_ColdStart_CompletesWithin1Second()
|
||||
{
|
||||
// Arrange
|
||||
var simulator = new ColdStartSimulator("attestor");
|
||||
await simulator.ResetStateAsync();
|
||||
|
||||
// Act
|
||||
var measurement = await _enforcer.MeasureAsync(
|
||||
"attestor.verify",
|
||||
() => simulator.SimulateVerificationAsync(),
|
||||
isColdStart: true);
|
||||
|
||||
// Assert
|
||||
measurement.Success.Should().BeTrue();
|
||||
var result = _enforcer.VerifyWithinBudget(measurement);
|
||||
result.IsWithinBudget.Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cold Start with Payload Size Variations
|
||||
|
||||
[Theory]
|
||||
[InlineData("small", 1)] // 1 KB payload
|
||||
[InlineData("medium", 100)] // 100 KB payload
|
||||
[InlineData("large", 1000)] // 1 MB payload
|
||||
public async Task ColdStart_WithVaryingPayloadSizes_StaysWithinBudget(string size, int sizeKb)
|
||||
{
|
||||
// Arrange
|
||||
var simulator = new ColdStartSimulator("scanner");
|
||||
await simulator.ResetStateAsync();
|
||||
simulator.ConfigurePayloadSize(sizeKb * 1024);
|
||||
|
||||
// Register a larger budget for larger payloads
|
||||
var budget = TimeSpan.FromSeconds(5 + (sizeKb / 500.0));
|
||||
_enforcer.RegisterBudget($"scanner.scan.{size}", budget, TimeSpan.FromMilliseconds(500));
|
||||
|
||||
// Act
|
||||
var measurement = await _enforcer.MeasureAsync(
|
||||
$"scanner.scan.{size}",
|
||||
() => simulator.SimulateFirstRequestAsync(),
|
||||
isColdStart: true);
|
||||
|
||||
// Assert
|
||||
measurement.Success.Should().BeTrue();
|
||||
var result = _enforcer.VerifyWithinBudget(measurement);
|
||||
result.IsWithinBudget.Should().BeTrue(
|
||||
$"Cold start with {size} payload: {measurement.Duration.TotalMilliseconds:F0}ms");
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cold Start After Extended Idle
|
||||
|
||||
[Fact]
|
||||
public async Task ColdStart_AfterExtendedIdle_StillMeetsBudget()
|
||||
{
|
||||
// Arrange - Simulate a service that has been idle long enough for resources to be reclaimed
|
||||
var simulator = new ColdStartSimulator("scanner");
|
||||
await simulator.ResetStateAsync();
|
||||
simulator.SimulateExtendedIdle(TimeSpan.FromMinutes(30));
|
||||
|
||||
// Act
|
||||
var measurement = await _enforcer.MeasureAsync(
|
||||
"scanner.scan",
|
||||
() => simulator.SimulateFirstRequestAsync(),
|
||||
isColdStart: true);
|
||||
|
||||
// Assert
|
||||
measurement.Success.Should().BeTrue();
|
||||
var result = _enforcer.VerifyWithinBudget(measurement);
|
||||
result.IsWithinBudget.Should().BeTrue();
|
||||
}
|
||||
|
||||
#endregion
|
||||
|
||||
#region Cold Start Statistics
|
||||
|
||||
[Fact]
|
||||
public async Task ColdStart_MultipleSamples_GeneratesStatistics()
|
||||
{
|
||||
// Arrange
|
||||
const int sampleCount = 5;
|
||||
var simulator = new ColdStartSimulator("scanner");
|
||||
|
||||
// Act - Collect multiple cold start samples
|
||||
for (var i = 0; i < sampleCount; i++)
|
||||
{
|
||||
await simulator.ResetStateAsync();
|
||||
await _enforcer.MeasureAsync(
|
||||
"scanner.scan",
|
||||
() => simulator.SimulateFirstRequestAsync(),
|
||||
isColdStart: true);
|
||||
}
|
||||
|
||||
// Assert - Generate statistics
|
||||
var stats = _enforcer.ComputeStatistics("scanner.scan");
|
||||
stats.SampleCount.Should().Be(sampleCount);
|
||||
stats.P95.Should().BeLessThan(TimeSpan.FromSeconds(5));
|
||||
stats.Max.Should().BeGreaterThan(TimeSpan.Zero);
|
||||
}
|
||||
|
||||
#endregion
|
||||
}
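// Sketch of the P95 arithmetic behind ColdStart_MultipleSamples_GeneratesStatistics
// (PercentileSketch is illustrative; the real LatencyBudgetEnforcer.ComputeStatistics
// may differ): with N sorted samples, P95 is the value at index ceil(0.95 * N) - 1.
internal static class PercentileSketch
{
    public static TimeSpan P95(IReadOnlyList<TimeSpan> samples)
    {
        var sorted = samples.OrderBy(s => s).ToArray();
        var index = (int)Math.Ceiling(0.95 * sorted.Length) - 1;
        return sorted[Math.Clamp(index, 0, sorted.Length - 1)];
    }
}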
|
||||
|
||||
/// <summary>
|
||||
/// Simulates cold-start scenarios for services.
|
||||
/// </summary>
|
||||
public sealed class ColdStartSimulator
|
||||
{
|
||||
private readonly string _serviceName;
|
||||
private readonly Random _random = new(42);
|
||||
private int _payloadSize = 1024;
|
||||
private TimeSpan _idleTime = TimeSpan.Zero;
|
||||
private bool _isWarm;
|
||||
|
||||
public ColdStartSimulator(string serviceName)
|
||||
{
|
||||
_serviceName = serviceName;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Resets the simulator to cold state.
|
||||
/// </summary>
|
||||
public Task ResetStateAsync()
|
||||
{
|
||||
_isWarm = false;
|
||||
return Task.CompletedTask;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Configures payload size for the simulated request.
|
||||
/// </summary>
|
||||
public void ConfigurePayloadSize(int bytes)
|
||||
{
|
||||
_payloadSize = bytes;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Simulates extended idle period.
|
||||
/// </summary>
|
||||
public void SimulateExtendedIdle(TimeSpan duration)
|
||||
{
|
||||
_idleTime = duration;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Simulates the first request (cold start).
|
||||
/// </summary>
|
||||
public async Task SimulateFirstRequestAsync()
|
||||
{
|
||||
// Simulate initialization overhead
|
||||
var initDelay = GetInitializationDelay();
|
||||
await Task.Delay(initDelay);
|
||||
|
||||
// Simulate actual work
|
||||
var workDelay = GetWorkDelay();
|
||||
await Task.Delay(workDelay);
|
||||
|
||||
_isWarm = true;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Simulates a subsequent request (warm path).
|
||||
/// </summary>
|
||||
public async Task SimulateSubsequentRequestAsync()
|
||||
{
|
||||
// No initialization overhead for warm requests
|
||||
var workDelay = GetWorkDelay();
|
||||
await Task.Delay(workDelay);
|
||||
}
|
||||
|
||||
public Task SimulateSbomGenerationAsync() => SimulateOperationAsync(50, 150);
|
||||
public Task SimulateAdvisoryLookupAsync() => SimulateOperationAsync(30, 100);
|
||||
public Task SimulateAdvisoryMergeAsync() => SimulateOperationAsync(100, 300);
|
||||
public Task SimulatePolicyEvaluationAsync() => SimulateOperationAsync(40, 120);
|
||||
public Task SimulateRiskScoringAsync() => SimulateOperationAsync(20, 60);
|
||||
public Task SimulateTokenIssuanceAsync() => SimulateOperationAsync(10, 50);
|
||||
public Task SimulateTokenValidationAsync() => SimulateOperationAsync(5, 20);
|
||||
public Task SimulateSigningAsync() => SimulateOperationAsync(50, 150);
|
||||
public Task SimulateVerificationAsync() => SimulateOperationAsync(30, 100);
|
||||
|
||||
private async Task SimulateOperationAsync(int minMs, int maxMs)
|
||||
{
|
||||
var delay = _isWarm
|
||||
? _random.Next(minMs, maxMs)
|
||||
: _random.Next(minMs * 2, maxMs * 3); // Cold start is slower
|
||||
|
||||
await Task.Delay(delay);
|
||||
_isWarm = true;
|
||||
}
|
||||
|
||||
private int GetInitializationDelay()
|
||||
{
|
||||
// Base initialization cost
|
||||
var baseDelay = _serviceName switch
|
||||
{
|
||||
"scanner" => 200,
|
||||
"concelier" => 100,
|
||||
"policy" => 80,
|
||||
"authority" => 50,
|
||||
"attestor" => 100,
|
||||
"sbom-generator" => 120,
|
||||
_ => 100
|
||||
};
|
||||
|
||||
// Add time for idle recovery if applicable
|
||||
if (_idleTime > TimeSpan.FromMinutes(5))
|
||||
{
|
||||
baseDelay += 50;
|
||||
}
|
||||
|
||||
return baseDelay + _random.Next(0, 50);
|
||||
}
|
||||
|
||||
private int GetWorkDelay()
|
||||
{
|
||||
// Work time proportional to payload
|
||||
var baseWork = Math.Max(10, _payloadSize / 10000);
|
||||
return baseWork + _random.Next(0, 20);
|
||||
}
|
||||
}
|
||||
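The simulator above models a cold request as a per-service initialization delay plus payload-proportional work, and a warm request as the work portion only. The standalone check below is an illustrative sketch, not part of the committed suite; the class and test names are hypothetical, and it assumes the same namespace, xUnit, and FluentAssertions references used by the surrounding test projects.

using System.Diagnostics;
using FluentAssertions;
using Xunit;

namespace StellaOps.Integration.Performance;

public class ColdStartSimulatorModelSketch
{
    [Fact]
    public async Task FirstRequest_IsSlowerThan_SubsequentRequest()
    {
        // Hypothetical sanity check: cold pays GetInitializationDelay() + GetWorkDelay(),
        // warm pays GetWorkDelay() only.
        var simulator = new ColdStartSimulator("scanner");
        await simulator.ResetStateAsync();

        var cold = Stopwatch.StartNew();
        await simulator.SimulateFirstRequestAsync();
        cold.Stop();

        var warm = Stopwatch.StartNew();
        await simulator.SimulateSubsequentRequestAsync();
        warm.Stop();

        // With the constants above, cold lands roughly in the 210-280 ms range and warm around 10-30 ms.
        warm.Elapsed.Should().BeLessThan(cold.Elapsed);
    }
}
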
@@ -0,0 +1,364 @@
// -----------------------------------------------------------------------------
// LatencyBudgetEnforcer.cs
// Sprint: Testing Enhancement Advisory - Phase 3.4
// Description: Latency budget enforcement for cold/warm path testing
// -----------------------------------------------------------------------------

using System.Diagnostics;

namespace StellaOps.Integration.Performance;

/// <summary>
/// Enforces latency budgets for service operations.
/// Distinguishes between cold-start and warm-path latency expectations.
/// </summary>
public sealed class LatencyBudgetEnforcer
{
    private readonly Dictionary<string, LatencyBudget> _budgets = new();
    private readonly List<LatencyMeasurement> _measurements = [];

    /// <summary>
    /// Default cold-start budget (first request after service start).
    /// </summary>
    public static readonly TimeSpan DefaultColdStartBudget = TimeSpan.FromSeconds(5);

    /// <summary>
    /// Default warm-path budget (subsequent requests).
    /// </summary>
    public static readonly TimeSpan DefaultWarmPathBudget = TimeSpan.FromMilliseconds(500);

    /// <summary>
    /// Registers a latency budget for an operation.
    /// </summary>
    public void RegisterBudget(
        string operationName,
        TimeSpan coldStartBudget,
        TimeSpan warmPathBudget)
    {
        _budgets[operationName] = new LatencyBudget
        {
            OperationName = operationName,
            ColdStartBudget = coldStartBudget,
            WarmPathBudget = warmPathBudget
        };
    }

    /// <summary>
    /// Registers default budgets for common operations.
    /// </summary>
    public void RegisterDefaultBudgets()
    {
        // Scanner operations
        RegisterBudget("scanner.scan", TimeSpan.FromSeconds(5), TimeSpan.FromMilliseconds(500));
        RegisterBudget("scanner.sbom", TimeSpan.FromSeconds(3), TimeSpan.FromMilliseconds(300));

        // Concelier operations
        RegisterBudget("concelier.lookup", TimeSpan.FromSeconds(2), TimeSpan.FromMilliseconds(100));
        RegisterBudget("concelier.merge", TimeSpan.FromSeconds(4), TimeSpan.FromMilliseconds(400));

        // Policy operations
        RegisterBudget("policy.evaluate", TimeSpan.FromSeconds(2), TimeSpan.FromMilliseconds(200));
        RegisterBudget("policy.score", TimeSpan.FromSeconds(1), TimeSpan.FromMilliseconds(100));

        // Authority operations
        RegisterBudget("authority.token", TimeSpan.FromSeconds(1), TimeSpan.FromMilliseconds(50));
        RegisterBudget("authority.validate", TimeSpan.FromMilliseconds(500), TimeSpan.FromMilliseconds(20));

        // Attestor operations
        RegisterBudget("attestor.sign", TimeSpan.FromSeconds(2), TimeSpan.FromMilliseconds(200));
        RegisterBudget("attestor.verify", TimeSpan.FromSeconds(1), TimeSpan.FromMilliseconds(100));
    }

    /// <summary>
    /// Gets the budget for an operation.
    /// </summary>
    public LatencyBudget GetBudget(string operationName)
    {
        if (_budgets.TryGetValue(operationName, out var budget))
        {
            return budget;
        }

        // Return default budget
        return new LatencyBudget
        {
            OperationName = operationName,
            ColdStartBudget = DefaultColdStartBudget,
            WarmPathBudget = DefaultWarmPathBudget
        };
    }

    /// <summary>
    /// Measures the latency of an operation.
    /// </summary>
    public async Task<LatencyMeasurement> MeasureAsync(
        string operationName,
        Func<Task> operation,
        bool isColdStart = false)
    {
        var stopwatch = Stopwatch.StartNew();

        try
        {
            await operation();
            stopwatch.Stop();

            var measurement = new LatencyMeasurement
            {
                OperationName = operationName,
                Duration = stopwatch.Elapsed,
                IsColdStart = isColdStart,
                Success = true,
                Timestamp = DateTimeOffset.UtcNow
            };

            _measurements.Add(measurement);
            return measurement;
        }
        catch (Exception ex)
        {
            stopwatch.Stop();

            var measurement = new LatencyMeasurement
            {
                OperationName = operationName,
                Duration = stopwatch.Elapsed,
                IsColdStart = isColdStart,
                Success = false,
                Error = ex.Message,
                Timestamp = DateTimeOffset.UtcNow
            };

            _measurements.Add(measurement);
            return measurement;
        }
    }

    /// <summary>
    /// Measures the latency of an operation with a result.
    /// </summary>
    public async Task<(LatencyMeasurement Measurement, T? Result)> MeasureAsync<T>(
        string operationName,
        Func<Task<T>> operation,
        bool isColdStart = false)
    {
        var stopwatch = Stopwatch.StartNew();

        try
        {
            var result = await operation();
            stopwatch.Stop();

            var measurement = new LatencyMeasurement
            {
                OperationName = operationName,
                Duration = stopwatch.Elapsed,
                IsColdStart = isColdStart,
                Success = true,
                Timestamp = DateTimeOffset.UtcNow
            };

            _measurements.Add(measurement);
            return (measurement, result);
        }
        catch (Exception ex)
        {
            stopwatch.Stop();

            var measurement = new LatencyMeasurement
            {
                OperationName = operationName,
                Duration = stopwatch.Elapsed,
                IsColdStart = isColdStart,
                Success = false,
                Error = ex.Message,
                Timestamp = DateTimeOffset.UtcNow
            };

            _measurements.Add(measurement);
            return (measurement, default);
        }
    }

    /// <summary>
    /// Verifies a measurement is within budget.
    /// </summary>
    public BudgetVerificationResult VerifyWithinBudget(LatencyMeasurement measurement)
    {
        var budget = GetBudget(measurement.OperationName);
        var expectedBudget = measurement.IsColdStart
            ? budget.ColdStartBudget
            : budget.WarmPathBudget;

        var isWithinBudget = measurement.Duration <= expectedBudget;
        var percentageOfBudget = measurement.Duration.TotalMilliseconds / expectedBudget.TotalMilliseconds * 100;

        return new BudgetVerificationResult
        {
            Measurement = measurement,
            Budget = budget,
            ExpectedBudget = expectedBudget,
            IsWithinBudget = isWithinBudget,
            PercentageOfBudget = percentageOfBudget,
            Overage = isWithinBudget ? TimeSpan.Zero : measurement.Duration - expectedBudget
        };
    }

    /// <summary>
    /// Gets all measurements for an operation.
    /// </summary>
    public IReadOnlyList<LatencyMeasurement> GetMeasurements(string operationName)
    {
        return _measurements
            .Where(m => m.OperationName == operationName)
            .ToList()
            .AsReadOnly();
    }

    /// <summary>
    /// Gets all measurements.
    /// </summary>
    public IReadOnlyList<LatencyMeasurement> GetAllMeasurements() => _measurements.AsReadOnly();

    /// <summary>
    /// Computes statistics for an operation.
    /// </summary>
    public LatencyStatistics ComputeStatistics(string operationName)
    {
        var measurements = _measurements
            .Where(m => m.OperationName == operationName && m.Success)
            .ToList();

        if (measurements.Count == 0)
        {
            return new LatencyStatistics
            {
                OperationName = operationName,
                SampleCount = 0
            };
        }

        var durations = measurements.Select(m => m.Duration.TotalMilliseconds).OrderBy(d => d).ToList();

        return new LatencyStatistics
        {
            OperationName = operationName,
            SampleCount = durations.Count,
            Min = TimeSpan.FromMilliseconds(durations.Min()),
            Max = TimeSpan.FromMilliseconds(durations.Max()),
            Mean = TimeSpan.FromMilliseconds(durations.Average()),
            Median = TimeSpan.FromMilliseconds(Percentile(durations, 50)),
            P95 = TimeSpan.FromMilliseconds(Percentile(durations, 95)),
            P99 = TimeSpan.FromMilliseconds(Percentile(durations, 99))
        };
    }

    /// <summary>
    /// Generates a latency report.
    /// </summary>
    public LatencyReport GenerateReport()
    {
        var operationNames = _measurements.Select(m => m.OperationName).Distinct();
        var statistics = operationNames.Select(ComputeStatistics).ToList();
        var budgetResults = _measurements.Select(VerifyWithinBudget).ToList();

        return new LatencyReport
        {
            GeneratedAt = DateTimeOffset.UtcNow,
            TotalMeasurements = _measurements.Count,
            SuccessfulMeasurements = _measurements.Count(m => m.Success),
            Statistics = statistics.AsReadOnly(),
            BudgetViolations = budgetResults.Where(r => !r.IsWithinBudget).ToList().AsReadOnly(),
            OverallComplianceRate = budgetResults.Count > 0
                ? (double)budgetResults.Count(r => r.IsWithinBudget) / budgetResults.Count * 100
                : 100
        };
    }

    /// <summary>
    /// Clears all measurements.
    /// </summary>
    public void ClearMeasurements()
    {
        _measurements.Clear();
    }

    private static double Percentile(List<double> sortedData, double percentile)
    {
        if (sortedData.Count == 0) return 0;
        if (sortedData.Count == 1) return sortedData[0];

        var index = (percentile / 100.0) * (sortedData.Count - 1);
        var lower = (int)Math.Floor(index);
        var upper = (int)Math.Ceiling(index);

        if (lower == upper) return sortedData[lower];

        var weight = index - lower;
        return sortedData[lower] * (1 - weight) + sortedData[upper] * weight;
    }
}

/// <summary>
/// Represents a latency budget for an operation.
/// </summary>
public sealed record LatencyBudget
{
    public required string OperationName { get; init; }
    public required TimeSpan ColdStartBudget { get; init; }
    public required TimeSpan WarmPathBudget { get; init; }
}

/// <summary>
/// Represents a latency measurement.
/// </summary>
public sealed record LatencyMeasurement
{
    public required string OperationName { get; init; }
    public required TimeSpan Duration { get; init; }
    public required bool IsColdStart { get; init; }
    public required bool Success { get; init; }
    public string? Error { get; init; }
    public required DateTimeOffset Timestamp { get; init; }
}

/// <summary>
/// Result of budget verification.
/// </summary>
public sealed record BudgetVerificationResult
{
    public required LatencyMeasurement Measurement { get; init; }
    public required LatencyBudget Budget { get; init; }
    public required TimeSpan ExpectedBudget { get; init; }
    public required bool IsWithinBudget { get; init; }
    public required double PercentageOfBudget { get; init; }
    public required TimeSpan Overage { get; init; }
}

/// <summary>
/// Latency statistics for an operation.
/// </summary>
public sealed record LatencyStatistics
{
    public required string OperationName { get; init; }
    public required int SampleCount { get; init; }
    public TimeSpan Min { get; init; }
    public TimeSpan Max { get; init; }
    public TimeSpan Mean { get; init; }
    public TimeSpan Median { get; init; }
    public TimeSpan P95 { get; init; }
    public TimeSpan P99 { get; init; }
}

/// <summary>
/// Comprehensive latency report.
/// </summary>
public sealed record LatencyReport
{
    public required DateTimeOffset GeneratedAt { get; init; }
    public required int TotalMeasurements { get; init; }
    public required int SuccessfulMeasurements { get; init; }
    public required IReadOnlyList<LatencyStatistics> Statistics { get; init; }
    public required IReadOnlyList<BudgetVerificationResult> BudgetViolations { get; init; }
    public required double OverallComplianceRate { get; init; }
}

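The enforcer's intended flow is register, measure, verify, then aggregate. The snippet below is a minimal usage sketch that runs the enforcer sequentially inside a single test; the wrapper class name, the "example.operation" name, and the Task.Delay payloads are placeholders rather than real StellaOps operations, and it assumes the StellaOps.Integration.Performance namespace above.

public static class LatencyBudgetEnforcerUsageSketch
{
    public static async Task RunAsync()
    {
        var enforcer = new LatencyBudgetEnforcer();
        enforcer.RegisterBudget("example.operation",
            coldStartBudget: TimeSpan.FromSeconds(2),
            warmPathBudget: TimeSpan.FromMilliseconds(200));

        // The first request is judged against the cold-start budget, later ones against the warm-path budget.
        var cold = await enforcer.MeasureAsync("example.operation", () => Task.Delay(150), isColdStart: true);
        var warm = await enforcer.MeasureAsync("example.operation", () => Task.Delay(50));

        var verdict = enforcer.VerifyWithinBudget(warm);              // compares ~50 ms against the 200 ms warm budget
        var stats = enforcer.ComputeStatistics("example.operation");  // Min/Max/Mean/Median/P95/P99 over successful samples
        var report = enforcer.GenerateReport();                       // per-operation statistics plus any budget violations
    }
}
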
@@ -8,6 +8,9 @@
    <LangVersion>preview</LangVersion>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
    <!-- Suppress xUnit analyzer warnings (same as Directory.Build.props does for .Tests projects) -->
    <NoWarn>$(NoWarn);xUnit1031;xUnit1041;xUnit1051;xUnit1026;xUnit1013;xUnit2013;xUnit3003</NoWarn>
  </PropertyGroup>

  <ItemGroup>
@@ -23,6 +26,7 @@
    <ProjectReference Include="..\..\..\Scanner\StellaOps.Scanner.WebService\StellaOps.Scanner.WebService.csproj" />
    <ProjectReference Include="..\..\..\Attestor\__Libraries\StellaOps.Attestor.ProofChain\StellaOps.Attestor.ProofChain.csproj" />
    <ProjectReference Include="..\..\..\Policy\StellaOps.Policy.Scoring\StellaOps.Policy.Scoring.csproj" />
    <ProjectReference Include="..\..\..\__Libraries\StellaOps.TestKit\StellaOps.TestKit.csproj" />
  </ItemGroup>

  <ItemGroup>

@@ -1,7 +1,7 @@
# StellaOps.Integration.Performance Task Board

This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.

| Task ID | Status | Notes |
| --- | --- | --- |

@@ -0,0 +1,460 @@
// -----------------------------------------------------------------------------
// WarmPathLatencyTests.cs
// Sprint: Testing Enhancement Advisory - Phase 3.4
// Description: Tests for warm-path latency budgets (subsequent requests)
// -----------------------------------------------------------------------------

using FluentAssertions;
using StellaOps.TestKit;
using Xunit;

namespace StellaOps.Integration.Performance;

/// <summary>
/// Tests for warm-path latency (subsequent requests after initialization).
/// Validates that services maintain low latency for steady-state operation.
/// </summary>
[Trait("Category", TestCategories.Performance)]
[Trait("Category", "Latency")]
[Trait("Category", "WarmPath")]
public class WarmPathLatencyTests : IClassFixture<PerformanceTestFixture>
{
    private readonly PerformanceTestFixture _fixture;
    private readonly LatencyBudgetEnforcer _enforcer;

    public WarmPathLatencyTests(PerformanceTestFixture fixture)
    {
        _fixture = fixture;
        _enforcer = new LatencyBudgetEnforcer();
        _enforcer.RegisterDefaultBudgets();
    }

    #region Scanner Warm Path Tests

    [Fact]
    public async Task SubsequentRequests_WarmPath_CompletesWithin500ms()
    {
        // Arrange - Warm up the service
        var simulator = new ColdStartSimulator("scanner");
        await simulator.SimulateFirstRequestAsync();

        // Act - Subsequent request (warm)
        var measurement = await _enforcer.MeasureAsync(
            "scanner.scan",
            () => simulator.SimulateSubsequentRequestAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        measurement.IsColdStart.Should().BeFalse();

        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue(
            $"Warm path took {measurement.Duration.TotalMilliseconds:F0}ms, " +
            $"budget is {result.ExpectedBudget.TotalMilliseconds:F0}ms");

        _fixture.RecordMeasurement("scanner_warm_path_ms", measurement.Duration.TotalMilliseconds);
    }

    [Fact]
    public async Task SbomGeneration_WarmPath_CompletesWithin300ms()
    {
        // Arrange - Warm up
        var simulator = new ColdStartSimulator("sbom-generator");
        await simulator.SimulateSbomGenerationAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "scanner.sbom",
            () => simulator.SimulateSbomGenerationAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Concelier Warm Path Tests

    [Fact]
    public async Task ConcelierLookup_WarmPath_CompletesWithin100ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("concelier");
        await simulator.SimulateAdvisoryLookupAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "concelier.lookup",
            () => simulator.SimulateAdvisoryLookupAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    [Fact]
    public async Task ConcelierMerge_WarmPath_CompletesWithin400ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("concelier");
        await simulator.SimulateAdvisoryMergeAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "concelier.merge",
            () => simulator.SimulateAdvisoryMergeAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Policy Warm Path Tests

    [Fact]
    public async Task PolicyEvaluate_WarmPath_CompletesWithin200ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("policy");
        await simulator.SimulatePolicyEvaluationAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "policy.evaluate",
            () => simulator.SimulatePolicyEvaluationAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    [Fact]
    public async Task PolicyScore_WarmPath_CompletesWithin100ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("policy");
        await simulator.SimulateRiskScoringAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "policy.score",
            () => simulator.SimulateRiskScoringAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Authority Warm Path Tests

    [Fact]
    public async Task AuthorityToken_WarmPath_CompletesWithin50ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("authority");
        await simulator.SimulateTokenIssuanceAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "authority.token",
            () => simulator.SimulateTokenIssuanceAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    [Fact]
    public async Task AuthorityValidate_WarmPath_CompletesWithin20ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("authority");
        await simulator.SimulateTokenValidationAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "authority.validate",
            () => simulator.SimulateTokenValidationAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Attestor Warm Path Tests

    [Fact]
    public async Task AttestorSign_WarmPath_CompletesWithin200ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("attestor");
        await simulator.SimulateSigningAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "attestor.sign",
            () => simulator.SimulateSigningAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    [Fact]
    public async Task AttestorVerify_WarmPath_CompletesWithin100ms()
    {
        // Arrange
        var simulator = new ColdStartSimulator("attestor");
        await simulator.SimulateVerificationAsync();

        // Act
        var measurement = await _enforcer.MeasureAsync(
            "attestor.verify",
            () => simulator.SimulateVerificationAsync(),
            isColdStart: false);

        // Assert
        measurement.Success.Should().BeTrue();
        var result = _enforcer.VerifyWithinBudget(measurement);
        result.IsWithinBudget.Should().BeTrue();
    }

    #endregion

    #region Sustained Load Tests

    [Fact]
    public async Task WarmPath_SustainedLoad_MaintainsLowLatency()
    {
        // Arrange - Warm up
        var simulator = new ColdStartSimulator("scanner");
        await simulator.SimulateFirstRequestAsync();

        // Act - 100 consecutive requests
        const int requestCount = 100;
        for (var i = 0; i < requestCount; i++)
        {
            await _enforcer.MeasureAsync(
                "scanner.scan",
                () => simulator.SimulateSubsequentRequestAsync(),
                isColdStart: false);
        }

        // Assert - P95 should be within budget
        var stats = _enforcer.ComputeStatistics("scanner.scan");
        stats.SampleCount.Should().Be(requestCount);
        stats.P95.Should().BeLessThanOrEqualTo(TimeSpan.FromMilliseconds(500));

        _fixture.RecordMeasurement("scanner_warm_p95_ms", stats.P95.TotalMilliseconds);
        _fixture.RecordMeasurement("scanner_warm_mean_ms", stats.Mean.TotalMilliseconds);
    }

    [Fact]
    public async Task WarmPath_BurstLoad_StaysWithinBudget()
    {
        // Arrange - Warm up each service
        var scannerSim = new ColdStartSimulator("scanner");
        var concelierSim = new ColdStartSimulator("concelier");
        var policySim = new ColdStartSimulator("policy");

        await scannerSim.SimulateFirstRequestAsync();
        await concelierSim.SimulateAdvisoryLookupAsync();
        await policySim.SimulatePolicyEvaluationAsync();

        // Act - Burst of requests across services
        var tasks = new List<Task<LatencyMeasurement>>();
        for (var i = 0; i < 30; i++)
        {
            tasks.Add(_enforcer.MeasureAsync(
                "scanner.scan",
                () => scannerSim.SimulateSubsequentRequestAsync(),
                isColdStart: false));
            tasks.Add(_enforcer.MeasureAsync(
                "concelier.lookup",
                () => concelierSim.SimulateAdvisoryLookupAsync(),
                isColdStart: false));
            tasks.Add(_enforcer.MeasureAsync(
                "policy.evaluate",
                () => policySim.SimulatePolicyEvaluationAsync(),
                isColdStart: false));
        }

        await Task.WhenAll(tasks);

        // Assert - All measurements within budget
        var report = _enforcer.GenerateReport();
        report.BudgetViolations.Should().BeEmpty(
            "All warm path requests should complete within budget");
    }

    #endregion

    #region Latency Consistency Tests

    [Fact]
    public async Task WarmPath_LatencyVariance_StaysAcceptable()
    {
        // Arrange
        var simulator = new ColdStartSimulator("scanner");
        await simulator.SimulateFirstRequestAsync();

        // Act - Collect samples
        const int sampleCount = 50;
        for (var i = 0; i < sampleCount; i++)
        {
            await _enforcer.MeasureAsync(
                "scanner.scan",
                () => simulator.SimulateSubsequentRequestAsync(),
                isColdStart: false);
        }

        // Assert - P99/Median ratio should be reasonable (< 3x)
        var stats = _enforcer.ComputeStatistics("scanner.scan");
        var p99ToMedianRatio = stats.P99.TotalMilliseconds / stats.Median.TotalMilliseconds;
        p99ToMedianRatio.Should().BeLessThan(3.0,
            "P99 latency should not exceed 3x median latency");
    }

    [Fact]
    public async Task WarmPath_NoLatencySpikes_OverTime()
    {
        // Arrange
        var simulator = new ColdStartSimulator("concelier");
        await simulator.SimulateAdvisoryLookupAsync();

        // Act - Extended test with pauses
        var maxLatency = TimeSpan.Zero;
        var budget = _enforcer.GetBudget("concelier.lookup").WarmPathBudget;

        for (var batch = 0; batch < 5; batch++)
        {
            // Process batch
            for (var i = 0; i < 10; i++)
            {
                var measurement = await _enforcer.MeasureAsync(
                    "concelier.lookup",
                    () => simulator.SimulateAdvisoryLookupAsync(),
                    isColdStart: false);

                if (measurement.Duration > maxLatency)
                {
                    maxLatency = measurement.Duration;
                }
            }

            // Brief pause between batches
            await Task.Delay(10);
        }

        // Assert - Max latency should not spike above budget
        maxLatency.Should().BeLessThanOrEqualTo(budget,
            $"Max latency was {maxLatency.TotalMilliseconds:F0}ms, budget is {budget.TotalMilliseconds:F0}ms");
    }

    #endregion

    #region Cold to Warm Transition Tests

    [Fact]
    public async Task ColdToWarm_TransitionIsSmooth()
    {
        // Arrange
        var simulator = new ColdStartSimulator("scanner");
        await simulator.ResetStateAsync();

        // Act - First request (cold)
        var coldMeasurement = await _enforcer.MeasureAsync(
            "scanner.scan",
            () => simulator.SimulateFirstRequestAsync(),
            isColdStart: true);

        // Subsequent requests (warm)
        var warmMeasurements = new List<LatencyMeasurement>();
        for (var i = 0; i < 5; i++)
        {
            var measurement = await _enforcer.MeasureAsync(
                "scanner.scan",
                () => simulator.SimulateSubsequentRequestAsync(),
                isColdStart: false);
            warmMeasurements.Add(measurement);
        }

        // Assert
        coldMeasurement.Success.Should().BeTrue();
        warmMeasurements.Should().AllSatisfy(m => m.Success.Should().BeTrue());

        // Warm requests should be significantly faster than cold
        var avgWarmLatency = TimeSpan.FromMilliseconds(
            warmMeasurements.Average(m => m.Duration.TotalMilliseconds));
        avgWarmLatency.Should().BeLessThan(coldMeasurement.Duration,
            "Warm path should be faster than cold start");
    }

    #endregion

    #region Report Generation Tests

    [Fact]
    public async Task GenerateLatencyReport_AfterMultipleOperations()
    {
        // Arrange & Act - Run various operations
        var scannerSim = new ColdStartSimulator("scanner");
        var concelierSim = new ColdStartSimulator("concelier");

        // Cold starts
        await _enforcer.MeasureAsync("scanner.scan",
            () => scannerSim.SimulateFirstRequestAsync(), isColdStart: true);
        await _enforcer.MeasureAsync("concelier.lookup",
            () => concelierSim.SimulateAdvisoryLookupAsync(), isColdStart: true);

        // Warm paths
        for (var i = 0; i < 10; i++)
        {
            await _enforcer.MeasureAsync("scanner.scan",
                () => scannerSim.SimulateSubsequentRequestAsync(), isColdStart: false);
            await _enforcer.MeasureAsync("concelier.lookup",
                () => concelierSim.SimulateAdvisoryLookupAsync(), isColdStart: false);
        }

        // Generate report
        var report = _enforcer.GenerateReport();

        // Assert
        report.TotalMeasurements.Should().Be(22);
        report.SuccessfulMeasurements.Should().Be(22);
        report.Statistics.Should().HaveCount(2);
        report.OverallComplianceRate.Should().BeGreaterThanOrEqualTo(90);

        // Save report for verification
        _fixture.SaveReport("latency-report.txt",
            $"Generated: {report.GeneratedAt}\n" +
            $"Compliance: {report.OverallComplianceRate:F1}%\n" +
            $"Violations: {report.BudgetViolations.Count}");
    }

    #endregion
}

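For readers checking the P95 and median assertions above by hand, the Percentile helper in LatencyBudgetEnforcer interpolates linearly between the two nearest sorted samples. A worked example with hypothetical sample values:

// Hypothetical sorted warm-path samples (ms): 10, 12, 14, 18, 40
// Median: index = 0.50 * (5 - 1) = 2.0  -> falls exactly on sortedData[2]            -> 14.0 ms
// P95:    index = 0.95 * (5 - 1) = 3.8  -> 18 * (1 - 0.8) + 40 * 0.8 = 3.6 + 32.0    -> 35.6 ms
// P99:    index = 0.99 * (5 - 1) = 3.96 -> 18 * 0.04 + 40 * 0.96   = 0.72 + 38.4     -> 39.12 ms
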
@@ -1,7 +1,7 @@
# StellaOps.Integration.Platform Task Board

This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.

| Task ID | Status | Notes |
| --- | --- | --- |

@@ -15,6 +15,8 @@
    <Nullable>enable</Nullable>
    <IsPackable>false</IsPackable>
    <IsTestProject>true</IsTestProject>
    <!-- Suppress xUnit1051: Integration tests don't need responsive cancellation -->
    <NoWarn>$(NoWarn);xUnit1051</NoWarn>
  </PropertyGroup>

  <ItemGroup> <PackageReference Include="xunit.v3" />

@@ -1,7 +1,7 @@
# StellaOps.Integration.ProofChain Task Board

This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.

| Task ID | Status | Notes |
| --- | --- | --- |

@@ -1,7 +1,7 @@
# StellaOps.Integration.Reachability Task Board

This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.

| Task ID | Status | Notes |
| --- | --- | --- |

@@ -1,7 +1,7 @@
# StellaOps.Integration.Unknowns Task Board

This board mirrors active sprint tasks for this module.
Source of truth: `docs/implplan/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.
Source of truth: `docs-archived/implplan/2025-12-29-csproj-audit/SPRINT_20251229_049_BE_csproj_audit_maint_tests.md`.

| Task ID | Status | Notes |
| --- | --- | --- |